hatchet-sdk 0.45.3b1__py3-none-any.whl → 0.46.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hatchet-sdk might be problematic.
- hatchet_sdk/clients/admin.py +63 -127
- hatchet_sdk/clients/dispatcher/action_listener.py +18 -21
- hatchet_sdk/clients/events.py +48 -85
- hatchet_sdk/loader.py +14 -40
- hatchet_sdk/opentelemetry/instrumentor.py +323 -0
- hatchet_sdk/worker/runner/runner.py +102 -140
- {hatchet_sdk-0.45.3b1.dist-info → hatchet_sdk-0.46.0.dist-info}/METADATA +8 -8
- {hatchet_sdk-0.45.3b1.dist-info → hatchet_sdk-0.46.0.dist-info}/RECORD +10 -10
- {hatchet_sdk-0.45.3b1.dist-info → hatchet_sdk-0.46.0.dist-info}/entry_points.txt +1 -0
- hatchet_sdk/utils/tracing.py +0 -70
- {hatchet_sdk-0.45.3b1.dist-info → hatchet_sdk-0.46.0.dist-info}/WHEEL +0 -0
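The headline change is an opt-in OpenTelemetry integration: hatchet_sdk/opentelemetry/instrumentor.py adds a HatchetInstrumentor, the always-on tracing helpers in hatchet_sdk/utils/tracing.py are removed, and the OpenTelemetry packages move behind a new `otel` extra. A minimal sketch of enabling it is shown below, assuming a tracer provider is configured elsewhere (the bare SDK provider here is illustrative only):

# Sketch only: enable the instrumentation added in 0.46.0.
# Requires the optional dependencies: pip install "hatchet-sdk[otel]"
from opentelemetry.sdk.trace import TracerProvider

from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor

# Any configured TracerProvider works; exporters and span processors are
# omitted here for brevity.
tracer_provider = TracerProvider()

# Wraps the worker handlers and the event/admin clients so that step runs,
# event pushes, and workflow triggers are recorded as spans.
HatchetInstrumentor(tracer_provider=tracer_provider).instrument()

Calling instrument() patches the handlers in place via wrapt's wrap_function_wrapper; uninstrument() unwraps them again, as shown at the end of the new module.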
hatchet_sdk/opentelemetry/instrumentor.py (new file)
@@ -0,0 +1,323 @@
+from importlib.metadata import version
+from typing import Any, Callable, Collection, Coroutine
+
+try:
+    from opentelemetry.context import Context
+    from opentelemetry.instrumentation.instrumentor import (  # type: ignore[attr-defined]
+        BaseInstrumentor,
+    )
+    from opentelemetry.instrumentation.utils import unwrap
+    from opentelemetry.metrics import MeterProvider, NoOpMeterProvider, get_meter
+    from opentelemetry.trace import (
+        NoOpTracerProvider,
+        StatusCode,
+        TracerProvider,
+        get_tracer,
+    )
+    from opentelemetry.trace.propagation.tracecontext import (
+        TraceContextTextMapPropagator,
+    )
+    from wrapt import wrap_function_wrapper  # type: ignore[import-untyped]
+except (RuntimeError, ImportError, ModuleNotFoundError):
+    raise ModuleNotFoundError(
+        "To use the HatchetInstrumentor, you must install Hatchet's `otel` extra using (e.g.) `pip install hatchet-sdk[otel]`"
+    )
+
+import hatchet_sdk
+from hatchet_sdk.clients.admin import (
+    AdminClient,
+    TriggerWorkflowOptions,
+    WorkflowRunDict,
+)
+from hatchet_sdk.clients.dispatcher.action_listener import Action
+from hatchet_sdk.clients.events import (
+    BulkPushEventWithMetadata,
+    EventClient,
+    PushEventOptions,
+)
+from hatchet_sdk.contracts.events_pb2 import Event
+from hatchet_sdk.worker.runner.runner import Runner
+from hatchet_sdk.workflow_run import WorkflowRunRef
+
+hatchet_sdk_version = version("hatchet-sdk")
+
+InstrumentKwargs = TracerProvider | MeterProvider | None
+
+
+class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
+    OTEL_TRACEPARENT_KEY = "traceparent"
+
+    def __init__(
+        self,
+        tracer_provider: TracerProvider,
+        meter_provider: MeterProvider = NoOpMeterProvider(),
+    ):
+        self.tracer_provider = tracer_provider
+        self.meter_provider = meter_provider
+
+        super().__init__()
+
+    def create_traceparent(self) -> str | None:
+        carrier: dict[str, str] = {}
+        TraceContextTextMapPropagator().inject(carrier)
+
+        return carrier.get("traceparent")
+
+    def parse_carrier_from_metadata(
+        self, metadata: dict[str, str] | None
+    ) -> Context | None:
+        if not metadata:
+            return None
+
+        traceparent = metadata.get(self.OTEL_TRACEPARENT_KEY)
+
+        if not traceparent:
+            return None
+
+        return TraceContextTextMapPropagator().extract(
+            {self.OTEL_TRACEPARENT_KEY: traceparent}
+        )
+
+    def inject_traceparent_into_metadata(
+        self, metadata: dict[str, str], traceparent: str | None
+    ) -> dict[str, str]:
+        if traceparent:
+            metadata[self.OTEL_TRACEPARENT_KEY] = traceparent
+
+        return metadata
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return tuple()
+
+    def _instrument(self, **kwargs: InstrumentKwargs) -> None:
+        self._tracer = get_tracer(__name__, hatchet_sdk_version, self.tracer_provider)
+        self._meter = get_meter(__name__, hatchet_sdk_version, self.meter_provider)
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "worker.runner.runner.Runner.handle_start_step_run",
+            self._wrap_handle_start_step_run,
+        )
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "worker.runner.runner.Runner.handle_start_group_key_run",
+            self._wrap_handle_get_group_key_run,
+        )
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "worker.runner.runner.Runner.handle_cancel_action",
+            self._wrap_handle_cancel_action,
+        )
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.events.EventClient.push",
+            self._wrap_push_event,
+        )
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.events.EventClient.bulk_push",
+            self._wrap_bulk_push_event,
+        )
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.admin.AdminClient.run_workflow",
+            self._wrap_run_workflow,
+        )
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.admin.AdminClientAioImpl.run_workflow",
+            self._wrap_async_run_workflow,
+        )
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.admin.AdminClient.run_workflows",
+            self._wrap_run_workflows,
+        )
+
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.admin.AdminClientAioImpl.run_workflows",
+            self._wrap_async_run_workflows,
+        )
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    async def _wrap_handle_start_step_run(
+        self,
+        wrapped: Callable[[Action], Coroutine[None, None, Exception | None]],
+        instance: Runner,
+        args: tuple[Action],
+        kwargs: Any,
+    ) -> Exception | None:
+        action = args[0]
+        traceparent = self.parse_carrier_from_metadata(action.additional_metadata)
+
+        with self._tracer.start_as_current_span(
+            "hatchet.start_step_run",
+            attributes=action.otel_attributes,
+            context=traceparent,
+        ) as span:
+            result = await wrapped(*args, **kwargs)
+
+            if isinstance(result, Exception):
+                span.set_status(StatusCode.ERROR, str(result))
+
+            return result
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    async def _wrap_handle_get_group_key_run(
+        self,
+        wrapped: Callable[[Action], Coroutine[None, None, Exception | None]],
+        instance: Runner,
+        args: tuple[Action],
+        kwargs: Any,
+    ) -> Exception | None:
+        action = args[0]
+
+        with self._tracer.start_as_current_span(
+            "hatchet.get_group_key_run",
+            attributes=action.otel_attributes,
+        ) as span:
+            result = await wrapped(*args, **kwargs)
+
+            if isinstance(result, Exception):
+                span.set_status(StatusCode.ERROR, str(result))
+
+            return result
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    async def _wrap_handle_cancel_action(
+        self,
+        wrapped: Callable[[str], Coroutine[None, None, Exception | None]],
+        instance: Runner,
+        args: tuple[str],
+        kwargs: Any,
+    ) -> Exception | None:
+        step_run_id = args[0]
+
+        with self._tracer.start_as_current_span(
+            "hatchet.cancel_step_run",
+            attributes={
+                "hatchet.step_run_id": step_run_id,
+            },
+        ):
+            return await wrapped(*args, **kwargs)
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    def _wrap_push_event(
+        self,
+        wrapped: Callable[[str, dict[str, Any], PushEventOptions | None], Event],
+        instance: EventClient,
+        args: tuple[
+            str,
+            dict[str, Any],
+            PushEventOptions | None,
+        ],
+        kwargs: dict[str, str | dict[str, Any] | PushEventOptions | None],
+    ) -> Event:
+        with self._tracer.start_as_current_span(
+            "hatchet.push_event",
+        ):
+            return wrapped(*args, **kwargs)
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    def _wrap_bulk_push_event(
+        self,
+        wrapped: Callable[
+            [list[BulkPushEventWithMetadata], PushEventOptions | None], list[Event]
+        ],
+        instance: EventClient,
+        args: tuple[
+            list[BulkPushEventWithMetadata],
+            PushEventOptions | None,
+        ],
+        kwargs: dict[str, list[BulkPushEventWithMetadata] | PushEventOptions | None],
+    ) -> list[Event]:
+        with self._tracer.start_as_current_span(
+            "hatchet.bulk_push_event",
+        ):
+            return wrapped(*args, **kwargs)
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    def _wrap_run_workflow(
+        self,
+        wrapped: Callable[[str, Any, TriggerWorkflowOptions | None], WorkflowRunRef],
+        instance: AdminClient,
+        args: tuple[str, Any, TriggerWorkflowOptions | None],
+        kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
+    ) -> WorkflowRunRef:
+        with self._tracer.start_as_current_span(
+            "hatchet.run_workflow",
+        ):
+            return wrapped(*args, **kwargs)
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    async def _wrap_async_run_workflow(
+        self,
+        wrapped: Callable[
+            [str, Any, TriggerWorkflowOptions | None],
+            Coroutine[None, None, WorkflowRunRef],
+        ],
+        instance: AdminClient,
+        args: tuple[str, Any, TriggerWorkflowOptions | None],
+        kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
+    ) -> WorkflowRunRef:
+        with self._tracer.start_as_current_span(
+            "hatchet.run_workflow",
+        ):
+            return await wrapped(*args, **kwargs)
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    def _wrap_run_workflows(
+        self,
+        wrapped: Callable[
+            [list[WorkflowRunDict], TriggerWorkflowOptions | None], list[WorkflowRunRef]
+        ],
+        instance: AdminClient,
+        args: tuple[
+            list[WorkflowRunDict],
+            TriggerWorkflowOptions | None,
+        ],
+        kwargs: dict[str, list[WorkflowRunDict] | TriggerWorkflowOptions | None],
+    ) -> list[WorkflowRunRef]:
+        with self._tracer.start_as_current_span(
+            "hatchet.run_workflows",
+        ):
+            return wrapped(*args, **kwargs)
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    async def _wrap_async_run_workflows(
+        self,
+        wrapped: Callable[
+            [list[WorkflowRunDict], TriggerWorkflowOptions | None],
+            Coroutine[None, None, list[WorkflowRunRef]],
+        ],
+        instance: AdminClient,
+        args: tuple[
+            list[WorkflowRunDict],
+            TriggerWorkflowOptions | None,
+        ],
+        kwargs: dict[str, list[WorkflowRunDict] | TriggerWorkflowOptions | None],
+    ) -> list[WorkflowRunRef]:
+        with self._tracer.start_as_current_span(
+            "hatchet.run_workflows",
+        ):
+            return await wrapped(*args, **kwargs)
+
+    def _uninstrument(self, **kwargs: InstrumentKwargs) -> None:
+        self.tracer_provider = NoOpTracerProvider()
+        self.meter_provider = NoOpMeterProvider()
+
+        unwrap(hatchet_sdk, "worker.runner.runner.Runner.handle_start_step_run")
+        unwrap(hatchet_sdk, "worker.runner.runner.Runner.handle_start_group_key_run")
+        unwrap(hatchet_sdk, "worker.runner.runner.Runner.handle_cancel_action")
+        unwrap(hatchet_sdk, "clients.events.EventClient.push")
+        unwrap(hatchet_sdk, "clients.events.EventClient.bulk_push")
+        unwrap(hatchet_sdk, "clients.admin.AdminClient.run_workflow")
+        unwrap(hatchet_sdk, "clients.admin.AdminClientAioImpl.run_workflow")
+        unwrap(hatchet_sdk, "clients.admin.AdminClient.run_workflows")
+        unwrap(hatchet_sdk, "clients.admin.AdminClientAioImpl.run_workflows")
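The instrumentor also exposes create_traceparent, parse_carrier_from_metadata, and inject_traceparent_into_metadata, replacing the helpers that previously lived in hatchet_sdk/utils/tracing.py. A rough sketch of the producer side, assuming the resulting dict is passed as the run's additional metadata via TriggerWorkflowOptions (the client call and names below are illustrative, not taken from this diff):

# Sketch: carry the current trace into a workflow run's metadata so the
# wrapped handle_start_step_run on the worker can resume the same trace.
def trigger_with_trace(admin_client, instrumentor):
    # Serialize the active span context to a W3C traceparent header value.
    traceparent = instrumentor.create_traceparent()

    # Store it under the "traceparent" key that parse_carrier_from_metadata reads.
    metadata = instrumentor.inject_traceparent_into_metadata({}, traceparent)

    # Assumption: admin_client is an AdminClient and the third positional
    # argument is a TriggerWorkflowOptions dict; the workflow name and input
    # are placeholders.
    return admin_client.run_workflow(
        "my-workflow",
        {"key": "value"},
        {"additional_metadata": metadata},
    )

On the worker side, the wrapped handle_start_step_run reads action.additional_metadata and starts its "hatchet.start_step_run" span under the extracted context, which is why the Runner changes below strip the old span-handling code from the handlers themselves.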
hatchet_sdk/worker/runner/runner.py
@@ -10,7 +10,6 @@ from multiprocessing import Queue
 from threading import Thread, current_thread
 from typing import Any, Callable, Dict, Literal, Type, TypeVar, cast, overload

-from opentelemetry.trace import StatusCode
 from pydantic import BaseModel

 from hatchet_sdk.client import new_client_raw
@@ -32,7 +31,6 @@ from hatchet_sdk.contracts.dispatcher_pb2 import (
 )
 from hatchet_sdk.loader import ClientConfig
 from hatchet_sdk.logger import logger
-from hatchet_sdk.utils.tracing import create_tracer, parse_carrier_from_metadata
 from hatchet_sdk.utils.types import WorkflowValidator
 from hatchet_sdk.v2.callable import DurableContext
 from hatchet_sdk.worker.action_listener_process import ActionEvent
@@ -88,43 +86,29 @@ class Runner:
             labels=labels, client=new_client_raw(config).dispatcher
         )

-        self.otel_tracer = create_tracer(config=config)
-
     def create_workflow_run_url(self, action: Action) -> str:
         return f"{self.config.server_url}/workflow-runs/{action.workflow_run_id}?tenant={action.tenant_id}"

     def run(self, action: Action) -> None:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    span.add_event(log)
-                    logger.info(log)
-                    asyncio.create_task(self.handle_cancel_action(action.step_run_id))
-                case ActionType.START_GET_GROUP_KEY:
-                    log = f"run: get group key: {action.action_id}/{action.get_group_key_run_id}"
-                    span.add_event(log)
-                    logger.info(log)
-                    asyncio.create_task(self.handle_start_group_key_run(action))
-                case _:
-                    log = f"unknown action type: {action.action_type}"
-                    span.add_event(log)
-                    logger.error(log)
+        if self.worker_context.id() is None:
+            self.worker_context._worker_id = action.worker_id
+
+        match action.action_type:
+            case ActionType.START_STEP_RUN:
+                log = f"run: start step: {action.action_id}/{action.step_run_id}"
+                logger.info(log)
+                asyncio.create_task(self.handle_start_step_run(action))
+            case ActionType.CANCEL_STEP_RUN:
+                log = f"cancel: step run: {action.action_id}/{action.step_run_id}"
+                logger.info(log)
+                asyncio.create_task(self.handle_cancel_action(action.step_run_id))
+            case ActionType.START_GET_GROUP_KEY:
+                log = f"run: get group key: {action.action_id}/{action.get_group_key_run_id}"
+                logger.info(log)
+                asyncio.create_task(self.handle_start_group_key_run(action))
+            case _:
+                log = f"unknown action type: {action.action_type}"
+                logger.error(log)

     def step_run_callback(self, action: Action) -> Callable[[asyncio.Task[Any]], None]:
         def inner_callback(task: asyncio.Task[Any]) -> None:
@@ -306,101 +290,87 @@ class Runner:
             validator_registry=self.validator_registry,
         )

-
-
-
-        ) as span:
-            span.add_event("Starting step run")
-            span.set_attributes(action.otel_attributes)
-
-            action_name = action.action_id
+    ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+    async def handle_start_step_run(self, action: Action) -> None | Exception:
+        action_name = action.action_id

-
-
+        # Find the corresponding action function from the registry
+        action_func = self.action_registry.get(action_name)

-
+        context = self.create_context(action, action_func)

-
+        self.contexts[action.step_run_id] = context

-
-
-
-
-
-            )
+        if action_func:
+            self.event_queue.put(
+                ActionEvent(
+                    action=action,
+                    type=STEP_EVENT_TYPE_STARTED,
                 )
+            )

-
-
-
-
-            )
+            loop = asyncio.get_event_loop()
+            task = loop.create_task(
+                self.async_wrapped_action_func(
+                    context, action_func, action, action.step_run_id
                 )
-
-            task.add_done_callback(self.step_run_callback(action))
-            self.tasks[action.step_run_id] = task
-
-            try:
-                await task
-                span.set_status(StatusCode.OK)
-            except Exception as e:
-                # do nothing, this should be caught in the callback
-                span.set_status(StatusCode.ERROR)
-                span.record_exception(e)
-
-            span.add_event("Finished step run")
-
-    async def handle_start_group_key_run(self, action: Action) -> None:
-        with self.otel_tracer.start_as_current_span(
-            f"hatchet.worker.handle_start_step_run.{action.step_id}"
-        ) as span:
-            span.add_event("Starting group key run")
-            action_name = action.action_id
-            context = Context(
-                action,
-                self.dispatcher_client,
-                self.admin_client,
-                self.client.event,
-                self.client.rest,
-                self.client.workflow_listener,
-                self.workflow_run_event_listener,
-                self.worker_context,
-                self.client.config.namespace,
             )

-        self.
+            task.add_done_callback(self.step_run_callback(action))
+            self.tasks[action.step_run_id] = task

-
-
+            try:
+                await task
+            except Exception as e:
+                return e

-
-
-
-
-
-
-
+        return None
+
+    ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
+    async def handle_start_group_key_run(self, action: Action) -> Exception | None:
+        action_name = action.action_id
+        context = Context(
+            action,
+            self.dispatcher_client,
+            self.admin_client,
+            self.client.event,
+            self.client.rest,
+            self.client.workflow_listener,
+            self.workflow_run_event_listener,
+            self.worker_context,
+            self.client.config.namespace,
+        )
+
+        self.contexts[action.get_group_key_run_id] = context
+
+        # Find the corresponding action function from the registry
+        action_func = self.action_registry.get(action_name)
+
+        if action_func:
+            # send an event that the group key run has started
+            self.event_queue.put(
+                ActionEvent(
+                    action=action,
+                    type=GROUP_KEY_EVENT_TYPE_STARTED,
                 )
+            )

-
-
-
-
-            )
+            loop = asyncio.get_event_loop()
+            task = loop.create_task(
+                self.async_wrapped_action_func(
+                    context, action_func, action, action.get_group_key_run_id
                 )
+            )

-
-
+            task.add_done_callback(self.group_key_run_callback(action))
+            self.tasks[action.get_group_key_run_id] = task

-
-
-
-
-                # do nothing, this should be caught in the callback
-                span.set_status(StatusCode.ERROR)
-                span.record_exception(e)
+            try:
+                await task
+            except Exception as e:
+                return e

-
+        return None

     def force_kill_thread(self, thread: Thread) -> None:
         """Terminate a python threading.Thread."""
@@ -431,39 +401,31 @@ class Runner:
         except Exception as e:
             logger.exception(f"Failed to terminate thread: {e}")

+    ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
     async def handle_cancel_action(self, run_id: str) -> None:
-
-
-
-
-            span.set_attribute("run_id", run_id)
-
-            try:
-                # call cancel to signal the context to stop
-                if run_id in self.contexts:
-                    context = self.contexts.get(run_id)
-
-                    if context:
-                        context.cancel()
+        try:
+            # call cancel to signal the context to stop
+            if run_id in self.contexts:
+                context = self.contexts.get(run_id)

-
+                if context:
+                    context.cancel()

-
-                future = self.tasks.get(run_id)
+            await asyncio.sleep(1)

-
-
+            if run_id in self.tasks:
+                future = self.tasks.get(run_id)

-
-
-                logger.warning(
-                    f"Thread {self.threads[run_id].ident} with run id {run_id} is still running after cancellation. This could cause the thread pool to get blocked and prevent new tasks from running."
-                )
+                if future:
+                    future.cancel()

-
-
-
-
+            # check if thread is still running, if so, print a warning
+            if run_id in self.threads:
+                logger.warning(
+                    f"Thread {self.threads[run_id].ident} with run id {run_id} is still running after cancellation. This could cause the thread pool to get blocked and prevent new tasks from running."
+                )
+        finally:
+            self.cleanup_run_id(run_id)

     def serialize_output(self, output: Any) -> str:

{hatchet_sdk-0.45.3b1.dist-info → hatchet_sdk-0.46.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hatchet-sdk
-Version: 0.45.3b1
+Version: 0.46.0
 Summary:
 Author: Alexander Belanger
 Author-email: alexander@hatchet.run
@@ -9,6 +9,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Provides-Extra: otel
 Requires-Dist: aiohttp (>=3.10.5,<4.0.0)
 Requires-Dist: aiohttp-retry (>=2.8.3,<3.0.0)
 Requires-Dist: aiostream (>=0.5.2,<0.6.0)
@@ -17,14 +18,13 @@ Requires-Dist: grpcio (>=1.64.1,!=1.68.*) ; python_version < "3.13"
 Requires-Dist: grpcio (>=1.69.0) ; python_version >= "3.13"
 Requires-Dist: grpcio-tools (>=1.64.1,!=1.68.*) ; python_version < "3.13"
 Requires-Dist: grpcio-tools (>=1.69.0) ; python_version >= "3.13"
-Requires-Dist: loguru (>=0.7.2,<0.8.0)
 Requires-Dist: nest-asyncio (>=1.6.0,<2.0.0)
-Requires-Dist: opentelemetry-api (>=1.28.0,<2.0.0)
-Requires-Dist: opentelemetry-distro (>=0.
-Requires-Dist: opentelemetry-exporter-otlp (>=1.28.0,<2.0.0)
-Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.28.0,<2.0.0)
-Requires-Dist: opentelemetry-instrumentation (>=0.
-Requires-Dist: opentelemetry-sdk (>=1.28.0,<2.0.0)
+Requires-Dist: opentelemetry-api (>=1.28.0,<2.0.0) ; extra == "otel"
+Requires-Dist: opentelemetry-distro (>=0.49b0) ; extra == "otel"
+Requires-Dist: opentelemetry-exporter-otlp (>=1.28.0,<2.0.0) ; extra == "otel"
+Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.28.0,<2.0.0) ; extra == "otel"
+Requires-Dist: opentelemetry-instrumentation (>=0.49b0) ; extra == "otel"
+Requires-Dist: opentelemetry-sdk (>=1.28.0,<2.0.0) ; extra == "otel"
 Requires-Dist: prometheus-client (>=0.21.1,<0.22.0)
 Requires-Dist: protobuf (>=5.29.1,<6.0.0)
 Requires-Dist: pydantic (>=2.6.3,<3.0.0)