hatchet-sdk 1.10.3__py3-none-any.whl → 1.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hatchet-sdk might be problematic.
- hatchet_sdk/clients/admin.py +4 -2
- hatchet_sdk/opentelemetry/instrumentor.py +355 -24
- hatchet_sdk/utils/opentelemetry.py +27 -2
- {hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/METADATA +1 -1
- {hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/RECORD +7 -7
- {hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/WHEEL +0 -0
- {hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/entry_points.txt +0 -0
hatchet_sdk/clients/admin.py
CHANGED
@@ -361,7 +361,8 @@ class AdminClient:
         try:
             resp = cast(
                 v0_workflow_protos.TriggerWorkflowResponse,
-                client.TriggerWorkflow(
+                await asyncio.to_thread(
+                    client.TriggerWorkflow,
                     request,
                     metadata=get_metadata(self.token),
                 ),

@@ -450,7 +451,8 @@ class AdminClient:

             resp = cast(
                 v0_workflow_protos.BulkTriggerWorkflowResponse,
-                client.BulkTriggerWorkflow(
+                await asyncio.to_thread(
+                    client.BulkTriggerWorkflow,
                     bulk_request,
                     metadata=get_metadata(self.token),
                 ),
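The change above moves the blocking gRPC stub call off the event loop: `asyncio.to_thread` runs the synchronous `TriggerWorkflow` / `BulkTriggerWorkflow` call in a worker thread and awaits its result, so the async trigger path no longer blocks. A minimal, self-contained sketch of the same pattern (the `blocking_rpc` function is hypothetical, for illustration only):

    import asyncio
    import time

    def blocking_rpc(request: str) -> str:
        # Stand-in for a synchronous gRPC stub call.
        time.sleep(1)
        return f"response for {request}"

    async def trigger(request: str) -> str:
        # Offload the blocking call to a thread so the event loop stays responsive.
        return await asyncio.to_thread(blocking_rpc, request)

    print(asyncio.run(trigger("workflow-1")))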
hatchet_sdk/opentelemetry/instrumentor.py
CHANGED

@@ -1,6 +1,8 @@
+import json
 from importlib.metadata import version
-from typing import Any, Callable, Collection, Coroutine
+from typing import Any, Callable, Collection, Coroutine, Union, cast

+from hatchet_sdk.contracts import workflows_pb2 as v0_workflow_protos
 from hatchet_sdk.utils.typing import JSONSerializableMapping

 try:
@@ -26,20 +28,29 @@ except (RuntimeError, ImportError, ModuleNotFoundError):
         "To use the HatchetInstrumentor, you must install Hatchet's `otel` extra using (e.g.) `pip install hatchet-sdk[otel]`"
     )

+import inspect
+from datetime import datetime
+
+from google.protobuf import timestamp_pb2
+
 import hatchet_sdk
 from hatchet_sdk import ClientConfig
 from hatchet_sdk.clients.admin import (
     AdminClient,
+    ScheduleTriggerWorkflowOptions,
     TriggerWorkflowOptions,
     WorkflowRunTriggerConfig,
 )
 from hatchet_sdk.clients.events import (
+    BulkPushEventOptions,
     BulkPushEventWithMetadata,
     EventClient,
     PushEventOptions,
 )
 from hatchet_sdk.contracts.events_pb2 import Event
+from hatchet_sdk.logger import logger
 from hatchet_sdk.runnables.action import Action
+from hatchet_sdk.utils.opentelemetry import OTelAttribute
 from hatchet_sdk.worker.runner.runner import Runner
 from hatchet_sdk.workflow_run import WorkflowRunRef

@@ -51,6 +62,13 @@ OTEL_TRACEPARENT_KEY = "traceparent"
|
|
|
51
62
|
|
|
52
63
|
|
|
53
64
|
def create_traceparent() -> str | None:
|
|
65
|
+
logger.warning(
|
|
66
|
+
"As of SDK version 1.11.0, you no longer need to call `create_traceparent` manually. The traceparent will be automatically created by the instrumentor and injected into the metadata of actions and events when appropriate. This method will be removed in a future version.",
|
|
67
|
+
)
|
|
68
|
+
return _create_traceparent()
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _create_traceparent() -> str | None:
|
|
54
72
|
"""
|
|
55
73
|
Creates and returns a W3C traceparent header value using OpenTelemetry's context propagation.
|
|
56
74
|
|
|
@@ -70,6 +88,16 @@ def create_traceparent() -> str | None:

 def parse_carrier_from_metadata(
     metadata: JSONSerializableMapping | None,
+) -> Context | None:
+    logger.warning(
+        "As of SDK version 1.11.0, you no longer need to call `parse_carrier_from_metadata` manually. This method will be removed in a future version.",
+    )
+
+    return _parse_carrier_from_metadata(metadata)
+
+
+def _parse_carrier_from_metadata(
+    metadata: JSONSerializableMapping | None,
 ) -> Context | None:
     """
     Parses OpenTelemetry trace context from a metadata dictionary.
@@ -86,7 +114,7 @@ def parse_carrier_from_metadata(
     :Example:

     >>> metadata = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
-    >>> context = parse_carrier_from_metadata(metadata)
+    >>> context = _parse_carrier_from_metadata(metadata)
     """

     if not metadata:
@@ -102,6 +130,16 @@ def parse_carrier_from_metadata(

 def inject_traceparent_into_metadata(
     metadata: dict[str, str], traceparent: str | None = None
+) -> dict[str, str]:
+    logger.warning(
+        "As of SDK version 1.11.0, you no longer need to call `inject_traceparent_into_metadata` manually. The traceparent will automatically be injected by the instrumentor. This method will be removed in a future version.",
+    )
+
+    return _inject_traceparent_into_metadata(metadata, traceparent)
+
+
+def _inject_traceparent_into_metadata(
+    metadata: dict[str, str], traceparent: str | None = None
 ) -> dict[str, str]:
     """
     Injects OpenTelemetry `traceparent` into a metadata dictionary.
@@ -125,9 +163,8 @@ def inject_traceparent_into_metadata(
     >>> print(new_metadata)
     {"key": "value", "traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}
     """
-
     if not traceparent:
-        traceparent = create_traceparent()
+        traceparent = _create_traceparent()

     if not traceparent:
         return metadata
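The three public helpers above (`create_traceparent`, `parse_carrier_from_metadata`, `inject_traceparent_into_metadata`) now only log a deprecation warning and delegate to underscore-prefixed internals, because the instrumentor injects the traceparent itself when workflows are triggered or events are pushed. A rough before/after sketch, assuming the instrumentor is installed and an already-configured `AdminClient` named `admin` exists (the workflow name and input are illustrative):

    # Before 1.11.0: propagate trace context by hand
    metadata = inject_traceparent_into_metadata({"key": "value"})
    admin.run_workflow(
        "my-workflow",
        {"user_id": 123},
        TriggerWorkflowOptions(additional_metadata=metadata),
    )

    # From 1.11.0: the wrapped run_workflow injects the traceparent into
    # additional_metadata automatically, so this is enough
    admin.run_workflow("my-workflow", {"user_id": 123})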
@@ -213,6 +250,14 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
             self._wrap_async_run_workflow,
         )

+        ## IMPORTANT: We don't need to instrument the async version of `schedule_workflow`
+        ## because it just calls the sync version internally.
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.admin.AdminClient.schedule_workflow",
+            self._wrap_schedule_workflow,
+        )
+
         wrap_function_wrapper(
             hatchet_sdk,
             "clients.admin.AdminClient.run_workflows",
@@ -225,6 +270,19 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
             self._wrap_async_run_workflows,
         )

+    def extract_bound_args(
+        self,
+        wrapped_func: Callable[..., Any],
+        args: tuple[Any, ...],
+        kwargs: dict[str, Any],
+    ) -> list[Any]:
+        sig = inspect.signature(wrapped_func)
+
+        bound_args = sig.bind(*args, **kwargs)
+        bound_args.apply_defaults()
+
+        return list(bound_args.arguments.values())
+
     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_handle_start_step_run(
         self,
@@ -233,8 +291,11 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
         args: tuple[Action],
         kwargs: Any,
     ) -> Exception | None:
-
-
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        action = cast(Action, params[0])
+
+        traceparent = _parse_carrier_from_metadata(action.additional_metadata)

         with self._tracer.start_as_current_span(
             "hatchet.start_step_run",
@@ -290,66 +351,293 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_push_event(
         self,
-        wrapped: Callable[[str, dict[str, Any], PushEventOptions
+        wrapped: Callable[[str, dict[str, Any], PushEventOptions], Event],
         instance: EventClient,
         args: tuple[
             str,
             dict[str, Any],
-            PushEventOptions
+            PushEventOptions,
         ],
-        kwargs: dict[str, str | dict[str, Any] | PushEventOptions
+        kwargs: dict[str, str | dict[str, Any] | PushEventOptions],
     ) -> Event:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        event_key = cast(str, params[0])
+        payload = cast(JSONSerializableMapping, params[1])
+        options = cast(
+            PushEventOptions,
+            params[2] if len(params) > 2 else PushEventOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.EVENT_KEY: event_key,
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.PRIORITY: options.priority,
+            OTelAttribute.FILTER_SCOPE: options.scope,
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.push_event",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
         ):
-
+            options = PushEventOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return wrapped(event_key, dict(payload), options)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_bulk_push_event(
         self,
         wrapped: Callable[
-            [list[BulkPushEventWithMetadata],
+            [list[BulkPushEventWithMetadata], BulkPushEventOptions], list[Event]
         ],
         instance: EventClient,
         args: tuple[
             list[BulkPushEventWithMetadata],
-
+            BulkPushEventOptions,
         ],
-        kwargs: dict[str, list[BulkPushEventWithMetadata] |
+        kwargs: dict[str, list[BulkPushEventWithMetadata] | BulkPushEventOptions],
     ) -> list[Event]:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        bulk_events = cast(list[BulkPushEventWithMetadata], params[0])
+        options = cast(BulkPushEventOptions, params[1])
+
+        num_bulk_events = len(bulk_events)
+        unique_event_keys = {event.key for event in bulk_events}
+
         with self._tracer.start_as_current_span(
             "hatchet.bulk_push_event",
+            attributes={
+                "hatchet.num_events": num_bulk_events,
+                "hatchet.unique_event_keys": json.dumps(unique_event_keys, default=str),
+            },
         ):
-
+            bulk_events_with_meta = [
+                BulkPushEventWithMetadata(
+                    **event.model_dump(exclude={"additional_metadata"}),
+                    additional_metadata=_inject_traceparent_into_metadata(
+                        dict(event.additional_metadata),
+                    ),
+                )
+                for event in bulk_events
+            ]
+
+            return wrapped(
+                bulk_events_with_meta,
+                options,
+            )

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_run_workflow(
         self,
-        wrapped: Callable[
+        wrapped: Callable[
+            [str, JSONSerializableMapping, TriggerWorkflowOptions],
+            WorkflowRunRef,
+        ],
         instance: AdminClient,
-        args: tuple[str,
-        kwargs: dict[str, str |
+        args: tuple[str, JSONSerializableMapping, TriggerWorkflowOptions],
+        kwargs: dict[str, str | JSONSerializableMapping | TriggerWorkflowOptions],
     ) -> WorkflowRunRef:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        workflow_name = cast(str, params[0])
+        payload = cast(JSONSerializableMapping, params[1])
+        options = cast(
+            TriggerWorkflowOptions,
+            params[2] if len(params) > 2 else TriggerWorkflowOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.WORKFLOW_NAME: workflow_name,
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
+            OTelAttribute.PARENT_ID: options.parent_id,
+            OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
+            OTelAttribute.CHILD_INDEX: options.child_index,
+            OTelAttribute.CHILD_KEY: options.child_key,
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.PRIORITY: options.priority,
+            OTelAttribute.DESIRED_WORKER_ID: options.desired_worker_id,
+            OTelAttribute.STICKY: options.sticky,
+            OTelAttribute.KEY: options.key,
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflow",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
         ):
-
+            options = TriggerWorkflowOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return wrapped(workflow_name, payload, options)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_async_run_workflow(
         self,
         wrapped: Callable[
-            [str,
+            [str, JSONSerializableMapping, TriggerWorkflowOptions],
             Coroutine[None, None, WorkflowRunRef],
         ],
         instance: AdminClient,
-        args: tuple[str,
-        kwargs: dict[str, str |
+        args: tuple[str, JSONSerializableMapping, TriggerWorkflowOptions],
+        kwargs: dict[str, str | JSONSerializableMapping | TriggerWorkflowOptions],
     ) -> WorkflowRunRef:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        workflow_name = cast(str, params[0])
+        payload = cast(JSONSerializableMapping, params[1])
+        options = cast(
+            TriggerWorkflowOptions,
+            params[2] if len(params) > 2 else TriggerWorkflowOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.WORKFLOW_NAME: workflow_name,
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
+            OTelAttribute.PARENT_ID: options.parent_id,
+            OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
+            OTelAttribute.CHILD_INDEX: options.child_index,
+            OTelAttribute.CHILD_KEY: options.child_key,
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.PRIORITY: options.priority,
+            OTelAttribute.DESIRED_WORKER_ID: options.desired_worker_id,
+            OTelAttribute.STICKY: options.sticky,
+            OTelAttribute.KEY: options.key,
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflow",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
         ):
-
+            options = TriggerWorkflowOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return await wrapped(workflow_name, payload, options)
+
+    def _ts_to_iso(self, ts: Union[datetime, timestamp_pb2.Timestamp]) -> str:
+        if isinstance(ts, datetime):
+            return ts.isoformat()
+        elif isinstance(ts, timestamp_pb2.Timestamp):
+            return ts.ToJsonString()
+        else:
+            raise TypeError(f"Unsupported type for timestamp conversion: {type(ts)}")
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    def _wrap_schedule_workflow(
+        self,
+        wrapped: Callable[
+            [
+                str,
+                list[Union[datetime, timestamp_pb2.Timestamp]],
+                JSONSerializableMapping,
+                ScheduleTriggerWorkflowOptions,
+            ],
+            v0_workflow_protos.WorkflowVersion,
+        ],
+        instance: AdminClient,
+        args: tuple[
+            str,
+            list[Union[datetime, timestamp_pb2.Timestamp]],
+            JSONSerializableMapping,
+            ScheduleTriggerWorkflowOptions,
+        ],
+        kwargs: dict[
+            str,
+            str
+            | list[Union[datetime, timestamp_pb2.Timestamp]]
+            | JSONSerializableMapping
+            | ScheduleTriggerWorkflowOptions,
+        ],
+    ) -> v0_workflow_protos.WorkflowVersion:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        workflow_name = cast(str, params[0])
+        schedules = cast(list[Union[datetime, timestamp_pb2.Timestamp]], params[1])
+        input = cast(JSONSerializableMapping, params[2])
+        options = cast(
+            ScheduleTriggerWorkflowOptions,
+            params[3] if len(params) > 3 else ScheduleTriggerWorkflowOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.WORKFLOW_NAME: workflow_name,
+            OTelAttribute.RUN_AT_TIMESTAMPS: json.dumps(
+                [self._ts_to_iso(ts) for ts in schedules]
+            ),
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(input, default=str),
+            OTelAttribute.PARENT_ID: options.parent_id,
+            OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
+            OTelAttribute.CHILD_INDEX: options.child_index,
+            OTelAttribute.CHILD_KEY: options.child_key,
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.PRIORITY: options.priority,
+        }
+
+        with self._tracer.start_as_current_span(
+            "hatchet.schedule_workflow",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
+        ):
+            options = ScheduleTriggerWorkflowOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return wrapped(workflow_name, schedules, input, options)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_run_workflows(
@@ -362,10 +650,37 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
         args: tuple[list[WorkflowRunTriggerConfig],],
         kwargs: dict[str, list[WorkflowRunTriggerConfig]],
     ) -> list[WorkflowRunRef]:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+        workflow_run_configs = cast(list[WorkflowRunTriggerConfig], params[0])
+
+        num_workflows = len(workflow_run_configs)
+        unique_workflow_names = {
+            config.workflow_name for config in workflow_run_configs
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflows",
+            attributes={
+                "hatchet.num_workflows": num_workflows,
+                "hatchet.unique_workflow_names": json.dumps(
+                    unique_workflow_names, default=str
+                ),
+            },
         ):
-
+            workflow_run_configs_with_meta = [
+                WorkflowRunTriggerConfig(
+                    **config.model_dump(exclude={"options"}),
+                    options=TriggerWorkflowOptions(
+                        **config.options.model_dump(exclude={"additional_metadata"}),
+                        additional_metadata=_inject_traceparent_into_metadata(
+                            dict(config.options.additional_metadata),
+                        ),
+                    ),
+                )
+                for config in workflow_run_configs
+            ]
+
+            return wrapped(workflow_run_configs_with_meta)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_async_run_workflows(
@@ -378,10 +693,26 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
         args: tuple[list[WorkflowRunTriggerConfig],],
         kwargs: dict[str, list[WorkflowRunTriggerConfig]],
     ) -> list[WorkflowRunRef]:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+        workflow_run_configs = cast(list[WorkflowRunTriggerConfig], params[0])
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflows",
         ):
-
+            workflow_run_configs_with_meta = [
+                WorkflowRunTriggerConfig(
+                    **config.model_dump(exclude={"options"}),
+                    options=TriggerWorkflowOptions(
+                        **config.options.model_dump(exclude={"additional_metadata"}),
+                        additional_metadata=_inject_traceparent_into_metadata(
+                            dict(config.options.additional_metadata),
+                        ),
+                    ),
+                )
+                for config in workflow_run_configs
+            ]
+
+            return await wrapped(workflow_run_configs_with_meta)

     def _uninstrument(self, **kwargs: InstrumentKwargs) -> None:
         self.tracer_provider = NoOpTracerProvider()
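A recurring pattern in the new wrappers above is `extract_bound_args`, which uses `inspect.signature(...).bind(*args, **kwargs)` followed by `apply_defaults()` so a wrapper sees the same positional parameter list regardless of how the caller mixed positional and keyword arguments. A standalone sketch of that technique (the `push_event` stub here is illustrative, not the SDK's real method):

    import inspect
    from typing import Any, Callable

    def extract_bound_args(
        wrapped_func: Callable[..., Any],
        args: tuple[Any, ...],
        kwargs: dict[str, Any],
    ) -> list[Any]:
        # Bind the call the same way Python would, then fill in defaults,
        # so every parameter shows up in declaration order.
        sig = inspect.signature(wrapped_func)
        bound = sig.bind(*args, **kwargs)
        bound.apply_defaults()
        return list(bound.arguments.values())

    def push_event(event_key: str, payload: dict[str, Any], namespace: str = "default") -> None:
        ...

    # Both call styles normalize to the same parameter list.
    print(extract_bound_args(push_event, ("user:created",), {"payload": {"id": 1}}))
    print(extract_bound_args(push_event, ("user:created", {"id": 1}), {}))
    # -> ['user:created', {'id': 1}, 'default'] in both cases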
hatchet_sdk/utils/opentelemetry.py
CHANGED

@@ -2,8 +2,18 @@ from enum import Enum


 class OTelAttribute(str, Enum):
+    ## Shared
+    NAMESPACE = "namespace"
+    ADDITIONAL_METADATA = "additional_metadata"
+    WORKFLOW_NAME = "workflow_name"
+
+    PRIORITY = "priority"
+
+    ## Unfortunately named - this corresponds to all types of payloads, not just actions
+    ACTION_PAYLOAD = "payload"
+
+    ## Action
     ACTION_NAME = "action_name"
-    ACTION_PAYLOAD = "action_payload"
     CHILD_WORKFLOW_INDEX = "child_workflow_index"
     CHILD_WORKFLOW_KEY = "child_workflow_key"
     GET_GROUP_KEY_RUN_ID = "get_group_key_run_id"

@@ -14,6 +24,21 @@ class OTelAttribute(str, Enum):
     TENANT_ID = "tenant_id"
     WORKER_ID = "worker_id"
     WORKFLOW_ID = "workflow_id"
-    WORKFLOW_NAME = "workflow_name"
     WORKFLOW_RUN_ID = "workflow_run_id"
     WORKFLOW_VERSION_ID = "workflow_version_id"
+
+    ## Push Event
+    EVENT_KEY = "event_key"
+    FILTER_SCOPE = "scope"
+
+    ## Trigger Workflow
+    PARENT_ID = "parent_id"
+    PARENT_STEP_RUN_ID = "parent_step_run_id"
+    CHILD_INDEX = "child_index"
+    CHILD_KEY = "child_key"
+    DESIRED_WORKER_ID = "desired_worker_id"
+    STICKY = "sticky"
+    KEY = "key"
+
+    ## Schedule Workflow
+    RUN_AT_TIMESTAMPS = "run_at_timestamps"
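The `OTelAttribute` values above become span attribute keys in the instrumentor by prefixing each value with `hatchet.`, dropping anything empty, excluded via configuration, or equal to `"{}"` / `"[]"`. A minimal sketch of that mapping, using a trimmed-down stand-in for the enum (the attribute values mirror the diff; the payloads are illustrative):

    from enum import Enum

    # Trimmed-down stand-in for hatchet_sdk/utils/opentelemetry.py's OTelAttribute.
    class OTelAttribute(str, Enum):
        WORKFLOW_NAME = "workflow_name"
        ACTION_PAYLOAD = "payload"
        ADDITIONAL_METADATA = "additional_metadata"

    attributes = {
        OTelAttribute.WORKFLOW_NAME: "my-workflow",
        OTelAttribute.ACTION_PAYLOAD: '{"user_id": 123}',
        OTelAttribute.ADDITIONAL_METADATA: "{}",  # empty payloads are filtered out
    }
    excluded: set[OTelAttribute] = set()

    # Mirrors the comprehension used when starting spans in the instrumentor.
    span_attributes = {
        f"hatchet.{k.value}": v
        for k, v in attributes.items()
        if v and k not in excluded and v != "{}" and v != "[]"
    }
    print(span_attributes)
    # {'hatchet.workflow_name': 'my-workflow', 'hatchet.payload': '{"user_id": 123}'}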
{hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 hatchet_sdk/__init__.py,sha256=TW5T_It9W79Lfu-QaxMYBXMjYaXQdOpOS6bvYJm6VjQ,10198
 hatchet_sdk/client.py,sha256=7ONBiE29OKQci3Qaz7U5e-WSxO_ohnWq8F-MSgzG2fU,2312
-hatchet_sdk/clients/admin.py,sha256=
+hatchet_sdk/clients/admin.py,sha256=B4fav9EYdsMbMqaH3GUEwL8sza1tQ3RORlwGXUb7qp0,17019
 hatchet_sdk/clients/dispatcher/action_listener.py,sha256=XUDXye-HW4V30DBgjd_dmq7e03AS9eBxUJi1VsvfSqE,13837
 hatchet_sdk/clients/dispatcher/dispatcher.py,sha256=LoxQEX5FS6v-qKRtOXoqsx3VlBUgFgTkbK58LU1eU3I,8176
 hatchet_sdk/clients/event_ts.py,sha256=MudFszIb9IcPKQYvBTzcatPkcWEy3nxbAtEQ0_NYxMg,2094

@@ -264,7 +264,7 @@ hatchet_sdk/hatchet.py,sha256=BVWSrKL6GCzarTNAte_r0iWsaBXntxNzrR-syR6qENI,22885
 hatchet_sdk/labels.py,sha256=nATgxWE3lFxRTnfISEpoIRLGbMfAZsHF4lZTuG4Mfic,182
 hatchet_sdk/logger.py,sha256=5uOr52T4mImSQm1QvWT8HvZFK5WfPNh3Y1cBQZRFgUQ,333
 hatchet_sdk/metadata.py,sha256=XkRbhnghJJGCdVvF-uzyGBcNaTqpeQ3uiQvNNP1wyBc,107
-hatchet_sdk/opentelemetry/instrumentor.py,sha256=
+hatchet_sdk/opentelemetry/instrumentor.py,sha256=1hbZsvbK_GiuzkL88P97G6lg0BRPCvyeR7ltfQT32DM,27099
 hatchet_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/rate_limit.py,sha256=TwbCuggiZaWpYuo4mjVLlE-z1OfQ2mRBiVvCSaG3lv4,3919
 hatchet_sdk/runnables/action.py,sha256=-CJ1b0SnLdXvWH9HZhhoAmKO68WJRGRn7Y7vcLNr1NU,4396

@@ -275,7 +275,7 @@ hatchet_sdk/runnables/types.py,sha256=5jf1c7_0QVUFh0bcXi4hIiaOdUiyhspU4LltoAFCwl
 hatchet_sdk/runnables/workflow.py,sha256=lgN2z9or8E4jgHf6C31Kf1KsTa8_rEtE0AwIqGpNEH8,39926
 hatchet_sdk/token.py,sha256=KjIiInwG5Kqd_FO4BSW1x_5Uc7PFbnzIVJqr50-ZldE,779
 hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
-hatchet_sdk/utils/opentelemetry.py,sha256=
+hatchet_sdk/utils/opentelemetry.py,sha256=64TVwCLrUzEmcL2BUNPV_QubfiR5jajOZtVeGYLnEEA,1226
 hatchet_sdk/utils/proto_enums.py,sha256=0UybwE3s7TcqmzoQSO8YnhgAKOS8WZXsyPchB8-eksw,1247
 hatchet_sdk/utils/timedelta_to_expression.py,sha256=kwuYZ51JdDdc3h9Sw4vgBFmJBMPkgbGJA4v9uO4_NGk,660
 hatchet_sdk/utils/typing.py,sha256=P6-Nd5K_Hk-VhEkGj6LYki_9ddw05rJtzRA56qGXHag,914

@@ -518,7 +518,7 @@ hatchet_sdk/worker/runner/runner.py,sha256=9Z3Ac6hRh5rjet7-BRtpcFARlhWvy6TNrdsdj
 hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=nHRPSiDBqzhObM7i2X7t03OupVFnE7kQBdR2Ckgg-2w,2709
 hatchet_sdk/worker/worker.py,sha256=SAucu3dg8lkKLbzb3noNWvClZ09lC5tuRcBkYZtIJ-0,16088
 hatchet_sdk/workflow_run.py,sha256=ZwH0HLFGFVXz6jbiqSv4w0Om2XuR52Tzzw6LH4y65jQ,2765
-hatchet_sdk-1.
-hatchet_sdk-1.
-hatchet_sdk-1.
-hatchet_sdk-1.
+hatchet_sdk-1.11.1.dist-info/METADATA,sha256=Fd4BDU2OhReYHqLfbSGLFelTHGTfKbQQeksWS0qhqUM,3636
+hatchet_sdk-1.11.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+hatchet_sdk-1.11.1.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
+hatchet_sdk-1.11.1.dist-info/RECORD,,
{hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/WHEEL
File without changes

{hatchet_sdk-1.10.3.dist-info → hatchet_sdk-1.11.1.dist-info}/entry_points.txt
File without changes