hatchet-sdk 0.45.3b1__py3-none-any.whl → 0.46.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic; consult the package registry's advisory page for this release for more details.

@@ -24,12 +24,6 @@ from hatchet_sdk.contracts.workflows_pb2 import (
24
24
  )
25
25
  from hatchet_sdk.contracts.workflows_pb2_grpc import WorkflowServiceStub
26
26
  from hatchet_sdk.utils.serialization import flatten
27
- from hatchet_sdk.utils.tracing import (
28
- create_carrier,
29
- create_tracer,
30
- inject_carrier_into_metadata,
31
- parse_carrier_from_metadata,
32
- )
33
27
  from hatchet_sdk.workflow_run import RunRef, WorkflowRunRef
34
28
 
35
29
  from ..loader import ClientConfig
@@ -95,7 +89,10 @@ class AdminClientBase:
95
89
  else options["additional_metadata"]
96
90
  )
97
91
  if meta is not None:
98
- options["additional_metadata"] = json.dumps(meta).encode("utf-8")
92
+ options = {
93
+ **options,
94
+ "additional_metadata": json.dumps(meta).encode("utf-8"),
95
+ }
99
96
  except json.JSONDecodeError as e:
100
97
  raise ValueError(f"Error encoding payload: {e}")
101
98
 
@@ -171,7 +168,6 @@ class AdminClientAioImpl(AdminClientBase):
171
168
  self.token = config.token
172
169
  self.listener_client = new_listener(config)
173
170
  self.namespace = config.namespace
174
- self.otel_tracer = create_tracer(config=config)
175
171
 
176
172
  async def run(
177
173
  self,
@@ -190,74 +186,46 @@ class AdminClientAioImpl(AdminClientBase):
190
186
  wrr.workflow_run_id, wrr.workflow_listener, wrr.workflow_run_event_listener
191
187
  )
192
188
 
189
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
193
190
  @tenacity_retry
194
191
  async def run_workflow(
195
192
  self, workflow_name: str, input: any, options: TriggerWorkflowOptions = None
196
193
  ) -> WorkflowRunRef:
197
- ctx = parse_carrier_from_metadata(
198
- (options or {}).get("additional_metadata", {})
199
- )
194
+ try:
195
+ if not self.pooled_workflow_listener:
196
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
200
197
 
201
- with self.otel_tracer.start_as_current_span(
202
- f"hatchet.async_run_workflow.{workflow_name}", context=ctx
203
- ) as span:
204
- carrier = create_carrier()
198
+ namespace = self.namespace
205
199
 
206
- try:
207
- if not self.pooled_workflow_listener:
208
- self.pooled_workflow_listener = PooledWorkflowRunListener(
209
- self.config
210
- )
211
-
212
- namespace = self.namespace
213
-
214
- if (
215
- options is not None
216
- and "namespace" in options
217
- and options["namespace"] is not None
218
- ):
219
- namespace = options.pop("namespace")
220
-
221
- if namespace != "" and not workflow_name.startswith(self.namespace):
222
- workflow_name = f"{namespace}{workflow_name}"
223
-
224
- if options is not None and "additional_metadata" in options:
225
- options["additional_metadata"] = inject_carrier_into_metadata(
226
- options["additional_metadata"], carrier
227
- )
228
- span.set_attributes(
229
- flatten(
230
- options["additional_metadata"], parent_key="", separator="."
231
- )
232
- )
233
-
234
- request = self._prepare_workflow_request(workflow_name, input, options)
235
-
236
- span.add_event(
237
- "Triggering workflow", attributes={"workflow_name": workflow_name}
238
- )
200
+ if (
201
+ options is not None
202
+ and "namespace" in options
203
+ and options["namespace"] is not None
204
+ ):
205
+ namespace = options.pop("namespace")
239
206
 
240
- resp: TriggerWorkflowResponse = await self.aio_client.TriggerWorkflow(
241
- request,
242
- metadata=get_metadata(self.token),
243
- )
207
+ if namespace != "" and not workflow_name.startswith(self.namespace):
208
+ workflow_name = f"{namespace}{workflow_name}"
244
209
 
245
- span.add_event(
246
- "Received workflow response",
247
- attributes={"workflow_name": workflow_name},
248
- )
210
+ request = self._prepare_workflow_request(workflow_name, input, options)
249
211
 
250
- return WorkflowRunRef(
251
- workflow_run_id=resp.workflow_run_id,
252
- workflow_listener=self.pooled_workflow_listener,
253
- workflow_run_event_listener=self.listener_client,
254
- )
255
- except (grpc.RpcError, grpc.aio.AioRpcError) as e:
256
- if e.code() == grpc.StatusCode.ALREADY_EXISTS:
257
- raise DedupeViolationErr(e.details())
212
+ resp: TriggerWorkflowResponse = await self.aio_client.TriggerWorkflow(
213
+ request,
214
+ metadata=get_metadata(self.token),
215
+ )
258
216
 
259
- raise e
217
+ return WorkflowRunRef(
218
+ workflow_run_id=resp.workflow_run_id,
219
+ workflow_listener=self.pooled_workflow_listener,
220
+ workflow_run_event_listener=self.listener_client,
221
+ )
222
+ except (grpc.RpcError, grpc.aio.AioRpcError) as e:
223
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
224
+ raise DedupeViolationErr(e.details())
225
+
226
+ raise e
260
227
 
228
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
261
229
  @tenacity_retry
262
230
  async def run_workflows(
263
231
  self,
@@ -386,7 +354,6 @@ class AdminClient(AdminClientBase):
386
354
  self.token = config.token
387
355
  self.listener_client = new_listener(config)
388
356
  self.namespace = config.namespace
389
- self.otel_tracer = create_tracer(config=config)
390
357
 
391
358
  @tenacity_retry
392
359
  def put_workflow(
@@ -456,78 +423,47 @@ class AdminClient(AdminClientBase):
456
423
 
457
424
  raise e
458
425
 
459
- ## TODO: `options` is treated as a dict (wrong type hint)
460
- ## TODO: `any` type hint should come from `typing`
426
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
461
427
  @tenacity_retry
462
428
  def run_workflow(
463
429
  self, workflow_name: str, input: any, options: TriggerWorkflowOptions = None
464
430
  ) -> WorkflowRunRef:
465
- ctx = parse_carrier_from_metadata(
466
- (options or {}).get("additional_metadata", {})
467
- )
431
+ try:
432
+ if not self.pooled_workflow_listener:
433
+ self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
468
434
 
469
- with self.otel_tracer.start_as_current_span(
470
- f"hatchet.run_workflow.{workflow_name}", context=ctx
471
- ) as span:
472
- carrier = create_carrier()
435
+ namespace = self.namespace
473
436
 
474
- try:
475
- if not self.pooled_workflow_listener:
476
- self.pooled_workflow_listener = PooledWorkflowRunListener(
477
- self.config
478
- )
479
-
480
- namespace = self.namespace
481
-
482
- ## TODO: Factor this out - it's repeated a lot of places
483
- if (
484
- options is not None
485
- and "namespace" in options
486
- and options["namespace"] is not None
487
- ):
488
- namespace = options.pop("namespace")
489
-
490
- if options is not None and "additional_metadata" in options:
491
- options["additional_metadata"] = inject_carrier_into_metadata(
492
- options["additional_metadata"], carrier
493
- )
494
-
495
- span.set_attributes(
496
- flatten(
497
- options["additional_metadata"], parent_key="", separator="."
498
- )
499
- )
500
-
501
- if namespace != "" and not workflow_name.startswith(self.namespace):
502
- workflow_name = f"{namespace}{workflow_name}"
503
-
504
- request = self._prepare_workflow_request(workflow_name, input, options)
505
-
506
- span.add_event(
507
- "Triggering workflow", attributes={"workflow_name": workflow_name}
508
- )
437
+ ## TODO: Factor this out - it's repeated a lot of places
438
+ if (
439
+ options is not None
440
+ and "namespace" in options
441
+ and options["namespace"] is not None
442
+ ):
443
+ namespace = options.pop("namespace")
509
444
 
510
- resp: TriggerWorkflowResponse = self.client.TriggerWorkflow(
511
- request,
512
- metadata=get_metadata(self.token),
513
- )
445
+ if namespace != "" and not workflow_name.startswith(self.namespace):
446
+ workflow_name = f"{namespace}{workflow_name}"
514
447
 
515
- span.add_event(
516
- "Received workflow response",
517
- attributes={"workflow_name": workflow_name},
518
- )
448
+ request = self._prepare_workflow_request(workflow_name, input, options)
519
449
 
520
- return WorkflowRunRef(
521
- workflow_run_id=resp.workflow_run_id,
522
- workflow_listener=self.pooled_workflow_listener,
523
- workflow_run_event_listener=self.listener_client,
524
- )
525
- except (grpc.RpcError, grpc.aio.AioRpcError) as e:
526
- if e.code() == grpc.StatusCode.ALREADY_EXISTS:
527
- raise DedupeViolationErr(e.details())
450
+ resp: TriggerWorkflowResponse = self.client.TriggerWorkflow(
451
+ request,
452
+ metadata=get_metadata(self.token),
453
+ )
454
+
455
+ return WorkflowRunRef(
456
+ workflow_run_id=resp.workflow_run_id,
457
+ workflow_listener=self.pooled_workflow_listener,
458
+ workflow_run_event_listener=self.listener_client,
459
+ )
460
+ except (grpc.RpcError, grpc.aio.AioRpcError) as e:
461
+ if e.code() == grpc.StatusCode.ALREADY_EXISTS:
462
+ raise DedupeViolationErr(e.details())
528
463
 
529
- raise e
464
+ raise e
530
465
 
466
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
531
467
  @tenacity_retry
532
468
  def run_workflows(
533
469
  self, workflows: List[WorkflowRunDict], options: TriggerWorkflowOptions = None
@@ -90,27 +90,29 @@ class Action:
90
90
  self.additional_metadata = {}
91
91
 
92
92
  @property
93
- def otel_attributes(self) -> dict[str, Any]:
94
- return flatten(
95
- xs={
96
- "worker_id": self.worker_id,
97
- "tenant_id": self.tenant_id,
98
- "workflow_run_id": self.workflow_run_id,
99
- "get_group_key_run_id": self.get_group_key_run_id,
100
- "job_id": self.job_id,
101
- "job_name": self.job_name,
102
- "job_run_id": self.job_run_id,
103
- "step_id": self.step_id,
104
- "step_run_id": self.step_run_id,
105
- "retry_count": self.retry_count,
106
- "child_workflow_index": self.child_workflow_index,
107
- "child_workflow_key": self.child_workflow_key,
108
- "parent_workflow_run_id": self.parent_workflow_run_id,
109
- "action_payload": self.action_payload,
110
- },
111
- parent_key="",
112
- separator=".",
113
- )
93
+ def otel_attributes(self) -> dict[str, str | int]:
94
+ try:
95
+ payload_str = json.dumps(self.action_payload, default=str)
96
+ except Exception:
97
+ payload_str = str(self.action_payload)
98
+
99
+ attrs: dict[str, str | int | None] = {
100
+ "hatchet.tenant_id": self.tenant_id,
101
+ "hatchet.worker_id": self.worker_id,
102
+ "hatchet.workflow_run_id": self.workflow_run_id,
103
+ "hatchet.step_id": self.step_id,
104
+ "hatchet.step_run_id": self.step_run_id,
105
+ "hatchet.retry_count": self.retry_count,
106
+ "hatchet.parent_workflow_run_id": self.parent_workflow_run_id,
107
+ "hatchet.child_workflow_index": self.child_workflow_index,
108
+ "hatchet.child_workflow_key": self.child_workflow_key,
109
+ "hatchet.action_payload": payload_str,
110
+ "hatchet.workflow_name": self.job_name,
111
+ "hatchet.action_name": self.action_id,
112
+ "hatchet.get_group_key_run_id": self.get_group_key_run_id,
113
+ }
114
+
115
+ return {k: v for k, v in attrs.items() if v}
114
116
 
115
117
 
116
118
  START_STEP_RUN = 0
@@ -17,12 +17,6 @@ from hatchet_sdk.contracts.events_pb2 import (
17
17
  )
18
18
  from hatchet_sdk.contracts.events_pb2_grpc import EventsServiceStub
19
19
  from hatchet_sdk.utils.serialization import flatten
20
- from hatchet_sdk.utils.tracing import (
21
- create_carrier,
22
- create_tracer,
23
- inject_carrier_into_metadata,
24
- parse_carrier_from_metadata,
25
- )
26
20
 
27
21
  from ..loader import ClientConfig
28
22
  from ..metadata import get_metadata
@@ -63,7 +57,6 @@ class EventClient:
63
57
  self.client = client
64
58
  self.token = config.token
65
59
  self.namespace = config.namespace
66
- self.otel_tracer = create_tracer(config=config)
67
60
 
68
61
  async def async_push(
69
62
  self, event_key, payload, options: Optional[PushEventOptions] = None
@@ -79,54 +72,41 @@ class EventClient:
79
72
  ) -> List[Event]:
80
73
  return await asyncio.to_thread(self.bulk_push, events=events, options=options)
81
74
 
75
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
82
76
  @tenacity_retry
83
77
  def push(self, event_key, payload, options: PushEventOptions = None) -> Event:
84
- ctx = parse_carrier_from_metadata(
85
- (options or {}).get("additional_metadata", {})
86
- )
87
-
88
- with self.otel_tracer.start_as_current_span(
89
- "hatchet.push", context=ctx
90
- ) as span:
91
- carrier = create_carrier()
92
- namespace = self.namespace
93
-
94
- if (
95
- options is not None
96
- and "namespace" in options
97
- and options["namespace"] is not None
98
- ):
99
- namespace = options.pop("namespace")
100
-
101
- namespaced_event_key = namespace + event_key
102
-
103
- try:
104
- meta = inject_carrier_into_metadata(
105
- dict() if options is None else options["additional_metadata"],
106
- carrier,
107
- )
108
- meta_bytes = None if meta is None else json.dumps(meta).encode("utf-8")
109
- except Exception as e:
110
- raise ValueError(f"Error encoding meta: {e}")
78
+ namespace = self.namespace
111
79
 
112
- span.set_attributes(flatten(meta, parent_key="", separator="."))
80
+ if (
81
+ options is not None
82
+ and "namespace" in options
83
+ and options["namespace"] is not None
84
+ ):
85
+ namespace = options.pop("namespace")
113
86
 
114
- try:
115
- payload_bytes = json.dumps(payload).encode("utf-8")
116
- except json.UnicodeEncodeError as e:
117
- raise ValueError(f"Error encoding payload: {e}")
87
+ namespaced_event_key = namespace + event_key
118
88
 
119
- request = PushEventRequest(
120
- key=namespaced_event_key,
121
- payload=payload_bytes,
122
- eventTimestamp=proto_timestamp_now(),
123
- additionalMetadata=meta_bytes,
124
- )
89
+ try:
90
+ meta = dict() if options is None else options["additional_metadata"]
91
+ meta_bytes = None if meta is None else json.dumps(meta).encode("utf-8")
92
+ except Exception as e:
93
+ raise ValueError(f"Error encoding meta: {e}")
125
94
 
126
- span.add_event("Pushing event", attributes={"key": namespaced_event_key})
95
+ try:
96
+ payload_bytes = json.dumps(payload).encode("utf-8")
97
+ except json.UnicodeEncodeError as e:
98
+ raise ValueError(f"Error encoding payload: {e}")
99
+
100
+ request = PushEventRequest(
101
+ key=namespaced_event_key,
102
+ payload=payload_bytes,
103
+ eventTimestamp=proto_timestamp_now(),
104
+ additionalMetadata=meta_bytes,
105
+ )
127
106
 
128
- return self.client.Push(request, metadata=get_metadata(self.token))
107
+ return self.client.Push(request, metadata=get_metadata(self.token))
129
108
 
109
+ ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
130
110
  @tenacity_retry
131
111
  def bulk_push(
132
112
  self,
@@ -134,10 +114,6 @@ class EventClient:
134
114
  options: BulkPushEventOptions = None,
135
115
  ) -> List[Event]:
136
116
  namespace = self.namespace
137
- bulk_push_correlation_id = uuid4()
138
- ctx = parse_carrier_from_metadata(
139
- (options or {}).get("additional_metadata", {})
140
- )
141
117
 
142
118
  if (
143
119
  options is not None
@@ -148,43 +124,30 @@ class EventClient:
148
124
 
149
125
  bulk_events = []
150
126
  for event in events:
151
- with self.otel_tracer.start_as_current_span(
152
- "hatchet.bulk_push", context=ctx
153
- ) as span:
154
- carrier = create_carrier()
155
- span.set_attribute(
156
- "bulk_push_correlation_id", str(bulk_push_correlation_id)
157
- )
158
-
159
- event_key = namespace + event["key"]
160
- payload = event["payload"]
161
-
162
- try:
163
- meta = inject_carrier_into_metadata(
164
- event.get("additional_metadata", {}), carrier
165
- )
166
- meta_bytes = json.dumps(meta).encode("utf-8") if meta else None
167
- except Exception as e:
168
- raise ValueError(f"Error encoding meta: {e}")
169
-
170
- span.set_attributes(flatten(meta, parent_key="", separator="."))
171
-
172
- try:
173
- payload_bytes = json.dumps(payload).encode("utf-8")
174
- except json.UnicodeEncodeError as e:
175
- raise ValueError(f"Error encoding payload: {e}")
176
-
177
- request = PushEventRequest(
178
- key=event_key,
179
- payload=payload_bytes,
180
- eventTimestamp=proto_timestamp_now(),
181
- additionalMetadata=meta_bytes,
182
- )
183
- bulk_events.append(request)
127
+ event_key = namespace + event["key"]
128
+ payload = event["payload"]
129
+
130
+ try:
131
+ meta = event.get("additional_metadata", {})
132
+ meta_bytes = json.dumps(meta).encode("utf-8") if meta else None
133
+ except Exception as e:
134
+ raise ValueError(f"Error encoding meta: {e}")
135
+
136
+ try:
137
+ payload_bytes = json.dumps(payload).encode("utf-8")
138
+ except json.UnicodeEncodeError as e:
139
+ raise ValueError(f"Error encoding payload: {e}")
140
+
141
+ request = PushEventRequest(
142
+ key=event_key,
143
+ payload=payload_bytes,
144
+ eventTimestamp=proto_timestamp_now(),
145
+ additionalMetadata=meta_bytes,
146
+ )
147
+ bulk_events.append(request)
184
148
 
185
149
  bulk_request = BulkPushEventRequest(events=bulk_events)
186
150
 
187
- span.add_event("Pushing bulk events")
188
151
  response = self.client.BulkPush(bulk_request, metadata=get_metadata(self.token))
189
152
 
190
153
  return response.events
hatchet_sdk/loader.py CHANGED
@@ -1,7 +1,7 @@
1
- import json
2
1
  import os
3
2
  from logging import Logger, getLogger
4
3
  from typing import Dict, Optional
4
+ from warnings import warn
5
5
 
6
6
  import yaml
7
7
 
@@ -39,10 +39,6 @@ class ClientConfig:
39
39
  logger: Logger = None,
40
40
  grpc_max_recv_message_length: int = 4 * 1024 * 1024, # 4MB
41
41
  grpc_max_send_message_length: int = 4 * 1024 * 1024, # 4MB
42
- otel_exporter_oltp_endpoint: str | None = None,
43
- otel_service_name: str | None = None,
44
- otel_exporter_oltp_headers: dict[str, str] | None = None,
45
- otel_exporter_oltp_protocol: str | None = None,
46
42
  worker_healthcheck_port: int | None = None,
47
43
  worker_healthcheck_enabled: bool | None = None,
48
44
  worker_preset_labels: dict[str, str] = {},
@@ -56,10 +52,6 @@ class ClientConfig:
56
52
  self.logInterceptor = logger
57
53
  self.grpc_max_recv_message_length = grpc_max_recv_message_length
58
54
  self.grpc_max_send_message_length = grpc_max_send_message_length
59
- self.otel_exporter_oltp_endpoint = otel_exporter_oltp_endpoint
60
- self.otel_service_name = otel_service_name
61
- self.otel_exporter_oltp_headers = otel_exporter_oltp_headers
62
- self.otel_exporter_oltp_protocol = otel_exporter_oltp_protocol
63
55
  self.worker_healthcheck_port = worker_healthcheck_port
64
56
  self.worker_healthcheck_enabled = worker_healthcheck_enabled
65
57
  self.worker_preset_labels = worker_preset_labels
@@ -142,33 +134,6 @@ class ConfigLoader:
142
134
 
143
135
  tls_config = self._load_tls_config(config_data["tls"], host_port)
144
136
 
145
- otel_exporter_oltp_endpoint = get_config_value(
146
- "otel_exporter_oltp_endpoint", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_ENDPOINT"
147
- )
148
-
149
- otel_service_name = get_config_value(
150
- "otel_service_name", "HATCHET_CLIENT_OTEL_SERVICE_NAME"
151
- )
152
-
153
- _oltp_headers = get_config_value(
154
- "otel_exporter_oltp_headers", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS"
155
- )
156
-
157
- if _oltp_headers:
158
- try:
159
- otel_header_key, api_key = _oltp_headers.split("=", maxsplit=1)
160
- otel_exporter_oltp_headers = {otel_header_key: api_key}
161
- except ValueError:
162
- raise ValueError(
163
- "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS must be in the format `key=value`"
164
- )
165
- else:
166
- otel_exporter_oltp_headers = None
167
-
168
- otel_exporter_oltp_protocol = get_config_value(
169
- "otel_exporter_oltp_protocol", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_PROTOCOL"
170
- )
171
-
172
137
  worker_healthcheck_port = int(
173
138
  get_config_value(
174
139
  "worker_healthcheck_port", "HATCHET_CLIENT_WORKER_HEALTHCHECK_PORT"
@@ -196,6 +161,19 @@ class ConfigLoader:
196
161
  if autoscaling_target:
197
162
  worker_preset_labels["hatchet-autoscaling-target"] = autoscaling_target
198
163
 
164
+ legacy_otlp_headers = get_config_value(
165
+ "otel_exporter_otlp_endpoint", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_ENDPOINT"
166
+ )
167
+
168
+ legacy_otlp_headers = get_config_value(
169
+ "otel_exporter_otlp_headers", "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS"
170
+ )
171
+
172
+ if legacy_otlp_headers or legacy_otlp_headers:
173
+ warn(
174
+ "The `otel_exporter_otlp_*` fields are no longer supported as of SDK version `0.46.0`. Please see the documentation on OpenTelemetry at https://docs.hatchet.run/home/features/opentelemetry for more information on how to migrate to the new `HatchetInstrumentor`."
175
+ )
176
+
199
177
  return ClientConfig(
200
178
  tenant_id=tenant_id,
201
179
  tls_config=tls_config,
@@ -207,10 +185,6 @@ class ConfigLoader:
207
185
  logger=defaults.logInterceptor,
208
186
  grpc_max_recv_message_length=grpc_max_recv_message_length,
209
187
  grpc_max_send_message_length=grpc_max_send_message_length,
210
- otel_exporter_oltp_endpoint=otel_exporter_oltp_endpoint,
211
- otel_service_name=otel_service_name,
212
- otel_exporter_oltp_headers=otel_exporter_oltp_headers,
213
- otel_exporter_oltp_protocol=otel_exporter_oltp_protocol,
214
188
  worker_healthcheck_port=worker_healthcheck_port,
215
189
  worker_healthcheck_enabled=worker_healthcheck_enabled,
216
190
  worker_preset_labels=worker_preset_labels,