clue-python-sdk-core 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,734 @@
1
+ from __future__ import annotations
2
+
3
+ import time
4
+ from collections.abc import Mapping, Sequence
5
+
6
+ from .adapters import (
7
+ JsonValue,
8
+ _build_schema_evidence,
9
+ build_celery_job_event,
10
+ build_summary_event,
11
+ )
12
+ from .contracts import QUEUE_LIFECYCLE_SUMMARY_EVENT, SDK_COLLECTION_MODE_STANDARD
13
+ from .otel_bridge import annotate_current_otel_span, merge_current_otel_span_context
14
+ from .privacy import fingerprint_value
15
+ from .runtime import (
16
+ CluePythonSettings,
17
+ create_client,
18
+ create_request_context,
19
+ flush_client,
20
+ get_current_client,
21
+ get_current_context,
22
+ load_settings,
23
+ reset_current_state,
24
+ set_current_state,
25
+ )
26
+
27
# In-flight task bookkeeping: task_id -> (client, context, start perf counter,
# client contextvar token, context contextvar token). Populated by the prerun
# receiver and consumed (popped) by the postrun/failure receivers.
_task_state: dict[str, tuple[object, object, float, object, object]] = {}
# Publish-time evidence keyed by message id (or sender/object identity),
# handed from the before_task_publish receiver to after_task_publish.
_publish_state: dict[str, dict[str, JsonValue]] = {}
# Secret mixed into fingerprints of raw message ids before they are reported.
_CELERY_MESSAGE_FINGERPRINT_SECRET = "clue-backend-sdk-celery-message"
# Cap on remembered publish states; the oldest entry is evicted beyond this.
_MAX_PUBLISH_STATE_ENTRIES = 1024
# Bounds applied to payloads before schema hashing: max keys per mapping and
# max nesting depth (see _bounded_schema_input).
_MAX_PAYLOAD_SCHEMA_WIDTH = 50
_MAX_PAYLOAD_SCHEMA_DEPTH = 6
33
+
34
+
35
+ def _duration_bucket(duration_ms: int) -> str:
36
+ if duration_ms < 100:
37
+ return "lt_100ms"
38
+ if duration_ms < 1_000:
39
+ return "100_999ms"
40
+ if duration_ms < 10_000:
41
+ return "1000_9999ms"
42
+ return "gte_10000ms"
43
+
44
+
45
+ def _record_summary_metric(client: object, count: int = 1) -> None:
46
+ record = getattr(client, "record_events_summarized", None)
47
+ if callable(record):
48
+ record(count)
49
+
50
+
51
+ def _add_summary_event(client: object, event: Mapping[str, JsonValue]) -> bool:
52
+ added = getattr(client, "add_event")(event)
53
+ if added:
54
+ _record_summary_metric(client)
55
+ return bool(added)
56
+
57
+
58
def instrument_celery_signals(
    *,
    task_prerun: object,
    task_postrun: object,
    task_failure: object,
    before_task_publish: object | None = None,
    after_task_publish: object | None = None,
    task_retry: object | None = None,
    task_revoked: object | None = None,
    settings_loader=load_settings,
) -> bool:
    """Connect Clue telemetry receivers to the supplied Celery signal objects.

    The three required signals are connected directly; the optional ones are
    connected only when present and exposing ``connect``. Returns ``False``
    (connecting nothing) when the SDK is disabled or Celery capture is off in
    settings, ``True`` once all receivers are connected.
    """
    settings: CluePythonSettings = settings_loader()
    if not settings.enabled or not settings.capture_celery:
        return False

    # Records correlation/payload evidence before a message is published and
    # stamps correlation headers onto the outgoing message.
    def on_before_task_publish(
        sender=None,
        body=None,
        headers=None,
        properties=None,
        exchange=None,
        routing_key=None,
        **kwargs,
    ):
        current_context = get_current_context()
        message_id_raw = _message_id(headers, properties)
        correlation_id = _masked_message_id(message_id_raw)
        published_at_ms = int(time.time() * 1000)
        parent_request_span_id = (
            _string_or_none(current_context.get("request_span_id"))
            if current_context is not None
            else None
        )
        _inject_publish_headers(
            headers=headers,
            correlation_id=correlation_id,
            parent_request_span_id=parent_request_span_id,
            published_at_ms=published_at_ms,
        )
        state_key = _publish_state_key(headers, properties, sender)
        payload_evidence = _payload_schema_evidence(body, settings=settings)
        # Remembered state is consumed by on_after_task_publish via the same key.
        _remember_publish_state(
            state_key,
            {
                "message_type": _message_type(sender, headers),
                "destination_name": _destination_name(routing_key, exchange),
                "queue_name": _destination_name(routing_key, exchange),
                "message_id_masked": correlation_id,
                "correlation_id": correlation_id,
                "parent_request_span_id": parent_request_span_id,
                "published_at_ms": published_at_ms,
                "payload_schema_hash": payload_evidence["schema_hash"],
                "payload_shape_size": payload_evidence["shape_size"],
                "payload_field_paths": payload_evidence["field_paths"],
            },
        )

    # Emits one queue-lifecycle "publish" summary event, preferring evidence
    # captured in on_before_task_publish and falling back to recomputation.
    def on_after_task_publish(
        sender=None,
        body=None,
        headers=None,
        properties=None,
        exchange=None,
        routing_key=None,
        **kwargs,
    ):
        state_key = _publish_state_key(headers, properties, sender)
        publish_state = _publish_state.pop(state_key, None) or {}
        current_client = get_current_client()
        current_context = get_current_context()
        created_client = None
        client = current_client
        context = current_context
        # Outside a request context (e.g. publish from a worker/script), build
        # a one-shot client/context pair and flush it at the end.
        if client is None or context is None:
            created_client = create_client(settings)
            if created_client is None:
                return
            client = created_client
            context = create_request_context(
                settings=settings,
                service_name=settings.service_name,
                request_id=_string_or_none(_message_id(headers, properties)),
            )
        payload_evidence = (
            {
                "schema_hash": publish_state.get("payload_schema_hash"),
                "shape_size": publish_state.get("payload_shape_size") or 0,
                "field_paths": publish_state.get("payload_field_paths") or [],
            }
            if publish_state
            else _payload_schema_evidence(body, settings=settings)
        )
        message_id_masked = (
            _string_or_none(publish_state.get("message_id_masked"))
            or _masked_message_id(_message_id(headers, properties))
        )
        parent_request_span_id = (
            _string_or_none(publish_state.get("parent_request_span_id"))
            or (
                _string_or_none(current_context.get("request_span_id"))
                if current_context is not None
                else None
            )
        )
        _add_summary_event(
            client,
            _queue_summary_event(
                context=context,
                settings=settings,
                message_operation="publish",
                detail_reason=(
                    "request_linked_publish"
                    if parent_request_span_id is not None
                    else "diagnostic_sample"
                ),
                message_type=(
                    _string_or_none(publish_state.get("message_type"))
                    or _message_type(sender, headers)
                ),
                destination_name=(
                    _string_or_none(publish_state.get("destination_name"))
                    or _destination_name(routing_key, exchange)
                ),
                queue_name=(
                    _string_or_none(publish_state.get("queue_name"))
                    or _destination_name(routing_key, exchange)
                ),
                message_id_masked=message_id_masked,
                correlation_id=message_id_masked,
                parent_request_span_id=parent_request_span_id,
                payload_schema_hash=_string_or_none(payload_evidence["schema_hash"]),
                payload_shape_size=_int_or_none(payload_evidence["shape_size"]),
                payload_field_paths=_string_list(payload_evidence["field_paths"]),
                metrics={"publish_count": 1},
            ),
        )
        if created_client is not None:
            flush_client(created_client)

    # Opens per-task state: client, request context (merged with any current
    # OTel CONSUMER span), start time, and contextvar tokens for later reset.
    def on_task_prerun(task_id=None, task=None, **kwargs):
        client = create_client(settings)
        if client is None:
            return
        context = merge_current_otel_span_context(
            create_request_context(
                settings=settings,
                service_name=settings.service_name,
                request_id=str(task_id) if task_id is not None else None,
            ),
            expected_kind="CONSUMER",
        )
        annotate_current_otel_span(
            {
                "clue.job.type": _task_name(task),
                "clue.job.id": str(task_id) if task_id is not None else None,
                "clue.job.queue_name": _queue_name(task),
            },
            expected_kind="CONSUMER",
        )
        # Per-job "started" detail events are emitted only outside standard
        # collection mode; standard mode relies on the postrun summary.
        if settings.sdk_collection_mode != SDK_COLLECTION_MODE_STANDARD:
            client.add_event(
                build_celery_job_event(
                    context=context,
                    status="started",
                    task_name=_task_name(task),
                    task_id=str(task_id) if task_id is not None else None,
                    queue_name=_queue_name(task),
                    correlation_id=_task_correlation_id(task, task_id),
                    message_id_masked=_task_correlation_id(task, task_id),
                    parent_request_span_id=_task_parent_request_span_id(task),
                    delay_ms=_task_delay_ms(task),
                    attempt_count=_task_attempt_count(task),
                    max_attempts=_task_max_attempts(task),
                )
            )
        client_token, context_token = set_current_state(client=client, context=context)
        _task_state[str(task_id)] = (
            client,
            context,
            time.perf_counter(),
            client_token,
            context_token,
        )

    # Closes the state opened in prerun: emits a success summary (standard
    # mode) or a "finished" detail event, then resets state and flushes.
    def on_task_postrun(task_id=None, task=None, **kwargs):
        state = _task_state.pop(str(task_id), None)
        if state is None:
            return
        client, context, started_at, client_token, context_token = state
        duration_ms = int((time.perf_counter() - started_at) * 1000)
        if settings.sdk_collection_mode == SDK_COLLECTION_MODE_STANDARD:
            event = _queue_summary_event(
                context=context,
                settings=settings,
                message_operation="consume",
                detail_reason="success",
                message_type=_task_name(task),
                destination_name=_queue_name(task),
                queue_name=_queue_name(task),
                message_id_masked=_task_correlation_id(task, task_id),
                correlation_id=_task_correlation_id(task, task_id),
                parent_request_span_id=_task_parent_request_span_id(task),
                attempt_count=_task_attempt_count(task),
                max_attempts=_task_max_attempts(task),
                delay_ms=_task_delay_ms(task),
                metrics={
                    "duration_ms": duration_ms,
                    "normal_success_count": 1,
                    "job_type_present": 1 if _task_name(task) != "unknown" else 0,
                },
            )
            event["properties"]["job_type"] = _task_name(task)
            event["properties"]["result_kind"] = "success"
            event["properties"]["duration_bucket"] = _duration_bucket(duration_ms)
            _add_summary_event(client, event)
        else:
            client.add_event(
                build_celery_job_event(
                    context=context,
                    status="finished",
                    task_name=_task_name(task),
                    task_id=str(task_id) if task_id is not None else None,
                    queue_name=_queue_name(task),
                    duration_ms=duration_ms,
                    correlation_id=_task_correlation_id(task, task_id),
                    message_id_masked=_task_correlation_id(task, task_id),
                    parent_request_span_id=_task_parent_request_span_id(task),
                    delay_ms=_task_delay_ms(task),
                    attempt_count=_task_attempt_count(task),
                    max_attempts=_task_max_attempts(task),
                )
            )
        reset_current_state(client_token, context_token)
        flush_client(client)

    # Failure counterpart of postrun: emits a "failed" detail event carrying
    # the exception class name, then resets state and flushes.
    def on_task_failure(task_id=None, task=None, exception=None, **kwargs):
        state = _task_state.pop(str(task_id), None)
        if state is None:
            return
        client, context, started_at, client_token, context_token = state
        client.add_event(
            build_celery_job_event(
                context=context,
                status="failed",
                task_name=_task_name(task),
                task_id=str(task_id) if task_id is not None else None,
                queue_name=_queue_name(task),
                duration_ms=int((time.perf_counter() - started_at) * 1000),
                failure_reason=exception.__class__.__name__ if exception is not None else None,
                correlation_id=_task_correlation_id(task, task_id),
                message_id_masked=_task_correlation_id(task, task_id),
                parent_request_span_id=_task_parent_request_span_id(task),
                delay_ms=_task_delay_ms(task),
                attempt_count=_task_attempt_count(task),
                max_attempts=_task_max_attempts(task),
                failure_type=exception.__class__.__name__ if exception is not None else "failed",
                detail_reason="failed",
            )
        )
        reset_current_state(client_token, context_token)
        flush_client(client)

    # Emits a "retry" summary, reusing the current client/context when
    # available and otherwise creating (and flushing) a one-shot pair.
    def on_task_retry(request=None, reason=None, **kwargs):
        emit_context = get_current_context()
        emit_client = get_current_client()
        created_client = None
        if emit_client is None or emit_context is None:
            created_client = create_client(settings)
            if created_client is None:
                return
            emit_client = created_client
            emit_context = create_request_context(
                settings=settings,
                service_name=settings.service_name,
                request_id=_string_or_none(_request_attr(request, "id")),
            )
        _add_summary_event(
            emit_client,
            _queue_summary_event(
                context=emit_context,
                settings=settings,
                message_operation="retry",
                detail_reason="retry",
                message_type=_string_or_none(_request_attr(request, "task")),
                destination_name=_request_queue_name(request),
                queue_name=_request_queue_name(request),
                message_id_masked=_masked_message_id(_request_attr(request, "id")),
                correlation_id=_request_correlation_id(request),
                parent_request_span_id=_request_parent_request_span_id(request),
                attempt_count=_request_attempt_count(request),
                failure_type=reason.__class__.__name__ if reason is not None else "retry",
                metrics={"retry_count": 1},
            ),
        )
        if created_client is not None:
            flush_client(created_client)

    # Emits a "revoke" summary with terminated/expired counters on a fresh
    # one-shot client (revocation may happen outside any task context).
    def on_task_revoked(request=None, terminated=None, expired=None, **kwargs):
        emit_client = create_client(settings)
        if emit_client is None:
            return
        emit_context = create_request_context(
            settings=settings,
            service_name=settings.service_name,
            request_id=_string_or_none(_request_attr(request, "id")),
        )
        _add_summary_event(
            emit_client,
            _queue_summary_event(
                context=emit_context,
                settings=settings,
                message_operation="revoke",
                detail_reason="failed",
                message_type=_string_or_none(_request_attr(request, "task")),
                destination_name=_request_queue_name(request),
                queue_name=_request_queue_name(request),
                message_id_masked=_masked_message_id(_request_attr(request, "id")),
                correlation_id=_request_correlation_id(request),
                parent_request_span_id=_request_parent_request_span_id(request),
                attempt_count=_request_attempt_count(request),
                failure_type="revoked",
                dlq_reason="unavailable",
                metrics={
                    "revoked_count": 1,
                    "terminated_count": 1 if terminated else 0,
                    "expired_count": 1 if expired else 0,
                },
            ),
        )
        flush_client(emit_client)

    task_prerun.connect(on_task_prerun, weak=False)
    task_postrun.connect(on_task_postrun, weak=False)
    task_failure.connect(on_task_failure, weak=False)
    _connect_optional_signal(before_task_publish, on_before_task_publish)
    _connect_optional_signal(after_task_publish, on_after_task_publish)
    _connect_optional_signal(task_retry, on_task_retry)
    _connect_optional_signal(task_revoked, on_task_revoked)
    return True
397
+
398
+
399
+ def _connect_optional_signal(signal: object | None, receiver: object) -> None:
400
+ if signal is None:
401
+ return
402
+ connect = getattr(signal, "connect", None)
403
+ if callable(connect):
404
+ connect(receiver, weak=False)
405
+
406
+
407
+ def _task_name(task: object) -> str:
408
+ name = getattr(task, "name", None)
409
+ if isinstance(name, str) and name.strip():
410
+ return name
411
+ return "unknown"
412
+
413
+
414
+ def _queue_name(task: object) -> str | None:
415
+ request = getattr(task, "request", None)
416
+ delivery_info = getattr(request, "delivery_info", None)
417
+ if isinstance(delivery_info, dict):
418
+ routing_key = delivery_info.get("routing_key")
419
+ if isinstance(routing_key, str) and routing_key.strip():
420
+ return routing_key
421
+ return None
422
+
423
+
424
+ def _string_or_none(value: object) -> str | None:
425
+ return value.strip() if isinstance(value, str) and value.strip() else None
426
+
427
+
428
+ def _int_or_none(value: object) -> int | None:
429
+ if isinstance(value, bool):
430
+ return None
431
+ try:
432
+ parsed = int(str(value))
433
+ except (TypeError, ValueError):
434
+ return None
435
+ return parsed if parsed >= 0 else None
436
+
437
+
438
def _string_list(value: object) -> list[str]:
    """Coerce a non-string sequence into a list of stripped, non-blank strings."""
    if isinstance(value, (str, bytes, bytearray)) or not isinstance(value, Sequence):
        return []
    cleaned: list[str] = []
    for entry in value:
        if not isinstance(entry, str):
            continue
        stripped = entry.strip()
        if stripped:
            cleaned.append(stripped)
    return cleaned
447
+
448
+
449
def _message_id(headers: object, properties: object) -> str | None:
    """Find the first usable message identifier in headers, then properties."""
    candidate_keys = ("correlation_id", "correlation-id", "id", "task_id")
    for source in (headers, properties):
        if not isinstance(source, Mapping):
            continue
        for key in candidate_keys:
            raw = source.get(key)
            if isinstance(raw, str):
                stripped = raw.strip()
                if stripped:
                    return stripped
    return None
457
+
458
+
459
def _masked_message_id(value: object) -> str | None:
    """Fingerprint a raw message id so it can be reported without exposing it."""
    text = _string_or_none(value)
    if text is None:
        return None
    return fingerprint_value(text, _CELERY_MESSAGE_FINGERPRINT_SECRET)
464
+
465
+
466
def _message_type(sender: object, headers: object) -> str:
    """Best-effort message type: header fields first, then sender, else ``"unknown"``."""
    if isinstance(headers, Mapping):
        for key in ("task", "type", "message_type"):
            candidate = headers.get(key)
            if isinstance(candidate, str) and candidate.strip():
                return candidate.strip()
    if isinstance(sender, str) and sender.strip():
        return sender.strip()
    return "unknown"
474
+
475
+
476
def _destination_name(routing_key: object, exchange: object) -> str | None:
    """Prefer the routing key over the exchange as the destination label."""
    for candidate in (routing_key, exchange):
        if isinstance(candidate, str):
            stripped = candidate.strip()
            if stripped:
                return stripped
    return None
478
+
479
+
480
def _publish_state_key(headers: object, properties: object, sender: object) -> str:
    """Build the key that ties the before/after publish signals together.

    Falls back to object identities of the header/property objects when no
    message id is available.
    """
    message_id = _message_id(headers, properties)
    if message_id is None:
        return f"sender:{_message_type(sender, headers)}:{id(headers)}:{id(properties)}"
    return f"id:{message_id}"
485
+
486
+
487
def _remember_publish_state(key: str, value: Mapping[str, JsonValue]) -> None:
    """Store publish-time evidence, evicting one oldest entry when at capacity."""
    if len(_publish_state) >= _MAX_PUBLISH_STATE_ENTRIES:
        oldest_key = next(iter(_publish_state))
        _publish_state.pop(oldest_key, None)
    _publish_state[key] = dict(value)
491
+
492
+
493
def _bounded_schema_input(value: object, *, depth: int = 0) -> object:
    """Clamp a nested payload to bounded depth and width before schema hashing.

    Mappings keep at most ``_MAX_PAYLOAD_SCHEMA_WIDTH`` keys (sorted by their
    string form, with a truncation marker); sequences are represented by their
    first element only; anything deeper than ``_MAX_PAYLOAD_SCHEMA_DEPTH``
    becomes ``None``.
    """
    if depth >= _MAX_PAYLOAD_SCHEMA_DEPTH:
        return None
    if isinstance(value, Mapping):
        bounded: dict[str, object] = {}
        for index, key in enumerate(sorted(value.keys(), key=str)):
            if index >= _MAX_PAYLOAD_SCHEMA_WIDTH:
                bounded["_clue_truncated_keys"] = True
                break
            bounded[str(key)] = _bounded_schema_input(value[key], depth=depth + 1)
        return bounded
    if isinstance(value, Sequence) and not isinstance(value, (str, bytes, bytearray)):
        entries = list(value)
        if entries:
            return [_bounded_schema_input(entries[0], depth=depth + 1)]
        return []
    return value
510
+
511
+
512
def _payload_schema_evidence(
    body: object,
    *,
    settings: CluePythonSettings,
) -> dict[str, JsonValue]:
    """Produce schema-hash evidence for a Celery payload, honoring denied keys."""
    schema_input = _bounded_schema_input(_celery_payload_schema_input(body))
    return _build_schema_evidence(schema_input, denied_keys=settings.denied_keys)
521
+
522
+
523
+ def _celery_payload_schema_input(body: object) -> object:
524
+ if isinstance(body, Sequence) and not isinstance(body, (str, bytes, bytearray)):
525
+ items = list(body)
526
+ if len(items) >= 2:
527
+ return {
528
+ "args": items[0],
529
+ "kwargs": items[1],
530
+ }
531
+ return body
532
+
533
+
534
+ def _inject_publish_headers(
535
+ *,
536
+ headers: object,
537
+ correlation_id: str | None,
538
+ parent_request_span_id: str | None,
539
+ published_at_ms: int,
540
+ ) -> None:
541
+ if not isinstance(headers, dict):
542
+ return
543
+ if correlation_id is not None:
544
+ headers.setdefault("x-clue-correlation-id", correlation_id)
545
+ if parent_request_span_id is not None:
546
+ headers.setdefault("x-clue-parent-request-span-id", parent_request_span_id)
547
+ headers.setdefault("x-clue-published-at-ms", str(published_at_ms))
548
+
549
+
550
+ def _task_headers(task: object) -> Mapping[str, object]:
551
+ request = getattr(task, "request", None)
552
+ headers = getattr(request, "headers", None)
553
+ return headers if isinstance(headers, Mapping) else {}
554
+
555
+
556
def _task_correlation_id(task: object, task_id: object) -> str | None:
    """Correlation id preference: injected header, masked request attr, masked id."""
    from_header = _string_or_none(_task_headers(task).get("x-clue-correlation-id"))
    if from_header is not None:
        return from_header
    request = getattr(task, "request", None)
    from_request = _masked_message_id(_request_attr(request, "correlation_id"))
    return from_request or _masked_message_id(task_id)
563
+
564
+
565
def _task_parent_request_span_id(task: object) -> str | None:
    """Parent request span id propagated via the injected publish header."""
    headers = _task_headers(task)
    return _string_or_none(headers.get("x-clue-parent-request-span-id"))
567
+
568
+
569
def _task_delay_ms(task: object) -> int | None:
    """Queue latency: now minus the publish-timestamp header, clamped at zero."""
    published_at_ms = _int_or_none(_task_headers(task).get("x-clue-published-at-ms"))
    if published_at_ms is None:
        return None
    now_ms = int(time.time() * 1000)
    return max(0, now_ms - published_at_ms)
574
+
575
+
576
def _task_attempt_count(task: object) -> int | None:
    """Number of retries so far, read from the task request's ``retries``."""
    request = getattr(task, "request", None)
    return _int_or_none(_request_attr(request, "retries"))
578
+
579
+
580
def _task_max_attempts(task: object) -> int | None:
    """The task's configured ``max_retries``, when it parses as a non-negative int."""
    max_retries = getattr(task, "max_retries", None)
    return _int_or_none(max_retries)
582
+
583
+
584
+ def _request_attr(request: object, name: str) -> object:
585
+ return getattr(request, name, None) if request is not None else None
586
+
587
+
588
def _request_headers(request: object) -> Mapping[str, object]:
    """Return the request's headers mapping, or an empty mapping."""
    headers = _request_attr(request, "headers")
    if isinstance(headers, Mapping):
        return headers
    return {}
591
+
592
+
593
def _request_correlation_id(request: object) -> str | None:
    """Correlation id: injected header first, then masked request attributes."""
    from_header = _string_or_none(_request_headers(request).get("x-clue-correlation-id"))
    if from_header is not None:
        return from_header
    from_attr = _masked_message_id(_request_attr(request, "correlation_id"))
    return from_attr or _masked_message_id(_request_attr(request, "id"))
600
+
601
+
602
def _request_parent_request_span_id(request: object) -> str | None:
    """Parent request span id from the injected publish header, if present."""
    headers = _request_headers(request)
    return _string_or_none(headers.get("x-clue-parent-request-span-id"))
604
+
605
+
606
def _request_queue_name(request: object) -> str | None:
    """Routing key from the request's delivery info, when available."""
    delivery_info = _request_attr(request, "delivery_info")
    if not isinstance(delivery_info, Mapping):
        return None
    return _string_or_none(delivery_info.get("routing_key"))
611
+
612
+
613
def _request_attempt_count(request: object) -> int | None:
    """Retry count from the request, when it parses as a non-negative int."""
    return _int_or_none(_request_attr(request, "retries"))
615
+
616
+
617
def _queue_summary_event(
    *,
    context: Mapping[str, JsonValue],
    settings: CluePythonSettings,
    message_operation: str,
    detail_reason: str,
    message_type: str | None = None,
    destination_name: str | None = None,
    queue_name: str | None = None,
    message_id_masked: str | None = None,
    correlation_id: str | None = None,
    parent_request_span_id: str | None = None,
    payload_schema_hash: str | None = None,
    payload_shape_size: int | None = None,
    payload_field_paths: Sequence[str] = (),
    attempt_count: int | None = None,
    max_attempts: int | None = None,
    delay_ms: int | None = None,
    failure_type: str | None = None,
    dlq_reason: str | None = None,
    metrics: Mapping[str, int | float] | None = None,
) -> dict[str, JsonValue]:
    """Build one queue-lifecycle summary event and enrich it with queue fields.

    The event carries the queue metadata twice: inside ``properties`` (via
    ``build_summary_event``) and mirrored at the top level by
    ``_enrich_queue_event``. ``delay_ms`` and ``attempt_count`` are also folded
    into the metrics when present.
    """
    properties: dict[str, JsonValue] = dict(
        message_operation=message_operation,
        messaging_system="celery",
        detail_reason=detail_reason,
        message_type=message_type,
        destination_name=destination_name,
        queue_name=queue_name,
        message_id_masked=message_id_masked,
        correlation_id=correlation_id,
        parent_request_span_id=parent_request_span_id,
        payload_schema_hash=payload_schema_hash,
        payload_shape_size=payload_shape_size,
        payload_field_paths=list(payload_field_paths),
        attempt_count=attempt_count,
        max_attempts=max_attempts,
        delay_ms=delay_ms,
        failure_type=failure_type,
        dlq_reason=dlq_reason,
        dlq_capture="unavailable",
    )
    summary_metrics: dict[str, int | float] = {}
    if metrics:
        summary_metrics.update(metrics)
    if delay_ms is not None:
        summary_metrics["delay_ms"] = delay_ms
    if attempt_count is not None:
        summary_metrics["attempt_count"] = attempt_count
    rate_limit_target = queue_name or destination_name or "unknown"
    event = build_summary_event(
        context=context,
        event_name=QUEUE_LIFECYCLE_SUMMARY_EVENT,
        collector_name="celery",
        aggregation_kind=message_operation,
        summary_window_ms=0,
        summary_count=1,
        budget_window_ms=60_000,
        rate_limit_key=f"queue:{rate_limit_target}:{message_operation}",
        properties=properties,
        metrics=summary_metrics,
        denied_keys=settings.denied_keys,
    )
    _enrich_queue_event(
        event,
        correlation_id=correlation_id,
        message_id_masked=message_id_masked,
        parent_request_span_id=parent_request_span_id,
        message_operation=message_operation,
        messaging_system="celery",
        detail_reason=detail_reason,
        destination_name=destination_name,
        message_type=message_type,
        attempt_count=attempt_count,
        max_attempts=max_attempts,
        delay_ms=delay_ms,
        failure_type=failure_type,
        dlq_reason=dlq_reason,
        payload_schema_hash=payload_schema_hash,
    )
    return event
695
+
696
+
697
+ def _enrich_queue_event(
698
+ event: object,
699
+ *,
700
+ correlation_id: str | None = None,
701
+ message_id_masked: str | None = None,
702
+ parent_request_span_id: str | None = None,
703
+ message_operation: str | None = None,
704
+ messaging_system: str | None = None,
705
+ detail_reason: str | None = None,
706
+ destination_name: str | None = None,
707
+ message_type: str | None = None,
708
+ attempt_count: int | None = None,
709
+ max_attempts: int | None = None,
710
+ delay_ms: int | None = None,
711
+ failure_type: str | None = None,
712
+ dlq_reason: str | None = None,
713
+ payload_schema_hash: str | None = None,
714
+ ) -> None:
715
+ if not isinstance(event, dict):
716
+ return
717
+ event.update(
718
+ {
719
+ "correlation_id": correlation_id,
720
+ "message_id_masked": message_id_masked,
721
+ "parent_request_span_id": parent_request_span_id,
722
+ "message_operation": message_operation,
723
+ "messaging_system": messaging_system,
724
+ "detail_reason": detail_reason,
725
+ "destination_name": destination_name,
726
+ "message_type": message_type,
727
+ "attempt_count": attempt_count,
728
+ "max_attempts": max_attempts,
729
+ "delay_ms": delay_ms,
730
+ "failure_type": failure_type,
731
+ "dlq_reason": dlq_reason,
732
+ "payload_schema_hash": payload_schema_hash,
733
+ }
734
+ )