opentelemetry-instrumentation-confluent-kafka 0.40b0__py3-none-any.whl → 0.41b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- opentelemetry/instrumentation/confluent_kafka/__init__.py +52 -21
- opentelemetry/instrumentation/confluent_kafka/utils.py +32 -2
- opentelemetry/instrumentation/confluent_kafka/version.py +1 -1
- {opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info → opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info}/METADATA +1 -1
- opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/RECORD +9 -0
- opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info/RECORD +0 -9
- {opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info → opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info}/WHEEL +0 -0
- {opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info → opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info}/entry_points.txt +0 -0
- {opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info → opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info}/licenses/LICENSE +0 -0
opentelemetry/instrumentation/confluent_kafka/__init__.py
@@ -112,6 +112,8 @@ from opentelemetry.trace import Link, SpanKind, Tracer
 from .package import _instruments
 from .utils import (
     KafkaPropertiesExtractor,
+    _end_current_consume_span,
+    _create_new_consume_span,
     _enrich_span,
     _get_span_name,
     _kafka_getter,
@@ -137,6 +139,12 @@ class AutoInstrumentedConsumer(Consumer):
     def poll(self, timeout=-1):  # pylint: disable=useless-super-delegation
         return super().poll(timeout)

+    # This method is deliberately implemented in order to allow wrapt to wrap this function
+    def consume(
+        self, *args, **kwargs
+    ):  # pylint: disable=useless-super-delegation
+        return super().consume(*args, **kwargs)
+

 class ProxiedProducer(Producer):
     def __init__(self, producer: Producer, tracer: Tracer):
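Note on the hunk above: the consume() override is a deliberate no-op delegation because wrapt.wrap_function_wrapper patches the named attribute on the class it is given, so the method must be declared on AutoInstrumentedConsumer itself rather than only inherited from Consumer. A minimal standalone sketch of that pattern, with illustrative class and wrapper names that are not part of this package:

import wrapt


class Base:
    def consume(self, *args, **kwargs):
        return ["record-1", "record-2"]


class AutoInstrumented(Base):
    # Deliberate "useless" delegation: it gives wrapt an attribute on this class to patch.
    def consume(self, *args, **kwargs):  # pylint: disable=useless-super-delegation
        return super().consume(*args, **kwargs)


def _wrapper(wrapped, instance, args, kwargs):
    # An instrumentation hook would start and end spans here.
    print("before consume")
    result = wrapped(*args, **kwargs)
    print("after consume")
    return result


wrapt.wrap_function_wrapper(AutoInstrumented, "consume", _wrapper)
print(AutoInstrumented().consume())  # before consume / after consume / ['record-1', 'record-2']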
@@ -177,10 +185,14 @@ class ProxiedConsumer(Consumer):
     def commit(self, *args, **kwargs):
         return self._consumer.commit(*args, **kwargs)

-    def consume(
-        self, num_messages=1, *args, **kwargs
-    ):  # pylint: disable=keyword-arg-before-vararg
-        return self._consumer.consume(num_messages, *args, **kwargs)
+    def consume(self, *args, **kwargs):
+        return ConfluentKafkaInstrumentor.wrap_consume(
+            self._consumer.consume,
+            self,
+            self._tracer,
+            args,
+            kwargs,
+        )

     def get_watermark_offsets(
         self, partition, timeout=-1, *args, **kwargs
@@ -275,6 +287,11 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
                 func, instance, self._tracer, args, kwargs
             )

+        def _inner_wrap_consume(func, instance, args, kwargs):
+            return ConfluentKafkaInstrumentor.wrap_consume(
+                func, instance, self._tracer, args, kwargs
+            )
+
         wrapt.wrap_function_wrapper(
             AutoInstrumentedProducer,
             "produce",
@@ -287,6 +304,12 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
             _inner_wrap_poll,
         )

+        wrapt.wrap_function_wrapper(
+            AutoInstrumentedConsumer,
+            "consume",
+            _inner_wrap_consume,
+        )
+
     def _uninstrument(self, **kwargs):
         confluent_kafka.Producer = self._original_kafka_producer
         confluent_kafka.Consumer = self._original_kafka_consumer
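Taken together, the two hunks above register _inner_wrap_consume on AutoInstrumentedConsumer, so batch consumption is traced on the auto-instrumented path as well. A hedged usage sketch of that path; the broker address, group id, and topic are placeholders, and the span names in the comments follow the wrap_consume implementation shown later in this diff:

import confluent_kafka

from opentelemetry.instrumentation.confluent_kafka import ConfluentKafkaInstrumentor

ConfluentKafkaInstrumentor().instrument()  # swaps in the AutoInstrumented Consumer/Producer classes

consumer = confluent_kafka.Consumer(
    {
        "bootstrap.servers": "localhost:9092",  # placeholder broker
        "group.id": "example-group",            # placeholder consumer group
        "auto.offset.reset": "earliest",
    }
)
consumer.subscribe(["example-topic"])           # placeholder topic

# consume() now routes through _inner_wrap_consume -> wrap_consume: a "recv" span
# is recorded around the fetch, and an "example-topic process" span is started
# for the returned batch.
records = consumer.consume(num_messages=10, timeout=1.0)
for record in records:
    print(record.value())

consumer.close()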
@@ -326,29 +349,14 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
     @staticmethod
     def wrap_poll(func, instance, tracer, args, kwargs):
         if instance._current_consume_span:
-            context.detach(instance._current_context_token)
-            instance._current_context_token = None
-            instance._current_consume_span.end()
-            instance._current_consume_span = None
+            _end_current_consume_span(instance)

         with tracer.start_as_current_span(
             "recv", end_on_exit=True, kind=trace.SpanKind.CONSUMER
         ):
             record = func(*args, **kwargs)
             if record:
-                links = []
-                ctx = propagate.extract(record.headers(), getter=_kafka_getter)
-                if ctx:
-                    for item in ctx.values():
-                        if hasattr(item, "get_span_context"):
-                            links.append(Link(context=item.get_span_context()))
-
-                instance._current_consume_span = tracer.start_span(
-                    name=f"{record.topic()} process",
-                    links=links,
-                    kind=SpanKind.CONSUMER,
-                )
-
+                _create_new_consume_span(instance, tracer, [record])
                 _enrich_span(
                     instance._current_consume_span,
                     record.topic(),
@@ -361,3 +369,26 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
         )

         return record
+
+    @staticmethod
+    def wrap_consume(func, instance, tracer, args, kwargs):
+        if instance._current_consume_span:
+            _end_current_consume_span(instance)
+
+        with tracer.start_as_current_span(
+            "recv", end_on_exit=True, kind=trace.SpanKind.CONSUMER
+        ):
+            records = func(*args, **kwargs)
+            if len(records) > 0:
+                _create_new_consume_span(instance, tracer, records)
+                _enrich_span(
+                    instance._current_consume_span,
+                    records[0].topic(),
+                    operation=MessagingOperationValues.PROCESS,
+                )
+
+        instance._current_context_token = context.attach(
+            trace.set_span_in_context(instance._current_consume_span)
+        )
+
+        return records
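The wrap_consume method above mirrors wrap_poll but operates on a batch: the "<topic> process" span it starts is attached to the current context and deliberately left open when consume() returns, and it is only ended by _end_current_consume_span on the next consume() or poll() call. A sketch of what that lifecycle looks like from a consumer loop; handle() and the loop structure are illustrative, not part of the package:

from confluent_kafka import Consumer


def handle(record) -> None:
    # Runs while the current batch's "<topic> process" span is attached, so any
    # spans created here become children of it.
    print(record.topic(), record.partition(), record.offset())


def run_loop(consumer: Consumer) -> None:
    # Assumes `consumer` was already instrumented (see the usage sketch above).
    while True:
        # Each call first ends the "process" span left open by the previous
        # iteration, then records a "recv" span around the broker fetch.
        records = consumer.consume(num_messages=50, timeout=1.0)
        if not records:
            continue
        for record in records:
            handle(record)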
opentelemetry/instrumentation/confluent_kafka/utils.py
@@ -1,6 +1,8 @@
 from logging import getLogger
 from typing import List, Optional

+from opentelemetry import context, propagate
+from opentelemetry.trace import SpanKind, Link
 from opentelemetry.propagators import textmap
 from opentelemetry.semconv.trace import (
     MessagingDestinationKindValues,
@@ -81,6 +83,34 @@ class KafkaContextSetter(textmap.Setter):
 _kafka_getter = KafkaContextGetter()


+def _end_current_consume_span(instance):
+    context.detach(instance._current_context_token)
+    instance._current_context_token = None
+    instance._current_consume_span.end()
+    instance._current_consume_span = None
+
+
+def _create_new_consume_span(instance, tracer, records):
+    links = _get_links_from_records(records)
+    instance._current_consume_span = tracer.start_span(
+        name=f"{records[0].topic()} process",
+        links=links,
+        kind=SpanKind.CONSUMER,
+    )
+
+
+def _get_links_from_records(records):
+    links = []
+    for record in records:
+        ctx = propagate.extract(record.headers(), getter=_kafka_getter)
+        if ctx:
+            for item in ctx.values():
+                if hasattr(item, "get_span_context"):
+                    links.append(Link(context=item.get_span_context()))
+
+    return links
+
+
 def _enrich_span(
     span,
     topic,
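The helpers above factor out the logic that wrap_poll previously carried inline: _get_links_from_records reads the propagated trace context from each record's Kafka headers and turns any remote span contexts into links for the consumer-side "<topic> process" span. A standalone sketch of that technique using only public OpenTelemetry APIs; it assumes opentelemetry-api and opentelemetry-sdk are installed and uses a plain dict carrier in place of Kafka message headers:

from opentelemetry import propagate, trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.trace import Link

trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer(__name__)

# Producer side: inject the active context into a carrier (in Kafka, the message headers).
carrier = {}
with tracer.start_as_current_span("produce"):
    propagate.inject(carrier)  # e.g. adds a "traceparent" entry

# Consumer side: extract the context and turn any remote spans into links.
ctx = propagate.extract(carrier)
links = [
    Link(context=item.get_span_context())
    for item in ctx.values()
    if hasattr(item, "get_span_context")
]

# Start the processing span with links back to the producing trace.
with tracer.start_as_current_span("example-topic process", links=links):
    pass  # process the batch here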
@@ -94,7 +124,7 @@ def _enrich_span(
     span.set_attribute(SpanAttributes.MESSAGING_SYSTEM, "kafka")
     span.set_attribute(SpanAttributes.MESSAGING_DESTINATION, topic)

-    if partition:
+    if partition is not None:
         span.set_attribute(SpanAttributes.MESSAGING_KAFKA_PARTITION, partition)

     span.set_attribute(
@@ -109,7 +139,7 @@ def _enrich_span(

     # https://stackoverflow.com/questions/65935155/identify-and-find-specific-message-in-kafka-topic
     # A message within Kafka is uniquely defined by its topic name, topic partition and offset.
-    if partition and offset and topic:
+    if partition is not None and offset is not None and topic:
         span.set_attribute(
             SpanAttributes.MESSAGING_MESSAGE_ID,
             f"{topic}.{partition}.{offset}",
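The two one-line changes above fix a truthiness bug: partition 0 and offset 0 are valid Kafka coordinates, but both are falsy in Python, so the old checks silently skipped the partition attribute and the messaging.message_id attribute for them. A quick illustration:

# Valid coordinates for the first message in partition 0 of a topic.
partition, offset, topic = 0, 0, "example-topic"

if partition and offset and topic:  # old check: False for 0/0, attribute skipped
    print("old check would set messaging.message_id")

if partition is not None and offset is not None and topic:  # new check: True
    print(f"new check sets messaging.message_id = {topic}.{partition}.{offset}")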
{opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info → opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentelemetry-instrumentation-confluent-kafka
-Version: 0.40b0
+Version: 0.41b0
 Summary: OpenTelemetry Confluent Kafka instrumentation
 Project-URL: Homepage, https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation/opentelemetry-instrumentation-confluent-kafka
 Author-email: OpenTelemetry Authors <cncf-opentelemetry-contributors@lists.cncf.io>
opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/RECORD
@@ -0,0 +1,9 @@
+opentelemetry/instrumentation/confluent_kafka/__init__.py,sha256=JIjbWOhqS7t1SA5q99EOITDMLN1jQANLw8C0IdjiI_w,13152
+opentelemetry/instrumentation/confluent_kafka/package.py,sha256=GAdglvW5Pp12xoeaym8zQjRE9fyX8PGFV-u8v8ZjWZI,641
+opentelemetry/instrumentation/confluent_kafka/utils.py,sha256=50fgsCnN72GMhlU1psVdi1jgzJ5n-n5BXHZ7nls1W-s,4507
+opentelemetry/instrumentation/confluent_kafka/version.py,sha256=9LtSy_KpXBRCK2aaMFk3l5qiAoEDLs7boZzuBeF3mPY,608
+opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/METADATA,sha256=NN6GTpvP3roNdhJwkrm2EZTHbYk5pbHgSqEmu81lobA,1966
+opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
+opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/entry_points.txt,sha256=hAIEwx8JI8eFsUgk3ewaMdYw215At11XX-1a4WjAHBY,120
+opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/licenses/LICENSE,sha256=h8jwqxShIeVkc8vOo9ynxGYW16f4fVPxLhZKZs0H5U8,11350
+opentelemetry_instrumentation_confluent_kafka-0.41b0.dist-info/RECORD,,
@@ -1,9 +0,0 @@
|
|
1
|
-
opentelemetry/instrumentation/confluent_kafka/__init__.py,sha256=C12oyiGDgXtTdig3b8Wd0F0AIuLmvjB-NFuYSAAbeUM,12300
|
2
|
-
opentelemetry/instrumentation/confluent_kafka/package.py,sha256=GAdglvW5Pp12xoeaym8zQjRE9fyX8PGFV-u8v8ZjWZI,641
|
3
|
-
opentelemetry/instrumentation/confluent_kafka/utils.py,sha256=bWKcxUcnhP5J_WkqR8zuw0lskKmcHLWjq9HF0p4sV74,3541
|
4
|
-
opentelemetry/instrumentation/confluent_kafka/version.py,sha256=3kOuQWAUh-WCYbTuY1DBtA72c_iLngqxwEVTesRRxTs,608
|
5
|
-
opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info/METADATA,sha256=9VZwFhvuSiiiphcyZhKEGycMp9yR_59Gv6iBZQmGhWs,1966
|
6
|
-
opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
|
7
|
-
opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info/entry_points.txt,sha256=hAIEwx8JI8eFsUgk3ewaMdYw215At11XX-1a4WjAHBY,120
|
8
|
-
opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info/licenses/LICENSE,sha256=h8jwqxShIeVkc8vOo9ynxGYW16f4fVPxLhZKZs0H5U8,11350
|
9
|
-
opentelemetry_instrumentation_confluent_kafka-0.40b0.dist-info/RECORD,,
|
File without changes
File without changes