opentelemetry-instrumentation-confluent-kafka 0.48b0__tar.gz → 0.49b0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/PKG-INFO +2 -2
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/pyproject.toml +1 -1
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/src/opentelemetry/instrumentation/confluent_kafka/__init__.py +9 -16
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/src/opentelemetry/instrumentation/confluent_kafka/utils.py +2 -4
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/src/opentelemetry/instrumentation/confluent_kafka/version.py +1 -1
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/tests/test_instrumentation.py +15 -0
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/tests/utils.py +2 -6
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/.gitignore +0 -0
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/LICENSE +0 -0
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/README.rst +0 -0
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/src/opentelemetry/instrumentation/confluent_kafka/package.py +0 -0
- {opentelemetry_instrumentation_confluent_kafka-0.48b0 → opentelemetry_instrumentation_confluent_kafka-0.49b0}/tests/__init__.py +0 -0
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.3
|
2
2
|
Name: opentelemetry-instrumentation-confluent-kafka
|
3
|
-
Version: 0.48b0
|
3
|
+
Version: 0.49b0
|
4
4
|
Summary: OpenTelemetry Confluent Kafka instrumentation
|
5
5
|
Project-URL: Homepage, https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation/opentelemetry-instrumentation-confluent-kafka
|
6
6
|
Author-email: OpenTelemetry Authors <cncf-opentelemetry-contributors@lists.cncf.io>
|
@@ -18,7 +18,7 @@ Classifier: Programming Language :: Python :: 3.11
|
|
18
18
|
Classifier: Programming Language :: Python :: 3.12
|
19
19
|
Requires-Python: >=3.8
|
20
20
|
Requires-Dist: opentelemetry-api~=1.12
|
21
|
-
Requires-Dist: opentelemetry-instrumentation==0.48b0
|
21
|
+
Requires-Dist: opentelemetry-instrumentation==0.49b0
|
22
22
|
Requires-Dist: wrapt<2.0.0,>=1.0.0
|
23
23
|
Provides-Extra: instruments
|
24
24
|
Requires-Dist: confluent-kafka<=2.4.0,>=1.8.2; extra == 'instruments'
|
@@ -97,6 +97,7 @@ The _instrument method accepts the following keyword args:
|
|
97
97
|
|
98
98
|
___
|
99
99
|
"""
|
100
|
+
|
100
101
|
from typing import Collection
|
101
102
|
|
102
103
|
import confluent_kafka
|
@@ -123,9 +124,7 @@ from .version import __version__
|
|
123
124
|
|
124
125
|
class AutoInstrumentedProducer(Producer):
|
125
126
|
# This method is deliberately implemented in order to allow wrapt to wrap this function
|
126
|
-
def produce(
|
127
|
-
self, topic, value=None, *args, **kwargs
|
128
|
-
): # pylint: disable=keyword-arg-before-vararg,useless-super-delegation
|
127
|
+
def produce(self, topic, value=None, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg,useless-super-delegation
|
129
128
|
super().produce(topic, value, *args, **kwargs)
|
130
129
|
|
131
130
|
|
@@ -139,9 +138,7 @@ class AutoInstrumentedConsumer(Consumer):
|
|
139
138
|
return super().poll(timeout)
|
140
139
|
|
141
140
|
# This method is deliberately implemented in order to allow wrapt to wrap this function
|
142
|
-
def consume(
|
143
|
-
self, *args, **kwargs
|
144
|
-
): # pylint: disable=useless-super-delegation
|
141
|
+
def consume(self, *args, **kwargs): # pylint: disable=useless-super-delegation
|
145
142
|
return super().consume(*args, **kwargs)
|
146
143
|
|
147
144
|
# This method is deliberately implemented in order to allow wrapt to wrap this function
|
@@ -163,9 +160,7 @@ class ProxiedProducer(Producer):
|
|
163
160
|
def purge(self, in_queue=True, in_flight=True, blocking=True):
|
164
161
|
self._producer.purge(in_queue, in_flight, blocking)
|
165
162
|
|
166
|
-
def produce(
|
167
|
-
self, topic, value=None, *args, **kwargs
|
168
|
-
): # pylint: disable=keyword-arg-before-vararg
|
163
|
+
def produce(self, topic, value=None, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
|
169
164
|
new_kwargs = kwargs.copy()
|
170
165
|
new_kwargs["topic"] = topic
|
171
166
|
new_kwargs["value"] = value
|
@@ -205,9 +200,7 @@ class ProxiedConsumer(Consumer):
|
|
205
200
|
kwargs,
|
206
201
|
)
|
207
202
|
|
208
|
-
def get_watermark_offsets(
|
209
|
-
self, partition, timeout=-1, *args, **kwargs
|
210
|
-
): # pylint: disable=keyword-arg-before-vararg
|
203
|
+
def get_watermark_offsets(self, partition, timeout=-1, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
|
211
204
|
return self._consumer.get_watermark_offsets(
|
212
205
|
partition, timeout, *args, **kwargs
|
213
206
|
)
|
@@ -220,9 +213,7 @@ class ProxiedConsumer(Consumer):
|
|
220
213
|
self._consumer.poll, self, self._tracer, [timeout], {}
|
221
214
|
)
|
222
215
|
|
223
|
-
def subscribe(
|
224
|
-
self, topics, on_assign=lambda *args: None, *args, **kwargs
|
225
|
-
): # pylint: disable=keyword-arg-before-vararg
|
216
|
+
def subscribe(self, topics, on_assign=lambda *args: None, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
|
226
217
|
self._consumer.subscribe(topics, on_assign, *args, **kwargs)
|
227
218
|
|
228
219
|
def original_consumer(self):
|
@@ -363,7 +354,9 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
|
|
363
354
|
headers = []
|
364
355
|
kwargs["headers"] = headers
|
365
356
|
|
366
|
-
topic = KafkaPropertiesExtractor.extract_produce_topic(
|
357
|
+
topic = KafkaPropertiesExtractor.extract_produce_topic(
|
358
|
+
args, kwargs
|
359
|
+
)
|
367
360
|
_enrich_span(
|
368
361
|
span,
|
369
362
|
topic,
|
@@ -25,11 +25,9 @@ class KafkaPropertiesExtractor:
|
|
25
25
|
return kwargs.get(key, default_value)
|
26
26
|
|
27
27
|
@staticmethod
|
28
|
-
def extract_produce_topic(args):
|
28
|
+
def extract_produce_topic(args, kwargs):
|
29
29
|
"""extract topic from `produce` method arguments in Producer class"""
|
30
|
-
|
31
|
-
return args[0]
|
32
|
-
return "unknown"
|
30
|
+
return kwargs.get("topic") or (args[0] if args else "unknown")
|
33
31
|
|
34
32
|
@staticmethod
|
35
33
|
def extract_produce_headers(args, kwargs):
|
@@ -284,6 +284,15 @@ class TestConfluentKafka(TestBase):
|
|
284
284
|
expected_attribute_value, span.attributes[attribute_key]
|
285
285
|
)
|
286
286
|
|
287
|
+
def _assert_topic(self, span, expected_topic: str) -> None:
|
288
|
+
self.assertEqual(
|
289
|
+
span.attributes[SpanAttributes.MESSAGING_DESTINATION],
|
290
|
+
expected_topic,
|
291
|
+
)
|
292
|
+
|
293
|
+
def _assert_span_count(self, span_list, expected_count: int) -> None:
|
294
|
+
self.assertEqual(len(span_list), expected_count)
|
295
|
+
|
287
296
|
def test_producer_poll(self) -> None:
|
288
297
|
instrumentation = ConfluentKafkaInstrumentor()
|
289
298
|
message_queue = []
|
@@ -299,6 +308,9 @@ class TestConfluentKafka(TestBase):
|
|
299
308
|
producer.produce(topic="topic-1", key="key-1", value="value-1")
|
300
309
|
msg = producer.poll()
|
301
310
|
self.assertIsNotNone(msg)
|
311
|
+
span_list = self.memory_exporter.get_finished_spans()
|
312
|
+
self._assert_span_count(span_list, 1)
|
313
|
+
self._assert_topic(span_list[0], "topic-1")
|
302
314
|
|
303
315
|
def test_producer_flush(self) -> None:
|
304
316
|
instrumentation = ConfluentKafkaInstrumentor()
|
@@ -315,3 +327,6 @@ class TestConfluentKafka(TestBase):
|
|
315
327
|
producer.produce(topic="topic-1", key="key-1", value="value-1")
|
316
328
|
msg = producer.flush()
|
317
329
|
self.assertIsNotNone(msg)
|
330
|
+
span_list = self.memory_exporter.get_finished_spans()
|
331
|
+
self._assert_span_count(span_list, 1)
|
332
|
+
self._assert_topic(span_list[0], "topic-1")
|
@@ -8,9 +8,7 @@ class MockConsumer(Consumer):
|
|
8
8
|
self._queue = queue
|
9
9
|
super().__init__(config)
|
10
10
|
|
11
|
-
def consume(
|
12
|
-
self, num_messages=1, *args, **kwargs
|
13
|
-
): # pylint: disable=keyword-arg-before-vararg
|
11
|
+
def consume(self, num_messages=1, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
|
14
12
|
messages = self._queue[:num_messages]
|
15
13
|
self._queue = self._queue[num_messages:]
|
16
14
|
return messages
|
@@ -62,9 +60,7 @@ class MockedProducer(Producer):
|
|
62
60
|
self._queue = queue
|
63
61
|
super().__init__(config)
|
64
62
|
|
65
|
-
def produce(
|
66
|
-
self, *args, **kwargs
|
67
|
-
): # pylint: disable=keyword-arg-before-vararg
|
63
|
+
def produce(self, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg
|
68
64
|
self._queue.append(
|
69
65
|
MockedMessage(
|
70
66
|
topic=kwargs.get("topic"),
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|