opentelemetry-instrumentation-confluent-kafka 0.40b0__tar.gz → 0.41b0__tar.gz

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Files changed (13)
  1. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/PKG-INFO +1 -1
  2. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/src/opentelemetry/instrumentation/confluent_kafka/__init__.py +52 -21
  3. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/src/opentelemetry/instrumentation/confluent_kafka/utils.py +32 -2
  4. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/src/opentelemetry/instrumentation/confluent_kafka/version.py +1 -1
  5. opentelemetry_instrumentation_confluent_kafka-0.41b0/tests/test_instrumentation.py +248 -0
  6. opentelemetry_instrumentation_confluent_kafka-0.41b0/tests/utils.py +39 -0
  7. opentelemetry_instrumentation_confluent_kafka-0.40b0/tests/test_instrumentation.py +0 -106
  8. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/.gitignore +0 -0
  9. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/LICENSE +0 -0
  10. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/README.rst +0 -0
  11. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/pyproject.toml +0 -0
  12. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/src/opentelemetry/instrumentation/confluent_kafka/package.py +0 -0
  13. {opentelemetry_instrumentation_confluent_kafka-0.40b0 → opentelemetry_instrumentation_confluent_kafka-0.41b0}/tests/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentelemetry-instrumentation-confluent-kafka
-Version: 0.40b0
+Version: 0.41b0
 Summary: OpenTelemetry Confluent Kafka instrumentation
 Project-URL: Homepage, https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation/opentelemetry-instrumentation-confluent-kafka
 Author-email: OpenTelemetry Authors <cncf-opentelemetry-contributors@lists.cncf.io>
src/opentelemetry/instrumentation/confluent_kafka/__init__.py
@@ -112,6 +112,8 @@ from opentelemetry.trace import Link, SpanKind, Tracer
 from .package import _instruments
 from .utils import (
     KafkaPropertiesExtractor,
+    _end_current_consume_span,
+    _create_new_consume_span,
     _enrich_span,
     _get_span_name,
     _kafka_getter,
@@ -137,6 +139,12 @@ class AutoInstrumentedConsumer(Consumer):
     def poll(self, timeout=-1):  # pylint: disable=useless-super-delegation
         return super().poll(timeout)
 
+    # This method is deliberately implemented in order to allow wrapt to wrap this function
+    def consume(
+        self, *args, **kwargs
+    ):  # pylint: disable=useless-super-delegation
+        return super().consume(*args, **kwargs)
+
 
 class ProxiedProducer(Producer):
     def __init__(self, producer: Producer, tracer: Tracer):
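
Note on the pass-through consume added above: wrapt patches the attribute on the class object it is given, and the upstream comment marks the delegation as deliberate so wrapt has a regular Python function on this class to wrap. A minimal sketch of the mechanics (Base, AutoInstrumented, and traced are illustrative names, not part of the package):

    import wrapt

    class Base:
        def consume(self, num_messages=1):
            return ["msg"] * num_messages

    class AutoInstrumented(Base):
        # Deliberate pass-through so "consume" is defined on this class,
        # giving wrapt a clean, plain-Python target to wrap.
        def consume(self, *args, **kwargs):
            return super().consume(*args, **kwargs)

    def traced(func, instance, args, kwargs):
        # A real wrapper would start/end spans around the call.
        print("before consume")
        records = func(*args, **kwargs)
        print(f"after consume: {len(records)} record(s)")
        return records

    wrapt.wrap_function_wrapper(AutoInstrumented, "consume", traced)
    print(AutoInstrumented().consume(2))  # wrapper output, then ['msg', 'msg']
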
@@ -177,10 +185,14 @@ class ProxiedConsumer(Consumer):
     def commit(self, *args, **kwargs):
         return self._consumer.commit(*args, **kwargs)
 
-    def consume(
-        self, num_messages=1, *args, **kwargs
-    ):  # pylint: disable=keyword-arg-before-vararg
-        return self._consumer.consume(num_messages, *args, **kwargs)
+    def consume(self, *args, **kwargs):
+        return ConfluentKafkaInstrumentor.wrap_consume(
+            self._consumer.consume,
+            self,
+            self._tracer,
+            args,
+            kwargs,
+        )
 
     def get_watermark_offsets(
         self, partition, timeout=-1, *args, **kwargs
@@ -275,6 +287,11 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
                 func, instance, self._tracer, args, kwargs
             )
 
+        def _inner_wrap_consume(func, instance, args, kwargs):
+            return ConfluentKafkaInstrumentor.wrap_consume(
+                func, instance, self._tracer, args, kwargs
+            )
+
         wrapt.wrap_function_wrapper(
             AutoInstrumentedProducer,
             "produce",
@@ -287,6 +304,12 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
             _inner_wrap_poll,
         )
 
+        wrapt.wrap_function_wrapper(
+            AutoInstrumentedConsumer,
+            "consume",
+            _inner_wrap_consume,
+        )
+
     def _uninstrument(self, **kwargs):
         confluent_kafka.Producer = self._original_kafka_producer
         confluent_kafka.Consumer = self._original_kafka_consumer
@@ -326,29 +349,14 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
     @staticmethod
     def wrap_poll(func, instance, tracer, args, kwargs):
         if instance._current_consume_span:
-            context.detach(instance._current_context_token)
-            instance._current_context_token = None
-            instance._current_consume_span.end()
-            instance._current_consume_span = None
+            _end_current_consume_span(instance)
 
         with tracer.start_as_current_span(
             "recv", end_on_exit=True, kind=trace.SpanKind.CONSUMER
         ):
             record = func(*args, **kwargs)
             if record:
-                links = []
-                ctx = propagate.extract(record.headers(), getter=_kafka_getter)
-                if ctx:
-                    for item in ctx.values():
-                        if hasattr(item, "get_span_context"):
-                            links.append(Link(context=item.get_span_context()))
-
-                instance._current_consume_span = tracer.start_span(
-                    name=f"{record.topic()} process",
-                    links=links,
-                    kind=SpanKind.CONSUMER,
-                )
-
+                _create_new_consume_span(instance, tracer, [record])
                 _enrich_span(
                     instance._current_consume_span,
                     record.topic(),
@@ -361,3 +369,26 @@ class ConfluentKafkaInstrumentor(BaseInstrumentor):
             )
 
         return record
+
+    @staticmethod
+    def wrap_consume(func, instance, tracer, args, kwargs):
+        if instance._current_consume_span:
+            _end_current_consume_span(instance)
+
+        with tracer.start_as_current_span(
+            "recv", end_on_exit=True, kind=trace.SpanKind.CONSUMER
+        ):
+            records = func(*args, **kwargs)
+            if len(records) > 0:
+                _create_new_consume_span(instance, tracer, records)
+                _enrich_span(
+                    instance._current_consume_span,
+                    records[0].topic(),
+                    operation=MessagingOperationValues.PROCESS,
+                )
+
+        instance._current_context_token = context.attach(
+            trace.set_span_in_context(instance._current_consume_span)
+        )
+
+        return records
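
Taken together, the new wrap_consume mirrors wrap_poll for batch consumption: it ends any process span left open by the previous call, wraps the broker fetch in a short-lived "recv" span, then starts a "<topic> process" span covering the whole returned batch and attaches it to the current context. A minimal usage sketch, assuming a reachable broker at localhost:9092, a topic named "mytopic", and a hypothetical handle() function, none of which come from this package:

    from confluent_kafka import Consumer
    from opentelemetry.instrumentation.confluent_kafka import ConfluentKafkaInstrumentor

    consumer = ConfluentKafkaInstrumentor().instrument_consumer(
        Consumer(
            {
                "bootstrap.servers": "localhost:9092",  # placeholder broker
                "group.id": "mygroup",
                "auto.offset.reset": "earliest",
            }
        )
    )
    consumer.subscribe(["mytopic"])  # placeholder topic

    # Each consume() call ends the previous batch's process span (if any),
    # records a "recv" span around the fetch, and leaves a new
    # "<topic> process" span active for downstream handling.
    records = consumer.consume(num_messages=10, timeout=1.0)
    for record in records:
        handle(record)  # hypothetical application code; runs under the process span
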
src/opentelemetry/instrumentation/confluent_kafka/utils.py
@@ -1,6 +1,8 @@
 from logging import getLogger
 from typing import List, Optional
 
+from opentelemetry import context, propagate
+from opentelemetry.trace import SpanKind, Link
 from opentelemetry.propagators import textmap
 from opentelemetry.semconv.trace import (
     MessagingDestinationKindValues,
@@ -81,6 +83,34 @@ class KafkaContextSetter(textmap.Setter):
 _kafka_getter = KafkaContextGetter()
 
 
+def _end_current_consume_span(instance):
+    context.detach(instance._current_context_token)
+    instance._current_context_token = None
+    instance._current_consume_span.end()
+    instance._current_consume_span = None
+
+
+def _create_new_consume_span(instance, tracer, records):
+    links = _get_links_from_records(records)
+    instance._current_consume_span = tracer.start_span(
+        name=f"{records[0].topic()} process",
+        links=links,
+        kind=SpanKind.CONSUMER,
+    )
+
+
+def _get_links_from_records(records):
+    links = []
+    for record in records:
+        ctx = propagate.extract(record.headers(), getter=_kafka_getter)
+        if ctx:
+            for item in ctx.values():
+                if hasattr(item, "get_span_context"):
+                    links.append(Link(context=item.get_span_context()))
+
+    return links
+
+
 def _enrich_span(
     span,
     topic,
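
The refactor moves the span bookkeeping into shared helpers: _end_current_consume_span detaches the context token and ends the open process span, while _create_new_consume_span names the span after the first record's topic and links it to every producing trace found in the batch, so one batch-level process span can fan in from many producers. A rough illustration of how one link is recovered from propagated headers (the carrier value is a synthetic W3C traceparent, and the global default propagator is assumed to include tracecontext):

    from opentelemetry import propagate
    from opentelemetry.trace import Link

    # What an instrumented producer would have injected into record headers,
    # shown here already decoded into a plain dict carrier.
    carrier = {
        "traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
    }

    links = []
    ctx = propagate.extract(carrier)
    for item in ctx.values():
        if hasattr(item, "get_span_context"):
            links.append(Link(context=item.get_span_context()))

    print(len(links))  # 1 -- a batch of N parented records yields up to N links
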
@@ -94,7 +124,7 @@ def _enrich_span(
     span.set_attribute(SpanAttributes.MESSAGING_SYSTEM, "kafka")
     span.set_attribute(SpanAttributes.MESSAGING_DESTINATION, topic)
 
-    if partition:
+    if partition is not None:
         span.set_attribute(SpanAttributes.MESSAGING_KAFKA_PARTITION, partition)
 
     span.set_attribute(
@@ -109,7 +139,7 @@ def _enrich_span(
 
     # https://stackoverflow.com/questions/65935155/identify-and-find-specific-message-in-kafka-topic
    # A message within Kafka is uniquely defined by its topic name, topic partition and offset.
-    if partition and offset and topic:
+    if partition is not None and offset is not None and topic:
         span.set_attribute(
             SpanAttributes.MESSAGING_MESSAGE_ID,
             f"{topic}.{partition}.{offset}",
src/opentelemetry/instrumentation/confluent_kafka/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "0.40b0"
+__version__ = "0.41b0"
opentelemetry_instrumentation_confluent_kafka-0.41b0/tests/test_instrumentation.py (new file)
@@ -0,0 +1,248 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=no-name-in-module
+
+from opentelemetry.semconv.trace import (
+    SpanAttributes,
+    MessagingDestinationKindValues,
+)
+from opentelemetry.test.test_base import TestBase
+from .utils import MockConsumer, MockedMessage
+
+from confluent_kafka import Consumer, Producer
+
+from opentelemetry.instrumentation.confluent_kafka import (
+    ConfluentKafkaInstrumentor,
+    ProxiedConsumer,
+    ProxiedProducer,
+)
+from opentelemetry.instrumentation.confluent_kafka.utils import (
+    KafkaContextGetter,
+    KafkaContextSetter,
+)
+
+
+class TestConfluentKafka(TestBase):
+    def test_instrument_api(self) -> None:
+        instrumentation = ConfluentKafkaInstrumentor()
+
+        producer = Producer({"bootstrap.servers": "localhost:29092"})
+        producer = instrumentation.instrument_producer(producer)
+
+        self.assertEqual(producer.__class__, ProxiedProducer)
+
+        producer = instrumentation.uninstrument_producer(producer)
+        self.assertEqual(producer.__class__, Producer)
+
+        producer = Producer({"bootstrap.servers": "localhost:29092"})
+        producer = instrumentation.instrument_producer(producer)
+
+        self.assertEqual(producer.__class__, ProxiedProducer)
+
+        producer = instrumentation.uninstrument_producer(producer)
+        self.assertEqual(producer.__class__, Producer)
+
+        consumer = Consumer(
+            {
+                "bootstrap.servers": "localhost:29092",
+                "group.id": "mygroup",
+                "auto.offset.reset": "earliest",
+            }
+        )
+
+        consumer = instrumentation.instrument_consumer(consumer)
+        self.assertEqual(consumer.__class__, ProxiedConsumer)
+
+        consumer = instrumentation.uninstrument_consumer(consumer)
+        self.assertEqual(consumer.__class__, Consumer)
+
+    def test_consumer_commit_method_exists(self) -> None:
+        instrumentation = ConfluentKafkaInstrumentor()
+
+        consumer = Consumer(
+            {
+                "bootstrap.servers": "localhost:29092",
+                "group.id": "mygroup",
+                "auto.offset.reset": "earliest",
+            }
+        )
+
+        consumer = instrumentation.instrument_consumer(consumer)
+        self.assertEqual(consumer.__class__, ProxiedConsumer)
+        self.assertTrue(hasattr(consumer, "commit"))
+
+    def test_context_setter(self) -> None:
+        context_setter = KafkaContextSetter()
+
+        carrier_dict = {"key1": "val1"}
+        context_setter.set(carrier_dict, "key2", "val2")
+        self.assertGreaterEqual(
+            carrier_dict.items(), {"key2": "val2".encode()}.items()
+        )
+
+        carrier_list = [("key1", "val1")]
+        context_setter.set(carrier_list, "key2", "val2")
+        self.assertTrue(("key2", "val2".encode()) in carrier_list)
+
+    def test_context_getter(self) -> None:
+        context_setter = KafkaContextSetter()
+        context_getter = KafkaContextGetter()
+
+        carrier_dict = {}
+        context_setter.set(carrier_dict, "key1", "val1")
+        self.assertEqual(context_getter.get(carrier_dict, "key1"), ["val1"])
+        self.assertEqual(["key1"], context_getter.keys(carrier_dict))
+
+        carrier_list = []
+        context_setter.set(carrier_list, "key1", "val1")
+        self.assertEqual(context_getter.get(carrier_list, "key1"), ["val1"])
+        self.assertEqual(["key1"], context_getter.keys(carrier_list))
+
+    def test_poll(self) -> None:
+        instrumentation = ConfluentKafkaInstrumentor()
+        mocked_messages = [
+            MockedMessage("topic-10", 0, 0, []),
+            MockedMessage("topic-20", 2, 4, []),
+            MockedMessage("topic-30", 1, 3, []),
+        ]
+        expected_spans = [
+            {"name": "recv", "attributes": {}},
+            {
+                "name": "topic-10 process",
+                "attributes": {
+                    SpanAttributes.MESSAGING_OPERATION: "process",
+                    SpanAttributes.MESSAGING_KAFKA_PARTITION: 0,
+                    SpanAttributes.MESSAGING_SYSTEM: "kafka",
+                    SpanAttributes.MESSAGING_DESTINATION: "topic-10",
+                    SpanAttributes.MESSAGING_DESTINATION_KIND: MessagingDestinationKindValues.QUEUE.value,
+                    SpanAttributes.MESSAGING_MESSAGE_ID: "topic-10.0.0",
+                },
+            },
+            {"name": "recv", "attributes": {}},
+            {
+                "name": "topic-20 process",
+                "attributes": {
+                    SpanAttributes.MESSAGING_OPERATION: "process",
+                    SpanAttributes.MESSAGING_KAFKA_PARTITION: 2,
+                    SpanAttributes.MESSAGING_SYSTEM: "kafka",
+                    SpanAttributes.MESSAGING_DESTINATION: "topic-20",
+                    SpanAttributes.MESSAGING_DESTINATION_KIND: MessagingDestinationKindValues.QUEUE.value,
+                    SpanAttributes.MESSAGING_MESSAGE_ID: "topic-20.2.4",
+                },
+            },
+            {"name": "recv", "attributes": {}},
+            {
+                "name": "topic-30 process",
+                "attributes": {
+                    SpanAttributes.MESSAGING_OPERATION: "process",
+                    SpanAttributes.MESSAGING_KAFKA_PARTITION: 1,
+                    SpanAttributes.MESSAGING_SYSTEM: "kafka",
+                    SpanAttributes.MESSAGING_DESTINATION: "topic-30",
+                    SpanAttributes.MESSAGING_DESTINATION_KIND: MessagingDestinationKindValues.QUEUE.value,
+                    SpanAttributes.MESSAGING_MESSAGE_ID: "topic-30.1.3",
+                },
+            },
+            {"name": "recv", "attributes": {}},
+        ]
+
+        consumer = MockConsumer(
+            mocked_messages,
+            {
+                "bootstrap.servers": "localhost:29092",
+                "group.id": "mygroup",
+                "auto.offset.reset": "earliest",
+            },
+        )
+        self.memory_exporter.clear()
+        consumer = instrumentation.instrument_consumer(consumer)
+        consumer.poll()
+        consumer.poll()
+        consumer.poll()
+        consumer.poll()
+
+        span_list = self.memory_exporter.get_finished_spans()
+        self._compare_spans(span_list, expected_spans)
+
+    def test_consume(self) -> None:
+        instrumentation = ConfluentKafkaInstrumentor()
+        mocked_messages = [
+            MockedMessage("topic-1", 0, 0, []),
+            MockedMessage("topic-1", 2, 1, []),
+            MockedMessage("topic-1", 3, 2, []),
+            MockedMessage("topic-2", 0, 0, []),
+            MockedMessage("topic-3", 0, 3, []),
+            MockedMessage("topic-2", 0, 1, []),
+        ]
+        expected_spans = [
+            {"name": "recv", "attributes": {}},
+            {
+                "name": "topic-1 process",
+                "attributes": {
+                    SpanAttributes.MESSAGING_OPERATION: "process",
+                    SpanAttributes.MESSAGING_SYSTEM: "kafka",
+                    SpanAttributes.MESSAGING_DESTINATION: "topic-1",
+                    SpanAttributes.MESSAGING_DESTINATION_KIND: MessagingDestinationKindValues.QUEUE.value,
+                },
+            },
+            {"name": "recv", "attributes": {}},
+            {
+                "name": "topic-2 process",
+                "attributes": {
+                    SpanAttributes.MESSAGING_OPERATION: "process",
+                    SpanAttributes.MESSAGING_SYSTEM: "kafka",
+                    SpanAttributes.MESSAGING_DESTINATION: "topic-2",
+                    SpanAttributes.MESSAGING_DESTINATION_KIND: MessagingDestinationKindValues.QUEUE.value,
+                },
+            },
+            {"name": "recv", "attributes": {}},
+            {
+                "name": "topic-3 process",
+                "attributes": {
+                    SpanAttributes.MESSAGING_OPERATION: "process",
+                    SpanAttributes.MESSAGING_SYSTEM: "kafka",
+                    SpanAttributes.MESSAGING_DESTINATION: "topic-3",
+                    SpanAttributes.MESSAGING_DESTINATION_KIND: MessagingDestinationKindValues.QUEUE.value,
+                },
+            },
+            {"name": "recv", "attributes": {}},
+        ]
+
+        consumer = MockConsumer(
+            mocked_messages,
+            {
+                "bootstrap.servers": "localhost:29092",
+                "group.id": "mygroup",
+                "auto.offset.reset": "earliest",
+            },
+        )
+
+        self.memory_exporter.clear()
+        consumer = instrumentation.instrument_consumer(consumer)
+        consumer.consume(3)
+        consumer.consume(1)
+        consumer.consume(2)
+        consumer.consume(1)
+        span_list = self.memory_exporter.get_finished_spans()
+        self._compare_spans(span_list, expected_spans)
+
+    def _compare_spans(self, spans, expected_spans):
+        for span, expected_span in zip(spans, expected_spans):
+            self.assertEqual(expected_span["name"], span.name)
+            for attribute_key, expected_attribute_value in expected_span[
+                "attributes"
+            ].items():
+                self.assertEqual(
+                    expected_attribute_value, span.attributes[attribute_key]
+                )
opentelemetry_instrumentation_confluent_kafka-0.41b0/tests/utils.py (new file)
@@ -0,0 +1,39 @@
+from confluent_kafka import Consumer
+
+
+class MockConsumer(Consumer):
+    def __init__(self, queue, config):
+        self._queue = queue
+        super().__init__(config)
+
+    def consume(
+        self, num_messages=1, *args, **kwargs
+    ):  # pylint: disable=keyword-arg-before-vararg
+        messages = self._queue[:num_messages]
+        self._queue = self._queue[num_messages:]
+        return messages
+
+    def poll(self, timeout=None):
+        if len(self._queue) > 0:
+            return self._queue.pop(0)
+        return None
+
+
+class MockedMessage:
+    def __init__(self, topic: str, partition: int, offset: int, headers):
+        self._topic = topic
+        self._partition = partition
+        self._offset = offset
+        self._headers = headers
+
+    def topic(self):
+        return self._topic
+
+    def partition(self):
+        return self._partition
+
+    def offset(self):
+        return self._offset
+
+    def headers(self):
+        return self._headers
opentelemetry_instrumentation_confluent_kafka-0.40b0/tests/test_instrumentation.py (deleted)
@@ -1,106 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# pylint: disable=no-name-in-module
-
-from unittest import TestCase
-
-from confluent_kafka import Consumer, Producer
-
-from opentelemetry.instrumentation.confluent_kafka import (
-    ConfluentKafkaInstrumentor,
-    ProxiedConsumer,
-    ProxiedProducer,
-)
-from opentelemetry.instrumentation.confluent_kafka.utils import (
-    KafkaContextGetter,
-    KafkaContextSetter,
-)
-
-
-class TestConfluentKafka(TestCase):
-    def test_instrument_api(self) -> None:
-        instrumentation = ConfluentKafkaInstrumentor()
-
-        producer = Producer({"bootstrap.servers": "localhost:29092"})
-        producer = instrumentation.instrument_producer(producer)
-
-        self.assertEqual(producer.__class__, ProxiedProducer)
-
-        producer = instrumentation.uninstrument_producer(producer)
-        self.assertEqual(producer.__class__, Producer)
-
-        producer = Producer({"bootstrap.servers": "localhost:29092"})
-        producer = instrumentation.instrument_producer(producer)
-
-        self.assertEqual(producer.__class__, ProxiedProducer)
-
-        producer = instrumentation.uninstrument_producer(producer)
-        self.assertEqual(producer.__class__, Producer)
-
-        consumer = Consumer(
-            {
-                "bootstrap.servers": "localhost:29092",
-                "group.id": "mygroup",
-                "auto.offset.reset": "earliest",
-            }
-        )
-
-        consumer = instrumentation.instrument_consumer(consumer)
-        self.assertEqual(consumer.__class__, ProxiedConsumer)
-
-        consumer = instrumentation.uninstrument_consumer(consumer)
-        self.assertEqual(consumer.__class__, Consumer)
-
-    def test_consumer_commit_method_exists(self) -> None:
-        instrumentation = ConfluentKafkaInstrumentor()
-
-        consumer = Consumer(
-            {
-                "bootstrap.servers": "localhost:29092",
-                "group.id": "mygroup",
-                "auto.offset.reset": "earliest",
-            }
-        )
-
-        consumer = instrumentation.instrument_consumer(consumer)
-        self.assertEqual(consumer.__class__, ProxiedConsumer)
-        self.assertTrue(hasattr(consumer, "commit"))
-
-    def test_context_setter(self) -> None:
-        context_setter = KafkaContextSetter()
-
-        carrier_dict = {"key1": "val1"}
-        context_setter.set(carrier_dict, "key2", "val2")
-        self.assertGreaterEqual(
-            carrier_dict.items(), {"key2": "val2".encode()}.items()
-        )
-
-        carrier_list = [("key1", "val1")]
-        context_setter.set(carrier_list, "key2", "val2")
-        self.assertTrue(("key2", "val2".encode()) in carrier_list)
-
-    def test_context_getter(self) -> None:
-        context_setter = KafkaContextSetter()
-        context_getter = KafkaContextGetter()
-
-        carrier_dict = {}
-        context_setter.set(carrier_dict, "key1", "val1")
-        self.assertEqual(context_getter.get(carrier_dict, "key1"), ["val1"])
-        self.assertEqual(["key1"], context_getter.keys(carrier_dict))
-
-        carrier_list = []
-        context_setter.set(carrier_list, "key1", "val1")
-        self.assertEqual(context_getter.get(carrier_list, "key1"), ["val1"])
-        self.assertEqual(["key1"], context_getter.keys(carrier_list))