dgkafka 1.0.0a15__tar.gz → 1.0.0a17__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dgkafka
-Version: 1.0.0a15
+Version: 1.0.0a17
 Summary: Kafka clients
 Home-page: https://gitlab.com/gng-group/dgkafka.git
 Author: Malanris
@@ -35,7 +35,7 @@ class AvroKafkaConsumer(KafkaConsumer):
             self.consumer = AvroConsumer(configs)
             self.logger.info("[*] Avro consumer initialized successfully")
         except Exception as ex:
-            self.logger.error(f"[x] Failed to initialize Avro consumer: {ex}")
+            self.logger.error(f"[x] Failed to initialize avro consumer: {ex}")
             raise
 
     def consume(self, num_messages: int = 1, timeout: float = 1.0, decode_: bool = False, **kwargs: Any) -> Iterator[str | bytes | Message | None]:
@@ -45,9 +45,9 @@ class AvroKafkaProducer(KafkaProducer):
                 default_key_schema=self.default_key_schema,
                 default_value_schema=self.default_value_schema
             )
-            self.logger.info("Avro producer initialized successfully")
+            self.logger.info("[*] Avro producer initialized successfully")
         except Exception as ex:
-            self.logger.error(f"Failed to initialize Avro producer: {ex}")
+            self.logger.error(f"[x] Failed to initialize avro producer: {ex}")
             raise
 
     def produce(
@@ -77,6 +77,8 @@ class AvroKafkaProducer(KafkaProducer):
         producer = self._ensure_producer()
         producer.poll(0)
 
+        self._delivery_status['success'] = None
+
         # Prepare headers
         headers_list = None
         if headers:
@@ -107,14 +109,25 @@ class AvroKafkaProducer(KafkaProducer):
             )
 
             if flush:
-                producer.flush()
+                remaining = producer.flush(timeout)
+                if remaining > 0:
+                    self.logger.warning(f"[!] {remaining} messages remain undelivered after flush timeout")
+                    return False
+
+            # If flush=True, the delivery status should be set by this point
+            if flush and self._delivery_status['success'] is not None:
+                return self._delivery_status['success']
+
+            # If flush=False we cannot guarantee delivery, so return True
+            # (since no error has technically occurred yet)
+            return True
 
         except SerializerError as ex:
-            self.logger.error(f"Avro serialization failed: {ex}")
-            raise
+            self.logger.error(f"[x] Avro serialization failed: {ex}")
+            return False
         except Exception as ex:
-            self.logger.error(f"Failed to produce Avro message: {ex}")
-            raise
+            self.logger.error(f"[x] Failed to produce Avro message: {ex}")
+            return False
 
     def get_schema(self, subject: str, version: int = 1) -> Dict[str, Any]:
         """Get Avro schema from Schema Registry."""
@@ -0,0 +1,197 @@
+from typing import Literal, Iterator, Any
+from dgkafka.errors import ConsumerNotSetError
+
+from confluent_kafka import Consumer, KafkaException, Message, TopicPartition
+from confluent_kafka import OFFSET_STORED, OFFSET_BEGINNING, OFFSET_END
+
+import logging
+import dglog
+
+OffsetType = Literal[OFFSET_STORED, OFFSET_BEGINNING, OFFSET_END] | int
+
+
+class KafkaConsumer:
+    def __init__(self, logger_: logging.Logger | dglog.Logger | None = None, **configs: Any) -> None:
+        self.consumer: Consumer | None = None
+        self.logger = logger_ if logger_ else dglog.Logger()
+        if isinstance(self.logger, dglog.Logger):
+            self.logger.auto_configure()
+        self._init_consumer(**configs)
+
+    def _init_consumer(self, **configs: Any) -> None:
+        """Internal method to initialize consumer"""
+        try:
+            self.consumer = Consumer(configs)
+            self.logger.info("[*] Consumer initialized successfully")
+        except KafkaException as ex:
+            self.logger.error(f"[x] Failed to initialize consumer: {ex}")
+            raise
+
+    def close(self) -> None:
+        """Safely close the consumer"""
+        if self.consumer is not None:
+            try:
+                self.consumer.close()
+                self.logger.info("[*] Consumer closed successfully")
+            except KafkaException as ex:
+                self.logger.error(f"[x] Error closing consumer: {ex}")
+                raise
+            finally:
+                self.consumer = None
+
+    def __enter__(self):
+        """Context manager entry point"""
+        if self.consumer is None:
+            self._init_consumer()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit point"""
+        self.close()
+
+    def _ensure_consumer(self) -> Consumer:
+        """Ensure consumer is initialized"""
+        if self.consumer is None:
+            raise ConsumerNotSetError('[!] Consumer not initialized!')
+        return self.consumer
+
+    def subscribe(self, topics: str | list[str], partition: int | None = None,
+                  offset: OffsetType = OFFSET_STORED) -> None:
+        """Subscribe to topics"""
+        consumer = self._ensure_consumer()
+
+        if partition is not None and offset != OFFSET_STORED:
+            topic_list = [topics] if isinstance(topics, str) else topics
+            for topic in topic_list:
+                self._assign_topic_partition(topic, partition, offset)
+        else:
+            topics_list = [topics] if isinstance(topics, str) else topics
+            consumer.subscribe(topics_list, on_assign=self.on_assign, on_revoke=self.on_revoke)
+
+    def on_assign(self, consumer, partitions):
+        self.kafka_status = "UP"
+        for topic in {p.topic for p in partitions}:
+            new = {p.partition for p in partitions if p.topic == topic}
+            self.logger.debug(f"[@] on_assign {topic} {new if new else '{}'}")
+            old = {p.partition for p in consumer.assignment() if p.topic == topic}
+            old.update(new)
+            self.logger.info(f"[*] Assigned {topic} {old if old else '{}'}")
+
+    def on_revoke(self, consumer, partitions):
+        for topic in {p.topic for p in partitions}:
+            new = {p.partition for p in partitions if p.topic == topic}
+            self.logger.debug(f"[@] on_revoke {topic} {new if new else '{}'}")
+            old = {p.partition for p in consumer.assignment() if p.topic == topic}
+            old.difference_update(new)
+            self.logger.info(f"[*] Assigned {topic} {old if old else '{}'}")
+
+    def _assign_topic_partition(self, topic: str, partition: int, offset: OffsetType) -> None:
+        """Assign to specific partition"""
+        consumer = self._ensure_consumer()
+        topic_partition = TopicPartition(topic, partition, offset)
+        consumer.assign([topic_partition])
+        consumer.seek(topic_partition)
+        self.logger.info(f"[*] Assigned to topic '{topic}' partition {partition} with offset {offset}")
+
+    def consume(self, num_messages: int = 1, timeout: float = 1.0, decode_: bool = False) -> Iterator[Message | str]:
+        """Consume messages"""
+        consumer = self._ensure_consumer()
+
+        for _ in range(num_messages):
+            if (msg := self._consume(consumer, timeout)) is None:
+                continue
+            yield msg.value().decode('utf-8') if decode_ else msg
+
+    def _consume(self, consumer: Consumer, timeout: float) -> Message | None:
+        msg = consumer.poll(timeout)
+        if msg is None:
+            return None
+        if msg.error():
+            self.logger.error(f"[x] Consumer error: {msg.error()}")
+            return None
+        self.logger.info(f"[<] Received message from {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
+        self.logger.debug(f"[*] Message content: {msg.value()}")
+        return msg
+
+    def commit(self, message: Message | None = None, offsets: list[TopicPartition] | None = None,
+               asynchronous: bool = True) -> list[TopicPartition] | None:
+        """Commit offsets to Kafka."""
+        consumer = self._ensure_consumer()
+        if message:
+            return consumer.commit(message=message, asynchronous=asynchronous)
+        elif offsets:
+            return consumer.commit(offsets=offsets, asynchronous=asynchronous)
+        return consumer.commit(asynchronous=asynchronous)
+
+    def get_subscription_info(self) -> dict:
+        """Get current subscription and assignment information.
+
+        Returns:
+            dict: Dictionary with subscription and assignment details
+                {
+                    'subscribed_topics': list[str] | None,
+                    'assignments': list[dict] | None,
+                    'current_offsets': list[dict] | None
+                }
+        """
+        consumer = self._ensure_consumer()
+
+        try:
+            # Get the current assignments
+            assignments = consumer.assignment()
+
+            # Get the current positions (offsets)
+            current_offsets = []
+            if assignments:
+                current_offsets = [consumer.position(tp) for tp in assignments]
+
+            # Derive the subscribed topics from the assignment list
+            subscribed_topics = list({tp.topic for tp in assignments}) if assignments else None
+
+            # Build the assignment info
+            assignments_info = []
+            for tp in assignments:
+                assignments_info.append({
+                    'topic': tp.topic,
+                    'partition': tp.partition,
+                    'offset': tp.offset
+                })
+
+            # Build the current position info
+            offsets_info = []
+            for tp in current_offsets:
+                offsets_info.append({
+                    'topic': tp.topic,
+                    'partition': tp.partition,
+                    'offset': tp.offset
+                })
+
+            return {
+                'subscribed_topics': subscribed_topics,
+                'assignments': assignments_info if assignments_info else None,
+                'current_offsets': offsets_info if offsets_info else None
+            }
+
+        except KafkaException as ex:
+            self.logger.error(f"[x] Failed to get subscription info: {ex}")
+            raise
+
+    def log_subscription_info(self) -> None:
+        """Log current subscription and assignment information."""
+        info = self.get_subscription_info()
+
+        if info['subscribed_topics']:
+            self.logger.info(f"[*] Subscribed topics: {', '.join(info['subscribed_topics'])}")
+        else:
+            self.logger.info("[!] Not subscribed to any topics")
+
+        if info['assignments']:
+            self.logger.info("[*] Current partition assignments:")
+            for assignment in info['assignments']:
+                self.logger.info(f"  - {assignment['topic']} [partition {assignment['partition']}]")
+
+        if info['current_offsets']:
+            self.logger.info("[*] Current read positions:")
+            for offset in info['current_offsets']:
+                self.logger.info(
+                    f"  - {offset['topic']} [partition {offset['partition']}]: position {offset['offset']}")
@@ -20,6 +20,9 @@ class KafkaProducer:
         """
         self.producer: Producer | None = None
         self.logger = logger_ if logger_ else dglog.Logger()
+
+        self._delivery_status = {'success': None}
+
         if isinstance(self.logger, dglog.Logger):
             self.logger.auto_configure()
         self._init_producer(**configs)
@@ -28,7 +31,7 @@ class KafkaProducer:
         """Internal method to initialize producer."""
         try:
             self.producer = Producer(configs)
-            self.logger.info("[*] Kafka producer initialized successfully")
+            self.logger.info("[*] Producer initialized successfully")
         except Exception as ex:
             self.logger.error(f"[x] Failed to initialize producer: {ex}")
             raise
@@ -71,8 +74,11 @@ class KafkaProducer:
         if err is not None:
             self.logger.error(f"[x] Message delivery failed: {err}")
             self.logger.debug(f"[~] Failed message details: {msg}")
+            self._delivery_status['success'] = False
         else:
-            self.logger.info(f"[>] Message delivered to {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
+            self.logger.info(
+                f"[>] Message delivered to {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
+            self._delivery_status['success'] = True
 
     def produce(
         self,
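
The shared-dict-plus-callback mechanism these hunks introduce is easiest to see in isolation. A self-contained sketch of the same pattern against confluent-kafka directly, not dgkafka's exact code:

```python
from confluent_kafka import Producer

status = {'success': None}  # shared state the delivery callback mutates

def delivery_report(err, msg):
    # librdkafka invokes this from poll()/flush(), once per produced message
    status['success'] = err is None

p = Producer({'bootstrap.servers': 'localhost:9092'})
status['success'] = None  # reset before each send, as the diff does
p.produce('demo-topic', b'payload', callback=delivery_report)
remaining = p.flush(1.0)  # serves callbacks; returns count still queued
delivered = remaining == 0 and status['success'] is True
```

A single shared flag only tracks the most recent callback, which is why `produce()` resets it before every send; concurrent or heavily batched sends would need per-message state instead.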
@@ -82,7 +88,7 @@ class KafkaProducer:
         partition: int | None = None,
         headers: dict[str, bytes] | None = None,
         flush: bool = True
-    ) -> None:
+    ) -> bool:
         """Produce a message to Kafka.
 
         Args:
@@ -96,6 +102,8 @@ class KafkaProducer:
         producer = self._ensure_producer()
         producer.poll(0)
 
+        self._delivery_status['success'] = None
+
         # Generate key if not provided
         key = key if key is not None else str(uuid.uuid4())
         key_bytes = key.encode('utf-8')
@@ -112,7 +120,7 @@ class KafkaProducer:
             value = json.dumps(message, ensure_ascii=False, default=dthandler, indent=4).encode('utf-8')
         except Exception as ex:
             self.logger.error(f"[x] Failed to serialize message: {ex}")
-            raise
+            return False
 
         # Prepare message headers
         headers_list = None
@@ -140,10 +148,21 @@ class KafkaProducer:
                 headers=headers_list
             )
             if flush:
-                producer.flush()
+                remaining = producer.flush(1.0)  # timeout 1 second
+                if remaining > 0:
+                    self.logger.warning(f"[!] {remaining} messages remain undelivered")
+                    return False
+
+            # If flush=True, the delivery status should be set by this point
+            if flush and self._delivery_status['success'] is not None:
+                return self._delivery_status['success']
+
+            # If flush=False we cannot guarantee delivery, so return True
+            # (since no error has technically occurred yet)
+            return True
         except Exception as ex:
             self.logger.error(f"[x] Failed to produce message: {ex}")
-            raise
+            return False
 
     def flush(self, timeout: float = 10.0) -> None:
         """Wait for all messages to be delivered.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dgkafka
-Version: 1.0.0a15
+Version: 1.0.0a17
 Summary: Kafka clients
 Home-page: https://gitlab.com/gng-group/dgkafka.git
 Author: Malanris
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "dgkafka"
-version = "1.0.0a15"
+version = "1.0.0a17"
 authors = [
     {name = "Roman Rasputin", email = "admin@roro.su"},
 ]
@@ -6,7 +6,7 @@ with open('README.md') as f:
 
 
 setup(name='dgkafka',
-      version='1.0.0a0',
+      version='1.0.0a17',
       description='Ver.1.0.0',
       long_description=long_description,
       long_description_content_type='text/markdown',  # This is important!
@@ -1,107 +0,0 @@
-from typing import Literal, Iterator, Any
-from dgkafka.errors import ConsumerNotSetError
-
-from confluent_kafka import Consumer, KafkaException, Message, TopicPartition
-from confluent_kafka import OFFSET_STORED, OFFSET_BEGINNING, OFFSET_END
-
-import logging
-import dglog
-
-OffsetType = Literal[OFFSET_STORED, OFFSET_BEGINNING, OFFSET_END] | int
-
-
-class KafkaConsumer:
-    def __init__(self, logger_: logging.Logger | dglog.Logger | None = None, **configs: Any) -> None:
-        self.consumer: Consumer | None = None
-        self.logger = logger_ if logger_ else dglog.Logger()
-        if isinstance(self.logger, dglog.Logger):
-            self.logger.auto_configure()
-        self._init_consumer(**configs)
-
-    def _init_consumer(self, **configs: Any) -> None:
-        """Internal method to initialize consumer"""
-        try:
-            self.consumer = Consumer(configs)
-        except KafkaException as ex:
-            self.logger.error(f"[x] Failed to initialize Kafka consumer: {ex}")
-            raise
-
-    def close(self) -> None:
-        """Safely close the consumer"""
-        if self.consumer is not None:
-            try:
-                self.consumer.close()
-                self.logger.info("[*] Kafka consumer closed successfully")
-            except KafkaException as ex:
-                self.logger.error(f"[x] Error closing consumer: {ex}")
-                raise
-            finally:
-                self.consumer = None
-
-    def __enter__(self):
-        """Context manager entry point"""
-        if self.consumer is None:
-            self._init_consumer()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """Context manager exit point"""
-        self.close()
-
-    def _ensure_consumer(self) -> Consumer:
-        """Ensure consumer is initialized"""
-        if self.consumer is None:
-            raise ConsumerNotSetError('[!] Consumer not initialized!')
-        return self.consumer
-
-    def subscribe(self, topics: str | list[str], partition: int | None = None,
-                  offset: OffsetType = OFFSET_STORED) -> None:
-        """Subscribe to topics"""
-        consumer = self._ensure_consumer()
-
-        if partition is not None and offset != OFFSET_STORED:
-            topic_list = [topics] if isinstance(topics, str) else topics
-            for topic in topic_list:
-                self._assign_topic_partition(topic, partition, offset)
-        else:
-            topics_list = [topics] if isinstance(topics, str) else topics
-            consumer.subscribe(topics_list)
-            self.logger.info(f"[*] Subscribed to topics: {topics_list}")
-
-    def _assign_topic_partition(self, topic: str, partition: int, offset: OffsetType) -> None:
-        """Assign to specific partition"""
-        consumer = self._ensure_consumer()
-        topic_partition = TopicPartition(topic, partition, offset)
-        consumer.assign([topic_partition])
-        consumer.seek(topic_partition)
-        self.logger.info(f"[*] Assigned to topic '{topic}' partition {partition} with offset {offset}")
-
-    def consume(self, num_messages: int = 1, timeout: float = 1.0, decode_: bool = False) -> Iterator[Message | str]:
-        """Consume messages"""
-        consumer = self._ensure_consumer()
-
-        for _ in range(num_messages):
-            if (msg := self._consume(consumer, timeout)) is None:
-                continue
-            yield msg.value().decode('utf-8') if decode_ else msg
-
-    def _consume(self, consumer: Consumer, timeout: float) -> Message | None:
-        msg = consumer.poll(timeout)
-        if msg is None:
-            return None
-        if msg.error():
-            self.logger.error(f"Consumer error: {msg.error()}")
-            return None
-        self.logger.info(f"[<] Received message from {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
-        self.logger.debug(f"[*] Message content: {msg.value()}")
-        return msg
-
-    def commit(self, message: Message | None = None, offsets: list[TopicPartition] | None = None,
-               asynchronous: bool = True) -> list[TopicPartition] | None:
-        """Commit offsets to Kafka."""
-        consumer = self._ensure_consumer()
-        if message:
-            return consumer.commit(message=message, asynchronous=asynchronous)
-        elif offsets:
-            return consumer.commit(offsets=offsets, asynchronous=asynchronous)
-        return consumer.commit(asynchronous=asynchronous)
5 files without changes