dgkafka 1.0.0a14__tar.gz → 1.0.0a16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dgkafka
-Version: 1.0.0a14
+Version: 1.0.0a16
 Summary: Kafka clients
 Home-page: https://gitlab.com/gng-group/dgkafka.git
 Author: Malanris
@@ -35,7 +35,7 @@ class AvroKafkaConsumer(KafkaConsumer):
             self.consumer = AvroConsumer(configs)
             self.logger.info("[*] Avro consumer initialized successfully")
         except Exception as ex:
-            self.logger.error(f"[x] Failed to initialize Avro consumer: {ex}")
+            self.logger.error(f"[x] Failed to initialize avro consumer: {ex}")
             raise
 
     def consume(self, num_messages: int = 1, timeout: float = 1.0, decode_: bool = False, **kwargs: Any) -> Iterator[str | bytes | Message | None]:
@@ -45,9 +45,9 @@ class AvroKafkaProducer(KafkaProducer):
                 default_key_schema=self.default_key_schema,
                 default_value_schema=self.default_value_schema
             )
-            self.logger.info("Avro producer initialized successfully")
+            self.logger.info("[*] Avro producer initialized successfully")
         except Exception as ex:
-            self.logger.error(f"Failed to initialize Avro producer: {ex}")
+            self.logger.error(f"[x] Failed to initialize avro producer: {ex}")
             raise
 
     def produce(
@@ -77,6 +77,8 @@ class AvroKafkaProducer(KafkaProducer):
         producer = self._ensure_producer()
         producer.poll(0)
 
+        self._delivery_status['success'] = None
+
         # Prepare headers
         headers_list = None
         if headers:
@@ -107,14 +109,25 @@ class AvroKafkaProducer(KafkaProducer):
             )
 
             if flush:
-                producer.flush()
+                remaining = producer.flush(timeout)
+                if remaining > 0:
+                    self.logger.warning(f"[!] {remaining} messages remain undelivered after flush timeout")
+                    return False
+
+            # If flush=True, the delivery status should be set by this point
+            if flush and self._delivery_status['success'] is not None:
+                return self._delivery_status['success']
+
+            # If flush=False, we cannot guarantee delivery, so return True
+            # (since technically no error has occurred yet)
+            return True
 
         except SerializerError as ex:
-            self.logger.error(f"Avro serialization failed: {ex}")
-            raise
+            self.logger.error(f"[x] Avro serialization failed: {ex}")
+            return False
         except Exception as ex:
-            self.logger.error(f"Failed to produce Avro message: {ex}")
-            raise
+            self.logger.error(f"[x] Failed to produce Avro message: {ex}")
+            return False
 
     def get_schema(self, subject: str, version: int = 1) -> Dict[str, Any]:
         """Get Avro schema from Schema Registry."""
@@ -22,8 +22,9 @@ class KafkaConsumer:
         """Internal method to initialize consumer"""
         try:
             self.consumer = Consumer(configs)
+            self.logger.info("[*] Consumer initialized successfully")
         except KafkaException as ex:
-            self.logger.error(f"[x] Failed to initialize Kafka consumer: {ex}")
+            self.logger.error(f"[x] Failed to initialize consumer: {ex}")
             raise
 
     def close(self) -> None:
@@ -31,7 +32,7 @@ class KafkaConsumer:
         if self.consumer is not None:
             try:
                 self.consumer.close()
-                self.logger.info("[*] Kafka consumer closed successfully")
+                self.logger.info("[*] Consumer closed successfully")
             except KafkaException as ex:
                 self.logger.error(f"[x] Error closing consumer: {ex}")
                 raise
@@ -66,7 +67,8 @@ class KafkaConsumer:
         else:
             topics_list = [topics] if isinstance(topics, str) else topics
             consumer.subscribe(topics_list)
-            self.logger.info(f"[*] Subscribed to topics: {topics_list}")
+            self.log_subscription_info()
+            # self.logger.info(f"[*] Subscribed to topics: {topics_list}")
 
     def _assign_topic_partition(self, topic: str, partition: int, offset: OffsetType) -> None:
         """Assign to specific partition"""
@@ -90,7 +92,7 @@ class KafkaConsumer:
         if msg is None:
             return None
         if msg.error():
-            self.logger.error(f"Consumer error: {msg.error()}")
+            self.logger.error(f"[x] Consumer error: {msg.error()}")
             return None
         self.logger.info(f"[<] Received message from {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
         self.logger.debug(f"[*] Message content: {msg.value()}")
@@ -105,3 +107,76 @@ class KafkaConsumer:
         elif offsets:
             return consumer.commit(offsets=offsets, asynchronous=asynchronous)
         return consumer.commit(asynchronous=asynchronous)
+
+    def get_subscription_info(self) -> dict:
+        """Get current subscription and assignment information.
+
+        Returns:
+            dict: Dictionary with subscription and assignment details
+                {
+                    'subscribed_topics': list[str] | None,
+                    'assignments': list[dict] | None,
+                    'current_offsets': list[dict] | None
+                }
+        """
+        consumer = self._ensure_consumer()
+
+        try:
+            # Get the current subscription
+            subscribed_topics = consumer.subscription()
+
+            # Get the current partition assignments
+            assignments = consumer.assignment()
+
+            # Get the current positions (offsets)
+            current_offsets = []
+            if assignments:
+                current_offsets = consumer.position(assignments)
+
+            # Build assignment details
+            assignments_info = []
+            for tp in assignments:
+                assignments_info.append({
+                    'topic': tp.topic,
+                    'partition': tp.partition,
+                    'offset': tp.offset
+                })
+
+            # Build current position details
+            offsets_info = []
+            for tp in current_offsets:
+                offsets_info.append({
+                    'topic': tp.topic,
+                    'partition': tp.partition,
+                    'offset': tp.offset
+                })
+
+            return {
+                'subscribed_topics': list(subscribed_topics) if subscribed_topics else None,
+                'assignments': assignments_info if assignments_info else None,
+                'current_offsets': offsets_info if offsets_info else None
+            }
+        except KafkaException as ex:
+            self.logger.error(f"[x] Failed to get subscription info: {ex}")
+            raise
+
+    def log_subscription_info(self) -> None:
+        """Log current subscription and assignment information."""
+        info = self.get_subscription_info()
+
+        if info['subscribed_topics']:
+            self.logger.info(f"[*] Subscribed topics: {', '.join(info['subscribed_topics'])}")
+        else:
+            self.logger.info("[!] Not subscribed to any topics")
+
+        if info['assignments']:
+            self.logger.info("[*] Current assignments:")
+            for assignment in info['assignments']:
+                self.logger.info(f"    - {assignment['topic']} [partition {assignment['partition']}]")
+        else:
+            self.logger.info("[!] No partition assignments")
+
+        if info['current_offsets']:
+            self.logger.info("[*] Current offsets:")
+            for offset in info['current_offsets']:
+                self.logger.info(f"    - {offset['topic']} [partition {offset['partition']}]: offset {offset['offset']}")
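
A usage sketch for the new introspection helpers. The module path and constructor arguments below are assumptions for illustration; dgkafka passes its configs straight through to confluent_kafka:

    from dgkafka.consumer import KafkaConsumer  # module path is an assumption

    consumer = KafkaConsumer(**{'bootstrap.servers': 'localhost:9092',
                                'group.id': 'demo-group'})

    info = consumer.get_subscription_info()
    print(info['subscribed_topics'])  # e.g. ['demo-topic'], or None
    print(info['assignments'])        # None until the group assigns partitions

    consumer.log_subscription_info()  # same data, written to the logger

Note that assignment() and position() only return useful data once the consumer has polled and the group coordinator has assigned partitions; immediately after subscribing, both helpers will report no assignments.
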
@@ -1,6 +1,8 @@
 import uuid
 from typing import Optional, Any
 
+from datetime import datetime, date
+
 from confluent_kafka import Producer, Message
 from dgkafka.errors import ProducerNotSetError
 
@@ -18,6 +20,9 @@ class KafkaProducer:
         """
         self.producer: Producer | None = None
         self.logger = logger_ if logger_ else dglog.Logger()
+
+        self._delivery_status = {'success': None}
+
         if isinstance(self.logger, dglog.Logger):
             self.logger.auto_configure()
         self._init_producer(**configs)
@@ -26,7 +31,7 @@ class KafkaProducer:
         """Internal method to initialize producer."""
         try:
             self.producer = Producer(configs)
-            self.logger.info("[*] Kafka producer initialized successfully")
+            self.logger.info("[*] Producer initialized successfully")
         except Exception as ex:
             self.logger.error(f"[x] Failed to initialize producer: {ex}")
             raise
@@ -69,8 +74,11 @@ class KafkaProducer:
         if err is not None:
             self.logger.error(f"[x] Message delivery failed: {err}")
             self.logger.debug(f"[~] Failed message details: {msg}")
+            self._delivery_status['success'] = False
         else:
-            self.logger.info(f"[>] Message delivered to {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
+            self.logger.info(
+                f"[>] Message delivered to {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
+            self._delivery_status['success'] = True
 
     def produce(
         self,
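
The _delivery_status flag works because confluent_kafka invokes on_delivery callbacks from inside poll() and flush() on the calling thread, so after a successful flush the flag is already populated. A standalone sketch of that mechanism (broker and topic are placeholder assumptions):

    from confluent_kafka import Producer

    status = {'success': None}

    def on_delivery(err, msg):
        # Runs inside poll()/flush(), not on a background thread.
        status['success'] = err is None

    producer = Producer({'bootstrap.servers': 'localhost:9092'})
    producer.produce('demo-topic', value=b'payload', on_delivery=on_delivery)
    producer.flush(5.0)  # callbacks fire before flush() returns (or times out)
    print(status['success'])  # True, False, or None if nothing was delivered in time

Because a single shared flag is used, it reflects only the most recent callback; overlapping produce() calls without an intervening flush would overwrite each other's status.
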
@@ -80,7 +88,7 @@ class KafkaProducer:
         partition: int | None = None,
         headers: dict[str, bytes] | None = None,
         flush: bool = True
-    ) -> None:
+    ) -> bool:
         """Produce a message to Kafka.
 
         Args:
@@ -94,6 +102,8 @@ class KafkaProducer:
         producer = self._ensure_producer()
         producer.poll(0)
 
+        self._delivery_status['success'] = None
+
         # Generate key if not provided
         key = key if key is not None else str(uuid.uuid4())
         key_bytes = key.encode('utf-8')
@@ -110,7 +120,7 @@ class KafkaProducer:
             value = json.dumps(message, ensure_ascii=False, default=dthandler, indent=4).encode('utf-8')
         except Exception as ex:
             self.logger.error(f"[x] Failed to serialize message: {ex}")
-            raise
+            return False
 
         # Prepare message headers
         headers_list = None
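
dthandler itself is not shown in this diff, but the new "from datetime import datetime, date" import suggests it is the json.dumps default hook for date and time values. A sketch of such a handler, as an assumption rather than the package's confirmed implementation:

    import json
    from datetime import datetime, date

    def dthandler(obj):
        # json.dumps calls this for objects it cannot serialize natively.
        if isinstance(obj, (datetime, date)):
            return obj.isoformat()
        raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")

    payload = json.dumps({'ts': datetime(2024, 1, 1, 12, 0)}, default=dthandler)
    # '{"ts": "2024-01-01T12:00:00"}'
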
@@ -138,10 +148,21 @@ class KafkaProducer:
                 headers=headers_list
             )
             if flush:
-                producer.flush()
+                remaining = producer.flush(1.0)  # timeout 1 second
+                if remaining > 0:
+                    self.logger.warning(f"[!] {remaining} messages remain undelivered")
+                    return False
+
+            # If flush=True, the delivery status should be set by this point
+            if flush and self._delivery_status['success'] is not None:
+                return self._delivery_status['success']
+
+            # If flush=False, we cannot guarantee delivery, so return True
+            # (since technically no error has occurred yet)
+            return True
         except Exception as ex:
             self.logger.error(f"[x] Failed to produce message: {ex}")
-            raise
+            return False
 
     def flush(self, timeout: float = 10.0) -> None:
         """Wait for all messages to be delivered.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dgkafka
-Version: 1.0.0a14
+Version: 1.0.0a16
 Summary: Kafka clients
 Home-page: https://gitlab.com/gng-group/dgkafka.git
 Author: Malanris
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "dgkafka"
-version = "1.0.0a14"
+version = "1.0.0a16"
 authors = [
     {name = "Roman Rasputin", email = "admin@roro.su"},
 ]
@@ -6,7 +6,7 @@ with open('README.md') as f:
 
 
 setup(name='dgkafka',
-      version='1.0.0a0',
+      version='1.0.0a16',
       description='Ver.1.0.0',
       long_description=long_description,
       long_description_content_type='text/markdown', # This is important!