dgkafka 1.0.0a17__py3-none-any.whl → 1.0.2a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dgkafka/avro_producer.py +3 -3
- dgkafka/config.py +0 -1
- dgkafka/producer.py +6 -5
- {dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/METADATA +1 -1
- dgkafka-1.0.2a0.dist-info/RECORD +13 -0
- dgkafka-1.0.0a17.dist-info/RECORD +0 -13
- {dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/WHEEL +0 -0
- {dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/licenses/LICENSE +0 -0
- {dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/top_level.txt +0 -0
dgkafka/avro_producer.py
CHANGED
@@ -59,7 +59,8 @@ class AvroKafkaProducer(KafkaProducer):
         key_schema: dict[str, Any] | None = None,
         partition: int | None = None,
         headers: dict[str, bytes] | None = None,
-        flush: bool = True
+        flush: bool = True,
+        flush_timeout: float | None = None
     ) -> None:
         """
         Produce Avro-encoded message to Kafka.
@@ -109,9 +110,8 @@ class AvroKafkaProducer(KafkaProducer):
         )
 
         if flush:
-            remaining =
+            remaining = self.flush(flush_timeout)  # timeout 1 second
             if remaining > 0:
-                self.logger.warning(f"[!] {remaining} messages remain undelivered after flush timeout")
                 return False
 
         # Если flush=True, статус должен быть установлен к этому моменту
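For callers, the visible change in this file is the new flush_timeout keyword next to flush. The sketch below shows what a call site might look like; only key_schema, partition, headers, flush, and flush_timeout are confirmed by this diff, while the topic and value arguments (and the rest of the signature) are assumptions made for illustration.

```python
from dgkafka.avro_producer import AvroKafkaProducer


def send_event(producer: AvroKafkaProducer) -> None:
    # Hypothetical call site: the first two arguments (topic and Avro value) are
    # assumed names/positions; the keyword arguments are the ones visible in this diff.
    producer.produce(
        "user-events",                    # topic (assumed)
        {"user_id": "42"},                # Avro value (assumed)
        key_schema=None,                  # optional Avro key schema
        partition=None,                   # let the partitioner choose
        headers={"trace-id": b"abc123"},  # dict[str, bytes], per the signature
        flush=True,                       # flush right after producing
        flush_timeout=5.0,                # new in 1.0.2a0: cap the flush wait in seconds
    )
```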
dgkafka/config.py
CHANGED
@@ -74,7 +74,6 @@ class AvroConsumerConfig(ConsumerConfig, AvroConfigMixin):
 
 class AvroProducerConfig(ProducerConfig, AvroConfigMixin):
     """Avro producer configuration with Schema Registry support"""
-    max_schemas_per_subject: int = Field(default=1000, alias="max.schemas.per.subject")
 
     @classmethod
     def set(cls, config_dict: Dict[str, Any]) -> "AvroProducerConfig":
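The deleted line declared max_schemas_per_subject as a pydantic field with a dotted alias and a default of 1000; removing it means the producer config model no longer exposes that setting. The following standalone sketch illustrates the alias pattern the line relied on; it assumes pydantic v2 and is not dgkafka's actual class.

```python
from pydantic import BaseModel, ConfigDict, Field


class ProducerConfigSketch(BaseModel):
    """Illustrative stand-in, not dgkafka's AvroProducerConfig."""

    model_config = ConfigDict(populate_by_name=True)

    # Mirrors the field removed in 1.0.2a0: a typed default exposed
    # under a dotted configuration-style alias.
    max_schemas_per_subject: int = Field(default=1000, alias="max.schemas.per.subject")


# Dotted keys validate into the attribute and round-trip back out by alias.
cfg = ProducerConfigSketch.model_validate({"max.schemas.per.subject": 500})
assert cfg.max_schemas_per_subject == 500
assert cfg.model_dump(by_alias=True) == {"max.schemas.per.subject": 500}
```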
dgkafka/producer.py
CHANGED
@@ -87,7 +87,8 @@ class KafkaProducer:
         key: str | None = None,
         partition: int | None = None,
         headers: dict[str, bytes] | None = None,
-        flush: bool = True
+        flush: bool = True,
+        flush_timeout: float | None = None
     ) -> bool:
         """Produce a message to Kafka.
 
@@ -148,9 +149,8 @@
                 headers=headers_list
             )
             if flush:
-                remaining =
+                remaining = self.flush(flush_timeout)  # timeout 1 second
                 if remaining > 0:
-                    self.logger.warning(f"[!] {remaining} messages remain undelivered")
                     return False
 
             # Если flush=True, статус должен быть установлен к этому моменту
@@ -164,7 +164,7 @@
             self.logger.error(f"[x] Failed to produce message: {ex}")
             return False
 
-    def flush(self, timeout: float =
+    def flush(self, timeout: float | None = None) -> int | None:
         """Wait for all messages to be delivered.
 
         Args:
@@ -174,7 +174,8 @@
         try:
             remaining = producer.flush(timeout)
             if remaining > 0:
-                self.logger.warning(f"[!] {remaining} messages remain undelivered
+                self.logger.warning(f"[!] {remaining} messages remain undelivered")
+            return remaining
         except Exception as ex:
            self.logger.error(f"[x] Flush failed: {ex}")
            raise
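Taken together, produce() now forwards an optional flush_timeout to flush(), and flush() accepts an optional timeout and returns how many messages were still queued. The sketch below reproduces that control flow in a standalone function; it assumes the underlying client is confluent_kafka (the diff only shows producer.flush(timeout) being treated as a message count, which matches that client), and every name apart from flush and flush_timeout is illustrative.

```python
import logging

from confluent_kafka import Producer  # assumed underlying client, not shown in the diff

logger = logging.getLogger(__name__)


def produce_with_bounded_flush(
    producer: Producer,
    topic: str,
    value: bytes,
    flush: bool = True,
    flush_timeout: float | None = None,
) -> bool:
    """Sketch of the new control flow: produce, optionally flush with a bound,
    and report failure if messages are still queued afterwards."""
    producer.produce(topic, value=value)
    if not flush:
        return True
    # Producer.flush(timeout) returns the number of messages still awaiting
    # delivery; calling it without a timeout blocks until the queue drains.
    remaining = producer.flush() if flush_timeout is None else producer.flush(flush_timeout)
    if remaining > 0:
        logger.warning("[!] %d messages remain undelivered", remaining)
        return False
    return True
```

The sketch treats an omitted timeout as an unbounded wait; how dgkafka's own flush() handles a None timeout is not visible in this diff.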
dgkafka-1.0.2a0.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
+dgkafka/__init__.py,sha256=fnqVZROyHXipdmhqZaa9XUjvQe795JJKFakwTndAiIw,286
+dgkafka/avro_consumer.py,sha256=fI-VYQdw3bQJoDN4jS0F5Aa78wlxmjg5lmnTumw9FDs,2917
+dgkafka/avro_producer.py,sha256=Ye7X3YhF702lVNuWBySllDMm2_9VYKEhc1FpI2sWOPs,5383
+dgkafka/config.py,sha256=y5O1C0rEJET2Bjes3Y1RzHo9ISnkQE4k_Lx8uOo6wys,3740
+dgkafka/consumer.py,sha256=OfONH61xj1sD4Z0YIQngfJOOwkx8Ko9waNYCKoXtJL4,8371
+dgkafka/errors.py,sha256=PaH46tXameS--hrrUXKhQkZlBHvMSMPmjhVeRkmFvV0,95
+dgkafka/json_consumer.py,sha256=7Gzn7C9WpyCTPDV6eFDugAx5gC9vdV-HrTh3Nv--zIE,1152
+dgkafka/producer.py,sha256=GQPVYrbAMkbcbW2GxHR7M5UUyeGgq_ADKwFLsVFR3Ag,6507
+dgkafka-1.0.2a0.dist-info/licenses/LICENSE,sha256=pAZXnNE2dxxwXFIduGyn1gpvPefJtUYOYZOi3yeGG94,1068
+dgkafka-1.0.2a0.dist-info/METADATA,sha256=dGxlbsztvUAfpjcTq7zWst5SEQ1GnVFUucZY9nrYmJE,6061
+dgkafka-1.0.2a0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dgkafka-1.0.2a0.dist-info/top_level.txt,sha256=GyNrxOh7IPdL0t2SxH8DWxg3fUma-ezQ1Kz4zIr2B7U,8
+dgkafka-1.0.2a0.dist-info/RECORD,,
dgkafka-1.0.0a17.dist-info/RECORD
REMOVED
@@ -1,13 +0,0 @@
-dgkafka/__init__.py,sha256=fnqVZROyHXipdmhqZaa9XUjvQe795JJKFakwTndAiIw,286
-dgkafka/avro_consumer.py,sha256=fI-VYQdw3bQJoDN4jS0F5Aa78wlxmjg5lmnTumw9FDs,2917
-dgkafka/avro_producer.py,sha256=UL0RBaEKl_lbNF1UI-4w8R3AIEGHYI-Vna3iVZIlJqo,5421
-dgkafka/config.py,sha256=TmfB0IOauSPr4B8InOZdoebAEGeabs7HZUXuozAFFlk,3828
-dgkafka/consumer.py,sha256=OfONH61xj1sD4Z0YIQngfJOOwkx8Ko9waNYCKoXtJL4,8371
-dgkafka/errors.py,sha256=PaH46tXameS--hrrUXKhQkZlBHvMSMPmjhVeRkmFvV0,95
-dgkafka/json_consumer.py,sha256=7Gzn7C9WpyCTPDV6eFDugAx5gC9vdV-HrTh3Nv--zIE,1152
-dgkafka/producer.py,sha256=WHVhw5HyJINxvUg_j_9iiQ0N7I3jc4niFXpZfkDHUOQ,6519
-dgkafka-1.0.0a17.dist-info/licenses/LICENSE,sha256=pAZXnNE2dxxwXFIduGyn1gpvPefJtUYOYZOi3yeGG94,1068
-dgkafka-1.0.0a17.dist-info/METADATA,sha256=OML5JG3E8w6nYbMXUNMB_q3iSxplfy-g7y8FTMNosig,6062
-dgkafka-1.0.0a17.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-dgkafka-1.0.0a17.dist-info/top_level.txt,sha256=GyNrxOh7IPdL0t2SxH8DWxg3fUma-ezQ1Kz4zIr2B7U,8
-dgkafka-1.0.0a17.dist-info/RECORD,,
{dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/WHEEL
File without changes
{dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/licenses/LICENSE
File without changes
{dgkafka-1.0.0a17.dist-info → dgkafka-1.0.2a0.dist-info}/top_level.txt
File without changes