dgkafka 1.0.0a0__tar.gz → 1.0.0a2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dgkafka
- Version: 1.0.0a0
+ Version: 1.0.0a2
  Summary: Kafka clients
  Home-page: https://gitlab.com/gng-group/dgkafka.git
  Author: Malanris
@@ -8,17 +8,32 @@ Author-email: Roman Rasputin <admin@roro.su>
  License: MIT License
  Project-URL: Homepage, https://gitlab.com/gng-group/dgkafka
  Project-URL: BugTracker, https://gitlab.com/gng-group/dgkafka/issues
- Keywords: logging,logger,rotation,timed,advanced
+ Keywords: kafka,client,confluent,avro,fastapi,logging
  Classifier: Development Status :: 4 - Beta
  Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
  Classifier: Operating System :: OS Independent
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Classifier: Topic :: System :: Logging
- Requires-Python: >=3.7
+ Classifier: Topic :: System :: Distributed Computing
+ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
+ Requires-Dist: confluent-kafka>=2.1.1
+ Requires-Dist: dglog>=1.0.0
+ Provides-Extra: avro
+ Requires-Dist: requests; extra == "avro"
+ Requires-Dist: fastavro<2; extra == "avro"
+ Requires-Dist: avro<2,>=1.11.1; extra == "avro"
+ Requires-Dist: attrs; extra == "avro"
+ Requires-Dist: cachetools; extra == "avro"
+ Requires-Dist: httpx>=0.26; extra == "avro"
+ Requires-Dist: authlib; extra == "avro"
+ Provides-Extra: json
+ Requires-Dist: pyrsistent; extra == "json"
+ Requires-Dist: jsonschema; extra == "json"
  Dynamic: author
  Dynamic: home-page
  Dynamic: license-file
@@ -39,6 +54,12 @@ For Avro support (requires additional dependencies):
  pip install dgkafka[avro]
  ```

+ For Json support (requires additional dependencies):
+
+ ```bash
+ pip install dgkafka[json]
+ ```
+
  ## Features

  - Producers and consumers for different data formats:
@@ -14,6 +14,12 @@ For Avro support (requires additional dependencies):
  pip install dgkafka[avro]
  ```

+ For Json support (requires additional dependencies):
+
+ ```bash
+ pip install dgkafka[json]
+ ```
+
  ## Features

  - Producers and consumers for different data formats:
@@ -0,0 +1,11 @@
+ from .consumer import KafkaConsumer
+ from .producer import KafkaProducer
+ try:
+     from .avro_consumer import AvroKafkaConsumer
+     from .avro_producer import AvroKafkaProducer
+ except ImportError:
+     pass
+ try:
+     from .json_consumer import JsonKafkaConsumer
+ except ImportError:
+     pass
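
The new package `__init__` wraps the Avro and JSON entry points in `try`/`except ImportError`, so the base install works without the optional extras. A minimal sketch (not from the package itself) of probing which clients ended up exported:

```python
# Sketch: AvroKafkaConsumer / JsonKafkaConsumer only exist on the package
# when the corresponding extra (dgkafka[avro] / dgkafka[json]) is installed.
import dgkafka

has_avro = hasattr(dgkafka, "AvroKafkaConsumer")
has_json = hasattr(dgkafka, "JsonKafkaConsumer")
print(f"avro support: {has_avro}, json support: {has_json}")
```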
@@ -1,17 +1,20 @@
  from typing import Any, Iterator

  from dgkafka.consumer import KafkaConsumer
- from dglog import Logger
+
  from confluent_kafka import Message
  from confluent_kafka.avro import AvroConsumer
  from confluent_kafka.avro.serializer import SerializerError
  from confluent_kafka.avro.cached_schema_registry_client import CachedSchemaRegistryClient

+ import logging
+ import dglog
+

  class AvroKafkaConsumer(KafkaConsumer):
      """Kafka consumer with Avro schema support using Schema Registry."""

-     def __init__(self, schema_registry_url: str, logger_: Logger | None = None, **configs: Any) -> None:
+     def __init__(self, logger_: logging.Logger | dglog.Logger | None = None, **configs: Any) -> None:
          """
          Initialize Avro consumer.

@@ -20,8 +23,8 @@ class AvroKafkaConsumer(KafkaConsumer):
              logger_: Optional logger instance
              configs: Kafka consumer configuration
          """
-         self.schema_registry_url = schema_registry_url
-         self.schema_registry_client = CachedSchemaRegistryClient({'url': schema_registry_url})
+         self.schema_registry_url = configs.get('schema.registry.url')
+         self.schema_registry_client = CachedSchemaRegistryClient({'url': self.schema_registry_url})
          super().__init__(logger_=logger_, **configs)

      def _init_consumer(self, **configs: Any) -> None:
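
The constructor change above means the registry address now travels inside the consumer config instead of a dedicated positional argument. A hedged sketch of the updated call, with placeholder broker, group, and registry values:

```python
# Sketch: 1.0.0a0 took AvroKafkaConsumer("http://localhost:8081", **configs);
# 1.0.0a2 reads 'schema.registry.url' out of the config mapping instead.
consumer = AvroKafkaConsumer(**{
    "bootstrap.servers": "localhost:9092",           # placeholder broker
    "group.id": "example-group",                     # placeholder consumer group
    "schema.registry.url": "http://localhost:8081",  # placeholder registry
})
```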
@@ -35,7 +38,7 @@ class AvroKafkaConsumer(KafkaConsumer):
              self.logger.error(f"[x] Failed to initialize Avro consumer: {ex}")
              raise

-     def consume(self, num_messages: int = 1, timeout: float = 10.0, **kwargs: Any) -> Iterator[dict[str, Any] | None, Message]:
+     def consume(self, num_messages: int = 1, timeout: float = 10.0, **kwargs: Any) -> Iterator[str | bytes | Message | None]:
          """
          Consume Avro-encoded messages.

@@ -50,8 +53,9 @@ class AvroKafkaConsumer(KafkaConsumer):
          consumer = self._ensure_consumer()

          for _ in range(num_messages):
+             msg = self._consume(consumer, timeout)
              try:
-                 if (msg := self._consume(consumer, timeout)) is None:
+                 if msg is None:
                      continue
                  yield msg.value()
              except SerializerError as e:
@@ -3,7 +3,8 @@ from confluent_kafka.avro import AvroProducer
  from confluent_kafka.avro.serializer import SerializerError
  from confluent_kafka.avro.cached_schema_registry_client import CachedSchemaRegistryClient

- from dglog import Logger
+ import dglog
+ import logging

  from dgkafka.producer import KafkaProducer

@@ -13,10 +14,9 @@ class AvroKafkaProducer(KafkaProducer):

      def __init__(
              self,
-             schema_registry_url: str,
-             default_key_schema: Optional[Dict[str, Any]] = None,
-             default_value_schema: Optional[Dict[str, Any]] = None,
-             logger_: Optional[Logger] = None,
+             default_key_schema: str | None = None,
+             default_value_schema: str | None = None,
+             logger_: logging.Logger | dglog.Logger | None = None,
              **configs: Any
      ) -> None:
          """
@@ -29,17 +29,17 @@ class AvroKafkaProducer(KafkaProducer):
              logger_: Optional logger instance
              configs: Kafka producer configuration
          """
-         self.schema_registry_url = schema_registry_url
+         self.schema_registry_url = configs.get('schema.registry.url')
+         assert self.schema_registry_url is not None, "schema.registry.url is required"
+
          self.default_key_schema = default_key_schema
          self.default_value_schema = default_value_schema
-         self.schema_registry_client = CachedSchemaRegistryClient({'url': schema_registry_url})
+         self.schema_registry_client = CachedSchemaRegistryClient({'url': self.schema_registry_url})
          super().__init__(logger_=logger_, **configs)

      def _init_producer(self, **configs: Any) -> None:
          """Initialize AvroProducer instance."""
          try:
-             # AvroProducer requires schema registry in config
-             configs['schema.registry.url'] = self.schema_registry_url
              self.producer = AvroProducer(
                  config=configs,
                  default_key_schema=self.default_key_schema,
@@ -53,12 +53,12 @@ class AvroKafkaProducer(KafkaProducer):
      def produce(
              self,
              topic: str,
-             value: Union[Dict[str, Any], Any],
-             key: Optional[Union[Dict[str, Any], str]] = None,
-             value_schema: Optional[Dict[str, Any]] = None,
-             key_schema: Optional[Dict[str, Any]] = None,
-             partition: Optional[int] = None,
-             headers: Optional[Dict[str, bytes]] = None,
+             value: dict[str, Any] | Any,
+             key: dict[str, Any] | str | None = None,
+             value_schema: dict[str, Any] | None = None,
+             key_schema: dict[str, Any] | None = None,
+             partition: int | None = None,
+             headers: dict[str, bytes] | None = None,
              flush: bool = True
      ) -> None:
          """
@@ -0,0 +1,89 @@
+ from typing import Literal, Optional
+ from pydantic import BaseModel, Field
+
+
+ class KafkaConfig(BaseModel):
+     bootstrap_servers: str = Field(..., alias="bootstrap.servers",
+         description="Comma-separated list of broker addresses")
+     security_protocol: Optional[str] = Field(default="SSL", alias="security.protocol",
+         description="Protocol used to communicate with brokers")
+     ssl_ca_location: Optional[str] = Field(default=None, alias="ssl.ca.location",
+         description="Path to CA certificate file")
+     ssl_certificate_location: Optional[str] = Field(default=None, alias="ssl.certificate.location",
+         description="Path to client certificate file")
+     ssl_key_location: Optional[str] = Field(default=None, alias="ssl.key.location",
+         description="Path to client private key file")
+     ssl_certificate_verification: Optional[bool] = Field(default=True,
+         alias="enable.ssl.certificate.verification",
+         description="Enable SSL certificate verification")
+     ssl_endpoint_identification_algorithm: Optional[str] = Field(default="https",
+         alias="ssl.endpoint.identification.algorithm",
+         description="Endpoint identification algorithm")
+
+     class Config:
+         allow_population_by_field_name = True
+         extra = "forbid"
+
+
+ class ConsumerConfig(KafkaConfig):
+     group_id: str = Field(..., alias="group.id",
+         description="Consumer group identifier")
+     enable_auto_commit: bool = Field(default=False, alias="enable.auto.commit",
+         description="Automatically commit offsets periodically")
+     auto_offset_reset: Literal["earliest", "latest"] = Field(default="earliest",
+         alias="auto.offset.reset",
+         description="Reset policy when no offset is available")
+     max_poll_records: Optional[int] = Field(default=500, alias="max.poll.records",
+         description="Maximum records per poll")
+     session_timeout_ms: int = Field(default=10000, alias="session.timeout.ms",
+         description="Timeout for consumer session")
+
+
+ class ProducerConfig(KafkaConfig):
+     acks: Literal["all", "-1", "0", "1"] = Field(default="all",
+         description="Number of acknowledgments")
+     retries: int = Field(default=3, description="Number of retries on failure")
+     linger_ms: int = Field(default=0, alias="linger.ms",
+         description="Delay in milliseconds to wait for messages in the buffer")
+     compression_type: Optional[str] = Field(default=None, alias="compression.type",
+         description="Compression codec to use")
+     batch_size: int = Field(default=16384, alias="batch.size",
+         description="Batch size in bytes")
+
+
+ class AvroConfigMixin(BaseModel):
+     schema_registry_url: str = Field(..., alias="schema.registry.url",
+         description="URL of Schema Registry")
+     schema_registry_ssl_ca_location: Optional[str] = Field(default=None,
+         alias="schema.registry.ssl.ca.location",
+         description="Schema Registry CA certificate path")
+     auto_register_schemas: bool = Field(default=True, alias="auto.register.schemas",
+         description="Automatically register schemas")
+
+
+ class AvroConsumerConfig(ConsumerConfig, AvroConfigMixin):
+     use_latest_version: bool = Field(default=True, alias="use.latest.version",
+         description="Use latest schema version")
+     skip_known_types: bool = Field(default=False, alias="skip.known.types",
+         description="Skip known types during deserialization")
+
+
+ class AvroProducerConfig(ProducerConfig, AvroConfigMixin):
+     value_subject_name_strategy: Optional[str] = Field(default=None,
+         alias="value.subject.name.strategy",
+         description="Strategy for subject name generation")
+     key_subject_name_strategy: Optional[str] = Field(default=None,
+         alias="key.subject.name.strategy",
+         description="Strategy for key subject name generation")
+
+
+ class JsonConsumerConfig(ConsumerConfig):
+     json_deserializer: Optional[str] = Field(default=None,
+         description="Custom JSON deserializer function")
+     encoding: str = Field(default="utf-8", description="Message encoding")
+
+
+ class JsonProducerConfig(ProducerConfig):
+     json_serializer: Optional[str] = Field(default=None,
+         description="Custom JSON serializer function")
+     encoding: str = Field(default="utf-8", description="Message encoding")
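
The new `config.py` maps librdkafka's dotted option names onto snake_case fields via pydantic aliases. A minimal sketch of how such a model might be round-tripped into the plain dict the clients expect, assuming the pydantic v1 API implied by `allow_population_by_field_name` (all values are placeholders):

```python
# Sketch (pydantic v1 API): validate a consumer config, then export it with
# the dotted aliases so it can be splatted into KafkaConsumer(**kafka_configs).
from dgkafka.config import ConsumerConfig

cfg = ConsumerConfig(**{
    "bootstrap.servers": "localhost:9092",  # placeholder broker
    "group.id": "example-group",            # placeholder consumer group
})
kafka_configs = cfg.dict(by_alias=True, exclude_none=True)
```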
@@ -1,17 +1,21 @@
  from typing import Literal, Iterator, Any
  from dgkafka.errors import ConsumerNotSetError
- from dglog import Logger
+
- from confluent_kafka import Consumer, KafkaError, KafkaException, Message, TopicPartition
+ from confluent_kafka import Consumer, KafkaException, Message, TopicPartition
  from confluent_kafka import OFFSET_STORED, OFFSET_BEGINNING, OFFSET_END

+ import logging
+ import dglog
+
  OffsetType = Literal[OFFSET_STORED, OFFSET_BEGINNING, OFFSET_END] | int


  class KafkaConsumer:
-     def __init__(self, logger_: Logger | None = None, **configs: Any) -> None:
+     def __init__(self, logger_: logging.Logger | dglog.Logger | None = None, **configs: Any) -> None:
          self.consumer: Consumer | None = None
-         self.logger = logger_ if logger_ else Logger()
-         self.logger.auto_configure()
+         self.logger = logger_ if logger_ else dglog.Logger()
+         if isinstance(self.logger, dglog.Logger):
+             self.logger.auto_configure()
          self._init_consumer(**configs)

      def _init_consumer(self, **configs: Any) -> None:
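
With this change the consumer (and, in the mirrored hunk for the producer below) accepts either a standard `logging.Logger` or a `dglog.Logger`, and only the latter is auto-configured. A short sketch of passing a stdlib logger, with placeholder connection values:

```python
# Sketch: a plain logging.Logger is accepted as of 1.0.0a2; dglog-specific
# auto_configure() is skipped for it.
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("dgkafka.example")         # placeholder logger name

consumer = KafkaConsumer(
    logger_=log,
    **{"bootstrap.servers": "localhost:9092",      # placeholder broker
       "group.id": "example-group"},               # placeholder consumer group
)
```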
@@ -84,10 +88,10 @@ class KafkaConsumer:
      def _consume(self, consumer: Consumer, timeout: float) -> Message | None:
          msg = consumer.poll(timeout)
          if msg is None:
-             return
+             return None
          if msg.error():
              self.logger.error(f"Consumer error: {msg.error()}")
-             return
+             return None
          self.logger.info(f"[<] Received message from {msg.topic()} [partition {msg.partition()}, offset {msg.offset()}]")
          self.logger.debug(f"[*] Message content: {msg.value()}")
          return msg
@@ -1,12 +1,15 @@
  import uuid
  from typing import Optional, Any
- from dglog import Logger
+
  from confluent_kafka import Producer, Message
  from dgkafka.errors import ProducerNotSetError

+ import logging
+ import dglog
+

  class KafkaProducer:
-     def __init__(self, logger_: Logger | None = None, **configs: Any) -> None:
+     def __init__(self, logger_: logging.Logger | dglog.Logger | None = None, **configs: Any) -> None:
          """Initialize Kafka producer.

          Args:
@@ -14,8 +17,9 @@ class KafkaProducer:
              configs: Kafka producer configuration
          """
          self.producer: Producer | None = None
-         self.logger = logger_ if logger_ else Logger()
-         self.logger.auto_configure()
+         self.logger = logger_ if logger_ else dglog.Logger()
+         if isinstance(self.logger, dglog.Logger):
+             self.logger.auto_configure()
          self._init_producer(**configs)

      def _init_producer(self, **configs: Any) -> None:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dgkafka
- Version: 1.0.0a0
+ Version: 1.0.0a2
  Summary: Kafka clients
  Home-page: https://gitlab.com/gng-group/dgkafka.git
  Author: Malanris
@@ -8,17 +8,32 @@ Author-email: Roman Rasputin <admin@roro.su>
  License: MIT License
  Project-URL: Homepage, https://gitlab.com/gng-group/dgkafka
  Project-URL: BugTracker, https://gitlab.com/gng-group/dgkafka/issues
- Keywords: logging,logger,rotation,timed,advanced
+ Keywords: kafka,client,confluent,avro,fastapi,logging
  Classifier: Development Status :: 4 - Beta
  Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
  Classifier: Operating System :: OS Independent
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Classifier: Topic :: System :: Logging
- Requires-Python: >=3.7
+ Classifier: Topic :: System :: Distributed Computing
+ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
+ Requires-Dist: confluent-kafka>=2.1.1
+ Requires-Dist: dglog>=1.0.0
+ Provides-Extra: avro
+ Requires-Dist: requests; extra == "avro"
+ Requires-Dist: fastavro<2; extra == "avro"
+ Requires-Dist: avro<2,>=1.11.1; extra == "avro"
+ Requires-Dist: attrs; extra == "avro"
+ Requires-Dist: cachetools; extra == "avro"
+ Requires-Dist: httpx>=0.26; extra == "avro"
+ Requires-Dist: authlib; extra == "avro"
+ Provides-Extra: json
+ Requires-Dist: pyrsistent; extra == "json"
+ Requires-Dist: jsonschema; extra == "json"
  Dynamic: author
  Dynamic: home-page
  Dynamic: license-file
@@ -39,6 +54,12 @@ For Avro support (requires additional dependencies):
  pip install dgkafka[avro]
  ```

+ For Json support (requires additional dependencies):
+
+ ```bash
+ pip install dgkafka[json]
+ ```
+
  ## Features

  - Producers and consumers for different data formats:
@@ -5,6 +5,7 @@ setup.py
  dgkafka/__init__.py
  dgkafka/avro_consumer.py
  dgkafka/avro_producer.py
+ dgkafka/config.py
  dgkafka/consumer.py
  dgkafka/errors.py
  dgkafka/json_consumer.py
@@ -13,4 +14,5 @@ dgkafka.egg-info/PKG-INFO
  dgkafka.egg-info/SOURCES.txt
  dgkafka.egg-info/dependency_links.txt
  dgkafka.egg-info/not-zip-safe
+ dgkafka.egg-info/requires.txt
  dgkafka.egg-info/top_level.txt
@@ -0,0 +1,15 @@
+ confluent-kafka>=2.1.1
+ dglog>=1.0.0
+
+ [avro]
+ requests
+ fastavro<2
+ avro<2,>=1.11.1
+ attrs
+ cachetools
+ httpx>=0.26
+ authlib
+
+ [json]
+ pyrsistent
+ jsonschema
@@ -0,0 +1,49 @@
+ [build-system]
+ requires = ["setuptools>=42", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "dgkafka"
+ version = "1.0.0a2"
+ authors = [
+     {name = "Roman Rasputin", email = "admin@roro.su"},
+ ]
+ description = "Kafka clients"
+ readme = "README.md"
+ requires-python = ">=3.10"
+ classifiers = [
+     "Development Status :: 4 - Beta",
+     "Intended Audience :: Developers",
+     "License :: OSI Approved :: MIT License",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Operating System :: OS Independent",
+     "Topic :: Software Development :: Libraries :: Python Modules",
+     "Topic :: System :: Logging",
+     "Topic :: System :: Distributed Computing",
+ ]
+ keywords = ["kafka", "client", "confluent", "avro", "fastapi", "logging"]
+ license = {text = "MIT License"}
+ dependencies = [
+     "confluent-kafka>=2.1.1",
+     "dglog>=1.0.0",
+ ]
+
+ [project.optional-dependencies]
+ avro = [
+     "requests",
+     "fastavro<2",
+     "avro>=1.11.1,<2",
+     "attrs",
+     "cachetools",
+     "httpx>=0.26",
+     "authlib"
+ ]
+ json = [
+     "pyrsistent",
+     "jsonschema"
+ ]
+
+ [project.urls]
+ Homepage = "https://gitlab.com/gng-group/dgkafka"
+ BugTracker = "https://gitlab.com/gng-group/dgkafka/issues"
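
Given the extras declared above, installing this release with both optional feature sets might look like:

```bash
pip install "dgkafka[avro,json]==1.0.0a2"
```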
@@ -1,5 +0,0 @@
- from consumer import KafkaConsumer
- from avro_consumer import AvroKafkaConsumer
- from producer import KafkaProducer
- from avro_producer import AvroKafkaProducer
- from json_consumer import JsonKafkaConsumer
@@ -1,28 +0,0 @@
- [build-system]
- requires = ["setuptools>=42", "wheel"]
- build-backend = "setuptools.build_meta"
-
- [project]
- name = "dgkafka"
- version = "1.0.0a0"
- authors = [
-     {name="Roman Rasputin", email="admin@roro.su"},
- ]
- description = "Kafka clients"
- readme = "README.md"
- requires-python = ">=3.7"
- classifiers = [
-     "Development Status :: 4 - Beta",
-     "Intended Audience :: Developers",
-     "License :: OSI Approved :: MIT License",
-     "Programming Language :: Python :: 3",
-     "Operating System :: OS Independent",
-     "Topic :: Software Development :: Libraries :: Python Modules",
-     "Topic :: System :: Logging",
- ]
- keywords = ["logging", "logger", "rotation", "timed", "advanced"]
- license = {text = "MIT License"}
-
- [project.urls]
- Homepage = "https://gitlab.com/gng-group/dgkafka"
- BugTracker = "https://gitlab.com/gng-group/dgkafka/issues"