dgkafka 1.0.0a7__py3-none-any.whl → 1.0.0a9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dgkafka/avro_consumer.py CHANGED
@@ -38,7 +38,7 @@ class AvroKafkaConsumer(KafkaConsumer):
  self.logger.error(f"[x] Failed to initialize Avro consumer: {ex}")
  raise

- def consume(self, num_messages: int = 1, timeout: float = 10.0, **kwargs: Any) -> Iterator[str | bytes | Message | None]:
+ def consume(self, num_messages: int = 1, timeout: float = 1.0, **kwargs: Any) -> Iterator[str | bytes | Message | None]:
  """
  Consume Avro-encoded messages.
 
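The default wait in AvroKafkaConsumer.consume drops from 10.0 s to 1.0 s in this release. A minimal sketch of preserving the old behavior, assuming an already-initialized AvroKafkaConsumer instance named avro_consumer (the variable name and message counts are hypothetical):

# Sketch only: `avro_consumer` stands in for an initialized AvroKafkaConsumer.
# Callers that relied on the previous 10-second default should now pass
# timeout explicitly.
for msg in avro_consumer.consume(num_messages=5, timeout=10.0):
    if msg is not None:
        print(msg)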
dgkafka/config.py CHANGED
@@ -1,92 +1,82 @@
- from typing import Literal, Optional
- from pydantic import BaseModel, Field, validator, Json
+ from typing import Dict, Any, Optional, Literal
+ from pydantic import BaseModel, Field, ConfigDict
  from enum import Enum

-
  class SecurityProtocol(str, Enum):
  PLAINTEXT = "PLAINTEXT"
  SSL = "SSL"
  SASL_PLAINTEXT = "SASL_PLAINTEXT"
  SASL_SSL = "SASL_SSL"

-
  class KafkaConfig(BaseModel):
  """Base configuration for all Kafka clients"""
- bootstrap_servers: str = Field(..., alias="bootstrap.servers",
- description="Comma-separated list of broker addresses")
- security_protocol: SecurityProtocol = Field(default=SecurityProtocol.SSL,
+ bootstrap_servers: str = Field(..., alias="bootstrap.servers")
+ security_protocol: Literal["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"] = Field(default="SSL",
  alias="security.protocol")
- ssl_cafile: Optional[str] = Field(default=None, alias="ssl.ca.location")
- ssl_certfile: Optional[str] = Field(default=None, alias="ssl.certificate.location")
- ssl_keyfile: Optional[str] = Field(default=None, alias="ssl.key.location")
- ssl_check_hostname: bool = Field(default=True, alias="enable.ssl.certificate.verification")
- ssl_password: Optional[str] = Field(default=None, alias="ssl.key.password")
-
- def get(self) -> dict[str, Json]:
- return self.model_dump(by_alias=True)
-
- class Config:
- validate_by_name = True
- use_enum_values = True
- extra = "forbid"
+ ssl_ca_location: Optional[str] = Field(default=None, alias="ssl.ca.location")
+ ssl_certificate_location: Optional[str] = Field(default=None, alias="ssl.certificate.location")
+ ssl_key_location: Optional[str] = Field(default=None, alias="ssl.key.location")
+ ssl_endpoint_identification_algorithm: Optional[str] = Field(default=None,
+ alias="ssl.endpoint.identification.algorithm")
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ extra="forbid",
+ protected_namespaces=()
+ )
+
+ def get(self) -> Dict[str, Any]:
+ """Get config in format suitable for confluent_kafka"""
+ return self.model_dump(by_alias=True, exclude_none=True)

+ @classmethod
+ def set(cls, config_dict: Dict[str, Any]) -> "KafkaConfig":
+ """Create config from dictionary"""
+ return cls(**config_dict)

  class ConsumerConfig(KafkaConfig):
- """Configuration for basic Kafka consumer"""
+ """Base consumer configuration"""
  group_id: str = Field(..., alias="group.id")
  enable_auto_commit: bool = Field(default=False, alias="enable.auto.commit")
- auto_offset_reset: Literal["earliest", "latest"] = Field(default="latest",
- alias="auto.offset.reset")
+ auto_offset_reset: Literal["earliest", "latest"] = Field(
+ default="earliest", alias="auto.offset.reset")
  session_timeout_ms: int = Field(default=10000, alias="session.timeout.ms")
  max_poll_interval_ms: int = Field(default=300000, alias="max.poll.interval.ms")
- fetch_max_bytes: int = Field(default=52428800, alias="fetch.max.bytes")
-

  class ProducerConfig(KafkaConfig):
- """Configuration for basic Kafka producer"""
+ """Base producer configuration"""
  acks: Literal["all", "0", "1"] = Field(default="all")
  retries: int = Field(default=0)
- linger_ms: int = Field(default=0, alias="linger.ms")
- compression_type: Literal["none", "gzip", "snappy", "lz4", "zstd"] = Field(
- default="none", alias="compression.type")
+ compression_type: str = Field(default="none", alias="compression.type")
  batch_size: int = Field(default=16384, alias="batch.size")
- max_in_flight: int = Field(default=1000000, alias="max.in.flight.requests.per.connection")
-
-
- class AvroConsumerConfig(ConsumerConfig):
- """Configuration for Avro consumer with Schema Registry"""
- schema_registry_url: str = Field(..., alias="schema.registry.url")
- schema_registry_ssl_cafile: Optional[str] = Field(default=None,
- alias="schema.registry.ssl.ca.location")
- schema_registry_basic_auth_user_info: Optional[str] = Field(default=None,
- alias="schema.registry.basic.auth.user.info")
- specific_avro_reader: bool = Field(default=False, alias="specific.avro.reader")
+ linger_ms: int = Field(default=0, alias="linger.ms")


- class AvroProducerConfig(ProducerConfig):
- """Configuration for Avro producer with Schema Registry"""
+ class AvroConfigMixin:
  schema_registry_url: str = Field(..., alias="schema.registry.url")
- schema_registry_ssl_cafile: Optional[str] = Field(default=None,
- alias="schema.registry.ssl.ca.location")
- schema_registry_basic_auth_user_info: Optional[str] = Field(default=None,
- alias="schema.registry.basic.auth.user.info")
- max_schemas_per_subject: int = Field(default=1000, alias="max.schemas.per.subject")
+ schema_registry_ssl_ca_location: Optional[str] = Field(
+ default=None, alias="schema.registry.ssl.ca.location")
+ schema_registry_basic_auth_user_info: Optional[str] = Field(
+ default=None, alias="basic.auth.user.info")


- class JsonConsumerConfig(ConsumerConfig):
- """Configuration for JSON consumer"""
- json_deserializer: Optional[str] = None # Custom deserializer function
- encoding: str = Field(default="utf-8")
+ class AvroConsumerConfig(ConsumerConfig, AvroConfigMixin):
+ """Avro consumer configuration with Schema Registry support"""

-
- class JsonProducerConfig(ProducerConfig):
- """Configuration for JSON producer"""
- json_serializer: Optional[str] = None # Custom serializer function
- encoding: str = Field(default="utf-8")
+ @classmethod
+ def set(cls, config_dict: Dict[str, Any]) -> "AvroConsumerConfig":
+ """Create from dictionary with Schema Registry validation"""
+ if "schema.registry.url" not in config_dict:
+ raise ValueError("schema.registry.url is required for AvroConsumer")
+ return cls(**config_dict)
+
+ class AvroProducerConfig(ProducerConfig, AvroConfigMixin):
+ """Avro producer configuration with Schema Registry support"""
+ max_schemas_per_subject: int = Field(default=1000, alias="max.schemas.per.subject")

  @classmethod
- @validator("compression_type")
- def validate_compression(cls, v):
- if v not in ["none", "gzip", "snappy", "lz4", "zstd"]:
- raise ValueError("Unsupported compression type")
- return v
+ def set(cls, config_dict: Dict[str, Any]) -> "AvroProducerConfig":
+ """Create from dictionary with Schema Registry validation"""
+ if "schema.registry.url" not in config_dict:
+ raise ValueError("schema.registry.url is required for AvroProducer")
+ return cls(**config_dict)
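The rewritten config module swaps the pydantic v1 class Config / validator style for pydantic v2 ConfigDict and adds a set()/get() pair for round-tripping confluent-style dicts. A hedged sketch of how the new API appears to be used; the import path dgkafka.config and all broker, group, and registry values below are assumptions, not taken from the diff:

from dgkafka.config import ConsumerConfig, AvroProducerConfig

# set() builds a model from a confluent-style dict; aliases such as
# "bootstrap.servers" are accepted, and populate_by_name=True also allows
# the Python field names (bootstrap_servers, group_id, ...).
consumer_cfg = ConsumerConfig.set({
    "bootstrap.servers": "broker1:9093",
    "group.id": "example-group",
    "ssl.ca.location": "/etc/kafka/ca.pem",
})

# get() dumps by alias and drops None values, so unset SSL options never
# reach the confluent_kafka client config.
print(consumer_cfg.get())

# The Avro variants refuse dicts that lack "schema.registry.url".
avro_cfg = AvroProducerConfig.set({
    "bootstrap.servers": "broker1:9093",
    "schema.registry.url": "https://schema-registry.example:8081",
})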
dgkafka/consumer.py CHANGED
@@ -76,7 +76,7 @@ class KafkaConsumer:
  consumer.seek(topic_partition)
  self.logger.info(f"[*] Assigned to topic '{topic}' partition {partition} with offset {offset}")

- def consume(self, num_messages: int = 1, timeout: float = 10.0, decode_utf8: bool = False) -> Iterator[Message | str]:
+ def consume(self, num_messages: int = 1, timeout: float = 1.0, decode_utf8: bool = False) -> Iterator[Message | str]:
  """Consume messages"""
  consumer = self._ensure_consumer()
 
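The same default-timeout change (10.0 s to 1.0 s) lands in the base KafkaConsumer.consume. A small sketch, assuming an already-initialized KafkaConsumer named consumer (hypothetical), with an explicit timeout and decode_utf8:

# Sketch only: `consumer` stands in for an initialized KafkaConsumer.
# Per the signature, decode_utf8=True asks for str payloads rather than
# raw Message objects; the default timeout is now 1.0 s.
for msg in consumer.consume(num_messages=10, timeout=5.0, decode_utf8=True):
    print(msg)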
dgkafka-1.0.0a7.dist-info/METADATA → dgkafka-1.0.0a9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dgkafka
- Version: 1.0.0a7
+ Version: 1.0.0a9
  Summary: Kafka clients
  Home-page: https://gitlab.com/gng-group/dgkafka.git
  Author: Malanris
dgkafka-1.0.0a7.dist-info/RECORD → dgkafka-1.0.0a9.dist-info/RECORD CHANGED
@@ -0,0 +1,13 @@
+ dgkafka/__init__.py,sha256=fnqVZROyHXipdmhqZaa9XUjvQe795JJKFakwTndAiIw,286
+ dgkafka/avro_consumer.py,sha256=By2QWA2BX6ih40x2w3nI1_bv4XtghoVeLcSBNcpJByw,2874
+ dgkafka/avro_producer.py,sha256=51p9GeavU3M21LfQZuidEkhIpzA-KGQOXhB0xMfNUus,4247
+ dgkafka/config.py,sha256=uaqllIJZS98_LXg7HE8KFBySZeLJ7BWw2Vv3aft0rtA,3679
+ dgkafka/consumer.py,sha256=JoaNnAbCbhSjRl4X4ikReC5Ey5XamJS2DRPyYnP_RRM,4426
+ dgkafka/errors.py,sha256=PaH46tXameS--hrrUXKhQkZlBHvMSMPmjhVeRkmFvV0,95
+ dgkafka/json_consumer.py,sha256=7Gzn7C9WpyCTPDV6eFDugAx5gC9vdV-HrTh3Nv--zIE,1152
+ dgkafka/producer.py,sha256=4tKIYUs1zlrie5TOxnL0J38BON2HreOLf2rYW1hVrIs,5085
+ dgkafka-1.0.0a9.dist-info/licenses/LICENSE,sha256=pAZXnNE2dxxwXFIduGyn1gpvPefJtUYOYZOi3yeGG94,1068
+ dgkafka-1.0.0a9.dist-info/METADATA,sha256=oBHVvRoOCDkFENPqaV7aK9LYnpgXKT5O_aP7XQFehYI,6061
+ dgkafka-1.0.0a9.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ dgkafka-1.0.0a9.dist-info/top_level.txt,sha256=GyNrxOh7IPdL0t2SxH8DWxg3fUma-ezQ1Kz4zIr2B7U,8
+ dgkafka-1.0.0a9.dist-info/RECORD,,
@@ -1,13 +0,0 @@
- dgkafka/__init__.py,sha256=fnqVZROyHXipdmhqZaa9XUjvQe795JJKFakwTndAiIw,286
- dgkafka/avro_consumer.py,sha256=Hk1_vexqd5F2g2D0IC2n1HAwXR-cQrbYDsSYr616ASY,2875
- dgkafka/avro_producer.py,sha256=51p9GeavU3M21LfQZuidEkhIpzA-KGQOXhB0xMfNUus,4247
- dgkafka/config.py,sha256=JUoN0euTjPc1upnRkeDM_c3yIomJ0AI4zkcKlQhDYew,4278
- dgkafka/consumer.py,sha256=OSryUsowjpPA4ZbTSs3dwWgIgd2HOAdGeqM-Y1R1XLk,4427
- dgkafka/errors.py,sha256=PaH46tXameS--hrrUXKhQkZlBHvMSMPmjhVeRkmFvV0,95
- dgkafka/json_consumer.py,sha256=7Gzn7C9WpyCTPDV6eFDugAx5gC9vdV-HrTh3Nv--zIE,1152
- dgkafka/producer.py,sha256=4tKIYUs1zlrie5TOxnL0J38BON2HreOLf2rYW1hVrIs,5085
- dgkafka-1.0.0a7.dist-info/licenses/LICENSE,sha256=pAZXnNE2dxxwXFIduGyn1gpvPefJtUYOYZOi3yeGG94,1068
- dgkafka-1.0.0a7.dist-info/METADATA,sha256=Y2vrwwJjreIXFmfutKATCyQx0oocAAH0bpL484l5gr0,6061
- dgkafka-1.0.0a7.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- dgkafka-1.0.0a7.dist-info/top_level.txt,sha256=GyNrxOh7IPdL0t2SxH8DWxg3fUma-ezQ1Kz4zIr2B7U,8
- dgkafka-1.0.0a7.dist-info/RECORD,,