dgkafka 1.0.0a5__tar.gz → 1.0.0a7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dgkafka
3
- Version: 1.0.0a5
3
+ Version: 1.0.0a7
4
4
  Summary: Kafka clients
5
5
  Home-page: https://gitlab.com/gng-group/dgkafka.git
6
6
  Author: Malanris
@@ -0,0 +1,92 @@
1
+ from typing import Literal, Optional
2
+ from pydantic import BaseModel, Field, validator, Json
3
+ from enum import Enum
4
+
5
+
6
# Transport security protocols supported by Kafka brokers.
# Built with the functional Enum API; ``type=str`` gives the same
# ``str`` mixin as ``class SecurityProtocol(str, Enum)``, so members
# serialise transparently wherever a plain protocol string is expected.
SecurityProtocol = Enum(
    "SecurityProtocol",
    {name: name for name in ("PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL")},
    type=str,
)
11
+
12
+
13
class KafkaConfig(BaseModel):
    """Base configuration shared by all Kafka clients.

    Field aliases follow librdkafka's dotted property names so the
    dict returned by :meth:`get` can be fed directly to a
    confluent-kafka client constructor.
    """
    bootstrap_servers: str = Field(..., alias="bootstrap.servers",
                                   description="Comma-separated list of broker addresses")
    security_protocol: SecurityProtocol = Field(default=SecurityProtocol.SSL,
                                                alias="security.protocol")
    ssl_cafile: Optional[str] = Field(default=None, alias="ssl.ca.location")
    ssl_certfile: Optional[str] = Field(default=None, alias="ssl.certificate.location")
    ssl_keyfile: Optional[str] = Field(default=None, alias="ssl.key.location")
    ssl_check_hostname: bool = Field(default=True, alias="enable.ssl.certificate.verification")
    ssl_password: Optional[str] = Field(default=None, alias="ssl.key.password")

    def get(self) -> dict[str, Json]:
        """Return the config as a dict keyed by librdkafka property names.

        ``exclude_none=True`` drops optional fields that were never set:
        librdkafka rejects ``None`` as a property value, so only
        explicitly configured properties are emitted.
        """
        return self.model_dump(by_alias=True, exclude_none=True)

    class Config:
        # ``validate_by_name`` is not a documented pydantic v2 config key
        # (it only appears in pydantic >= 2.11); ``populate_by_name`` is
        # the v2 option that allows construction by field name as well as
        # by alias.
        populate_by_name = True
        use_enum_values = True
        extra = "forbid"
32
+
33
+
34
class ConsumerConfig(KafkaConfig):
    """Configuration for basic Kafka consumer"""
    # Consumer group identifier (librdkafka ``group.id``) — required.
    group_id: str = Field(..., alias="group.id")
    # Auto-commit is disabled by default, so offsets must be committed
    # explicitly by the consumer code.
    enable_auto_commit: bool = Field(default=False, alias="enable.auto.commit")
    # Where to start reading when the group has no committed offset.
    auto_offset_reset: Literal["earliest", "latest"] = Field(default="latest",
                                                             alias="auto.offset.reset")
    session_timeout_ms: int = Field(default=10000, alias="session.timeout.ms")
    # Max time between polls before the broker considers the consumer dead.
    max_poll_interval_ms: int = Field(default=300000, alias="max.poll.interval.ms")
    # 52428800 bytes = 50 MiB cap on a single fetch response.
    fetch_max_bytes: int = Field(default=52428800, alias="fetch.max.bytes")
43
+
44
+
45
class ProducerConfig(KafkaConfig):
    """Configuration for basic Kafka producer.

    Aliased field names map to librdkafka's dotted property names.
    """
    # "-1" restored: it is librdkafka's numeric spelling of "all" and was
    # accepted in 1.0.0a5, so dropping it broke callers passing "-1".
    acks: Literal["all", "-1", "0", "1"] = Field(default="all")
    retries: int = Field(default=0)
    linger_ms: int = Field(default=0, alias="linger.ms")
    compression_type: Literal["none", "gzip", "snappy", "lz4", "zstd"] = Field(
        default="none", alias="compression.type")
    batch_size: int = Field(default=16384, alias="batch.size")
    max_in_flight: int = Field(default=1000000, alias="max.in.flight.requests.per.connection")
54
+
55
+
56
class AvroConsumerConfig(ConsumerConfig):
    """Configuration for Avro consumer with Schema Registry"""
    # NOTE(review): these flat ``schema.registry.*`` keys land in the same
    # dict as broker properties via ``get()`` — confirm the consumer splits
    # them out before handing the rest to librdkafka.
    schema_registry_url: str = Field(..., alias="schema.registry.url")
    schema_registry_ssl_cafile: Optional[str] = Field(default=None,
                                                      alias="schema.registry.ssl.ca.location")
    # Presumably "user:password" credentials for a protected registry.
    schema_registry_basic_auth_user_info: Optional[str] = Field(default=None,
                                                                alias="schema.registry.basic.auth.user.info")
    specific_avro_reader: bool = Field(default=False, alias="specific.avro.reader")
64
+
65
+
66
class AvroProducerConfig(ProducerConfig):
    """Configuration for Avro producer with Schema Registry"""
    # NOTE(review): these flat ``schema.registry.*`` keys land in the same
    # dict as broker properties via ``get()`` — confirm the producer splits
    # them out before handing the rest to librdkafka.
    schema_registry_url: str = Field(..., alias="schema.registry.url")
    schema_registry_ssl_cafile: Optional[str] = Field(default=None,
                                                      alias="schema.registry.ssl.ca.location")
    # Presumably "user:password" credentials for a protected registry.
    schema_registry_basic_auth_user_info: Optional[str] = Field(default=None,
                                                                alias="schema.registry.basic.auth.user.info")
    max_schemas_per_subject: int = Field(default=1000, alias="max.schemas.per.subject")
74
+
75
+
76
class JsonConsumerConfig(ConsumerConfig):
    """Configuration for JSON consumer"""
    # Name/path of a custom deserializer function; stored as a string and
    # not validated here — presumably resolved by the consumer at runtime.
    json_deserializer: Optional[str] = None  # Custom deserializer function
    # Text encoding used to decode message payloads.
    encoding: str = Field(default="utf-8")
80
+
81
+
82
class JsonProducerConfig(ProducerConfig):
    """Configuration for JSON producer"""
    # Name/path of a custom serializer function; stored as a string and
    # not validated here — presumably resolved by the producer at runtime.
    json_serializer: Optional[str] = None  # Custom serializer function
    # Text encoding used to encode message payloads.
    encoding: str = Field(default="utf-8")

    # Fixed decorator order: the stray ``@classmethod`` previously sat
    # ABOVE ``@validator``. Decorators apply bottom-up, so that wrapped the
    # already-constructed validator in a plain classmethod, which pydantic
    # does not recognise as a validator. The ``Literal`` annotation on
    # ``compression_type`` already constrains the value; this validator is
    # kept for its explicit error message and API compatibility.
    @validator("compression_type")
    def validate_compression(cls, v):
        """Reject unsupported compression codecs with a clear message."""
        if v not in ["none", "gzip", "snappy", "lz4", "zstd"]:
            raise ValueError("Unsupported compression type")
        return v
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dgkafka
3
- Version: 1.0.0a5
3
+ Version: 1.0.0a7
4
4
  Summary: Kafka clients
5
5
  Home-page: https://gitlab.com/gng-group/dgkafka.git
6
6
  Author: Malanris
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "dgkafka"
7
- version = "1.0.0a5"
7
+ version = "1.0.0a7"
8
8
  authors = [
9
9
  {name = "Roman Rasputin", email = "admin@roro.su"},
10
10
  ]
@@ -1,92 +0,0 @@
1
- from typing import Literal, Optional
2
- from pydantic import BaseModel, Field, Json
3
-
4
-
5
- class KafkaConfig(BaseModel):
6
- bootstrap_servers: str = Field(..., alias="bootstrap.servers",
7
- description="Comma-separated list of broker addresses")
8
- security_protocol: Optional[str] = Field(default="SSL", alias="security.protocol",
9
- description="Protocol used to communicate with brokers")
10
- ssl_ca_location: Optional[str] = Field(default=None, alias="ssl.ca.location",
11
- description="Path to CA certificate file")
12
- ssl_certificate_location: Optional[str] = Field(default=None, alias="ssl.certificate.location",
13
- description="Path to client certificate file")
14
- ssl_key_location: Optional[str] = Field(default=None, alias="ssl.key.location",
15
- description="Path to client private key file")
16
- ssl_certificate_verification: Optional[bool] = Field(default=True,
17
- alias="enable.ssl.certificate.verification",
18
- description="Enable SSL certificate verification")
19
- ssl_endpoint_identification_algorithm: Optional[str] = Field(default="https",
20
- alias="ssl.endpoint.identification.algorithm",
21
- description="Endpoint identification algorithm")
22
-
23
- def get(self) -> dict[str, Json]:
24
- return self.model_dump(by_alias=True)
25
-
26
- class Config:
27
- validate_by_name = True
28
- extra = "forbid"
29
-
30
-
31
- class ConsumerConfig(KafkaConfig):
32
- group_id: str = Field(..., alias="group.id",
33
- description="Consumer group identifier")
34
- enable_auto_commit: bool = Field(default=False, alias="enable.auto.commit",
35
- description="Automatically commit offsets periodically")
36
- auto_offset_reset: Literal["earliest", "latest"] = Field(default="earliest",
37
- alias="auto.offset.reset",
38
- description="Reset policy when no offset is available")
39
- max_poll_records: Optional[int] = Field(default=500, alias="max.poll.records",
40
- description="Maximum records per poll")
41
- session_timeout_ms: int = Field(default=10000, alias="session.timeout.ms",
42
- description="Timeout for consumer session")
43
-
44
-
45
- class ProducerConfig(KafkaConfig):
46
- acks: Literal["all", "-1", "0", "1"] = Field(default="all",
47
- description="Number of acknowledgments")
48
- retries: int = Field(default=3, description="Number of retries on failure")
49
- linger_ms: int = Field(default=0, alias="linger.ms",
50
- description="Delay in milliseconds to wait for messages in the buffer")
51
- compression_type: Optional[str] = Field(default=None, alias="compression.type",
52
- description="Compression codec to use")
53
- batch_size: int = Field(default=16384, alias="batch.size",
54
- description="Batch size in bytes")
55
-
56
-
57
- class AvroConfigMixin(BaseModel):
58
- schema_registry_url: str = Field(..., alias="schema.registry.url",
59
- description="URL of Schema Registry")
60
- schema_registry_ssl_ca_location: Optional[str] = Field(default=None,
61
- alias="schema.registry.ssl.ca.location",
62
- description="Schema Registry CA certificate path")
63
- auto_register_schemas: bool = Field(default=True, alias="auto.register.schemas",
64
- description="Automatically register schemas")
65
-
66
-
67
- class AvroConsumerConfig(ConsumerConfig, AvroConfigMixin):
68
- use_latest_version: bool = Field(default=True, alias="use.latest.version",
69
- description="Use latest schema version")
70
- skip_known_types: bool = Field(default=False, alias="skip.known.types",
71
- description="Skip known types during deserialization")
72
-
73
-
74
- class AvroProducerConfig(ProducerConfig, AvroConfigMixin):
75
- value_subject_name_strategy: Optional[str] = Field(default=None,
76
- alias="value.subject.name.strategy",
77
- description="Strategy for subject name generation")
78
- key_subject_name_strategy: Optional[str] = Field(default=None,
79
- alias="key.subject.name.strategy",
80
- description="Strategy for key subject name generation")
81
-
82
-
83
- class JsonConsumerConfig(ConsumerConfig):
84
- json_deserializer: Optional[str] = Field(default=None,
85
- description="Custom JSON deserializer function")
86
- encoding: str = Field(default="utf-8", description="Message encoding")
87
-
88
-
89
- class JsonProducerConfig(ProducerConfig):
90
- json_serializer: Optional[str] = Field(default=None,
91
- description="Custom JSON serializer function")
92
- encoding: str = Field(default="utf-8", description="Message encoding")
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes