aws-lambda-powertools 3.15.0__py3-none-any.whl → 3.15.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

aws_lambda_powertools/shared/version.py
@@ -1,3 +1,3 @@
  """Exposes version constant to avoid circular dependencies."""

- VERSION = "3.15.0"
+ VERSION = "3.15.1"

aws_lambda_powertools/utilities/kafka/consumer_records.py
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ import logging
  from functools import cached_property
  from typing import TYPE_CHECKING, Any

@@ -13,6 +14,8 @@ if TYPE_CHECKING:

  from aws_lambda_powertools.utilities.kafka.schema_config import SchemaConfig

+ logger = logging.getLogger(__name__)
+

  class ConsumerRecordRecords(KafkaEventRecordBase):
  """
@@ -31,18 +34,24 @@ class ConsumerRecordRecords(KafkaEventRecordBase):
  if not key:
  return None

+ logger.debug("Deserializing key field")
+
  # Determine schema type and schema string
  schema_type = None
- schema_str = None
+ schema_value = None
  output_serializer = None

  if self.schema_config and self.schema_config.key_schema_type:
  schema_type = self.schema_config.key_schema_type
- schema_str = self.schema_config.key_schema
+ schema_value = self.schema_config.key_schema
  output_serializer = self.schema_config.key_output_serializer

  # Always use get_deserializer if None it will default to DEFAULT
- deserializer = get_deserializer(schema_type, schema_str)
+ deserializer = get_deserializer(
+ schema_type=schema_type,
+ schema_value=schema_value,
+ field_metadata=self.key_schema_metadata,
+ )
  deserialized_value = deserializer.deserialize(key)

  # Apply output serializer if specified
@@ -57,16 +66,22 @@ class ConsumerRecordRecords(KafkaEventRecordBase):

  # Determine schema type and schema string
  schema_type = None
- schema_str = None
+ schema_value = None
  output_serializer = None

+ logger.debug("Deserializing value field")
+
  if self.schema_config and self.schema_config.value_schema_type:
  schema_type = self.schema_config.value_schema_type
- schema_str = self.schema_config.value_schema
+ schema_value = self.schema_config.value_schema
  output_serializer = self.schema_config.value_output_serializer

  # Always use get_deserializer if None it will default to DEFAULT
- deserializer = get_deserializer(schema_type, schema_str)
+ deserializer = get_deserializer(
+ schema_type=schema_type,
+ schema_value=schema_value,
+ field_metadata=self.value_schema_metadata,
+ )
  deserialized_value = deserializer.deserialize(value)

  # Apply output serializer if specified

aws_lambda_powertools/utilities/kafka/deserializer/avro.py
@@ -1,6 +1,8 @@
  from __future__ import annotations

  import io
+ import logging
+ from typing import Any

  from avro.io import BinaryDecoder, DatumReader
  from avro.schema import parse as parse_schema
@@ -9,8 +11,11 @@ from aws_lambda_powertools.utilities.kafka.deserializer.base import Deserializer
  from aws_lambda_powertools.utilities.kafka.exceptions import (
  KafkaConsumerAvroSchemaParserError,
  KafkaConsumerDeserializationError,
+ KafkaConsumerDeserializationFormatMismatch,
  )

+ logger = logging.getLogger(__name__)
+

  class AvroDeserializer(DeserializerBase):
  """
@@ -20,10 +25,11 @@ class AvroDeserializer(DeserializerBase):
  a provided Avro schema definition.
  """

- def __init__(self, schema_str: str):
+ def __init__(self, schema_str: str, field_metadata: dict[str, Any] | None = None):
  try:
  self.parsed_schema = parse_schema(schema_str)
  self.reader = DatumReader(self.parsed_schema)
+ self.field_metatada = field_metadata
  except Exception as e:
  raise KafkaConsumerAvroSchemaParserError(
  f"Invalid Avro schema. Please ensure the provided avro schema is valid: {type(e).__name__}: {str(e)}",
@@ -60,6 +66,13 @@ class AvroDeserializer(DeserializerBase):
  ... except KafkaConsumerDeserializationError as e:
  ... print(f"Failed to deserialize: {e}")
  """
+ data_format = self.field_metatada.get("dataFormat") if self.field_metatada else None
+
+ if data_format and data_format != "AVRO":
+ raise KafkaConsumerDeserializationFormatMismatch(f"Expected data is AVRO but you sent {data_format}")
+
+ logger.debug("Deserializing data with AVRO format")
+
  try:
  value = self._decode_input(data)
  bytes_reader = io.BytesIO(value)
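
The hunk above adds a format guard: when the field metadata reports a dataFormat other than AVRO, the new KafkaConsumerDeserializationFormatMismatch exception is raised before any Avro decoding happens. A minimal sketch of that behaviour, using an illustrative schema and payload that are not taken from the package:

    from aws_lambda_powertools.utilities.kafka.deserializer.avro import AvroDeserializer
    from aws_lambda_powertools.utilities.kafka.exceptions import KafkaConsumerDeserializationFormatMismatch

    # Hypothetical Avro record schema; any valid schema string would do.
    schema = '{"type": "record", "name": "User", "fields": [{"name": "name", "type": "string"}]}'

    # Metadata declaring JSON while this deserializer expects AVRO trips the guard.
    deserializer = AvroDeserializer(schema_str=schema, field_metadata={"dataFormat": "JSON"})
    try:
        deserializer.deserialize(b"")  # the payload is never decoded; the guard fires first
    except KafkaConsumerDeserializationFormatMismatch as exc:
        print(f"Format mismatch: {exc}")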

aws_lambda_powertools/utilities/kafka/deserializer/default.py
@@ -1,9 +1,12 @@
  from __future__ import annotations

  import base64
+ import logging

  from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase

+ logger = logging.getLogger(__name__)
+

  class DefaultDeserializer(DeserializerBase):
  """
@@ -43,4 +46,5 @@ class DefaultDeserializer(DeserializerBase):
  >>> result = deserializer.deserialize(bytes_data)
  >>> print(result == bytes_data) # Output: True
  """
+ logger.debug("Deserializing data with primitives types")
  return base64.b64decode(data).decode("utf-8")
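
When no schema type is configured, the factory falls back to this default deserializer, which, as the hunk above shows, only base64-decodes the payload into a UTF-8 string. A small sketch, assuming DefaultDeserializer takes no constructor arguments:

    import base64

    from aws_lambda_powertools.utilities.kafka.deserializer.default import DefaultDeserializer

    payload = base64.b64encode(b"hello kafka").decode()  # Kafka event payloads arrive base64-encoded
    print(DefaultDeserializer().deserialize(payload))    # -> "hello kafka"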

aws_lambda_powertools/utilities/kafka/deserializer/deserializer.py
@@ -13,21 +13,27 @@ if TYPE_CHECKING:
  _deserializer_cache: dict[str, DeserializerBase] = {}


- def _get_cache_key(schema_type: str | object, schema_value: Any) -> str:
+ def _get_cache_key(schema_type: str | object, schema_value: Any, field_metadata: dict[str, Any]) -> str:
+ schema_metadata = None
+
+ if field_metadata:
+ schema_metadata = field_metadata.get("schemaId")
+
  if schema_value is None:
- return str(schema_type)
+ schema_hash = f"{str(schema_type)}_{schema_metadata}"

  if isinstance(schema_value, str):
+ hashable_value = f"{schema_value}_{schema_metadata}"
  # For string schemas like Avro, hash the content
- schema_hash = hashlib.md5(schema_value.encode("utf-8"), usedforsecurity=False).hexdigest()
+ schema_hash = hashlib.md5(hashable_value.encode("utf-8"), usedforsecurity=False).hexdigest()
  else:
  # For objects like Protobuf, use the object id
- schema_hash = str(id(schema_value))
+ schema_hash = f"{str(id(schema_value))}_{schema_metadata}"

  return f"{schema_type}_{schema_hash}"


- def get_deserializer(schema_type: str | object, schema_value: Any) -> DeserializerBase:
+ def get_deserializer(schema_type: str | object, schema_value: Any, field_metadata: Any) -> DeserializerBase:
  """
  Factory function to get the appropriate deserializer based on schema type.

@@ -75,7 +81,7 @@ def get_deserializer(schema_type: str | object, schema_value: Any) -> Deserializ
  """

  # Generate a cache key based on schema type and value
- cache_key = _get_cache_key(schema_type, schema_value)
+ cache_key = _get_cache_key(schema_type, schema_value, field_metadata)

  # Check if we already have this deserializer in cache
  if cache_key in _deserializer_cache:
@@ -87,14 +93,14 @@ def get_deserializer(schema_type: str | object, schema_value: Any) -> Deserializ
  # Import here to avoid dependency if not used
  from aws_lambda_powertools.utilities.kafka.deserializer.avro import AvroDeserializer

- deserializer = AvroDeserializer(schema_value)
+ deserializer = AvroDeserializer(schema_str=schema_value, field_metadata=field_metadata)
  elif schema_type == "PROTOBUF":
  # Import here to avoid dependency if not used
  from aws_lambda_powertools.utilities.kafka.deserializer.protobuf import ProtobufDeserializer

- deserializer = ProtobufDeserializer(schema_value)
+ deserializer = ProtobufDeserializer(message_class=schema_value, field_metadata=field_metadata)
  elif schema_type == "JSON":
- deserializer = JsonDeserializer()
+ deserializer = JsonDeserializer(field_metadata=field_metadata)

  else:
  # Default to no-op deserializer
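
Both _get_cache_key and get_deserializer now receive the field metadata, so the schemaId becomes part of the cache key and the metadata is forwarded to the concrete deserializers. A hedged usage sketch, assuming the factory lives in the deserializer module listed in the RECORD further down and that the metadata dict carries the dataFormat and schemaId keys the code reads:

    from aws_lambda_powertools.utilities.kafka.deserializer.deserializer import get_deserializer

    # Hypothetical metadata as it would arrive with a Kafka event field
    field_metadata = {"dataFormat": "JSON", "schemaId": "123"}

    deserializer = get_deserializer(schema_type="JSON", schema_value=None, field_metadata=field_metadata)

    # base64 of '{"key": "value"}'; JSON payloads are base64-decoded, then parsed
    print(deserializer.deserialize("eyJrZXkiOiAidmFsdWUifQ=="))  # -> {'key': 'value'}

Because the schemaId is folded into the cache key, the same schema type used with a different schemaId no longer reuses a cached deserializer instance.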

aws_lambda_powertools/utilities/kafka/deserializer/json.py
@@ -2,9 +2,16 @@ from __future__ import annotations

  import base64
  import json
+ import logging
+ from typing import Any

  from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase
- from aws_lambda_powertools.utilities.kafka.exceptions import KafkaConsumerDeserializationError
+ from aws_lambda_powertools.utilities.kafka.exceptions import (
+ KafkaConsumerDeserializationError,
+ KafkaConsumerDeserializationFormatMismatch,
+ )
+
+ logger = logging.getLogger(__name__)


  class JsonDeserializer(DeserializerBase):
@@ -15,6 +22,9 @@ class JsonDeserializer(DeserializerBase):
  into Python dictionaries.
  """

+ def __init__(self, field_metadata: dict[str, Any] | None = None):
+ self.field_metatada = field_metadata
+
  def deserialize(self, data: bytes | str) -> dict:
  """
  Deserialize JSON data to a Python dictionary.
@@ -45,6 +55,14 @@ class JsonDeserializer(DeserializerBase):
  ... except KafkaConsumerDeserializationError as e:
  ... print(f"Failed to deserialize: {e}")
  """
+
+ data_format = self.field_metatada.get("dataFormat") if self.field_metatada else None
+
+ if data_format and data_format != "JSON":
+ raise KafkaConsumerDeserializationFormatMismatch(f"Expected data is JSON but you sent {data_format}")
+
+ logger.debug("Deserializing data with JSON format")
+
  try:
  return json.loads(base64.b64decode(data).decode("utf-8"))
  except Exception as e:

aws_lambda_powertools/utilities/kafka/deserializer/protobuf.py
@@ -1,15 +1,19 @@
  from __future__ import annotations

+ import logging
  from typing import Any

- from google.protobuf.internal.decoder import _DecodeVarint # type: ignore[attr-defined]
+ from google.protobuf.internal.decoder import _DecodeSignedVarint # type: ignore[attr-defined]
  from google.protobuf.json_format import MessageToDict

  from aws_lambda_powertools.utilities.kafka.deserializer.base import DeserializerBase
  from aws_lambda_powertools.utilities.kafka.exceptions import (
  KafkaConsumerDeserializationError,
+ KafkaConsumerDeserializationFormatMismatch,
  )

+ logger = logging.getLogger(__name__)
+

  class ProtobufDeserializer(DeserializerBase):
  """
@@ -19,8 +23,9 @@ class ProtobufDeserializer(DeserializerBase):
  into Python dictionaries using the provided Protocol Buffer message class.
  """

- def __init__(self, message_class: Any):
+ def __init__(self, message_class: Any, field_metadata: dict[str, Any] | None = None):
  self.message_class = message_class
+ self.field_metatada = field_metadata

  def deserialize(self, data: bytes | str) -> dict:
  """
@@ -61,57 +66,56 @@ class ProtobufDeserializer(DeserializerBase):
  ... except KafkaConsumerDeserializationError as e:
  ... print(f"Failed to deserialize: {e}")
  """
- value = self._decode_input(data)
- try:
- message = self.message_class()
- message.ParseFromString(value)
- return MessageToDict(message, preserving_proto_field_name=True)
- except Exception:
- return self._deserialize_with_message_index(value, self.message_class())

- def _deserialize_with_message_index(self, data: bytes, parser: Any) -> dict:
- """
- Deserialize protobuf message with Confluent message index handling.
+ data_format = self.field_metatada.get("dataFormat") if self.field_metatada else None
+ schema_id = self.field_metatada.get("schemaId") if self.field_metatada else None

- Parameters
- ----------
- data : bytes
- data
- parser : google.protobuf.message.Message
- Protobuf message instance to parse the data into
+ if data_format and data_format != "PROTOBUF":
+ raise KafkaConsumerDeserializationFormatMismatch(f"Expected data is PROTOBUF but you sent {data_format}")

- Returns
- -------
- dict
- Dictionary representation of the parsed protobuf message with original field names
+ logger.debug("Deserializing data with PROTOBUF format")

- Raises
- ------
- KafkaConsumerDeserializationError
- If deserialization fails
+ try:
+ value = self._decode_input(data)
+ message = self.message_class()
+ if schema_id is None:
+ logger.debug("Plain PROTOBUF data: using default deserializer")
+ # Plain protobuf - direct parser
+ message.ParseFromString(value)
+ elif len(schema_id) > 20:
+ logger.debug("PROTOBUF data integrated with Glue SchemaRegistry: using Glue deserializer")
+ # Glue schema registry integration - remove the first byte
+ message.ParseFromString(value[1:])
+ else:
+ logger.debug("PROTOBUF data integrated with Confluent SchemaRegistry: using Confluent deserializer")
+ # Confluent schema registry integration - remove message index list
+ message.ParseFromString(self._remove_message_index(value))

- Notes
- -----
- This method handles the special case of Confluent Schema Registry's message index
- format, where the message is prefixed with either a single 0 (for the first schema)
- or a list of schema indexes. The actual protobuf message follows these indexes.
- """
+ return MessageToDict(message, preserving_proto_field_name=True)
+ except Exception as e:
+ raise KafkaConsumerDeserializationError(
+ f"Error trying to deserialize protobuf data - {type(e).__name__}: {str(e)}",
+ ) from e

+ def _remove_message_index(self, data):
+ """
+ Identifies and removes Confluent Schema Registry MessageIndex from bytes.
+ Returns pure protobuf bytes.
+ """
  buffer = memoryview(data)
  pos = 0

- try:
- first_value, new_pos = _DecodeVarint(buffer, pos)
- pos = new_pos
+ logger.debug("Removing message list bytes")

- if first_value != 0:
- for _ in range(first_value):
- _, new_pos = _DecodeVarint(buffer, pos)
- pos = new_pos
+ # Read first varint (index count or 0)
+ first_value, new_pos = _DecodeSignedVarint(buffer, pos)
+ pos = new_pos

- parser.ParseFromString(data[pos:])
- return MessageToDict(parser, preserving_proto_field_name=True)
- except Exception as e:
- raise KafkaConsumerDeserializationError(
- f"Error trying to deserialize protobuf data - {type(e).__name__}: {str(e)}",
- ) from e
+ # Skip index values if present
+ if first_value != 0:
+ for _ in range(first_value):
+ _, new_pos = _DecodeSignedVarint(buffer, pos)
+ pos = new_pos
+
+ # Return remaining bytes (pure protobuf)
+ return data[pos:]
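
The rewritten deserialize() picks a parsing strategy from the shape of the schemaId: no ID means plain protobuf, an ID longer than 20 characters is treated as a Glue Schema Registry UUID (strip one leading byte), and anything shorter as a Confluent Schema Registry ID (strip the message-index varints). A standalone sketch of just that branching, with hypothetical IDs:

    def registry_flavor(schema_id: str | None) -> str:
        """Mirror the branch ProtobufDeserializer.deserialize() takes for each schemaId shape."""
        if schema_id is None:
            return "plain protobuf"          # message.ParseFromString(value)
        if len(schema_id) > 20:
            return "glue schema registry"    # message.ParseFromString(value[1:])
        return "confluent schema registry"   # message.ParseFromString(self._remove_message_index(value))

    print(registry_flavor(None))                                    # plain protobuf
    print(registry_flavor("b7b4a7f0-1c2d-4e5f-9a8b-1234567890ab"))  # hypothetical Glue UUID
    print(registry_flavor("1234"))                                  # hypothetical Confluent ID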

aws_lambda_powertools/utilities/kafka/exceptions.py
@@ -4,6 +4,12 @@ class KafkaConsumerAvroSchemaParserError(Exception):
  """


+ class KafkaConsumerDeserializationFormatMismatch(Exception):
+ """
+ Error raised when deserialization format is incompatible
+ """
+
+
  class KafkaConsumerDeserializationError(Exception):
  """
  Error raised when message deserialization fails.

aws_lambda_powertools/utilities/kafka/serialization/custom_dict.py
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ import logging
  from typing import TYPE_CHECKING, Any

  from aws_lambda_powertools.utilities.kafka.serialization.base import OutputSerializerBase
@@ -9,6 +10,8 @@ if TYPE_CHECKING:

  from aws_lambda_powertools.utilities.kafka.serialization.types import T

+ logger = logging.getLogger(__name__)
+

  class CustomDictOutputSerializer(OutputSerializerBase):
  """
@@ -19,4 +22,5 @@ class CustomDictOutputSerializer(OutputSerializerBase):
  """

  def serialize(self, data: dict[str, Any], output: type[T] | Callable | None = None) -> T | dict[str, Any]:
+ logger.debug("Serializing output data with CustomDictOutputSerializer")
  return data if output is None else output(data) # type: ignore[call-arg]

aws_lambda_powertools/utilities/kafka/serialization/dataclass.py
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ import logging
  from dataclasses import is_dataclass
  from typing import TYPE_CHECKING, Any, cast

@@ -9,6 +10,8 @@ from aws_lambda_powertools.utilities.kafka.serialization.types import T
  if TYPE_CHECKING:
  from collections.abc import Callable

+ logger = logging.getLogger(__name__)
+

  class DataclassOutputSerializer(OutputSerializerBase):
  """
@@ -22,4 +25,6 @@ class DataclassOutputSerializer(OutputSerializerBase):
  if not is_dataclass(output): # pragma: no cover
  raise ValueError("Output class must be a dataclass")

+ logger.debug("Serializing output data with DataclassOutputSerializer")
+
  return cast(T, output(**data))
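
The serialization hunks in this release only add debug logging; the behaviour is unchanged. As a reminder of what DataclassOutputSerializer does with already-deserialized data, a sketch that assumes the serializer can be instantiated without arguments:

    from dataclasses import dataclass

    from aws_lambda_powertools.utilities.kafka.serialization.dataclass import DataclassOutputSerializer

    @dataclass
    class Order:  # hypothetical output type
        order_id: str
        amount: int

    order = DataclassOutputSerializer().serialize({"order_id": "A-1", "amount": 3}, output=Order)
    print(order)  # Order(order_id='A-1', amount=3)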

aws_lambda_powertools/utilities/kafka/serialization/pydantic.py
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ import logging
  from typing import TYPE_CHECKING, Any

  from pydantic import TypeAdapter
@@ -11,6 +12,8 @@ if TYPE_CHECKING:

  from aws_lambda_powertools.utilities.kafka.serialization.types import T

+ logger = logging.getLogger(__name__)
+

  class PydanticOutputSerializer(OutputSerializerBase):
  """
@@ -21,6 +24,7 @@ class PydanticOutputSerializer(OutputSerializerBase):
  """

  def serialize(self, data: dict[str, Any], output: type[T] | Callable | None = None) -> T | dict[str, Any]:
+ logger.debug("Serializing output data with PydanticOutputSerializer")
  # Use TypeAdapter for better support of Union types and other complex types
  adapter: TypeAdapter = TypeAdapter(output)
  return adapter.validate_python(data)

aws_lambda_powertools-3.15.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: aws_lambda_powertools
- Version: 3.15.0
+ Version: 3.15.1
  Summary: Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity.
  License: MIT
  Keywords: aws_lambda_powertools,aws,tracing,logging,lambda,powertools,feature_flags,idempotency,middleware

aws_lambda_powertools-3.15.1.dist-info/RECORD
@@ -98,7 +98,7 @@ aws_lambda_powertools/shared/json_encoder.py,sha256=JQeWNu-4M7_xI_hqYExrxsb3OcEH
  aws_lambda_powertools/shared/lazy_import.py,sha256=TbXQm2bcwXdZrYdBaJJXIswyLlumM85RJ_A_0w-h-GU,2019
  aws_lambda_powertools/shared/types.py,sha256=EZ_tbX3F98LA4Zcra1hTEjzRacpZAtggK957Zcv1oKg,135
  aws_lambda_powertools/shared/user_agent.py,sha256=DrCMFQuT4a4iIrpcWpAIjY37EFqR9-QxlxDGD-Nn9Gg,7081
- aws_lambda_powertools/shared/version.py,sha256=and7WBc_Ox1Z0FcIGlQjGhuFeAT1WVEaKytTKGK0tuk,83
+ aws_lambda_powertools/shared/version.py,sha256=6rtvj2LhPP0CTjGnsLub8sUdIv49vsogeMeVJfp1Wnw,83
  aws_lambda_powertools/tracing/__init__.py,sha256=f4bMThOPBPWTPVcYqcAIErAJPerMsf3H_Z4gCXCsK9I,141
  aws_lambda_powertools/tracing/base.py,sha256=WSO986XGBOe9K0F2SnG6ustJokIrtO0m0mcL8N7mfno,4544
  aws_lambda_powertools/tracing/extensions.py,sha256=APOfXOq-hRBKaK5WyfIyrd_6M1_9SWJZ3zxLA9jDZzU,492
@@ -191,22 +191,22 @@ aws_lambda_powertools/utilities/idempotency/serialization/pydantic.py,sha256=NVK
  aws_lambda_powertools/utilities/jmespath_utils/__init__.py,sha256=Br89UButW4sLv2Dkjz_MiPS0TpMEPOO-W4wW0n9quPc,3597
  aws_lambda_powertools/utilities/jmespath_utils/envelopes.py,sha256=jZJYbUldrZgCWl-PL8oRmC9p6G6D-3812kJmJfLkM6Q,817
  aws_lambda_powertools/utilities/kafka/__init__.py,sha256=PUx4xgcL7td8fSfumD6INCdmGGIy7E11eqHk0w4XiEU,320
- aws_lambda_powertools/utilities/kafka/consumer_records.py,sha256=1S6mRcKeqPQtiRyDJIL308TQ9dEMdv13JdBFCwRVVmo,4825
+ aws_lambda_powertools/utilities/kafka/consumer_records.py,sha256=9v6lHETHJp5tnVcUfkI5qxlBIcij4LIp1idddklucOc,5218
  aws_lambda_powertools/utilities/kafka/deserializer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- aws_lambda_powertools/utilities/kafka/deserializer/avro.py,sha256=G2pr5GLOsurhRCcMNCpd1VFcIh79LSYWDqg4_qVN0-g,2401
+ aws_lambda_powertools/utilities/kafka/deserializer/avro.py,sha256=YeRrnY55tw9CM2yZnmQgt6gj1AueSAlcWd6PM4ND0ps,2940
  aws_lambda_powertools/utilities/kafka/deserializer/base.py,sha256=-bOPbZADsKK-HuQupZy54csY74rvIXVIQiFxGHxjTww,1549
- aws_lambda_powertools/utilities/kafka/deserializer/default.py,sha256=GhAUfZO3rcasy_ZiKEv3IOFX_VNDTA61zb2eo0HAIcQ,1416
- aws_lambda_powertools/utilities/kafka/deserializer/deserializer.py,sha256=Wh4-srDUFGBUZ0kXiiU-R8DznumOKdLPk9Va88LT7zE,3733
- aws_lambda_powertools/utilities/kafka/deserializer/json.py,sha256=2_helv8DyUoK60z3wflLd1BwkuRINaVjucyYie10qUM,1742
- aws_lambda_powertools/utilities/kafka/deserializer/protobuf.py,sha256=WEaJysV__sZy32HNpqZZ6KrZyxyvsEnxf24On36zeXY,4183
- aws_lambda_powertools/utilities/kafka/exceptions.py,sha256=kwNfAcYmiTgk19QEdN6oHtMc2dDIZh64uWr_d5X3FFc,495
+ aws_lambda_powertools/utilities/kafka/deserializer/default.py,sha256=oFg7ZEQtyI-M4HfswOmqT_qPnmLKCxL3gF5mRogcxkI,1534
+ aws_lambda_powertools/utilities/kafka/deserializer/deserializer.py,sha256=c82bDMwqFfMgIMGpryoruCSWluQiLV2roLP1fy-qAuU,4143
+ aws_lambda_powertools/utilities/kafka/deserializer/json.py,sha256=La4BcF03-2PWWfL5kejPn3F7WD7oq80bSsYtyN_bu98,2312
+ aws_lambda_powertools/utilities/kafka/deserializer/protobuf.py,sha256=yYOJ3ZhR5XaxHO25XaPllrOFgIyDfNHfc2prgAFxcNU,4763
+ aws_lambda_powertools/utilities/kafka/exceptions.py,sha256=Lwsl8iR8A-DlleJ28OzZ7tkeUGQMd7P69hYW_ioEtqY,635
  aws_lambda_powertools/utilities/kafka/kafka_consumer.py,sha256=x1RcOWJcG74JvzrahO4cSSrAuSZFUlMtg14QjAjtJjY,2069
  aws_lambda_powertools/utilities/kafka/schema_config.py,sha256=GaSiwKWiUEQqrdBZWMaGl_DQcqpyplohoCMgMrCOyGI,3268
  aws_lambda_powertools/utilities/kafka/serialization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  aws_lambda_powertools/utilities/kafka/serialization/base.py,sha256=z3Z_CbmBbrKDfzZjyvWkhdhnmgpR4CznefweATh-0_8,1904
- aws_lambda_powertools/utilities/kafka/serialization/custom_dict.py,sha256=ZEIiPy9HxdImeCh0ljzWPFw93SD1ktsJpIKqD2_uzfg,789
- aws_lambda_powertools/utilities/kafka/serialization/dataclass.py,sha256=uWqi8K0Hcn-ET-RcJwwn4HyOCtu_Oq6oTZxXtIW8xBE,871
- aws_lambda_powertools/utilities/kafka/serialization/pydantic.py,sha256=2Uuv6b20bsYVOY5gl8tcVHVVi3krCquEzPVuzsO5LeE,933
+ aws_lambda_powertools/utilities/kafka/serialization/custom_dict.py,sha256=RlYO6Ca6VddIeQ77KIi2Y7uj9drBvMutUm-VUuYv0ww,922
+ aws_lambda_powertools/utilities/kafka/serialization/dataclass.py,sha256=KOsrKIQs-6smn1CmbOPzBRIydabWn42VkB3_sER46pE,1004
+ aws_lambda_powertools/utilities/kafka/serialization/pydantic.py,sha256=BTP11dAfzCre01rQBYBLI9kn1-FjxdZGRRLhapklQUI,1064
  aws_lambda_powertools/utilities/kafka/serialization/serialization.py,sha256=XqLLZcHFea0jhtznNrrZeK5vW1arEBu8a0jwziJC-jY,2312
  aws_lambda_powertools/utilities/kafka/serialization/types.py,sha256=zClRo5ve8RGia7wQnby41W-Zprj-slOA5da1LfYnuhw,45
  aws_lambda_powertools/utilities/parameters/__init__.py,sha256=KVJWu7pyunw9to8VkTZ0fy6MCR9iW0tUCjAJVqgdwdw,771
@@ -290,7 +290,7 @@ aws_lambda_powertools/utilities/validation/envelopes.py,sha256=YD5HOFx6IClQgii0n
  aws_lambda_powertools/utilities/validation/exceptions.py,sha256=PKy_19zQMBJGCMMFl-sMkcm-cc0v3zZBn_bhGE4wKNo,2084
  aws_lambda_powertools/utilities/validation/validator.py,sha256=khCqFhACSdn0nKyYRRPiC5Exht956hTfSfhlV3IRmpg,10099
  aws_lambda_powertools/warnings/__init__.py,sha256=vqDVeZz8wGtD8WGYNSkQE7AHwqtIrPGRxuoJR_BBnSs,1193
- aws_lambda_powertools-3.15.0.dist-info/LICENSE,sha256=vMHS2eBgmwPUIMPb7LQ4p7ib_FPVQXarVjAasflrTwo,951
- aws_lambda_powertools-3.15.0.dist-info/METADATA,sha256=10eOohXPQlEeOM8QpLP-2ZyvAuukPgXLD5JJgtY-PI0,11528
- aws_lambda_powertools-3.15.0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
- aws_lambda_powertools-3.15.0.dist-info/RECORD,,
+ aws_lambda_powertools-3.15.1.dist-info/LICENSE,sha256=vMHS2eBgmwPUIMPb7LQ4p7ib_FPVQXarVjAasflrTwo,951
+ aws_lambda_powertools-3.15.1.dist-info/METADATA,sha256=lMRG2jtXHeR08xxirRjwbDG4kQlb40Z01pqWvSEFAks,11528
+ aws_lambda_powertools-3.15.1.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+ aws_lambda_powertools-3.15.1.dist-info/RECORD,,