turbo-lambda 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,13 @@
1
+ Metadata-Version: 2.3
2
+ Name: turbo-lambda
3
+ Version: 0.1.0
4
+ Summary: Turbo Lambda Description
5
+ Author: Sam Mosleh
6
+ Author-email: Sam Mosleh <sam.mosleh.d@gmail.com>
7
+ Requires-Dist: orjson>=3.11.3
8
+ Requires-Dist: pydantic-settings>=2.11.0
9
+ Requires-Python: >=3.12
10
+ Description-Content-Type: text/markdown
11
+
12
+ # turbo-lambda
13
+ Turbo Lambda Library
@@ -0,0 +1,2 @@
1
+ # turbo-lambda
2
+ Turbo Lambda Library
@@ -0,0 +1,84 @@
1
+ [project]
2
+ name = "turbo-lambda"
3
+ version = "0.1.0"
4
+ description = "Turbo Lambda Description"
5
+ readme = "README.md"
6
+ authors = [{ name = "Sam Mosleh", email = "sam.mosleh.d@gmail.com" }]
7
+ requires-python = ">=3.12"
8
+ dependencies = [
9
+ "orjson>=3.11.3",
10
+ "pydantic-settings>=2.11.0",
11
+ ]
12
+
13
+ [dependency-groups]
14
+ dev = [
15
+ "coverage>=7.11.0",
16
+ "ipykernel>=7.0.1",
17
+ "mypy>=1.18.2",
18
+ "pytest>=8.4.2",
19
+ ]
20
+
21
+ [build-system]
22
+ requires = ["uv_build>=0.9.2,<0.10.0"]
23
+ build-backend = "uv_build"
24
+
25
+ [tool.mypy]
26
+ plugins = ['pydantic.mypy']
27
+
28
+ [tool.pytest.ini_options]
29
+ xfail_strict = true
30
+ testpaths = "tests"
31
+ # asyncio_mode = "auto"
32
+ addopts = ["--strict-config", "--strict-markers"]
33
+ markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"]
34
+ filterwarnings = ["error"]
35
+
36
+ [tool.coverage.run]
37
+ branch = true
38
+ parallel = true
39
+ relative_files = true
40
+ source = ["src", "tests"]
41
+
42
+ [tool.coverage.report]
43
+ show_missing = true
44
+ # skip_covered = true
45
+ partial_branches = [
46
+ "# pragma: no cover\\b",
47
+ "# pragma: (nt|posix|cygwin|darwin|linux|msys|win32|cpython|pypy) (no )?cover\\b",
48
+ "# pragma: (>=?|<=?|==|!=)\\d+\\.\\d+ cover\\b",
49
+ ]
50
+ exclude_also = ['case _:\n\s*assert_never\(.*\)']
51
+
52
+ [tool.ruff.lint]
53
+ select = [
54
+ "E", # pycodestyle errors
55
+ "W", # pycodestyle warnings
56
+ "F", # pyflakes
57
+ "PL", # pylint
58
+ "I", # isort
59
+ "C", # flake8-comprehensions
60
+ "B", # flake8-bugbear
61
+ "Q", # flake8-quotes
62
+ "T20", # flake8-print
63
+ "S", # flake8-bandit
64
+ "N", # pep8-naming
65
+ "UP", # pyupgrade
66
+ "RUF", # ruff
67
+ ]
68
+ ignore = [
69
+ "E501", # line too long, handled by formatter
70
+ ]
71
+
72
+ [tool.ruff.lint.per-file-ignores]
73
+ "tests/*" = ["S101", "S311"]
74
+
75
+ [tool.semantic_release]
76
+ commit_parser = "emoji"
77
+ commit_message = "🔖 {version}\n\nAutomatically generated by python-semantic-release"
78
+ version_toml = ["pyproject.toml:project.version"]
79
+ allow_zero_version = true
80
+
81
+ [tool.semantic_release.commit_parser_options]
82
+ major_tags = ["💥"]
83
+ minor_tags = ["✨"]
84
+ patch_tags = ["🚑️", "🔒️", "🐛", "⚡️"]
File without changes
@@ -0,0 +1,221 @@
1
+ import inspect
2
+ import logging
3
+ from collections.abc import Callable
4
+ from concurrent.futures import ThreadPoolExecutor
5
+ from contextlib import AbstractContextManager
6
+ from functools import wraps
7
+ from types import TracebackType
8
+ from typing import Any, Protocol, overload
9
+
10
+ import pydantic
11
+
12
+ from turbo_lambda import schemas
13
+ from turbo_lambda.errors import (
14
+ RequestValidationError,
15
+ general_error_to_gateway_response,
16
+ )
17
+ from turbo_lambda.log import log_after_call, logger, logger_bind
18
+
19
+
20
class LambdaHandlerT[ResponseT](Protocol):
    """Structural type of an AWS Lambda entry point.

    Matches any callable taking the raw event dict and the Lambda context
    object and returning ``ResponseT``.
    """

    def __call__(
        self, event: schemas.EventType, context: schemas.LambdaContextProtocol
    ) -> ResponseT: ...
24
+
25
+
26
class ModelDumpProtocol[DumpOutput](Protocol):
    """Structural type for objects exposing a pydantic-style ``model_dump``."""

    def model_dump(self) -> DumpOutput: ...
28
+
29
+
30
@overload
def validated_handler[RequestT: pydantic.BaseModel, DumpOutput](
    func: Callable[[RequestT], ModelDumpProtocol[DumpOutput]],
) -> LambdaHandlerT[DumpOutput]: ...


@overload
def validated_handler[RequestT: pydantic.BaseModel, ResponseT](
    func: Callable[[RequestT], ResponseT],
) -> LambdaHandlerT[ResponseT]: ...


def validated_handler[RequestT: pydantic.BaseModel, ResponseT](
    func: Callable[[RequestT], ResponseT],
) -> LambdaHandlerT[Any]:
    """Wrap ``func`` so it can serve directly as a Lambda handler.

    The wrapped function's first parameter annotation is used to validate
    the incoming event with pydantic; its return annotation is used to
    serialize the result into JSON-compatible Python objects.

    Raises:
        RequestValidationError: when the event fails pydantic validation.
    """
    # eval_str=True resolves string/postponed annotations into real types.
    func_annotations = inspect.signature(func, eval_str=True)
    # The request model is taken from the first parameter's annotation.
    request_type: type[RequestT] = next(
        iter(func_annotations.parameters.values())
    ).annotation
    # Built once at decoration time; per-invocation cost is just dump_python.
    response_type_adapter: pydantic.TypeAdapter[ResponseT] = pydantic.TypeAdapter(
        func_annotations.return_annotation
    )

    def wrapper(
        event: schemas.EventType, context: schemas.LambdaContextProtocol
    ) -> Any:
        try:
            validated_event = request_type.model_validate(event)
        except pydantic.ValidationError as e:
            # Re-raise as the project error type so middleware upstream can
            # translate it into an HTTP 422 problem-details response.
            raise RequestValidationError(e) from e
        logger.debug("parsed_event", extra={"event": validated_event})
        return response_type_adapter.dump_python(
            func(validated_event), mode="json", by_alias=True
        )

    return wrapper
66
+
67
+
68
+ def context_manager_middleware[**P, T](
69
+ cm: Callable[P, AbstractContextManager[Any]],
70
+ ) -> Callable[[Callable[P, T]], Callable[P, T]]:
71
+ def decorator(func: Callable[P, T]) -> Callable[P, T]:
72
+ @wraps(func)
73
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
74
+ with cm(*args, **kwargs):
75
+ return func(*args, **kwargs)
76
+
77
+ return wrapper
78
+
79
+ return decorator
80
+
81
+
82
+ def error_transformer_handler[**P, T, E: Exception](
83
+ error_handler: Callable[[E], T],
84
+ ) -> Callable[[Callable[P, T]], Callable[P, T]]:
85
+ error_handler_annotations = inspect.signature(error_handler, eval_str=True)
86
+ error_type: type[E] = next(
87
+ iter(error_handler_annotations.parameters.values())
88
+ ).annotation
89
+
90
+ def decorator(func: Callable[P, T]) -> Callable[P, T]:
91
+ @wraps(func)
92
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
93
+ try:
94
+ return func(*args, **kwargs)
95
+ except error_type as e:
96
+ return error_handler(e)
97
+
98
+ return wrapper
99
+
100
+ return decorator
101
+
102
+
103
def request_logger_handler[ResponseT](
    func: LambdaHandlerT[ResponseT],
) -> LambdaHandlerT[ResponseT]:
    """Wrap a Lambda handler so every invocation is logged.

    Binds the Lambda context metadata (function name, memory, ARN, request
    id) into the logging context for the duration of the call, and emits a
    DEBUG "request" record including the call duration when it finishes.
    """

    def bind_extractor(
        event: schemas.EventType, context: schemas.LambdaContextProtocol
    ) -> AbstractContextManager[None]:
        # Everything bound here is merged onto every log record emitted
        # while the handler runs (see log._context_adder_filter).
        return logger_bind(
            lambda_context={
                "name": context.function_name,
                "memory_size": context.memory_limit_in_mb,
                "arn": context.invoked_function_arn,
                "request_id": context.aws_request_id,
            },
        )

    return context_manager_middleware(
        bind_extractor,
    )(
        log_after_call(
            log_level=logging.DEBUG,
            log_message="request",
            result_extractor=lambda _: {},  # TODO: Remove after mypy fix
        )(
            func,
        ),
    )
129
+
130
+
131
def gateway_handler[RequestT: pydantic.BaseModel](
    func: Callable[[RequestT], schemas.ApiGatewayResponse],
) -> LambdaHandlerT[schemas.ApiGatewaySerializedResponse]:
    """Compose the full middleware stack for an API Gateway Lambda handler.

    From outermost to innermost: bind logging context (Lambda metadata plus
    the gateway requestId as correlation id), emit a DEBUG "request" record
    carrying the response status code, translate ``GeneralError`` into an
    RFC 7807 gateway response, and validate/serialize via
    ``validated_handler``.
    """

    def bind_extractor(
        event: schemas.EventType, context: schemas.LambdaContextProtocol
    ) -> AbstractContextManager[None]:
        return logger_bind(
            lambda_context={
                "name": context.function_name,
                "memory_size": context.memory_limit_in_mb,
                "arn": context.invoked_function_arn,
                "request_id": context.aws_request_id,
            },
            # API Gateway's requestId ties log records to the HTTP request.
            correlation_id=event["requestContext"].get("requestId"),
        )

    def result_extractor(
        response: schemas.ApiGatewaySerializedResponse,
    ) -> dict[str, Any]:
        # Surface the HTTP status on the "request" log record.
        return {"status_code": response["statusCode"]}

    return context_manager_middleware(
        bind_extractor,
    )(
        log_after_call(
            log_level=logging.DEBUG,
            log_message="request",
            result_extractor=result_extractor,
        )(
            error_transformer_handler(general_error_to_gateway_response)(
                validated_handler(func),
            ),
        )
    )
165
+
166
+
167
+ def parallel_sqs_handler[RequestT](
168
+ *,
169
+ max_workers: int,
170
+ ) -> Callable[
171
+ [Callable[[RequestT], None]],
172
+ Callable[[schemas.SqsEvent[RequestT]], schemas.LambdaCheckpointResponse],
173
+ ]:
174
+ def decorator(
175
+ func: Callable[[RequestT], None],
176
+ ) -> Callable[[schemas.SqsEvent[RequestT]], schemas.LambdaCheckpointResponse]:
177
+ func_annotations = inspect.signature(func, eval_str=True)
178
+ request_type: type[RequestT] = next(
179
+ iter(func_annotations.parameters.values())
180
+ ).annotation
181
+ SqsEventType = schemas.SqsEvent[request_type] # type: ignore[valid-type] # noqa: N806
182
+
183
+ def single_record_processor(
184
+ rec: schemas.SqsRecordModel[RequestT],
185
+ ) -> schemas.LambdaCheckpointItem | None:
186
+ try:
187
+ func(rec.body)
188
+ except Exception:
189
+ return schemas.LambdaCheckpointItem(item_identifier=rec.message_id)
190
+ return None
191
+
192
+ def wrapper(event: SqsEventType) -> schemas.LambdaCheckpointResponse:
193
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
194
+ responses = executor.map(single_record_processor, event.records)
195
+ return schemas.LambdaCheckpointResponse(
196
+ batch_item_failures=[item for item in responses if item is not None]
197
+ )
198
+
199
+ return wrapper
200
+
201
+ return decorator
202
+
203
+
204
+ class CachedContextManager[T]:
205
+ def __init__(self, context_manager: AbstractContextManager[T]) -> None:
206
+ self._context_manager = context_manager
207
+
208
+ def __enter__(self) -> T:
209
+ self._value = self._context_manager.__enter__()
210
+ return self._value
211
+
212
+ def __exit__(
213
+ self,
214
+ exc_type: type[BaseException] | None,
215
+ exc_value: BaseException | None,
216
+ traceback: TracebackType | None,
217
+ ) -> bool | None:
218
+ return self._context_manager.__exit__(exc_type, exc_value, traceback)
219
+
220
+ def __call__(self) -> T:
221
+ return self._value
@@ -0,0 +1,59 @@
1
+ from http import HTTPStatus
2
+ from typing import Any
3
+
4
+ import pydantic
5
+
6
+ from turbo_lambda import schemas
7
+
8
+
9
class ApplicationError(Exception):
    """Base class for all errors raised by this library."""

    pass
11
+
12
+
13
class UnauthorizedError(ApplicationError):
    """Raised for unauthorized requests; the message is always "Unauthorized"."""

    def __init__(self) -> None:
        super().__init__("Unauthorized")
16
+
17
+
18
class GeneralError(ApplicationError):
    """Application error carrying RFC 7807 problem-details fields.

    The attributes mirror the problem-details members: ``error_type`` (the
    ``type`` URI), ``status_code``, ``title``, ``detail`` and free-form
    ``extensions``.
    """

    def __init__(
        self,
        error_type: str = "about:blank",
        status_code: HTTPStatus = HTTPStatus.BAD_REQUEST,
        title: str | None = None,
        detail: str = "General error",
        extensions: Any = None,
    ) -> None:
        super().__init__()
        self.error_type = error_type
        self.status_code = status_code
        # Default the title to the status code's standard description.
        if title is None:
            self.title = status_code.description
        else:
            self.title = title
        self.detail = detail
        self.extensions = extensions
33
+
34
+
35
class RequestValidationError(GeneralError):
    """422 error wrapping a pydantic ``ValidationError`` as problem details."""

    def __init__(self, error: pydantic.ValidationError) -> None:
        super().__init__(
            error_type="https://docs.pydantic.dev/errors/validation_errors/",
            status_code=HTTPStatus.UNPROCESSABLE_ENTITY,
            title=error.title,
            detail=str(error),
            # Per-field error list becomes the RFC 7807 extensions member.
            extensions=error.errors(),
        )
44
+
45
+
46
def general_error_to_gateway_response(
    error: GeneralError,
) -> schemas.ApiGatewaySerializedResponse:
    """Serialize a ``GeneralError`` into an API Gateway problem+json response."""
    return schemas.ApiGatewayResponse(
        status_code=error.status_code,
        headers={"Content-Type": "application/problem+json"},
        body=schemas.HttpErrorResponse(
            type=error.error_type,
            status=error.status_code,
            title=error.title,
            detail=error.detail,
            extensions=error.extensions,
        ),
    ).model_dump()
@@ -0,0 +1,181 @@
1
+ import contextlib
2
+ import contextvars
3
+ import inspect
4
+ import logging
5
+ import os
6
+ import time
7
+ from collections.abc import Callable, Generator, Iterable, Mapping
8
+ from functools import singledispatch, wraps
9
+ from typing import Any, Never, overload
10
+
11
+ import orjson
12
+ import pydantic
13
+
14
# Per-task logging context; bindings made via logger_bind are merged onto
# every LogRecord by _context_adder_filter.
LOGGING_CTX: contextvars.ContextVar[dict[str, Any]] = contextvars.ContextVar(
    "LOGGING_CTX"
)
# Maps LogRecord attribute names to the JSON keys emitted by JsonFormatter.
_DEFAULT_TRANSLATOR = {
    "levelname": "level",
    "name": "logger",
    "asctime": "timestamp",
    "exc_text": "exc_info",
    "funcName": "func_name",
    "threadName": "thread_name",
    "processName": "process_name",
    "taskName": "task_name",
}
# LogRecord attributes omitted from the JSON output (raw or redundant forms).
_DEFAULT_IGNORE = {
    "msg",
    "args",
    "levelno",
    "exc_info",
    "created",
    "msecs",
    "relativeCreated",
}
36
+
37
+
38
@contextlib.contextmanager
def logger_bind(**kwargs: Any) -> Generator[None]:
    """Merge ``kwargs`` into the logging context for the duration of the block.

    Nested calls stack: inner bindings shadow outer ones and are removed on
    exit (the context variable is reset to its previous value).
    """
    token = LOGGING_CTX.set(LOGGING_CTX.get({}) | kwargs)
    try:
        yield
    finally:
        LOGGING_CTX.reset(token)
45
+
46
+
47
def _context_adder_filter(record: logging.LogRecord) -> bool:
    """Logging filter that copies the bound context onto each record.

    Always returns True, so no record is ever dropped.
    """
    for k, v in LOGGING_CTX.get({}).items():
        setattr(record, k, v)
    return True
51
+
52
+
53
@singledispatch
def orjson_custom_type_handler(value: Any) -> Never:
    """``default=`` hook for orjson: serialize types orjson can't natively.

    Raises:
        TypeError: for unregistered types — orjson's ``default`` contract
            requires TypeError so the serialization error propagates; the
            message now names the offending type instead of being empty.
    """
    raise TypeError(f"Type is not JSON serializable: {type(value).__name__}")


@orjson_custom_type_handler.register(set)
def _(value: set[Any]) -> Any:
    # Sets have no JSON representation; emit them as lists.
    return list(value)


@orjson_custom_type_handler.register(pydantic.BaseModel)
def _(value: pydantic.BaseModel) -> Any:
    # Delegate to pydantic's JSON-mode dump for nested model support.
    return value.model_dump(mode="json")
66
+
67
+
68
class JsonFormatter(logging.Formatter):
    """Formatter rendering each LogRecord as a single JSON line via orjson."""

    def __init__(
        self,
        translator_dict: Mapping[str, str] | None = None,
        ignored_keys: set[str] | None = None,
    ):
        """Create the formatter.

        Args:
            translator_dict: record-attribute -> output-key renames
                (defaults to _DEFAULT_TRANSLATOR).
            ignored_keys: record attributes excluded from the output
                (defaults to _DEFAULT_IGNORE).
        """
        self.translator_dict = translator_dict or _DEFAULT_TRANSLATOR
        self.ignored_keys = ignored_keys or _DEFAULT_IGNORE
        # ISO-8601-like timestamp with milliseconds and a literal "Z" suffix.
        # NOTE(review): Formatter.formatTime uses local time by default, so
        # the "Z" is only accurate when the process runs in UTC — confirm.
        self.default_time_format = "%Y-%m-%dT%H:%M:%S"
        self.default_msec_format = "%s.%03dZ"
        super().__init__()

    def format(self, record: logging.LogRecord) -> str:
        # Mirror logging.Formatter.format's preprocessing so message, asctime
        # and exception/stack text exist as plain record attributes.
        record.message = record.getMessage()
        record.asctime = self.formatTime(record)
        if record.exc_info and record.exc_text is None:
            record.exc_text = self.formatException(record.exc_info)
        if record.stack_info:
            record.stack_info = self.formatStack(record.stack_info)
        # Every remaining attribute (including logger_bind extras) is
        # emitted, with keys renamed per translator_dict.
        message_dict = {
            self.translator_dict.get(rec_key, rec_key): rec_val
            for rec_key, rec_val in vars(record).items()
            if rec_key not in self.ignored_keys
        }
        return orjson.dumps(message_dict, default=orjson_custom_type_handler).decode()
93
+
94
+
95
def config_default_logger() -> None:  # pragma: no cover
    """Attach a stderr StreamHandler with JSON formatting (local/dev use)."""
    handler = logging.StreamHandler()
    handler.setFormatter(json_formatter)
    logger.addHandler(handler)


def config_lambda_logger() -> None:  # pragma: no cover
    """Reconfigure the AWS-provided root handler to emit JSON.

    Honors the AWS_LAMBDA_LOG_LEVEL environment variable when set.
    """
    if log_level := os.environ.get("AWS_LAMBDA_LOG_LEVEL"):
        logger.setLevel(log_level)
    # NOTE(review): assumes the Lambda runtime pre-installed a root handler;
    # handlers[0] raises IndexError otherwise — confirm for all runtimes.
    logging.getLogger().handlers[0].setFormatter(json_formatter)
105
+
106
+
107
@overload
def log_after_call[**P, T](func: Callable[P, T]) -> Callable[P, T]: ...


@overload
def log_after_call[**P, T](
    *,
    log_level: int = logging.INFO,
    log_message: str = "call",
    excluded_fields: Iterable[str] = ("self", "context"),
    result_extractor: Callable[[T], dict[str, Any]] = lambda _: {},
) -> Callable[[Callable[P, T]], Callable[P, T]]: ...


def log_after_call[**P, T](
    func: Callable[P, T] | None = None,
    log_level: int = logging.INFO,
    log_message: str = "call",
    excluded_fields: Iterable[str] = ("self", "context"),
    result_extractor: Callable[[T], dict[str, Any]] = lambda _: {},
) -> Callable[P, T] | Callable[[Callable[P, T]], Callable[P, T]]:
    """Decorator that logs one record per call, after the call finishes.

    Usable bare (``@log_after_call``) or with options
    (``@log_after_call(log_level=...)``). Each record carries the function's
    identity, its bound arguments (minus ``excluded_fields``), the call
    duration in seconds, and any extra keys produced by ``result_extractor``
    from the return value. Exceptions are logged at ERROR level with the
    traceback attached and then re-raised.
    """
    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        # Signature is computed once, at decoration time.
        sig = inspect.signature(func)

        @wraps(func)
        def wrapper(*f_args: P.args, **f_kwargs: P.kwargs) -> T:
            # NOTE(review): func.__code__ assumes a plain Python function;
            # other callables would raise AttributeError here — confirm.
            extra: dict[str, Any] = {
                "function": {
                    "name": func.__qualname__,
                    "module": func.__module__,
                    "pathname": func.__code__.co_filename,
                    "firstlineno": func.__code__.co_firstlineno,
                },
                "arguments": get_arguments(sig, excluded_fields, f_args, f_kwargs),
            }
            exc_info = False
            st = time.monotonic()
            try:
                result = func(*f_args, **f_kwargs)
                extra.update(result_extractor(result))
                return result
            except Exception:
                exc_info = True
                raise
            finally:
                # Always log, success or failure, with the call duration.
                extra["duration"] = time.monotonic() - st
                logger.log(
                    log_level if not exc_info else logging.ERROR,
                    log_message,
                    exc_info=exc_info,
                    extra=extra,
                )

        return wrapper

    return decorator if func is None else decorator(func)
163
+
164
+
165
def get_arguments(
    sig: inspect.Signature,
    excluded_fields: Iterable[str],
    args: Iterable[Any],
    kwargs: dict[str, Any],
) -> dict[str, Any]:
    """Map positional and keyword arguments onto parameter names for logging.

    Defaults are filled in, and any parameter named in ``excluded_fields``
    (e.g. ``self`` or the Lambda context) is dropped from the result.
    """
    bound = sig.bind(*args, **kwargs)
    bound.apply_defaults()
    arguments = bound.arguments
    for name in excluded_fields:
        arguments.pop(name, None)
    return arguments
176
+
177
+
178
# Shared module-level formatter and library logger; the filter makes
# logger_bind() bindings appear on every record emitted through this logger.
json_formatter = JsonFormatter()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addFilter(_context_adder_filter)
File without changes
@@ -0,0 +1,503 @@
1
+ import base64
2
+ import datetime
3
+ import os
4
+ import re
5
+ from enum import Enum
6
+ from http import HTTPStatus
7
+ from typing import (
8
+ TYPE_CHECKING,
9
+ Annotated,
10
+ Any,
11
+ Literal,
12
+ NewType,
13
+ Protocol,
14
+ TypedDict,
15
+ assert_never,
16
+ )
17
+
18
+ import annotated_types
19
+ import pydantic
20
+ from pydantic_core import core_schema
21
+
22
# True when running inside the AWS Lambda managed runtime.
IS_LAMBDA = os.environ.get("AWS_EXECUTION_ENV", "").startswith("AWS_Lambda_")
# Raw, unvalidated Lambda event payload.
EventType = NewType("EventType", dict[str, Any])
# Captures region/account/api/stage/method/path from an execute-api route ARN.
_ROUTE_ARN_PATTERN_STR = r"^arn:aws:execute-api:(?P<region>[a-zA-Z0-9-]+):(?P<account_id>\d+):(?P<api_id>[a-zA-Z0-9]+)/(?P<stage>[^/]+)/(?P<method>[A-Z]+)/(?P<resource_path>.*)$"
_ROUTE_ARN_PATTERN = re.compile(_ROUTE_ARN_PATTERN_STR)
26
+
27
+
28
class LambdaContextProtocol(Protocol):
    """Structural view of the AWS Lambda context object (only fields used here)."""

    function_name: str
    memory_limit_in_mb: int
    invoked_function_arn: str
    aws_request_id: str
33
+
34
+
35
class GatewayEventPathParameters[ParamT](pydantic.BaseModel):
    """Mixin model exposing API Gateway path parameters as ``ParamT``."""

    path_parameters: Annotated[ParamT, pydantic.Field(alias="pathParameters")]


class GatewayEventQueryParameters[ParamT](pydantic.BaseModel):
    """Mixin model exposing API Gateway query-string parameters as ``ParamT``."""

    query_string_parameters: Annotated[
        ParamT | None, pydantic.Field(alias="queryStringParameters")
    ] = None
43
+
44
+
45
class HttpErrorResponse(pydantic.BaseModel):
    """RFC7807 Compatible schema."""

    # URI identifying the problem type ("about:blank" when generic).
    type: str
    status: int
    title: str
    detail: str
    # Free-form extra members carried alongside the standard fields.
    extensions: Any
53
+
54
+
55
class ApiGatewaySerializedResponse(TypedDict):
    """Wire shape of an API Gateway proxy-integration Lambda response."""

    statusCode: int
    headers: dict[str, str]
    # None for empty (e.g. 204) responses.
    body: str | None
    # True when body is base64-encoded binary data.
    isBase64Encoded: bool
60
+
61
+
62
class ApiGatewayResponse(pydantic.BaseModel):
    """Typed response serialized to the API Gateway proxy-integration shape.

    The body may be a pydantic model (JSON), raw bytes (base64-encoded), or
    None (empty response); status code and Content-Type default per kind.
    """

    status_code: int | None = None
    headers: dict[str, str] | None = None
    body: pydantic.BaseModel | bytes | None

    @pydantic.model_serializer
    def serializer(self) -> ApiGatewaySerializedResponse:
        # Dispatch on the body kind; defaults are 200 for payloads,
        # 204 for an empty body.
        match self.body:
            case pydantic.BaseModel():
                return {
                    "statusCode": self.status_code or HTTPStatus.OK,
                    # Caller-supplied headers override the default Content-Type.
                    "headers": {"Content-Type": "application/json"}
                    | (self.headers or {}),
                    "body": self.body.model_dump_json(by_alias=True),
                    "isBase64Encoded": False,
                }
            case bytes():
                return {
                    "statusCode": self.status_code or HTTPStatus.OK,
                    "headers": {"Content-Type": "application/octet-stream"}
                    | (self.headers or {}),
                    # API Gateway requires binary bodies to be base64-encoded.
                    "body": base64.b64encode(self.body).decode(),
                    "isBase64Encoded": True,
                }
            case None:
                return {
                    "statusCode": self.status_code or HTTPStatus.NO_CONTENT,
                    "headers": (self.headers or {}),
                    "body": None,
                    "isBase64Encoded": False,
                }
            case _:
                assert_never(self.body)

    if TYPE_CHECKING:
        # Narrow model_dump's return type for static checkers only.
        def model_dump(self) -> ApiGatewaySerializedResponse: ...  # type: ignore[override]
99
+
100
+
101
+ class SqsAttributesModel(pydantic.BaseModel):
102
+ approximate_receive_count: Annotated[
103
+ str,
104
+ pydantic.Field(
105
+ alias="ApproximateReceiveCount",
106
+ description="The number of times a message has been received across all queues but not deleted.",
107
+ examples=["1", "2"],
108
+ ),
109
+ ]
110
+ message_deduplication_id: Annotated[
111
+ str | None,
112
+ pydantic.Field(
113
+ alias="MessageDeduplicationId",
114
+ description="Returns the value provided by the producer that calls the SendMessage action.",
115
+ examples=["msg-dedup-12345", "unique-msg-abc123", None],
116
+ ),
117
+ ] = None
118
+ message_group_id: Annotated[
119
+ str | None,
120
+ pydantic.Field(
121
+ alias="MessageGroupId",
122
+ description="Returns the value provided by the producer that calls the SendMessage action.",
123
+ examples=["order-processing", "user-123-updates", None],
124
+ ),
125
+ ] = None
126
+ aws_trace_header: Annotated[
127
+ str | None,
128
+ pydantic.Field(
129
+ alias="AWSTraceHeader",
130
+ description="The AWS X-Ray trace header for request tracing.",
131
+ examples=["Root=1-5e1b4151-5ac6c58239c1e5b4", None],
132
+ ),
133
+ ] = None
134
+ sent_timestamp: Annotated[
135
+ datetime.datetime,
136
+ pydantic.Field(
137
+ alias="SentTimestamp",
138
+ description="The time the message was sent to the queue (epoch time in milliseconds).",
139
+ examples=["1545082649183", "1545082650636", "1713185156609"],
140
+ ),
141
+ ]
142
+ sequence_number: Annotated[
143
+ str | None,
144
+ pydantic.Field(
145
+ alias="SequenceNumber",
146
+ description="Returns the value provided by Amazon SQS.",
147
+ examples=["18849496460467696128", "18849496460467696129", None],
148
+ ),
149
+ ] = None
150
+ dead_letter_queue_source_arn: Annotated[
151
+ str | None,
152
+ pydantic.Field(
153
+ alias="DeadLetterQueueSourceArn",
154
+ description="The ARN of the dead-letter queue from which the message was moved.",
155
+ examples=[
156
+ "arn:aws:sqs:eu-central-1:123456789012:sqs-redrive-SampleQueue-RNvLCpwGmLi7",
157
+ None,
158
+ ],
159
+ ),
160
+ ] = None
161
+ sender_id: Annotated[
162
+ str,
163
+ pydantic.Field(
164
+ alias="SenderId",
165
+ description="The user ID for IAM users or the role ID for IAM roles that sent the message.",
166
+ examples=["AIDAIENQZJOLO23YVJ4VO", "AMCXIENQZJOLO23YVJ4VO"],
167
+ ),
168
+ ]
169
+ approximate_first_receive_timestamp: Annotated[
170
+ datetime.datetime,
171
+ pydantic.Field(
172
+ alias="ApproximateFirstReceiveTimestamp",
173
+ description="The time the message was first received from the queue (epoch time in milliseconds).",
174
+ examples=["1545082649185", "1545082650649", "1713185156612"],
175
+ ),
176
+ ]
177
+
178
+
179
+ class SqsMsgAttributeModel(pydantic.BaseModel):
180
+ string_value: Annotated[
181
+ str | None,
182
+ pydantic.Field(
183
+ alias="stringValue",
184
+ description="The string value of the message attribute.",
185
+ examples=["100", "active", "user-12345", None],
186
+ ),
187
+ ] = None
188
+ binary_value: Annotated[
189
+ str | None,
190
+ pydantic.Field(
191
+ alias="binaryValue",
192
+ description="The binary value of the message attribute, base64-encoded.",
193
+ examples=["base64Str", "SGVsbG8gV29ybGQ=", None],
194
+ ),
195
+ ] = None
196
+ string_list_values: Annotated[
197
+ list[str],
198
+ pydantic.Field(
199
+ alias="stringListValues",
200
+ description="A list of string values for the message attribute.",
201
+ examples=[["item1", "item2"], ["tag1", "tag2", "tag3"], []],
202
+ ),
203
+ ] = []
204
+ binary_list_values: Annotated[
205
+ list[str],
206
+ pydantic.Field(
207
+ alias="binaryListValues",
208
+ description="A list of binary values for the message attribute, each base64-encoded.",
209
+ examples=[["dmFsdWUx", "dmFsdWUy"], ["aGVsbG8="], []],
210
+ ),
211
+ ] = []
212
+ data_type: Annotated[
213
+ str,
214
+ pydantic.Field(
215
+ alias="dataType",
216
+ description="The data type of the message attribute (String, Number, Binary, or custom data type).",
217
+ examples=["String", "Number", "Binary", "String.custom", "Number.float"],
218
+ ),
219
+ ]
220
+
221
+
222
+ class SqsRecordModel[BodyT](pydantic.BaseModel):
223
+ message_id: Annotated[
224
+ str,
225
+ pydantic.Field(
226
+ alias="messageId",
227
+ description="A unique identifier for the message. A MessageId is considered unique across all AWS accounts.",
228
+ examples=[
229
+ "059f36b4-87a3-44ab-83d2-661975830a7d",
230
+ "2e1424d4-f796-459a-8184-9c92662be6da",
231
+ "db37cc61-1bb0-4e77-b6f3-7cf87f44a72a",
232
+ ],
233
+ ),
234
+ ]
235
+ receipt_handle: Annotated[
236
+ str,
237
+ pydantic.Field(
238
+ alias="receiptHandle",
239
+ description="An identifier associated with the act of receiving the message, used for message deletion.",
240
+ examples=[
241
+ "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...",
242
+ "AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...",
243
+ ],
244
+ ),
245
+ ]
246
+ body: Annotated[
247
+ BodyT,
248
+ pydantic.Field(
249
+ description="The message's contents (not URL-encoded). Can be plain text or JSON.",
250
+ examples=[
251
+ "Test message.",
252
+ '{"message": "foo1"}',
253
+ "hello world",
254
+ ],
255
+ ),
256
+ ]
257
+ attributes: Annotated[
258
+ SqsAttributesModel,
259
+ pydantic.Field(
260
+ description="A map of the attributes requested in ReceiveMessage to their respective values.",
261
+ ),
262
+ ]
263
+ message_attributes: Annotated[
264
+ dict[str, SqsMsgAttributeModel],
265
+ pydantic.Field(
266
+ alias="messageAttributes",
267
+ description="User-defined message attributes as key-value pairs.",
268
+ ),
269
+ ]
270
+ md5_of_body: Annotated[
271
+ str,
272
+ pydantic.Field(
273
+ alias="md5OfBody",
274
+ description="An MD5 digest of the non-URL-encoded message body string.",
275
+ examples=[
276
+ "e4e68fb7bd0e697a0ae8f1bb342846b3",
277
+ "6a204bd89f3c8348afd5c77c717a097a",
278
+ ],
279
+ ),
280
+ ]
281
+ md5_of_message_attributes: Annotated[
282
+ str | None,
283
+ pydantic.Field(
284
+ alias="md5OfMessageAttributes",
285
+ description="An MD5 digest of the non-URL-encoded message attribute string.",
286
+ examples=[
287
+ "00484c68...59e48fb7",
288
+ "b25f48e8...f4e4f0bb",
289
+ None,
290
+ ],
291
+ ),
292
+ ] = None
293
+ event_source: Annotated[
294
+ Literal["aws:sqs"],
295
+ pydantic.Field(
296
+ alias="eventSource",
297
+ description="The AWS service that invoked the function.",
298
+ examples=["aws:sqs"],
299
+ ),
300
+ ]
301
+ event_source_arn: Annotated[
302
+ str,
303
+ pydantic.Field(
304
+ alias="eventSourceARN",
305
+ description="The Amazon Resource Name (ARN) of the SQS queue.",
306
+ examples=[
307
+ "arn:aws:sqs:us-east-2:123456789012:my-queue",
308
+ "arn:aws:sqs:eu-central-1:123456789012:sqs-redrive-SampleDLQ-Emgp9MFSLBZm",
309
+ ],
310
+ ),
311
+ ]
312
+ aws_region: Annotated[
313
+ str,
314
+ pydantic.Field(
315
+ alias="awsRegion",
316
+ description="The AWS region where the SQS queue is located.",
317
+ examples=["us-east-1", "us-east-2", "eu-central-1"],
318
+ ),
319
+ ]
320
+
321
+
322
+ class SqsEvent[BodyT](pydantic.BaseModel):
323
+ records: Annotated[
324
+ list[SqsRecordModel[BodyT]],
325
+ pydantic.Field(
326
+ alias="Records",
327
+ description="A list of SQS message records included in the event.",
328
+ examples=[
329
+ [
330
+ {
331
+ "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d",
332
+ "body": "Test message.",
333
+ }
334
+ ]
335
+ ],
336
+ ),
337
+ ]
338
+
339
+
340
class LambdaCheckpointItem(pydantic.BaseModel):
    """One failed record in an SQS partial-batch failure report."""

    item_identifier: Annotated[
        str, pydantic.Field(serialization_alias="itemIdentifier")
    ]


class LambdaCheckpointResponse(pydantic.BaseModel):
    """Partial-batch failure report returned to the Lambda SQS integration."""

    batch_item_failures: Annotated[
        list[LambdaCheckpointItem],
        pydantic.Field(serialization_alias="batchItemFailures"),
    ]
351
+
352
+
353
+ class EventBridgeModel[DetailT](pydantic.BaseModel):
354
+ version: Annotated[
355
+ str,
356
+ pydantic.Field(
357
+ description="By default, this is set to 0 (zero) in all events.",
358
+ examples=["0"],
359
+ ),
360
+ ]
361
+ id: Annotated[
362
+ str,
363
+ pydantic.Field(
364
+ description="A Version 4 UUID generated for every event.",
365
+ examples=["6a7e8feb-b491-4cf7-a9f1-bf3703467718"],
366
+ ),
367
+ ]
368
+ source: Annotated[
369
+ str,
370
+ pydantic.Field(
371
+ description="Identifies the service that sourced the event. \
372
+ All events sourced from within AWS begin with 'aws.'",
373
+ examples=["aws.ec2", "aws.s3", "aws.events", "aws.scheduler"],
374
+ ),
375
+ ]
376
+ account: Annotated[
377
+ str,
378
+ pydantic.Field(
379
+ description="The 12-digit AWS account ID of the owner of the service emitting the event.",
380
+ examples=["111122223333", "123456789012"],
381
+ ),
382
+ ]
383
+ time: Annotated[
384
+ datetime.datetime,
385
+ pydantic.Field(
386
+ description="The event timestamp, which can be specified by the service originating the event.",
387
+ examples=["2017-12-22T18:43:48Z", "2023-01-15T10:30:00Z"],
388
+ ),
389
+ ]
390
+ region: Annotated[
391
+ str,
392
+ pydantic.Field(
393
+ description="Identifies the AWS region where the event originated.",
394
+ examples=["us-east-1", "us-west-2", "eu-west-1"],
395
+ ),
396
+ ]
397
+ resources: Annotated[
398
+ list[str],
399
+ pydantic.Field(
400
+ description="A JSON array that contains ARNs that identify resources involved in the event. "
401
+ "Inclusion of these ARNs is at the discretion of the service.",
402
+ examples=[
403
+ ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"],
404
+ ["arn:aws:s3:::my-bucket/my-key"],
405
+ ["arn:aws:events:us-east-1:123456789012:rule/MyRule"],
406
+ ],
407
+ ),
408
+ ]
409
+ detail_type: Annotated[
410
+ str,
411
+ pydantic.Field(
412
+ alias="detail-type",
413
+ description="Identifies, in combination with the source Field, the fields and values that appear in the detail field.",
414
+ examples=[
415
+ "EC2 Instance State-change Notification",
416
+ "Object Created",
417
+ "Scheduled Event",
418
+ ],
419
+ ),
420
+ ]
421
+ detail: Annotated[
422
+ DetailT,
423
+ pydantic.Field(
424
+ description="A JSON object, whose content is at the discretion of the service originating the event.",
425
+ ),
426
+ ]
427
+ replay_name: Annotated[
428
+ str | None,
429
+ pydantic.Field(
430
+ alias="replay-name",
431
+ description="Identifies whether the event is being replayed and what is the name of the replay.",
432
+ examples=["replay_archive", "my-replay-2023"],
433
+ ),
434
+ ] = None
435
+
436
+
437
class RouteARN(pydantic.BaseModel):
    """Parsed components of an API Gateway execute-api route ARN."""

    region: str
    account_id: int
    api_id: str
    stage: str
    method: str
    resource_path: str
444
+
445
+
446
def _route_arn_validate(v: str | Any) -> dict[str, str] | Any:
    """BeforeValidator: parse a route-ARN string into its named components.

    Non-string inputs are passed through untouched so already-parsed values
    (dicts, RouteARN instances) continue through normal validation.

    Raises:
        ValueError: when the string does not match the route-ARN pattern.
    """
    if not isinstance(v, str):
        return v
    parsed = _ROUTE_ARN_PATTERN.match(v)
    if parsed is None:
        raise ValueError("Invalid Route ARN")
    return parsed.groupdict()
453
+
454
+
455
def _route_arn_serialize(value: RouteARN) -> str:
    """Inverse of _route_arn_validate: rebuild the ARN string from its parts."""
    return f"arn:aws:execute-api:{value.region}:{value.account_id}:{value.api_id}/{value.stage}/{value.method}/{value.resource_path}"
457
+
458
+
459
# RouteARN that accepts a raw ARN string on input and serializes back to the
# same string form; both directions advertise the ARN regex in the JSON schema.
type RouteARNStr = Annotated[
    RouteARN,
    pydantic.BeforeValidator(
        _route_arn_validate,
        json_schema_input_type=core_schema.str_schema(pattern=_ROUTE_ARN_PATTERN_STR),
    ),
    pydantic.PlainSerializer(
        _route_arn_serialize,
        return_type=core_schema.str_schema(pattern=_ROUTE_ARN_PATTERN_STR),
    ),
]
470
+
471
+
472
class ActionEnum(Enum):
    """IAM actions usable in an authorizer policy statement."""

    API_INVOKE = "execute-api:Invoke"


class EffectEnum(Enum):
    """IAM policy effect values."""

    ALLOW = "Allow"
    DENY = "Deny"
479
+
480
+
481
class AuthorizerPolicyStatement(pydantic.BaseModel):
    """Single IAM policy statement within an authorizer response."""

    action: Annotated[ActionEnum, pydantic.Field(serialization_alias="Action")]
    effect: Annotated[EffectEnum, pydantic.Field(serialization_alias="Effect")]
    resource: Annotated[str, pydantic.Field(serialization_alias="Resource")]


class AuthorizerPolicyDocument(pydantic.BaseModel):
    """IAM policy document; version is pinned to the current policy language."""

    version: Annotated[
        Literal["2012-10-17"], pydantic.Field(serialization_alias="Version")
    ]
    # At least one statement is required.
    statement: Annotated[
        list[AuthorizerPolicyStatement],
        annotated_types.Len(1),
        pydantic.Field(serialization_alias="Statement"),
    ]


class AuthorizerResponse(pydantic.BaseModel):
    """Payload returned by a Lambda authorizer to API Gateway."""

    principal_id: Annotated[str, pydantic.Field(serialization_alias="principalId")]
    policy_document: Annotated[
        AuthorizerPolicyDocument, pydantic.Field(serialization_alias="policyDocument")
    ]
    # Key/value pairs forwarded to the backend integration context.
    context: dict[str, str]
+ context: dict[str, str]