microbootstrap-0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. microbootstrap/__init__.py +44 -0
  2. microbootstrap/bootstrappers/__init__.py +0 -0
  3. microbootstrap/bootstrappers/base.py +114 -0
  4. microbootstrap/bootstrappers/fastapi.py +148 -0
  5. microbootstrap/bootstrappers/faststream.py +121 -0
  6. microbootstrap/bootstrappers/litestar.py +166 -0
  7. microbootstrap/config/__init__.py +0 -0
  8. microbootstrap/config/fastapi.py +64 -0
  9. microbootstrap/config/faststream.py +27 -0
  10. microbootstrap/config/litestar.py +21 -0
  11. microbootstrap/console_writer.py +34 -0
  12. microbootstrap/exceptions.py +10 -0
  13. microbootstrap/granian_server.py +41 -0
  14. microbootstrap/helpers.py +111 -0
  15. microbootstrap/instruments/__init__.py +0 -0
  16. microbootstrap/instruments/base.py +62 -0
  17. microbootstrap/instruments/cors_instrument.py +29 -0
  18. microbootstrap/instruments/health_checks_instrument.py +39 -0
  19. microbootstrap/instruments/instrument_box.py +50 -0
  20. microbootstrap/instruments/logging_instrument.py +202 -0
  21. microbootstrap/instruments/opentelemetry_instrument.py +203 -0
  22. microbootstrap/instruments/prometheus_instrument.py +63 -0
  23. microbootstrap/instruments/pyroscope_instrument.py +54 -0
  24. microbootstrap/instruments/sentry_instrument.py +124 -0
  25. microbootstrap/instruments/swagger_instrument.py +30 -0
  26. microbootstrap/instruments_setupper.py +72 -0
  27. microbootstrap/middlewares/__init__.py +0 -0
  28. microbootstrap/middlewares/fastapi.py +42 -0
  29. microbootstrap/middlewares/litestar.py +49 -0
  30. microbootstrap/py.typed +0 -0
  31. microbootstrap/settings.py +112 -0
  32. microbootstrap-0.dist-info/METADATA +920 -0
  33. microbootstrap-0.dist-info/RECORD +34 -0
  34. microbootstrap-0.dist-info/WHEEL +4 -0
@@ -0,0 +1,27 @@
1
+ from __future__ import annotations
2
+ import dataclasses
3
+ import typing
4
+
5
+
6
+ if typing.TYPE_CHECKING:
7
+ import faststream.asyncapi.schema as asyncapi
8
+ from faststream.asgi.types import ASGIApp
9
+ from faststream.broker.core.usecase import BrokerUsecase
10
+ from faststream.types import AnyDict, AnyHttpUrl, Lifespan
11
+
12
+
13
+ @dataclasses.dataclass
14
+ class FastStreamConfig:
15
+ broker: BrokerUsecase[typing.Any, typing.Any] | None = None
16
+ asgi_routes: typing.Sequence[tuple[str, ASGIApp]] = ()
17
+ lifespan: Lifespan | None = None
18
+ terms_of_service: AnyHttpUrl | None = None
19
+ license: asyncapi.License | asyncapi.LicenseDict | AnyDict | None = None
20
+ contact: asyncapi.Contact | asyncapi.ContactDict | AnyDict | None = None
21
+ tags: typing.Sequence[asyncapi.Tag | asyncapi.TagDict | AnyDict] | None = None
22
+ external_docs: asyncapi.ExternalDocs | asyncapi.ExternalDocsDict | AnyDict | None = None
23
+ identifier: str | None = None
24
+ on_startup: typing.Sequence[typing.Callable[..., typing.Any]] = ()
25
+ after_startup: typing.Sequence[typing.Callable[..., typing.Any]] = ()
26
+ on_shutdown: typing.Sequence[typing.Callable[..., typing.Any]] = ()
27
+ after_shutdown: typing.Sequence[typing.Callable[..., typing.Any]] = ()
@@ -0,0 +1,21 @@
1
+ from __future__ import annotations
2
+ import dataclasses
3
+ import typing
4
+
5
+ from litestar.config.app import AppConfig
6
+ from litestar.logging import LoggingConfig
7
+
8
+
9
+ if typing.TYPE_CHECKING:
10
+ from litestar.types import OnAppInitHandler
11
+
12
+
13
@dataclasses.dataclass
class LitestarConfig(AppConfig):
    """Litestar ``AppConfig`` with microbootstrap-friendly defaults."""

    # Callbacks invoked on application init; None defers to Litestar's default.
    on_app_init: typing.Sequence[OnAppInitHandler] | None = None
    logging_config: LoggingConfig = dataclasses.field(
        default_factory=lambda: LoggingConfig(
            # required for foreign logs json formatting
            configure_root_logger=False,
        )
    )
@@ -0,0 +1,34 @@
1
+ from __future__ import annotations
2
+ import dataclasses
3
+ import typing
4
+
5
+ from rich.console import Console
6
+ from rich.rule import Rule
7
+ from rich.table import Table
8
+
9
+
10
@dataclasses.dataclass
class ConsoleWriter:
    """Accumulates instrument statuses and renders them as a rich table."""

    # When False, print_bootstrap_table() is a no-op (rows are still collected).
    writer_enabled: bool = True
    rich_console: Console = dataclasses.field(init=False, default_factory=Console)
    rich_table: Table = dataclasses.field(init=False)

    def __post_init__(self) -> None:
        self.rich_table = Table(show_header=False, header_style="cyan")
        self.rich_table.add_column("Item", style="cyan")
        self.rich_table.add_column("Status")
        self.rich_table.add_column("Reason", style="yellow")

    def write_instrument_status(
        self,
        instrument_name: str,
        is_enabled: bool,
        disable_reason: str | None = None,
    ) -> None:
        """Append one row: instrument name, Enabled/Disabled, optional reason."""
        is_enabled_value: typing.Final = "[green]Enabled[/green]" if is_enabled else "[red]Disabled[/red]"
        # Pass the name through directly; the previous rf"{instrument_name}"
        # wrapper was a no-op raw f-string.
        self.rich_table.add_row(instrument_name, is_enabled_value, disable_reason or "")

    def print_bootstrap_table(self) -> None:
        """Print the header rule and the accumulated table, if enabled."""
        if self.writer_enabled:
            self.rich_console.print(Rule("[yellow]Bootstrapping application[/yellow]", align="left"))
            self.rich_console.print(self.rich_table)
@@ -0,0 +1,10 @@
1
class MicroBootstrapBaseError(Exception):
    """Base for all microbootstrap exceptions."""


class ConfigMergeError(MicroBootstrapBaseError):
    """Raised when it's impossible to merge configs due to a type mismatch."""


class MissingInstrumentError(MicroBootstrapBaseError):
    """Raised when attempting to configure an instrument that is not supported yet."""
@@ -0,0 +1,41 @@
1
+ from __future__ import annotations
2
+ import logging
3
+ import typing
4
+
5
+ import granian
6
+ from granian.constants import Interfaces
7
+ from granian.log import LogLevels
8
+
9
+
10
+ if typing.TYPE_CHECKING:
11
+ from granian.server.common import AbstractServer as GranianServer
12
+
13
+ from microbootstrap.settings import ServerConfig
14
+
15
+
16
# Maps stdlib logging levels to granian's LogLevels.
#
# NOTE: the original literal listed logging.WARNING twice (LogLevels.warning,
# then LogLevels.warn); duplicate dict keys are silently collapsed and only
# the last value survives. A single entry using the canonical member makes
# the intent explicit.
GRANIAN_LOG_LEVELS_MAP = {
    logging.CRITICAL: LogLevels.critical,
    logging.ERROR: LogLevels.error,
    logging.WARNING: LogLevels.warning,
    logging.INFO: LogLevels.info,
    logging.DEBUG: LogLevels.debug,
}
24
+
25
+
26
# TODO: create bootstrappers for application servers. granian/uvicorn # noqa: TD002
def create_granian_server(
    target: str,
    settings: ServerConfig,
    **granian_options: typing.Any,  # noqa: ANN401
) -> GranianServer[typing.Any]:
    """Build a granian ASGI server serving *target* with *settings*.

    The log level is looked up on the settings object via getattr, falling
    back to logging.INFO when no ``logging_log_level`` attribute exists.
    Extra keyword arguments are passed straight through to granian.Granian.
    """
    return granian.Granian(
        target=target,
        address=settings.server_host,
        port=settings.server_port,
        interface=Interfaces.ASGI,
        workers=settings.server_workers_count,
        log_level=GRANIAN_LOG_LEVELS_MAP[getattr(settings, "logging_log_level", logging.INFO)],
        reload=settings.server_reload,
        **granian_options,
    )
@@ -0,0 +1,111 @@
1
+ import dataclasses
2
+ import re
3
+ import typing
4
+ from dataclasses import _MISSING_TYPE
5
+
6
+ from microbootstrap import exceptions
7
+
8
+
9
+ if typing.TYPE_CHECKING:
10
+ from dataclasses import _DataclassT
11
+
12
+ from pydantic import BaseModel
13
+
14
+
15
# Any pydantic model; bound used by merge_pydantic_configs.
PydanticConfigT = typing.TypeVar("PydanticConfigT", bound="BaseModel")
# Absolute paths like "/api/v1/health", optional trailing slash.
VALID_PATH_PATTERN: typing.Final = r"^(/[a-zA-Z0-9_-]+)+/?$"
17
+
18
+
19
def dataclass_to_dict_no_defaults(dataclass_to_convert: "_DataclassT") -> dict[str, typing.Any]:
    """Convert a dataclass instance to a dict, dropping fields left at their plain default.

    NOTE(review): a field declared with only ``default_factory`` (so its
    ``default`` is MISSING) hits the first branch and is always included, even
    when its value equals the factory result — confirm this is intended.
    The final branch requires both ``default`` and ``default_factory`` to be
    set, which dataclasses forbid, so it appears unreachable.
    """
    conversion_result: typing.Final = {}
    for dataclass_field in dataclasses.fields(dataclass_to_convert):
        value = getattr(dataclass_to_convert, dataclass_field.name)
        # No plain default: keep the value unconditionally.
        if isinstance(dataclass_field.default, _MISSING_TYPE):
            conversion_result[dataclass_field.name] = value
            continue
        # Plain default and no factory: keep only when the value differs.
        if dataclass_field.default != value and isinstance(dataclass_field.default_factory, _MISSING_TYPE):
            conversion_result[dataclass_field.name] = value
            continue
        if value != dataclass_field.default and value != dataclass_field.default_factory():  # type: ignore[misc]
            conversion_result[dataclass_field.name] = value

    return conversion_result
33
+
34
+
35
def merge_pydantic_configs(
    config_to_merge: PydanticConfigT,
    config_with_changes: PydanticConfigT,
) -> PydanticConfigT:
    """Return a copy of *config_to_merge* with explicitly-set fields of *config_with_changes* merged in.

    Only fields present in ``model_fields_set`` of the changes object are
    considered; container values are combined by merge_dict_configs.
    """
    base_fields = dict(config_to_merge)
    overrides = {
        field_name: getattr(config_with_changes, field_name)
        for field_name in config_with_changes.model_fields_set
    }
    merged = merge_dict_configs(base_fields, overrides)
    return config_to_merge.model_copy(update=merged)
46
+
47
+
48
def merge_dataclasses_configs(
    config_to_merge: "_DataclassT",
    config_with_changes: "_DataclassT",
) -> "_DataclassT":
    """Build a new dataclass of the same type from the merged non-default fields of both configs."""
    merged_kwargs = merge_dict_configs(
        dataclass_to_dict_no_defaults(config_to_merge),
        dataclass_to_dict_no_defaults(config_with_changes),
    )
    return type(config_to_merge)(**merged_kwargs)
58
+
59
+
60
def merge_dict_configs(
    config_dict: dict[str, typing.Any],
    changes_dict: dict[str, typing.Any],
) -> dict[str, typing.Any]:
    """Merge *changes_dict* into *config_dict* in place and return it.

    Container values (set, tuple, list, dict) are combined element-wise;
    anything else is overwritten by the change value.

    Raises ConfigMergeError when an existing container value meets a change
    of a different container type.
    """
    for change_key, change_value in changes_dict.items():
        current_value = config_dict.get(change_key)

        # Check container types in the same order as before; combine via the
        # matching strategy, or overwrite for non-container values.
        for container_type, combine in (
            (set, lambda left, right: {*left, *right}),
            (tuple, lambda left, right: (*left, *right)),
            (list, lambda left, right: [*left, *right]),
            (dict, lambda left, right: {**left, **right}),
        ):
            if isinstance(current_value, container_type):
                if not isinstance(change_value, container_type):
                    raise exceptions.ConfigMergeError(f"Can't merge {current_value} and {change_value}")
                config_dict[change_key] = combine(current_value, change_value)
                break
        else:
            config_dict[change_key] = change_value

    return config_dict
94
+
95
+
96
def is_valid_path(maybe_path: str) -> bool:
    """Return True when *maybe_path* matches VALID_PATH_PATTERN (e.g. "/api/health" or "/api/health/")."""
    return re.fullmatch(VALID_PATH_PATTERN, maybe_path) is not None
98
+
99
+
100
def optimize_exclude_paths(
    exclude_endpoints: typing.Iterable[str],
) -> typing.Collection[str]:
    """Pick the fastest container for `in` checks over *exclude_endpoints*.

    Small collections stay tuples: linearly scanning a tuple of fewer than
    10 elements beats hashing (empirical crossover). Larger collections
    become sets for O(1) membership tests.
    """
    small_collection_limit = 10  # noqa: PLR2004 - measured crossover point
    as_tuple: typing.Collection[str] = tuple(exclude_endpoints)
    if len(as_tuple) < small_collection_limit:
        return as_tuple
    return set(as_tuple)
File without changes
@@ -0,0 +1,62 @@
1
+ from __future__ import annotations
2
+ import abc
3
+ import dataclasses
4
+ import typing
5
+
6
+ import pydantic
7
+
8
+ from microbootstrap.helpers import merge_pydantic_configs
9
+
10
+
11
+ if typing.TYPE_CHECKING:
12
+ from microbootstrap.console_writer import ConsoleWriter
13
+
14
+
15
# Config type handled by a given Instrument subclass.
InstrumentConfigT = typing.TypeVar("InstrumentConfigT", bound="BaseInstrumentConfig")
# Framework application object (litestar/fastapi/faststream app).
ApplicationT = typing.TypeVar("ApplicationT", bound=typing.Any)


class BaseInstrumentConfig(pydantic.BaseModel):
    """Base config for all instruments; allows arbitrary (non-pydantic) field types."""

    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
21
+
22
+
23
@dataclasses.dataclass
class Instrument(abc.ABC, typing.Generic[InstrumentConfigT]):
    """Base class for bootstrap instruments.

    Subclasses declare a human-readable ``instrument_name`` and a
    ``ready_condition`` hint that is displayed when the instrument is
    disabled, and implement ``is_ready`` / ``get_config_type``.
    """

    instrument_config: InstrumentConfigT
    instrument_name: typing.ClassVar[str]
    ready_condition: typing.ClassVar[str]

    def configure_instrument(
        self,
        incoming_config: InstrumentConfigT,
    ) -> None:
        """Merge *incoming_config* on top of the current config."""
        self.instrument_config = merge_pydantic_configs(self.instrument_config, incoming_config)

    def write_status(self, console_writer: ConsoleWriter) -> None:
        """Report enabled/disabled state (with the disable reason) to the console writer."""
        console_writer.write_instrument_status(
            self.instrument_name,
            is_enabled=self.is_ready(),
            disable_reason=None if self.is_ready() else self.ready_condition,
        )

    @abc.abstractmethod
    def is_ready(self) -> bool: ...

    @classmethod
    @abc.abstractmethod
    def get_config_type(cls) -> type[InstrumentConfigT]:
        raise NotImplementedError

    def bootstrap(self) -> None:
        """Hook: apply the instrument's side effects. No-op by default."""
        return None

    def teardown(self) -> None:
        """Hook: undo bootstrap side effects. No-op by default."""
        return None

    def bootstrap_before(self) -> dict[str, typing.Any]:
        """Add some framework-related parameters to final bootstrap result before application creation."""
        return {}

    def bootstrap_after(self, application: ApplicationT) -> ApplicationT:
        """Add some framework-related parameters to final bootstrap result after application creation."""
        return application
@@ -0,0 +1,29 @@
1
+ from __future__ import annotations
2
+
3
+ import pydantic
4
+
5
+ from microbootstrap.instruments.base import BaseInstrumentConfig, Instrument
6
+
7
+
8
class CorsConfig(BaseInstrumentConfig):
    """CORS settings consumed by the CorsInstrument."""

    cors_allowed_origins: list[str] = pydantic.Field(default_factory=list)
    cors_allowed_methods: list[str] = pydantic.Field(default_factory=list)
    cors_allowed_headers: list[str] = pydantic.Field(default_factory=list)
    cors_exposed_headers: list[str] = pydantic.Field(default_factory=list)
    cors_allowed_credentials: bool = False
    # Alternative to explicit origins: a regex matched against the Origin header.
    cors_allowed_origin_regex: str | None = None
    # Lifetime (seconds) of cached preflight responses.
    cors_max_age: int = 600
16
+
17
+
18
class CorsInstrument(Instrument[CorsConfig]):
    """Enabled only when allowed origins or an origin regex are configured."""

    instrument_name = "Cors"
    ready_condition = "Provide allowed origins or regex"

    def is_ready(self) -> bool:
        config = self.instrument_config
        return bool(config.cors_allowed_origins) or bool(config.cors_allowed_origin_regex)

    @classmethod
    def get_config_type(cls) -> type[CorsConfig]:
        return CorsConfig
@@ -0,0 +1,39 @@
1
+ from __future__ import annotations
2
+
3
+ import typing_extensions
4
+
5
+ from microbootstrap.instruments.base import BaseInstrumentConfig, Instrument
6
+
7
+
8
class HealthCheckTypedDict(typing_extensions.TypedDict, total=False):
    """Shape of the payload produced by HealthChecksInstrument.render_health_check_data."""

    service_version: str | None
    service_name: str | None
    health_status: bool
12
+
13
+
14
class HealthChecksConfig(BaseInstrumentConfig):
    """Settings for the health-check endpoint."""

    service_name: str = "micro-service"
    service_version: str = "1.0.0"

    health_checks_enabled: bool = True
    health_checks_path: str = "/health/"
    # Keep the endpoint out of the generated API schema by default.
    health_checks_include_in_schema: bool = False
21
+
22
+
23
class HealthChecksInstrument(Instrument[HealthChecksConfig]):
    """Serves a static health payload with the service name and version."""

    instrument_name = "Health checks"
    ready_condition = "Set health_checks_enabled to True"

    def render_health_check_data(self) -> HealthCheckTypedDict:
        """Build the response body for the health endpoint."""
        config = self.instrument_config
        return {
            "service_version": config.service_version,
            "service_name": config.service_name,
            "health_status": True,
        }

    def is_ready(self) -> bool:
        return self.instrument_config.health_checks_enabled

    @classmethod
    def get_config_type(cls) -> type[HealthChecksConfig]:
        return HealthChecksConfig
@@ -0,0 +1,50 @@
1
+ import dataclasses
2
+ import typing
3
+
4
+ from microbootstrap import exceptions
5
+ from microbootstrap.instruments.base import Instrument, InstrumentConfigT
6
+ from microbootstrap.settings import SettingsT
7
+
8
+
9
@dataclasses.dataclass
class InstrumentBox:
    """Registry of instrument classes plus their settings-initialized instances."""

    __instruments__: list[type[Instrument[typing.Any]]] = dataclasses.field(default_factory=list)
    __initialized_instruments__: list[Instrument[typing.Any]] = dataclasses.field(default_factory=list)

    def initialize(self, settings: SettingsT) -> None:
        """Instantiate every registered instrument class from *settings*."""
        settings_dump = settings.model_dump()
        initialized: list[Instrument[typing.Any]] = []
        for instrument_type in self.__instruments__:
            instrument_config = instrument_type.get_config_type()(**settings_dump)
            initialized.append(instrument_type(instrument_config))
        self.__initialized_instruments__ = initialized

    def configure_instrument(
        self,
        instrument_config: InstrumentConfigT,
    ) -> None:
        """Apply *instrument_config* to the instrument whose config type matches.

        Raises MissingInstrumentError when no initialized instrument accepts
        this config type.
        """
        for instrument in self.__initialized_instruments__:
            if isinstance(instrument_config, instrument.get_config_type()):
                instrument.configure_instrument(instrument_config)
                return

        raise exceptions.MissingInstrumentError(
            f"Instrument for config {instrument_config.__class__.__name__} is not supported yet.",
        )

    def extend_instruments(
        self,
        instrument_class: type[Instrument[InstrumentConfigT]],
    ) -> type[Instrument[InstrumentConfigT]]:
        """Extend list of instruments, excluding one whose config is already in use."""
        replaced_config_type = instrument_class.get_config_type()
        self.__instruments__ = [
            existing
            for existing in self.__instruments__
            if existing.get_config_type() is not replaced_config_type
        ]
        self.__instruments__.append(instrument_class)
        return instrument_class

    @property
    def instruments(self) -> list[Instrument[typing.Any]]:
        """Initialized instrument instances (empty before initialize())."""
        return self.__initialized_instruments__
@@ -0,0 +1,202 @@
1
+ from __future__ import annotations
2
+ import logging
3
+ import logging.handlers
4
+ import sys
5
+ import time
6
+ import typing
7
+ import urllib.parse
8
+
9
+ import orjson
10
+ import pydantic
11
+ import structlog
12
+ import typing_extensions
13
+ from opentelemetry import trace
14
+
15
+ from microbootstrap.instruments.base import BaseInstrumentConfig, Instrument
16
+
17
+
18
+ if typing.TYPE_CHECKING:
19
+ import fastapi
20
+ import litestar
21
+ from structlog.typing import EventDict, WrappedLogger
22
+
23
+
24
# ASGI scope mapping (both litestar and fastapi requests expose one).
ScopeType = typing.MutableMapping[str, typing.Any]

# Dedicated structlog logger for request access records.
access_logger: typing.Final = structlog.get_logger("api.access")
27
+
28
+
29
def make_path_with_query_string(scope: ScopeType) -> str:
    """Reconstruct the request path from an ASGI scope, appending the query string when present.

    The path is percent-quoted; the query string is expected as ASCII bytes.
    """
    quoted_path = urllib.parse.quote(scope["path"])
    query_string = scope["query_string"]
    if not query_string:
        return quoted_path
    return f"{quoted_path}?{query_string.decode('ascii')}"
34
+
35
+
36
def fill_log_message(
    log_level: str,
    request: litestar.Request[typing.Any, typing.Any, typing.Any] | fastapi.Request,
    status_code: int,
    start_time: int,
) -> None:
    """Emit one structured access-log record for a finished request.

    *start_time* is a perf_counter_ns() timestamp; the logged duration is in
    nanoseconds. *log_level* names the access_logger method to use
    ("info", "warning", ...).
    """
    elapsed_ns = time.perf_counter_ns() - start_time
    url_with_query = make_path_with_query_string(typing.cast("ScopeType", request.scope))
    client = request.client
    client_ip = client.host if client is not None else None
    client_port = client.port if client is not None else None
    emit = getattr(access_logger, log_level)
    emit(
        http={
            "url": url_with_query,
            "status_code": status_code,
            "method": request.method,
            "version": request.scope["http_version"],
        },
        network={"client": {"ip": client_ip, "port": client_port}},
        duration=elapsed_ns,
    )
59
+
60
+
61
def tracer_injection(_: WrappedLogger, __: str, event_dict: EventDict) -> EventDict:
    """structlog processor: attach the current OTEL span/trace ids under "tracing".

    When no span is recording, "tracing" is set to an empty dict so the key
    is always present in the event.
    """
    current_span = trace.get_current_span()
    if current_span.is_recording():
        span_context = current_span.get_span_context()
        event_dict["tracing"] = {
            "span_id": trace.format_span_id(span_context.span_id),
            "trace_id": trace.format_trace_id(span_context.trace_id),
        }
    else:
        event_dict["tracing"] = {}
    return event_dict
73
+
74
+
75
# Processors run before rendering, both for structlog-native and foreign
# (stdlib logging) records; order matters — tracer_injection must run before
# rendering so span ids land in the event dict.
STRUCTLOG_PRE_CHAIN_PROCESSORS: typing.Final[list[typing.Any]] = [
    structlog.stdlib.add_log_level,
    structlog.stdlib.add_logger_name,
    tracer_injection,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
]
85
+
86
+
87
def _serialize_log_with_orjson_to_string(value: typing.Any, **kwargs: typing.Any) -> str:  # noqa: ANN401
    """Serialize a log event with orjson (bytes) and decode to str, as JSONRenderer expects a string."""
    return orjson.dumps(value, **kwargs).decode()


# Final rendering step shared by structlog-native and foreign log records.
STRUCTLOG_FORMATTER_PROCESSOR: typing.Final = structlog.processors.JSONRenderer(
    serializer=_serialize_log_with_orjson_to_string
)
94
+
95
+
96
class MemoryLoggerFactory(structlog.stdlib.LoggerFactory):
    """Logger factory that buffers records in memory before flushing to a stream.

    Each produced logger gets a MemoryHandler wrapping a StreamHandler:
    records accumulate up to ``logging_buffer_capacity`` and are flushed on
    overflow or when a record at/above ``logging_flush_level`` arrives.
    """

    def __init__(
        self,
        *args: typing.Any,  # noqa: ANN401
        logging_buffer_capacity: int,
        logging_flush_level: int,
        logging_log_level: int,
        log_stream: typing.Any = sys.stdout,  # noqa: ANN401
        **kwargs: typing.Any,  # noqa: ANN401
    ) -> None:
        super().__init__(*args, **kwargs)
        self.logging_buffer_capacity = logging_buffer_capacity
        self.logging_flush_level = logging_flush_level
        self.logging_log_level = logging_log_level
        self.log_stream = log_stream

    def __call__(self, *args: typing.Any) -> logging.Logger:  # noqa: ANN401
        """Create the logger via the base factory, then attach the buffered handler."""
        logger: typing.Final = super().__call__(*args)
        stream_handler: typing.Final = logging.StreamHandler(stream=self.log_stream)
        handler: typing.Final = logging.handlers.MemoryHandler(
            capacity=self.logging_buffer_capacity,
            flushLevel=self.logging_flush_level,
            target=stream_handler,
        )
        logger.addHandler(handler)
        logger.setLevel(self.logging_log_level)
        # Don't double-log through the root logger's handlers.
        logger.propagate = False
        return logger
124
+
125
+
126
class LoggingConfig(BaseInstrumentConfig):
    """Settings for structlog-based JSON logging."""

    # In debug mode the logging instrument is disabled (see LoggingInstrument.is_ready).
    service_debug: bool = True

    logging_log_level: int = logging.INFO
    # Level that forces the in-memory buffer to flush immediately.
    logging_flush_level: int = logging.ERROR
    # Number of records buffered before a flush.
    logging_buffer_capacity: int = 10
    logging_extra_processors: list[typing.Any] = pydantic.Field(default_factory=list)
    # Loggers whose handlers are removed so records flow through structlog instead.
    logging_unset_handlers: list[str] = pydantic.Field(
        default_factory=lambda: ["uvicorn", "uvicorn.access"],
    )
    logging_exclude_endpoints: list[str] = pydantic.Field(default_factory=lambda: ["/health/", "/metrics"])
    logging_turn_off_middleware: bool = False

    @pydantic.model_validator(mode="after")
    def remove_trailing_slashes_from_logging_exclude_endpoints(self) -> typing_extensions.Self:
        # Normalize so "/health" and "/health/" both match the excluded endpoint.
        self.logging_exclude_endpoints = [
            one_endpoint.removesuffix("/") for one_endpoint in self.logging_exclude_endpoints
        ]
        return self
145
+
146
+
147
class LoggingInstrument(Instrument[LoggingConfig]):
    """Configures structlog JSON logging for the service and foreign (stdlib) loggers."""

    instrument_name = "Logging"
    ready_condition = "Works only in non-debug mode"

    def is_ready(self) -> bool:
        # Disabled while service_debug is True.
        return not self.instrument_config.service_debug

    def teardown(self) -> None:
        # Restore structlog's library defaults.
        structlog.reset_defaults()

    def _unset_handlers(self) -> None:
        """Strip handlers from the configured loggers (e.g. uvicorn) so they don't bypass structlog."""
        for unset_handlers_logger in self.instrument_config.logging_unset_handlers:
            logging.getLogger(unset_handlers_logger).handlers = []

    def _configure_structlog_loggers(self) -> None:
        """Configure structlog-native loggers: pre-chain + extra processors, then JSON rendering."""
        structlog.configure(
            processors=[
                structlog.stdlib.filter_by_level,
                *STRUCTLOG_PRE_CHAIN_PROCESSORS,
                *self.instrument_config.logging_extra_processors,
                STRUCTLOG_FORMATTER_PROCESSOR,
            ],
            context_class=dict,
            logger_factory=MemoryLoggerFactory(
                logging_buffer_capacity=self.instrument_config.logging_buffer_capacity,
                logging_flush_level=self.instrument_config.logging_flush_level,
                logging_log_level=self.instrument_config.logging_log_level,
            ),
            wrapper_class=structlog.stdlib.BoundLogger,
            cache_logger_on_first_use=True,
        )

    def _configure_foreign_loggers(self) -> None:
        """Route stdlib-logging records through the same pre-chain and JSON renderer via the root logger."""
        root_logger: typing.Final = logging.getLogger()
        stream_handler: typing.Final = logging.StreamHandler(sys.stdout)
        stream_handler.setFormatter(
            structlog.stdlib.ProcessorFormatter(
                foreign_pre_chain=STRUCTLOG_PRE_CHAIN_PROCESSORS,
                processors=[
                    structlog.stdlib.ProcessorFormatter.remove_processors_meta,
                    STRUCTLOG_FORMATTER_PROCESSOR,
                ],
                logger=root_logger,
            )
        )
        root_logger.addHandler(stream_handler)
        root_logger.setLevel(self.instrument_config.logging_log_level)

    def bootstrap(self) -> None:
        # Order matters: drop foreign handlers first, then install structlog,
        # then wire the root logger for foreign records.
        self._unset_handlers()
        self._configure_structlog_loggers()
        self._configure_foreign_loggers()

    @classmethod
    def get_config_type(cls) -> type[LoggingConfig]:
        return LoggingConfig