otel-declarative 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. otel_declarative/Config/__init__.py +0 -0
  2. otel_declarative/Config/extraction_config.py +79 -0
  3. otel_declarative/Engines/Strategies/__init__.py +0 -0
  4. otel_declarative/Engines/Strategies/converter_strategies.py +120 -0
  5. otel_declarative/Engines/__init__.py +0 -0
  6. otel_declarative/Engines/converter_registry.py +140 -0
  7. otel_declarative/Engines/generic_extractor.py +96 -0
  8. otel_declarative/Engines/log_processors.py +191 -0
  9. otel_declarative/Engines/model_registry.py +90 -0
  10. otel_declarative/Engines/object_hydrator.py +126 -0
  11. otel_declarative/Engines/path_resolver.py +111 -0
  12. otel_declarative/Enums/__init__.py +0 -0
  13. otel_declarative/Enums/converter_types.py +42 -0
  14. otel_declarative/Enums/extraction_source.py +19 -0
  15. otel_declarative/Factories/__init__.py +0 -0
  16. otel_declarative/Factories/extractor_factory.py +156 -0
  17. otel_declarative/Infrastructure/__init__.py +0 -0
  18. otel_declarative/Infrastructure/async_log_engine.py +164 -0
  19. otel_declarative/Infrastructure/handlers.py +39 -0
  20. otel_declarative/Interfaces/__init__.py +0 -0
  21. otel_declarative/Interfaces/extractor.py +50 -0
  22. otel_declarative/Logging/__init__.py +0 -0
  23. otel_declarative/Logging/logger_factory.py +185 -0
  24. otel_declarative/Models/Log/__init__.py +0 -0
  25. otel_declarative/Models/Log/constants.py +47 -0
  26. otel_declarative/Models/Log/context.py +105 -0
  27. otel_declarative/Models/Log/mapping.py +94 -0
  28. otel_declarative/Models/Log/state.py +59 -0
  29. otel_declarative/Models/Log/topology.py +202 -0
  30. otel_declarative/Models/__init__.py +0 -0
  31. otel_declarative/Models/engine_states.py +135 -0
  32. otel_declarative/Models/mapping_models.py +111 -0
  33. otel_declarative/Models/summary_models.py +104 -0
  34. otel_declarative/Reporters/__init__.py +0 -0
  35. otel_declarative/Reporters/structured_reporter.py +154 -0
  36. otel_declarative/__init__.py +13 -0
  37. otel_declarative/constants.py +79 -0
  38. otel_declarative/provider.py +217 -0
  39. otel_declarative/settings.py +150 -0
  40. otel_declarative-0.1.2.dist-info/METADATA +72 -0
  41. otel_declarative-0.1.2.dist-info/RECORD +44 -0
  42. otel_declarative-0.1.2.dist-info/WHEEL +5 -0
  43. otel_declarative-0.1.2.dist-info/licenses/LICENSE +21 -0
  44. otel_declarative-0.1.2.dist-info/top_level.txt +1 -0
File without changes
@@ -0,0 +1,79 @@
1
+ import os
2
+ import yaml
3
+ from typing import Dict, Any, Optional
4
+ from pydantic import BaseModel, Field, ConfigDict, ValidationError
5
+ from otel_declarative.Models.mapping_models import LayerMappingRules
6
+ from otel_declarative.Logging.logger_factory import get_child_logger
7
+
8
+ logger = get_child_logger("otel_declarative.Config", "ObservabilityMappingConfig")
9
+
10
class ObservabilityMappingConfig(BaseModel):
    """
    Global observability mapping configuration model.

    Responsibilities:
        1. Acts as the in-memory container for declarative mapping rules.
        2. Aggregates the data-extraction rules of every logical layer.
        3. Provides a static factory that initialises the configuration from
           an external YAML file with strong-typed validation.
    """
    model_config = ConfigDict(
        frozen=True,
        populate_by_name=True,
        extra="ignore"
    )

    # Logical-layer rule table: key is the business layer identifier,
    # value is the corresponding set of extraction rules.
    layer: Dict[str, LayerMappingRules] = Field(
        default_factory=dict,
        description="业务层级到声明式提取规则的映射表"
    )

    @classmethod
    def load_from_yaml(cls, file_path: str) -> "ObservabilityMappingConfig":
        """
        Load and validate the configuration object from a YAML file.

        Steps:
            1. Read the raw YAML file content.
            2. Parse it with ``yaml.safe_load`` (safe parsing only).
            3. Run Pydantic cascade validation.

        This method never raises: every failure path logs a warning or the
        exception and degrades to an empty ("zero-rule") configuration.

        :param file_path: Absolute path of the YAML configuration file.
        :return: A validated ObservabilityMappingConfig instance.
        """
        # Path sanity checks: do not raise, just warn and fall back.
        if not os.path.isabs(file_path):
            logger.warning(f"映射配置文件路径不是绝对路径: {file_path}, 将回退至零规则模式")
            return cls(layer={})

        if not os.path.exists(file_path):
            logger.warning(f"无法找到声明式映射配置文件: {file_path}, 将回退至零规则模式")
            return cls(layer={})

        try:
            with open(file_path, "r", encoding="utf-8") as f:
                raw_data: Optional[Dict[str, Any]] = yaml.safe_load(f)

            if raw_data is None:
                logger.warning(f"配置文件为空: {file_path}, 已初始化为零规则")
                return cls(layer={})

            return cls.model_validate(raw_data)
        except (yaml.YAMLError, ValidationError):
            # Fix: the two adjacent f-strings were concatenated with no
            # separator, producing one garbled run-on message.
            logger.exception(
                f"观测性配置校验/解析失败: {file_path}, "
                f"系统将自动回退至零规则模式"
            )
            return cls(layer={})
        except Exception:
            # Fix: removed the unused `as e` binding and the pointless
            # f-prefix on a placeholder-free message.
            logger.exception("加载观测性配置时发生未预期异常, 已执行安全降级")
            return cls(layer={})

    def get_rules_for_layer(self, layer: str) -> Optional[LayerMappingRules]:
        """
        Safely fetch the extraction rule set of a given layer.

        :param layer: Logical layer identifier.
        :return: The mapping rules for the layer, or None if undefined.
        """
        return self.layer.get(layer)
File without changes
@@ -0,0 +1,120 @@
1
+ import decimal
2
+ import ipaddress
3
+ import pendulum
4
+ import humanize
5
+ import dateparser
6
+ from glom import glom
7
+ from dateparser.conf import Settings
8
+ from typing import Any, Annotated, Final, cast
9
+ from pydantic import TypeAdapter, BeforeValidator
10
+
11
class ConverterStrategies:
    """
    Stateless collection of standard conversion strategies.

    Responsibilities:
        1. Encapsulates stateless, pure-function conversion logic.
        2. Integrates Decimal / Pendulum / ipaddress and friends to handle
           edge cases with specialised libraries.
        3. Provides data cleansing that balances tolerance with precision.
    """

    # --- internal validation hooks ---

    @staticmethod
    def _tolerant_int_pre_validator(v: Any) -> Any:
        """
        Tolerant integer pre-processor.

        Strategy: use Decimal for high-precision truncation so that binary
        float approximation errors do not corrupt the conversion.

        :param v: Any input value.
        :return: The truncated integer, or the original value on failure.
        """
        try:
            if isinstance(v, (float, str)):
                return int(decimal.Decimal(str(v)).to_integral_value(rounding=decimal.ROUND_DOWN))
        except (ValueError, TypeError, decimal.InvalidOperation):
            pass
        return v

    TolerantInt: Final = Annotated[int, BeforeValidator(_tolerant_int_pre_validator)]

    # --- public strategy methods ---

    @staticmethod
    def to_string(value: Any) -> str:
        """
        Strategy: coerce the value to a string.
        """
        return TypeAdapter(str).validate_python(value)

    @staticmethod
    def to_int(value: Any) -> int:
        """
        Strategy: coerce the value to an integer (Decimal high-precision
        truncation via ``TolerantInt``).

        Fix: the return annotation previously said ``str`` although the
        adapter validates against ``TolerantInt`` (an ``int``) and returns
        an integer.

        :raises: ValidationError
        """
        return TypeAdapter(ConverterStrategies.TolerantInt).validate_python(value)

    @staticmethod
    def to_bool(value: Any) -> bool:
        """
        Strategy: coerce the value to a boolean.
        """
        return TypeAdapter(bool).validate_python(value)

    @staticmethod
    def to_datetime(value: Any, timezone: str = "UTC") -> Any:
        """
        Strategy: convert to a datetime object (hybrid engine).

        Logic:
            1. Try an exact parse with Pendulum first (RFC 3339 / ISO 8601).
            2. On failure, fall back to dateparser's fuzzy natural-language
               inference.

        :param timezone: Target timezone name.
        """
        try:
            dt = pendulum.parse(str(value), tz=timezone)
            if isinstance(dt, pendulum.DateTime):
                return dt
        except Exception:
            pass

        # Fall back to dateparser fuzzy parsing.
        settings_obj = Settings({"TO_TIMEZONE": timezone})
        if isinstance(value, (int, float)):
            return dateparser.parse(str(value), settings=cast(Any, settings_obj))
        return dateparser.parse(value, settings=cast(Any, settings_obj))

    @staticmethod
    def to_ipv4(value: Any) -> str:
        """
        Strategy: convert to a canonical IPv4 string.

        :param value: An IP string or integer.
        :return: Canonical dotted-decimal string.
        :raises: ValueError - when the IP format is invalid.
        """
        try:
            return str(ipaddress.IPv4Address(value))
        except ipaddress.AddressValueError as e:
            raise ValueError(f"无效的 IPv4 地址:{value}") from e

    @staticmethod
    def human_size(value: Any) -> str:
        """
        Strategy: convert to a human-readable byte size.
        """
        return humanize.naturalsize(value)

    @staticmethod
    def glom_transform(value: Any) -> Any:
        """
        Strategy: apply a glom structural transformation.

        Expects ``value`` to be a ``(data, spec)`` tuple; any other shape is
        returned unchanged.
        """
        if isinstance(value, tuple) and len(value) == 2:
            return glom(value[0], value[1])
        return value
File without changes
@@ -0,0 +1,140 @@
1
+ import dateparser
2
+ import humanize
3
+ from glom import glom
4
+ from pydantic import TypeAdapter
5
+ from typing import Any, Callable, Dict, Union, Set, Optional
6
+ from otel_declarative.settings import ObservabilitySettings
7
+ from otel_declarative.Enums.converter_types import StandardConverter
8
+ from otel_declarative.Logging.logger_factory import get_child_logger
9
+
10
+ logger = get_child_logger("otel_declarative.Engines", "ConverterRegistry")
11
+
12
class ConverterRegistry:
    """
    Conversion-strategy registry built on specialised libraries.

    Responsibilities:
        1. Uses dateparser / humanize / glom for complex data conversions.
        2. Uses Pydantic TypeAdapter for type coercion with circuit-breaker
           protection around every conversion.
        3. Follows IoC: behaviour is driven by the injected settings object.
    """
    def __init__(self, settings: ObservabilitySettings):
        """
        :param settings: Global observability settings object.
        """
        self._settings = settings
        self._converters: Dict[str, Callable[[Any], Any]] = {}
        # Error-fingerprint set used to suppress duplicated WARNING output.
        self._reported_errors: Set[str] = set()
        self._setup_adapters()

    def convert(self, name: str, value: Any, default: Any = None) -> Any:
        """
        Execute a guarded declarative conversion.

        :param name: Converter name (from the YAML configuration).
        :param value: Raw input data.
        :param default: Fallback value returned on failure or None result.
        """
        if value is None:
            return default

        # Normalise the key so YAML case differences are tolerated.
        lookup_key = self._normalize_key(name)
        adapter: Optional[Callable[[Any], Any]] = self._converters.get(lookup_key)

        if not adapter:
            return value

        try:
            result = adapter(value)
            return result if result is not None else default
        except Exception as e:
            error_fingerprint: str = f"{lookup_key}:{type(e).__name__}"
            if error_fingerprint not in self._reported_errors:
                logger.warning(
                    f"观测引擎断路器 | 转换器: {lookup_key} | "
                    f"错误: {type(e).__name__} | 摘要: {str(e)} | "
                    f"源数据快照: {str(value)[:100]}"
                )
                logger.debug(f"断路器触发堆栈 [{error_fingerprint}]", exc_info=True)
                self._reported_errors.add(error_fingerprint)
            return default

    def _normalize_key(self, key: Union["StandardConverter", str]) -> str:
        """
        Normalise a converter identifier.

        Accepts either a StandardConverter enum member or a raw string and
        returns the canonical lookup key.

        :param key: Converter identifier.
        :return: Normalised string key (stripped, lowercase).
        """
        raw_key: str = key.value if hasattr(key, "value") else str(key)
        return raw_key.strip().lower()

    def _setup_adapters(self) -> None:
        """
        Bind the standard converter identifiers to concrete library calls.
        """
        # --- 1. basic type adapters ---
        self.register(StandardConverter.TO_STR, self._pydantic_adapter(str))
        self.register(StandardConverter.TO_INT, self._pydantic_adapter(int))
        self.register(StandardConverter.TO_BOOL, self._pydantic_adapter(bool))

        # --- 2. time handling ---
        if self._settings.enable_dateparser:
            self.register(StandardConverter.TO_DATETIME, self._date_adapter)

        # --- 3. formatting ---
        if self._settings.enable_humanize:
            self.register(StandardConverter.HUMAN_SIZE, humanize.naturalsize)
            self.register(StandardConverter.HUMAN_DURATION, humanize.precisedelta)

        # --- 4. structural transformation ---
        if self._settings.use_glom_spec:
            self.register(StandardConverter.GLOM_TRANSFORM, self._glom_adapter)

    def register(self, name: Union[StandardConverter, str], func: Callable[[Any], Any]) -> None:
        """
        Register a converter implementation.

        :param name: Converter identifier (StandardConverter enum or str).
        :param func: The concrete conversion callable.
        :raises TypeError: When ``name`` is neither a StandardConverter nor a str.
        """
        # Fix: the original duplicated the identical _normalize_key call in
        # two isinstance branches; a single negative guard is equivalent.
        if not isinstance(name, (StandardConverter, str)):
            logger.error(f"无效的转换器名称类型:{type(name)}")
            raise TypeError(f"无效的转换器名称类型:{type(name)}")

        lookup_key = self._normalize_key(name)
        self._converters[lookup_key] = func
        logger.debug(f"转换器注册成功:{lookup_key}")

    # --- library adapter closures ---

    def _pydantic_adapter(self, target_type: type) -> Callable[[Any], Any]:
        """
        Build a type-coercion kernel based on a Pydantic TypeAdapter.
        """
        adapter = TypeAdapter(target_type)
        # Fix: the bound method is already the converter; the lambda wrapper
        # added an extra call frame for nothing.
        return adapter.validate_python

    def _date_adapter(self, value: Any) -> Any:
        """
        Parse fuzzy time strings and numeric timestamps via dateparser.
        """
        if isinstance(value, (int, float)):
            return dateparser.parse(str(value), settings={'TO_TIMEZONE': self._settings.default_timezone})
        return dateparser.parse(value, settings={'TO_TIMEZONE': self._settings.default_timezone})

    def _glom_adapter(self, value: Any) -> Any:
        """
        Declarative deep structural reshaping via glom.
        """
        # NOTE: value is expected to be a (data, spec) tuple; any other
        # shape is returned unchanged.
        if isinstance(value, tuple) and len(value) == 2:
            return glom(value[0], value[1])
        return value
@@ -0,0 +1,96 @@
1
+ from typing import Any, Dict, Optional, Type
2
+ from otel_declarative.Interfaces.extractor import IExtractor
3
+ from otel_declarative.Models.mapping_models import LayerMappingRules
4
+ from otel_declarative.Models.summary_models import InputSummary, OutputSummary
5
+ from otel_declarative.Engines.path_resolver import PathResolver
6
+ from otel_declarative.Enums.extraction_source import ExtractionSource
7
+ from otel_declarative.Logging.logger_factory import get_child_logger
8
+
9
+ logger = get_child_logger("otel_declarative.Engines", "GenericExtractor")
10
+
11
class GenericExtractor(IExtractor):
    """
    Generic declarative extractor.

    Responsibilities:
        1. Orchestration: drives the path-resolution engine according to the
           declarative rule set (LayerMappingRules).
        2. Reduction: assembles the resolved heterogeneous fragments into
           strongly typed, validated DTO models.
        3. Decoupling: contains no business logic of its own — everything is
           dictated by the injected mapping rules.
    """
    def __init__(self, layer: str, rules: LayerMappingRules, resolver: PathResolver):
        """
        :param layer: Logical layer identifier this extractor serves.
        :param rules: Strongly typed in-memory extraction rule table.
        :param resolver: Injected path-resolution engine instance.
        """
        self._layer: str = layer
        self._rules: LayerMappingRules = rules
        self._resolver: PathResolver = resolver

    def extract_input(self, args: Any, kwargs: Any) -> InputSummary:
        """
        Extract and assemble the input-phase data model.

        Logic:
            1. Build the base resolution context.
            2. Walk input_rules, delegating extraction to the resolver.
            3. Auto-complete missing metadata and apply fallbacks.
            4. Produce a validated InputSummary instance.

        :param args: Positional arguments of the business function.
        :param kwargs: Keyword arguments of the business function.
        :return: Pydantic-validated, normalised input summary model.
        """
        # Unified resolution namespace for the input phase.
        context: Dict[str, Any] = {
            ExtractionSource.ARGS.value: args,
            ExtractionSource.KWARGS.value: kwargs,
        }
        # Collect every declared field through the resolver.
        fields: Dict[str, Any] = {
            name: self._resolver.resolve(context=context, mapping=rule)
            for name, rule in self._rules.input_rules.items()
        }

        # Sniff the payload type when the rules did not yield one.
        payload_type = fields.get("payload_type")
        if not payload_type or payload_type == "Unknown":
            # By convention args[1] carries the business payload object.
            fields["payload_type"] = type(args[1]).__name__ if len(args) > 1 else "Unknown"

        # Pod-name fallback.
        if not fields.get("pod_name"):
            fields["pod_name"] = "unspecified-pod"

        return InputSummary.model_validate(fields)

    def extract_output(self, result: Any) -> OutputSummary:
        """
        Extract and assemble the output-phase data model.

        :param result: Return value produced by the business logic.
        :return: Pydantic-validated, normalised output summary model.
        """
        # Result-phase resolution context.
        context: Dict[str, Any] = {ExtractionSource.RESULTS.value: result}
        # Collect the output fields, then run the final model validation.
        fields: Dict[str, Any] = {
            name: self._resolver.resolve(context=context, mapping=rule)
            for name, rule in self._rules.output_rules.items()
        }
        return OutputSummary.model_validate(fields)

    def supports(self, layer: str, payload_type: Optional[Type[Any]] = None) -> bool:
        """
        Decide whether this instance handles the given observation layer.

        :param layer: Logical layer identifier passed by the decorator.
        :param payload_type: Runtime payload type for dynamic sniffing
            (accepted for interface compatibility; not used here).
        :return: True when the layer matches.
        """
        return self._layer == layer
@@ -0,0 +1,191 @@
1
+ import os
2
+ from typing import Any, Dict, List, Optional
3
+ from opentelemetry import trace
4
+ from otel_declarative.Models.Log.topology import InjectionLayer, RenamingLayer
5
+ from otel_declarative.Models.Log.context import LogContext
6
+ from otel_declarative.Models.Log.mapping import LogFieldMapping
7
+ from otel_declarative.Models.Log.constants import FieldContract, ILogProcessor
8
+
9
+
10
class OtelTraceContextProcessor:
    """
    OpenTelemetry trace-context processor.

    Responsibilities:
        1. Runtime extraction: pulls the TraceID / SpanID of the currently
           active span from the OTel global context.
        2. Structured injection: writes the identifiers into the event dict
           under the keys defined by the field-mapping configuration.
        3. State awareness: injects only when a valid active span exists,
           otherwise leaves the event dict untouched.
    """
    def __init__(self, field_mapping: LogFieldMapping):
        """
        :param field_mapping: Field-remapping config defining the output key names.
        """
        self._mapping: LogFieldMapping = field_mapping

    def __call__(self, logger: Any, method_name: str, event_dict: Dict[str, Any]) -> Dict[str, Any]:
        """
        Core structlog processor entry point.

        :param logger: Logger instance.
        :param method_name: Log-level method name.
        :param event_dict: Event dict currently being processed.
        :return: Event dict with the trace context injected.
        """
        # Fix: the span context was fetched twice (once in the condition,
        # once in the body); hoist it so get_span_context() runs once.
        span = trace.get_current_span()
        span_context = span.get_span_context() if span else None

        if span_context and span_context.is_valid:
            # Render TraceID / SpanID as canonical lowercase hex strings.
            #
            # Why hex instead of the raw integers:
            # 1. The OTel Python SDK stores trace_id (128-bit) and span_id
            #    (64-bit) as plain ints; observability backends (Jaeger,
            #    ELK, Grafana) cannot correlate the decimal form, breaking
            #    the trace/log linkage.
            # 2. The W3C Trace Context spec and the OpenTelemetry standard
            #    mandate a 32-character lowercase hex trace id and a
            #    16-character lowercase hex span id.
            # 3. Writing the raw int into JSON risks precision loss during
            #    parsing, turning the id into an approximation and breaking
            #    correlation entirely.
            event_dict.setdefault(self._mapping.trace_id, format(span_context.trace_id, "032x"))
            event_dict.setdefault(self._mapping.span_id, format(span_context.span_id, "016x"))
        return event_dict
49
+
50
class ServiceMetadataProcessor:
    """
    Service-identity metadata processor.

    Responsibilities:
        1. Static identity injection: stamps the service name and runtime
           environment onto every log line for distributed traceability.
        2. K8S capture: auto-detects the hosting pod name, linking the
           physical host with the logical cluster node.
        3. Strong typing: uses the LogContext model as the injection contract.
    """
    def __init__(self, log_context: LogContext, field_mapping: LogFieldMapping):
        """
        :param log_context: Strongly typed context model with the service identity.
        :param field_mapping: Field-remapping config defining the output key names.
        """
        self._context: LogContext = log_context
        self._mapping: LogFieldMapping = field_mapping
        # Pod-name resolution order: explicit context value -> HOSTNAME env
        # var (set by Kubernetes) -> static fallback. The chain always
        # yields a non-empty string.
        self._pod_name: str = getattr(self._context, "node_name", None) or os.getenv("HOSTNAME") or "unknown-node"

    def __call__(self, logger: Any, method_name: str, event_dict: Dict[str, Any]) -> Dict[str, Any]:
        """
        Inject service metadata into the structlog event dict.

        Per the structlog processor protocol this runs automatically before
        the event is rendered/emitted, enriching it with this service
        instance's identity.

        :param logger: Active structured logger instance.
        :param method_name: Name of the logging method that fired.
        :param event_dict: Structured event dict being processed.
        :return: Metadata-enriched event dict for downstream processors.
        """
        event_dict.setdefault(self._mapping.service, self._context.service_name)
        event_dict.setdefault(self._mapping.environment, self._context.environment)
        # Fix: dropped the dead `if self._pod_name` guard — the fallback
        # chain in __init__ guarantees a non-empty value.
        event_dict.setdefault(self._mapping.pod_name, self._pod_name)

        return event_dict
86
+
87
class LogFieldRenamer:
    """
    Standard log-field renaming processor.

    Responsibilities:
        1. Dynamic schema mapping: renames event-dict keys according to the
           configured field mapping, decoupling the internal schema from the
           external storage schema.
        2. No hard-coded logic: migration targets are discovered via model
           introspection.
        3. Zero-config extensibility: new fields declared on LogFieldMapping
           are picked up automatically without code changes.
    """
    def __init__(self, field_mapping: LogFieldMapping):
        """
        :param field_mapping: Field-remapping configuration object.
        """
        self._mapping: LogFieldMapping = field_mapping
        # Precomputed migration matrix:
        # {'library output key': 'user-defined output key'}
        self._migration_matrix: Dict[str, str] = {}
        # [Fix 2026.01.17] model_fields must be read from the class, not the
        # instance — instance access is deprecated since Pydantic 2.11.
        for name, info in type(self._mapping).model_fields.items():
            # Locate the FieldContract attached to this field, if any.
            contract: Optional[FieldContract] = self._find_contract(info.metadata)
            # Only fields with an explicit renamable contract participate.
            if not (contract and contract.is_renamable and contract.source_key):
                continue
            destination: str = getattr(self._mapping, name)
            if destination != contract.source_key:
                self._migration_matrix[contract.source_key] = destination

    def __call__(self, logger: Any, method_name: str, event_dict: Dict[str, Any]) -> Dict[str, Any]:
        """
        Apply the dynamic key-renaming mapping to a structured event dict.

        Logic:
            1. Iterate the precomputed migration matrix.
            2. Skip source keys absent from the event dict.
            3. Use dict.pop so each value moves atomically from old key to new.

        :param logger: structlog logger instance.
        :param method_name: Log-level method name.
        :param event_dict: Structured log event dict being processed.
        :return: Event dict after schema mapping.
        """
        if self._migration_matrix:
            for old_key, new_key in self._migration_matrix.items():
                if old_key in event_dict:
                    event_dict[new_key] = event_dict.pop(old_key)

        return event_dict

    def __repr__(self) -> str:
        """
        Debug-friendly state summary.
        """
        return f"<LogFieldRenamer matrix_size={len(self._migration_matrix)}>"

    def _find_contract(self, metadata: List[Any]) -> Optional[FieldContract]:
        """
        Retrieve the FieldContract instance from a Pydantic metadata list.

        :param metadata: Metadata list of a Pydantic field.
        :return: The contract object, or None when absent.
        """
        return next((item for item in metadata if isinstance(item, FieldContract)), None)
156
+
157
def get_otel_context_injectors(field_mapping: LogFieldMapping, log_context: LogContext) -> InjectionLayer:
    """
    Build the OTel context-injection processor chain.

    Responsibilities:
        1. Assembles the data-producing processors (trace context + service
           metadata).
        2. Sits upstream in the structlog pipeline so all metadata is
           injected before rendering.
        3. Dependency injection: hands the typed mapping config and context
           contract to the concrete processor instances.
        4. Wraps the chain in an InjectionLayer container for stronger type
           semantics.

    :param field_mapping: Mapping config defining the output field key names.
    :param log_context: Strongly typed context model with the service identity.
    :return: Injection-layer container holding the processor sequence.
    """
    trace_processor = OtelTraceContextProcessor(field_mapping=field_mapping)
    metadata_processor = ServiceMetadataProcessor(log_context=log_context, field_mapping=field_mapping)
    chain: List[ILogProcessor] = [trace_processor, metadata_processor]
    return InjectionLayer(processors=chain)
176
+
177
def get_field_renamer_processor(field_mapping: LogFieldMapping) -> RenamingLayer:
    """
    Factory for the field-renaming processor.

    Responsibilities:
        1. Builds the processor that only rewrites key names.
        2. Sits at the tail of the structlog pipeline, after EventRenamer
           and ExceptionFormatter.
        3. Isolation: independent from the injection logic, so its position
           in the processor-chain topology stays flexible.
        4. Wraps the processor in a RenamingLayer container for stronger
           type semantics.

    :param field_mapping: Field mapping config carrying FieldContract metadata.
    :return: A structlog-compatible field-renaming processor wrapped in a RenamingLayer.
    """
    return RenamingLayer(processor=LogFieldRenamer(field_mapping=field_mapping))