sycommon-python-lib 0.1.56b5__tar.gz → 0.1.56b7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/PKG-INFO +2 -1
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/pyproject.toml +2 -1
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/Config.py +16 -0
- sycommon_python_lib-0.1.56b7/src/sycommon/config/SentryConfig.py +13 -0
- sycommon_python_lib-0.1.56b7/src/sycommon/logging/kafka_log.py +309 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/exception.py +10 -16
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/timeout.py +2 -1
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/traceid.py +67 -61
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/notice/uvicorn_monitor.py +32 -27
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/rabbitmq/rabbitmq_client.py +64 -14
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/rabbitmq/rabbitmq_pool.py +59 -9
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/rabbitmq/rabbitmq_service.py +52 -41
- sycommon_python_lib-0.1.56b7/src/sycommon/sentry/sy_sentry.py +34 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/services.py +4 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/synacos/nacos_service.py +101 -82
- sycommon_python_lib-0.1.56b7/src/sycommon/tools/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon_python_lib.egg-info/PKG-INFO +2 -1
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon_python_lib.egg-info/SOURCES.txt +3 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon_python_lib.egg-info/requires.txt +1 -0
- sycommon_python_lib-0.1.56b5/src/sycommon/logging/kafka_log.py +0 -556
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/README.md +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/setup.cfg +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/command/cli.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/DatabaseConfig.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/EmbeddingConfig.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/LLMConfig.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/MQConfig.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/RerankerConfig.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/database/async_base_db_service.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/database/async_database_service.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/database/base_db_service.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/database/database_service.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/health/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/health/health_check.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/health/metrics.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/health/ping.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/llm/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/llm/embedding.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/llm/get_llm.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/llm/llm_logger.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/llm/llm_tokens.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/logging/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/logging/async_sql_logger.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/logging/logger_levels.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/logging/logger_wrapper.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/logging/sql_logger.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/context.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/cors.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/docs.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/middleware.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/monitor_memory.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/mq.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/base_http.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/log.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/mqlistener_config.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/mqmsg_model.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/mqsend_config.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/models/sso_user.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/notice/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5/src/sycommon/sse → sycommon_python_lib-0.1.56b7/src/sycommon/sentry}/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5/src/sycommon/synacos → sycommon_python_lib-0.1.56b7/src/sycommon/sse}/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/sse/event.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/sse/sse.py +0 -0
- {sycommon_python_lib-0.1.56b5/src/sycommon/tools → sycommon_python_lib-0.1.56b7/src/sycommon/synacos}/__init__.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/synacos/example.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/synacos/example2.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/synacos/feign.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/synacos/feign_client.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/synacos/param.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/tools/docs.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/tools/merge_headers.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/tools/snowflake.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/tools/timing.py +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon_python_lib.egg-info/dependency_links.txt +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon_python_lib.egg-info/entry_points.txt +0 -0
- {sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon_python_lib.egg-info/top_level.txt +0 -0
{sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sycommon-python-lib
-Version: 0.1.56b5
+Version: 0.1.56b7
 Summary: Add your description here
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown
@@ -21,6 +21,7 @@ Requires-Dist: psutil>=7.1.3
 Requires-Dist: pydantic>=2.12.5
 Requires-Dist: python-dotenv>=1.2.1
 Requires-Dist: pyyaml>=6.0.3
+Requires-Dist: sentry-sdk[fastapi]>=2.48.0
 Requires-Dist: sqlalchemy[asyncio]>=2.0.45
 Requires-Dist: starlette>=0.50.0
 Requires-Dist: uvicorn>=0.40.0
{sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/pyproject.toml
RENAMED
@@ -1,6 +1,6 @@
 [project]
 name = "sycommon-python-lib"
-version = "0.1.56-beta5"
+version = "0.1.56-beta7"
 description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -22,6 +22,7 @@ dependencies = [
     "pydantic>=2.12.5",
     "python-dotenv>=1.2.1",
     "pyyaml>=6.0.3",
+    "sentry-sdk[fastapi]>=2.48.0",
     "sqlalchemy[asyncio]>=2.0.45",
     "starlette>=0.50.0",
     "uvicorn>=0.40.0",
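The only dependency change in this release is the new `sentry-sdk[fastapi]` requirement, which backs the new `src/sycommon/sentry/sy_sentry.py` module (+34 lines, not shown in this diff). Below is a rough, hypothetical sketch of a DSN-driven setup with the upstream SDK; the function name `init_sentry`, the `environment` argument, and the sample rate are assumptions rather than code from the package.

```python
# Hypothetical sketch only; sy_sentry.py's actual wiring is not visible in this diff.
import sentry_sdk


def init_sentry(dsn: str, environment: str = "dev") -> None:
    """Initialize the Sentry SDK; FastAPI support is auto-enabled by the [fastapi] extra."""
    sentry_sdk.init(
        dsn=dsn,
        environment=environment,
        traces_sample_rate=0.1,  # assumed sampling rate, not taken from the package
    )
```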
{sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/config/Config.py
RENAMED
@@ -22,6 +22,7 @@ class Config(metaclass=SingletonMeta):
         self.llm_configs = []
         self.embedding_configs = []
         self.reranker_configs = []
+        self.sentry_configs = []
         self._process_config()
 
     def get_llm_config(self, model_name):
@@ -42,6 +43,12 @@
                 return llm
         raise ValueError(f"No configuration found for model: {model_name}")
 
+    def get_sentry_config(self, name):
+        for sentry in self.sentry_configs:
+            if sentry.get('name') == name:
+                return sentry
+        raise ValueError(f"No configuration found for server: {name}")
+
     def _process_config(self):
         llm_config_list = self.config.get('LLMConfig', [])
         for llm_config in llm_config_list:
@@ -71,6 +78,15 @@
             except ValueError as e:
                 print(f"Invalid LLM configuration: {e}")
 
+        sentry_config_list = self.config.get('SentryConfig', [])
+        for sentry_config in sentry_config_list:
+            try:
+                from sycommon.config.SentryConfig import SentryConfig
+                validated_config = SentryConfig(**sentry_config)
+                self.sentry_configs.append(validated_config.model_dump())
+            except ValueError as e:
+                print(f"Invalid Sentry configuration: {e}")
+
     def set_attr(self, share_configs: dict):
         self.config = {**self.config, **
                        share_configs.get('llm', {}), **share_configs}
sycommon_python_lib-0.1.56b7/src/sycommon/config/SentryConfig.py
ADDED
@@ -0,0 +1,13 @@
+from pydantic import BaseModel
+
+
+class SentryConfig(BaseModel):
+    name: str
+    dsn: str
+    enable: bool
+
+    @classmethod
+    def from_config(cls, server_name: str):
+        from sycommon.config.Config import Config
+        sentry_config = Config().get_sentry_config(server_name)
+        return cls(**sentry_config)
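Taken together, the `Config` changes above and the new `SentryConfig` model mean a service's merged configuration can carry a `SentryConfig` list whose entries are validated into `name`/`dsn`/`enable` records. A minimal usage sketch, assuming the `Config` singleton has already been loaded; the entry name and DSN below are made up.

```python
from sycommon.config.SentryConfig import SentryConfig

# Assumed raw shape consumed by Config()._process_config():
# {"SentryConfig": [{"name": "my-service",
#                    "dsn": "https://key@sentry.example/1",
#                    "enable": True}]}

cfg = SentryConfig.from_config("my-service")  # wraps Config().get_sentry_config("my-service")
if cfg.enable:
    print(cfg.dsn)  # typed access to the validated entry
```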
sycommon_python_lib-0.1.56b7/src/sycommon/logging/kafka_log.py
ADDED
@@ -0,0 +1,309 @@
+import os
+import sys
+import json
+import socket
+import threading
+import traceback
+import asyncio
+from datetime import datetime
+
+from kafka import KafkaProducer
+from loguru import logger
+
+from sycommon.config.Config import Config, SingletonMeta
+from sycommon.middleware.context import current_trace_id, current_headers
+from sycommon.tools.snowflake import Snowflake
+
+# Loguru color scheme configuration
+LOGURU_FORMAT = (
+    "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
+    "<level>{level: <8}</level> | "
+    "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
+    "<level>{message}</level>"
+)
+
+
+class KafkaSink:
+    """
+    Custom Loguru sink that formats log records and sends them to Kafka.
+    """
+
+    def __init__(self, service_id: str):
+        self.service_id = service_id
+        # Fetch configuration
+        from sycommon.synacos.nacos_service import NacosService
+        common = NacosService(
+            Config().config).share_configs.get("common.yml", {})
+        bootstrap_servers = common.get("log", {}).get(
+            "kafka", {}).get("servers", None)
+
+        self._producer = KafkaProducer(
+            bootstrap_servers=bootstrap_servers,
+            value_serializer=lambda v: json.dumps(
+                v, ensure_ascii=False).encode('utf-8'),
+            # Keep the original tuning options
+            max_block_ms=60000,
+            retries=5,
+            request_timeout_ms=30000,
+            compression_type='gzip',
+            batch_size=16384,
+            linger_ms=5,
+            buffer_memory=33554432,
+        )
+
+    def write(self, message):
+        """
+        Called by Loguru.
+        The message argument is a loguru.Message object; all fields are available via message.record.
+        """
+        try:
+            # 1. Get the raw log record
+            record = message.record
+
+            # 2. Extract the trace ID
+            trace_id = None
+            try:
+                # The caller may have passed a JSON string as the message
+                msg_obj = json.loads(record["message"])
+                if isinstance(msg_obj, dict):
+                    trace_id = msg_obj.get("trace_id")
+            except:
+                pass
+
+            if not trace_id:
+                trace_id = current_trace_id.get()
+
+            if not trace_id:
+                trace_id = str(Snowflake.id)
+            else:
+                trace_id = str(trace_id)
+
+            # 3. Extract exception details (if any)
+            error_detail = ""
+            if record["exception"] is not None:
+                # Loguru's exception object
+                error_detail = "".join(traceback.format_exception(
+                    record["exception"].type,
+                    record["exception"].value,
+                    record["exception"].traceback
+                ))
+            elif "error" in record["extra"]:
+                # Also support exceptions injected through other channels
+                error_detail = str(record["extra"].get("error"))
+
+            # 4. Collect host information
+            try:
+                ip = socket.gethostbyname(socket.gethostname())
+            except:
+                ip = '127.0.0.1'
+            host_name = socket.gethostname()
+
+            # 5. Collect thread/coroutine information
+            try:
+                task = asyncio.current_task()
+                thread_info = f"coroutine:{task.get_name()}" if task else f"thread:{threading.current_thread().name}"
+            except RuntimeError:
+                thread_info = f"thread:{threading.current_thread().name}"
+
+            # 6. Derive class/file name information
+            file_name = record["file"].name
+            logger_name = record["name"]
+            if logger_name and logger_name != file_name:
+                class_name = f"{file_name}:{logger_name}"
+            else:
+                class_name = file_name
+
+            # 7. Build the final Kafka log payload
+            log_entry = {
+                "traceId": trace_id,
+                "sySpanId": "",
+                "syBizId": "",
+                "ptxId": "",
+                "time": record["time"].strftime("%Y-%m-%d %H:%M:%S"),
+                "day": datetime.now().strftime("%Y.%m.%d"),
+                "msg": record["message"],
+                "detail": error_detail,
+                "ip": ip,
+                "hostName": host_name,
+                "tenantId": "",
+                "userId": "",
+                "customerId": "",
+                "env": Config().config.get('Nacos', {}).get('namespaceId', ''),
+                "priReqSource": "",
+                "reqSource": "",
+                "serviceId": self.service_id,
+                "logLevel": record["level"].name,
+                "className": class_name,
+                "method": record["function"],
+                "line": str(record["line"]),
+                "theadName": thread_info,
+                "sqlCost": 0,
+                "size": len(str(record["message"])),
+                "uid": int(Snowflake.id)
+            }
+
+            # 8. Send
+            self._producer.send("shengye-json-log", log_entry)
+
+        except Exception as e:
+            print(f"KafkaSink Error: {e}")
+
+    def flush(self):
+        if self._producer:
+            self._producer.flush(timeout=5)
+
+
+class KafkaLogger(metaclass=SingletonMeta):
+    _sink_instance = None
+
+    @staticmethod
+    def setup_logger(config: dict):
+        logger.remove()
+
+        from sycommon.synacos.nacos_service import NacosService
+        service_id = NacosService(config).service_name
+
+        KafkaLogger._sink_instance = KafkaSink(service_id)
+
+        logger.add(
+            KafkaLogger._sink_instance,
+            level="INFO",
+            format="{message}",
+            enqueue=True,
+            backtrace=True,
+            diagnose=True
+        )
+
+        logger.add(
+            sink=sys.stdout,
+            level="ERROR",
+            format=LOGURU_FORMAT,
+            colorize=True,
+            backtrace=True,
+            diagnose=True
+        )
+
+        sys.excepthook = KafkaLogger._handle_exception
+
+    @staticmethod
+    def _handle_exception(exc_type, exc_value, exc_traceback):
+        if issubclass(exc_type, KeyboardInterrupt):
+            sys.__excepthook__(exc_type, exc_value, exc_traceback)
+            return
+
+        trace_id = current_trace_id.get() or str(Snowflake.id)
+        error_msg = json.dumps({
+            "trace_id": trace_id,
+            "message": f"Uncaught exception: {exc_type.__name__}",
+            "level": "ERROR"
+        }, ensure_ascii=False)
+
+        logger.opt(exception=(exc_type, exc_value,
+                              exc_traceback)).error(error_msg)
+
+    @staticmethod
+    def close():
+        if KafkaLogger._sink_instance:
+            KafkaLogger._sink_instance.flush()
+
+
+class SYLogger:
+    @staticmethod
+    def get_trace_id():
+        return current_trace_id.get()
+
+    @staticmethod
+    def set_trace_id(trace_id: str):
+        return current_trace_id.set(trace_id)
+
+    @staticmethod
+    def reset_trace_id(token):
+        current_trace_id.reset(token)
+
+    @staticmethod
+    def get_headers():
+        return current_headers.get()
+
+    @staticmethod
+    def set_headers(headers: list[tuple[str, str]]):
+        return current_headers.set(headers)
+
+    @staticmethod
+    def reset_headers(token):
+        current_headers.reset(token)
+
+    @staticmethod
+    def _get_execution_context() -> str:
+        try:
+            task = asyncio.current_task()
+            if task:
+                return f"coroutine:{task.get_name()}"
+        except RuntimeError:
+            pass
+        return f"thread:{threading.current_thread().name}"
+
+    @staticmethod
+    def _log(msg: any, level: str = "INFO"):
+        """
+        Unified logging entry point.
+        Fix: manually extract the stack trace and embed it in the message so Kafka receives it.
+        """
+        # Serialize the message
+        if isinstance(msg, dict) or isinstance(msg, list):
+            msg_str = json.dumps(msg, ensure_ascii=False)
+        else:
+            msg_str = str(msg)
+
+        # Build the base log dictionary
+        log_dict = {
+            "trace_id": str(SYLogger.get_trace_id() or Snowflake.id),
+            "message": msg_str,
+            "level": level,
+            "threadName": SYLogger._get_execution_context()
+        }
+
+        # For ERROR level, capture the stack trace manually and add it to log_dict
+        if level == "ERROR":
+            # Get the current exception info (sys.exc_info() is valid inside an except block)
+            exc_info = sys.exc_info()
+            if exc_info and exc_info[0] is not None:
+                # Format the stack trace as a string and store it in the detail field
+                # so KafkaSink can pick up detail when it parses the message
+                tb_str = "".join(traceback.format_exception(*exc_info))
+                log_dict["detail"] = tb_str
+
+        # Convert the dict to a JSON string and hand it to Loguru
+        log_json = json.dumps(log_dict, ensure_ascii=False)
+
+        if level == "ERROR":
+            # Still use opt(exception=True) so the console prints a colorized stack trace
+            # Note: Loguru may ignore the detail string we injected,
+            # but that is fine because KafkaSink re-reads detail when parsing the message string
+            logger.opt(exception=True).error(log_json)
+        elif level == "WARNING":
+            logger.warning(log_json)
+        else:
+            logger.info(log_json)
+
+        if os.getenv('DEV-LOG', 'false').lower() == 'true':
+            print(log_json)
+
+    @staticmethod
+    def info(msg: any, *args, **kwargs):
+        SYLogger._log(msg, "INFO")
+
+    @staticmethod
+    def warning(msg: any, *args, **kwargs):
+        SYLogger._log(msg, "WARNING")
+
+    @staticmethod
+    def debug(msg: any, *args, **kwargs):
+        SYLogger._log(msg, "DEBUG")
+
+    @staticmethod
+    def error(msg: any, *args, **kwargs):
+        SYLogger._log(msg, "ERROR")
+
+    @staticmethod
+    def exception(msg: any, *args, **kwargs):
+        SYLogger._log(msg, "ERROR")
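The rewritten `kafka_log.py` replaces the 556-line module from 0.1.56b5 with a Loguru-based design: `KafkaLogger.setup_logger` installs a `KafkaSink` (INFO and above, enqueued) plus a colorized stdout sink for errors, and `SYLogger` is the facade that JSON-serializes messages together with the current trace ID before handing them to Loguru. A usage sketch, assuming the Nacos/Kafka settings are already present in the loaded config; the trace-id value is made up, since it is normally set by the trace-id middleware.

```python
from sycommon.config.Config import Config
from sycommon.logging.kafka_log import KafkaLogger, SYLogger

KafkaLogger.setup_logger(Config().config)  # installs the Kafka sink (INFO+) and a stdout sink (ERROR+)

token = SYLogger.set_trace_id("1234567890")  # hypothetical; usually done by the middleware
try:
    SYLogger.info({"event": "started"})  # dicts/lists are JSON-serialized before logging
    raise ValueError("boom")
except ValueError:
    SYLogger.error("operation failed")  # captures sys.exc_info() and ships the stack as `detail`
finally:
    SYLogger.reset_trace_id(token)
    KafkaLogger.close()  # flushes the underlying KafkaProducer
```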
{sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/exception.py
RENAMED
@@ -1,7 +1,7 @@
 from fastapi import Request, HTTPException
 from fastapi.responses import JSONResponse
 from pydantic import ValidationError
-import traceback
+from sycommon.logging.kafka_log import SYLogger
 
 
 def setup_exception_handler(app, config: dict):
@@ -15,7 +15,7 @@ def setup_exception_handler(app, config: dict):
         int_MaxBytes = int(MaxBytes) / 1024 / 1024
         return JSONResponse(
             content={
-                'code': 413, 'error': f'File size exceeds the allowed limit of {int_MaxBytes}MB.'},
+                'code': 413, 'error': f'File size exceeds the allowed limit of {int_MaxBytes}MB.', 'traceId': SYLogger.get_trace_id()},
             status_code=413
         )
 
@@ -27,7 +27,8 @@ def setup_exception_handler(app, config: dict):
             content={
                 "code": exc.status_code,
                 "message": exc.detail,
-                "path": str(request.url.path)
+                "path": str(request.url.path),
+                "traceId": SYLogger.get_trace_id()
             }
         )
 
@@ -39,7 +40,8 @@ def setup_exception_handler(app, config: dict):
             content={
                 "code": 400,
                 "message": "Parameter validation failed",
-                "details": exc.errors()
+                "details": exc.errors(),
+                "traceId": SYLogger.get_trace_id()
             }
         )
 
@@ -55,30 +57,22 @@ def setup_exception_handler(app, config: dict):
             status_code=exc.code,
             content={
                 "code": exc.code,
-                "message": exc.message
+                "message": exc.message,
+                "traceId": SYLogger.get_trace_id()
             }
         )
 
     # 5. Global exception handler (catches all unhandled exceptions)
     @app.exception_handler(Exception)
     async def global_exception_handler(request: Request, exc: Exception):
-        # Log detailed error information
-        error_msg = f"Request path: {request.url}\n"
-        error_msg += f"Error type: {type(exc).__name__}\n"
-        error_msg += f"Error message: {str(exc)}\n"
-        error_msg += f"Stack trace: {traceback.format_exc()}"
-
-        # Log the error with your logging service
-        from sycommon.logging.kafka_log import SYLogger
-        SYLogger.error(error_msg)
-
         # Return a uniformly formatted error response (details can be withheld in production)
         return JSONResponse(
             status_code=500,
             content={
                 "code": 500,
                 "message": "Internal server error, please try again later",
-                "detail": str(exc) if config.get('DEBUG', False) else "Internal Server Error"
+                "detail": str(exc) if config.get('DEBUG', False) else "Internal Server Error",
+                "traceId": SYLogger.get_trace_id()
             }
         )
 
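The net effect of the `exception.py` changes is that every JSON error payload now includes a `traceId` field, and the global handler no longer formats and logs the stack trace itself. For illustration, the 500 response body now looks roughly like this; the values below are made up.

```python
# Illustrative only; the real traceId comes from SYLogger.get_trace_id().
unhandled_error_body = {
    "code": 500,
    "message": "Internal server error, please try again later",
    "detail": "Internal Server Error",  # str(exc) is returned instead when DEBUG is enabled
    "traceId": "7303991628871237632",   # hypothetical snowflake-style id
}
```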
{sycommon_python_lib-0.1.56b5 → sycommon_python_lib-0.1.56b7}/src/sycommon/middleware/timeout.py
RENAMED
@@ -2,6 +2,7 @@
 import time
 from fastapi import Request
 from fastapi.responses import JSONResponse
+from sycommon.logging.kafka_log import SYLogger
 
 
 def setup_request_timeout_middleware(app, config: dict):
@@ -14,6 +15,6 @@ def setup_request_timeout_middleware(app, config: dict):
         response = await call_next(request)
         duration = time.time() - request.state.start_time
         if duration > REQUEST_TIMEOUT:
-            return JSONResponse(content={'code': 1, 'error': 'Request timed out'}, status_code=504)
+            return JSONResponse(content={'code': 1, 'error': 'Request timed out', 'traceId': SYLogger.get_trace_id()}, status_code=504)
         return response
     return app