sycommon-python-lib 0.1.55__py3-none-any.whl → 0.1.55a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,96 +0,0 @@
- from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
- from sqlalchemy import text
-
- from sycommon.config.Config import SingletonMeta
- from sycommon.config.DatabaseConfig import DatabaseConfig, convert_dict_keys
- from sycommon.logging.kafka_log import SYLogger
- from sycommon.logging.async_sql_logger import AsyncSQLTraceLogger
- from sycommon.synacos.nacos_service import NacosService
-
-
- class AsyncDatabaseService(metaclass=SingletonMeta):
-     _engine = None
-
-     @staticmethod
-     async def setup_database(config: dict, shareConfigKey: str):
-         common = NacosService(config).share_configs.get(shareConfigKey, {})
-         if common and common.get('spring', {}).get('datasource', None):
-             databaseConfig = common.get('spring', {}).get('datasource', None)
-             converted_dict = convert_dict_keys(databaseConfig)
-             db_config = DatabaseConfig.model_validate(converted_dict)
-
-             # Initialize the DatabaseConnector (pass in the config)
-             connector = AsyncDatabaseConnector(db_config)
-
-             # Assign the engine
-             AsyncDatabaseService._engine = connector.engine
-
-             # Run the async connection test
-             if not await connector.test_connection():
-                 raise Exception("Database connection test failed")
-
-     @staticmethod
-     def engine():
-         return AsyncDatabaseService._engine
-
-
- class AsyncDatabaseConnector(metaclass=SingletonMeta):
-     def __init__(self, db_config: DatabaseConfig):
-         # Extract the database connection info from DatabaseConfig
-         self.db_user = db_config.username
-         self.db_password = db_config.password
-
-         # Extract host, port and database name from the URL
-         url_parts = db_config.url.split('//')[1].split('/')
-         host_port = url_parts[0].split(':')
-         self.db_host = host_port[0]
-         self.db_port = host_port[1]
-         self.db_name = url_parts[1].split('?')[0]
-
-         # Extract the query parameters from the URL
-         params_str = url_parts[1].split('?')[1] if len(
-             url_parts[1].split('?')) > 1 else ''
-         params = {}
-         for param in params_str.split('&'):
-             if param:
-                 key, value = param.split('=')
-                 params[key] = value
-
-         # Remove the specified parameters from params
-         for key in ['useUnicode', 'characterEncoding', 'serverTimezone', 'zeroDateTimeBehavior']:
-             if key in params:
-                 del params[key]
-
-         # Build the database connection URL
-         # Note: mysqlconnector is replaced with aiomysql here for async support
-         self.db_url = f'mysql+aiomysql://{self.db_user}:{self.db_password}@{self.db_host}:{self.db_port}/{self.db_name}'
-
-         SYLogger.info(f"Database URL: {self.db_url}")
-
-         # Tuned connection pool configuration
-         # create_async_engine is used instead of create_engine
-         self.engine = create_async_engine(
-             self.db_url,
-             connect_args=params,
-             pool_size=10,  # connection pool size
-             max_overflow=20,  # maximum overflow connections
-             pool_timeout=30,  # checkout timeout (seconds)
-             pool_recycle=3600,  # connection recycle time (seconds)
-             pool_pre_ping=True,  # validate each connection before checkout
-             echo=False,  # do not echo SQL statements
-         )
-
-         # Register the SQL logging interceptor (note: SQLTraceLogger must support async engines, or the logging logic may need adjustment)
-         # Assumes SQLTraceLogger.setup_sql_logging can handle an AsyncEngine
-         AsyncSQLTraceLogger.setup_sql_logging(self.engine)
-
-     async def test_connection(self):
-         try:
-             # Async context manager
-             async with self.engine.connect() as connection:
-                 # Run a trivial query
-                 await connection.execute(text("SELECT 1"))
-                 return True
-         except Exception as e:
-             SYLogger.error(f"Database connection test failed: {e}")
-             return False
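
A minimal usage sketch of the deleted AsyncDatabaseService, assuming a reachable Nacos server; the import path, the config dict shape, and the share-config key "common-mysql" are illustrative assumptions, not part of the package:

import asyncio

from sqlalchemy import text

from sycommon.database.async_database import AsyncDatabaseService  # import path assumed

async def main():
    config = {"nacos": {"server": "127.0.0.1:8848"}}  # hypothetical Nacos client config
    # Reads spring.datasource from the Nacos share config, builds the
    # aiomysql engine, and runs the "SELECT 1" connection test
    await AsyncDatabaseService.setup_database(config, "common-mysql")

    # The engine accessor returns the AsyncEngine created during setup
    async with AsyncDatabaseService.engine().connect() as conn:
        print((await conn.execute(text("SELECT 1"))).scalar())

asyncio.run(main())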
@@ -1,65 +0,0 @@
- from sqlalchemy import event
- from sqlalchemy.ext.asyncio import AsyncEngine
- from sycommon.logging.kafka_log import SYLogger
- import time
- from datetime import datetime
- from decimal import Decimal
-
-
- class AsyncSQLTraceLogger:
-     @staticmethod
-     def setup_sql_logging(engine):
-         """
-         Register event listeners on a SQLAlchemy async engine.
-         Note: listeners must be attached to engine.sync_engine; an AsyncEngine cannot be listened to directly.
-         """
-         def serialize_params(params):
-             """Serialize parameters of special types."""
-             if isinstance(params, (list, tuple)):
-                 return [serialize_params(p) for p in params]
-             elif isinstance(params, dict):
-                 return {k: serialize_params(v) for k, v in params.items()}
-             elif isinstance(params, datetime):
-                 return params.isoformat()
-             elif isinstance(params, Decimal):
-                 return float(params)
-             else:
-                 return params
-
-         # ========== Core change ==========
-         # Listeners must go through engine.sync_engine to reach the underlying sync engine
-         target = engine.sync_engine
-
-         @event.listens_for(target, "after_cursor_execute")
-         def after_cursor_execute(
-             conn, cursor, statement, parameters, context, executemany
-         ):
-             try:
-                 # Read the start time from the connection options
-                 # (conn is the synchronous connection object here)
-                 start_time = conn.info.get('_start_time') or \
-                     conn._execution_options.get("_start_time", time.time())
-
-                 execution_time = (time.time() - start_time) * 1000
-
-                 sql_log = {
-                     "type": "SQL",
-                     "statement": statement,
-                     "parameters": serialize_params(parameters),
-                     "execution_time_ms": round(execution_time, 2),
-                 }
-
-                 # Note: SYLogger.info must be thread-safe or non-blocking, otherwise async performance may suffer
-                 SYLogger.info(f"SQL executed: {sql_log}")
-             except Exception as e:
-                 SYLogger.error(f"SQL log handling failed: {str(e)}")
-
-         @event.listens_for(target, "before_cursor_execute")
-         def before_cursor_execute(
-             conn, cursor, statement, parameters, context, executemany
-         ):
-             try:
-                 # Record the start time in the execution options
-                 conn = conn.execution_options(_start_time=time.time())
-             except Exception as e:
-                 SYLogger.error(f"Failed to record SQL start time: {str(e)}")
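
The sync_engine indirection above applies to any AsyncEngine; a standalone sketch of the same wiring, where the in-memory SQLite URL is a placeholder (it requires the aiosqlite driver) and the import path is assumed:

import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

from sycommon.logging.async_sql_logger import AsyncSQLTraceLogger  # import path assumed

async def main():
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")  # placeholder URL

    # Internally attaches before/after_cursor_execute listeners to engine.sync_engine
    AsyncSQLTraceLogger.setup_sql_logging(engine)

    async with engine.connect() as conn:
        await conn.execute(text("SELECT 1"))  # emits one "SQL executed" log entry

    await engine.dispose()

asyncio.run(main())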
@@ -1,23 +0,0 @@
- import logging
-
-
- def setup_logger_levels():
-     """Configure per-module log levels to suppress irrelevant INFO/DEBUG output."""
-     # Nacos client: WARNING and above only (silences INFO-level heartbeat/registration logs)
-     logging.getLogger("nacos.client").setLevel(logging.WARNING)
-
-     # Kafka Python client: silence INFO-level connection/version-probe logs
-     logging.getLogger("kafka.conn").setLevel(logging.WARNING)
-     logging.getLogger("kafka.producer").setLevel(logging.WARNING)
-
-     # Uvicorn/FastAPI: silence startup/app-init INFO logs (keep ERROR/WARNING)
-     # logging.getLogger("uvicorn").setLevel(logging.WARNING)
-     # logging.getLogger("uvicorn.access").setLevel(logging.WARNING)  # silence access logs
-     # logging.getLogger("uvicorn.error").setLevel(logging.ERROR)  # keep errors only
-
-     # Custom root logging (e.g. sync database/listener initialization): silence INFO
-     logging.getLogger("root").setLevel(logging.WARNING)
-
-     # RabbitMQ-related loggers (if a dedicated logger exists)
-     logging.getLogger("pika").setLevel(logging.WARNING)  # when using the pika client
-     logging.getLogger("rabbitmq").setLevel(logging.WARNING)
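
These levels only filter what each named logger emits; handlers and the effective root level still come from the application's own logging setup. A short sketch of the intended call order (the import path is assumed):

import logging

from sycommon.logging.logger_levels import setup_logger_levels  # import path assumed

logging.basicConfig(level=logging.INFO)  # application-level config first
setup_logger_levels()                    # then clamp the noisy third-party loggers

logging.getLogger("myapp").info("still visible")        # root stays at INFO
logging.getLogger("kafka.conn").info("now suppressed")  # below WARNING, dropped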
@@ -1,97 +0,0 @@
- def merge_headers(
-         source_headers,  # source headers (supports dict / MutableHeaders / key-value list / tuples)
-         target_headers,  # target headers (existing values kept; same-name keys override source)
-         keep_keys=None,  # set of keys to keep (None keeps all)
-         delete_keys={'content-length', 'accept',
-                      'content-type', 'sec-fetch-mode',
-                      'sec-fetch-dest', 'sec-fetch-site',
-                      'pragma', 'cache-control',
-                      'accept-encoding', 'priority'},  # source keys to drop
-         encoding='utf-8'  # character encoding (for bytes conversion)
- ) -> dict:
-     """
-     Merge headers. Final rules:
-     1. All keys are lower-cased for comparison (fully case-insensitive)
-     2. Same-name keys in target_headers fully override source_headers (the source key is ignored)
-     3. delete_keys applies to source_headers: listed source keys are never added (new or not)
-     4. Keys in target_headers are always kept, even when listed in delete_keys
-     5. bytes and other key/value types are converted to strings automatically
-     6. All keys in the result are lower-case
-     """
-     # Initialize and normalize both sets to lower-case
-     keep_keys = {k.lower() for k in keep_keys} if keep_keys else set()
-     delete_keys = {k.lower() for k in delete_keys} if delete_keys else set()
-
-     # Fix 1: accept MutableHeaders / plain dict / None for target_headers
-     if target_headers is None:
-         target_dict = {}
-     elif hasattr(target_headers, 'items'):
-         # Supports MutableHeaders/Headers/plain dicts (all expose items())
-         target_dict = dict(target_headers.items())
-     else:
-         # Fallback: convert an iterable to a dict
-         target_dict = dict(target_headers) if isinstance(
-             target_headers, (list, tuple)) else {}
-
-     # Normalize target_headers: lower-case keys, keep existing values
-     processed_headers = {k.lower(): v for k, v in target_dict.items()}
-     target_original_keys = set(processed_headers.keys())
-
-     # Fix 2: normalize source_headers into an iterable of key-value pairs
-     # Step 1: convert source_headers into a standard list of pairs
-     if source_headers is None:
-         source_kv_list = []
-     elif hasattr(source_headers, 'items'):
-         # dict/MutableHeaders -> list of key-value pairs
-         source_kv_list = list(source_headers.items())
-     elif isinstance(source_headers, (list, tuple)):
-         # list/tuple -> keep only well-formed pairs (tuples/lists of length 2)
-         source_kv_list = []
-         for item in source_headers:
-             if isinstance(item, (list, tuple)) and len(item) == 2:
-                 source_kv_list.append(item)
-             else:
-                 # Skip malformed entries (length != 2) to avoid unpacking errors
-                 continue
-     else:
-         # Any other type -> empty list (avoids iteration errors)
-         source_kv_list = []
-
-     # Convert and merge the source headers (iterate over the normalized pairs)
-     for key, value in source_kv_list:
-         # Convert the key to a string and lower-case it for comparison
-         if not isinstance(key, str):
-             try:
-                 key = key.decode(encoding, errors='replace') if isinstance(
-                     key, bytes) else str(key)
-             except Exception:
-                 # Edge case: skip keys that cannot be converted
-                 continue
-
-         key_lower = key.lower()
-
-         # Convert the value to a string
-         if not isinstance(value, str):
-             try:
-                 value = value.decode(encoding, errors='replace') if isinstance(
-                     value, bytes) else str(value)
-             except Exception:
-                 # Values that cannot be converted become empty strings
-                 value = ""
-
-         # Filter 1: source key is in the delete set -> skip
-         if key_lower in delete_keys:
-             continue
-
-         # Filter 2: when keep_keys is set, keep only the listed keys
-         if keep_keys and key_lower not in keep_keys:
-             continue
-
-         # Filter 3: target already has this key -> skip (target overrides source)
-         if key_lower in target_original_keys:
-             continue
-
-         # Add only qualifying key-value pairs (final keys are lower-case)
-         processed_headers[key_lower] = value
-
-     return processed_headers
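
A worked example of the merge rules above (header values are hypothetical; the import path is assumed). Bytes pairs are decoded, Content-Type falls into the default delete_keys, and the target's Authorization wins over the source's:

from sycommon.tools.headers import merge_headers  # import path assumed

source = [(b"X-Trace-Id", b"abc123"),     # bytes pair: decoded, then kept
          ("Content-Type", "text/html"),  # in the default delete_keys: dropped
          ("Authorization", "Bearer s")]  # same key in target: dropped
target = {"Authorization": "Bearer t"}

print(merge_headers(source, target))
# {'authorization': 'Bearer t', 'x-trace-id': 'abc123'}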