mdbq 4.1.2__py3-none-any.whl → 4.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mdbq might be problematic.
- mdbq/__version__.py +1 -1
- mdbq/auth/auth_backend.py +440 -0
- mdbq/myconf/myconf.py +30 -43
- mdbq/mysql/s_query.py +1 -1
- mdbq/redis/getredis.py +33 -28
- mdbq/redis/redis_cache.py +103 -134
- mdbq/route/monitor.py +58 -201
- {mdbq-4.1.2.dist-info → mdbq-4.1.4.dist-info}/METADATA +1 -1
- {mdbq-4.1.2.dist-info → mdbq-4.1.4.dist-info}/RECORD +11 -11
- {mdbq-4.1.2.dist-info → mdbq-4.1.4.dist-info}/WHEEL +0 -0
- {mdbq-4.1.2.dist-info → mdbq-4.1.4.dist-info}/top_level.txt +0 -0
mdbq/redis/redis_cache.py
CHANGED
@@ -26,21 +26,21 @@ from typing import Optional, Dict, Any, List, Callable
 from threading import Event
 from collections import defaultdict, deque
 import redis
-from mdbq.log import mylogger
+# from mdbq.log import mylogger


-# 全局日志器
-logger = mylogger.MyLogger(
-    logging_mode='file',
-    log_level='info',
-    log_format='json',
-    max_log_size=50,
-    backup_count=5,
-    enable_async=False,
-    sample_rate=1,
-    sensitive_fields=[],
-    enable_metrics=False,
-)
+# # 全局日志器
+# logger = mylogger.MyLogger(
+# logging_mode='file',
+# log_level='info',
+# log_format='json',
+# max_log_size=50,
+# backup_count=5,
+# enable_async=False,
+# sample_rate=1,
+# sensitive_fields=[],
+# enable_metrics=False,
+# )


 class CacheStatsCollector:
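The net effect of this release is that redis_cache.py no longer logs anything: the hunk above comments out the mdbq.log import and the module-level MyLogger instance, and the hunks below comment out every logger.* call site (adding pass where an except block would otherwise be empty). If a downstream user wanted those call sites back without depending on mdbq.log, a stand-in only needs to accept the (message, extra-dict) call shape used throughout the file. The sketch below is hypothetical and not part of mdbq; DictLogger and its wiring are assumptions.

import json
import logging
from typing import Optional


class DictLogger:
    """Minimal stand-in matching calls like logger.error("msg", {'key': 'value'})."""

    def __init__(self, name: str = "redis_cache"):
        self._log = logging.getLogger(name)

    def _emit(self, level: int, message: str, extra: Optional[dict] = None) -> None:
        # Append the structured context as JSON so the dict argument is not lost.
        suffix = " | " + json.dumps(extra, ensure_ascii=False, default=str) if extra else ""
        self._log.log(level, message + suffix)

    def debug(self, message: str, extra: Optional[dict] = None) -> None:
        self._emit(logging.DEBUG, message, extra)

    def info(self, message: str, extra: Optional[dict] = None) -> None:
        self._emit(logging.INFO, message, extra)

    def warning(self, message: str, extra: Optional[dict] = None) -> None:
        self._emit(logging.WARNING, message, extra)

    def error(self, message: str, extra: Optional[dict] = None) -> None:
        self._emit(logging.ERROR, message, extra)


# logger = DictLogger()  # would satisfy the commented-out call sites in the hunks below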
@@ -105,31 +105,28 @@ class CacheStatsCollector:
                 self._check_and_submit()
             except Exception as submit_error:
                 # 统计提交失败不应影响统计记录
-                logger.error("统计数据提交检查失败,但统计记录继续", {
-                    'instance_name': self.instance_name,
-                    'process_id': self.process_id,
-                    'operation': operation,
-                    'submit_error': str(submit_error)
-                })
+                # logger.error("统计数据提交检查失败,但统计记录继续", {
+                # 'instance_name': self.instance_name,
+                # 'process_id': self.process_id,
+                # 'operation': operation,
+                # 'submit_error': str(submit_error)
+                # })
+                pass
         except Exception as e:
             # 统计记录失败不应影响缓存操作
-            logger.error("统计记录失败,但缓存操作继续", {
-                'instance_name': self.instance_name,
-                'process_id': self.process_id,
-                'operation': operation,
-                'error': str(e)
-            })
+            # logger.error("统计记录失败,但缓存操作继续", {
+            # 'instance_name': self.instance_name,
+            # 'process_id': self.process_id,
+            # 'operation': operation,
+            # 'error': str(e)
+            # })
+            pass

     def _start_background_timer(self):
         """启动后台定时提交线程"""
         if self._timer is not None:
             return # 已经启动

-        logger.debug("启动后台定时提交", {
-            'instance_name': self.instance_name,
-            'submit_interval': self.submit_interval
-        })
-
         self._timer = threading.Timer(self.submit_interval, self._background_submit)
         self._timer.daemon = True # 设置为守护线程
         self._timer.start()
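For context, _start_background_timer arms a one-shot threading.Timer daemon; the _background_submit callback (next hunk) re-arms it after each run and stops rescheduling after _max_errors consecutive failures. Below is a self-contained sketch of that rearming daemon-timer pattern with hypothetical names (PeriodicSubmitter is not part of mdbq); the daemon flag keeps the timer from blocking interpreter shutdown.

import threading


class PeriodicSubmitter:
    """Rearming daemon timer: run a submit callable every `interval` seconds."""

    def __init__(self, submit, interval: float = 30.0, max_errors: int = 3):
        self._submit = submit          # callable that pushes collected stats somewhere
        self._interval = interval
        self._max_errors = max_errors
        self._errors = 0
        self._timer = None
        self._shutdown = threading.Event()

    def start(self) -> None:
        if self._timer is None:        # idempotent, like _start_background_timer
            self._schedule()

    def _schedule(self) -> None:
        self._timer = threading.Timer(self._interval, self._run)
        self._timer.daemon = True      # never keep the process alive just for stats
        self._timer.start()

    def _run(self) -> None:
        try:
            self._submit()
            self._errors = 0           # any success resets the failure streak
        except Exception:
            self._errors += 1
            if self._errors >= self._max_errors:
                return                 # too many consecutive failures: stop rescheduling
        if not self._shutdown.is_set():
            self._schedule()           # rearm for the next interval

    def stop(self) -> None:
        self._shutdown.set()
        if self._timer is not None:
            self._timer.cancel()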
@@ -147,22 +144,22 @@ class CacheStatsCollector:

         except Exception as e:
             self._error_count += 1
-            logger.error("后台定时提交失败", {
-                'instance_name': self.instance_name,
-                'process_id': self.process_id,
-                'error': str(e),
-                'error_type': type(e).__name__,
-                'error_count': self._error_count,
-                'max_errors': self._max_errors
-            })
+            # logger.error("后台定时提交失败", {
+            # 'instance_name': self.instance_name,
+            # 'process_id': self.process_id,
+            # 'error': str(e),
+            # 'error_type': type(e).__name__,
+            # 'error_count': self._error_count,
+            # 'max_errors': self._max_errors
+            # })

             # 如果连续错误次数过多,停止定时器
             if self._error_count >= self._max_errors:
-                logger.error("后台定时器连续错误过多,停止定时提交", {
-                    'instance_name': self.instance_name,
-                    'process_id': self.process_id,
-                    'error_count': self._error_count
-                })
+                # logger.error("后台定时器连续错误过多,停止定时提交", {
+                # 'instance_name': self.instance_name,
+                # 'process_id': self.process_id,
+                # 'error_count': self._error_count
+                # })
                 return # 不再安排下一次定时器

         finally:
@@ -198,27 +195,22 @@ class CacheStatsCollector:
                 self.last_submit_time = current_time
                 self.last_operation_count = self.stats['total_operations']

-                logger.info("统计数据提交成功", {
-                    'instance_name': self.instance_name,
-                    'total_operations': self.stats['total_operations'],
-                    'new_operations': new_operations,
-                    'trigger_type': 'background_timer' if force_check else 'operation_triggered'
-                })
+                # logger.info("统计数据提交成功", {
+                # 'instance_name': self.instance_name,
+                # 'total_operations': self.stats['total_operations'],
+                # 'new_operations': new_operations,
+                # 'trigger_type': 'background_timer' if force_check else 'operation_triggered'
+                # })
             except Exception as e:
-                logger.error("统计数据提交失败", {
-                    'instance_name': self.instance_name,
-                    'error': str(e),
-                    'trigger_type': 'background_timer' if force_check else 'operation_triggered'
-                })
+                # logger.error("统计数据提交失败", {
+                # 'instance_name': self.instance_name,
+                # 'error': str(e),
+                # 'trigger_type': 'background_timer' if force_check else 'operation_triggered'
+                # })
+                pass
         else:
             # 无新操作,跳过提交但更新时间
             self.last_submit_time = current_time
-            if force_check: # 仅在后台定时器触发时记录
-                logger.debug("后台检查:无新操作,跳过提交", {
-                    'instance_name': self.instance_name,
-                    'total_operations': self.stats['total_operations']
-                })
-
     def _submit_to_mysql(self):
         """同步提交统计数据到MySQL"""
         if not self.mysql_pool:
@@ -270,12 +262,12 @@ class CacheStatsCollector:
             connection.close()

         except Exception as e:
-            logger.error("MySQL提交失败", {
-                'instance_name': self.instance_name,
-                'database': db_name,
-                'table': table_name,
-                'error': str(e)
-            })
+            # logger.error("MySQL提交失败", {
+            # 'instance_name': self.instance_name,
+            # 'database': db_name,
+            # 'table': table_name,
+            # 'error': str(e)
+            # })
             raise

     def get_stats(self) -> Dict[str, Any]:
@@ -307,9 +299,9 @@ class CacheStatsCollector:

     def shutdown(self):
         """关闭统计收集器,停止后台定时器"""
-        logger.info("关闭统计收集器", {
-            'instance_name': self.instance_name
-        })
+        # logger.info("关闭统计收集器", {
+        # 'instance_name': self.instance_name
+        # })

         # 设置关闭标志
         self._shutdown_event.set()
@@ -417,18 +409,19 @@ class SmartCacheSystem:
             try:
                 self._create_simple_stats_table()
                 self._state = CacheSystemState.MYSQL_READY
-                logger.info("统计功能已启用", {
-                    'instance_name': self.instance_name,
-                    'process_id': os.getpid()
-                })
+                # logger.info("统计功能已启用", {
+                # 'instance_name': self.instance_name,
+                # 'process_id': os.getpid()
+                # })
             except Exception as e:
-                logger.error("统计表创建失败", {
-                    'instance_name': self.instance_name,
-                    'error': str(e)
-                })
+                # logger.error("统计表创建失败", {
+                # 'instance_name': self.instance_name,
+                # 'error': str(e)
+                # })
+                pass
         else:
             self._state = CacheSystemState.ERROR
-            logger.error("Redis连接失败", {'instance_name': self.instance_name})
+            # logger.error("Redis连接失败", {'instance_name': self.instance_name})

     def _test_redis_connection(self) -> bool:
         """测试Redis连接"""
@@ -436,7 +429,7 @@ class SmartCacheSystem:
             self.redis_client.ping()
             return True
         except Exception as e:
-            logger.error("Redis连接测试失败", {'error': str(e)})
+            # logger.error("Redis连接测试失败", {'error': str(e)})
             return False

     def _create_simple_stats_table(self):
@@ -493,7 +486,7 @@ class SmartCacheSystem:
             connection.close()

         except Exception as e:
-            logger.error("统计表初始化失败", {'error': str(e)})
+            # logger.error("统计表初始化失败", {'error': str(e)})
             raise

     @property
@@ -539,11 +532,11 @@ class SmartCacheSystem:
             response_time = (time.time() - start_time) * 1000
             if self.stats_collector:
                 self.stats_collector.record_operation('errors', response_time, namespace)
-            logger.error("缓存获取失败", {
-                'key': key,
-                'namespace': namespace,
-                'error': str(e)
-            })
+            # logger.error("缓存获取失败", {
+            # 'key': key,
+            # 'namespace': namespace,
+            # 'error': str(e)
+            # })
             return default

     def set(self, key: str, value: Any, ttl: Optional[int] = None, namespace: str = "") -> bool:
@@ -574,11 +567,11 @@ class SmartCacheSystem:
             if value_size > self.config.max_value_size:
                 if self.stats_collector:
                     self.stats_collector.record_operation('errors', 0, namespace)
-                logger.warning("缓存值过大,跳过设置", {
-                    'key': key,
-                    'size': len(serialized_value),
-                    'max_size': self.config.max_value_size
-                })
+                # logger.warning("缓存值过大,跳过设置", {
+                # 'key': key,
+                # 'size': len(serialized_value),
+                # 'max_size': self.config.max_value_size
+                # })
                 return False

             result = self.redis_client.setex(cache_key, ttl, serialized_value)
@@ -592,11 +585,11 @@ class SmartCacheSystem:
             response_time = (time.time() - start_time) * 1000
             if self.stats_collector:
                 self.stats_collector.record_operation('errors', response_time, namespace)
-            logger.error("缓存设置失败", {
-                'key': key,
-                'namespace': namespace,
-                'error': str(e)
-            })
+            # logger.error("缓存设置失败", {
+            # 'key': key,
+            # 'namespace': namespace,
+            # 'error': str(e)
+            # })
             return False

     def delete(self, key: str, namespace: str = "") -> bool:
@@ -621,11 +614,11 @@ class SmartCacheSystem:
             response_time = (time.time() - start_time) * 1000
             if self.stats_collector:
                 self.stats_collector.record_operation('errors', response_time, namespace)
-            logger.error("缓存删除失败", {
-                'key': key,
-                'namespace': namespace,
-                'error': str(e)
-            })
+            # logger.error("缓存删除失败", {
+            # 'key': key,
+            # 'namespace': namespace,
+            # 'error': str(e)
+            # })
             return False

     def clear_namespace(self, namespace: str) -> int:
@@ -643,10 +636,10 @@ class SmartCacheSystem:
             return 0

         except Exception as e:
-            logger.error("清除命名空间失败", {
-                'namespace': namespace,
-                'error': str(e)
-            })
+            # logger.error("清除命名空间失败", {
+            # 'namespace': namespace,
+            # 'error': str(e)
+            # })
             return 0

     def _generate_cache_key(self, key: str, namespace: str = "") -> str:
@@ -760,18 +753,6 @@ class SmartCacheSystem:
         # 7. TTL边界限制(使用配置值)
         final_ttl = max(self.config.ttl_min, min(self.config.ttl_max, final_ttl))

-        # 8. 记录智能TTL决策(使用配置开关)
-        if self.config.debug_ttl:
-            logger.debug("智能TTL计算", {
-                'namespace': namespace,
-                'key': key[:50] + "..." if len(key) > 50 else key,
-                'data_size': data_size,
-                'base_ttl': base_ttl,
-                'key_factor': key_factor,
-                'size_factor': size_factor,
-                'final_ttl': final_ttl
-            })
-
         return final_ttl

     def get_stats(self) -> Dict[str, Any]:
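The only logic kept in this hunk is the boundary clamp at step 7: max(ttl_min, min(ttl_max, final_ttl)) pins the computed TTL into the configured window; only the debug trace of the intermediate factors is dropped. A small worked example of that clamp, with illustrative bounds rather than mdbq's actual configuration:

ttl_min, ttl_max = 60, 3600  # illustrative bounds, not the package's defaults


def clamp_ttl(ttl: int) -> int:
    # max(lower, min(upper, x)) pins x into the closed interval [lower, upper]
    return max(ttl_min, min(ttl_max, ttl))


assert clamp_ttl(10) == 60        # below the window -> raised to ttl_min
assert clamp_ttl(600) == 600      # inside the window -> unchanged
assert clamp_ttl(86400) == 3600   # above the window -> capped at ttl_max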
@@ -779,11 +760,7 @@ class SmartCacheSystem:
         # 确保统计系统已初始化
         if self.stats_collector:
             return self.stats_collector.get_stats()
-
-        logger.debug("统计系统未初始化,返回空统计信息", {
-            'instance_name': self.instance_name,
-            'process_id': os.getpid()
-        })
+
         return {
             'enabled': False,
             'message': '统计系统未初始化',
@@ -828,14 +805,6 @@ class SmartCacheSystem:
         if not self.config.enable_stats:
             return

-        logger.debug("调用操作记录", {
-            'operation': operation,
-            'response_time_ms': round(response_time, 2),
-            'namespace': namespace,
-            'key': key[:50] + "..." if len(key) > 50 else key,
-            'instance_name': self.instance_name
-        })
-
     def shutdown(self):
         """关闭缓存系统"""
         self._state = CacheSystemState.SHUTDOWN
@@ -844,7 +813,7 @@ class SmartCacheSystem:
             # 关闭统计收集器(包括后台定时器)
             self.stats_collector.shutdown()

-        logger.info("缓存系统已关闭", {'instance_name': self.instance_name})
+        # logger.info("缓存系统已关闭", {'instance_name': self.instance_name})


 class CacheManager:
@@ -868,10 +837,10 @@ class CacheManager:
     def initialize(self, redis_client: redis.Redis, mysql_pool=None, instance_name: str = "default", **config):
         """初始化缓存系统"""
         if self.cache_instance is not None:
-            logger.warning("缓存系统已初始化,跳过重复初始化", {
-                'existing_instance': self.cache_instance.instance_name,
-                'new_instance': instance_name
-            })
+            # logger.warning("缓存系统已初始化,跳过重复初始化", {
+            # 'existing_instance': self.cache_instance.instance_name,
+            # 'new_instance': instance_name
+            # })
             return

         self.cache_instance = SmartCacheSystem(