mdbq 4.1.2__py3-none-any.whl → 4.1.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. The information is provided for informational purposes only and reflects the changes between those published versions.

Potentially problematic release.


This version of mdbq might be problematic. See the registry's advisory page for this release for more details.

mdbq/__version__.py CHANGED
@@ -1 +1 @@
1
- VERSION = '4.1.2'
1
+ VERSION = '4.1.3'
mdbq/myconf/myconf.py CHANGED
@@ -1,22 +1,22 @@
1
1
  import re
2
2
  from typing import Dict, Any, Optional, Union, List, Tuple, Type, TypeVar
3
3
  from pathlib import Path
4
- from mdbq.log import mylogger
4
+ # from mdbq.log import mylogger
5
5
  from dataclasses import dataclass, field
6
6
  from enum import Enum
7
7
  import time
8
8
 
9
- logger = mylogger.MyLogger(
10
- logging_mode='both',
11
- log_level='info',
12
- log_format='json',
13
- max_log_size=50,
14
- backup_count=5,
15
- enable_async=False, # 是否启用异步日志
16
- sample_rate=1, # 采样DEBUG/INFO日志
17
- sensitive_fields=[], # 敏感字段过滤
18
- enable_metrics=False, # 是否启用性能指标
19
- )
9
+ # logger = mylogger.MyLogger(
10
+ # logging_mode='both',
11
+ # log_level='info',
12
+ # log_format='json',
13
+ # max_log_size=50,
14
+ # backup_count=5,
15
+ # enable_async=False, # 是否启用异步日志
16
+ # sample_rate=1, # 采样DEBUG/INFO日志
17
+ # sensitive_fields=[], # 敏感字段过滤
18
+ # enable_metrics=False, # 是否启用性能指标
19
+ # )
20
20
 
21
21
  T = TypeVar('T') # 类型变量
22
22
 
@@ -151,12 +151,10 @@ class ConfigParser:
151
151
  def open(self, file_path: Union[str, Path]) -> 'ConfigParser':
152
152
  """打开配置文件"""
153
153
  file_path = Path(file_path)
154
- logger.debug(f'尝试打开配置文件: {file_path}')
155
154
  if not file_path.exists() and not self.options.auto_create:
156
- logger.error(f'配置文件不存在: {file_path}')
155
+ # logger.error(f'配置文件不存在: {file_path}')
157
156
  raise ConfigException.file_not_found(file_path)
158
157
  self._current_file = file_path
159
- logger.debug(f'配置文件已打开: {file_path}')
160
158
  return self
161
159
 
162
160
  def _ensure_file_open(self) -> None:
@@ -211,7 +209,6 @@ class ConfigParser:
211
209
 
212
210
  def _update_cache(self, file_path: str, config: Dict[str, Any]) -> None:
213
211
  """更新配置缓存"""
214
- logger.debug(f'更新配置缓存: {file_path}')
215
212
  self._config_cache[file_path] = config
216
213
  self._cache_timestamps[file_path] = time.time()
217
214
 
@@ -236,13 +233,11 @@ class ConfigParser:
236
233
  def _clear_cache(self, file_path: Optional[str] = None) -> None:
237
234
  """清除配置缓存"""
238
235
  if file_path:
239
- logger.debug(f'清除指定文件的缓存: {file_path}')
240
236
  self._config_cache.pop(file_path, None)
241
237
  self._cache_timestamps.pop(file_path, None)
242
238
  self._comments_cache.pop(file_path, None)
243
239
  self._section_map.pop(file_path, None)
244
240
  else:
245
- logger.debug('清除所有配置缓存')
246
241
  self._config_cache.clear()
247
242
  self._cache_timestamps.clear()
248
243
  self._comments_cache.clear()
@@ -323,12 +318,11 @@ class ConfigParser:
323
318
  default: Any = None, value_type: Optional[Type[T]] = None,
324
319
  file_path: Optional[Union[str, Path]] = None) -> T:
325
320
  """获取指定配置项的值"""
326
- logger.debug(f'获取配置项: section={section}, key={key}, file_path={file_path}')
327
321
  if file_path is None:
328
322
  self._ensure_file_open()
329
323
  file_path = self._current_file
330
324
  if not self._validate_key(key):
331
- logger.error(f'无效的键名: {key}')
325
+ # logger.error(f'无效的键名: {key}')
332
326
  raise ConfigException.invalid_key_error(key, file_path, section)
333
327
  config = self.read(file_path)
334
328
  section = section or self.options.default_section
@@ -336,15 +330,15 @@ class ConfigParser:
336
330
  original_section = self._get_original_section(str(file_path), normalized_section)
337
331
  if original_section is None:
338
332
  if default is not None:
339
- logger.warning(f'节不存在,返回默认值: section={section}, key={key}, default={default}')
333
+ # logger.warning(f'节不存在,返回默认值: section={section}, key={key}, default={default}')
340
334
  return default
341
- logger.error(f'配置节不存在: {section}')
335
+ # logger.error(f'配置节不存在: {section}')
342
336
  raise ConfigException.section_not_found(file_path, section)
343
337
  if key not in config[original_section]:
344
338
  if default is not None:
345
- logger.warning(f'键不存在,返回默认值: section={section}, key={key}, default={default}')
339
+ # logger.warning(f'键不存在,返回默认值: section={section}, key={key}, default={default}')
346
340
  return default
347
- logger.error(f'配置键不存在: {key}')
341
+ # logger.error(f'配置键不存在: {key}')
348
342
  raise ConfigException.key_not_found(file_path, original_section, key)
349
343
  value = config[original_section][key]
350
344
  if value_type is not None:
@@ -356,7 +350,6 @@ class ConfigParser:
356
350
  value_types: Optional[Dict[str, Type]] = None,
357
351
  file_path: Optional[Union[str, Path]] = None) -> Dict[str, Any]:
358
352
  """批量获取多个配置项的值"""
359
- logger.debug(f'批量获取配置项: section={section}, keys={keys}, file_path={file_path}')
360
353
  if file_path is None:
361
354
  self._ensure_file_open()
362
355
  file_path = self._current_file
@@ -375,7 +368,7 @@ class ConfigParser:
375
368
  )
376
369
  result[key] = value
377
370
  except ConfigException as e:
378
- logger.error(f"读取配置项失败: section={section}, key={key}, error={e}")
371
+ # logger.error(f"读取配置项失败: section={section}, key={key}, error={e}")
379
372
  if key in defaults:
380
373
  result[key] = defaults[key]
381
374
  else:
@@ -388,7 +381,6 @@ class ConfigParser:
388
381
  value_types: Optional[Dict[str, Type]] = None,
389
382
  file_path: Optional[Union[str, Path]] = None) -> Tuple[Any, ...]:
390
383
  """获取指定节点下多个键的值元组"""
391
- logger.debug(f'获取节下多个键的值: section={section}, keys={keys}, file_path={file_path}')
392
384
  if file_path is None:
393
385
  self._ensure_file_open()
394
386
  file_path = self._current_file
@@ -407,7 +399,7 @@ class ConfigParser:
407
399
  )
408
400
  result.append(value)
409
401
  except ConfigException as e:
410
- logger.error(f"读取配置项失败: section={section}, key={key}, error={e}")
402
+ # logger.error(f"读取配置项失败: section={section}, key={key}, error={e}")
411
403
  if key in defaults:
412
404
  result.append(defaults[key])
413
405
  else:
@@ -419,7 +411,6 @@ class ConfigParser:
419
411
  value_type: Optional[Type] = None,
420
412
  file_path: Optional[Union[str, Path]] = None) -> None:
421
413
  """设置指定配置项的值"""
422
- logger.debug(f'准备写入配置项: section={section}, key={key}, value={value}, file_path={file_path}')
423
414
  if file_path is None:
424
415
  self._ensure_file_open()
425
416
  file_path = self._current_file
@@ -427,7 +418,7 @@ class ConfigParser:
427
418
  file_path = Path(file_path)
428
419
 
429
420
  if not self._validate_key(key):
430
- logger.error(f'无效的键名: {key}')
421
+ # logger.error(f'无效的键名: {key}')
431
422
  raise ConfigException.invalid_key_error(key, file_path, section)
432
423
 
433
424
  section = section or self.options.default_section
@@ -452,7 +443,7 @@ class ConfigParser:
452
443
  else:
453
444
  value = value_type(value)
454
445
  except (ValueError, TypeError) as e:
455
- logger.error(f'类型转换失败: value={value}, type={value_type}, error={e}')
446
+ # logger.error(f'类型转换失败: value={value}, type={value_type}, error={e}')
456
447
  raise ConfigException.conversion_error(value, value_type, file_path, section=section, key=key)
457
448
 
458
449
  if isinstance(value, bool):
@@ -509,21 +500,20 @@ class ConfigParser:
509
500
  file.write(f'{key}={value}\n')
510
501
 
511
502
  self._clear_cache(str(file_path))
512
- logger.info(f'配置项写入成功: section={section}, key={key}, value={value}, file_path={file_path}')
503
+ # logger.info(f'配置项写入成功: section={section}, key={key}, value={value}, file_path={file_path}')
513
504
 
514
505
  except Exception as e:
515
- logger.error(f'写入配置项失败: section={section}, key={key}, value={value}, file_path={file_path}, error={e}')
506
+ # logger.error(f'写入配置项失败: section={section}, key={key}, value={value}, file_path={file_path}, error={e}')
516
507
  raise ConfigException.write_error(file_path, e)
517
508
 
518
509
  def set_values(self, section: Optional[str] = None, values: Dict[str, Any] = None,
519
510
  value_types: Optional[Dict[str, Type]] = None,
520
511
  file_path: Optional[Union[str, Path]] = None) -> None:
521
512
  """批量设置多个配置项的值"""
522
- logger.debug(f'批量写入配置项: section={section}, keys={list(values.keys())}, file_path={file_path}')
523
513
  for key, value in values.items():
524
514
  value_type = value_types.get(key) if value_types else None
525
515
  self.set_value(section, key, value, value_type, file_path)
526
- logger.info(f'批量写入配置项完成: section={section}, file_path={file_path}')
516
+ # logger.info(f'批量写入配置项完成: section={section}, file_path={file_path}')
527
517
 
528
518
  def validate_config(self, section: Optional[str] = None, schema: Dict[str, Type] = None,
529
519
  file_path: Optional[Union[str, Path]] = None) -> bool:
@@ -560,17 +550,15 @@ class ConfigParser:
560
550
  else:
561
551
  file_path = Path(file_path)
562
552
 
563
- logger.debug(f'开始读取配置文件: {file_path}')
564
553
  cached_config = self._get_cached_config(str(file_path))
565
554
  if cached_config is not None:
566
- logger.debug(f'命中配置缓存: {file_path}')
567
555
  return cached_config
568
556
 
569
557
  if not file_path.exists():
570
558
  if not self.options.auto_create:
571
- logger.error(f'配置文件不存在: {file_path}')
559
+ # logger.error(f'配置文件不存在: {file_path}')
572
560
  raise ConfigException.file_not_found(file_path)
573
- logger.info(f'配置文件不存在,将创建: {file_path}')
561
+ # logger.info(f'配置文件不存在,将创建: {file_path}')
574
562
  file_path.parent.mkdir(parents=True, exist_ok=True)
575
563
  file_path.touch()
576
564
  return {}
@@ -592,7 +580,7 @@ class ConfigParser:
592
580
  if stripped_line.startswith('[') and stripped_line.endswith(']'):
593
581
  current_section = stripped_line[1:-1]
594
582
  if not self._validate_key(current_section):
595
- logger.error(f'无效的节名: {current_section}')
583
+ # logger.error(f'无效的节名: {current_section}')
596
584
  raise ConfigException.invalid_section_error(current_section, file_path)
597
585
  self._update_section_map(str(file_path), current_section)
598
586
  if current_section not in config:
@@ -606,7 +594,7 @@ class ConfigParser:
606
594
  if key_value:
607
595
  key, value = key_value
608
596
  if not self._validate_key(key):
609
- logger.error(f'无效的键名: {key}')
597
+ # logger.error(f'无效的键名: {key}')
610
598
  raise ConfigException.invalid_key_error(key, file_path, current_section)
611
599
  value, comment = self._extract_comment(value)
612
600
 
@@ -621,11 +609,10 @@ class ConfigParser:
621
609
  self._comments_cache.setdefault(str(file_path), {}).setdefault(current_section, []).append(comment)
622
610
 
623
611
  self._update_cache(str(file_path), config)
624
- logger.debug(f'配置文件读取成功: {file_path}')
625
612
  return config
626
613
 
627
614
  except Exception as e:
628
- logger.error(f'读取配置文件失败: {file_path}, error={e}')
615
+ # logger.error(f'读取配置文件失败: {file_path}, error={e}')
629
616
  raise ConfigException.read_error(file_path, e)
630
617
 
631
618
 
mdbq/mysql/s_query.py CHANGED
@@ -16,7 +16,7 @@ from functools import wraps
16
16
  warnings.filterwarnings('ignore')
17
17
  logger = mylogger.MyLogger(
18
18
  logging_mode='file',
19
- log_level='info',
19
+ log_level='error',
20
20
  log_format='json',
21
21
  max_log_size=50,
22
22
  backup_count=5,
mdbq/redis/getredis.py CHANGED
@@ -5,21 +5,21 @@ import numpy as np
5
5
  import json
6
6
  import datetime
7
7
  import threading
8
- from mdbq.log import mylogger
8
+ # from mdbq.log import mylogger
9
9
  from decimal import Decimal
10
10
  import orjson
11
11
 
12
- logger = mylogger.MyLogger(
13
- logging_mode='file',
14
- log_level='info',
15
- log_format='json',
16
- max_log_size=50,
17
- backup_count=5,
18
- enable_async=False, # 是否启用异步日志
19
- sample_rate=1, # 采样DEBUG/INFO日志
20
- sensitive_fields=[], # 敏感字段过滤
21
- enable_metrics=False, # 是否启用性能指标
22
- )
12
+ # logger = mylogger.MyLogger(
13
+ # logging_mode='file',
14
+ # log_level='info',
15
+ # log_format='json',
16
+ # max_log_size=50,
17
+ # backup_count=5,
18
+ # enable_async=False, # 是否启用异步日志
19
+ # sample_rate=1, # 采样DEBUG/INFO日志
20
+ # sensitive_fields=[], # 敏感字段过滤
21
+ # enable_metrics=False, # 是否启用性能指标
22
+ # )
23
23
 
24
24
 
25
25
  class RedisData(object):
@@ -61,7 +61,8 @@ class RedisData(object):
61
61
 
62
62
  combined_df = pd.concat(dfs, ignore_index=True) if dfs else pd.DataFrame()
63
63
  if combined_df.empty:
64
- logger.info(f"警告: {db_name}.{table_name} 未读取到数据")
64
+ # logger.info(f"警告: {db_name}.{table_name} 未读取到数据")
65
+ pass
65
66
  else:
66
67
  combined_df = self._convert_date_columns(combined_df)
67
68
  return combined_df
@@ -86,7 +87,7 @@ class RedisData(object):
86
87
  ttl = self.redis_engine.ttl(cache_key)
87
88
  cache_data = self._fetch_redis_data(cache_key)
88
89
  except Exception as e:
89
- logger.error(f"Redis 连接异常: {e},直接访问 MySQL")
90
+ # logger.error(f"Redis 连接异常: {e},直接访问 MySQL")
90
91
  return self.get_from_mysql(db_name, table_name, set_year, start_date, end_date)
91
92
 
92
93
  # 缓存失效处理逻辑
@@ -134,11 +135,11 @@ class RedisData(object):
134
135
  self.redis_engine.set(cache_key, serialized_data)
135
136
  self.redis_engine.expire(cache_key, self.cache_ttl)
136
137
 
137
- logger.info(f"缓存更新 {cache_key} | 数据量: {len(combined_data)}")
138
+ # logger.info(f"缓存更新 {cache_key} | 数据量: {len(combined_data)}")
138
139
  return combined_data
139
140
 
140
141
  except Exception as e:
141
- logger.error(f"缓存更新失败: {cache_key} - {str(e)}")
142
+ # logger.error(f"缓存更新失败: {cache_key} - {str(e)}")
142
143
  return pd.DataFrame()
143
144
 
144
145
  # Helper Methods ------------------------------------------------
@@ -160,7 +161,7 @@ class RedisData(object):
160
161
  projection={}
161
162
  )
162
163
  except Exception as e:
163
- logger.error(f"MySQL 查询异常 {db_name}.{table_name}: {e}")
164
+ # logger.error(f"MySQL 查询异常 {db_name}.{table_name}: {e}")
164
165
  return pd.DataFrame()
165
166
 
166
167
  def _fetch_redis_data(self, cache_key: str) -> pd.DataFrame:
@@ -173,7 +174,7 @@ class RedisData(object):
173
174
  df = pd.DataFrame(json.loads(data.decode("utf-8")))
174
175
  return self._convert_date_columns(df)
175
176
  except Exception as e:
176
- logger.error(f"Redis 数据解析失败 {cache_key}: {e}")
177
+ # logger.error(f"Redis 数据解析失败 {cache_key}: {e}")
177
178
  return pd.DataFrame()
178
179
 
179
180
  def _convert_date_columns(self, df: pd.DataFrame) -> pd.DataFrame:
@@ -278,7 +279,8 @@ class RedisDataHash(object):
278
279
 
279
280
  combined_df = pd.concat(dfs, ignore_index=True) if dfs else pd.DataFrame()
280
281
  if combined_df.empty:
281
- logger.warn(f"warning: {db_name}.{table_name} 未读取到数据")
282
+ # logger.warn(f"warning: {db_name}.{table_name} 未读取到数据")
283
+ pass
282
284
  else:
283
285
  combined_df = self._convert_date_columns(combined_df)
284
286
  return combined_df
@@ -293,7 +295,7 @@ class RedisDataHash(object):
293
295
  projection={}
294
296
  ) -> pd.DataFrame:
295
297
  if not self.redis_engine.ping():
296
- logger.error(f"Redis ping异常,直接访问 MySQL")
298
+ # logger.error(f"Redis ping异常,直接访问 MySQL")
297
299
  return self.get_from_mysql(db_name, table_name, set_year, start_date, end_date, projection)
298
300
  start_dt = pd.to_datetime(start_date).floor('D')
299
301
  end_dt = pd.to_datetime(end_date).floor('D')
@@ -335,7 +337,7 @@ class RedisDataHash(object):
335
337
  return self.get_from_mysql(db_name, table_name, set_year, start_date, end_date, projection)
336
338
 
337
339
  except Exception as e:
338
- logger.error(f"Redis 连接异常: {e},直接访问 MySQL")
340
+ # logger.error(f"Redis 连接异常: {e},直接访问 MySQL")
339
341
  return self.get_from_mysql(db_name, table_name, set_year, start_date, end_date, projection)
340
342
 
341
343
  def set_redis(
@@ -385,9 +387,10 @@ class RedisDataHash(object):
385
387
  serialized_data = self._serialize_data(group)
386
388
  self.redis_engine.hset(cache_key, month_str, serialized_data)
387
389
  self.redis_engine.expire(cache_key, self.cache_ttl + random.randint(0, 1800))
388
- logger.info(f"缓存更新 {cache_key} | 数据量: {len(combined_data)}")
390
+ # logger.info(f"缓存更新 {cache_key} | 数据量: {len(combined_data)}")
389
391
  except Exception as e:
390
- logger.error(f"缓存更新失败: {cache_key} - {str(e)}")
392
+ # logger.error(f"缓存更新失败: {cache_key} - {str(e)}")
393
+ pass
391
394
 
392
395
  def _fetch_table_data(
393
396
  self,
@@ -406,7 +409,7 @@ class RedisDataHash(object):
406
409
  projection=projection
407
410
  )
408
411
  except Exception as e:
409
- logger.error(f"MySQL 查询异常 {db_name}.{table_name}: {e}")
412
+ # logger.error(f"MySQL 查询异常 {db_name}.{table_name}: {e}")
410
413
  return pd.DataFrame()
411
414
 
412
415
  def _fetch_redis_data(self, cache_key: str, months: list = None) -> pd.DataFrame:
@@ -441,7 +444,8 @@ class RedisDataHash(object):
441
444
  df = self._convert_date_columns(df)
442
445
  dfs.append(df)
443
446
  except Exception as e:
444
- logger.error(f"月份数据解析失败 {field}: {e}")
447
+ # logger.error(f"月份数据解析失败 {field}: {e}")
448
+ pass
445
449
 
446
450
  # 处理分片数据(优化后的批处理逻辑)
447
451
  cursor, shard_data = results[result_index]
@@ -459,7 +463,8 @@ class RedisDataHash(object):
459
463
  df = pd.DataFrame(orjson.loads(value))
460
464
  dfs.append(self._convert_date_columns(df))
461
465
  except Exception as e:
462
- logger.error(f"分片数据解析失败: {e}")
466
+ # logger.error(f"分片数据解析失败: {e}")
467
+ pass
463
468
 
464
469
  # 继续获取后续分片
465
470
  if cursor == 0:
@@ -475,7 +480,7 @@ class RedisDataHash(object):
475
480
  return pd.DataFrame()
476
481
 
477
482
  except Exception as e:
478
- logger.error(f"Redis 数据获取失败 {cache_key}: {e}")
483
+ # logger.error(f"Redis 数据获取失败 {cache_key}: {e}")
479
484
  return pd.DataFrame()
480
485
 
481
486
  def _convert_date_columns(self, df: pd.DataFrame) -> pd.DataFrame:
@@ -562,7 +567,7 @@ class RedisDataHash(object):
562
567
  try:
563
568
  records = temp_df.to_dict(orient='records')
564
569
  except Exception as e:
565
- logger.error(f"DataFrame转字典失败: {str(e)}")
570
+ # logger.error(f"DataFrame转字典失败: {str(e)}")
566
571
  records = []
567
572
 
568
573
  # 序列化配置 --------------------------------------------------------
mdbq/redis/redis_cache.py CHANGED
@@ -26,21 +26,21 @@ from typing import Optional, Dict, Any, List, Callable
26
26
  from threading import Event
27
27
  from collections import defaultdict, deque
28
28
  import redis
29
- from mdbq.log import mylogger
29
+ # from mdbq.log import mylogger
30
30
 
31
31
 
32
- # 全局日志器
33
- logger = mylogger.MyLogger(
34
- logging_mode='file',
35
- log_level='info',
36
- log_format='json',
37
- max_log_size=50,
38
- backup_count=5,
39
- enable_async=False,
40
- sample_rate=1,
41
- sensitive_fields=[],
42
- enable_metrics=False,
43
- )
32
+ # # 全局日志器
33
+ # logger = mylogger.MyLogger(
34
+ # logging_mode='file',
35
+ # log_level='info',
36
+ # log_format='json',
37
+ # max_log_size=50,
38
+ # backup_count=5,
39
+ # enable_async=False,
40
+ # sample_rate=1,
41
+ # sensitive_fields=[],
42
+ # enable_metrics=False,
43
+ # )
44
44
 
45
45
 
46
46
  class CacheStatsCollector:
@@ -105,31 +105,28 @@ class CacheStatsCollector:
105
105
  self._check_and_submit()
106
106
  except Exception as submit_error:
107
107
  # 统计提交失败不应影响统计记录
108
- logger.error("统计数据提交检查失败,但统计记录继续", {
109
- 'instance_name': self.instance_name,
110
- 'process_id': self.process_id,
111
- 'operation': operation,
112
- 'submit_error': str(submit_error)
113
- })
108
+ # logger.error("统计数据提交检查失败,但统计记录继续", {
109
+ # 'instance_name': self.instance_name,
110
+ # 'process_id': self.process_id,
111
+ # 'operation': operation,
112
+ # 'submit_error': str(submit_error)
113
+ # })
114
+ pass
114
115
  except Exception as e:
115
116
  # 统计记录失败不应影响缓存操作
116
- logger.error("统计记录失败,但缓存操作继续", {
117
- 'instance_name': self.instance_name,
118
- 'process_id': self.process_id,
119
- 'operation': operation,
120
- 'error': str(e)
121
- })
117
+ # logger.error("统计记录失败,但缓存操作继续", {
118
+ # 'instance_name': self.instance_name,
119
+ # 'process_id': self.process_id,
120
+ # 'operation': operation,
121
+ # 'error': str(e)
122
+ # })
123
+ pass
122
124
 
123
125
  def _start_background_timer(self):
124
126
  """启动后台定时提交线程"""
125
127
  if self._timer is not None:
126
128
  return # 已经启动
127
129
 
128
- logger.debug("启动后台定时提交", {
129
- 'instance_name': self.instance_name,
130
- 'submit_interval': self.submit_interval
131
- })
132
-
133
130
  self._timer = threading.Timer(self.submit_interval, self._background_submit)
134
131
  self._timer.daemon = True # 设置为守护线程
135
132
  self._timer.start()
@@ -147,22 +144,22 @@ class CacheStatsCollector:
147
144
 
148
145
  except Exception as e:
149
146
  self._error_count += 1
150
- logger.error("后台定时提交失败", {
151
- 'instance_name': self.instance_name,
152
- 'process_id': self.process_id,
153
- 'error': str(e),
154
- 'error_type': type(e).__name__,
155
- 'error_count': self._error_count,
156
- 'max_errors': self._max_errors
157
- })
147
+ # logger.error("后台定时提交失败", {
148
+ # 'instance_name': self.instance_name,
149
+ # 'process_id': self.process_id,
150
+ # 'error': str(e),
151
+ # 'error_type': type(e).__name__,
152
+ # 'error_count': self._error_count,
153
+ # 'max_errors': self._max_errors
154
+ # })
158
155
 
159
156
  # 如果连续错误次数过多,停止定时器
160
157
  if self._error_count >= self._max_errors:
161
- logger.error("后台定时器连续错误过多,停止定时提交", {
162
- 'instance_name': self.instance_name,
163
- 'process_id': self.process_id,
164
- 'error_count': self._error_count
165
- })
158
+ # logger.error("后台定时器连续错误过多,停止定时提交", {
159
+ # 'instance_name': self.instance_name,
160
+ # 'process_id': self.process_id,
161
+ # 'error_count': self._error_count
162
+ # })
166
163
  return # 不再安排下一次定时器
167
164
 
168
165
  finally:
@@ -198,27 +195,22 @@ class CacheStatsCollector:
198
195
  self.last_submit_time = current_time
199
196
  self.last_operation_count = self.stats['total_operations']
200
197
 
201
- logger.info("统计数据提交成功", {
202
- 'instance_name': self.instance_name,
203
- 'total_operations': self.stats['total_operations'],
204
- 'new_operations': new_operations,
205
- 'trigger_type': 'background_timer' if force_check else 'operation_triggered'
206
- })
198
+ # logger.info("统计数据提交成功", {
199
+ # 'instance_name': self.instance_name,
200
+ # 'total_operations': self.stats['total_operations'],
201
+ # 'new_operations': new_operations,
202
+ # 'trigger_type': 'background_timer' if force_check else 'operation_triggered'
203
+ # })
207
204
  except Exception as e:
208
- logger.error("统计数据提交失败", {
209
- 'instance_name': self.instance_name,
210
- 'error': str(e),
211
- 'trigger_type': 'background_timer' if force_check else 'operation_triggered'
212
- })
205
+ # logger.error("统计数据提交失败", {
206
+ # 'instance_name': self.instance_name,
207
+ # 'error': str(e),
208
+ # 'trigger_type': 'background_timer' if force_check else 'operation_triggered'
209
+ # })
210
+ pass
213
211
  else:
214
212
  # 无新操作,跳过提交但更新时间
215
213
  self.last_submit_time = current_time
216
- if force_check: # 仅在后台定时器触发时记录
217
- logger.debug("后台检查:无新操作,跳过提交", {
218
- 'instance_name': self.instance_name,
219
- 'total_operations': self.stats['total_operations']
220
- })
221
-
222
214
  def _submit_to_mysql(self):
223
215
  """同步提交统计数据到MySQL"""
224
216
  if not self.mysql_pool:
@@ -270,12 +262,12 @@ class CacheStatsCollector:
270
262
  connection.close()
271
263
 
272
264
  except Exception as e:
273
- logger.error("MySQL提交失败", {
274
- 'instance_name': self.instance_name,
275
- 'database': db_name,
276
- 'table': table_name,
277
- 'error': str(e)
278
- })
265
+ # logger.error("MySQL提交失败", {
266
+ # 'instance_name': self.instance_name,
267
+ # 'database': db_name,
268
+ # 'table': table_name,
269
+ # 'error': str(e)
270
+ # })
279
271
  raise
280
272
 
281
273
  def get_stats(self) -> Dict[str, Any]:
@@ -307,9 +299,9 @@ class CacheStatsCollector:
307
299
 
308
300
  def shutdown(self):
309
301
  """关闭统计收集器,停止后台定时器"""
310
- logger.info("关闭统计收集器", {
311
- 'instance_name': self.instance_name
312
- })
302
+ # logger.info("关闭统计收集器", {
303
+ # 'instance_name': self.instance_name
304
+ # })
313
305
 
314
306
  # 设置关闭标志
315
307
  self._shutdown_event.set()
@@ -417,18 +409,19 @@ class SmartCacheSystem:
417
409
  try:
418
410
  self._create_simple_stats_table()
419
411
  self._state = CacheSystemState.MYSQL_READY
420
- logger.info("统计功能已启用", {
421
- 'instance_name': self.instance_name,
422
- 'process_id': os.getpid()
423
- })
412
+ # logger.info("统计功能已启用", {
413
+ # 'instance_name': self.instance_name,
414
+ # 'process_id': os.getpid()
415
+ # })
424
416
  except Exception as e:
425
- logger.error("统计表创建失败", {
426
- 'instance_name': self.instance_name,
427
- 'error': str(e)
428
- })
417
+ # logger.error("统计表创建失败", {
418
+ # 'instance_name': self.instance_name,
419
+ # 'error': str(e)
420
+ # })
421
+ pass
429
422
  else:
430
423
  self._state = CacheSystemState.ERROR
431
- logger.error("Redis连接失败", {'instance_name': self.instance_name})
424
+ # logger.error("Redis连接失败", {'instance_name': self.instance_name})
432
425
 
433
426
  def _test_redis_connection(self) -> bool:
434
427
  """测试Redis连接"""
@@ -436,7 +429,7 @@ class SmartCacheSystem:
436
429
  self.redis_client.ping()
437
430
  return True
438
431
  except Exception as e:
439
- logger.error("Redis连接测试失败", {'error': str(e)})
432
+ # logger.error("Redis连接测试失败", {'error': str(e)})
440
433
  return False
441
434
 
442
435
  def _create_simple_stats_table(self):
@@ -493,7 +486,7 @@ class SmartCacheSystem:
493
486
  connection.close()
494
487
 
495
488
  except Exception as e:
496
- logger.error("统计表初始化失败", {'error': str(e)})
489
+ # logger.error("统计表初始化失败", {'error': str(e)})
497
490
  raise
498
491
 
499
492
  @property
@@ -539,11 +532,11 @@ class SmartCacheSystem:
539
532
  response_time = (time.time() - start_time) * 1000
540
533
  if self.stats_collector:
541
534
  self.stats_collector.record_operation('errors', response_time, namespace)
542
- logger.error("缓存获取失败", {
543
- 'key': key,
544
- 'namespace': namespace,
545
- 'error': str(e)
546
- })
535
+ # logger.error("缓存获取失败", {
536
+ # 'key': key,
537
+ # 'namespace': namespace,
538
+ # 'error': str(e)
539
+ # })
547
540
  return default
548
541
 
549
542
  def set(self, key: str, value: Any, ttl: Optional[int] = None, namespace: str = "") -> bool:
@@ -574,11 +567,11 @@ class SmartCacheSystem:
574
567
  if value_size > self.config.max_value_size:
575
568
  if self.stats_collector:
576
569
  self.stats_collector.record_operation('errors', 0, namespace)
577
- logger.warning("缓存值过大,跳过设置", {
578
- 'key': key,
579
- 'size': len(serialized_value),
580
- 'max_size': self.config.max_value_size
581
- })
570
+ # logger.warning("缓存值过大,跳过设置", {
571
+ # 'key': key,
572
+ # 'size': len(serialized_value),
573
+ # 'max_size': self.config.max_value_size
574
+ # })
582
575
  return False
583
576
 
584
577
  result = self.redis_client.setex(cache_key, ttl, serialized_value)
@@ -592,11 +585,11 @@ class SmartCacheSystem:
592
585
  response_time = (time.time() - start_time) * 1000
593
586
  if self.stats_collector:
594
587
  self.stats_collector.record_operation('errors', response_time, namespace)
595
- logger.error("缓存设置失败", {
596
- 'key': key,
597
- 'namespace': namespace,
598
- 'error': str(e)
599
- })
588
+ # logger.error("缓存设置失败", {
589
+ # 'key': key,
590
+ # 'namespace': namespace,
591
+ # 'error': str(e)
592
+ # })
600
593
  return False
601
594
 
602
595
  def delete(self, key: str, namespace: str = "") -> bool:
@@ -621,11 +614,11 @@ class SmartCacheSystem:
621
614
  response_time = (time.time() - start_time) * 1000
622
615
  if self.stats_collector:
623
616
  self.stats_collector.record_operation('errors', response_time, namespace)
624
- logger.error("缓存删除失败", {
625
- 'key': key,
626
- 'namespace': namespace,
627
- 'error': str(e)
628
- })
617
+ # logger.error("缓存删除失败", {
618
+ # 'key': key,
619
+ # 'namespace': namespace,
620
+ # 'error': str(e)
621
+ # })
629
622
  return False
630
623
 
631
624
  def clear_namespace(self, namespace: str) -> int:
@@ -643,10 +636,10 @@ class SmartCacheSystem:
643
636
  return 0
644
637
 
645
638
  except Exception as e:
646
- logger.error("清除命名空间失败", {
647
- 'namespace': namespace,
648
- 'error': str(e)
649
- })
639
+ # logger.error("清除命名空间失败", {
640
+ # 'namespace': namespace,
641
+ # 'error': str(e)
642
+ # })
650
643
  return 0
651
644
 
652
645
  def _generate_cache_key(self, key: str, namespace: str = "") -> str:
@@ -760,18 +753,6 @@ class SmartCacheSystem:
760
753
  # 7. TTL边界限制(使用配置值)
761
754
  final_ttl = max(self.config.ttl_min, min(self.config.ttl_max, final_ttl))
762
755
 
763
- # 8. 记录智能TTL决策(使用配置开关)
764
- if self.config.debug_ttl:
765
- logger.debug("智能TTL计算", {
766
- 'namespace': namespace,
767
- 'key': key[:50] + "..." if len(key) > 50 else key,
768
- 'data_size': data_size,
769
- 'base_ttl': base_ttl,
770
- 'key_factor': key_factor,
771
- 'size_factor': size_factor,
772
- 'final_ttl': final_ttl
773
- })
774
-
775
756
  return final_ttl
776
757
 
777
758
  def get_stats(self) -> Dict[str, Any]:
@@ -779,11 +760,7 @@ class SmartCacheSystem:
779
760
  # 确保统计系统已初始化
780
761
  if self.stats_collector:
781
762
  return self.stats_collector.get_stats()
782
-
783
- logger.debug("统计系统未初始化,返回空统计信息", {
784
- 'instance_name': self.instance_name,
785
- 'process_id': os.getpid()
786
- })
763
+
787
764
  return {
788
765
  'enabled': False,
789
766
  'message': '统计系统未初始化',
@@ -828,14 +805,6 @@ class SmartCacheSystem:
828
805
  if not self.config.enable_stats:
829
806
  return
830
807
 
831
- logger.debug("调用操作记录", {
832
- 'operation': operation,
833
- 'response_time_ms': round(response_time, 2),
834
- 'namespace': namespace,
835
- 'key': key[:50] + "..." if len(key) > 50 else key,
836
- 'instance_name': self.instance_name
837
- })
838
-
839
808
  def shutdown(self):
840
809
  """关闭缓存系统"""
841
810
  self._state = CacheSystemState.SHUTDOWN
@@ -844,7 +813,7 @@ class SmartCacheSystem:
844
813
  # 关闭统计收集器(包括后台定时器)
845
814
  self.stats_collector.shutdown()
846
815
 
847
- logger.info("缓存系统已关闭", {'instance_name': self.instance_name})
816
+ # logger.info("缓存系统已关闭", {'instance_name': self.instance_name})
848
817
 
849
818
 
850
819
  class CacheManager:
@@ -868,10 +837,10 @@ class CacheManager:
868
837
  def initialize(self, redis_client: redis.Redis, mysql_pool=None, instance_name: str = "default", **config):
869
838
  """初始化缓存系统"""
870
839
  if self.cache_instance is not None:
871
- logger.warning("缓存系统已初始化,跳过重复初始化", {
872
- 'existing_instance': self.cache_instance.instance_name,
873
- 'new_instance': instance_name
874
- })
840
+ # logger.warning("缓存系统已初始化,跳过重复初始化", {
841
+ # 'existing_instance': self.cache_instance.instance_name,
842
+ # 'new_instance': instance_name
843
+ # })
875
844
  return
876
845
 
877
846
  self.cache_instance = SmartCacheSystem(
mdbq/route/monitor.py CHANGED
@@ -21,7 +21,7 @@ from typing import Dict, Any
21
21
  from urllib.parse import urlparse
22
22
  from dbutils.pooled_db import PooledDB # type: ignore
23
23
  from mdbq.myconf import myconf # type: ignore
24
- from mdbq.log import mylogger
24
+ # from mdbq.log import mylogger
25
25
  from flask import request, g
26
26
  import re
27
27
  import ipaddress
@@ -33,17 +33,17 @@ host, port, username, password = parser.get_section_values(
33
33
  keys=['host', 'port', 'username', 'password'],
34
34
  )
35
35
 
36
- logger = mylogger.MyLogger(
37
- logging_mode='file',
38
- log_level='info',
39
- log_format='json',
40
- max_log_size=50,
41
- backup_count=5,
42
- enable_async=False, # 是否启用异步日志
43
- sample_rate=1, # 采样DEBUG/INFO日志
44
- sensitive_fields=[], # 敏感字段过滤
45
- enable_metrics=False, # 是否启用性能指标
46
- )
36
+ # logger = mylogger.MyLogger(
37
+ # logging_mode='file',
38
+ # log_level='info',
39
+ # log_format='json',
40
+ # max_log_size=50,
41
+ # backup_count=5,
42
+ # enable_async=False, # 是否启用异步日志
43
+ # sample_rate=1, # 采样DEBUG/INFO日志
44
+ # sensitive_fields=[], # 敏感字段过滤
45
+ # enable_metrics=False, # 是否启用性能指标
46
+ # )
47
47
 
48
48
 
49
49
  class RouteMonitor:
@@ -83,10 +83,10 @@ class RouteMonitor:
83
83
  connection.close()
84
84
 
85
85
  except Exception as e:
86
- logger.error("数据库连接池初始化失败", {
87
- "错误信息": str(e),
88
- "数据库": self.database
89
- })
86
+ # logger.error("数据库连接池初始化失败", {
87
+ # "错误信息": str(e),
88
+ # "数据库": self.database
89
+ # })
90
90
  raise
91
91
 
92
92
  def ensure_database_context(self, cursor):
@@ -94,22 +94,16 @@ class RouteMonitor:
94
94
  try:
95
95
  cursor.execute(f"USE `{self.database}`")
96
96
  except Exception as e:
97
- logger.warning("切换数据库上下文失败,尝试重新创建", {
98
- "数据库": self.database,
99
- "错误": str(e)
100
- })
97
+ # logger.warning("切换数据库上下文失败,尝试重新创建", {
98
+ # "数据库": self.database,
99
+ # "错误": str(e)
100
+ # })
101
101
  cursor.execute(f"CREATE DATABASE IF NOT EXISTS `{self.database}` DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci")
102
102
  cursor.execute(f"USE `{self.database}`")
103
103
 
104
104
  def init_database_tables(self):
105
105
  """初始化数据库表结构"""
106
106
  try:
107
- logger.debug("🗄️ 开始创建/验证数据库表结构", {
108
- "操作": "表结构初始化",
109
- "预期表数": 4,
110
- "数据库": self.database
111
- })
112
-
113
107
  connection = self.pool.connection()
114
108
  try:
115
109
  with connection.cursor() as cursor:
@@ -244,23 +238,17 @@ class RouteMonitor:
244
238
  COMMENT='系统性能统计表';
245
239
  """)
246
240
  connection.commit()
247
- logger.debug("🎯 所有数据库表结构初始化完成", {
248
- "创建表数": 4,
249
- "操作状态": "成功",
250
- "数据库": self.database,
251
- "数据库引擎": "InnoDB"
252
- })
253
241
 
254
242
  finally:
255
243
  connection.close()
256
244
 
257
245
  except Exception as e:
258
- logger.error("数据库表结构初始化失败", {
259
- "错误信息": str(e),
260
- "错误类型": type(e).__name__,
261
- "数据库": self.database,
262
- "影响": "监控系统可能无法正常工作"
263
- })
246
+ # logger.error("数据库表结构初始化失败", {
247
+ # "错误信息": str(e),
248
+ # "错误类型": type(e).__name__,
249
+ # "数据库": self.database,
250
+ # "影响": "监控系统可能无法正常工作"
251
+ # })
264
252
  # 静默处理初始化错误,避免影响主应用
265
253
  pass
266
254
 
@@ -434,25 +422,9 @@ class RouteMonitor:
434
422
 
435
423
  # 设置请求ID到全局变量中,供后续使用
436
424
  g.request_id = request_id
437
-
438
- logger.debug("📋 开始收集请求数据", {
439
- "请求ID": request_id,
440
- "请求方法": request.method,
441
- "端点": request.endpoint or request.path,
442
- "客户端IP": request.remote_addr
443
- })
444
-
445
425
  # 获取真实IP
446
426
  real_ip, forwarded_ips = self.get_real_ip(request)
447
427
 
448
- if real_ip != request.remote_addr:
449
- logger.debug("🔍 检测到IP转发", {
450
- "请求ID": request_id,
451
- "原始IP": request.remote_addr,
452
- "真实IP": real_ip,
453
- "转发链长度": len(forwarded_ips) if forwarded_ips else 0
454
- })
455
-
456
428
  # 获取请求头信息
457
429
  headers = dict(request.headers)
458
430
  sanitized_headers = self.sanitize_data(headers)
@@ -479,25 +451,15 @@ class RouteMonitor:
479
451
  request_body = body_data.decode('utf-8')
480
452
  except UnicodeDecodeError:
481
453
  request_body = f"[BINARY_DATA:{len(body_data)}_bytes]"
482
- logger.debug("📁 检测到二进制数据", {
483
- "请求ID": request_id,
484
- "数据大小": len(body_data),
485
- "处理方式": "标记为二进制"
486
- })
487
454
 
488
455
  if request_body:
489
456
  request_size = len(str(request_body).encode('utf-8'))
490
- logger.debug("📊 请求体信息", {
491
- "请求ID": request_id,
492
- "数据类型": "JSON" if request.is_json else "表单" if request.form else "文本",
493
- "大小": f"{request_size} bytes"
494
- })
495
457
  except Exception as e:
496
458
  request_body = "[ERROR_READING_BODY]"
497
- logger.warning("⚠️ 读取请求体失败", {
498
- "请求ID": request_id,
499
- "错误": str(e)
500
- })
459
+ # logger.warning("读取请求体失败", {
460
+ # "请求ID": request_id,
461
+ # "错误": str(e)
462
+ # })
501
463
 
502
464
  # 清理敏感数据
503
465
  sanitized_body = self.sanitize_data(request_body)
@@ -506,21 +468,6 @@ class RouteMonitor:
506
468
  # 设备信息提取
507
469
  user_agent = request.headers.get('User-Agent', '')
508
470
  device_info = self.extract_device_info(user_agent)
509
-
510
- if device_info['is_bot']:
511
- logger.debug("🤖 检测到机器人请求", {
512
- "请求ID": request_id,
513
- "用户代理": user_agent[:100] + "..." if len(user_agent) > 100 else user_agent,
514
- "IP": real_ip
515
- })
516
-
517
- if device_info['is_mobile']:
518
- logger.debug("📱 检测到移动设备请求", {
519
- "请求ID": request_id,
520
- "操作系统": device_info['os_name'],
521
- "浏览器": device_info['browser_name']
522
- })
523
-
524
471
  # URL解析
525
472
  parsed_url = urlparse(request.url)
526
473
 
@@ -559,14 +506,6 @@ class RouteMonitor:
559
506
  'os_version': device_info['os_version'],
560
507
  }
561
508
 
562
- logger.debug("✅ 请求数据收集完成", {
563
- "请求ID": request_id,
564
- "数据字段数": len(request_data),
565
- "请求大小": f"{request_size} bytes",
566
- "设备类型": "移动" if device_info['is_mobile'] else "桌面",
567
- "是否机器人": device_info['is_bot']
568
- })
569
-
570
509
  return request_data
571
510
 
572
511
  def mask_token(self, token: str) -> str:
@@ -591,13 +530,6 @@ class RouteMonitor:
591
530
  if response_data:
592
531
  request_data.update(response_data)
593
532
 
594
- logger.debug("💾 保存请求日志到数据库", {
595
- "请求ID": request_id,
596
- "端点": request_data.get('endpoint', ''),
597
- "状态码": request_data.get('response_status', '未知'),
598
- "处理时间": f"{request_data.get('process_time', 0)}ms"
599
- })
600
-
601
533
  connection = self.pool.connection()
602
534
  try:
603
535
  with connection.cursor() as cursor:
@@ -616,39 +548,26 @@ class RouteMonitor:
616
548
  cursor.execute(sql, list(request_data.values()))
617
549
  connection.commit()
618
550
 
619
- logger.debug("✅ 请求日志保存成功", {
620
- "请求ID": request_id,
621
- "写入状态": "成功",
622
- "数据库表": "api_request_logs"
623
- })
624
-
625
551
  finally:
626
552
  connection.close()
627
553
 
628
554
  except Exception as e:
629
- logger.error("保存请求日志失败", {
630
- "请求ID": request_id,
631
- "错误信息": str(e),
632
- "错误类型": type(e).__name__,
633
- "影响": "日志丢失,但不影响主业务"
634
- })
555
+ # logger.error("保存请求日志失败", {
556
+ # "请求ID": request_id,
557
+ # "错误信息": str(e),
558
+ # "错误类型": type(e).__name__,
559
+ # "影响": "日志丢失,但不影响主业务"
560
+ # })
635
561
  # 静默处理错误,不影响主业务
636
562
  pass
637
563
 
638
564
  def update_statistics(self, request_data: Dict[str, Any]):
639
565
  """更新统计数据"""
640
566
  request_id = request_data.get('request_id', 'unknown')
641
- endpoint = request_data.get('endpoint', '')
642
- status_code = request_data.get('response_status', 500)
567
+ # endpoint = request_data.get('endpoint', '')
568
+ # status_code = request_data.get('response_status', 500)
643
569
 
644
570
  try:
645
- logger.debug("📈 更新统计数据", {
646
- "请求ID": request_id,
647
- "端点": endpoint,
648
- "状态码": status_code,
649
- "统计类型": "API访问统计和IP统计"
650
- })
651
-
652
571
  connection = self.pool.connection()
653
572
  try:
654
573
  with connection.cursor() as cursor:
@@ -705,23 +624,16 @@ class RouteMonitor:
705
624
 
706
625
  connection.commit()
707
626
 
708
- logger.debug("✅ 统计数据更新成功", {
709
- "请求ID": request_id,
710
- "更新表": "api_access_statistics, ip_access_statistics",
711
- "日期": str(date),
712
- "小时": hour
713
- })
714
-
715
627
  finally:
716
628
  connection.close()
717
629
 
718
630
  except Exception as e:
719
- logger.error("更新统计数据失败", {
720
- "请求ID": request_id,
721
- "错误信息": str(e),
722
- "错误类型": type(e).__name__,
723
- "影响": "统计数据缺失,但不影响主业务"
724
- })
631
+ # logger.error("更新统计数据失败", {
632
+ # "请求ID": request_id,
633
+ # "错误信息": str(e),
634
+ # "错误类型": type(e).__name__,
635
+ # "影响": "统计数据缺失,但不影响主业务"
636
+ # })
725
637
  # 静默处理错误
726
638
  pass
727
639
 
@@ -737,14 +649,6 @@ class RouteMonitor:
737
649
  request_data = self.collect_request_data(request)
738
650
  request_id = request_data.get('request_id', 'unknown')
739
651
 
740
- logger.debug("🎯 开始监控请求", {
741
- "请求ID": request_id,
742
- "函数名": func.__name__,
743
- "端点": request_data.get('endpoint', ''),
744
- "方法": request_data.get('method', ''),
745
- "来源IP": request_data.get('real_ip', request_data.get('client_ip'))
746
- })
747
-
748
652
  try:
749
653
  # 执行原函数
750
654
  response = func(*args, **kwargs)
@@ -774,16 +678,6 @@ class RouteMonitor:
774
678
  'process_time': process_time,
775
679
  'response_size': response_size
776
680
  }
777
-
778
- logger.debug("✅ 请求处理完成", {
779
- "请求ID": request_id,
780
- "函数名": func.__name__,
781
- "状态码": response_status,
782
- "处理时间": f"{process_time}ms",
783
- "响应大小": f"{response_size} bytes",
784
- "结果": "成功"
785
- })
786
-
787
681
  # 保存日志
788
682
  self.save_request_log(request_data, response_data)
789
683
 
@@ -805,14 +699,14 @@ class RouteMonitor:
805
699
  'response_size': 0
806
700
  }
807
701
 
808
- logger.error("请求处理异常", {
809
- "请求ID": request_id,
810
- "函数名": func.__name__,
811
- "错误信息": str(e),
812
- "错误类型": type(e).__name__,
813
- "处理时间": f"{process_time}ms",
814
- "结果": "异常"
815
- })
702
+ # logger.error("请求处理异常", {
703
+ # "请求ID": request_id,
704
+ # "函数名": func.__name__,
705
+ # "错误信息": str(e),
706
+ # "错误类型": type(e).__name__,
707
+ # "处理时间": f"{process_time}ms",
708
+ # "结果": "异常"
709
+ # })
816
710
 
817
711
  # 保存错误日志
818
712
  self.save_request_log(request_data, error_data)
@@ -829,11 +723,6 @@ class RouteMonitor:
829
723
  def get_statistics_summary(self, days: int = 7) -> Dict[str, Any]:
830
724
  """获取统计摘要"""
831
725
  try:
832
- logger.debug("📊 开始获取统计摘要", {
833
- "查询天数": days,
834
- "操作": "统计数据查询"
835
- })
836
-
837
726
  connection = self.pool.connection()
838
727
  try:
839
728
  with connection.cursor() as cursor:
@@ -843,12 +732,6 @@ class RouteMonitor:
843
732
  end_date = datetime.now().date()
844
733
  start_date = end_date - timedelta(days=days)
845
734
 
846
- logger.debug("📅 统计查询时间范围", {
847
- "开始日期": str(start_date),
848
- "结束日期": str(end_date),
849
- "查询天数": days
850
- })
851
-
852
735
  # 总体统计
853
736
  cursor.execute("""
854
737
  SELECT
@@ -862,15 +745,6 @@ class RouteMonitor:
862
745
  """, (start_date, end_date))
863
746
 
864
747
  summary = cursor.fetchone() or {}
865
-
866
- logger.debug("📈 总体统计查询完成", {
867
- "总请求数": summary.get('total_requests', 0) or 0,
868
- "成功请求数": summary.get('success_requests', 0) or 0,
869
- "错误请求数": summary.get('error_requests', 0) or 0,
870
- "平均响应时间": f"{(summary.get('avg_response_time', 0) or 0):.2f}ms",
871
- "唯一端点数": summary.get('unique_endpoints', 0) or 0
872
- })
873
-
874
748
  # 热门端点
875
749
  cursor.execute("""
876
750
  SELECT endpoint, SUM(total_requests) as requests
@@ -883,12 +757,6 @@ class RouteMonitor:
883
757
 
884
758
  top_endpoints = cursor.fetchall()
885
759
 
886
- logger.debug("🔥 热门端点查询完成", {
887
- "查询结果数": len(top_endpoints),
888
- "最热门端点": top_endpoints[0]['endpoint'] if top_endpoints else "无数据",
889
- "最高请求数": top_endpoints[0]['requests'] if top_endpoints else 0
890
- })
891
-
892
760
  # 活跃IP统计
893
761
  cursor.execute("""
894
762
  SELECT COUNT(DISTINCT ip_address) as unique_ips,
@@ -899,11 +767,6 @@ class RouteMonitor:
899
767
 
900
768
  ip_stats = cursor.fetchone() or {}
901
769
 
902
- logger.debug("🌐 IP统计查询完成", {
903
- "唯一IP数": ip_stats.get('unique_ips', 0),
904
- "IP总请求数": ip_stats.get('total_ip_requests', 0)
905
- })
906
-
907
770
  result = {
908
771
  'period': f'{start_date} to {end_date}',
909
772
  'summary': summary,
@@ -911,24 +774,18 @@ class RouteMonitor:
911
774
  'ip_statistics': ip_stats
912
775
  }
913
776
 
914
- logger.debug("✅ 统计摘要获取成功", {
915
- "查询天数": days,
916
- "数据完整性": "完整",
917
- "结果状态": "成功"
918
- })
919
-
920
777
  return result
921
778
 
922
779
  finally:
923
780
  connection.close()
924
781
 
925
782
  except Exception as e:
926
- logger.error("获取统计摘要失败", {
927
- "查询天数": days,
928
- "错误信息": str(e),
929
- "错误类型": type(e).__name__,
930
- "影响": "统计摘要不可用"
931
- })
783
+ # logger.error("获取统计摘要失败", {
784
+ # "查询天数": days,
785
+ # "错误信息": str(e),
786
+ # "错误类型": type(e).__name__,
787
+ # "影响": "统计摘要不可用"
788
+ # })
932
789
  return {'error': str(e)}
933
790
 
934
791
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mdbq
3
- Version: 4.1.2
3
+ Version: 4.1.3
4
4
  Home-page: https://pypi.org/project/mdbq
5
5
  Author: xigua,
6
6
  Author-email: 2587125111@qq.com
@@ -1,5 +1,5 @@
1
1
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
2
- mdbq/__version__.py,sha256=t0Fz3FVzbu8dTQm35pWtvOg5K2EJnfXWvbSU90YalwU,17
2
+ mdbq/__version__.py,sha256=Jjk9_jrwEIRLu3zYAhoUXB_8XlsokRJUl6DPuo2urQw,17
3
3
  mdbq/auth/__init__.py,sha256=pnPMAt63sh1B6kEvmutUuro46zVf2v2YDAG7q-jV_To,24
4
4
  mdbq/auth/auth_backend.py,sha256=u8qwMNjAdDliaaFzfulVqQIoFrIJhe980OdYwTIVKJg,99750
5
5
  mdbq/auth/crypto.py,sha256=fcZRFCnrKVVdWDUx_zds51ynFYwS9DBvJOrRQVldrfM,15931
@@ -9,11 +9,11 @@ mdbq/js/jc.py,sha256=6Rgf1WqaJJ1oevpn-pt08gXKbX5hjoQaV6uZGCAGbYw,13177
9
9
  mdbq/log/__init__.py,sha256=Mpbrav0s0ifLL7lVDAuePEi1hJKiSHhxcv1byBKDl5E,15
10
10
  mdbq/log/mylogger.py,sha256=DyBftCMNLe1pTTXsa830pUtDISJxpJHFIradYtE3lFA,26418
11
11
  mdbq/myconf/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
12
- mdbq/myconf/myconf.py,sha256=rHvQCnQRKhQ49AZBke-Z4v28hyOLmHt4MylIuB0H6yA,33516
12
+ mdbq/myconf/myconf.py,sha256=x_9mS6wOfKVjCVElbruxj2yjzitbyKiTkf59quG-5Zg,32529
13
13
  mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
14
14
  mdbq/mysql/deduplicator.py,sha256=2fugLyKs_xkvYvoG0C0hRYbJ_w8-4oa1FJ_vavoD7Qo,73084
15
15
  mdbq/mysql/mysql.py,sha256=pDg771xBugCMSTWeskIFTi3pFLgaqgyG3smzf-86Wn8,56772
16
- mdbq/mysql/s_query.py,sha256=nwhyqbxq-V0sGUJbdjiUDEwjpDxiKrzG0PjV6wkrWU4,50474
16
+ mdbq/mysql/s_query.py,sha256=N2xHJf2CiUXjXIVBemdst-wamIP3908EGAJOFG13fCU,50475
17
17
  mdbq/mysql/unique_.py,sha256=MaztT-WIyEQUs-OOYY4pFulgHVcXR1BfCy3QUz0XM_U,21127
18
18
  mdbq/mysql/uploader.py,sha256=FG_4btNwTjbCqZFeIigCfar7r-OOA7VkyuJsOOC9WLw,111539
19
19
  mdbq/other/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
@@ -26,16 +26,16 @@ mdbq/pbix/__init__.py,sha256=Trtfaynu9RjoTyLLYBN2xdRxTvm_zhCniUkVTAYwcjo,24
26
26
  mdbq/pbix/pbix_refresh.py,sha256=JUjKW3bNEyoMVfVfo77UhguvS5AWkixvVhDbw4_MHco,2396
27
27
  mdbq/pbix/refresh_all.py,sha256=OBT9EewSZ0aRS9vL_FflVn74d4l2G00wzHiikCC4TC0,5926
28
28
  mdbq/redis/__init__.py,sha256=YtgBlVSMDphtpwYX248wGge1x-Ex_mMufz4-8W0XRmA,12
29
- mdbq/redis/getredis.py,sha256=vpBuNc22uj9Vr-_Dh25_wpwWM1e-072EAAIBdB_IpL0,23494
30
- mdbq/redis/redis_cache.py,sha256=2uhAmG9FYHjzByZEg7o8fFFxuF6AnOASVvjdAM4Gx5E,35719
29
+ mdbq/redis/getredis.py,sha256=vdg7YQEjhoMp5QzxygNGx5DQKRnePrcwPYgUrDypA6g,23672
30
+ mdbq/redis/redis_cache.py,sha256=hjl-oh1r0ONdMeWN28cTMsdkcjc7HG71NmGZ9H6jWUE,34559
31
31
  mdbq/route/__init__.py,sha256=BT_dAY7V-U2o72bevq1B9Mq9QA7GodwtkxyLNdGaoE8,22
32
32
  mdbq/route/analytics.py,sha256=dngj5hVwKddEUy59nSYbOoJ9C7OVrtCmCkvW6Uj9RYM,28097
33
- mdbq/route/monitor.py,sha256=8uscuoJF4eMr5o8cAqywQv868m1yIGCuK1l4ZWN6KPE,42954
33
+ mdbq/route/monitor.py,sha256=lyowGUU8c2GykeZLrdxd7nXpNMqXWcOsuQsbS8l0pwU,36595
34
34
  mdbq/route/routes.py,sha256=QVGfTvDgu0CpcKCvk1ra74H8uojgqTLUav1fnVAqLEA,29433
35
35
  mdbq/selenium/__init__.py,sha256=AKzeEceqZyvqn2dEDoJSzDQnbuENkJSHAlbHAD0u0ZI,10
36
36
  mdbq/selenium/get_driver.py,sha256=1NTlVUE6QsyjTrVVVqTO2LOnYf578ccFWlWnvIXGtic,20903
37
37
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
38
- mdbq-4.1.2.dist-info/METADATA,sha256=PsddxgvUrLRPSZGLVd4b64UA19gU-LjiHkK2FV1_OVM,363
39
- mdbq-4.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
40
- mdbq-4.1.2.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
41
- mdbq-4.1.2.dist-info/RECORD,,
38
+ mdbq-4.1.3.dist-info/METADATA,sha256=9NMHciONJynTrghvSivIGIOTpBkPaiF4FlS-spIRbXc,363
39
+ mdbq-4.1.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
40
+ mdbq-4.1.3.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
41
+ mdbq-4.1.3.dist-info/RECORD,,
File without changes