crawlo 1.2.8__py3-none-any.whl → 1.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of crawlo might be problematic.

Files changed (221)
  1. crawlo/__init__.py +63 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +323 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +186 -186
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -251
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +366 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -251
  19. crawlo/crawler.py +1103 -1100
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -107
  22. crawlo/downloader/__init__.py +273 -266
  23. crawlo/downloader/aiohttp_downloader.py +226 -228
  24. crawlo/downloader/cffi_downloader.py +245 -256
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +43 -43
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +136 -136
  50. crawlo/middleware/offsite.py +114 -114
  51. crawlo/middleware/proxy.py +386 -368
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -0
  57. crawlo/mode_manager.py +211 -211
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -338
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +157 -157
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +223 -223
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +317 -317
  70. crawlo/pipelines/pipeline_manager.py +62 -62
  71. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  72. crawlo/project.py +290 -315
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -378
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +216 -220
  78. crawlo/settings/setting_manager.py +163 -122
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +261 -288
  89. crawlo/templates/project/settings_distributed.py.tmpl +174 -157
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -100
  91. crawlo/templates/project/settings_high_performance.py.tmpl +125 -134
  92. crawlo/templates/project/settings_minimal.py.tmpl +30 -0
  93. crawlo/templates/project/settings_simple.py.tmpl +96 -98
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +47 -47
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/{cleaners → tools}/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +290 -36
  103. crawlo/tools/distributed_coordinator.py +388 -387
  104. crawlo/{cleaners → tools}/encoding_converter.py +127 -126
  105. crawlo/tools/request_tools.py +83 -0
  106. crawlo/tools/retry_mechanism.py +224 -221
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/{cleaners → tools}/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +35 -35
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +187 -128
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/METADATA +1011 -764
  131. crawlo-1.2.9.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -237
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +143 -103
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +67 -0
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +151 -0
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +153 -0
  156. tests/test_config_validator.py +182 -193
  157. tests/test_crawlo_proxy_integration.py +109 -173
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -0
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -357
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +185 -0
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +73 -0
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +112 -0
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -153
  213. tests/test_user_agents.py +97 -0
  214. tests/tools_example.py +260 -257
  215. tests/verify_distributed.py +117 -0
  216. crawlo/cleaners/__init__.py +0 -61
  217. crawlo/utils/date_tools.py +0 -290
  218. crawlo-1.2.8.dist-info/RECORD +0 -209
  219. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/WHEEL +0 -0
  220. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.2.8.dist-info → crawlo-1.2.9.dist-info}/top_level.txt +0 -0
crawlo/utils/env_config.py
@@ -1,143 +1,143 @@
#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
Environment variable configuration utilities.
Provides a unified mechanism for reading environment variables and managing configuration.
"""
import os
import re
from typing import Any


class EnvConfigManager:
    """Environment variable configuration manager."""

    @staticmethod
    def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
        """
        Read an environment variable.

        Args:
            var_name: name of the environment variable
            default: default value
            var_type: variable type (str, int, float, bool)

        Returns:
            The environment variable value, or the default.
        """
        value = os.getenv(var_name)
        if value is None:
            return default

        try:
            if var_type == bool:
                return value.lower() in ('1', 'true', 'yes', 'on')
            elif var_type == int:
                return int(value)
            elif var_type == float:
                return float(value)
            else:
                return value
        except (ValueError, TypeError):
            return default

    @staticmethod
    def get_redis_config() -> dict:
        """
        Get the Redis configuration.

        Returns:
            Redis configuration dict.
        """
        return {
            'REDIS_HOST': EnvConfigManager.get_env_var('REDIS_HOST', '127.0.0.1', str),
            'REDIS_PORT': EnvConfigManager.get_env_var('REDIS_PORT', 6379, int),
            'REDIS_PASSWORD': EnvConfigManager.get_env_var('REDIS_PASSWORD', '', str),
            'REDIS_DB': EnvConfigManager.get_env_var('REDIS_DB', 0, int),
        }

    @staticmethod
    def get_runtime_config() -> dict:
        """
        Get the runtime configuration.

        Returns:
            Runtime configuration dict.
        """
        return {
            'CRAWLO_MODE': EnvConfigManager.get_env_var('CRAWLO_MODE', 'standalone', str),
            'PROJECT_NAME': EnvConfigManager.get_env_var('PROJECT_NAME', 'crawlo', str),
            'CONCURRENCY': EnvConfigManager.get_env_var('CONCURRENCY', 8, int),
        }

    @staticmethod
    def get_version() -> str:
        """
        Get the framework version number.

        Returns:
            Framework version string.
        """
        # Path of the version file
        version_file = os.path.join(os.path.dirname(__file__), '..', '__version__.py')
        default_version = '1.0.0'

        if os.path.exists(version_file):
            try:
                with open(version_file, 'r', encoding='utf-8') as f:
                    content = f.read()
                # Extract the version number with a regular expression
                version_match = re.search(r"__version__\s*=\s*['\"]([^'\"]*)['\"]", content)
                if version_match:
                    return version_match.group(1)
            except Exception:
                # Fall back to the default version if reading fails
                pass

        return default_version


# Convenience functions
def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
    """
    Convenience function: read an environment variable.

    Args:
        var_name: name of the environment variable
        default: default value
        var_type: variable type (str, int, float, bool)

    Returns:
        The environment variable value, or the default.
    """
    return EnvConfigManager.get_env_var(var_name, default, var_type)


def get_redis_config() -> dict:
    """
    Convenience function: get the Redis configuration.

    Returns:
        Redis configuration dict.
    """
    return EnvConfigManager.get_redis_config()


def get_runtime_config() -> dict:
    """
    Convenience function: get the runtime configuration.

    Returns:
        Runtime configuration dict.
    """
    return EnvConfigManager.get_runtime_config()


def get_version() -> str:
    """
    Convenience function: get the framework version number.

    Returns:
        Framework version string.
    """
    return EnvConfigManager.get_version()
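
The hunk above is the full content of crawlo/utils/env_config.py. A minimal usage sketch, assuming only what the hunk itself shows (the import path follows the file list; the environment variable values are purely illustrative):

    # Illustrative only: exercises the convenience functions from the hunk above,
    # assuming they are importable as crawlo.utils.env_config (per the file list).
    import os

    from crawlo.utils.env_config import (
        get_env_var, get_redis_config, get_runtime_config, get_version,
    )

    os.environ['REDIS_PORT'] = '6380'            # parsed as int
    os.environ['CONCURRENCY'] = 'not-a-number'   # int() fails, so the default (8) is returned

    print(get_redis_config())     # REDIS_HOST defaults to '127.0.0.1', REDIS_PORT becomes 6380
    print(get_runtime_config())   # CONCURRENCY stays 8 because the parse failure falls back
    print(get_env_var('CRAWLO_MODE', 'standalone', str))
    print(get_version())          # version read from crawlo/__version__.py, else '1.0.0'

The design point visible in the hunk is that a failed type conversion never raises; get_env_var silently returns the caller-supplied default.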
crawlo/utils/error_handler.py
@@ -1,124 +1,124 @@
#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
Unified error-handling utilities.
Provides a consistent error handling and logging mechanism.
"""
from functools import wraps
from typing import Callable, Any

from crawlo.utils.enhanced_error_handler import EnhancedErrorHandler, ErrorContext
from crawlo.utils.log import get_logger


class ErrorHandler:
    """Unified error handler (simplified; the enhanced handler is used as the backend)."""

    def __init__(self, logger_name: str = __name__, log_level: str = 'ERROR'):
        self.logger = get_logger(logger_name, log_level)
        # Use the enhanced error handler as the backend
        self._enhanced_handler = EnhancedErrorHandler(logger_name, log_level)

    def handle_error(self, exception: Exception, context: str = "",
                     raise_error: bool = True, log_error: bool = True) -> None:
        """
        Handle an error in a unified way.

        Args:
            exception: the exception object
            context: description of the error context
            raise_error: whether to re-raise the exception
            log_error: whether to log the error
        """
        # Convert to the enhanced error context
        error_context = ErrorContext(context=context) if context else None
        self._enhanced_handler.handle_error(
            exception, context=error_context,
            raise_error=raise_error, log_error=log_error
        )

    def safe_call(self, func: Callable, *args, default_return=None,
                  context: str = "", **kwargs) -> Any:
        """
        Call a function safely, catching and handling exceptions.

        Args:
            func: the function to call
            *args: positional arguments for the function
            default_return: default return value
            context: description of the error context
            **kwargs: keyword arguments for the function

        Returns:
            The function's return value, or the default.
        """
        error_context = ErrorContext(context=context) if context else None
        return self._enhanced_handler.safe_call(
            func, *args, default_return=default_return,
            context=error_context, **kwargs
        )

    def retry_on_failure(self, max_retries: int = 3, delay: float = 1.0,
                         exceptions: tuple = (Exception,)):
        """
        Decorator: retry on failure.

        Args:
            max_retries: maximum number of retries
            delay: delay between retries (seconds)
            exceptions: exception types that trigger a retry
        """
        def decorator(func):
            # Delegate directly to the enhanced handler's retry decorator
            return self._enhanced_handler.retry_on_failure(
                max_retries=max_retries, delay=delay, exceptions=exceptions
            )(func)
        return decorator


# Global error handler instance
default_error_handler = ErrorHandler()


def handle_exception(context: str = "", raise_error: bool = True, log_error: bool = True):
    """
    Decorator: handle exceptions raised by a function.

    Args:
        context: description of the error context
        raise_error: whether to re-raise the exception
        log_error: whether to log the error
    """
    def decorator(func):
        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except Exception as e:
                default_error_handler.handle_error(
                    e, context=f"{context} - {func.__name__}",
                    raise_error=raise_error, log_error=log_error
                )
                if not raise_error:
                    return None

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                default_error_handler.handle_error(
                    e, context=f"{context} - {func.__name__}",
                    raise_error=raise_error, log_error=log_error
                )
                if not raise_error:
                    return None

        # Return the wrapper that matches the decorated function (async or sync)
        import inspect
        if inspect.iscoroutinefunction(func):
            return async_wrapper
        else:
            return sync_wrapper

    return decorator
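
A minimal usage sketch of the error_handler module above, assuming the import path from the file list; parse_price and fetch_page are hypothetical examples, and what actually gets logged or re-raised is delegated to EnhancedErrorHandler:

    # Illustrative only: uses ErrorHandler / handle_exception as defined in the hunk above,
    # assuming they are importable as crawlo.utils.error_handler (per the file list).
    from crawlo.utils.error_handler import default_error_handler, handle_exception


    def parse_price(raw: str) -> float:   # hypothetical helper
        return float(raw)


    # safe_call catches the ValueError and returns the supplied default instead.
    price = default_error_handler.safe_call(
        parse_price, "12.5x", default_return=0.0, context="parsing price field"
    )
    print(price)  # 0.0


    @handle_exception(context="demo", raise_error=False)
    def fetch_page(url: str) -> str:      # hypothetical function
        raise ConnectionError(f"cannot reach {url}")


    print(fetch_page("https://example.com"))  # error handled, returns None since raise_error=False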