crawlo-1.2.6-py3-none-any.whl → crawlo-1.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged by the registry.

Files changed (209)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +75 -88
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +138 -144
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -323
  14. crawlo/commands/startproject.py +436 -436
  15. crawlo/commands/stats.py +187 -187
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -251
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +365 -356
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +251 -239
  23. crawlo/crawler.py +1099 -1110
  24. crawlo/data/__init__.py +5 -5
  25. crawlo/data/user_agents.py +107 -107
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +228 -221
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -212
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +39 -38
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +234 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -52
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +136 -135
  54. crawlo/middleware/offsite.py +114 -114
  55. crawlo/middleware/proxy.py +367 -367
  56. crawlo/middleware/request_ignore.py +86 -86
  57. crawlo/middleware/response_code.py +163 -163
  58. crawlo/middleware/response_filter.py +136 -136
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -317
  73. crawlo/pipelines/pipeline_manager.py +62 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +166 -165
  75. crawlo/project.py +314 -279
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +377 -376
  78. crawlo/queue/redis_priority_queue.py +306 -306
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +219 -215
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -129
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/settings.py.tmpl +288 -288
  92. crawlo/templates/project/settings_distributed.py.tmpl +157 -157
  93. crawlo/templates/project/settings_gentle.py.tmpl +100 -100
  94. crawlo/templates/project/settings_high_performance.py.tmpl +134 -134
  95. crawlo/templates/project/settings_simple.py.tmpl +98 -98
  96. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  97. crawlo/templates/run.py.tmpl +45 -45
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -259
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -356
  113. crawlo/utils/env_config.py +143 -106
  114. crawlo/utils/error_handler.py +123 -123
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -344
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -285
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +351 -351
  122. crawlo/utils/redis_key_validator.py +198 -198
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -218
  125. crawlo/utils/spider_loader.py +61 -61
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/METADATA +764 -764
  130. crawlo-1.2.7.dist-info/RECORD +209 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_consistency.py +81 -0
  152. tests/test_config_validator.py +193 -193
  153. tests/test_crawlo_proxy_integration.py +172 -172
  154. tests/test_date_tools.py +123 -123
  155. tests/test_default_header_middleware.py +158 -158
  156. tests/test_double_crawlo_fix.py +207 -207
  157. tests/test_double_crawlo_fix_simple.py +124 -124
  158. tests/test_download_delay_middleware.py +221 -221
  159. tests/test_downloader_proxy_compatibility.py +268 -268
  160. tests/test_dynamic_downloaders_proxy.py +124 -124
  161. tests/test_dynamic_proxy.py +92 -92
  162. tests/test_dynamic_proxy_config.py +146 -146
  163. tests/test_dynamic_proxy_real.py +109 -109
  164. tests/test_edge_cases.py +303 -303
  165. tests/test_enhanced_error_handler.py +270 -270
  166. tests/test_env_config.py +121 -121
  167. tests/test_error_handler_compatibility.py +112 -112
  168. tests/test_final_validation.py +153 -153
  169. tests/test_framework_env_usage.py +103 -103
  170. tests/test_integration.py +356 -356
  171. tests/test_item_dedup_redis_key.py +122 -122
  172. tests/test_mode_consistency.py +52 -0
  173. tests/test_offsite_middleware.py +221 -221
  174. tests/test_parsel.py +29 -29
  175. tests/test_performance.py +327 -327
  176. tests/test_proxy_api.py +264 -264
  177. tests/test_proxy_health_check.py +32 -32
  178. tests/test_proxy_middleware.py +121 -121
  179. tests/test_proxy_middleware_enhanced.py +216 -216
  180. tests/test_proxy_middleware_integration.py +136 -136
  181. tests/test_proxy_providers.py +56 -56
  182. tests/test_proxy_stats.py +19 -19
  183. tests/test_proxy_strategies.py +59 -59
  184. tests/test_queue_manager_double_crawlo.py +173 -173
  185. tests/test_queue_manager_redis_key.py +176 -176
  186. tests/test_real_scenario_proxy.py +195 -195
  187. tests/test_redis_config.py +28 -28
  188. tests/test_redis_connection_pool.py +294 -294
  189. tests/test_redis_key_naming.py +181 -181
  190. tests/test_redis_key_validator.py +123 -123
  191. tests/test_redis_queue.py +224 -224
  192. tests/test_request_ignore_middleware.py +182 -182
  193. tests/test_request_serialization.py +70 -70
  194. tests/test_response_code_middleware.py +349 -349
  195. tests/test_response_filter_middleware.py +427 -427
  196. tests/test_response_improvements.py +152 -152
  197. tests/test_retry_middleware.py +241 -241
  198. tests/test_scheduler.py +252 -241
  199. tests/test_scheduler_config_update.py +134 -0
  200. tests/test_simple_response.py +61 -61
  201. tests/test_telecom_spider_redis_key.py +205 -205
  202. tests/test_template_content.py +87 -87
  203. tests/test_template_redis_key.py +134 -134
  204. tests/test_tools.py +153 -153
  205. tests/tools_example.py +257 -257
  206. crawlo-1.2.6.dist-info/RECORD +0 -206
  207. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/WHEEL +0 -0
  208. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/entry_points.txt +0 -0
  209. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/top_level.txt +0 -0
crawlo/utils/env_config.py
@@ -1,106 +1,143 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- Environment variable configuration utilities
- Provides a unified mechanism for reading environment variables and managing configuration
- """
- import os
- from typing import Any
-
-
- class EnvConfigManager:
-     """Environment variable configuration manager"""
-
-     @staticmethod
-     def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
-         """
-         Get the value of an environment variable
-
-         Args:
-             var_name: name of the environment variable
-             default: default value
-             var_type: type of the variable (str, int, float, bool)
-
-         Returns:
-             The environment variable value, or the default
-         """
-         value = os.getenv(var_name)
-         if value is None:
-             return default
-
-         try:
-             if var_type == bool:
-                 return value.lower() in ('1', 'true', 'yes', 'on')
-             elif var_type == int:
-                 return int(value)
-             elif var_type == float:
-                 return float(value)
-             else:
-                 return value
-         except (ValueError, TypeError):
-             return default
-
-     @staticmethod
-     def get_redis_config() -> dict:
-         """
-         Get the Redis configuration
-
-         Returns:
-             Redis configuration dict
-         """
-         return {
-             'REDIS_HOST': EnvConfigManager.get_env_var('REDIS_HOST', '127.0.0.1', str),
-             'REDIS_PORT': EnvConfigManager.get_env_var('REDIS_PORT', 6379, int),
-             'REDIS_PASSWORD': EnvConfigManager.get_env_var('REDIS_PASSWORD', '', str),
-             'REDIS_DB': EnvConfigManager.get_env_var('REDIS_DB', 0, int),
-         }
-
-     @staticmethod
-     def get_runtime_config() -> dict:
-         """
-         Get the runtime configuration
-
-         Returns:
-             Runtime configuration dict
-         """
-         return {
-             'CRAWLO_MODE': EnvConfigManager.get_env_var('CRAWLO_MODE', 'standalone', str),
-             'PROJECT_NAME': EnvConfigManager.get_env_var('PROJECT_NAME', 'crawlo', str),
-             'CONCURRENCY': EnvConfigManager.get_env_var('CONCURRENCY', 8, int),
-         }
-
-
- # Convenience functions
- def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
-     """
-     Convenience function: get the value of an environment variable
-
-     Args:
-         var_name: name of the environment variable
-         default: default value
-         var_type: type of the variable (str, int, float, bool)
-
-     Returns:
-         The environment variable value, or the default
-     """
-     return EnvConfigManager.get_env_var(var_name, default, var_type)
-
-
- def get_redis_config() -> dict:
-     """
-     Convenience function: get the Redis configuration
-
-     Returns:
-         Redis configuration dict
-     """
-     return EnvConfigManager.get_redis_config()
-
-
- def get_runtime_config() -> dict:
-     """
-     Convenience function: get the runtime configuration
-
-     Returns:
-         Runtime configuration dict
-     """
-     return EnvConfigManager.get_runtime_config()
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Environment variable configuration utilities
+ Provides a unified mechanism for reading environment variables and managing configuration
+ """
+ import os
+ import re
+ from typing import Any
+
+
+ class EnvConfigManager:
+     """Environment variable configuration manager"""
+
+     @staticmethod
+     def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
+         """
+         Get the value of an environment variable
+
+         Args:
+             var_name: name of the environment variable
+             default: default value
+             var_type: type of the variable (str, int, float, bool)
+
+         Returns:
+             The environment variable value, or the default
+         """
+         value = os.getenv(var_name)
+         if value is None:
+             return default
+
+         try:
+             if var_type == bool:
+                 return value.lower() in ('1', 'true', 'yes', 'on')
+             elif var_type == int:
+                 return int(value)
+             elif var_type == float:
+                 return float(value)
+             else:
+                 return value
+         except (ValueError, TypeError):
+             return default
+
+     @staticmethod
+     def get_redis_config() -> dict:
+         """
+         Get the Redis configuration
+
+         Returns:
+             Redis configuration dict
+         """
+         return {
+             'REDIS_HOST': EnvConfigManager.get_env_var('REDIS_HOST', '127.0.0.1', str),
+             'REDIS_PORT': EnvConfigManager.get_env_var('REDIS_PORT', 6379, int),
+             'REDIS_PASSWORD': EnvConfigManager.get_env_var('REDIS_PASSWORD', '', str),
+             'REDIS_DB': EnvConfigManager.get_env_var('REDIS_DB', 0, int),
+         }
+
+     @staticmethod
+     def get_runtime_config() -> dict:
+         """
+         Get the runtime configuration
+
+         Returns:
+             Runtime configuration dict
+         """
+         return {
+             'CRAWLO_MODE': EnvConfigManager.get_env_var('CRAWLO_MODE', 'standalone', str),
+             'PROJECT_NAME': EnvConfigManager.get_env_var('PROJECT_NAME', 'crawlo', str),
+             'CONCURRENCY': EnvConfigManager.get_env_var('CONCURRENCY', 8, int),
+         }
+
+     @staticmethod
+     def get_version() -> str:
+         """
+         Get the framework version number
+
+         Returns:
+             The framework version string
+         """
+         # Path to the version file
+         version_file = os.path.join(os.path.dirname(__file__), '..', '__version__.py')
+         default_version = '1.0.0'
+
+         if os.path.exists(version_file):
+             try:
+                 with open(version_file, 'r', encoding='utf-8') as f:
+                     content = f.read()
+                 # Extract the version number with a regular expression
+                 version_match = re.search(r"__version__\s*=\s*['\"]([^'\"]*)['\"]", content)
+                 if version_match:
+                     return version_match.group(1)
+             except Exception:
+                 # Fall back to the default version if reading fails
+                 pass
+
+         return default_version
+
+
+ # Convenience functions
+ def get_env_var(var_name: str, default: Any = None, var_type: type = str) -> Any:
+     """
+     Convenience function: get the value of an environment variable
+
+     Args:
+         var_name: name of the environment variable
+         default: default value
+         var_type: type of the variable (str, int, float, bool)
+
+     Returns:
+         The environment variable value, or the default
+     """
+     return EnvConfigManager.get_env_var(var_name, default, var_type)
+
+
+ def get_redis_config() -> dict:
+     """
+     Convenience function: get the Redis configuration
+
+     Returns:
+         Redis configuration dict
+     """
+     return EnvConfigManager.get_redis_config()
+
+
+ def get_runtime_config() -> dict:
+     """
+     Convenience function: get the runtime configuration
+
+     Returns:
+         Runtime configuration dict
+     """
+     return EnvConfigManager.get_runtime_config()
+
+
+ def get_version() -> str:
+     """
+     Convenience function: get the framework version number
+
+     Returns:
+         The framework version string
+     """
+     return EnvConfigManager.get_version()
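
For reference, a minimal usage sketch of the helpers this file exposes, including the get_version() added in 1.2.7. It assumes no relevant environment variables are set, so the defaults shown in the diff apply:

    from crawlo.utils.env_config import get_env_var, get_redis_config, get_version

    concurrency = get_env_var('CONCURRENCY', 8, int)   # coerced to int; 8 if unset or unparsable
    mode = get_env_var('CRAWLO_MODE', 'standalone')    # plain string lookup with a default

    redis_cfg = get_redis_config()
    # -> {'REDIS_HOST': '127.0.0.1', 'REDIS_PORT': 6379, 'REDIS_PASSWORD': '', 'REDIS_DB': 0}

    print(get_version())  # parsed from crawlo/__version__.py; falls back to '1.0.0'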
crawlo/utils/error_handler.py
@@ -1,124 +1,124 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- Unified error handling utilities
- Provides a consistent error handling and logging mechanism
- """
- from functools import wraps
- from typing import Callable, Any
-
- from crawlo.utils.enhanced_error_handler import EnhancedErrorHandler, ErrorContext
- from crawlo.utils.log import get_logger
-
-
- class ErrorHandler:
-     """Unified error handler (simplified; uses the enhanced handler as the backend)"""
-
-     def __init__(self, logger_name: str = __name__, log_level: str = 'ERROR'):
-         self.logger = get_logger(logger_name, log_level)
-         # Use the enhanced error handler as the backend
-         self._enhanced_handler = EnhancedErrorHandler(logger_name, log_level)
-
-     def handle_error(self, exception: Exception, context: str = "",
-                      raise_error: bool = True, log_error: bool = True) -> None:
-         """
-         Handle an error in a unified way
-
-         Args:
-             exception: the exception object
-             context: description of the error context
-             raise_error: whether to re-raise the exception
-             log_error: whether to log the error
-         """
-         # Convert to the enhanced handler's error context
-         error_context = ErrorContext(context=context) if context else None
-         self._enhanced_handler.handle_error(
-             exception, context=error_context,
-             raise_error=raise_error, log_error=log_error
-         )
-
-     def safe_call(self, func: Callable, *args, default_return=None,
-                   context: str = "", **kwargs) -> Any:
-         """
-         Call a function safely, catching and handling exceptions
-
-         Args:
-             func: the function to call
-             *args: positional arguments for the function
-             default_return: default return value
-             context: description of the error context
-             **kwargs: keyword arguments for the function
-
-         Returns:
-             The function's return value, or the default
-         """
-         error_context = ErrorContext(context=context) if context else None
-         return self._enhanced_handler.safe_call(
-             func, *args, default_return=default_return,
-             context=error_context, **kwargs
-         )
-
-     def retry_on_failure(self, max_retries: int = 3, delay: float = 1.0,
-                          exceptions: tuple = (Exception,)):
-         """
-         Decorator: retry on failure
-
-         Args:
-             max_retries: maximum number of retries
-             delay: delay between retries (seconds)
-             exceptions: exception types that trigger a retry
-         """
-         def decorator(func):
-             # Delegate directly to the enhanced handler's retry decorator
-             return self._enhanced_handler.retry_on_failure(
-                 max_retries=max_retries, delay=delay, exceptions=exceptions
-             )(func)
-         return decorator
-
-
- # Global error handler instance
- default_error_handler = ErrorHandler()
-
-
- def handle_exception(context: str = "", raise_error: bool = True, log_error: bool = True):
-     """
-     Decorator: handle exceptions raised by a function
-
-     Args:
-         context: description of the error context
-         raise_error: whether to re-raise the exception
-         log_error: whether to log the error
-     """
-     def decorator(func):
-         @wraps(func)
-         async def async_wrapper(*args, **kwargs):
-             try:
-                 return await func(*args, **kwargs)
-             except Exception as e:
-                 default_error_handler.handle_error(
-                     e, context=f"{context} - {func.__name__}",
-                     raise_error=raise_error, log_error=log_error
-                 )
-                 if not raise_error:
-                     return None
-
-         @wraps(func)
-         def sync_wrapper(*args, **kwargs):
-             try:
-                 return func(*args, **kwargs)
-             except Exception as e:
-                 default_error_handler.handle_error(
-                     e, context=f"{context} - {func.__name__}",
-                     raise_error=raise_error, log_error=log_error
-                 )
-                 if not raise_error:
-                     return None
-
-         # Return the matching wrapper depending on whether the function is async
-         import inspect
-         if inspect.iscoroutinefunction(func):
-             return async_wrapper
-         else:
-             return sync_wrapper
-
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Unified error handling utilities
+ Provides a consistent error handling and logging mechanism
+ """
+ from functools import wraps
+ from typing import Callable, Any
+
+ from crawlo.utils.enhanced_error_handler import EnhancedErrorHandler, ErrorContext
+ from crawlo.utils.log import get_logger
+
+
+ class ErrorHandler:
+     """Unified error handler (simplified; uses the enhanced handler as the backend)"""
+
+     def __init__(self, logger_name: str = __name__, log_level: str = 'ERROR'):
+         self.logger = get_logger(logger_name, log_level)
+         # Use the enhanced error handler as the backend
+         self._enhanced_handler = EnhancedErrorHandler(logger_name, log_level)
+
+     def handle_error(self, exception: Exception, context: str = "",
+                      raise_error: bool = True, log_error: bool = True) -> None:
+         """
+         Handle an error in a unified way
+
+         Args:
+             exception: the exception object
+             context: description of the error context
+             raise_error: whether to re-raise the exception
+             log_error: whether to log the error
+         """
+         # Convert to the enhanced handler's error context
+         error_context = ErrorContext(context=context) if context else None
+         self._enhanced_handler.handle_error(
+             exception, context=error_context,
+             raise_error=raise_error, log_error=log_error
+         )
+
+     def safe_call(self, func: Callable, *args, default_return=None,
+                   context: str = "", **kwargs) -> Any:
+         """
+         Call a function safely, catching and handling exceptions
+
+         Args:
+             func: the function to call
+             *args: positional arguments for the function
+             default_return: default return value
+             context: description of the error context
+             **kwargs: keyword arguments for the function
+
+         Returns:
+             The function's return value, or the default
+         """
+         error_context = ErrorContext(context=context) if context else None
+         return self._enhanced_handler.safe_call(
+             func, *args, default_return=default_return,
+             context=error_context, **kwargs
+         )
+
+     def retry_on_failure(self, max_retries: int = 3, delay: float = 1.0,
+                          exceptions: tuple = (Exception,)):
+         """
+         Decorator: retry on failure
+
+         Args:
+             max_retries: maximum number of retries
+             delay: delay between retries (seconds)
+             exceptions: exception types that trigger a retry
+         """
+         def decorator(func):
+             # Delegate directly to the enhanced handler's retry decorator
+             return self._enhanced_handler.retry_on_failure(
+                 max_retries=max_retries, delay=delay, exceptions=exceptions
+             )(func)
+         return decorator
+
+
+ # Global error handler instance
+ default_error_handler = ErrorHandler()
+
+
+ def handle_exception(context: str = "", raise_error: bool = True, log_error: bool = True):
+     """
+     Decorator: handle exceptions raised by a function
+
+     Args:
+         context: description of the error context
+         raise_error: whether to re-raise the exception
+         log_error: whether to log the error
+     """
+     def decorator(func):
+         @wraps(func)
+         async def async_wrapper(*args, **kwargs):
+             try:
+                 return await func(*args, **kwargs)
+             except Exception as e:
+                 default_error_handler.handle_error(
+                     e, context=f"{context} - {func.__name__}",
+                     raise_error=raise_error, log_error=log_error
+                 )
+                 if not raise_error:
+                     return None
+
+         @wraps(func)
+         def sync_wrapper(*args, **kwargs):
+             try:
+                 return func(*args, **kwargs)
+             except Exception as e:
+                 default_error_handler.handle_error(
+                     e, context=f"{context} - {func.__name__}",
+                     raise_error=raise_error, log_error=log_error
+                 )
+                 if not raise_error:
+                     return None
+
+         # Return the matching wrapper depending on whether the function is async
+         import inspect
+         if inspect.iscoroutinefunction(func):
+             return async_wrapper
+         else:
+             return sync_wrapper
+
      return decorator
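
A short sketch of how the handle_exception decorator behaves; parse_port and its inputs are hypothetical, for illustration only:

    from crawlo.utils.error_handler import handle_exception

    @handle_exception(context="parsing config", raise_error=False)
    def parse_port(raw: str) -> int:  # hypothetical function, not part of the package
        return int(raw)  # a ValueError here is logged but not re-raised

    parse_port("8080")   # -> 8080
    parse_port("oops")   # -> None; the error is routed through default_error_handler

With raise_error=True (the default), the same failure would be logged and then re-raised by the handler instead of returning None.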
crawlo/utils/func_tools.py
@@ -1,82 +1,82 @@
- # -*- coding: UTF-8 -*-
- from typing import Union, AsyncGenerator, Generator
- from inspect import isgenerator, isasyncgen
- from crawlo import Response, Request, Item
- from crawlo.exceptions import TransformTypeError
-
- T = Union[Request, Item]
-
-
- async def transform(
-         func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
-         response: Response
- ) -> AsyncGenerator[Union[T, Exception], None]:
-     """
-     Normalize a callback's output into a single async generator
-
-     Args:
-         func: a sync or async generator (the callback's return value)
-         response: the current response object
-
-     Yields:
-         Union[T, Exception]: Request/Item objects, or an exception object
-
-     Raises:
-         TransformTypeError: if the input type is not acceptable
-     """
-
-     def _set_meta(obj: T) -> T:
-         """Uniformly set the request's depth metadata"""
-         if isinstance(obj, Request):
-             obj.meta.setdefault('depth', response.meta.get('depth', 0))
-         return obj
-
-     # Up-front type check
-     if not (isgenerator(func) or isasyncgen(func)):
-         raise TransformTypeError(
-             f'Callback must return generator or async generator, got {type(func).__name__}'
-         )
-
-     try:
-         if isgenerator(func):
-             # Sync generator path
-             for item in func:
-                 yield _set_meta(item)
-         else:
-             # Async generator path
-             async for item in func:
-                 yield _set_meta(item)
-
-     except Exception as e:
-         yield e
-
- # #!/usr/bin/python
- # # -*- coding:UTF-8 -*-
- # from typing import Callable, Union
- # from inspect import isgenerator, isasyncgen
- # from crawlo import Response, Request, Item
- # from crawlo.exceptions import TransformTypeError
- #
- #
- # T = Union[Request, Item]
- #
- #
- # async def transform(func: Callable, response: Response):
- #     def set_request(t: T) -> T:
- #         if isinstance(t, Request):
- #             t.meta['depth'] = response.meta['depth']
- #         return t
- #     try:
- #         if isgenerator(func):
- #             for f in func:
- #                 yield set_request(f)
- #         elif isasyncgen(func):
- #             async for f in func:
- #                 yield set_request(f)
- #         else:
- #             raise TransformTypeError(
- #                 f'callback return type error: {type(func)} must be `generator` or `async generator`'
- #             )
- #     except Exception as exp:
- #         yield exp
-
+ # -*- coding: UTF-8 -*-
+ from typing import Union, AsyncGenerator, Generator
+ from inspect import isgenerator, isasyncgen
+ from crawlo import Response, Request, Item
+ from crawlo.exceptions import TransformTypeError
+
+ T = Union[Request, Item]
+
+
+ async def transform(
+         func: Union[Generator[T, None, None], AsyncGenerator[T, None]],
+         response: Response
+ ) -> AsyncGenerator[Union[T, Exception], None]:
+     """
+     Normalize a callback's output into a single async generator
+
+     Args:
+         func: a sync or async generator (the callback's return value)
+         response: the current response object
+
+     Yields:
+         Union[T, Exception]: Request/Item objects, or an exception object
+
+     Raises:
+         TransformTypeError: if the input type is not acceptable
+     """
+
+     def _set_meta(obj: T) -> T:
+         """Uniformly set the request's depth metadata"""
+         if isinstance(obj, Request):
+             obj.meta.setdefault('depth', response.meta.get('depth', 0))
+         return obj
+
+     # Up-front type check
+     if not (isgenerator(func) or isasyncgen(func)):
+         raise TransformTypeError(
+             f'Callback must return generator or async generator, got {type(func).__name__}'
+         )
+
+     try:
+         if isgenerator(func):
+             # Sync generator path
+             for item in func:
+                 yield _set_meta(item)
+         else:
+             # Async generator path
+             async for item in func:
+                 yield _set_meta(item)
+
+     except Exception as e:
+         yield e
+
+ # #!/usr/bin/python
+ # # -*- coding:UTF-8 -*-
+ # from typing import Callable, Union
+ # from inspect import isgenerator, isasyncgen
+ # from crawlo import Response, Request, Item
+ # from crawlo.exceptions import TransformTypeError
+ #
+ #
+ # T = Union[Request, Item]
+ #
+ #
+ # async def transform(func: Callable, response: Response):
+ #     def set_request(t: T) -> T:
+ #         if isinstance(t, Request):
+ #             t.meta['depth'] = response.meta['depth']
+ #         return t
+ #     try:
+ #         if isgenerator(func):
+ #             for f in func:
+ #                 yield set_request(f)
+ #         elif isasyncgen(func):
+ #             async for f in func:
+ #                 yield set_request(f)
+ #         else:
+ #             raise TransformTypeError(
+ #                 f'callback return type error: {type(func)} must be `generator` or `async generator`'
+ #             )
+ #     except Exception as exp:
+ #         yield exp
+
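
Roughly how an engine would drive transform; the parse callback, URL, and drive/handle functions below are an illustrative sketch, not the package's actual engine code:

    from crawlo import Request
    from crawlo.utils.func_tools import transform

    def parse(response):  # an ordinary sync-generator callback
        yield Request('https://example.com/next')  # hypothetical URL

    async def drive(response, handle):  # handle is a hypothetical downstream consumer
        # transform() accepts sync or async generators and yields one unified stream
        async for out in transform(parse(response), response):
            if isinstance(out, Exception):
                raise out  # callback failures surface as yielded exceptions
            # out is a Request (with depth propagated from response.meta) or an Item
            handle(out)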