crawlo-1.3.5-py3-none-any.whl → crawlo-1.3.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo has been flagged as possibly problematic.

Files changed (289)
  1. crawlo/__init__.py +87 -87
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -341
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +45 -45
  16. crawlo/core/engine.py +439 -439
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -257
  19. crawlo/crawler.py +638 -638
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -228
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +103 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -257
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -291
  47. crawlo/initialization/__init__.py +39 -39
  48. crawlo/initialization/built_in.py +425 -425
  49. crawlo/initialization/context.py +141 -141
  50. crawlo/initialization/core.py +193 -193
  51. crawlo/initialization/phases.py +148 -148
  52. crawlo/initialization/registry.py +145 -145
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -23
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +37 -37
  58. crawlo/logging/config.py +96 -96
  59. crawlo/logging/factory.py +128 -128
  60. crawlo/logging/manager.py +111 -111
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -212
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +325 -325
  85. crawlo/pipelines/pipeline_manager.py +76 -76
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -327
  88. crawlo/queue/pqueue.py +42 -42
  89. crawlo/queue/queue_manager.py +503 -503
  90. crawlo/queue/redis_priority_queue.py +326 -326
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -321
  93. crawlo/settings/setting_manager.py +214 -214
  94. crawlo/spider/__init__.py +657 -657
  95. crawlo/stats_collector.py +73 -73
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +138 -138
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +167 -167
  104. crawlo/templates/project/settings_distributed.py.tmpl +166 -166
  105. crawlo/templates/project/settings_gentle.py.tmpl +166 -166
  106. crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
  107. crawlo/templates/project/settings_minimal.py.tmpl +65 -65
  108. crawlo/templates/project/settings_simple.py.tmpl +164 -164
  109. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  110. crawlo/templates/run.py.tmpl +34 -34
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +9 -9
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +364 -364
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +25 -25
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -165
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +79 -79
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -388
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -225
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.5.dist-info → crawlo-1.3.6.dist-info}/METADATA +1126 -1126
  149. crawlo-1.3.6.dist-info/RECORD +290 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +106 -106
  154. tests/baidu_performance_test.py +108 -108
  155. tests/baidu_test.py +59 -59
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +212 -212
  158. tests/comprehensive_test.py +81 -81
  159. tests/comprehensive_testing_summary.md +186 -186
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +69 -69
  164. tests/debug_framework_logger.py +84 -84
  165. tests/debug_log_config.py +126 -126
  166. tests/debug_log_levels.py +63 -63
  167. tests/debug_pipelines.py +66 -66
  168. tests/detailed_log_test.py +233 -233
  169. tests/distributed_test.py +66 -66
  170. tests/distributed_test_debug.py +76 -76
  171. tests/dynamic_loading_example.py +523 -523
  172. tests/dynamic_loading_test.py +104 -104
  173. tests/env_config_example.py +133 -133
  174. tests/error_handling_example.py +171 -171
  175. tests/final_comprehensive_test.py +151 -151
  176. tests/final_log_test.py +260 -260
  177. tests/final_validation_test.py +182 -182
  178. tests/fix_log_test.py +142 -142
  179. tests/framework_performance_test.py +202 -202
  180. tests/log_buffering_test.py +111 -111
  181. tests/log_generation_timing_test.py +153 -153
  182. tests/optimized_performance_test.py +211 -211
  183. tests/performance_comparison.py +245 -245
  184. tests/queue_blocking_test.py +113 -113
  185. tests/queue_test.py +89 -89
  186. tests/redis_key_validation_demo.py +130 -130
  187. tests/request_params_example.py +150 -150
  188. tests/response_improvements_example.py +144 -144
  189. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  190. tests/scrapy_comparison/scrapy_test.py +133 -133
  191. tests/simple_command_test.py +119 -119
  192. tests/simple_crawlo_test.py +127 -127
  193. tests/simple_log_test.py +57 -57
  194. tests/simple_log_test2.py +137 -137
  195. tests/simple_optimization_test.py +128 -128
  196. tests/simple_queue_type_test.py +42 -0
  197. tests/simple_spider_test.py +49 -49
  198. tests/simple_test.py +47 -47
  199. tests/spider_log_timing_test.py +177 -177
  200. tests/test_advanced_tools.py +148 -148
  201. tests/test_all_commands.py +230 -230
  202. tests/test_all_redis_key_configs.py +145 -145
  203. tests/test_authenticated_proxy.py +141 -141
  204. tests/test_batch_processor.py +178 -178
  205. tests/test_cleaners.py +54 -54
  206. tests/test_component_factory.py +174 -174
  207. tests/test_comprehensive.py +146 -146
  208. tests/test_config_consistency.py +80 -80
  209. tests/test_config_merge.py +152 -152
  210. tests/test_config_validator.py +182 -182
  211. tests/test_controlled_spider_mixin.py +79 -79
  212. tests/test_crawlo_proxy_integration.py +108 -108
  213. tests/test_date_tools.py +123 -123
  214. tests/test_default_header_middleware.py +158 -158
  215. tests/test_distributed.py +65 -65
  216. tests/test_double_crawlo_fix.py +207 -207
  217. tests/test_double_crawlo_fix_simple.py +124 -124
  218. tests/test_download_delay_middleware.py +221 -221
  219. tests/test_downloader_proxy_compatibility.py +268 -268
  220. tests/test_dynamic_downloaders_proxy.py +124 -124
  221. tests/test_dynamic_proxy.py +92 -92
  222. tests/test_dynamic_proxy_config.py +146 -146
  223. tests/test_dynamic_proxy_real.py +109 -109
  224. tests/test_edge_cases.py +303 -303
  225. tests/test_enhanced_error_handler.py +270 -270
  226. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  227. tests/test_env_config.py +121 -121
  228. tests/test_error_handler_compatibility.py +112 -112
  229. tests/test_factories.py +252 -252
  230. tests/test_final_validation.py +153 -153
  231. tests/test_framework_env_usage.py +103 -103
  232. tests/test_framework_logger.py +66 -66
  233. tests/test_framework_startup.py +64 -64
  234. tests/test_get_component_logger.py +83 -83
  235. tests/test_integration.py +169 -169
  236. tests/test_item_dedup_redis_key.py +122 -122
  237. tests/test_large_scale_config.py +112 -112
  238. tests/test_large_scale_helper.py +235 -235
  239. tests/test_logging_system.py +282 -282
  240. tests/test_mode_change.py +72 -72
  241. tests/test_mode_consistency.py +51 -51
  242. tests/test_offsite_middleware.py +221 -221
  243. tests/test_parsel.py +29 -29
  244. tests/test_performance.py +327 -327
  245. tests/test_performance_monitor.py +115 -115
  246. tests/test_proxy_api.py +264 -264
  247. tests/test_proxy_health_check.py +32 -32
  248. tests/test_proxy_middleware.py +121 -121
  249. tests/test_proxy_middleware_enhanced.py +216 -216
  250. tests/test_proxy_middleware_integration.py +136 -136
  251. tests/test_proxy_middleware_refactored.py +184 -184
  252. tests/test_proxy_providers.py +56 -56
  253. tests/test_proxy_stats.py +19 -19
  254. tests/test_proxy_strategies.py +59 -59
  255. tests/test_queue_empty_check.py +41 -41
  256. tests/test_queue_manager_double_crawlo.py +173 -173
  257. tests/test_queue_manager_redis_key.py +176 -176
  258. tests/test_queue_type.py +107 -0
  259. tests/test_random_user_agent.py +72 -72
  260. tests/test_real_scenario_proxy.py +195 -195
  261. tests/test_redis_config.py +28 -28
  262. tests/test_redis_connection_pool.py +294 -294
  263. tests/test_redis_key_naming.py +181 -181
  264. tests/test_redis_key_validator.py +123 -123
  265. tests/test_redis_queue.py +224 -224
  266. tests/test_request_ignore_middleware.py +182 -182
  267. tests/test_request_params.py +111 -111
  268. tests/test_request_serialization.py +70 -70
  269. tests/test_response_code_middleware.py +349 -349
  270. tests/test_response_filter_middleware.py +427 -427
  271. tests/test_response_improvements.py +152 -152
  272. tests/test_retry_middleware.py +241 -241
  273. tests/test_scheduler.py +252 -252
  274. tests/test_scheduler_config_update.py +133 -133
  275. tests/test_simple_response.py +61 -61
  276. tests/test_telecom_spider_redis_key.py +205 -205
  277. tests/test_template_content.py +87 -87
  278. tests/test_template_redis_key.py +134 -134
  279. tests/test_tools.py +159 -159
  280. tests/test_user_agents.py +96 -96
  281. tests/tools_example.py +260 -260
  282. tests/untested_features_report.md +138 -138
  283. tests/verify_debug.py +51 -51
  284. tests/verify_distributed.py +117 -117
  285. tests/verify_log_fix.py +111 -111
  286. crawlo-1.3.5.dist-info/RECORD +0 -288
  287. {crawlo-1.3.5.dist-info → crawlo-1.3.6.dist-info}/WHEEL +0 -0
  288. {crawlo-1.3.5.dist-info → crawlo-1.3.6.dist-info}/entry_points.txt +0 -0
  289. {crawlo-1.3.5.dist-info → crawlo-1.3.6.dist-info}/top_level.txt +0 -0
crawlo/middleware/middleware_manager.py

@@ -1,136 +1,136 @@

Every line of this hunk is removed and re-added byte-for-byte identical; the rewrite appears to touch only line endings or other invisible formatting. The hunk body, shown once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from pprint import pformat
from types import MethodType
from asyncio import create_task
from collections import defaultdict
from typing import List, Dict, Callable, Optional

from crawlo import Request, Response
from crawlo.utils.log import get_logger
from crawlo.utils.class_loader import load_class
from crawlo.middleware import BaseMiddleware
from crawlo.project import common_call
from crawlo.event import ignore_request, response_received
from crawlo.exceptions import MiddlewareInitError, InvalidOutputError, RequestMethodError, IgnoreRequestError, \
    NotConfiguredError


class MiddlewareManager:

    def __init__(self, crawler):
        self.crawler = crawler
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
        self.middlewares: List = []
        self.methods: Dict[str, List[MethodType]] = defaultdict(list)
        middlewares = self.crawler.settings.get_list('MIDDLEWARES')
        self._add_middleware(middlewares)
        self._add_method()

        self.download_method: Callable = crawler.engine.downloader.download
        self._stats = crawler.stats

    async def _process_request(self, request: Request):
        for method in self.methods['process_request']:
            result = await common_call(method, request, self.crawler.spider)
            if result is None:
                continue
            if isinstance(result, (Request, Response)):
                return result
            raise InvalidOutputError(
                f"{method.__self__.__class__.__name__}. must return None or Request or Response, got {type(result).__name__}"
            )
        return await self.download_method(request)

    async def _process_response(self, request: Request, response: Response):
        for method in reversed(self.methods['process_response']):
            try:
                response = await common_call(method, request, response, self.crawler.spider)
            except IgnoreRequestError as exp:
                create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
            if isinstance(response, Request):
                return response
            if isinstance(response, Response):
                continue
            raise InvalidOutputError(
                f"{method.__self__.__class__.__name__}. must return Request or Response, got {type(response).__name__}"
            )
        return response

    async def _process_exception(self, request: Request, exp: Exception):
        for method in self.methods['process_exception']:
            response = await common_call(method, request, exp, self.crawler.spider)
            if response is None:
                continue
            if isinstance(response, (Request, Response)):
                return response
            if response:
                break
            raise InvalidOutputError(
                f"{method.__self__.__class__.__name__}. must return None or Request or Response, got {type(response).__name__}"
            )
        else:
            raise exp

    async def download(self, request) -> Optional[Response]:
        """ called in the download method. """
        try:
            response = await self._process_request(request)
        except KeyError:
            raise RequestMethodError(f"{request.method.lower()} is not supported")
        except IgnoreRequestError as exp:
            create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
            response = await self._process_exception(request, exp)
        except Exception as exp:
            self._stats.inc_value(f'download_error/{exp.__class__.__name__}')
            response = await self._process_exception(request, exp)
        else:
            create_task(self.crawler.subscriber.notify(response_received, response, self.crawler.spider))
            # self.crawler.stats.inc_value('response_received_count')
        if isinstance(response, Response):
            response = await self._process_response(request, response)
        if isinstance(response, Request):
            await self.crawler.engine.enqueue_request(request)
            return None
        return response

    @classmethod
    def create_instance(cls, *args, **kwargs):
        return cls(*args, **kwargs)

    def _add_middleware(self, middlewares):
        enabled_middlewares = [m for m in middlewares if self._validate_middleware(m)]
        if enabled_middlewares:
            # Restore INFO-level logging; keep the key "enabled" message
            self.logger.info(f'Enabled middlewares:\n {pformat(enabled_middlewares)}')

    def _validate_middleware(self, middleware):
        middleware_cls = load_class(middleware)
        if not hasattr(middleware_cls, 'create_instance'):
            raise MiddlewareInitError(
                f"Middleware init failed, must inherit from `BaseMiddleware` or have a `create_instance` method"
            )
        try:
            instance = middleware_cls.create_instance(self.crawler)
            self.middlewares.append(instance)
            return True
        except NotConfiguredError:
            return False

    def _add_method(self):
        for middleware in self.middlewares:
            if hasattr(middleware, 'process_request'):
                if self._validate_middleware_method(method_name='process_request', middleware=middleware):
                    self.methods['process_request'].append(middleware.process_request)
            if hasattr(middleware, 'process_response'):
                if self._validate_middleware_method(method_name='process_response', middleware=middleware):
                    self.methods['process_response'].append(middleware.process_response)
            if hasattr(middleware, 'process_exception'):
                if self._validate_middleware_method(method_name='process_exception', middleware=middleware):
                    self.methods['process_exception'].append(middleware.process_exception)

    @staticmethod
    def _validate_middleware_method(method_name, middleware) -> bool:
        method = getattr(type(middleware), method_name)
        base_method = getattr(BaseMiddleware, method_name)
        return False if method == base_method else True
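The hunk above changes no behavior, but it documents the MiddlewareManager contract: a middleware is instantiated through a create_instance(crawler) classmethod, may raise NotConfiguredError to opt out cleanly, and its process_request hook must return None (continue the chain), a Request (re-enqueue through the engine), or a Response (short-circuit the download). Below is a minimal sketch of a conforming middleware; it assumes BaseMiddleware, NotConfiguredError, and crawler.settings.get behave as shown in the diff, while the CUSTOM_USER_AGENT setting and the dict-like request.headers are hypothetical.

from crawlo.middleware import BaseMiddleware
from crawlo.exceptions import NotConfiguredError


class CustomUserAgentMiddleware(BaseMiddleware):
    """Sketch only: stamps a User-Agent header on every outgoing request."""

    def __init__(self, user_agent):
        self.user_agent = user_agent

    @classmethod
    def create_instance(cls, crawler):
        # Raising NotConfiguredError makes _validate_middleware() skip this
        # middleware instead of aborting startup.
        user_agent = crawler.settings.get('CUSTOM_USER_AGENT')  # hypothetical setting
        if not user_agent:
            raise NotConfiguredError("CUSTOM_USER_AGENT not set")
        return cls(user_agent)

    async def process_request(self, request, spider):
        # Assumes request.headers is dict-like; returning None hands the
        # request to the next middleware and, ultimately, the downloader.
        request.headers.setdefault('User-Agent', self.user_agent)
        return None

Because _validate_middleware_method registers only hooks that differ from the BaseMiddleware implementations, overriding process_request alone is enough; the unused hooks stay unregistered.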
crawlo/middleware/offsite.py

@@ -1,124 +1,124 @@

As in the previous file, every line of this hunk is removed and re-added unchanged. The hunk body, shown once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
OffsiteMiddleware
Filters out requests whose domains fall outside the allowed set.
"""
import re
from urllib.parse import urlparse

from crawlo.utils.log import get_logger
from crawlo.exceptions import IgnoreRequestError


class OffsiteMiddleware:
    """
    OffsiteMiddleware
    Filters out requests outside the allowed domains, keeping the crawler off unrelated sites.
    """

    def __init__(self, stats, log_level, allowed_domains=None):
        self.logger = get_logger(self.__class__.__name__, log_level)
        self.stats = stats
        self.allowed_domains = allowed_domains or []

    @classmethod
    def create_instance(cls, crawler):
        """
        Create the middleware instance,
        reading the allowed-domain list from the crawler settings.
        """
        # Prefer the spider instance's allowed_domains; fall back to the global ALLOWED_DOMAINS setting
        allowed_domains = []

        # Check whether the current spider instance defines allowed_domains
        if hasattr(crawler, 'spider') and crawler.spider and hasattr(crawler.spider, 'allowed_domains'):
            allowed_domains = getattr(crawler.spider, 'allowed_domains', [])

        # If the spider did not set allowed_domains, read it from the global settings
        if not allowed_domains:
            allowed_domains = crawler.settings.get_list('ALLOWED_DOMAINS')

        # If no allowed domains are configured, disable this middleware
        if not allowed_domains:
            from crawlo.exceptions import NotConfiguredError
            raise NotConfiguredError("ALLOWED_DOMAINS not configured; OffsiteMiddleware disabled")

        o = cls(
            stats=crawler.stats,
            log_level=crawler.settings.get('LOG_LEVEL'),
            allowed_domains=allowed_domains
        )

        # Pre-compile the domain regexes for performance
        o._compile_domains()

        # Use the middleware's own logger rather than crawler.logger
        o.logger.debug(f"OffsiteMiddleware enabled; allowed domains: {allowed_domains}")
        return o

    def _compile_domains(self):
        """
        Compile the domain regexes.
        """
        self._domain_regexes = []
        for domain in self.allowed_domains:
            # Escape special characters in the domain
            escaped_domain = re.escape(domain)
            # Build a regex matching the domain itself and any subdomain
            regex = re.compile(r'(^|.*\.)' + escaped_domain + '$', re.IGNORECASE)
            self._domain_regexes.append(regex)

    def _is_offsite_request(self, request):
        """
        Decide whether a request targets an offsite domain.
        """
        try:
            parsed_url = urlparse(request.url)
            hostname = parsed_url.hostname

            if not hostname:
                return True  # invalid URL

            # Check against the allowed-domain regexes
            for regex in self._domain_regexes:
                if regex.match(hostname):
                    return False  # matches an allowed domain

            return True  # matches no allowed domain
        except Exception:
            # URL parsing failed; treat it as offsite
            return True

    async def process_request(self, request, spider):
        """
        Process the request, filtering offsite ones.
        """
        if self._is_offsite_request(request):
            # Count the filtered request
            self.stats.inc_value('offsite_request_count')

            # Record the filtered domain
            try:
                parsed_url = urlparse(request.url)
                hostname = parsed_url.hostname or "unknown"
                self.stats.inc_value(f'offsite_request_count/{hostname}')
            except:
                self.stats.inc_value('offsite_request_count/invalid_url')

            self.logger.debug(f"Filtering offsite request: {request.url}")

            # Raise so the request is ignored
            raise IgnoreRequestError(f"Offsite request filtered: {request.url}")

        return None

    def process_exception(self, request, exception, spider):
        """
        Handle exceptions.
        """
        # If this is an IgnoreRequestError that we raised, handle it
        if isinstance(exception, IgnoreRequestError) and "Offsite request filtered" in str(exception):
            self.logger.debug(f"Offsite request already filtered: {request.url}")
            return True  # the exception has been handled

        return None
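The compiled pattern (^|.*\.)domain$ accepts the apex domain and any subdomain while rejecting look-alike hostnames and suffix tricks. A self-contained sketch of the same matching logic, with a hypothetical is_allowed helper that mirrors _compile_domains and _is_offsite_request:

import re

def is_allowed(hostname, allowed_domains):
    # Mirror OffsiteMiddleware: match the apex domain or any subdomain,
    # case-insensitively, anchored at the end of the hostname.
    for domain in allowed_domains:
        regex = re.compile(r'(^|.*\.)' + re.escape(domain) + '$', re.IGNORECASE)
        if regex.match(hostname):
            return True
    return False

assert is_allowed('example.com', ['example.com'])               # apex domain
assert is_allowed('news.example.com', ['example.com'])          # subdomain
assert not is_allowed('notexample.com', ['example.com'])        # look-alike rejected
assert not is_allowed('example.com.evil.io', ['example.com'])   # suffix trick rejected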