crawlo 1.3.3-py3-none-any.whl → 1.3.4-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. The information is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (279)
  1. crawlo/__init__.py +87 -63
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +46 -2
  16. crawlo/core/engine.py +439 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -256
  19. crawlo/crawler.py +639 -1167
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -226
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -52
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +28 -0
  40. crawlo/factories/base.py +69 -0
  41. crawlo/factories/crawler.py +104 -0
  42. crawlo/factories/registry.py +85 -0
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -0
  47. crawlo/initialization/__init__.py +40 -0
  48. crawlo/initialization/built_in.py +426 -0
  49. crawlo/initialization/context.py +142 -0
  50. crawlo/initialization/core.py +194 -0
  51. crawlo/initialization/phases.py +149 -0
  52. crawlo/initialization/registry.py +146 -0
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -22
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +38 -0
  58. crawlo/logging/config.py +97 -0
  59. crawlo/logging/factory.py +129 -0
  60. crawlo/logging/manager.py +112 -0
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -187
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +318 -318
  85. crawlo/pipelines/pipeline_manager.py +76 -75
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -325
  88. crawlo/queue/pqueue.py +43 -37
  89. crawlo/queue/queue_manager.py +503 -379
  90. crawlo/queue/redis_priority_queue.py +326 -306
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -225
  93. crawlo/settings/setting_manager.py +214 -198
  94. crawlo/spider/__init__.py +657 -639
  95. crawlo/stats_collector.py +73 -59
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +139 -30
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +168 -267
  104. crawlo/templates/project/settings_distributed.py.tmpl +167 -180
  105. crawlo/templates/project/settings_gentle.py.tmpl +167 -61
  106. crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
  107. crawlo/templates/project/settings_minimal.py.tmpl +66 -35
  108. crawlo/templates/project/settings_simple.py.tmpl +165 -102
  109. crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
  110. crawlo/templates/run.py.tmpl +34 -38
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +10 -0
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +365 -0
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +26 -0
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -124
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +44 -200
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -351
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -218
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/METADATA +1126 -1020
  149. crawlo-1.3.4.dist-info/RECORD +278 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +107 -107
  154. tests/baidu_performance_test.py +109 -0
  155. tests/baidu_test.py +60 -0
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +213 -0
  158. tests/comprehensive_test.py +82 -0
  159. tests/comprehensive_testing_summary.md +187 -0
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +70 -0
  164. tests/debug_framework_logger.py +85 -0
  165. tests/debug_log_levels.py +64 -0
  166. tests/debug_pipelines.py +66 -66
  167. tests/distributed_test.py +67 -0
  168. tests/distributed_test_debug.py +77 -0
  169. tests/dynamic_loading_example.py +523 -523
  170. tests/dynamic_loading_test.py +104 -104
  171. tests/env_config_example.py +133 -133
  172. tests/error_handling_example.py +171 -171
  173. tests/final_command_test_report.md +0 -0
  174. tests/final_comprehensive_test.py +152 -0
  175. tests/final_validation_test.py +183 -0
  176. tests/framework_performance_test.py +203 -0
  177. tests/optimized_performance_test.py +212 -0
  178. tests/performance_comparison.py +246 -0
  179. tests/queue_blocking_test.py +114 -0
  180. tests/queue_test.py +90 -0
  181. tests/redis_key_validation_demo.py +130 -130
  182. tests/request_params_example.py +150 -150
  183. tests/response_improvements_example.py +144 -144
  184. tests/scrapy_comparison/ofweek_scrapy.py +139 -0
  185. tests/scrapy_comparison/scrapy_test.py +134 -0
  186. tests/simple_command_test.py +120 -0
  187. tests/simple_crawlo_test.py +128 -0
  188. tests/simple_log_test.py +58 -0
  189. tests/simple_optimization_test.py +129 -0
  190. tests/simple_spider_test.py +50 -0
  191. tests/simple_test.py +48 -0
  192. tests/test_advanced_tools.py +148 -148
  193. tests/test_all_commands.py +231 -0
  194. tests/test_all_redis_key_configs.py +145 -145
  195. tests/test_authenticated_proxy.py +141 -141
  196. tests/test_batch_processor.py +179 -0
  197. tests/test_cleaners.py +54 -54
  198. tests/test_component_factory.py +175 -0
  199. tests/test_comprehensive.py +146 -146
  200. tests/test_config_consistency.py +80 -80
  201. tests/test_config_merge.py +152 -152
  202. tests/test_config_validator.py +182 -182
  203. tests/test_controlled_spider_mixin.py +80 -0
  204. tests/test_crawlo_proxy_integration.py +108 -108
  205. tests/test_date_tools.py +123 -123
  206. tests/test_default_header_middleware.py +158 -158
  207. tests/test_distributed.py +65 -65
  208. tests/test_double_crawlo_fix.py +207 -207
  209. tests/test_double_crawlo_fix_simple.py +124 -124
  210. tests/test_download_delay_middleware.py +221 -221
  211. tests/test_downloader_proxy_compatibility.py +268 -268
  212. tests/test_dynamic_downloaders_proxy.py +124 -124
  213. tests/test_dynamic_proxy.py +92 -92
  214. tests/test_dynamic_proxy_config.py +146 -146
  215. tests/test_dynamic_proxy_real.py +109 -109
  216. tests/test_edge_cases.py +303 -303
  217. tests/test_enhanced_error_handler.py +270 -270
  218. tests/test_enhanced_error_handler_comprehensive.py +246 -0
  219. tests/test_env_config.py +121 -121
  220. tests/test_error_handler_compatibility.py +112 -112
  221. tests/test_factories.py +253 -0
  222. tests/test_final_validation.py +153 -153
  223. tests/test_framework_env_usage.py +103 -103
  224. tests/test_framework_logger.py +67 -0
  225. tests/test_framework_startup.py +65 -0
  226. tests/test_integration.py +169 -169
  227. tests/test_item_dedup_redis_key.py +122 -122
  228. tests/test_large_scale_config.py +113 -0
  229. tests/test_large_scale_helper.py +236 -0
  230. tests/test_mode_change.py +73 -0
  231. tests/test_mode_consistency.py +51 -51
  232. tests/test_offsite_middleware.py +221 -221
  233. tests/test_parsel.py +29 -29
  234. tests/test_performance.py +327 -327
  235. tests/test_performance_monitor.py +116 -0
  236. tests/test_proxy_api.py +264 -264
  237. tests/test_proxy_health_check.py +32 -32
  238. tests/test_proxy_middleware.py +121 -121
  239. tests/test_proxy_middleware_enhanced.py +216 -216
  240. tests/test_proxy_middleware_integration.py +136 -136
  241. tests/test_proxy_middleware_refactored.py +184 -184
  242. tests/test_proxy_providers.py +56 -56
  243. tests/test_proxy_stats.py +19 -19
  244. tests/test_proxy_strategies.py +59 -59
  245. tests/test_queue_empty_check.py +42 -0
  246. tests/test_queue_manager_double_crawlo.py +173 -173
  247. tests/test_queue_manager_redis_key.py +176 -176
  248. tests/test_random_user_agent.py +72 -72
  249. tests/test_real_scenario_proxy.py +195 -195
  250. tests/test_redis_config.py +28 -28
  251. tests/test_redis_connection_pool.py +294 -294
  252. tests/test_redis_key_naming.py +181 -181
  253. tests/test_redis_key_validator.py +123 -123
  254. tests/test_redis_queue.py +224 -224
  255. tests/test_request_ignore_middleware.py +182 -182
  256. tests/test_request_params.py +111 -111
  257. tests/test_request_serialization.py +70 -70
  258. tests/test_response_code_middleware.py +349 -349
  259. tests/test_response_filter_middleware.py +427 -427
  260. tests/test_response_improvements.py +152 -152
  261. tests/test_retry_middleware.py +241 -241
  262. tests/test_scheduler.py +252 -252
  263. tests/test_scheduler_config_update.py +133 -133
  264. tests/test_simple_response.py +61 -61
  265. tests/test_telecom_spider_redis_key.py +205 -205
  266. tests/test_template_content.py +87 -87
  267. tests/test_template_redis_key.py +134 -134
  268. tests/test_tools.py +159 -159
  269. tests/test_user_agents.py +96 -96
  270. tests/tools_example.py +260 -260
  271. tests/untested_features_report.md +139 -0
  272. tests/verify_debug.py +52 -0
  273. tests/verify_distributed.py +117 -117
  274. tests/verify_log_fix.py +112 -0
  275. crawlo-1.3.3.dist-info/RECORD +0 -219
  276. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
  277. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/WHEEL +0 -0
  278. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/entry_points.txt +0 -0
  279. {crawlo-1.3.3.dist-info → crawlo-1.3.4.dist-info}/top_level.txt +0 -0
crawlo/middleware/middleware_manager.py
@@ -1,136 +1,136 @@
  #!/usr/bin/python
  # -*- coding:UTF-8 -*-
  from pprint import pformat
  from types import MethodType
  from asyncio import create_task
  from collections import defaultdict
  from typing import List, Dict, Callable, Optional

  from crawlo import Request, Response
  from crawlo.utils.log import get_logger
- from crawlo.project import load_class
+ from crawlo.utils.class_loader import load_class
  from crawlo.middleware import BaseMiddleware
  from crawlo.project import common_call
  from crawlo.event import ignore_request, response_received
  from crawlo.exceptions import MiddlewareInitError, InvalidOutputError, RequestMethodError, IgnoreRequestError, \
      NotConfiguredError


  class MiddlewareManager:

      def __init__(self, crawler):
          self.crawler = crawler
          self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
          self.middlewares: List = []
          self.methods: Dict[str, List[MethodType]] = defaultdict(list)
          middlewares = self.crawler.settings.get_list('MIDDLEWARES')
          self._add_middleware(middlewares)
          self._add_method()

          self.download_method: Callable = crawler.engine.downloader.download
          self._stats = crawler.stats

      async def _process_request(self, request: Request):
          for method in self.methods['process_request']:
              result = await common_call(method, request, self.crawler.spider)
              if result is None:
                  continue
              if isinstance(result, (Request, Response)):
                  return result
              raise InvalidOutputError(
                  f"{method.__self__.__class__.__name__}. must return None or Request or Response, got {type(result).__name__}"
              )
          return await self.download_method(request)

      async def _process_response(self, request: Request, response: Response):
          for method in reversed(self.methods['process_response']):
              try:
                  response = await common_call(method, request, response, self.crawler.spider)
              except IgnoreRequestError as exp:
                  create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
              if isinstance(response, Request):
                  return response
              if isinstance(response, Response):
                  continue
              raise InvalidOutputError(
                  f"{method.__self__.__class__.__name__}. must return Request or Response, got {type(response).__name__}"
              )
          return response

      async def _process_exception(self, request: Request, exp: Exception):
          for method in self.methods['process_exception']:
              response = await common_call(method, request, exp, self.crawler.spider)
              if response is None:
                  continue
              if isinstance(response, (Request, Response)):
                  return response
              if response:
                  break
              raise InvalidOutputError(
                  f"{method.__self__.__class__.__name__}. must return None or Request or Response, got {type(response).__name__}"
              )
          else:
              raise exp

      async def download(self, request) -> Optional[Response]:
          """ called in the download method. """
          try:
              response = await self._process_request(request)
          except KeyError:
              raise RequestMethodError(f"{request.method.lower()} is not supported")
          except IgnoreRequestError as exp:
              create_task(self.crawler.subscriber.notify(ignore_request, exp, request, self.crawler.spider))
              response = await self._process_exception(request, exp)
          except Exception as exp:
              self._stats.inc_value(f'download_error/{exp.__class__.__name__}')
              response = await self._process_exception(request, exp)
          else:
              create_task(self.crawler.subscriber.notify(response_received, response, self.crawler.spider))
              # self.crawler.stats.inc_value('response_received_count')
          if isinstance(response, Response):
              response = await self._process_response(request, response)
          if isinstance(response, Request):
              await self.crawler.engine.enqueue_request(request)
              return None
          return response

      @classmethod
      def create_instance(cls, *args, **kwargs):
          return cls(*args, **kwargs)

      def _add_middleware(self, middlewares):
          enabled_middlewares = [m for m in middlewares if self._validate_middleware(m)]
          if enabled_middlewares:
              # 恢复INFO级别日志,保留关键的启用信息
              self.logger.info(f'Enabled middlewares:\n {pformat(enabled_middlewares)}')

      def _validate_middleware(self, middleware):
          middleware_cls = load_class(middleware)
          if not hasattr(middleware_cls, 'create_instance'):
              raise MiddlewareInitError(
                  f"Middleware init failed, must inherit from `BaseMiddleware` or have a `create_instance` method"
              )
          try:
              instance = middleware_cls.create_instance(self.crawler)
              self.middlewares.append(instance)
              return True
          except NotConfiguredError:
              return False

      def _add_method(self):
          for middleware in self.middlewares:
              if hasattr(middleware, 'process_request'):
                  if self._validate_middleware_method(method_name='process_request', middleware=middleware):
                      self.methods['process_request'].append(middleware.process_request)
              if hasattr(middleware, 'process_response'):
                  if self._validate_middleware_method(method_name='process_response', middleware=middleware):
                      self.methods['process_response'].append(middleware.process_response)
              if hasattr(middleware, 'process_exception'):
                  if self._validate_middleware_method(method_name='process_exception', middleware=middleware):
                      self.methods['process_exception'].append(middleware.process_exception)

      @staticmethod
      def _validate_middleware_method(method_name, middleware) -> bool:
          method = getattr(type(middleware), method_name)
          base_method = getattr(BaseMiddleware, method_name)
          return False if method == base_method else True
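
The only visible change in this hunk is the import source of load_class, which now comes from the new crawlo/utils/class_loader.py module added in 1.3.4 rather than from crawlo.project. To make the contract that _validate_middleware and _add_method enforce concrete, here is a minimal sketch of a custom middleware that would pass both checks. The class name, the HEADER_STAMP setting, and the X-Stamp header are made up for the example and are not part of crawlo; the hook signatures and return-value rules follow the manager code above.

# Hypothetical middleware shaped the way MiddlewareManager expects: it is
# resolved from a dotted path via load_class(), must expose create_instance(),
# and its process_request() must return None, a Request, or a Response
# (anything else makes _process_request raise InvalidOutputError).
from crawlo import Request, Response
from crawlo.middleware import BaseMiddleware
from crawlo.exceptions import NotConfiguredError


class HeaderStampMiddleware(BaseMiddleware):
    """Illustrative only: stamps every outgoing request with a fixed header."""

    def __init__(self, stamp):
        self.stamp = stamp

    @classmethod
    def create_instance(cls, crawler):
        # Raising NotConfiguredError is the opt-out path: _validate_middleware()
        # catches it and simply skips this middleware.
        stamp = crawler.settings.get('HEADER_STAMP')  # hypothetical setting name
        if not stamp:
            raise NotConfiguredError("HEADER_STAMP not set, HeaderStampMiddleware disabled")
        return cls(stamp)

    async def process_request(self, request: Request, spider):
        # Assumes Request exposes a mutable headers mapping.
        request.headers['X-Stamp'] = self.stamp
        return None  # None lets the next middleware, and finally the downloader, run

Such a class would be enabled by adding its dotted path to the MIDDLEWARES list, which MiddlewareManager.__init__ reads via settings.get_list('MIDDLEWARES') and resolves with load_class.
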
crawlo/middleware/offsite.py
@@ -1,124 +1,124 @@
  #!/usr/bin/python
  # -*- coding:UTF-8 -*-
  """
  OffsiteMiddleware 中间件
  用于过滤掉不在指定域名范围内的请求
  """
  import re
  from urllib.parse import urlparse

  from crawlo.utils.log import get_logger
  from crawlo.exceptions import IgnoreRequestError


  class OffsiteMiddleware:
      """
      OffsiteMiddleware 中间件
      用于过滤掉不在指定域名范围内的请求,防止爬虫爬取到不相关的网站
      """

      def __init__(self, stats, log_level, allowed_domains=None):
          self.logger = get_logger(self.__class__.__name__, log_level)
          self.stats = stats
          self.allowed_domains = allowed_domains or []

      @classmethod
      def create_instance(cls, crawler):
          """
          创建中间件实例
          从爬虫设置中获取允许的域名列表
          """
          # 优先使用 Spider 实例的 allowed_domains,回退到全局设置中的 ALLOWED_DOMAINS
          allowed_domains = []

          # 检查当前爬虫实例是否有 allowed_domains 属性
          if hasattr(crawler, 'spider') and crawler.spider and hasattr(crawler.spider, 'allowed_domains'):
              allowed_domains = getattr(crawler.spider, 'allowed_domains', [])

          # 如果 Spider 实例没有设置 allowed_domains,则从全局设置中获取
          if not allowed_domains:
              allowed_domains = crawler.settings.get_list('ALLOWED_DOMAINS')

          # 如果没有配置允许的域名,则禁用此中间件
          if not allowed_domains:
              from crawlo.exceptions import NotConfiguredError
              raise NotConfiguredError("未配置ALLOWED_DOMAINS,OffsiteMiddleware已禁用")

          o = cls(
              stats=crawler.stats,
              log_level=crawler.settings.get('LOG_LEVEL'),
              allowed_domains=allowed_domains
          )

          # 编译域名正则表达式以提高性能
          o._compile_domains()

          # 使用中间件自己的logger而不是crawler.logger
          o.logger.debug(f"OffsiteMiddleware 已启用,允许的域名: {allowed_domains}")
          return o

      def _compile_domains(self):
          """
          编译域名正则表达式
          """
          self._domain_regexes = []
          for domain in self.allowed_domains:
              # 转义域名中的特殊字符
              escaped_domain = re.escape(domain)
              # 创建匹配域名的正则表达式(支持子域名)
              regex = re.compile(r'(^|.*\.)' + escaped_domain + '$', re.IGNORECASE)
              self._domain_regexes.append(regex)

      def _is_offsite_request(self, request):
          """
          判断请求是否为站外请求
          """
          try:
              parsed_url = urlparse(request.url)
              hostname = parsed_url.hostname

              if not hostname:
                  return True  # 无效URL

              # 检查是否匹配允许的域名
              for regex in self._domain_regexes:
                  if regex.match(hostname):
                      return False  # 匹配允许的域名

              return True  # 不匹配任何允许的域名
          except Exception:
              # URL解析失败,视为站外请求
              return True

      async def process_request(self, request, spider):
          """
          处理请求,过滤站外请求
          """
          if self._is_offsite_request(request):
              # 记录被过滤的请求
              self.stats.inc_value('offsite_request_count')

              # 记录被过滤的域名
              try:
                  parsed_url = urlparse(request.url)
                  hostname = parsed_url.hostname or "unknown"
                  self.stats.inc_value(f'offsite_request_count/{hostname}')
              except:
                  self.stats.inc_value('offsite_request_count/invalid_url')

              self.logger.debug(f"过滤站外请求: {request.url}")

              # 抛出异常以忽略该请求
              raise IgnoreRequestError(f"站外请求被过滤: {request.url}")

          return None

      def process_exception(self, request, exception, spider):
          """
          处理异常
          """
          # 如果是IgnoreRequestError且是我们产生的,则处理它
          if isinstance(exception, IgnoreRequestError) and "站外请求被过滤" in str(exception):
              self.logger.debug(f"已过滤站外请求: {request.url}")
              return True  # 表示异常已被处理
          return None
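
As a quick illustration of the matching rule this offsite.py hunk implements (the hostname must be an allowed domain or one of its subdomains, matched case-insensitively, with unparseable URLs treated as offsite), here is a standalone sketch that reuses the same regular expression; the domain and test URLs are made up for the example.

# Standalone sketch of the domain check performed by _compile_domains() and
# _is_offsite_request() above; example.com and the URLs are illustrative only.
import re
from urllib.parse import urlparse

allowed_domains = ["example.com"]

# Same pattern the middleware compiles: the allowed domain itself or any subdomain.
regexes = [re.compile(r'(^|.*\.)' + re.escape(d) + '$', re.IGNORECASE)
           for d in allowed_domains]

def is_offsite(url: str) -> bool:
    hostname = urlparse(url).hostname
    if not hostname:
        return True  # no hostname -> treated as an offsite (invalid) request
    return not any(r.match(hostname) for r in regexes)

print(is_offsite("https://example.com/page"))      # False: exact domain allowed
print(is_offsite("https://news.example.com/a"))    # False: subdomain allowed
print(is_offsite("https://example.com.evil.io/"))  # True: suffix trick rejected
print(is_offsite("https://other.org/"))            # True: not in allowed_domains

In the middleware itself a hit on this check increments the offsite_request_count stats and raises IgnoreRequestError, which MiddlewareManager.download turns into an ignore_request notification rather than a hard failure.
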