crawlo 1.4.5__py3-none-any.whl → 1.4.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (375)
  1. crawlo/__init__.py +90 -89
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +186 -186
  7. crawlo/commands/help.py +140 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +379 -341
  10. crawlo/commands/startproject.py +460 -460
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +320 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +52 -52
  16. crawlo/core/engine.py +451 -438
  17. crawlo/core/processor.py +47 -47
  18. crawlo/core/scheduler.py +290 -291
  19. crawlo/crawler.py +698 -657
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +280 -276
  23. crawlo/downloader/aiohttp_downloader.py +233 -233
  24. crawlo/downloader/cffi_downloader.py +250 -245
  25. crawlo/downloader/httpx_downloader.py +265 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +425 -402
  28. crawlo/downloader/selenium_downloader.py +486 -472
  29. crawlo/event.py +45 -11
  30. crawlo/exceptions.py +215 -82
  31. crawlo/extension/__init__.py +65 -64
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +94 -94
  34. crawlo/extension/log_stats.py +70 -70
  35. crawlo/extension/logging_extension.py +53 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +104 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/factories/utils.py +135 -0
  44. crawlo/filters/__init__.py +170 -153
  45. crawlo/filters/aioredis_filter.py +348 -264
  46. crawlo/filters/memory_filter.py +261 -276
  47. crawlo/framework.py +306 -292
  48. crawlo/initialization/__init__.py +44 -44
  49. crawlo/initialization/built_in.py +391 -434
  50. crawlo/initialization/context.py +141 -141
  51. crawlo/initialization/core.py +240 -194
  52. crawlo/initialization/phases.py +230 -149
  53. crawlo/initialization/registry.py +143 -145
  54. crawlo/initialization/utils.py +49 -0
  55. crawlo/interfaces.py +23 -23
  56. crawlo/items/__init__.py +23 -23
  57. crawlo/items/base.py +23 -23
  58. crawlo/items/fields.py +52 -52
  59. crawlo/items/items.py +104 -104
  60. crawlo/logging/__init__.py +42 -46
  61. crawlo/logging/config.py +277 -197
  62. crawlo/logging/factory.py +175 -171
  63. crawlo/logging/manager.py +104 -112
  64. crawlo/middleware/__init__.py +87 -24
  65. crawlo/middleware/default_header.py +132 -132
  66. crawlo/middleware/download_delay.py +104 -104
  67. crawlo/middleware/middleware_manager.py +142 -142
  68. crawlo/middleware/offsite.py +123 -123
  69. crawlo/middleware/proxy.py +209 -386
  70. crawlo/middleware/request_ignore.py +86 -86
  71. crawlo/middleware/response_code.py +150 -150
  72. crawlo/middleware/response_filter.py +136 -136
  73. crawlo/middleware/retry.py +124 -124
  74. crawlo/mode_manager.py +287 -253
  75. crawlo/network/__init__.py +21 -21
  76. crawlo/network/request.py +375 -379
  77. crawlo/network/response.py +569 -664
  78. crawlo/pipelines/__init__.py +53 -22
  79. crawlo/pipelines/base_pipeline.py +452 -0
  80. crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
  81. crawlo/pipelines/console_pipeline.py +39 -39
  82. crawlo/pipelines/csv_pipeline.py +316 -316
  83. crawlo/pipelines/database_dedup_pipeline.py +197 -197
  84. crawlo/pipelines/json_pipeline.py +218 -218
  85. crawlo/pipelines/memory_dedup_pipeline.py +105 -105
  86. crawlo/pipelines/mongo_pipeline.py +140 -132
  87. crawlo/pipelines/mysql_pipeline.py +470 -326
  88. crawlo/pipelines/pipeline_manager.py +100 -100
  89. crawlo/pipelines/redis_dedup_pipeline.py +155 -156
  90. crawlo/project.py +347 -347
  91. crawlo/queue/__init__.py +10 -0
  92. crawlo/queue/pqueue.py +38 -38
  93. crawlo/queue/queue_manager.py +591 -525
  94. crawlo/queue/redis_priority_queue.py +519 -370
  95. crawlo/settings/__init__.py +7 -7
  96. crawlo/settings/default_settings.py +285 -270
  97. crawlo/settings/setting_manager.py +219 -219
  98. crawlo/spider/__init__.py +657 -657
  99. crawlo/stats_collector.py +82 -73
  100. crawlo/subscriber.py +129 -129
  101. crawlo/task_manager.py +138 -138
  102. crawlo/templates/crawlo.cfg.tmpl +10 -10
  103. crawlo/templates/project/__init__.py.tmpl +2 -4
  104. crawlo/templates/project/items.py.tmpl +13 -17
  105. crawlo/templates/project/middlewares.py.tmpl +38 -38
  106. crawlo/templates/project/pipelines.py.tmpl +35 -36
  107. crawlo/templates/project/settings.py.tmpl +110 -157
  108. crawlo/templates/project/settings_distributed.py.tmpl +156 -161
  109. crawlo/templates/project/settings_gentle.py.tmpl +170 -171
  110. crawlo/templates/project/settings_high_performance.py.tmpl +171 -172
  111. crawlo/templates/project/settings_minimal.py.tmpl +99 -77
  112. crawlo/templates/project/settings_simple.py.tmpl +168 -169
  113. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  114. crawlo/templates/run.py.tmpl +23 -30
  115. crawlo/templates/spider/spider.py.tmpl +33 -144
  116. crawlo/templates/spiders_init.py.tmpl +5 -10
  117. crawlo/tools/__init__.py +86 -189
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +384 -384
  120. crawlo/tools/scenario_adapter.py +262 -262
  121. crawlo/tools/text_cleaner.py +232 -232
  122. crawlo/utils/__init__.py +50 -50
  123. crawlo/utils/batch_processor.py +276 -259
  124. crawlo/utils/config_manager.py +442 -0
  125. crawlo/utils/controlled_spider_mixin.py +439 -439
  126. crawlo/utils/db_helper.py +250 -244
  127. crawlo/utils/error_handler.py +410 -410
  128. crawlo/utils/fingerprint.py +121 -121
  129. crawlo/utils/func_tools.py +82 -82
  130. crawlo/utils/large_scale_helper.py +344 -344
  131. crawlo/utils/leak_detector.py +335 -0
  132. crawlo/utils/log.py +79 -79
  133. crawlo/utils/misc.py +81 -81
  134. crawlo/utils/mongo_connection_pool.py +157 -0
  135. crawlo/utils/mysql_connection_pool.py +197 -0
  136. crawlo/utils/performance_monitor.py +285 -285
  137. crawlo/utils/queue_helper.py +175 -175
  138. crawlo/utils/redis_checker.py +91 -0
  139. crawlo/utils/redis_connection_pool.py +578 -388
  140. crawlo/utils/redis_key_validator.py +198 -198
  141. crawlo/utils/request.py +278 -256
  142. crawlo/utils/request_serializer.py +225 -225
  143. crawlo/utils/resource_manager.py +337 -0
  144. crawlo/utils/selector_helper.py +137 -137
  145. crawlo/utils/singleton.py +70 -0
  146. crawlo/utils/spider_loader.py +201 -201
  147. crawlo/utils/text_helper.py +94 -94
  148. crawlo/utils/{url.py → url_utils.py} +39 -39 (import-path change; see the compatibility note after this list)
  149. crawlo-1.4.7.dist-info/METADATA +689 -0
  150. crawlo-1.4.7.dist-info/RECORD +347 -0
  151. examples/__init__.py +7 -7
  152. tests/__init__.py +7 -7
  153. tests/advanced_tools_example.py +217 -275
  154. tests/authenticated_proxy_example.py +110 -106
  155. tests/baidu_performance_test.py +108 -108
  156. tests/baidu_test.py +59 -59
  157. tests/bug_check_test.py +250 -250
  158. tests/cleaners_example.py +160 -160
  159. tests/comprehensive_framework_test.py +212 -212
  160. tests/comprehensive_test.py +81 -81
  161. tests/comprehensive_testing_summary.md +186 -186
  162. tests/config_validation_demo.py +142 -142
  163. tests/controlled_spider_example.py +205 -205
  164. tests/date_tools_example.py +180 -180
  165. tests/debug_configure.py +69 -69
  166. tests/debug_framework_logger.py +84 -84
  167. tests/debug_log_config.py +126 -126
  168. tests/debug_log_levels.py +63 -63
  169. tests/debug_pipelines.py +66 -66
  170. tests/detailed_log_test.py +233 -233
  171. tests/direct_selector_helper_test.py +96 -96
  172. tests/distributed_dedup_test.py +467 -0
  173. tests/distributed_test.py +66 -66
  174. tests/distributed_test_debug.py +76 -76
  175. tests/dynamic_loading_example.py +523 -523
  176. tests/dynamic_loading_test.py +104 -104
  177. tests/error_handling_example.py +171 -171
  178. tests/explain_mysql_update_behavior.py +77 -0
  179. tests/final_comprehensive_test.py +151 -151
  180. tests/final_log_test.py +260 -260
  181. tests/final_validation_test.py +182 -182
  182. tests/fix_log_test.py +142 -142
  183. tests/framework_performance_test.py +202 -202
  184. tests/log_buffering_test.py +111 -111
  185. tests/log_generation_timing_test.py +153 -153
  186. tests/monitor_redis_dedup.sh +72 -0
  187. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
  188. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
  189. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
  190. tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
  191. tests/ofweek_scrapy/scrapy.cfg +11 -11
  192. tests/optimized_performance_test.py +211 -211
  193. tests/performance_comparison.py +244 -244
  194. tests/queue_blocking_test.py +113 -113
  195. tests/queue_test.py +89 -89
  196. tests/redis_key_validation_demo.py +130 -130
  197. tests/request_params_example.py +150 -150
  198. tests/response_improvements_example.py +144 -144
  199. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  200. tests/scrapy_comparison/scrapy_test.py +133 -133
  201. tests/simple_cli_test.py +55 -0
  202. tests/simple_command_test.py +119 -119
  203. tests/simple_crawlo_test.py +126 -126
  204. tests/simple_follow_test.py +38 -38
  205. tests/simple_log_test2.py +137 -137
  206. tests/simple_optimization_test.py +128 -128
  207. tests/simple_queue_type_test.py +41 -41
  208. tests/simple_response_selector_test.py +94 -94
  209. tests/simple_selector_helper_test.py +154 -154
  210. tests/simple_selector_test.py +207 -207
  211. tests/simple_spider_test.py +49 -49
  212. tests/simple_url_test.py +73 -73
  213. tests/simulate_mysql_update_test.py +140 -0
  214. tests/spider_log_timing_test.py +177 -177
  215. tests/test_advanced_tools.py +148 -148
  216. tests/test_all_commands.py +230 -230
  217. tests/test_all_pipeline_fingerprints.py +133 -133
  218. tests/test_all_redis_key_configs.py +145 -145
  219. tests/test_asyncmy_usage.py +57 -0
  220. tests/test_batch_processor.py +178 -178
  221. tests/test_cleaners.py +54 -54
  222. tests/test_cli_arguments.py +119 -0
  223. tests/test_component_factory.py +174 -174
  224. tests/test_config_consistency.py +80 -80
  225. tests/test_config_merge.py +152 -152
  226. tests/test_config_validator.py +182 -182
  227. tests/test_controlled_spider_mixin.py +79 -79
  228. tests/test_crawler_process_import.py +38 -38
  229. tests/test_crawler_process_spider_modules.py +47 -47
  230. tests/test_crawlo_proxy_integration.py +114 -108
  231. tests/test_date_tools.py +123 -123
  232. tests/test_dedup_fix.py +220 -220
  233. tests/test_dedup_pipeline_consistency.py +124 -124
  234. tests/test_default_header_middleware.py +313 -313
  235. tests/test_distributed.py +65 -65
  236. tests/test_double_crawlo_fix.py +204 -204
  237. tests/test_double_crawlo_fix_simple.py +124 -124
  238. tests/test_download_delay_middleware.py +221 -221
  239. tests/test_downloader_proxy_compatibility.py +272 -268
  240. tests/test_edge_cases.py +305 -305
  241. tests/test_encoding_core.py +56 -56
  242. tests/test_encoding_detection.py +126 -126
  243. tests/test_enhanced_error_handler.py +270 -270
  244. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  245. tests/test_error_handler_compatibility.py +112 -112
  246. tests/test_factories.py +252 -252
  247. tests/test_factory_compatibility.py +196 -196
  248. tests/test_final_validation.py +153 -153
  249. tests/test_fingerprint_consistency.py +135 -135
  250. tests/test_fingerprint_simple.py +51 -51
  251. tests/test_get_component_logger.py +83 -83
  252. tests/test_hash_performance.py +99 -99
  253. tests/test_integration.py +169 -169
  254. tests/test_item_dedup_redis_key.py +122 -122
  255. tests/test_large_scale_helper.py +235 -235
  256. tests/test_logging_enhancements.py +374 -374
  257. tests/test_logging_final.py +184 -184
  258. tests/test_logging_integration.py +312 -312
  259. tests/test_logging_system.py +282 -282
  260. tests/test_middleware_debug.py +141 -141
  261. tests/test_mode_consistency.py +51 -51
  262. tests/test_multi_directory.py +67 -67
  263. tests/test_multiple_spider_modules.py +80 -80
  264. tests/test_mysql_pipeline_config.py +165 -0
  265. tests/test_mysql_pipeline_error.py +99 -0
  266. tests/test_mysql_pipeline_init_log.py +83 -0
  267. tests/test_mysql_pipeline_integration.py +133 -0
  268. tests/test_mysql_pipeline_refactor.py +144 -0
  269. tests/test_mysql_pipeline_refactor_simple.py +86 -0
  270. tests/test_mysql_pipeline_robustness.py +196 -0
  271. tests/test_mysql_pipeline_types.py +89 -0
  272. tests/test_mysql_update_columns.py +94 -0
  273. tests/test_offsite_middleware.py +244 -244
  274. tests/test_offsite_middleware_simple.py +203 -203
  275. tests/test_optimized_selector_naming.py +100 -100
  276. tests/test_parsel.py +29 -29
  277. tests/test_performance.py +327 -327
  278. tests/test_performance_monitor.py +115 -115
  279. tests/test_pipeline_fingerprint_consistency.py +86 -86
  280. tests/test_priority_behavior.py +211 -211
  281. tests/test_priority_consistency.py +151 -151
  282. tests/test_priority_consistency_fixed.py +249 -249
  283. tests/test_proxy_health_check.py +32 -32
  284. tests/test_proxy_middleware.py +217 -121
  285. tests/test_proxy_middleware_enhanced.py +212 -216
  286. tests/test_proxy_middleware_integration.py +142 -137
  287. tests/test_proxy_middleware_refactored.py +207 -184
  288. tests/test_proxy_only.py +84 -0
  289. tests/test_proxy_providers.py +56 -56
  290. tests/test_proxy_stats.py +19 -19
  291. tests/test_proxy_strategies.py +59 -59
  292. tests/test_proxy_with_downloader.py +153 -0
  293. tests/test_queue_empty_check.py +41 -41
  294. tests/test_queue_manager_double_crawlo.py +173 -173
  295. tests/test_queue_manager_redis_key.py +179 -179
  296. tests/test_queue_naming.py +154 -154
  297. tests/test_queue_type.py +106 -106
  298. tests/test_queue_type_redis_config_consistency.py +130 -130
  299. tests/test_random_headers_default.py +322 -322
  300. tests/test_random_headers_necessity.py +308 -308
  301. tests/test_random_user_agent.py +72 -72
  302. tests/test_redis_config.py +28 -28
  303. tests/test_redis_connection_pool.py +294 -294
  304. tests/test_redis_key_naming.py +181 -181
  305. tests/test_redis_key_validator.py +123 -123
  306. tests/test_redis_queue.py +224 -224
  307. tests/test_redis_queue_name_fix.py +175 -175
  308. tests/test_redis_queue_type_fallback.py +129 -129
  309. tests/test_request_ignore_middleware.py +182 -182
  310. tests/test_request_params.py +111 -111
  311. tests/test_request_serialization.py +70 -70
  312. tests/test_response_code_middleware.py +349 -349
  313. tests/test_response_filter_middleware.py +427 -427
  314. tests/test_response_follow.py +104 -104
  315. tests/test_response_improvements.py +152 -152
  316. tests/test_response_selector_methods.py +92 -92
  317. tests/test_response_url_methods.py +70 -70
  318. tests/test_response_urljoin.py +86 -86
  319. tests/test_retry_middleware.py +333 -333
  320. tests/test_retry_middleware_realistic.py +273 -273
  321. tests/test_scheduler.py +252 -252
  322. tests/test_scheduler_config_update.py +133 -133
  323. tests/test_scrapy_style_encoding.py +112 -112
  324. tests/test_selector_helper.py +100 -100
  325. tests/test_selector_optimizations.py +146 -146
  326. tests/test_simple_response.py +61 -61
  327. tests/test_spider_loader.py +49 -49
  328. tests/test_spider_loader_comprehensive.py +69 -69
  329. tests/test_spider_modules.py +84 -84
  330. tests/test_spiders/test_spider.py +9 -9
  331. tests/test_telecom_spider_redis_key.py +205 -205
  332. tests/test_template_content.py +87 -87
  333. tests/test_template_redis_key.py +134 -134
  334. tests/test_tools.py +159 -159
  335. tests/test_user_agent_randomness.py +176 -176
  336. tests/test_user_agents.py +96 -96
  337. tests/untested_features_report.md +138 -138
  338. tests/verify_debug.py +51 -51
  339. tests/verify_distributed.py +117 -117
  340. tests/verify_log_fix.py +111 -111
  341. tests/verify_mysql_warnings.py +110 -0
  342. crawlo/logging/async_handler.py +0 -181
  343. crawlo/logging/monitor.py +0 -153
  344. crawlo/logging/sampler.py +0 -167
  345. crawlo/middleware/simple_proxy.py +0 -65
  346. crawlo/tools/authenticated_proxy.py +0 -241
  347. crawlo/tools/data_formatter.py +0 -226
  348. crawlo/tools/data_validator.py +0 -181
  349. crawlo/tools/encoding_converter.py +0 -127
  350. crawlo/tools/network_diagnostic.py +0 -365
  351. crawlo/tools/request_tools.py +0 -83
  352. crawlo/tools/retry_mechanism.py +0 -224
  353. crawlo/utils/env_config.py +0 -143
  354. crawlo/utils/large_scale_config.py +0 -287
  355. crawlo/utils/system.py +0 -11
  356. crawlo/utils/tools.py +0 -5
  357. crawlo-1.4.5.dist-info/METADATA +0 -329
  358. crawlo-1.4.5.dist-info/RECORD +0 -347
  359. tests/env_config_example.py +0 -134
  360. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
  361. tests/test_authenticated_proxy.py +0 -142
  362. tests/test_comprehensive.py +0 -147
  363. tests/test_dynamic_downloaders_proxy.py +0 -125
  364. tests/test_dynamic_proxy.py +0 -93
  365. tests/test_dynamic_proxy_config.py +0 -147
  366. tests/test_dynamic_proxy_real.py +0 -110
  367. tests/test_env_config.py +0 -122
  368. tests/test_framework_env_usage.py +0 -104
  369. tests/test_large_scale_config.py +0 -113
  370. tests/test_proxy_api.py +0 -265
  371. tests/test_real_scenario_proxy.py +0 -196
  372. tests/tools_example.py +0 -261
  373. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
  374. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
  375. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0
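
Entry 148 above renames crawlo/utils/url.py to crawlo/utils/url_utils.py, so downstream imports from the old path break on upgrade. A version-tolerant sketch; normalize_url is a hypothetical helper name used only for illustration, so substitute whatever your project actually imports from this module:

    # Works across the url.py -> url_utils.py rename (entry 148 above).
    # normalize_url is a hypothetical name, for illustration only.
    try:
        from crawlo.utils.url_utils import normalize_url  # crawlo >= 1.4.7
    except ImportError:
        from crawlo.utils.url import normalize_url        # crawlo <= 1.4.5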
crawlo/core/scheduler.py CHANGED
@@ -1,292 +1,291 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import traceback
-from typing import Optional, Callable
-
-from crawlo.utils.log import get_logger
-from crawlo.utils.request import set_request
-from crawlo.utils.error_handler import ErrorHandler
-from crawlo.utils.misc import load_object
-from crawlo.project import common_call
-from crawlo.utils.request_serializer import RequestSerializer
-from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
-
-
-class Scheduler:
-    def __init__(self, crawler, dupe_filter, stats, log_level, priority):
-        self.crawler = crawler
-        self.queue_manager: Optional[QueueManager] = None
-        self.request_serializer = RequestSerializer()
-
-        self.logger = get_logger(name=self.__class__.__name__, level=log_level)
-        self.error_handler = ErrorHandler(self.__class__.__name__, log_level)
-        self.stats = stats
-        self.dupe_filter = dupe_filter
-        self.priority = priority
-
-    @classmethod
-    def create_instance(cls, crawler):
-        filter_cls = load_object(crawler.settings.get('FILTER_CLASS'))
-        o = cls(
-            crawler=crawler,
-            dupe_filter=filter_cls.create_instance(crawler),
-            stats=crawler.stats,
-            log_level=crawler.settings.get('LOG_LEVEL'),
-            priority=crawler.settings.get('DEPTH_PRIORITY')
-        )
-        return o
-
-    async def open(self):
-        """Initialize scheduler and queue"""
-        self.logger.debug("Starting scheduler initialization...")
-        try:
-            # Create the queue configuration
-            queue_config = QueueConfig.from_settings(self.crawler.settings)
-
-            # Create the queue manager
-            self.queue_manager = QueueManager(queue_config)
-
-            # Initialize the queue
-            needs_config_update = await self.queue_manager.initialize()
-
-            # Check whether the filter configuration needs updating
-            updated_configs = []
-            if needs_config_update:
-                # True means the queue type changed; check the current queue type to decide the update direction
-                if self.queue_manager._queue_type == QueueType.REDIS:
-                    self._switch_to_redis_config()
-                    updated_configs.append("Redis")
-                else:
-                    self._switch_to_memory_config()
-                    updated_configs.append("memory")
-            else:
-                # Check whether the configuration needs updating (even if the queue manager did not request it)
-                # When QUEUE_TYPE is explicitly set to redis, configuration consistency should also be checked
-                queue_type_setting = self.crawler.settings.get('QUEUE_TYPE', 'memory')
-                if queue_type_setting == 'redis' or needs_config_update:
-                    updated_configs = self._check_filter_config()
-                else:
-                    updated_configs = []
-
-            # Process filter configuration updates
-            await self._process_filter_updates(needs_config_update, updated_configs)
-
-            # Log the key scheduler initialization info
-            status = self.queue_manager.get_status()
-            current_filter = self.crawler.settings.get('FILTER_CLASS')
-
-            self.logger.info(f"enabled filters: \n {current_filter}")
-
-            # Consolidate log output: merge several log lines into one key message
-            queue_type_setting = self.crawler.settings.get('QUEUE_TYPE', 'memory')
-            if queue_type_setting in ['auto', 'redis'] and updated_configs:
-                concurrency = self.crawler.settings.get('CONCURRENCY', 8)
-                delay = self.crawler.settings.get('DOWNLOAD_DELAY', 1.0)
-                self.logger.debug(f"Scheduler initialized [Queue type: {status['type']}, Status: {status['health']}, Concurrency: {concurrency}, Delay: {delay}s]")
-            else:
-                self.logger.debug(f"Scheduler initialized [Queue type: {status['type']}, Status: {status['health']}]")
-        except Exception as e:
-            self.logger.error(f"Scheduler initialization failed: {e}")
-            self.logger.debug(f"Detailed error information:\n{traceback.format_exc()}")
-            raise
-
-    def _check_filter_config(self):
-        """Check and update the filter configuration"""
-        updated_configs = []
-
-        if self.queue_manager._queue_type == QueueType.REDIS:
-            # Check whether the current filter is the memory filter
-            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
-            if 'memory_filter' in current_filter_class:
-                self._switch_to_redis_config()
-                updated_configs.append("Redis")
-        elif self.queue_manager._queue_type == QueueType.MEMORY:
-            # Check whether the current filter is the Redis filter
-            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
-            if 'aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class:
-                self._switch_to_memory_config()
-                updated_configs.append("memory")
-
-        return updated_configs
-
-    async def _process_filter_updates(self, needs_config_update, updated_configs):
-        """Handle filter update logic"""
-        # Check whether the configuration matches the queue type
-        current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
-        filter_matches_queue_type = self._is_filter_matching_queue_type(current_filter_class)
-
-        # Only recreate the filter instance when the configuration mismatches and an update is needed
-        if needs_config_update or not filter_matches_queue_type:
-            # If a configuration update is needed, perform it
-            if needs_config_update:
-                # Recreate the filter instance so the updated configuration takes effect
-                filter_cls = load_object(self.crawler.settings.get('FILTER_CLASS'))
-                self.dupe_filter = filter_cls.create_instance(self.crawler)
-
-                # Log a warning
-                original_mode = "standalone" if 'memory_filter' in current_filter_class else "distributed"
-                new_mode = "distributed" if self.queue_manager._queue_type == QueueType.REDIS else "standalone"
-                if original_mode != new_mode:
-                    self.logger.warning(f"runtime mode inconsistency detected: switched from {original_mode} to {new_mode} mode")
-            elif not filter_matches_queue_type:
-                # Configuration mismatch; update needed
-                if self.queue_manager._queue_type == QueueType.REDIS:
-                    self._switch_to_redis_config()
-                elif self.queue_manager._queue_type == QueueType.MEMORY:
-                    self._switch_to_memory_config()
-
-                # Recreate the filter instance
-                filter_cls = load_object(self.crawler.settings.get('FILTER_CLASS'))
-                self.dupe_filter = filter_cls.create_instance(self.crawler)
-
-    def _is_filter_matching_queue_type(self, current_filter_class):
-        """Check whether the filter configuration matches the queue type"""
-        return (
-            (self.queue_manager._queue_type == QueueType.REDIS and
-             ('aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class)) or
-            (self.queue_manager._queue_type == QueueType.MEMORY and
-             'memory_filter' in current_filter_class)
-        )
-
-    def _switch_to_redis_config(self):
-        """Switch to the Redis configuration"""
-        if self.queue_manager and self.queue_manager._queue_type == QueueType.REDIS:
-            # Check whether the current filter is the memory filter
-            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
-            updated_configs = []
-
-            if 'memory_filter' in current_filter_class:
-                # Update to the Redis filter
-                self.crawler.settings.set('FILTER_CLASS', 'crawlo.filters.aioredis_filter.AioRedisFilter')
-                updated_configs.append("filter")
-
-            # Check whether the current dedup pipeline is the memory dedup pipeline
-            current_dedup_pipeline = self.crawler.settings.get('DEFAULT_DEDUP_PIPELINE', '')
-            if 'memory_dedup_pipeline' in current_dedup_pipeline:
-                # Update to the Redis dedup pipeline
-                self.crawler.settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline')
-                # Also update the dedup pipeline in the PIPELINES list
-                pipelines = self.crawler.settings.get('PIPELINES', [])
-                if current_dedup_pipeline in pipelines:
-                    # Find the memory dedup pipeline and replace it with the Redis one
-                    index = pipelines.index(current_dedup_pipeline)
-                    pipelines[index] = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
-                    self.crawler.settings.set('PIPELINES', pipelines)
-                updated_configs.append("dedup pipeline")
-
-            # Consolidate log output
-            if updated_configs:
-                self.logger.info(f"configuration updated: {', '.join(updated_configs)} -> redis mode")
-
-    def _switch_to_memory_config(self):
-        """Switch to the memory configuration"""
-        if self.queue_manager and self.queue_manager._queue_type == QueueType.MEMORY:
-            # Check whether the current filter is the Redis filter
-            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
-            updated_configs = []
-
-            if 'aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class:
-                # Update to the memory filter
-                self.crawler.settings.set('FILTER_CLASS', 'crawlo.filters.memory_filter.MemoryFilter')
-                updated_configs.append("filter")
-
-            # Check whether the current dedup pipeline is the Redis dedup pipeline
-            current_dedup_pipeline = self.crawler.settings.get('DEFAULT_DEDUP_PIPELINE', '')
-            if 'redis_dedup_pipeline' in current_dedup_pipeline:
-                # Update to the memory dedup pipeline
-                self.crawler.settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline')
-                # Also update the dedup pipeline in the PIPELINES list
-                pipelines = self.crawler.settings.get('PIPELINES', [])
-                if current_dedup_pipeline in pipelines:
-                    # Find the Redis dedup pipeline and replace it with the memory one
-                    index = pipelines.index(current_dedup_pipeline)
-                    pipelines[index] = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
-                    self.crawler.settings.set('PIPELINES', pipelines)
-                updated_configs.append("dedup pipeline")
-
-            # Consolidate log output
-            if updated_configs:
-                self.logger.debug(f"configuration updated: {', '.join(updated_configs)} -> memory mode")
-
-    async def next_request(self):
-        """Get next request"""
-        if not self.queue_manager:
-            return None
-
-        try:
-            request = await self.queue_manager.get()
-
-            # Restore the callback (when the request comes out of the Redis queue)
-            if request:
-                spider = getattr(self.crawler, 'spider', None)
-                request = self.request_serializer.restore_after_deserialization(request, spider)
-
-            return request
-        except Exception as e:
-            self.error_handler.handle_error(
-                e,
-                context="Failed to get next request",
-                raise_error=False
-            )
-            return None
-
-    async def enqueue_request(self, request):
-        """Add request to queue"""
-        if not request.dont_filter and await common_call(self.dupe_filter.requested, request):
-            self.dupe_filter.log_stats(request)
-            return False
-
-        if not self.queue_manager:
-            self.logger.error("Queue manager not initialized")
-            return False
-
-        set_request(request, self.priority)
-
-        try:
-            # Use the unified queue interface
-            success = await self.queue_manager.put(request, priority=getattr(request, 'priority', 0))
-
-            if success:
-                self.logger.debug(f"Request enqueued successfully: {request.url}")
-
-            return success
-        except Exception as e:
-            self.error_handler.handle_error(
-                e,
-                context="Failed to enqueue request",
-                raise_error=False
-            )
-            return False
-
-    def idle(self) -> bool:
-        """Check if queue is empty"""
-        return len(self) == 0
-
-    async def async_idle(self) -> bool:
-        """Asynchronously check if queue is empty (more accurate)"""
-        if not self.queue_manager:
-            return True
-        # Use the queue manager's async empty method
-        return await self.queue_manager.async_empty()
-
-    async def close(self):
-        """Close scheduler"""
-        try:
-            if isinstance(closed := getattr(self.dupe_filter, 'closed', None), Callable):
-                await closed()
-
-            if self.queue_manager:
-                await self.queue_manager.close()
-        except Exception as e:
-            self.error_handler.handle_error(
-                e,
-                context="Failed to close scheduler",
-                raise_error=False
-            )
-
-    def __len__(self):
-        """Get queue size"""
-        if not self.queue_manager:
-            return 0
-        # Return a synchronous approximation; the real size requires an async call
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import traceback
+from typing import Optional, Callable
+
+from crawlo.logging import get_logger
+from crawlo.utils.request import set_request
+from crawlo.utils.error_handler import ErrorHandler
+from crawlo.utils.misc import load_object
+from crawlo.project import common_call
+from crawlo.utils.request_serializer import RequestSerializer
+from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
+
+
+class Scheduler:
+    def __init__(self, crawler, dupe_filter, stats, priority):
+        self.crawler = crawler
+        self.queue_manager: Optional[QueueManager] = None
+        self.request_serializer = RequestSerializer()
+
+        self.logger = get_logger(self.__class__.__name__)
+        self.error_handler = ErrorHandler(self.__class__.__name__)
+        self.stats = stats
+        self.dupe_filter = dupe_filter
+        self.priority = priority
+
+    @classmethod
+    def create_instance(cls, crawler):
+        filter_cls = load_object(crawler.settings.get('FILTER_CLASS'))
+        o = cls(
+            crawler=crawler,
+            dupe_filter=filter_cls.create_instance(crawler),
+            stats=crawler.stats,
+            priority=crawler.settings.get('DEPTH_PRIORITY')
+        )
+        return o
+
+    async def open(self):
+        """Initialize scheduler and queue"""
+        self.logger.debug("Starting scheduler initialization...")
+        try:
+            # Create the queue configuration
+            queue_config = QueueConfig.from_settings(self.crawler.settings)
+
+            # Create the queue manager
+            self.queue_manager = QueueManager(queue_config)
+
+            # Initialize the queue
+            needs_config_update = await self.queue_manager.initialize()
+
+            # Check whether the filter configuration needs updating
+            updated_configs = []
+            if needs_config_update:
+                # True means the queue type changed; check the current queue type to decide the update direction
+                if self.queue_manager._queue_type == QueueType.REDIS:
+                    self._switch_to_redis_config()
+                    updated_configs.append("Redis")
+                else:
+                    self._switch_to_memory_config()
+                    updated_configs.append("memory")
+            else:
+                # Check whether the configuration needs updating (even if the queue manager did not request it)
+                # When QUEUE_TYPE is explicitly set to redis, configuration consistency should also be checked
+                queue_type_setting = self.crawler.settings.get('QUEUE_TYPE', 'memory')
+                if queue_type_setting == 'redis' or needs_config_update:
+                    updated_configs = self._check_filter_config()
+                else:
+                    updated_configs = []
+
+            # Process filter configuration updates
+            await self._process_filter_updates(needs_config_update, updated_configs)
+
+            # Log the key scheduler initialization info
+            status = self.queue_manager.get_status()
+            current_filter = self.crawler.settings.get('FILTER_CLASS')
+
+            self.logger.info(f"enabled filters: \n {current_filter}")
+
+            # Consolidate log output: merge several log lines into one key message
+            queue_type_setting = self.crawler.settings.get('QUEUE_TYPE', 'memory')
+            if queue_type_setting in ['auto', 'redis'] and updated_configs:
+                concurrency = self.crawler.settings.get('CONCURRENCY', 8)
+                delay = self.crawler.settings.get('DOWNLOAD_DELAY', 1.0)
+                self.logger.debug(f"Scheduler initialized [Queue type: {status['type']}, Status: {status['health']}, Concurrency: {concurrency}, Delay: {delay}s]")
+            else:
+                self.logger.debug(f"Scheduler initialized [Queue type: {status['type']}, Status: {status['health']}]")
+        except Exception as e:
+            self.logger.error(f"Scheduler initialization failed: {e}")
+            self.logger.debug(f"Detailed error information:\n{traceback.format_exc()}")
+            raise
+
+    def _check_filter_config(self):
+        """Check and update the filter configuration"""
+        updated_configs = []
+
+        if self.queue_manager._queue_type == QueueType.REDIS:
+            # Check whether the current filter is the memory filter
+            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
+            if 'memory_filter' in current_filter_class:
+                self._switch_to_redis_config()
+                updated_configs.append("Redis")
+        elif self.queue_manager._queue_type == QueueType.MEMORY:
+            # Check whether the current filter is the Redis filter
+            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
+            if 'aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class:
+                self._switch_to_memory_config()
+                updated_configs.append("memory")
+
+        return updated_configs
+
+    async def _process_filter_updates(self, needs_config_update, updated_configs):
+        """Handle filter update logic"""
+        # Check whether the configuration matches the queue type
+        current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
+        filter_matches_queue_type = self._is_filter_matching_queue_type(current_filter_class)
+
+        # Only recreate the filter instance when the configuration mismatches and an update is needed
+        if needs_config_update or not filter_matches_queue_type:
+            # If a configuration update is needed, perform it
+            if needs_config_update:
+                # Recreate the filter instance so the updated configuration takes effect
+                filter_cls = load_object(self.crawler.settings.get('FILTER_CLASS'))
+                self.dupe_filter = filter_cls.create_instance(self.crawler)
+
+                # Log a warning
+                original_mode = "standalone" if 'memory_filter' in current_filter_class else "distributed"
+                new_mode = "distributed" if self.queue_manager._queue_type == QueueType.REDIS else "standalone"
+                if original_mode != new_mode:
+                    self.logger.warning(f"runtime mode inconsistency detected: switched from {original_mode} to {new_mode} mode")
+            elif not filter_matches_queue_type:
+                # Configuration mismatch; update needed
+                if self.queue_manager._queue_type == QueueType.REDIS:
+                    self._switch_to_redis_config()
+                elif self.queue_manager._queue_type == QueueType.MEMORY:
+                    self._switch_to_memory_config()
+
+                # Recreate the filter instance
+                filter_cls = load_object(self.crawler.settings.get('FILTER_CLASS'))
+                self.dupe_filter = filter_cls.create_instance(self.crawler)
+
+    def _is_filter_matching_queue_type(self, current_filter_class):
+        """Check whether the filter configuration matches the queue type"""
+        return (
+            (self.queue_manager._queue_type == QueueType.REDIS and
+             ('aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class)) or
+            (self.queue_manager._queue_type == QueueType.MEMORY and
+             'memory_filter' in current_filter_class)
+        )
+
+    def _switch_to_redis_config(self):
+        """Switch to the Redis configuration"""
+        if self.queue_manager and self.queue_manager._queue_type == QueueType.REDIS:
+            # Check whether the current filter is the memory filter
+            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
+            updated_configs = []
+
+            if 'memory_filter' in current_filter_class:
+                # Update to the Redis filter
+                self.crawler.settings.set('FILTER_CLASS', 'crawlo.filters.aioredis_filter.AioRedisFilter')
+                updated_configs.append("filter")
+
+            # Check whether the current dedup pipeline is the memory dedup pipeline
+            current_dedup_pipeline = self.crawler.settings.get('DEFAULT_DEDUP_PIPELINE', '')
+            if 'memory_dedup_pipeline' in current_dedup_pipeline:
+                # Update to the Redis dedup pipeline
+                self.crawler.settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline')
+                # Also update the dedup pipeline in the PIPELINES list
+                pipelines = self.crawler.settings.get('PIPELINES', [])
+                if current_dedup_pipeline in pipelines:
+                    # Find the memory dedup pipeline and replace it with the Redis one
+                    index = pipelines.index(current_dedup_pipeline)
+                    pipelines[index] = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
+                    self.crawler.settings.set('PIPELINES', pipelines)
+                updated_configs.append("dedup pipeline")
+
+            # Consolidate log output
+            if updated_configs:
+                self.logger.info(f"configuration updated: {', '.join(updated_configs)} -> redis mode")
+
+    def _switch_to_memory_config(self):
+        """Switch to the memory configuration"""
+        if self.queue_manager and self.queue_manager._queue_type == QueueType.MEMORY:
+            # Check whether the current filter is the Redis filter
+            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
+            updated_configs = []
+
+            if 'aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class:
+                # Update to the memory filter
+                self.crawler.settings.set('FILTER_CLASS', 'crawlo.filters.memory_filter.MemoryFilter')
+                updated_configs.append("filter")
+
+            # Check whether the current dedup pipeline is the Redis dedup pipeline
+            current_dedup_pipeline = self.crawler.settings.get('DEFAULT_DEDUP_PIPELINE', '')
+            if 'redis_dedup_pipeline' in current_dedup_pipeline:
+                # Update to the memory dedup pipeline
+                self.crawler.settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline')
+                # Also update the dedup pipeline in the PIPELINES list
+                pipelines = self.crawler.settings.get('PIPELINES', [])
+                if current_dedup_pipeline in pipelines:
+                    # Find the Redis dedup pipeline and replace it with the memory one
+                    index = pipelines.index(current_dedup_pipeline)
+                    pipelines[index] = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+                    self.crawler.settings.set('PIPELINES', pipelines)
+                updated_configs.append("dedup pipeline")
+
+            # Consolidate log output
+            if updated_configs:
+                self.logger.debug(f"configuration updated: {', '.join(updated_configs)} -> memory mode")
+
+    async def next_request(self):
+        """Get next request"""
+        if not self.queue_manager:
+            return None
+
+        try:
+            request = await self.queue_manager.get()
+
+            # Restore the callback (when the request comes out of the Redis queue)
+            if request:
+                spider = getattr(self.crawler, 'spider', None)
+                request = self.request_serializer.restore_after_deserialization(request, spider)
+
+            return request
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to get next request",
+                raise_error=False
+            )
+            return None
+
+    async def enqueue_request(self, request):
+        """Add request to queue"""
+        if not request.dont_filter and await common_call(self.dupe_filter.requested, request):
+            self.dupe_filter.log_stats(request)
+            return False
+
+        if not self.queue_manager:
+            self.logger.error("Queue manager not initialized")
+            return False
+
+        set_request(request, self.priority)
+
+        try:
+            # Use the unified queue interface
+            success = await self.queue_manager.put(request, priority=getattr(request, 'priority', 0))
+
+            if success:
+                self.logger.debug(f"Request enqueued successfully: {request.url}")
+
+            return success
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to enqueue request",
+                raise_error=False
+            )
+            return False
+
+    def idle(self) -> bool:
+        """Check if queue is empty"""
+        return len(self) == 0
+
+    async def async_idle(self) -> bool:
+        """Asynchronously check if queue is empty (more accurate)"""
+        if not self.queue_manager:
+            return True
+        # Use the queue manager's async empty method
+        return await self.queue_manager.async_empty()
+
+    async def close(self):
+        """Close scheduler"""
+        try:
+            if isinstance(closed := getattr(self.dupe_filter, 'closed', None), Callable):
+                await closed()
+
+            if self.queue_manager:
+                await self.queue_manager.close()
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to close scheduler",
+                raise_error=False
+            )
+
+    def __len__(self):
+        """Get queue size"""
+        if not self.queue_manager:
+            return 0
+        # Return a synchronous approximation; the real size requires an async call
  return 0 if self.queue_manager.empty() else 1
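
The functional change in this file is the logging setup: 1.4.7 imports get_logger from crawlo.logging instead of crawlo.utils.log, and the log_level parameter disappears from Scheduler.__init__, create_instance, and the ErrorHandler call. A minimal before/after sketch for code that obtains a component logger the same way; the component name string is illustrative, only the two call shapes come from the diff above, and the claim that the new package resolves the level centrally is our assumption:

    # crawlo 1.4.5: the caller passed an explicit level
    # from crawlo.utils.log import get_logger
    # logger = get_logger(name="Scheduler", level="INFO")

    # crawlo 1.4.7: the caller passes only a name; the level is
    # presumably resolved inside the crawlo.logging package
    from crawlo.logging import get_logger

    logger = get_logger("Scheduler")
    logger.info("component initialized")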