crawlo-1.4.1-py3-none-any.whl → crawlo-1.4.3-py3-none-any.whl
This diff compares the publicly released contents of two package versions as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: the registry has flagged this version of crawlo as potentially problematic.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -228
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.1.dist-info/METADATA +0 -1199
- crawlo-1.4.1.dist-info/RECORD +0 -309
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/core/scheduler.py
CHANGED
@@ -1,258 +1,292 @@

The file was rewritten wholesale; only the new (1.4.3) side of the diff survived extraction:

```python
#!/usr/bin/python
# -*- coding:UTF-8 -*-
import traceback
from typing import Optional, Callable

from crawlo.utils.log import get_logger
from crawlo.utils.request import set_request
from crawlo.utils.error_handler import ErrorHandler
from crawlo.utils.class_loader import load_class
from crawlo.project import common_call
from crawlo.utils.request_serializer import RequestSerializer
from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType


class Scheduler:
    def __init__(self, crawler, dupe_filter, stats, log_level, priority):
        self.crawler = crawler
        self.queue_manager: Optional[QueueManager] = None
        self.request_serializer = RequestSerializer()

        self.logger = get_logger(name=self.__class__.__name__, level=log_level)
        self.error_handler = ErrorHandler(self.__class__.__name__, log_level)
        self.stats = stats
        self.dupe_filter = dupe_filter
        self.priority = priority

    @classmethod
    def create_instance(cls, crawler):
        filter_cls = load_class(crawler.settings.get('FILTER_CLASS'))
        o = cls(
            crawler=crawler,
            dupe_filter=filter_cls.create_instance(crawler),
            stats=crawler.stats,
            log_level=crawler.settings.get('LOG_LEVEL'),
            priority=crawler.settings.get('DEPTH_PRIORITY')
        )
        return o

    async def open(self):
        """Initialize scheduler and queue"""
        self.logger.debug("Starting scheduler initialization...")
        try:
            # Create the queue configuration
            queue_config = QueueConfig.from_settings(self.crawler.settings)

            # Create the queue manager
            self.queue_manager = QueueManager(queue_config)

            # Initialize the queue
            needs_config_update = await self.queue_manager.initialize()

            # Check whether the filter configuration needs updating
            updated_configs = []
            if needs_config_update:
                # True means the queue type changed; inspect the current type
                # to decide which direction to update
                if self.queue_manager._queue_type == QueueType.REDIS:
                    self._switch_to_redis_config()
                    updated_configs.append("Redis")
                else:
                    self._switch_to_memory_config()
                    updated_configs.append("memory")
            else:
                # Check whether the config needs updating even when the queue
                # manager did not request it: an explicit QUEUE_TYPE of redis
                # should also be checked for consistency
                queue_type_setting = self.crawler.settings.get('QUEUE_TYPE', 'memory')
                if queue_type_setting == 'redis' or needs_config_update:
                    updated_configs = self._check_filter_config()
                else:
                    updated_configs = []

            # Apply filter configuration updates
            await self._process_filter_updates(needs_config_update, updated_configs)

            # Log the key scheduler initialization info
            status = self.queue_manager.get_status()
            current_filter = self.crawler.settings.get('FILTER_CLASS')

            self.logger.info(f"enabled filters: \n {current_filter}")

            # Consolidate several log lines into a single key message
            queue_type_setting = self.crawler.settings.get('QUEUE_TYPE', 'memory')
            if queue_type_setting in ['auto', 'redis'] and updated_configs:
                concurrency = self.crawler.settings.get('CONCURRENCY', 8)
                delay = self.crawler.settings.get('DOWNLOAD_DELAY', 1.0)
                self.logger.debug(f"Scheduler initialized [Queue type: {status['type']}, Status: {status['health']}, Concurrency: {concurrency}, Delay: {delay}s]")
            else:
                self.logger.debug(f"Scheduler initialized [Queue type: {status['type']}, Status: {status['health']}]")
        except Exception as e:
            self.logger.error(f"Scheduler initialization failed: {e}")
            self.logger.debug(f"Detailed error information:\n{traceback.format_exc()}")
            raise

    def _check_filter_config(self):
        """Check and update the filter configuration"""
        updated_configs = []

        if self.queue_manager._queue_type == QueueType.REDIS:
            # Is the current filter the in-memory filter?
            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
            if 'memory_filter' in current_filter_class:
                self._switch_to_redis_config()
                updated_configs.append("Redis")
        elif self.queue_manager._queue_type == QueueType.MEMORY:
            # Is the current filter the Redis filter?
            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
            if 'aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class:
                self._switch_to_memory_config()
                updated_configs.append("memory")

        return updated_configs

    async def _process_filter_updates(self, needs_config_update, updated_configs):
        """Handle the filter update logic"""
        # Check whether the configuration matches the queue type
        current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
        filter_matches_queue_type = self._is_filter_matching_queue_type(current_filter_class)

        # Only recreate the filter instance when the config mismatches and an update is needed
        if needs_config_update or not filter_matches_queue_type:
            # If an update is required, perform it
            if needs_config_update:
                # Recreate the filter instance so it picks up the updated configuration
                filter_cls = load_class(self.crawler.settings.get('FILTER_CLASS'))
                self.dupe_filter = filter_cls.create_instance(self.crawler)

                # Log a warning
                original_mode = "standalone" if 'memory_filter' in current_filter_class else "distributed"
                new_mode = "distributed" if self.queue_manager._queue_type == QueueType.REDIS else "standalone"
                if original_mode != new_mode:
                    self.logger.warning(f"runtime mode inconsistency detected: switched from {original_mode} to {new_mode} mode")
            elif not filter_matches_queue_type:
                # Configuration mismatch; update it
                if self.queue_manager._queue_type == QueueType.REDIS:
                    self._switch_to_redis_config()
                elif self.queue_manager._queue_type == QueueType.MEMORY:
                    self._switch_to_memory_config()

                # Recreate the filter instance
                filter_cls = load_class(self.crawler.settings.get('FILTER_CLASS'))
                self.dupe_filter = filter_cls.create_instance(self.crawler)

    def _is_filter_matching_queue_type(self, current_filter_class):
        """Check whether the filter configuration matches the queue type"""
        return (
            (self.queue_manager._queue_type == QueueType.REDIS and
             ('aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class)) or
            (self.queue_manager._queue_type == QueueType.MEMORY and
             'memory_filter' in current_filter_class)
        )

    def _switch_to_redis_config(self):
        """Switch to the Redis configuration"""
        if self.queue_manager and self.queue_manager._queue_type == QueueType.REDIS:
            # Is the current filter the in-memory filter?
            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
            updated_configs = []

            if 'memory_filter' in current_filter_class:
                # Switch to the Redis filter
                self.crawler.settings.set('FILTER_CLASS', 'crawlo.filters.aioredis_filter.AioRedisFilter')
                updated_configs.append("filter")

            # Is the current dedup pipeline the in-memory one?
            current_dedup_pipeline = self.crawler.settings.get('DEFAULT_DEDUP_PIPELINE', '')
            if 'memory_dedup_pipeline' in current_dedup_pipeline:
                # Switch to the Redis dedup pipeline
                self.crawler.settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline')
                # Also replace the dedup pipeline in the PIPELINES list
                pipelines = self.crawler.settings.get('PIPELINES', [])
                if current_dedup_pipeline in pipelines:
                    # Find and replace the memory dedup pipeline with the Redis one
                    index = pipelines.index(current_dedup_pipeline)
                    pipelines[index] = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
                    self.crawler.settings.set('PIPELINES', pipelines)
                updated_configs.append("dedup pipeline")

            # Consolidated log output
            if updated_configs:
                self.logger.info(f"configuration updated: {', '.join(updated_configs)} -> redis mode")

    def _switch_to_memory_config(self):
        """Switch to the in-memory configuration"""
        if self.queue_manager and self.queue_manager._queue_type == QueueType.MEMORY:
            # Is the current filter the Redis filter?
            current_filter_class = self.crawler.settings.get('FILTER_CLASS', '')
            updated_configs = []

            if 'aioredis_filter' in current_filter_class or 'redis_filter' in current_filter_class:
                # Switch to the in-memory filter
                self.crawler.settings.set('FILTER_CLASS', 'crawlo.filters.memory_filter.MemoryFilter')
                updated_configs.append("filter")

            # Is the current dedup pipeline the Redis one?
            current_dedup_pipeline = self.crawler.settings.get('DEFAULT_DEDUP_PIPELINE', '')
            if 'redis_dedup_pipeline' in current_dedup_pipeline:
                # Switch to the in-memory dedup pipeline
                self.crawler.settings.set('DEFAULT_DEDUP_PIPELINE', 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline')
                # Also replace the dedup pipeline in the PIPELINES list
                pipelines = self.crawler.settings.get('PIPELINES', [])
                if current_dedup_pipeline in pipelines:
                    # Find and replace the Redis dedup pipeline with the in-memory one
                    index = pipelines.index(current_dedup_pipeline)
                    pipelines[index] = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
                    self.crawler.settings.set('PIPELINES', pipelines)
                updated_configs.append("dedup pipeline")

            # Consolidated log output
            if updated_configs:
                self.logger.debug(f"configuration updated: {', '.join(updated_configs)} -> memory mode")

    async def next_request(self):
        """Get next request"""
        if not self.queue_manager:
            return None

        try:
            request = await self.queue_manager.get()

            # Restore the callback (after the request comes out of the Redis queue)
            if request:
                spider = getattr(self.crawler, 'spider', None)
                request = self.request_serializer.restore_after_deserialization(request, spider)

            return request
        except Exception as e:
            self.error_handler.handle_error(
                e,
                context="Failed to get next request",
                raise_error=False
            )
            return None

    async def enqueue_request(self, request):
        """Add request to queue"""
        if not request.dont_filter and await common_call(self.dupe_filter.requested, request):
            self.dupe_filter.log_stats(request)
            return False

        if not self.queue_manager:
            self.logger.error("Queue manager not initialized")
            return False

        set_request(request, self.priority)

        try:
            # Use the unified queue interface
            success = await self.queue_manager.put(request, priority=getattr(request, 'priority', 0))

            if success:
                self.logger.debug(f"Request enqueued successfully: {request.url}")

            return success
        except Exception as e:
            self.error_handler.handle_error(
                e,
                context="Failed to enqueue request",
                raise_error=False
            )
            return False

    def idle(self) -> bool:
        """Check if queue is empty"""
        return len(self) == 0

    async def async_idle(self) -> bool:
        """Asynchronously check if queue is empty (more accurate)"""
        if not self.queue_manager:
            return True
        # Use the queue manager's async empty() method
        return await self.queue_manager.async_empty()

    async def close(self):
        """Close scheduler"""
        try:
            if isinstance(closed := getattr(self.dupe_filter, 'closed', None), Callable):
                await closed()

            if self.queue_manager:
                await self.queue_manager.close()
        except Exception as e:
            self.error_handler.handle_error(
                e,
                context="Failed to close scheduler",
                raise_error=False
            )

    def __len__(self):
        """Get queue size"""
        if not self.queue_manager:
            return 0
        # Return a synchronous approximation; the real size must be fetched asynchronously
        return 0 if self.queue_manager.empty() else 1
```