crawlo-1.4.7-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl
This diff shows the contents of publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of crawlo may be problematic.
- crawlo/__init__.py +90 -90
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -140
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -379
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -320
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -451
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -290
- crawlo/crawler.py +698 -698
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -280
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -250
- crawlo/downloader/httpx_downloader.py +265 -265
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -425
- crawlo/downloader/selenium_downloader.py +486 -486
- crawlo/event.py +45 -45
- crawlo/exceptions.py +214 -214
- crawlo/extension/__init__.py +64 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -53
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -104
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +134 -134
- crawlo/filters/__init__.py +170 -170
- crawlo/filters/aioredis_filter.py +347 -347
- crawlo/filters/memory_filter.py +261 -261
- crawlo/framework.py +306 -306
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -391
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -240
- crawlo/initialization/phases.py +229 -229
- crawlo/initialization/registry.py +143 -143
- crawlo/initialization/utils.py +48 -48
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -42
- crawlo/logging/config.py +280 -276
- crawlo/logging/factory.py +175 -175
- crawlo/logging/manager.py +104 -104
- crawlo/middleware/__init__.py +87 -87
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -287
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +408 -376
- crawlo/network/response.py +598 -569
- crawlo/pipelines/__init__.py +52 -52
- crawlo/pipelines/base_pipeline.py +452 -452
- crawlo/pipelines/bloom_dedup_pipeline.py +145 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +196 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +104 -105
- crawlo/pipelines/mongo_pipeline.py +140 -139
- crawlo/pipelines/mysql_pipeline.py +468 -469
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -155
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +9 -9
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -591
- crawlo/queue/redis_priority_queue.py +518 -518
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +287 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +658 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +1 -1
- crawlo/templates/project/items.py.tmpl +13 -13
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -35
- crawlo/templates/project/settings.py.tmpl +113 -109
- crawlo/templates/project/settings_distributed.py.tmpl +160 -156
- crawlo/templates/project/settings_gentle.py.tmpl +174 -170
- crawlo/templates/project/settings_high_performance.py.tmpl +175 -171
- crawlo/templates/project/settings_minimal.py.tmpl +102 -98
- crawlo/templates/project/settings_simple.py.tmpl +172 -168
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -32
- crawlo/templates/spiders_init.py.tmpl +4 -4
- crawlo/tools/__init__.py +86 -86
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +74 -50
- crawlo/utils/batch_processor.py +276 -276
- crawlo/utils/config_manager.py +442 -442
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -335
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -157
- crawlo/utils/mysql_connection_pool.py +197 -197
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +90 -90
- crawlo/utils/redis_connection_pool.py +578 -578
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -278
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -337
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +138 -137
- crawlo/utils/singleton.py +69 -69
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/METADATA +831 -689
- crawlo-1.4.8.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -217
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -467
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -72
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +54 -54
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +118 -118
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/utils/log.py +0 -80
- crawlo/utils/url_utils.py +0 -40
- crawlo-1.4.7.dist-info/RECORD +0 -347
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
crawlo/task_manager.py
CHANGED
@@ -1,139 +1,139 @@
Every line of the file was removed and re-added with identical text in this rendering (the final `}` on line 139 is the only line marked unchanged), so the change is most likely whitespace or line-endings only. The reconstructed source, with comments translated from Chinese:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
import time
import asyncio
from typing import Set, Final
from collections import deque
from asyncio import Task, Future, Semaphore
from crawlo.logging import get_logger


class DynamicSemaphore(Semaphore):
    """A semaphore whose value can be adjusted dynamically."""

    def __init__(self, initial_value: int = 8):
        super().__init__(initial_value)
        self._initial_value = initial_value
        self._current_value = initial_value
        self._response_times = deque(maxlen=10)  # keep the 10 most recent response times
        self._last_adjust_time = time.time()

    def record_response_time(self, response_time: float):
        """Record a response time."""
        self._response_times.append(response_time)

    def adjust_concurrency(self):
        """Adjust concurrency dynamically based on observed response times."""
        current_time = time.time()
        # Rate-limit adjustments to at least 1 s apart (reduced from 2 s).
        if current_time - self._last_adjust_time < 1:
            return

        self._last_adjust_time = current_time

        if len(self._response_times) < 2:  # threshold reduced from 3 to 2
            return

        # Compute the average response time.
        avg_response_time = sum(self._response_times) / len(self._response_times)

        # Adjust concurrency according to the average.
        if avg_response_time < 0.2:  # fast responses: raise concurrency (threshold lowered from 0.3)
            new_concurrency = min(self._current_value + 5, self._initial_value * 3)  # step raised from 3 to 5; cap raised from 2x to 3x of the initial value
        elif avg_response_time > 1.0:  # slow responses: lower concurrency (threshold lowered from 1.5)
            new_concurrency = max(self._current_value - 5, max(1, self._initial_value // 3))  # step raised from 3 to 5; floor lowered from 1/2 to 1/3 of the initial value
        else:
            return  # keep the current concurrency

        # Only adjust when the change is significant.
        if abs(new_concurrency - self._current_value) > 1:
            self._adjust_semaphore_value(new_concurrency)

    def _adjust_semaphore_value(self, new_value: int):
        """Apply a new semaphore value."""
        if new_value > self._current_value:
            # Grow the semaphore by releasing the difference.
            for _ in range(new_value - self._current_value):
                self.release()
        elif new_value < self._current_value:
            # Shrinking only records the new target value;
            # the actual reduction happens on acquire.
            pass

        self._current_value = new_value
        # Note: Python's Semaphore has no way to set its internal counter directly,
        # so growth goes through release() and shrinking must be enforced on acquire.


class TaskManager:

    def __init__(self, total_concurrency: int = 8):
        self.current_task: Final[Set] = set()
        # Use a dynamic semaphore instead of a plain one.
        self.semaphore: DynamicSemaphore = DynamicSemaphore(max(1, total_concurrency))
        self.logger = get_logger(self.__class__.__name__)

        # Exception statistics.
        self._exception_count = 0
        self._total_tasks = 0

    async def create_task(self, coroutine) -> Task:
        # Acquire the semaphore to bound concurrency.
        await self.semaphore.acquire()

        task = asyncio.create_task(coroutine)
        self.current_task.add(task)
        self._total_tasks += 1

        def done_callback(_future: Future) -> None:
            try:
                self.current_task.discard(task)  # discard instead of remove, avoiding KeyError

                # Retrieve the task's result or exception: calling result()/exception()
                # is essential so the exception counts as "retrieved".
                try:
                    # Fetch the result; a stored exception is re-raised here.
                    result = _future.result()
                    # On success, success statistics could be recorded here.
                except Exception as exception:
                    # The exception has been retrieved, so asyncio will not emit
                    # a "never retrieved" warning.
                    self._exception_count += 1

                    # Log the exception details.
                    self.logger.error(
                        f"Task completed with exception: {type(exception).__name__}: {exception}"
                    )
                    self.logger.debug("Task exception details:", exc_info=exception)

                    # Further handling (e.g. reporting to a monitoring system) could go here.

            except Exception as e:
                # Guard against exceptions raised inside the callback itself.
                self.logger.error(f"Error in task done callback: {e}")
            finally:
                # Always release the semaphore.
                self.semaphore.release()

                # Periodically adjust concurrency (every 2 tasks, previously every 3).
                if self._total_tasks % 2 == 0:
                    self.semaphore.adjust_concurrency()

        task.add_done_callback(done_callback)

        return task

    def all_done(self) -> bool:
        return len(self.current_task) == 0

    def record_response_time(self, response_time: float):
        """Record a task's response time, used to adjust concurrency dynamically."""
        self.semaphore.record_response_time(response_time)

    def get_stats(self) -> dict:
        """Return task-manager statistics."""
        return {
            'active_tasks': len(self.current_task),
            'total_tasks': self._total_tasks,
            'exception_count': self._exception_count,
            'success_rate': (self._total_tasks - self._exception_count) / max(1, self._total_tasks) * 100,
            'current_concurrency': self.semaphore._current_value
        }
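The public surface above (create_task, record_response_time, all_done, get_stats) implies a driving loop like the one below. This is a minimal sketch, not code from the package: the fetch coroutine and URLs are placeholders, and only methods shown above are used.

import asyncio
import time

from crawlo.task_manager import TaskManager


async def fetch(url: str) -> str:
    await asyncio.sleep(0.1)  # stand-in for a real download
    return f"body of {url}"


async def main():
    manager = TaskManager(total_concurrency=8)

    async def timed_fetch(url: str) -> str:
        t0 = time.monotonic()
        body = await fetch(url)
        # Feed the observed latency back so DynamicSemaphore can adapt.
        manager.record_response_time(time.monotonic() - t0)
        return body

    for i in range(20):
        # create_task blocks on the semaphore once 8 tasks are in flight.
        await manager.create_task(timed_fetch(f"https://example.com/{i}"))

    while not manager.all_done():
        await asyncio.sleep(0.05)
    print(manager.get_stats())


asyncio.run(main())

Because done_callback retrieves every task's result, failures show up in get_stats() as exception_count instead of surfacing as asyncio's "Task exception was never retrieved" warning.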
crawlo/templates/crawlo.cfg.tmpl
CHANGED
@@ -1,11 +1,11 @@
Lines 1-10 were removed and re-added with identical text (line 11 is unchanged), the same whitespace-only rewrite as above. Reconstructed template, comments translated:

# crawlo.cfg
# Project configuration file

[settings]
# Path of the settings module
default = {{project_name}}.settings

[deploy]
# Deployment configuration (optional)
# url = http://localhost:6800/
# project = {{project_name}}
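Rendering is a plain substitution of {{project_name}}; for a hypothetical project named myproject the generated crawlo.cfg would read:

# crawlo.cfg
# Project configuration file

[settings]
# Path of the settings module
default = myproject.settings

[deploy]
# Deployment configuration (optional)
# url = http://localhost:6800/
# project = myproject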
crawlo/templates/project/__init__.py.tmpl
CHANGED

@@ -1,2 +1,2 @@
Same whole-file rewrite pattern; line 2 is unchanged. Reconstructed template, docstring translated:

# -*- coding: UTF-8 -*-
"""{{project_name}} project"""
crawlo/templates/project/items.py.tmpl
CHANGED

@@ -1,14 +1,14 @@
Same whole-file rewrite pattern; line 14 (`pass`) is unchanged. Reconstructed template, comments translated:

# -*- coding: UTF-8 -*-
"""
Item definitions
"""

from crawlo.items import Item, Field


class {{project_name|title}}Item(Item):
    """Data item"""
    id = Field()
    # price = Field()
    # description = Field()
    pass
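The {{project_name|title}} placeholder runs the name through a title-case filter, so a hypothetical project named shop yields a ShopItem class. The dict-style field access below is an assumption based on the Scrapy-like Item/Field API, not something this diff confirms:

# Hypothetical rendering for project_name = "shop".
from crawlo.items import Item, Field


class ShopItem(Item):
    """Data item"""
    id = Field()
    price = Field()


item = ShopItem()
item['id'] = 1        # assumed Scrapy-style mapping access
item['price'] = 9.99  # fields must be declared with Field() above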
crawlo/templates/project/middlewares.py.tmpl
CHANGED

@@ -1,39 +1,39 @@
Same whole-file rewrite pattern; line 39 (`return None`) is unchanged. Reconstructed template, comments translated:

# -*- coding: UTF-8 -*-
"""
{{project_name}}.middlewares
============================
Example custom middleware
"""

from crawlo.network import Request, Response
from crawlo.logging import get_logger


class {{project_name|title}}Middleware:
    """
    Middleware for the {{project_name}} project
    """

    def __init__(self):
        self.logger = get_logger(self.__class__.__name__)

    def process_request(self, request, spider):
        """
        Called before the request is executed by the downloader
        """
        self.logger.info(f"Processing request: {request.url}")
        return None

    def process_response(self, request, response, spider):
        """
        Called before the response is handled by the spider
        """
        self.logger.info(f"Received response: {request.url} - status: {response.status_code}")
        return response

    def process_exception(self, request, exception, spider):
        """
        Called when an exception occurs during download or processing
        """
        self.logger.error(f"Request error: {request.url} - {exception}")
        return None
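The hooks follow the Scrapy-style contract the template implies: process_request returns None to let the request proceed, process_response must return a response, and process_exception returns None to defer to other handlers. A minimal sketch of a middleware built on that skeleton; the mutable request.headers mapping is an assumption, not confirmed by this diff:

from crawlo.logging import get_logger


class CustomHeaderMiddleware:
    """Stamps every outgoing request with an extra header (sketch)."""

    def __init__(self):
        self.logger = get_logger(self.__class__.__name__)

    def process_request(self, request, spider):
        # Assumption: request.headers behaves like a mutable mapping.
        request.headers['X-Crawled-By'] = spider.name
        return None  # None lets the request continue down the chain

    def process_response(self, request, response, spider):
        if response.status_code >= 400:
            self.logger.warning(f"HTTP {response.status_code} for {request.url}")
        return response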
crawlo/templates/project/pipelines.py.tmpl
CHANGED

@@ -1,36 +1,36 @@
Same whole-file rewrite pattern; line 36 is unchanged. Reconstructed template, comments translated:

# -*- coding: UTF-8 -*-
"""
{{project_name}}.pipelines
==========================
Example item pipeline
"""

from crawlo.logging import get_logger


class {{project_name|title}}Pipeline:
    """
    Item pipeline for the {{project_name}} project
    """

    def __init__(self):
        self.logger = get_logger(self.__class__.__name__)

    def process_item(self, item, spider):
        """
        Process an item
        """
        self.logger.info(f"Processing item: {item}")
        return item

    def open_spider(self, spider):
        """
        Called when the spider starts
        """
        self.logger.info(f"Pipeline started, ready to process data for spider '{spider.name}'")

    def close_spider(self, spider):
        """
        Called when the spider closes
        """
        self.logger.info("Pipeline closed")
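A pipeline built on this skeleton can hold resources for the whole run: open them in open_spider, use them in process_item, release them in close_spider. A minimal JSON-lines sketch under that pattern; converting the item with dict(item) is an assumption, not confirmed by this diff:

import json

from crawlo.logging import get_logger


class JsonLinesPipeline:
    """Appends each processed item to a .jl file (sketch)."""

    def __init__(self):
        self.logger = get_logger(self.__class__.__name__)
        self.file = None

    def open_spider(self, spider):
        # One output file per spider run, named after the spider.
        self.file = open(f"{spider.name}.jl", "w", encoding="utf-8")

    def process_item(self, item, spider):
        # Assumption: the item converts to a plain dict, as Scrapy-style items do.
        self.file.write(json.dumps(dict(item), ensure_ascii=False) + "\n")
        return item  # return the item so later pipelines still run

    def close_spider(self, spider):
        if self.file:
            self.file.close()
        self.logger.info("JSON-lines pipeline closed")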