crawlo-1.4.7-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +90 -90
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -140
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -379
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -320
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -451
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -290
- crawlo/crawler.py +698 -698
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -280
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -250
- crawlo/downloader/httpx_downloader.py +265 -265
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -425
- crawlo/downloader/selenium_downloader.py +486 -486
- crawlo/event.py +45 -45
- crawlo/exceptions.py +214 -214
- crawlo/extension/__init__.py +64 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -53
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -104
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +134 -134
- crawlo/filters/__init__.py +170 -170
- crawlo/filters/aioredis_filter.py +347 -347
- crawlo/filters/memory_filter.py +261 -261
- crawlo/framework.py +306 -306
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -391
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -240
- crawlo/initialization/phases.py +229 -229
- crawlo/initialization/registry.py +143 -143
- crawlo/initialization/utils.py +48 -48
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -42
- crawlo/logging/config.py +280 -276
- crawlo/logging/factory.py +175 -175
- crawlo/logging/manager.py +104 -104
- crawlo/middleware/__init__.py +87 -87
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -287
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +408 -376
- crawlo/network/response.py +598 -569
- crawlo/pipelines/__init__.py +52 -52
- crawlo/pipelines/base_pipeline.py +452 -452
- crawlo/pipelines/bloom_dedup_pipeline.py +145 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +196 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +104 -105
- crawlo/pipelines/mongo_pipeline.py +140 -139
- crawlo/pipelines/mysql_pipeline.py +468 -469
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -155
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +9 -9
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -591
- crawlo/queue/redis_priority_queue.py +518 -518
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +287 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +658 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +1 -1
- crawlo/templates/project/items.py.tmpl +13 -13
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -35
- crawlo/templates/project/settings.py.tmpl +113 -109
- crawlo/templates/project/settings_distributed.py.tmpl +160 -156
- crawlo/templates/project/settings_gentle.py.tmpl +174 -170
- crawlo/templates/project/settings_high_performance.py.tmpl +175 -171
- crawlo/templates/project/settings_minimal.py.tmpl +102 -98
- crawlo/templates/project/settings_simple.py.tmpl +172 -168
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -32
- crawlo/templates/spiders_init.py.tmpl +4 -4
- crawlo/tools/__init__.py +86 -86
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +74 -50
- crawlo/utils/batch_processor.py +276 -276
- crawlo/utils/config_manager.py +442 -442
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -335
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -157
- crawlo/utils/mysql_connection_pool.py +197 -197
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +90 -90
- crawlo/utils/redis_connection_pool.py +578 -578
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -278
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -337
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +138 -137
- crawlo/utils/singleton.py +69 -69
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/METADATA +831 -689
- crawlo-1.4.8.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -217
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -467
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -72
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +54 -54
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +118 -118
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/utils/log.py +0 -80
- crawlo/utils/url_utils.py +0 -40
- crawlo-1.4.7.dist-info/RECORD +0 -347
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
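To reproduce a per-file comparison like the ones below, here is a minimal sketch. Wheels are ordinary zip archives, so the standard library is enough; the wheel paths and the chosen member are assumptions, so substitute the files you actually downloaded.

# Minimal sketch: diff one file between two wheel archives.
# The wheel paths below are assumptions; point them at your own downloads.
import difflib
import zipfile

OLD_WHEEL = "crawlo-1.4.7-py3-none-any.whl"  # hypothetical local path
NEW_WHEEL = "crawlo-1.4.8-py3-none-any.whl"  # hypothetical local path
MEMBER = "tests/dynamic_loading_test.py"     # archive member to compare

def read_member(wheel_path: str, member: str) -> list[str]:
    # A wheel is a plain zip file, so zipfile can read any member directly.
    with zipfile.ZipFile(wheel_path) as zf:
        return zf.read(member).decode("utf-8").splitlines(keepends=True)

old_lines = read_member(OLD_WHEEL, MEMBER)
new_lines = read_member(NEW_WHEEL, MEMBER)

for line in difflib.unified_diff(old_lines, new_lines,
                                 fromfile=f"1.4.7/{MEMBER}",
                                 tofile=f"1.4.8/{MEMBER}"):
    print(line, end="")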
tests/dynamic_loading_test.py
CHANGED
@@ -1,105 +1,105 @@
Lines 1-104 are removed and re-added with identical text; only line 105 is unchanged. The hunk is a whole-file rewrite with no visible content change, consistent with a line-ending or encoding-only difference. Reconstructed file content:

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Dynamic loading feature test example
==================
Verify that the newly added dynamic loading features behave as expected
"""
import asyncio
from crawlo import Spider, Request


class TabControlTestSpider(Spider):
    """Tab-control test spider"""
    name = 'tab_control_test_spider'

    custom_settings = {
        'DOWNLOADER_TYPE': 'playwright',
        'PLAYWRIGHT_BROWSER_TYPE': 'chromium',
        'PLAYWRIGHT_HEADLESS': True,
        'PLAYWRIGHT_SINGLE_BROWSER_MODE': True,
        'PLAYWRIGHT_MAX_PAGES_PER_BROWSER': 3,  # cap the page count at 3
    }

    def start_requests(self):
        # Request several URLs to verify the tab-control feature
        urls = [
            'https://httpbin.org/html',        # simple HTML page
            'https://httpbin.org/json',        # JSON page
            'https://httpbin.org/xml',         # XML page
            'https://httpbin.org/robots.txt',  # plain-text page
            'https://httpbin.org/headers',     # request-headers page
        ]

        for url in urls:
            yield Request(url=url, callback=self.parse)

    def parse(self, response):
        """Parse the response"""
        print(f"Downloaded page: {response.url}")
        print(f"Status code: {response.status_code}")
        print(f"Body length: {len(response.text)}")
        yield {'url': response.url, 'status': response.status_code}


class PaginationTestSpider(Spider):
    """Pagination feature test spider"""
    name = 'pagination_test_spider'

    custom_settings = {
        'DOWNLOADER_TYPE': 'playwright',
        'PLAYWRIGHT_BROWSER_TYPE': 'chromium',
        'PLAYWRIGHT_HEADLESS': True,
    }

    def start_requests(self):
        # A request that carries pagination actions
        request = Request(
            url='https://httpbin.org/html',
            callback=self.parse_with_pagination
        ).set_dynamic_loader(
            True,
            {
                "pagination_actions": [
                    # simulate mouse scrolling
                    {
                        "type": "scroll",
                        "params": {
                            "count": 2,
                            "distance": 300,
                            "delay": 500
                        }
                    }
                ]
            }
        )
        yield request

    def parse_with_pagination(self, response):
        """Parse a response fetched with pagination actions"""
        print(f"Downloaded page: {response.url}")
        yield {'url': response.url, 'status': response.status_code}


# Run the tests
async def run_tests():
    """Run the tests"""
    print("=== Dynamic loading feature tests ===\n")

    # Test 1: tab control
    print("1. Tab-control test:")
    print("   Max pages set to 3; 5 URLs requested")
    print("   Expected: all pages download normally, but only 3 tabs are created\n")

    # Test 2: pagination
    print("2. Pagination test:")
    print("   Mouse-scroll pagination actions configured")
    print("   Expected: scroll actions run after the page finishes loading\n")

    print("Run the following commands to execute the tests:")
    print("crawlo crawl tab_control_test_spider")
    print("crawlo crawl pagination_test_spider")


if __name__ == '__main__':
    asyncio.run(run_tests())
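A hunk like the one above, where every line is marked rewritten but the visible text is identical, usually points at an invisible byte-level difference. A minimal sketch for checking whether two extracted copies differ only in line endings or a UTF-8 BOM (the file paths are hypothetical):

# Minimal sketch: check whether two versions of a file differ only in
# line endings / BOM, which renders as a whole-file rewrite in a diff.
OLD = "old/dynamic_loading_test.py"  # hypothetical paths
NEW = "new/dynamic_loading_test.py"

def normalized(path: str) -> bytes:
    with open(path, "rb") as f:
        data = f.read()
    if data.startswith(b"\xef\xbb\xbf"):  # drop a UTF-8 BOM if present
        data = data[3:]
    # unify CRLF / CR line endings to LF
    return data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")

with open(OLD, "rb") as f_old, open(NEW, "rb") as f_new:
    same_bytes = f_old.read() == f_new.read()
same_text = normalized(OLD) == normalized(NEW)
print(f"byte-identical: {same_bytes}, identical after normalization: {same_text}")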
tests/error_handling_example.py
CHANGED
@@ -1,172 +1,172 @@
Lines 1-171 are removed and re-added with identical text; only line 172 is unchanged. As above, this is a whole-file rewrite with no visible content change. Reconstructed file content:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
Error handling usage example
Shows how to use the enhanced error-handling utilities in a real project
"""
import sys
import os
import asyncio
import time
from typing import Optional

# Add the project root to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.utils.enhanced_error_handler import (
    EnhancedErrorHandler,
    ErrorContext,
    DetailedException,
    handle_exception
)


# Create an error-handler instance
error_handler = EnhancedErrorHandler("example_module")


class DatabaseConnectionError(DetailedException):
    """Database connection error"""
    pass


class NetworkTimeoutError(DetailedException):
    """Network timeout error"""
    pass


@handle_exception(context="database connection", module="database", function="connect_to_db", error_code="DB001")
def connect_to_db(host: str, port: int) -> bool:
    """Simulate a database connection"""
    print(f"Connecting to database {host}:{port}...")

    # Simulate a connection failure
    if host == "invalid.host":
        raise DatabaseConnectionError(
            "Could not connect to the database server",
            context=ErrorContext(context="database connection failed", module="database", function="connect_to_db"),
            error_code="DB001",
            host=host,
            port=port
        )

    # Simulate a successful connection
    print("Database connection succeeded")
    return True


@error_handler.retry_on_failure(max_retries=3, delay=0.5, backoff_factor=2.0)
async def fetch_data_from_api(url: str) -> dict:
    """Simulate fetching data from an API (with retries)"""
    print(f"Fetching data from {url}...")

    # Simulate network problems
    if url == "https://slow.api.com/data":
        time.sleep(2)  # simulate a slow response
        raise NetworkTimeoutError(
            "API response timed out",
            context=ErrorContext(context="API call timed out", module="api", function="fetch_data_from_api"),
            error_code="API001",
            url=url,
            timeout=1
        )

    if url == "https://error.api.com/data":
        raise NetworkTimeoutError(
            "API server error",
            context=ErrorContext(context="API server error", module="api", function="fetch_data_from_api"),
            error_code="API002",
            url=url,
            status_code=500
        )

    # Simulate a successful response
    print("API data fetched successfully")
    return {"data": "sample data", "status": "success"}


def process_data(data: dict) -> Optional[str]:
    """Process data"""
    try:
        print("Processing data...")

        # Simulate processing errors
        if not data:
            raise ValueError("Data is empty")

        if "error" in data:
            raise RuntimeError(f"Data processing failed: {data['error']}")

        # Simulate successful processing
        result = f"Processing finished: {data.get('data', 'no data')}"
        print(f"{result}")
        return result

    except Exception as e:
        # Handle the exception with the error handler
        context = ErrorContext(context="data processing", module="data_processor", function="process_data")
        error_handler.handle_error(e, context=context, raise_error=False)
        return None


async def main():
    """Main entry point"""
    print("Error handling usage example")
    print("=" * 50)

    # 1. Database connection (success case)
    print("1. Database connection (success case)")
    try:
        connect_to_db("localhost", 5432)
    except Exception as e:
        print(f"Unexpected error: {e}")
    print()

    # 2. Database connection (failure case)
    print("2. Database connection (failure case)")
    try:
        connect_to_db("invalid.host", 5432)
    except Exception as e:
        print(f"Expected database connection error: {e}")
    print()

    # 3. API call (success case)
    print("3. API call (success case)")
    try:
        data = await fetch_data_from_api("https://api.com/data")
        process_data(data)
    except Exception as e:
        print(f"API call error: {e}")
    print()

    # 4. API call (failure case, with retries)
    print("4. API call (failure case, with retries)")
    try:
        data = await fetch_data_from_api("https://error.api.com/data")
        process_data(data)
    except Exception as e:
        print(f"API call error (still failing after retries): {e}")
    print()

    # 5. Data processing (failure cases)
    print("5. Data processing (failure cases)")
    process_data(None)  # empty data
    process_data({"error": "invalid format"})  # bad data
    print()

    # 6. Inspect the error history
    print("6. Error history")
    history = error_handler.get_error_history()
    print(f"{len(history)} errors recorded:")
    for i, record in enumerate(history, 1):
        print(f"  {i}. {record['exception_type']}: {record['message']}")
        if record['context']:
            print(f"     context: {record['context']}")
    print()

    print("=" * 50)
    print("Example run finished")


if __name__ == "__main__":
    asyncio.run(main())
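The failing call in step 4 is decorated with retry_on_failure(max_retries=3, delay=0.5, backoff_factor=2.0). Assuming the decorator applies plain exponential backoff (delay * backoff_factor ** attempt, which is an assumption about crawlo's semantics, not something this diff confirms), the waits between attempts work out as follows:

# Assumed backoff schedule for retry_on_failure(max_retries=3, delay=0.5,
# backoff_factor=2.0); the formula below is an assumption, not taken from
# crawlo's source.
delay, backoff_factor, max_retries = 0.5, 2.0, 3
waits = [delay * backoff_factor ** attempt for attempt in range(max_retries)]
print(waits, sum(waits))  # [0.5, 1.0, 2.0] -> 3.5 seconds of waiting in total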