crawlo 1.4.6-py3-none-any.whl → 1.4.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +90 -89
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -341
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -438
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -291
- crawlo/crawler.py +698 -657
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -276
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -247
- crawlo/downloader/httpx_downloader.py +265 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -402
- crawlo/downloader/selenium_downloader.py +486 -472
- crawlo/event.py +45 -11
- crawlo/exceptions.py +215 -82
- crawlo/extension/__init__.py +65 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -103
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +135 -0
- crawlo/filters/__init__.py +170 -153
- crawlo/filters/aioredis_filter.py +348 -264
- crawlo/filters/memory_filter.py +261 -276
- crawlo/framework.py +306 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -434
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -194
- crawlo/initialization/phases.py +230 -149
- crawlo/initialization/registry.py +143 -145
- crawlo/initialization/utils.py +49 -0
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -46
- crawlo/logging/config.py +277 -197
- crawlo/logging/factory.py +175 -171
- crawlo/logging/manager.py +104 -112
- crawlo/middleware/__init__.py +87 -24
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -253
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +375 -379
- crawlo/network/response.py +569 -664
- crawlo/pipelines/__init__.py +53 -22
- crawlo/pipelines/base_pipeline.py +452 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +140 -132
- crawlo/pipelines/mysql_pipeline.py +469 -476
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -156
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +10 -0
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -525
- crawlo/queue/redis_priority_queue.py +519 -370
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -277
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +2 -4
- crawlo/templates/project/items.py.tmpl +13 -17
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -36
- crawlo/templates/project/settings.py.tmpl +109 -111
- crawlo/templates/project/settings_distributed.py.tmpl +156 -159
- crawlo/templates/project/settings_gentle.py.tmpl +170 -176
- crawlo/templates/project/settings_high_performance.py.tmpl +171 -177
- crawlo/templates/project/settings_minimal.py.tmpl +98 -100
- crawlo/templates/project/settings_simple.py.tmpl +168 -174
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -40
- crawlo/templates/spiders_init.py.tmpl +5 -10
- crawlo/tools/__init__.py +86 -189
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +50 -50
- crawlo/utils/batch_processor.py +276 -259
- crawlo/utils/config_manager.py +442 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -0
- crawlo/utils/log.py +79 -79
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -0
- crawlo/utils/mysql_connection_pool.py +197 -0
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +91 -0
- crawlo/utils/redis_connection_pool.py +578 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -256
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -0
- crawlo/utils/selector_helper.py +137 -137
- crawlo/utils/singleton.py +70 -0
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- crawlo/utils/{url.py → url_utils.py} +39 -39
- crawlo-1.4.7.dist-info/METADATA +689 -0
- crawlo-1.4.7.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -275
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -0
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -0
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +55 -0
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +119 -0
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/logging/async_handler.py +0 -181
- crawlo/logging/monitor.py +0 -153
- crawlo/logging/sampler.py +0 -167
- crawlo/tools/authenticated_proxy.py +0 -241
- crawlo/tools/data_formatter.py +0 -226
- crawlo/tools/data_validator.py +0 -181
- crawlo/tools/encoding_converter.py +0 -127
- crawlo/tools/network_diagnostic.py +0 -365
- crawlo/tools/request_tools.py +0 -83
- crawlo/tools/retry_mechanism.py +0 -224
- crawlo/utils/env_config.py +0 -143
- crawlo/utils/large_scale_config.py +0 -287
- crawlo/utils/system.py +0 -11
- crawlo/utils/tools.py +0 -5
- crawlo-1.4.6.dist-info/METADATA +0 -329
- crawlo-1.4.6.dist-info/RECORD +0 -361
- tests/env_config_example.py +0 -134
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
- tests/test_authenticated_proxy.py +0 -142
- tests/test_comprehensive.py +0 -147
- tests/test_dynamic_downloaders_proxy.py +0 -125
- tests/test_dynamic_proxy.py +0 -93
- tests/test_dynamic_proxy_config.py +0 -147
- tests/test_dynamic_proxy_real.py +0 -110
- tests/test_env_config.py +0 -122
- tests/test_framework_env_usage.py +0 -104
- tests/test_large_scale_config.py +0 -113
- tests/test_proxy_api.py +0 -265
- tests/test_real_scenario_proxy.py +0 -196
- tests/tools_example.py +0 -261
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0

tests/ofweek_scrapy/ofweek_scrapy/settings.py
CHANGED
@@ -1,85 +1,85 @@
# Scrapy settings for ofweek_scrapy project

BOT_NAME = 'ofweek_scrapy'

SPIDER_MODULES = ['ofweek_scrapy.spiders']
NEWSPIDER_MODULE = 'ofweek_scrapy.spiders'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
# Use the same concurrency as Crawlo
CONCURRENT_REQUESTS = 8

# Configure a delay for requests for the same website (default: 0)
# Use the same download delay as Crawlo
DOWNLOAD_DELAY = 1.0
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
#COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}

# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
#   'ofweek_scrapy.middlewares.OfweekScrapySpiderMiddleware': 543,
#}

# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
#   'ofweek_scrapy.middlewares.OfweekScrapyDownloaderMiddleware': 543,
#}

# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#   'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
#   'ofweek_scrapy.pipelines.OfweekScrapyPipeline': 300,
#}

# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'

# Set settings whose default value is deprecated to a future-proof value
REQUEST_FINGERPRINTER_IMPLEMENTATION = '2.7'
TWISTED_REACTOR = 'twisted.internet.asyncioreactor.AsyncioSelectorReactor'
FEED_EXPORT_ENCODING = 'utf-8'

# Logging configuration
LOG_LEVEL = 'INFO'

tests/ofweek_scrapy/scrapy.cfg
CHANGED
@@ -1,11 +1,11 @@
# Automatically created by: scrapy startproject
#
# For more information about the [deploy] section see:
# https://scrapyd.readthedocs.io/en/latest/deploy.html

[settings]
default = ofweek_scrapy.settings

[deploy]
#url = http://localhost:6800/
project = ofweek_scrapy

tests/optimized_performance_test.py
CHANGED
@@ -1,212 +1,212 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Performance test script for the optimized Crawlo framework
Used to evaluate the framework's performance after optimization
"""

import asyncio
import time
import sys
import os
from pathlib import Path

# Add the project root directory to the Python path
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

from crawlo.crawler import CrawlerProcess
from crawlo.spider import Spider
from crawlo import Request, Item


class TestItem(Item):
    """Item class used for testing"""
    def __init__(self):
        super().__init__()
        self.url = ''
        self.status = 0
        self.page_id = ''


def create_test_spider_class(spider_name, page_count):
    """Dynamically create a test spider class"""
    class TestSpider(Spider):
        # Set the name attribute explicitly
        name = spider_name

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # Use fewer test pages to speed up the test
            self.start_urls = [f'https://httpbin.org/delay/0?page={i}' for i in range(page_count)]

        def parse(self, response):
            """Simple response parsing"""
            # Return a proper Item object
            item = TestItem()
            item['url'] = response.url
            item['status'] = response.status_code
            item['page_id'] = response.url.split('page=')[-1] if 'page=' in response.url else 'unknown'
            yield item

    return TestSpider


class OptimizedPerformanceTester:
    """Performance tester for the optimized framework"""

    def __init__(self):
        self.results = {}

    def test_initialization_performance(self):
        """Test initialization performance"""
        print("Testing initialization performance...")

        start_time = time.time()
        settings = {
            'CONCURRENT_REQUESTS': 10,
        }
        process = CrawlerProcess(settings=settings)
        end_time = time.time()

        init_time = end_time - start_time
        print(f"Initialization time: {init_time:.4f} seconds")
        return init_time

    async def run_crawler_test(self, test_pages=20, concurrent_requests=10, test_name="performance_test"):
        """Run a crawler performance test"""
        # Configure settings
        settings = {
            'CONCURRENT_REQUESTS': concurrent_requests,
            'DOWNLOAD_DELAY': 0.1,  # reduce the delay to improve performance
            'RANDOMIZE_DOWNLOAD_DELAY': False,
            'SCHEDULER_MAX_QUEUE_SIZE': 5000,  # use the optimized queue size
            'BACKPRESSURE_RATIO': 0.9,  # use the optimized backpressure ratio
        }

        # Create the test spider class
        TestSpiderClass = create_test_spider_class(test_name, test_pages)

        # Register the spider class
        from crawlo.spider import get_global_spider_registry
        registry = get_global_spider_registry()
        registry[TestSpiderClass.name] = TestSpiderClass

        # Create the crawler process
        process = CrawlerProcess(settings=settings)

        # Add the test spider
        crawler = await process.crawl(TestSpiderClass.name)

        # Compute performance metrics
        metrics = crawler.metrics
        duration = metrics.get_total_duration()
        rps = test_pages / duration if duration > 0 else 0

        return {
            'duration': duration,
            'rps': rps,
            'pages': test_pages,
            'concurrent': concurrent_requests
        }

    async def run_scale_tests(self):
        """Run tests at different scales"""
        print("\n=== Running scale tests ===")
        scales = [10, 20, 50]  # reduced scales to speed up the test
        results = []

        for i, scale in enumerate(scales):
            test_name = f"scale_test_{i}_{scale}"
            print(f"Test scale: {scale} pages")
            try:
                result = await self.run_crawler_test(test_pages=scale, test_name=test_name)
                results.append(result)
                print(f" Completion time: {result['duration']:.2f} seconds")
                print(f" Requests per second: {result['rps']:.2f} RPS")
            except Exception as e:
                print(f" Test failed: {e}")
                import traceback
                traceback.print_exc()
            print()

        return results

    async def run_concurrency_tests(self):
        """Run tests at different concurrency levels"""
        print("\n=== Running concurrency tests ===")
        concurrencies = [1, 5, 10, 20]  # include higher concurrency levels
        results = []

        for i, concurrency in enumerate(concurrencies):
            test_name = f"concurrency_test_{i}_{concurrency}"
            print(f"Test concurrency: {concurrency}")
            try:
                result = await self.run_crawler_test(
                    test_pages=50,  # more pages to better exercise concurrency
                    concurrent_requests=concurrency,
                    test_name=test_name
                )
                results.append(result)
                print(f" Completion time: {result['duration']:.2f} seconds")
                print(f" Requests per second: {result['rps']:.2f} RPS")
            except Exception as e:
                print(f" Test failed: {e}")
                import traceback
                traceback.print_exc()
            print()

        return results

    async def run_performance_suite(self):
        """Run the full performance test suite"""
        print("Starting the Crawlo framework post-optimization performance test")
        print("=" * 50)

        # Test initialization performance
        init_time = self.test_initialization_performance()

        # Run the scale tests
        scale_results = await self.run_scale_tests()

        # Run the concurrency tests
        concurrency_results = await self.run_concurrency_tests()

        # Summarize the results
        print("\n=== Performance test summary ===")
        print(f"Initialization time: {init_time:.4f} seconds")

        print("\nScale test results:")
        for result in scale_results:
            if 'duration' in result:
                print(f" {result['pages']} pages: {result['duration']:.2f}s, {result['rps']:.2f} RPS")

        print("\nConcurrency test results:")
        for result in concurrency_results:
            if 'duration' in result:
                print(f" {result['concurrent']} concurrent: {result['duration']:.2f}s, {result['rps']:.2f} RPS")

        return {
            'initialization': init_time,
            'scale_tests': scale_results,
            'concurrency_tests': concurrency_results
        }


async def main():
    """Main entry point"""
    tester = OptimizedPerformanceTester()
    results = await tester.run_performance_suite()

    print("\n=== Test complete ===")
    print("The post-optimization performance test has finished; the results are shown above.")

    # Save the results to a file
    import json
    with open('optimized_performance_test_results.json', 'w', encoding='utf-8') as f:
        json.dump(results, f, ensure_ascii=False, indent=2)
    print("Results saved to optimized_performance_test_results.json")


if __name__ == '__main__':
    asyncio.run(main())