crawlo 1.3.3__py3-none-any.whl → 1.3.5__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +87 -63
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -323
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +46 -2
- crawlo/core/engine.py +439 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +257 -256
- crawlo/crawler.py +639 -1167
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +228 -226
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +61 -52
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +28 -0
- crawlo/factories/base.py +69 -0
- crawlo/factories/crawler.py +104 -0
- crawlo/factories/registry.py +85 -0
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -0
- crawlo/initialization/__init__.py +40 -0
- crawlo/initialization/built_in.py +426 -0
- crawlo/initialization/context.py +142 -0
- crawlo/initialization/core.py +194 -0
- crawlo/initialization/phases.py +149 -0
- crawlo/initialization/registry.py +146 -0
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -22
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +38 -0
- crawlo/logging/config.py +97 -0
- crawlo/logging/factory.py +129 -0
- crawlo/logging/manager.py +112 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +212 -187
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -318
- crawlo/pipelines/pipeline_manager.py +76 -75
- crawlo/pipelines/redis_dedup_pipeline.py +166 -166
- crawlo/project.py +327 -325
- crawlo/queue/pqueue.py +43 -37
- crawlo/queue/queue_manager.py +503 -379
- crawlo/queue/redis_priority_queue.py +326 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +321 -225
- crawlo/settings/setting_manager.py +214 -198
- crawlo/spider/__init__.py +657 -639
- crawlo/stats_collector.py +73 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +139 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +168 -267
- crawlo/templates/project/settings_distributed.py.tmpl +167 -180
- crawlo/templates/project/settings_gentle.py.tmpl +167 -61
- crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
- crawlo/templates/project/settings_minimal.py.tmpl +66 -35
- crawlo/templates/project/settings_simple.py.tmpl +165 -102
- crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
- crawlo/templates/run.py.tmpl +34 -38
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +10 -0
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +388 -388
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +365 -0
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +26 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -124
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +80 -200
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/METADATA +1126 -1020
- crawlo-1.3.5.dist-info/RECORD +288 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -107
- tests/baidu_performance_test.py +109 -0
- tests/baidu_test.py +60 -0
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +213 -0
- tests/comprehensive_test.py +82 -0
- tests/comprehensive_testing_summary.md +187 -0
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +70 -0
- tests/debug_framework_logger.py +85 -0
- tests/debug_log_config.py +127 -0
- tests/debug_log_levels.py +64 -0
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +234 -0
- tests/distributed_test.py +67 -0
- tests/distributed_test_debug.py +77 -0
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_command_test_report.md +0 -0
- tests/final_comprehensive_test.py +152 -0
- tests/final_log_test.py +261 -0
- tests/final_validation_test.py +183 -0
- tests/fix_log_test.py +143 -0
- tests/framework_performance_test.py +203 -0
- tests/log_buffering_test.py +112 -0
- tests/log_generation_timing_test.py +154 -0
- tests/optimized_performance_test.py +212 -0
- tests/performance_comparison.py +246 -0
- tests/queue_blocking_test.py +114 -0
- tests/queue_test.py +90 -0
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +139 -0
- tests/scrapy_comparison/scrapy_test.py +134 -0
- tests/simple_command_test.py +120 -0
- tests/simple_crawlo_test.py +128 -0
- tests/simple_log_test.py +58 -0
- tests/simple_log_test2.py +138 -0
- tests/simple_optimization_test.py +129 -0
- tests/simple_spider_test.py +50 -0
- tests/simple_test.py +48 -0
- tests/spider_log_timing_test.py +178 -0
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +231 -0
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +179 -0
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +175 -0
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +80 -0
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +246 -0
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +253 -0
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +67 -0
- tests/test_framework_startup.py +65 -0
- tests/test_get_component_logger.py +84 -0
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +113 -0
- tests/test_large_scale_helper.py +236 -0
- tests/test_logging_system.py +283 -0
- tests/test_mode_change.py +73 -0
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +116 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +42 -0
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +139 -0
- tests/verify_debug.py +52 -0
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +112 -0
- crawlo-1.3.3.dist-info/RECORD +0 -219
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/WHEEL +0 -0
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/entry_points.txt +0 -0
- {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/top_level.txt +0 -0
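Notable structural additions in 1.3.5 are the new crawlo/factories, crawlo/initialization, and crawlo/logging packages and the top-level crawlo/framework.py. Several of the new test files below exercise the logging package through configure_logging and get_logger; a minimal sketch of that API, using only the call shapes visible in those tests (the full signature of configure_logging is not shown in this diff):

    from crawlo.logging import configure_logging, get_logger

    # Route log output to both the console and a file
    # (keyword arguments exactly as used in the new tests)
    configure_logging(level='INFO', file_path='logs/app.log',
                      console_enabled=True, file_enabled=True)
    logger = get_logger('my.component')
    logger.info('hello from crawlo.logging')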
tests/framework_performance_test.py (new file)
@@ -0,0 +1,203 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Crawlo framework performance test script
+Used to evaluate framework performance in different scenarios
+"""
+
+import asyncio
+import time
+import tracemalloc
+import threading
+import sys
+import os
+from pathlib import Path
+
+# Add the project root to the Python path
+project_root = Path(__file__).parent.parent
+sys.path.insert(0, str(project_root))
+
+from crawlo.crawler import CrawlerProcess
+from crawlo.spider import Spider
+
+
+def create_test_spider_class(spider_name, page_count):
+    """Dynamically create a test spider class"""
+    class TestSpider(Spider):
+        # Set the name attribute explicitly
+        name = spider_name
+
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+            # Use fewer test pages to speed up the test
+            self.start_urls = [f'https://httpbin.org/delay/0?page={i}' for i in range(page_count)]
+
+        def parse(self, response):
+            """Parse the response in the simplest way"""
+            yield {
+                'url': response.url,
+                'status': response.status_code,  # Fix: use status_code instead of status
+                'page_id': response.url.split('page=')[-1] if 'page=' in response.url else 'unknown'
+            }
+
+    return TestSpider
+
+
+class PerformanceTester:
+    """Performance tester"""
+
+    def __init__(self):
+        self.results = {}
+
+    def test_initialization_performance(self):
+        """Test initialization performance"""
+        print("Testing initialization performance...")
+
+        start_time = time.time()
+        settings = {
+            'CONCURRENT_REQUESTS': 10,
+        }
+        process = CrawlerProcess(settings=settings)
+        end_time = time.time()
+
+        init_time = end_time - start_time
+        print(f"Initialization time: {init_time:.4f} s")
+        return init_time
+
+    async def run_crawler_test(self, test_pages=20, concurrent_requests=10, test_name="performance_test"):
+        """Run a crawler performance test"""
+        # Configure settings
+        settings = {
+            'CONCURRENT_REQUESTS': concurrent_requests,
+            'DOWNLOAD_DELAY': 0,
+            'RANDOMIZE_DOWNLOAD_DELAY': False,
+            'SCHEDULER_MAX_QUEUE_SIZE': 1000,
+            'BACKPRESSURE_RATIO': 0.8,
+        }
+
+        # Create the test spider class
+        TestSpiderClass = create_test_spider_class(test_name, test_pages)
+
+        # Register the spider class
+        from crawlo.spider import get_global_spider_registry
+        registry = get_global_spider_registry()
+        registry[TestSpiderClass.name] = TestSpiderClass
+
+        # Create the crawler process
+        process = CrawlerProcess(settings=settings)
+
+        # Add the test spider
+        crawler = await process.crawl(TestSpiderClass.name)
+
+        # Compute performance metrics
+        metrics = crawler.metrics
+        duration = metrics.get_total_duration()
+        rps = test_pages / duration if duration > 0 else 0
+
+        return {
+            'duration': duration,
+            'rps': rps,
+            'pages': test_pages,
+            'concurrent': concurrent_requests
+        }
+
+    async def run_scale_tests(self):
+        """Run tests at different scales"""
+        print("\n=== Running scale tests ===")
+        scales = [10, 20, 50]  # Smaller scales to keep the test fast
+        results = []
+
+        for i, scale in enumerate(scales):
+            test_name = f"scale_test_{i}_{scale}"
+            print(f"Test scale: {scale} pages")
+            try:
+                result = await self.run_crawler_test(test_pages=scale, test_name=test_name)
+                results.append(result)
+                print(f"  Completion time: {result['duration']:.2f} s")
+                print(f"  Requests per second: {result['rps']:.2f} RPS")
+            except Exception as e:
+                print(f"  Test failed: {e}")
+                import traceback
+                traceback.print_exc()
+            print()
+
+        return results
+
+    async def run_concurrency_tests(self):
+        """Run tests at different concurrency levels"""
+        print("\n=== Running concurrency tests ===")
+        concurrencies = [1, 5, 10]  # Lower concurrency to avoid overloading the test server
+        results = []
+
+        for i, concurrency in enumerate(concurrencies):
+            test_name = f"concurrency_test_{i}_{concurrency}"
+            print(f"Test concurrency: {concurrency}")
+            try:
+                result = await self.run_crawler_test(
+                    test_pages=20,
+                    concurrent_requests=concurrency,
+                    test_name=test_name
+                )
+                results.append(result)
+                print(f"  Completion time: {result['duration']:.2f} s")
+                print(f"  Requests per second: {result['rps']:.2f} RPS")
+            except Exception as e:
+                print(f"  Test failed: {e}")
+                import traceback
+                traceback.print_exc()
+            print()
+
+        return results
+
+    async def run_performance_suite(self):
+        """Run the full performance test suite"""
+        print("Starting Crawlo framework performance tests")
+        print("=" * 50)
+
+        # Test initialization performance
+        init_time = self.test_initialization_performance()
+
+        # Run scale tests
+        scale_results = await self.run_scale_tests()
+
+        # Run concurrency tests
+        concurrency_results = await self.run_concurrency_tests()
+
+        # Summarize results
+        print("\n=== Performance test summary ===")
+        print(f"Initialization time: {init_time:.4f} s")
+
+        print("\nScale test results:")
+        for result in scale_results:
+            if 'duration' in result:
+                print(f"  {result['pages']} pages: {result['duration']:.2f}s, {result['rps']:.2f} RPS")
+
+        print("\nConcurrency test results:")
+        for result in concurrency_results:
+            if 'duration' in result:
+                print(f"  {result['concurrent']} concurrent: {result['duration']:.2f}s, {result['rps']:.2f} RPS")
+
+        return {
+            'initialization': init_time,
+            'scale_tests': scale_results,
+            'concurrency_tests': concurrency_results
+        }
+
+
+async def main():
+    """Main entry point"""
+    tester = PerformanceTester()
+    results = await tester.run_performance_suite()
+
+    print("\n=== Tests finished ===")
+    print("Performance testing is complete; results are shown above.")
+
+    # Save results to a file
+    import json
+    with open('performance_test_results.json', 'w', encoding='utf-8') as f:
+        json.dump(results, f, ensure_ascii=False, indent=2)
+    print("Results saved to performance_test_results.json")
+
+
+if __name__ == '__main__':
+    asyncio.run(main())
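The script is directly runnable (python tests/framework_performance_test.py) and writes its summary to performance_test_results.json. A minimal programmatic use of the tester, assuming the script above is importable from the repository root, might look like:

    import asyncio
    from tests.framework_performance_test import PerformanceTester

    # Run a single small test instead of the full suite
    result = asyncio.run(PerformanceTester().run_crawler_test(
        test_pages=10, concurrent_requests=5, test_name='adhoc'))
    print(f"{result['rps']:.2f} RPS over {result['pages']} pages")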
tests/log_buffering_test.py (new file)
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 -*-
+"""
+Test log buffering behavior
+"""
+
+import os
+import sys
+from pathlib import Path
+
+# Add the project root to the Python path
+project_root = Path(__file__).parent.parent
+sys.path.insert(0, str(project_root))
+
+from crawlo.logging import configure_logging as configure, get_logger
+import logging
+
+
+def test_log_buffering():
+    """Test log buffering behavior"""
+    print("=== Testing log buffering behavior ===")
+
+    # Set the log file path
+    log_file = "logs/buffering_test2.log"
+
+    # Remove any old log file
+    if os.path.exists(log_file):
+        os.remove(log_file)
+
+    # Configure the logging system
+    configure(
+        level='INFO',
+        file_path=log_file,
+        console_enabled=True,
+        file_enabled=True
+    )
+
+    # Get a logger
+    logger = get_logger('buffering.test2')
+
+    print("1. Checking handler auto-flush settings...")
+    for handler in logger.handlers:
+        handler_type = type(handler).__name__
+        print(f"  Handler type: {handler_type}")
+        if hasattr(handler, 'stream'):
+            print(f"    Stream type: {type(handler.stream).__name__}")
+            if hasattr(handler.stream, 'flush'):
+                print(f"    Supports flush method: True")
+
+        # Check whether there is an auto-flush setting
+        if hasattr(handler, 'flush'):
+            print(f"    Handler has a flush method")
+
+    print("\n2. Writing log records and forcing a flush...")
+    logger.info("Test log 1")
+    logger.info("Test log 2")
+
+    # Force-flush all handlers
+    for handler in logger.handlers:
+        if hasattr(handler, 'flush'):
+            handler.flush()
+            print(f"  Flushed {type(handler).__name__}")
+
+    # Check the file contents
+    if os.path.exists(log_file):
+        with open(log_file, 'r', encoding='utf-8') as f:
+            content = f.read()
+            print(f"\nLog file contents:")
+            print(f"  Lines: {len(content.splitlines())}")
+            print(f"  Content: {repr(content)}")
+
+    print("\n3. Testing buffering at different log levels...")
+    logger.debug("DEBUG message (should not appear)")
+    logger.info("INFO message")
+    logger.warning("WARNING message")
+    logger.error("ERROR message")
+
+    # Force a flush again
+    for handler in logger.handlers:
+        if hasattr(handler, 'flush'):
+            handler.flush()
+
+    # Check the final file contents
+    if os.path.exists(log_file):
+        with open(log_file, 'r', encoding='utf-8') as f:
+            lines = f.readlines()
+            print(f"\nFinal log file contents:")
+            print(f"  Total lines: {len(lines)}")
+            for i, line in enumerate(lines):
+                print(f"  {i+1}: {line.strip()}")
+
+
+def main():
+    """Main entry point"""
+    print("Starting the log buffering test...")
+
+    try:
+        test_log_buffering()
+
+        print("\n=== All tests finished ===")
+
+    except Exception as e:
+        print(f"\nError during testing: {e}")
+        import traceback
+        traceback.print_exc()
+        return 1
+
+    return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main())
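The script flushes handlers one by one; for completeness, the standard library also provides logging.shutdown(), which flushes and closes every handler it knows about (and is normally invoked automatically at interpreter exit):

    import logging

    logging.shutdown()  # flush and close all logging handlers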
tests/log_generation_timing_test.py (new file)
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 -*-
+"""
+Test when the log file is created
+"""
+
+import os
+import sys
+import time
+from pathlib import Path
+
+# Add the project root to the Python path
+project_root = Path(__file__).parent.parent
+sys.path.insert(0, str(project_root))
+
+from crawlo.logging import configure_logging as configure, get_logger
+
+
+def test_log_file_generation_timing():
+    """Test when the log file is created"""
+    print("=== Testing log file creation timing ===")
+
+    # Set the log file path
+    log_file = "logs/timing_test.log"
+
+    # Make sure the log directory exists
+    os.makedirs("logs", exist_ok=True)
+
+    # Remove any old log file
+    if os.path.exists(log_file):
+        os.remove(log_file)
+        print(f"Removed old log file: {log_file}")
+
+    print(f"Log file path: {log_file}")
+    print(f"Log file exists: {os.path.exists(log_file)}")
+
+    # Configure the logging system
+    print("\n1. Configuring the logging system...")
+    configure(
+        level='INFO',
+        file_path=log_file,
+        console_enabled=True,
+        file_enabled=True
+    )
+
+    print(f"Log file exists after configuration: {os.path.exists(log_file)}")
+
+    # Get a logger
+    print("\n2. Getting a logger...")
+    logger = get_logger('timing.test')
+    print(f"Log file exists after getting the logger: {os.path.exists(log_file)}")
+
+    # Write a log record immediately
+    print("\n3. Writing a log record immediately...")
+    logger.info("First log record, written immediately")
+    print(f"Log file exists after the first record: {os.path.exists(log_file)}")
+
+    # Check the file contents
+    if os.path.exists(log_file):
+        with open(log_file, 'r', encoding='utf-8') as f:
+            content = f.read()
+            print(f"Log file line count: {len(content.splitlines())}")
+            print(f"Log file size: {os.path.getsize(log_file)} bytes")
+
+    # Wait briefly
+    print("\n4. Waiting 1 second...")
+    time.sleep(1)
+    print(f"Log file exists after waiting: {os.path.exists(log_file)}")
+
+    # Write a few more records
+    print("\n5. Writing a few more records...")
+    for i in range(5):
+        logger.info(f"Test log record {i+1}")
+        time.sleep(0.1)
+
+    print(f"Log file exists after multiple records: {os.path.exists(log_file)}")
+
+    # Check the final file contents
+    if os.path.exists(log_file):
+        with open(log_file, 'r', encoding='utf-8') as f:
+            lines = f.readlines()
+            print(f"\nFinal log file contents:")
+            print(f"  Total lines: {len(lines)}")
+            print(f"  File size: {os.path.getsize(log_file)} bytes")
+            if lines:
+                print(f"  First line: {lines[0].strip()}")
+                print(f"  Last line: {lines[-1].strip()}")
+
+    print("\n=== Test finished ===")
+
+
+def test_buffering_behavior():
+    """Test log buffering behavior"""
+    print("\n=== Testing log buffering behavior ===")
+
+    # Set the log file path
+    log_file = "logs/buffering_test.log"
+
+    # Remove any old log file
+    if os.path.exists(log_file):
+        os.remove(log_file)
+
+    # Configure the logging system
+    configure(
+        level='INFO',
+        file_path=log_file,
+        console_enabled=True,
+        file_enabled=True
+    )
+
+    logger = get_logger('buffering.test')
+
+    print("1. Checking the file right after writing a record...")
+    logger.info("Buffering test record 1")
+    print(f"  File exists after write: {os.path.exists(log_file)}")
+
+    if os.path.exists(log_file):
+        with open(log_file, 'r', encoding='utf-8') as f:
+            content = f.read()
+            print(f"  File content: '{content.strip()}'")
+
+    print("2. Forcing a flush and checking again...")
+    # Grab the file handlers and force a flush
+    for handler in logger.handlers:
+        if hasattr(handler, 'flush'):
+            handler.flush()
+
+    if os.path.exists(log_file):
+        with open(log_file, 'r', encoding='utf-8') as f:
+            content = f.read()
+            print(f"  File content after flush: '{content.strip()}'")
+
+
+def main():
+    """Main entry point"""
+    print("Starting the log file creation timing test...")
+
+    try:
+        test_log_file_generation_timing()
+        test_buffering_behavior()
+
+        print("\n=== All tests finished ===")
+
+    except Exception as e:
+        print(f"\nError during testing: {e}")
+        import traceback
+        traceback.print_exc()
+        return 1
+
+    return 0
+
+
+if __name__ == '__main__':
+    sys.exit(main())
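One stdlib detail relevant to the timing checks above: logging.FileHandler opens (and thus creates) its file at construction time unless delay=True is passed, in which case creation is deferred until the first record is emitted. Whether crawlo's file handler sets delay is not visible in this diff; a plain stdlib illustration:

    import logging

    # With delay=True the file does not appear on disk until the first emit()
    handler = logging.FileHandler('logs/timing_demo.log', delay=True)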
tests/optimized_performance_test.py (new file)
@@ -0,0 +1,212 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Performance test script for the optimized Crawlo framework
+Used to evaluate framework performance after optimization
+"""
+
+import asyncio
+import time
+import sys
+import os
+from pathlib import Path
+
+# Add the project root to the Python path
+project_root = Path(__file__).parent.parent
+sys.path.insert(0, str(project_root))
+
+from crawlo.crawler import CrawlerProcess
+from crawlo.spider import Spider
+from crawlo import Request, Item
+
+
+class TestItem(Item):
+    """Item class used by the test"""
+    def __init__(self):
+        super().__init__()
+        self.url = ''
+        self.status = 0
+        self.page_id = ''
+
+
+def create_test_spider_class(spider_name, page_count):
+    """Dynamically create a test spider class"""
+    class TestSpider(Spider):
+        # Set the name attribute explicitly
+        name = spider_name
+
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+            # Use fewer test pages to speed up the test
+            self.start_urls = [f'https://httpbin.org/delay/0?page={i}' for i in range(page_count)]
+
+        def parse(self, response):
+            """Parse the response in the simplest way"""
+            # Return a proper Item object
+            item = TestItem()
+            item['url'] = response.url
+            item['status'] = response.status_code
+            item['page_id'] = response.url.split('page=')[-1] if 'page=' in response.url else 'unknown'
+            yield item
+
+    return TestSpider
+
+
+class OptimizedPerformanceTester:
+    """Performance tester for the optimized framework"""
+
+    def __init__(self):
+        self.results = {}
+
+    def test_initialization_performance(self):
+        """Test initialization performance"""
+        print("Testing initialization performance...")
+
+        start_time = time.time()
+        settings = {
+            'CONCURRENT_REQUESTS': 10,
+        }
+        process = CrawlerProcess(settings=settings)
+        end_time = time.time()
+
+        init_time = end_time - start_time
+        print(f"Initialization time: {init_time:.4f} s")
+        return init_time
+
+    async def run_crawler_test(self, test_pages=20, concurrent_requests=10, test_name="performance_test"):
+        """Run a crawler performance test"""
+        # Configure settings
+        settings = {
+            'CONCURRENT_REQUESTS': concurrent_requests,
+            'DOWNLOAD_DELAY': 0.1,  # Lower delay to improve throughput
+            'RANDOMIZE_DOWNLOAD_DELAY': False,
+            'SCHEDULER_MAX_QUEUE_SIZE': 5000,  # Optimized queue size
+            'BACKPRESSURE_RATIO': 0.9,  # Optimized backpressure ratio
+        }
+
+        # Create the test spider class
+        TestSpiderClass = create_test_spider_class(test_name, test_pages)
+
+        # Register the spider class
+        from crawlo.spider import get_global_spider_registry
+        registry = get_global_spider_registry()
+        registry[TestSpiderClass.name] = TestSpiderClass
+
+        # Create the crawler process
+        process = CrawlerProcess(settings=settings)
+
+        # Add the test spider
+        crawler = await process.crawl(TestSpiderClass.name)
+
+        # Compute performance metrics
+        metrics = crawler.metrics
+        duration = metrics.get_total_duration()
+        rps = test_pages / duration if duration > 0 else 0
+
+        return {
+            'duration': duration,
+            'rps': rps,
+            'pages': test_pages,
+            'concurrent': concurrent_requests
+        }
+
+    async def run_scale_tests(self):
+        """Run tests at different scales"""
+        print("\n=== Running scale tests ===")
+        scales = [10, 20, 50]  # Smaller scales to keep the test fast
+        results = []
+
+        for i, scale in enumerate(scales):
+            test_name = f"scale_test_{i}_{scale}"
+            print(f"Test scale: {scale} pages")
+            try:
+                result = await self.run_crawler_test(test_pages=scale, test_name=test_name)
+                results.append(result)
+                print(f"  Completion time: {result['duration']:.2f} s")
+                print(f"  Requests per second: {result['rps']:.2f} RPS")
+            except Exception as e:
+                print(f"  Test failed: {e}")
+                import traceback
+                traceback.print_exc()
+            print()
+
+        return results
+
+    async def run_concurrency_tests(self):
+        """Run tests at different concurrency levels"""
+        print("\n=== Running concurrency tests ===")
+        concurrencies = [1, 5, 10, 20]  # Include higher concurrency levels
+        results = []
+
+        for i, concurrency in enumerate(concurrencies):
+            test_name = f"concurrency_test_{i}_{concurrency}"
+            print(f"Test concurrency: {concurrency}")
+            try:
+                result = await self.run_crawler_test(
+                    test_pages=50,  # More pages to better exercise concurrency
+                    concurrent_requests=concurrency,
+                    test_name=test_name
+                )
+                results.append(result)
+                print(f"  Completion time: {result['duration']:.2f} s")
+                print(f"  Requests per second: {result['rps']:.2f} RPS")
+            except Exception as e:
+                print(f"  Test failed: {e}")
+                import traceback
+                traceback.print_exc()
+            print()
+
+        return results
+
+    async def run_performance_suite(self):
+        """Run the full performance test suite"""
+        print("Starting post-optimization Crawlo framework performance tests")
+        print("=" * 50)
+
+        # Test initialization performance
+        init_time = self.test_initialization_performance()
+
+        # Run scale tests
+        scale_results = await self.run_scale_tests()
+
+        # Run concurrency tests
+        concurrency_results = await self.run_concurrency_tests()
+
+        # Summarize results
+        print("\n=== Performance test summary ===")
+        print(f"Initialization time: {init_time:.4f} s")
+
+        print("\nScale test results:")
+        for result in scale_results:
+            if 'duration' in result:
+                print(f"  {result['pages']} pages: {result['duration']:.2f}s, {result['rps']:.2f} RPS")
+
+        print("\nConcurrency test results:")
+        for result in concurrency_results:
+            if 'duration' in result:
+                print(f"  {result['concurrent']} concurrent: {result['duration']:.2f}s, {result['rps']:.2f} RPS")
+
+        return {
+            'initialization': init_time,
+            'scale_tests': scale_results,
+            'concurrency_tests': concurrency_results
+        }
+
+
+async def main():
+    """Main entry point"""
+    tester = OptimizedPerformanceTester()
+    results = await tester.run_performance_suite()
+
+    print("\n=== Tests finished ===")
+    print("Post-optimization performance testing is complete; results are shown above.")
+
+    # Save results to a file
+    import json
+    with open('optimized_performance_test_results.json', 'w', encoding='utf-8') as f:
+        json.dump(results, f, ensure_ascii=False, indent=2)
+    print("Results saved to optimized_performance_test_results.json")
+
+
+if __name__ == '__main__':
+    asyncio.run(main())
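Compared with the baseline script, the optimized variant changes three settings in run_crawler_test (values taken from the two scripts above): DOWNLOAD_DELAY 0 → 0.1, SCHEDULER_MAX_QUEUE_SIZE 1000 → 5000, and BACKPRESSURE_RATIO 0.8 → 0.9. It also yields TestItem instances instead of plain dicts and raises the concurrency ladder to [1, 5, 10, 20] over 50 pages per run.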