crawlo-1.2.0-py3-none-any.whl → crawlo-1.2.2-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +81 -65
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +143 -133
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +292 -292
- crawlo/commands/startproject.py +418 -418
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +252 -252
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +354 -354
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +143 -143
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +220 -220
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +213 -213
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -280
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -32
- crawlo/middleware/download_delay.py +105 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +116 -0
- crawlo/middleware/proxy.py +366 -272
- crawlo/middleware/request_ignore.py +88 -30
- crawlo/middleware/response_code.py +164 -18
- crawlo/middleware/response_filter.py +138 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +61 -61
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +187 -187
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +337 -337
- crawlo/queue/redis_priority_queue.py +298 -298
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +226 -219
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -109
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/run.py.tmpl +45 -45
- crawlo/templates/project/settings.py.tmpl +327 -326
- crawlo/templates/project/settings_distributed.py.tmpl +119 -119
- crawlo/templates/project/settings_gentle.py.tmpl +94 -94
- crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
- crawlo/templates/project/settings_simple.py.tmpl +68 -68
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +143 -141
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +260 -260
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +359 -359
- crawlo/utils/env_config.py +105 -105
- crawlo/utils/error_handler.py +125 -125
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +284 -284
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +334 -334
- crawlo/utils/redis_key_validator.py +199 -199
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/METADATA +692 -697
- crawlo-1.2.2.dist-info/RECORD +220 -0
- examples/__init__.py +7 -7
- examples/aiohttp_settings.py +42 -0
- examples/curl_cffi_settings.py +41 -0
- examples/default_header_middleware_example.py +107 -0
- examples/default_header_spider_example.py +129 -0
- examples/download_delay_middleware_example.py +160 -0
- examples/httpx_settings.py +42 -0
- examples/multi_downloader_proxy_example.py +81 -0
- examples/offsite_middleware_example.py +55 -0
- examples/offsite_spider_example.py +107 -0
- examples/proxy_spider_example.py +166 -0
- examples/request_ignore_middleware_example.py +51 -0
- examples/request_ignore_spider_example.py +99 -0
- examples/response_code_middleware_example.py +52 -0
- examples/response_filter_middleware_example.py +67 -0
- examples/tong_hua_shun_settings.py +62 -0
- examples/tong_hua_shun_spider.py +170 -0
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +173 -0
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +159 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +222 -0
- tests/test_downloader_proxy_compatibility.py +269 -0
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_offsite_middleware.py +222 -0
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +265 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +122 -0
- tests/test_proxy_middleware_enhanced.py +217 -0
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +196 -0
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +183 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +350 -0
- tests/test_response_filter_middleware.py +428 -0
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +242 -0
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.2.0.dist-info/RECORD +0 -190
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/WHEEL +0 -0
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/top_level.txt +0 -0
tests/test_queue_manager_redis_key.py
CHANGED
@@ -1,177 +1,177 @@

Lines 1-176 are removed and re-added with identical text; only line 177, sys.exit(exit_code), carries over unchanged. The file body is shown once below:

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
QueueManager Redis key test script
Verifies that QueueManager passes the module_name parameter correctly when creating a RedisPriorityQueue.
"""
import asyncio
import sys
import os
import traceback

# Add the project root to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType


class MockSettings:
    """Mock settings object"""
    def __init__(self, project_name="test_project"):
        self.project_name = project_name
        self.REDIS_URL = "redis://127.0.0.1:6379/15"  # use a test database
        self.REDIS_TTL = 0
        self.CLEANUP_FP = 0
        self.FILTER_DEBUG = True
        self.LOG_LEVEL = "INFO"
        self.DECODE_RESPONSES = True

    def get(self, key, default=None):
        if key == 'PROJECT_NAME':
            return self.project_name
        elif key == 'REDIS_URL':
            return self.REDIS_URL
        elif key == 'FILTER_DEBUG':
            return self.FILTER_DEBUG
        elif key == 'LOG_LEVEL':
            return self.LOG_LEVEL
        elif key == 'DECODE_RESPONSES':
            return self.DECODE_RESPONSES
        return default

    def get_bool(self, key, default=False):
        if key == 'FILTER_DEBUG':
            return self.FILTER_DEBUG
        elif key == 'DECODE_RESPONSES':
            return self.DECODE_RESPONSES
        elif key == 'CLEANUP_FP':
            return self.CLEANUP_FP
        return default

    def get_int(self, key, default=0):
        if key == 'REDIS_TTL':
            return self.REDIS_TTL
        elif key == 'REDIS_PORT':
            return 6379
        elif key == 'REDIS_DB':
            return 0
        elif key == 'SCHEDULER_MAX_QUEUE_SIZE':
            return 1000
        elif key == 'QUEUE_MAX_RETRIES':
            return 3
        elif key == 'QUEUE_TIMEOUT':
            return 300
        return default


async def test_queue_manager_redis_key():
    """Test the Redis key naming used when QueueManager creates a Redis queue."""
    print("🔍 Testing the Redis key naming used when QueueManager creates a Redis queue...")

    try:
        # Exercise several queue-name configurations
        test_cases = [
            {
                "queue_name": "crawlo:books_distributed:queue:requests",
                "expected_module_name": "books_distributed",
                "description": "standard project name"
            },
            {
                "queue_name": "crawlo:api_data_collection:queue:requests",
                "expected_module_name": "api_data_collection",
                "description": "API data-collection project"
            },
            {
                "queue_name": "crawlo:test_project:queue:requests",
                "expected_module_name": "test_project",
                "description": "test project"
            },
            {
                "queue_name": "simple_queue_name",
                "expected_module_name": "simple_queue_name",
                "description": "plain queue name"
            },
            {
                "queue_name": "",
                "expected_module_name": "default",
                "description": "empty queue name"
            }
        ]

        for i, test_case in enumerate(test_cases, 1):
            print(f"  {i}. Testing {test_case['description']}...")

            # Build the QueueConfig
            config = QueueConfig(
                queue_type=QueueType.REDIS,
                redis_url="redis://127.0.0.1:6379/15",
                queue_name=test_case["queue_name"],
                max_queue_size=1000,
                max_retries=3,
                timeout=300
            )

            # Create the QueueManager
            queue_manager = QueueManager(config)

            # Create the queue instance
            queue = await queue_manager._create_queue(QueueType.REDIS)

            # Verify module_name is set correctly
            assert hasattr(queue, 'module_name'), "RedisPriorityQueue is missing the module_name attribute"
            assert queue.module_name == test_case["expected_module_name"], \
                f"module_name mismatch: {queue.module_name} != {test_case['expected_module_name']}"

            # Verify the queue names follow the naming convention
            expected_queue_name = f"crawlo:{queue.module_name}:queue:requests"
            expected_processing_queue = f"crawlo:{queue.module_name}:queue:processing"
            expected_failed_queue = f"crawlo:{queue.module_name}:queue:failed"

            assert queue.queue_name == expected_queue_name, \
                f"queue name mismatch: {queue.queue_name} != {expected_queue_name}"
            assert queue.processing_queue == expected_processing_queue, \
                f"processing queue name mismatch: {queue.processing_queue} != {expected_processing_queue}"
            assert queue.failed_queue == expected_failed_queue, \
                f"failed queue name mismatch: {queue.failed_queue} != {expected_failed_queue}"

            print(f"    ✅ module_name: {queue.module_name}")
            print(f"    ✅ queue name: {queue.queue_name}")
            print(f"    ✅ processing queue name: {queue.processing_queue}")
            print(f"    ✅ failed queue name: {queue.failed_queue}")

        print("✅ QueueManager Redis key naming test passed!")
        return True

    except Exception as e:
        print(f"❌ QueueManager Redis key naming test failed: {e}")
        traceback.print_exc()
        return False


async def main():
    """Main test entry point."""
    print("🚀 Starting the QueueManager Redis key naming test...")
    print("=" * 50)

    try:
        success = await test_queue_manager_redis_key()

        print("=" * 50)
        if success:
            print("🎉 All tests passed! QueueManager passes module_name correctly.")
        else:
            print("❌ Test failed, please check the implementation.")
            return 1

    except Exception as e:
        print("=" * 50)
        print(f"❌ Exception while running the test: {e}")
        traceback.print_exc()
        return 1

    return 0


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
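The test cases above pin down the key naming contract this script checks: a fully qualified queue name of the form crawlo:<module>:queue:requests yields <module> as the module name, a plain name passes through unchanged, and an empty name falls back to "default". Below is a minimal sketch of that mapping as a hypothetical standalone helper; the real parsing lives inside crawlo's queue classes and is not shown in this diff, so treat it only as an illustration of the behavior the assertions require.

def extract_module_name(queue_name: str) -> str:
    """Illustrative sketch of the module-name mapping the test asserts."""
    if not queue_name:
        return "default"  # empty queue name falls back to "default"
    parts = queue_name.split(":")
    # "crawlo:<module>:queue:requests" -> "<module>"
    if len(parts) >= 2 and parts[0] == "crawlo":
        return parts[1]
    return queue_name  # plain names pass through unchanged


assert extract_module_name("crawlo:books_distributed:queue:requests") == "books_distributed"
assert extract_module_name("simple_queue_name") == "simple_queue_name"
assert extract_module_name("") == "default"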
tests/test_real_scenario_proxy.py
ADDED
@@ -0,0 +1,196 @@

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Real-scenario proxy test
========================
Tests the proxy functionality with user-supplied headers, cookies, and URL.
"""

import asyncio
import aiohttp
import sys
import os
from urllib.parse import urlparse

# Add the project root to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

# User-supplied request headers
HEADERS = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "priority": "u=0, i",
    "sec-ch-ua": "\"Chromium\";v=\"140\", \"Not=A?Brand\";v=\"24\", \"Google Chrome\";v=\"140\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "document",
    "sec-fetch-mode": "navigate",
    "sec-fetch-site": "none",
    "sec-fetch-user": "?1",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36"
}

# User-supplied cookies
COOKIES = {
    "Hm_lvt_722143063e4892925903024537075d0d": "1758071793",
    "Hm_lvt_929f8b362150b1f77b477230541dbbc2": "1758071793",
    "historystock": "600699",
    "spversion": "20130314",
    "cid": "f9bc812da2c3a7ddf6d5df1fa2d497091758076438",
    "u_ukey": "A10702B8689642C6BE607730E11E6E4A",
    "u_uver": "1.0.0",
    "u_dpass": "Qk3U07X7SHGKa0AcRUg1R1DVWbPioD9Eg270bdikvlwWWXexbsXnRsQNt%2B04iXwdHi80LrSsTFH9a%2B6rtRvqGg%3D%3D",
    "u_did": "E3ED337393E1429DA56E380DD00B3CCD",
    "u_ttype": "WEB",
    "user_status": "0",
    "ttype": "WEB",
    "log": "",
    "Hm_lvt_69929b9dce4c22a060bd22d703b2a280": "1758079404,1758113068,1758157144",
    "HMACCOUNT": "08DF0D235A291EAA",
    "Hm_lvt_78c58f01938e4d85eaf619eae71b4ed1": "1758071793,1758113068,1758157144",
    "user": "MDpteF9lNXRkY3RpdHo6Ok5vbmU6NTAwOjgxNzYyOTAwNDo3LDExMTExMTExMTExLDQwOzQ0LDExLDQwOzYsMSw0MDs1LDEsNDA7MSwxMDEsNDA7MiwxLDQwOzMsMSw0MDs1LDEsNDA7OCwwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMSw0MDsxMDIsMSw0MDoxNjo6OjgwNzYyOTAwNDoxNzU4MTYxNTE0Ojo6MTc1ODA3MjA2MDo2MDQ4MDA6MDoxYTQ0NmFlNDY4M2VmZWY3YmNjYTczY2U3ODZmZTNiODg6ZGVmYXVsdF81OjA%3D",
    "userid": "807629004",
    "u_name": "mx_e5tdctitz",
    "escapename": "mx_e5tdctitz",
    "ticket": "85eea709becdd924d7eb975351de629e",
    "utk": "8959c4c6b6f5fb7628864feab15473f4",
    "sess_tk": "eyJ0eXAiOiJKV1QiLCJhbGciOiJFUzI1NiIsImtpZCI6InNlc3NfdGtfMSIsImJ0eSI6InNlc3NfdGsifQ.eyJqdGkiOiI4ODNiZmU4NmU3M2NhN2NjN2JlZmVmODM0NmFlNDZhNDEiLCJpYXQiOjE3NTgxNjE1MTQsImV4cCI6MTc1ODc2NjMxNCwic3ViIjoiODA3NjI5MDA0IiwiaXNzIjoidXBhc3MuaXdlbmNhaS5jb20iLCJhdWQiOiIyMDIwMTExODUyODg5MDcyIiwiYWN0Ijoib2ZjIiwiY3VocyI6ImIwNTcyZDVjOWNlNDg0MGFlOWYxYTlhYjU3NGZkNjQyYjgzNmExN2E3Y2NhZjk4ZWRiNzI5ZmJkOWFjOGVkYmYifQ.UBNIzxGvQQtXSiIcB_1JJl-EuAc1S9j2LcTLXjwy4ImhDDbh1oJvyRdDUrXdUpwBpIyx5zgYqgt_3FEhY_iayw",
    "cuc": "ap2eap3gg99g",
    "Hm_lvt_f79b64788a4e377c608617fba4c736e2": "1758161692",
    "v": "A1glI4rWhPCQGqh0MvA0ioufKY3vQbzLHqWQT5JJpBNGLfazOlGMW261YNrh",
    "Hm_lpvt_78c58f01938e4d85eaf619eae71b4ed1": "1758163145",
    "Hm_lpvt_f79b64788a4e377c608617fba4c736e2": "1758163145",
    "Hm_lpvt_69929b9dce4c22a060bd22d703b2a280": "1758163145"
}

# User-supplied URL
URL = "https://stock.10jqka.com.cn/20240315/c655957791.shtml"


async def test_without_proxy():
    """Access the URL directly, without a proxy."""
    print("=== Direct access without a proxy ===")
    print(f"URL: {URL}")

    try:
        timeout = aiohttp.ClientTimeout(total=15)
        async with aiohttp.ClientSession(timeout=timeout, headers=HEADERS, cookies=COOKIES) as session:
            async with session.get(URL) as response:
                print(f"Status code: {response.status}")
                if response.status == 200:
                    print("✅ Direct access succeeded")
                    return True
                else:
                    print(f"❌ Direct access failed, status code: {response.status}")
                    return False
    except Exception as e:
        print(f"❌ Direct access error: {e}")
        return False


async def test_with_proxy(proxy_url):
    """Access the URL through a proxy."""
    print(f"\n=== Access through a proxy ===")
    print(f"Proxy address: {proxy_url}")
    print(f"URL: {URL}")

    try:
        timeout = aiohttp.ClientTimeout(total=15)
        async with aiohttp.ClientSession(timeout=timeout, headers=HEADERS, cookies=COOKIES) as session:
            # Handle proxies with embedded credentials
            if "@" in proxy_url and "://" in proxy_url:
                parsed = urlparse(proxy_url)
                if parsed.username and parsed.password:
                    # Extract the credentials
                    auth = aiohttp.BasicAuth(parsed.username, parsed.password)
                    # Rebuild the proxy URL without them
                    clean_proxy = f"{parsed.scheme}://{parsed.hostname}"
                    if parsed.port:
                        clean_proxy += f":{parsed.port}"

                    print(f"Using authenticated proxy: {clean_proxy}")
                    async with session.get(URL, proxy=clean_proxy, proxy_auth=auth) as response:
                        print(f"Status code: {response.status}")
                        if response.status == 200:
                            print("✅ Proxy access succeeded")
                            return True
                        else:
                            print(f"❌ Proxy access failed, status code: {response.status}")
                            return False
            else:
                # Use the proxy URL as-is
                print(f"Using proxy: {proxy_url}")
                async with session.get(URL, proxy=proxy_url) as response:
                    print(f"Status code: {response.status}")
                    if response.status == 200:
                        print("✅ Proxy access succeeded")
                        return True
                    else:
                        print(f"❌ Proxy access failed, status code: {response.status}")
                        return False
    except Exception as e:
        print(f"❌ Proxy access error: {e}")
        return False


async def get_proxy_from_api():
    """Fetch a proxy from the proxy API."""
    proxy_api = 'http://test.proxy.api:8080/proxy/getitem/'
    print(f"\n=== Fetching a proxy from the API ===")
    print(f"API address: {proxy_api}")

    try:
        timeout = aiohttp.ClientTimeout(total=10)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(proxy_api) as response:
                print(f"Status code: {response.status}")

                if response.status == 200:
                    data = await response.json()
                    print(f"Response data: {data}")

                    # Extract the proxy URL
                    if isinstance(data, dict) and data.get('status') == 0:
                        proxy_info = data.get('proxy', {})
                        if isinstance(proxy_info, dict):
                            proxy_url = proxy_info.get('https') or proxy_info.get('http')
                            if proxy_url:
                                print(f"Extracted proxy URL: {proxy_url}")
                                return proxy_url
                print("Could not obtain a proxy URL")
                return None
    except Exception as e:
        print(f"API request error: {e}")
        return None


async def main():
    """Main test entry point."""
    print("Starting the real-scenario proxy test...")
    print("=" * 50)

    # 1. First try accessing without a proxy
    direct_success = await test_without_proxy()

    # 2. Fetch a proxy from the proxy API
    proxy_url = await get_proxy_from_api()

    if not proxy_url:
        print("\n❌ Could not obtain a proxy, ending the test")
        return

    # 3. Access through the proxy
    proxy_success = await test_with_proxy(proxy_url)

    # 4. Summarize the results
    print(f"\n{'='*30}")
    print("Test results:")
    print(f"Direct access: {'succeeded' if direct_success else 'failed'}")
    print(f"Proxy access: {'succeeded' if proxy_success else 'failed'}")


if __name__ == "__main__":
    asyncio.run(main())
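The test never shows the proxy API's actual response, but the parsing in get_proxy_from_api() implies its shape: a top-level status equal to 0 on success and a proxy object keyed by scheme, with the https entry preferred over http. Below is a sketch with placeholder values; only the "status" and "proxy" fields are implied by the code, and the address and credentials are invented, not taken from the diff.

# Hypothetical payload matching what get_proxy_from_api() expects;
# every value here is a placeholder.
sample_response = {
    "status": 0,  # 0 signals success
    "proxy": {
        "http": "http://user:pass@203.0.113.10:8000",
        "https": "http://user:pass@203.0.113.10:8000",
    },
}

proxy_info = sample_response.get("proxy", {})
proxy_url = proxy_info.get("https") or proxy_info.get("http")  # https preferred
assert proxy_url == "http://user:pass@203.0.113.10:8000"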
tests/test_redis_config.py
CHANGED
@@ -1,29 +1,29 @@

Lines 1-28 are removed and re-added with identical text; line 29 is unchanged. The file body is shown once below:

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Quick test of the Redis connection configuration fix
"""
import asyncio
from crawlo.queue.redis_priority_queue import RedisPriorityQueue
from crawlo.settings.default_settings import REDIS_URL

async def test_redis_config():
    """Test the fixed Redis configuration."""
    print(f"🔍 Testing Redis configuration: {REDIS_URL}")

    try:
        queue = RedisPriorityQueue(redis_url=REDIS_URL)
        await queue.connect()
        print("✅ Redis connection succeeded!")
        await queue.close()
        return True
    except Exception as e:
        print(f"❌ Redis connection failed: {e}")
        return False

if __name__ == "__main__":
    success = asyncio.run(test_redis_config())
    if success:
        print("🎉 Configuration fix works! You can now run your spider.")
    else:
        print("❌ The configuration still has problems; check the Redis service status.")
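For comparison, the same connectivity smoke test can be run without crawlo at all. Below is a minimal probe using redis-py's asyncio client; it assumes the redis package is installed (version 5 or later, for aclose()), and the URL is a placeholder for whatever REDIS_URL resolves to in your settings.

import asyncio

import redis.asyncio as redis  # assumes redis>=5


async def ping(url: str = "redis://127.0.0.1:6379/0") -> bool:
    """Return True if the Redis server behind `url` answers PING."""
    client = redis.from_url(url)
    try:
        return await client.ping()
    except Exception as e:
        print(f"Redis unreachable: {e}")
        return False
    finally:
        await client.aclose()


if __name__ == "__main__":
    print("OK" if asyncio.run(ping()) else "FAILED")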