crawlo 1.1.1-py3-none-any.whl → 1.1.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +34 -33
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +152 -126
- crawlo/commands/list.py +156 -147
- crawlo/commands/run.py +285 -285
- crawlo/commands/startproject.py +196 -111
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +187 -0
- crawlo/config.py +280 -0
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +171 -158
- crawlo/core/enhanced_engine.py +190 -0
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +166 -57
- crawlo/crawler.py +1028 -495
- crawlo/downloader/__init__.py +242 -78
- crawlo/downloader/aiohttp_downloader.py +212 -199
- crawlo/downloader/cffi_downloader.py +251 -241
- crawlo/downloader/httpx_downloader.py +259 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +82 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +154 -37
- crawlo/filters/aioredis_filter.py +242 -150
- crawlo/filters/memory_filter.py +269 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +248 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +125 -90
- crawlo/mode_manager.py +201 -0
- crawlo/network/__init__.py +21 -7
- crawlo/network/request.py +311 -203
- crawlo/network/response.py +271 -166
- crawlo/pipelines/__init__.py +22 -13
- crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +317 -0
- crawlo/pipelines/database_dedup_pipeline.py +225 -0
- crawlo/pipelines/json_pipeline.py +219 -0
- crawlo/pipelines/memory_dedup_pipeline.py +116 -0
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/pipelines/redis_dedup_pipeline.py +163 -0
- crawlo/project.py +153 -153
- crawlo/queue/__init__.py +0 -0
- crawlo/queue/pqueue.py +37 -0
- crawlo/queue/queue_manager.py +308 -0
- crawlo/queue/redis_priority_queue.py +209 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +245 -167
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +639 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +30 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +87 -76
- crawlo/templates/project/pipelines.py.tmpl +342 -64
- crawlo/templates/project/run.py.tmpl +252 -0
- crawlo/templates/project/settings.py.tmpl +251 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +178 -32
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/controlled_spider_mixin.py +440 -0
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +287 -0
- crawlo/utils/large_scale_helper.py +344 -0
- crawlo/utils/log.py +128 -128
- crawlo/utils/queue_helper.py +176 -0
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +220 -0
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.3.dist-info/METADATA +635 -0
- crawlo-1.1.3.dist-info/RECORD +113 -0
- examples/__init__.py +7 -7
- examples/controlled_spider_example.py +205 -0
- tests/__init__.py +7 -7
- tests/test_final_validation.py +154 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_redis_config.py +29 -0
- tests/test_redis_queue.py +225 -0
- tests/test_request_serialization.py +71 -0
- tests/test_scheduler.py +242 -0
- crawlo/pipelines/mysql_batch_pipline.py +0 -273
- crawlo/utils/pqueue.py +0 -174
- crawlo-1.1.1.dist-info/METADATA +0 -220
- crawlo-1.1.1.dist-info/RECORD +0 -100
- examples/baidu_spider/__init__.py +0 -7
- examples/baidu_spider/demo.py +0 -94
- examples/baidu_spider/items.py +0 -46
- examples/baidu_spider/middleware.py +0 -49
- examples/baidu_spider/pipeline.py +0 -55
- examples/baidu_spider/run.py +0 -27
- examples/baidu_spider/settings.py +0 -121
- examples/baidu_spider/spiders/__init__.py +0 -7
- examples/baidu_spider/spiders/bai_du.py +0 -61
- examples/baidu_spider/spiders/miit.py +0 -159
- examples/baidu_spider/spiders/sina.py +0 -79
- {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
- {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
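The additions centre on the new crawlo/queue package (pqueue.py, queue_manager.py, redis_priority_queue.py) and a Redis-backed scheduler path, which the test files reproduced below exercise. As a quick orientation, here is a minimal usage sketch of RedisPriorityQueue. It is assembled purely from the calls made in tests/test_redis_queue.py below (connect, put, get, qsize, ack, close) and should be read as an illustration, not as documented API.

import asyncio

from crawlo.network.request import Request
from crawlo.queue.redis_priority_queue import RedisPriorityQueue


async def demo():
    # Constructor arguments and method names mirror tests/test_redis_queue.py;
    # treat them as assumptions about the public surface, not verified documentation.
    queue = RedisPriorityQueue(
        redis_url="redis://localhost:6379/0",
        queue_name="demo:crawlo:requests",
    )
    await queue.connect()

    await queue.put(Request(url="https://example.com", priority=5), priority=5)
    print("queued:", await queue.qsize())

    request = await queue.get(timeout=2.0)  # the tests treat None as a timeout
    if request:
        await queue.ack(request)            # acknowledge successful processing
    await queue.close()


if __name__ == "__main__":
    asyncio.run(demo())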
tests/test_redis_queue.py
ADDED

@@ -0,0 +1,225 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Redis distributed queue test script
+Used to diagnose and fix distributed-queue problems
+"""
+import asyncio
+import sys
+import traceback
+import time
+from crawlo.queue.redis_priority_queue import RedisPriorityQueue
+from crawlo.network.request import Request
+
+
+async def test_redis_connection():
+    """Test the Redis connection"""
+    print("🔍 1. Testing Redis connection...")
+
+    # Try different Redis URL formats
+    test_urls = [
+        "redis://localhost:6379/0",
+        "redis://:oscar&0503@127.0.0.1:6379/0",  # with password
+        "redis://127.0.0.1:6379/0",  # without password
+    ]
+
+    for redis_url in test_urls:
+        try:
+            print(f"   Trying to connect: {redis_url}")
+            queue = RedisPriorityQueue(redis_url=redis_url)
+            await queue.connect()
+            print(f"   ✅ Connected: {redis_url}")
+            await queue.close()
+            return redis_url
+        except Exception as e:
+            print(f"   ❌ Connection failed: {redis_url} - {e}")
+
+    raise ConnectionError("All Redis URLs failed to connect")
+
+
+async def test_queue_operations(redis_url):
+    """Test basic queue operations"""
+    print("🔍 2. Testing basic queue operations...")
+
+    queue = RedisPriorityQueue(
+        redis_url=redis_url,
+        queue_name="test:crawlo:requests",
+        max_retries=2
+    )
+
+    try:
+        await queue.connect()
+
+        # Test put
+        test_request = Request(url="https://example.com", priority=5)
+        print(f"   📤 Putting request: {test_request.url}")
+
+        success = await queue.put(test_request, priority=5)
+        if success:
+            print("   ✅ Put succeeded")
+        else:
+            print("   ❌ Put failed")
+            return False
+
+        # Test queue size
+        size = await queue.qsize()
+        print(f"   📊 Queue size: {size}")
+
+        # Test get
+        print("   📥 Getting request...")
+        retrieved_request = await queue.get(timeout=2.0)
+
+        if retrieved_request:
+            print(f"   ✅ Get succeeded: {retrieved_request.url}")
+            # Test ack
+            await queue.ack(retrieved_request)
+            print("   ✅ ACK succeeded")
+        else:
+            print("   ❌ Get failed (timeout)")
+            return False
+
+        return True
+
+    except Exception as e:
+        print(f"   ❌ Queue operation failed: {e}")
+        traceback.print_exc()
+        return False
+    finally:
+        await queue.close()
+
+
+async def test_serialization():
+    """Test Request serialization"""
+    print("🔍 3. Testing Request serialization...")
+
+    try:
+        import pickle
+        from crawlo.network.request import Request
+
+        # Create a test request
+        request = Request(
+            url="https://example.com",
+            method="GET",
+            headers={"User-Agent": "Test"},
+            meta={"test": "data"},
+            priority=5
+        )
+
+        # Test serialization
+        serialized = pickle.dumps(request)
+        print(f"   ✅ Serialized successfully, size: {len(serialized)} bytes")
+
+        # Test deserialization
+        deserialized = pickle.loads(serialized)
+        print(f"   ✅ Deserialized successfully: {deserialized.url}")
+
+        return True
+
+    except Exception as e:
+        print(f"   ❌ Serialization failed: {e}")
+        traceback.print_exc()
+        return False
+
+
+async def test_concurrent_operations(redis_url):
+    """Test concurrent operations"""
+    print("🔍 4. Testing concurrent operations...")
+
+    async def producer(queue, start_id):
+        """Producer"""
+        try:
+            for i in range(5):
+                request = Request(url=f"https://example{start_id + i}.com", priority=i)
+                await queue.put(request, priority=i)
+                await asyncio.sleep(0.1)
+            print(f"   ✅ Producer {start_id} finished")
+        except Exception as e:
+            print(f"   ❌ Producer {start_id} failed: {e}")
+
+    async def consumer(queue, consumer_id):
+        """Consumer"""
+        consumed = 0
+        try:
+            for _ in range(3):  # each consumer handles 3 requests
+                request = await queue.get(timeout=5.0)
+                if request:
+                    await queue.ack(request)
+                    consumed += 1
+                    await asyncio.sleep(0.05)
+                else:
+                    break
+            print(f"   ✅ Consumer {consumer_id} processed {consumed} requests")
+        except Exception as e:
+            print(f"   ❌ Consumer {consumer_id} failed: {e}")
+
+    queue = RedisPriorityQueue(
+        redis_url=redis_url,
+        queue_name="test:concurrent:requests"
+    )
+
+    try:
+        await queue.connect()
+
+        # Run producers and consumers concurrently
+        tasks = [
+            producer(queue, 0),
+            producer(queue, 10),
+            consumer(queue, 1),
+            consumer(queue, 2),
+        ]
+
+        await asyncio.gather(*tasks, return_exceptions=True)
+
+        # Check the remaining queue size
+        final_size = await queue.qsize()
+        print(f"   📊 Final queue size: {final_size}")

+        return True
+
+    except Exception as e:
+        print(f"   ❌ Concurrency test failed: {e}")
+        return False
+    finally:
+        await queue.close()
+
+
+async def main():
+    """Main test entry point"""
+    print("🚀 Starting Redis distributed queue diagnostics...")
+    print("=" * 50)
+
+    try:
+        # 1. Test the connection
+        redis_url = await test_redis_connection()
+
+        # 2. Test serialization
+        if not await test_serialization():
+            return
+
+        # 3. Test basic operations
+        if not await test_queue_operations(redis_url):
+            return
+
+        # 4. Test concurrent operations
+        if not await test_concurrent_operations(redis_url):
+            return
+
+        print("=" * 50)
+        print("🎉 All tests passed! The Redis queue is working correctly")
+
+    except Exception as e:
+        print("=" * 50)
+        print(f"❌ Diagnostics failed: {e}")
+        traceback.print_exc()
+
+        # Offer possible fixes
+        print("\n🔧 Possible fixes:")
+        print("1. Check that the Redis server is running: redis-server")
+        print("2. Check the Redis password configuration")
+        print("3. Check the firewall and port 6379")
+        print("4. Install the Redis client: pip install redis")
+        print("5. Check the bind setting in the Redis configuration file")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
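Note that the script above is written to be executed directly (python tests/test_redis_queue.py) rather than collected by a test runner. If you want pytest to drive it, a thin adapter along the following lines should work; pytest and pytest-asyncio are assumptions on the reader's side, not dependencies declared by this package, and a reachable local Redis is still required.

# Hypothetical pytest adapter for the diagnostic script above (not part of this release).
import pytest

import tests.test_redis_queue as redis_queue_diag


@pytest.mark.asyncio
async def test_redis_queue_roundtrip():
    # Fails fast with ConnectionError if no Redis instance is reachable.
    redis_url = await redis_queue_diag.test_redis_connection()
    assert await redis_queue_diag.test_serialization()
    assert await redis_queue_diag.test_queue_operations(redis_url)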
tests/test_request_serialization.py
ADDED

@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Test the fix for the Request serialization issue
+"""
+import pickle
+import sys
+sys.path.insert(0, "..")
+
+from crawlo.network.request import Request
+from crawlo.core.scheduler import Scheduler
+from unittest.mock import Mock
+
+# Simulate a Request that carries a logger
+class TestRequest(Request):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        # Attach logger attributes to reproduce the problem
+        from crawlo.utils.log import get_logger
+        self.logger = get_logger("test_request")
+        self.meta['spider_logger'] = get_logger("spider_logger")
+
+def test_request_serialization():
+    """Test Request serialization"""
+    print("🔍 Testing the Request serialization fix...")
+
+    # Create a request that carries a logger
+    request = TestRequest(
+        url="https://example.com",
+        meta={"test": "data"}  # Mock objects removed
+    )
+
+    print(f"   📦 Original request: {request}")
+    print(f"   🔧 Request has logger: {hasattr(request, 'logger')}")
+    print(f"   🔧 meta has logger: {'spider_logger' in request.meta}")
+
+    # Create a mock scheduler to exercise the cleanup
+    class MockScheduler:
+        def _deep_clean_loggers(self, request):
+            return Scheduler._deep_clean_loggers(self, request)
+        def _remove_logger_from_dict(self, d):
+            return Scheduler._remove_logger_from_dict(self, d)
+
+    scheduler = MockScheduler()
+
+    # Run the cleanup
+    scheduler._deep_clean_loggers(request)
+
+    print(f"   🧹 Has logger after cleanup: {hasattr(request, 'logger')}")
+    print(f"   🧹 meta has logger after cleanup: {'spider_logger' in request.meta}")
+
+    # Test serialization
+    try:
+        serialized = pickle.dumps(request)
+        print(f"   ✅ Serialized successfully, size: {len(serialized)} bytes")
+
+        # Test deserialization
+        deserialized = pickle.loads(serialized)
+        print(f"   ✅ Deserialized successfully: {deserialized}")
+        return True
+
+    except Exception as e:
+        print(f"   ❌ Serialization failed: {e}")
+        return False
+
+if __name__ == "__main__":
+    success = test_request_serialization()
+    if success:
+        print("🎉 Request serialization fix works!")
+    else:
+        print("❌ The serialization problem is still unresolved")
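The test above leans on two private Scheduler helpers, _deep_clean_loggers and _remove_logger_from_dict, whose bodies are not shown in this diff. As a rough mental model only, not the actual crawlo implementation, a cleanup of that kind usually drops logger objects from the request and its meta dict before pickling, because loggers hold locks and stream handles that cannot be serialized:

import logging

# Illustrative guess at what a logger-stripping cleanup does; the real
# Scheduler._deep_clean_loggers in crawlo may be structured differently.
def strip_loggers(request):
    if isinstance(getattr(request, "logger", None), logging.Logger):
        del request.logger
    meta = getattr(request, "meta", None)
    if isinstance(meta, dict):
        for key in [k for k, v in meta.items() if isinstance(v, logging.Logger)]:
            del meta[key]
    return request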
tests/test_scheduler.py
ADDED
@@ -0,0 +1,242 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Test the fixed Scheduler distributed-queue functionality
+"""
+import asyncio
+import sys
+from unittest.mock import Mock
+from crawlo.core.scheduler import Scheduler
+from crawlo.network.request import Request
+from crawlo.utils.log import get_logger
+
+
+class MockCrawler:
+    """Mock Crawler object"""
+    def __init__(self, use_redis=True):
+        self.settings = MockSettings(use_redis)
+        self.stats = Mock()
+
+
+class MockSettings:
+    """Mock Settings object"""
+    def __init__(self, use_redis=True):
+        self.use_redis = use_redis
+
+    def get(self, key, default=None):
+        config = {
+            'FILTER_CLASS': 'crawlo.filters.memory_filter.MemoryFilter',
+            'LOG_LEVEL': 'INFO',
+            'DEPTH_PRIORITY': 1,
+            'SCHEDULER_MAX_QUEUE_SIZE': 100,
+            'SCHEDULER_QUEUE_NAME': 'test:crawlo:requests',
+            'FILTER_DEBUG': False,
+            'PROJECT_NAME': 'test',
+        }
+        if self.use_redis:
+            config['REDIS_URL'] = 'redis://localhost:6379/0'
+
+        return config.get(key, default)
+
+    def get_int(self, key, default=0):
+        value = self.get(key, default)
+        return int(value) if value is not None else default
+
+    def get_bool(self, key, default=False):
+        value = self.get(key, default)
+        if isinstance(value, bool):
+            return value
+        if isinstance(value, str):
+            return value.lower() in ('true', '1', 'yes')
+        return bool(value) if value is not None else default
+
+
+class MockFilter:
+    """Mock dedup filter"""
+    def __init__(self):
+        self.seen = set()
+
+    @classmethod
+    def create_instance(cls, crawler):
+        return cls()
+
+    async def requested(self, request):
+        if request.url in self.seen:
+            return True
+        self.seen.add(request.url)
+        return False
+
+    def log_stats(self, request):
+        pass
+
+
+async def test_memory_scheduler():
+    """Test the in-memory scheduler"""
+    print("🔍 Testing the in-memory scheduler...")
+
+    crawler = MockCrawler(use_redis=False)
+    scheduler = Scheduler.create_instance(crawler)
+
+    # Mock the dedup filter
+    scheduler.dupe_filter = MockFilter()
+
+    scheduler.open()
+
+    # Test enqueue
+    request1 = Request(url="https://example1.com")
+    request2 = Request(url="https://example2.com")
+
+    success1 = await scheduler.enqueue_request(request1)
+    success2 = await scheduler.enqueue_request(request2)
+
+    print(f"   📤 Enqueue results: {success1}, {success2}")
+    print(f"   📊 Queue size: {len(scheduler)}")
+
+    # Test dequeue
+    req1 = await scheduler.next_request()
+    req2 = await scheduler.next_request()
+
+    print(f"   📥 Dequeue results: {req1.url if req1 else None}, {req2.url if req2 else None}")
+    print(f"   📊 Remaining size: {len(scheduler)}")
+
+    await scheduler.close()
+    print("   ✅ In-memory scheduler test finished")
+
+
+async def test_redis_scheduler():
+    """Test the Redis scheduler"""
+    print("🔍 Testing the Redis scheduler...")
+
+    try:
+        crawler = MockCrawler(use_redis=True)
+        scheduler = Scheduler.create_instance(crawler)
+
+        # Mock the dedup filter
+        scheduler.dupe_filter = MockFilter()
+
+        scheduler.open()
+
+        # Test enqueue
+        request1 = Request(url="https://redis-test1.com", priority=5)
+        request2 = Request(url="https://redis-test2.com", priority=3)
+        request3 = Request(url="https://redis-test3.com", priority=8)
+
+        success1 = await scheduler.enqueue_request(request1)
+        success2 = await scheduler.enqueue_request(request2)
+        success3 = await scheduler.enqueue_request(request3)
+
+        print(f"   📤 Enqueue results: {success1}, {success2}, {success3}")
+        print(f"   📊 Queue size: {len(scheduler)}")
+
+        # Give the Redis operations a moment to complete
+        await asyncio.sleep(0.5)
+
+        # Test dequeue (should come back in priority order)
+        req1 = await scheduler.next_request()
+        req2 = await scheduler.next_request()
+        req3 = await scheduler.next_request()
+
+        print("   📥 Dequeue results (by priority):")
+        if req1:
+            print(f"      {req1.url} (priority: {getattr(req1, 'priority', 0)})")
+        if req2:
+            print(f"      {req2.url} (priority: {getattr(req2, 'priority', 0)})")
+        if req3:
+            print(f"      {req3.url} (priority: {getattr(req3, 'priority', 0)})")
+
+        print(f"   📊 Remaining size: {len(scheduler)}")
+
+        await scheduler.close()
+        print("   ✅ Redis scheduler test finished")
+
+    except Exception as e:
+        print(f"   ❌ Redis scheduler test failed: {e}")
+        import traceback
+        traceback.print_exc()
+
+
+async def test_concurrent_redis():
+    """Test concurrent Redis operations"""
+    print("🔍 Testing concurrent Redis operations...")
+
+    async def producer(scheduler, name, count):
+        """Producer"""
+        for i in range(count):
+            request = Request(url=f"https://{name}-{i}.com", priority=i % 10)
+            await scheduler.enqueue_request(request)
+            await asyncio.sleep(0.01)
+        print(f"   ✅ Producer {name} finished ({count} requests)")
+
+    async def consumer(scheduler, name, count):
+        """Consumer"""
+        consumed = 0
+        for _ in range(count):
+            request = await scheduler.next_request()
+            if request:
+                consumed += 1
+                await asyncio.sleep(0.005)
+            else:
+                break
+        print(f"   ✅ Consumer {name} processed {consumed} requests")
+
+    try:
+        crawler = MockCrawler(use_redis=True)
+        scheduler = Scheduler.create_instance(crawler)
+        scheduler.dupe_filter = MockFilter()
+        scheduler.open()
+
+        # Run producers and consumers concurrently
+        tasks = [
+            producer(scheduler, "producer-1", 5),
+            producer(scheduler, "producer-2", 5),
+            consumer(scheduler, "consumer-1", 3),
+            consumer(scheduler, "consumer-2", 3),
+            consumer(scheduler, "consumer-3", 4),
+        ]
+
+        await asyncio.gather(*tasks, return_exceptions=True)
+
+        print(f"   📊 Final queue size: {len(scheduler)}")
+
+        await scheduler.close()
+        print("   ✅ Concurrency test finished")
+
+    except Exception as e:
+        print(f"   ❌ Concurrency test failed: {e}")
+        import traceback
+        traceback.print_exc()
+
+
+async def main():
+    """Main test entry point"""
+    print("🚀 Starting tests for the fixed Scheduler...")
+    print("=" * 50)
+
+    try:
+        # 1. Test the in-memory scheduler
+        await test_memory_scheduler()
+        print()
+
+        # 2. Test the Redis scheduler
+        await test_redis_scheduler()
+        print()
+
+        # 3. Test concurrent operations
+        await test_concurrent_redis()
+
+        print("=" * 50)
+        print("🎉 All Scheduler tests finished!")
+
+    except Exception as e:
+        print("=" * 50)
+        print(f"❌ Test failed: {e}")
+        import traceback
+        traceback.print_exc()
+
+
+if __name__ == "__main__":
+    # Set the log level to avoid excessive output
+    import logging
+    logging.getLogger('crawlo').setLevel(logging.WARNING)
+
+    asyncio.run(main())
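For context, MockSettings above mirrors the configuration keys the scheduler reads (FILTER_CLASS, SCHEDULER_QUEUE_NAME, SCHEDULER_MAX_QUEUE_SIZE, REDIS_URL, and so on). Assuming a real project sets the same keys, with crawlo/templates/project/settings.py.tmpl in this release as the authoritative reference, a distributed-mode settings module would look roughly like this:

# settings.py sketch; key names are taken from the MockSettings used above,
# check crawlo/templates/project/settings.py.tmpl for the real defaults.
PROJECT_NAME = "myproject"
LOG_LEVEL = "INFO"
DEPTH_PRIORITY = 1

# Point the scheduler at the Redis-backed priority queue.
REDIS_URL = "redis://localhost:6379/0"
SCHEDULER_QUEUE_NAME = "myproject:crawlo:requests"
SCHEDULER_MAX_QUEUE_SIZE = 100

# Dedup filter; the tests use the in-memory filter, while
# crawlo.filters.aioredis_filter is the distributed counterpart shipped in this release.
FILTER_CLASS = "crawlo.filters.memory_filter.MemoryFilter"
FILTER_DEBUG = False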