crawlo-1.1.3-py3-none-any.whl → crawlo-1.1.4-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Potentially problematic release: this version of crawlo might be problematic; see the registry page for details.
- crawlo/__init__.py +34 -34
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +285 -285
- crawlo/commands/startproject.py +196 -196
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +279 -279
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +171 -171
- crawlo/core/enhanced_engine.py +189 -189
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +165 -165
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +242 -242
- crawlo/downloader/aiohttp_downloader.py +212 -212
- crawlo/downloader/cffi_downloader.py +251 -251
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +38 -31
- crawlo/extension/health_check.py +142 -0
- crawlo/extension/log_interval.py +58 -49
- crawlo/extension/log_stats.py +82 -44
- crawlo/extension/logging_extension.py +44 -35
- crawlo/extension/memory_monitor.py +89 -0
- crawlo/extension/performance_profiler.py +118 -0
- crawlo/extension/request_recorder.py +108 -0
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +241 -241
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +248 -248
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +200 -200
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +311 -311
- crawlo/network/response.py +271 -271
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +132 -117
- crawlo/pipelines/mysql_pipeline.py +317 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/pipelines/redis_dedup_pipeline.py +162 -162
- crawlo/project.py +153 -153
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +307 -307
- crawlo/queue/redis_priority_queue.py +208 -208
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +278 -244
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +131 -106
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +111 -87
- crawlo/templates/project/pipelines.py.tmpl +97 -341
- crawlo/templates/project/run.py.tmpl +251 -251
- crawlo/templates/project/settings.py.tmpl +279 -250
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +142 -178
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.4.dist-info/METADATA +403 -0
- crawlo-1.1.4.dist-info/RECORD +117 -0
- examples/__init__.py +7 -7
- examples/controlled_spider_example.py +205 -205
- tests/__init__.py +7 -7
- tests/test_final_validation.py +153 -153
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_redis_config.py +28 -28
- tests/test_redis_queue.py +224 -224
- tests/test_request_serialization.py +70 -70
- tests/test_scheduler.py +241 -241
- crawlo-1.1.3.dist-info/METADATA +0 -635
- crawlo-1.1.3.dist-info/RECORD +0 -113
- {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/WHEEL +0 -0
- {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/top_level.txt +0 -0
tests/test_proxy_providers.py
CHANGED
@@ -1,57 +1,57 @@
Lines 1–56 are removed and re-added with identical text (likely a whitespace- or line-ending-only change); only line 57 is unchanged context. The resulting file:

# tests/test_proxy_providers.py
import pytest
import pytest
import respx
from httpx import Response
from crawlo.proxy.providers import StaticProxyProvider, FileProxyProvider, APIProxyProvider
import tempfile
import os


@pytest.mark.asyncio
async def test_static_provider():
    """Test the static proxy provider."""
    provider = StaticProxyProvider(['http://1.1.1.1:8080', 'http://2.2.2.2:8080'])
    proxies = await provider.fetch_proxies()
    assert len(proxies) == 2
    assert 'http://1.1.1.1:8080' in proxies
    assert 'http://2.2.2.2:8080' in proxies


@pytest.mark.asyncio
async def test_file_provider():
    """Test the file-based proxy provider."""
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
        f.write("http://a.com:8080\nhttp://b.com:8080\n")
        temp_path = f.name
    try:
        provider = FileProxyProvider(temp_path)
        proxies = await provider.fetch_proxies()
        assert len(proxies) == 2
        assert 'http://a.com:8080' in proxies
        assert 'http://b.com:8080' in proxies
    finally:
        os.unlink(temp_path)


@pytest.mark.asyncio
@respx.mock
async def test_api_provider():
    """Intercept HTTP requests with respx; simpler and more reliable."""
    # Intercept the GET request
    respx.get("https://api.example.com").mock(
        return_value=Response(
            200,
            json=[
                {"ip": "1.1.1.1", "port": 8080},
                {"ip": "2.2.2.2", "port": 8080}
            ]
        )
    )

    provider = APIProxyProvider(url="https://api.example.com")
    proxies = await provider.fetch_proxies()

    assert len(proxies) == 2
    assert "http://1.1.1.1:8080" in proxies
    assert "http://2.2.2.2:8080" in proxies
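The interception pattern in test_api_provider is ordinary respx usage and can be exercised against httpx directly, independent of crawlo. A minimal sketch (the URL and payload simply mirror the test above; the test name is hypothetical):

import httpx
import respx
from httpx import Response

@respx.mock
def test_respx_interception_sketch():
    # Register a mocked GET route; any httpx call to this URL receives the canned JSON.
    respx.get("https://api.example.com").mock(
        return_value=Response(200, json=[{"ip": "1.1.1.1", "port": 8080}])
    )
    resp = httpx.get("https://api.example.com")
    assert resp.status_code == 200
    assert resp.json()[0]["ip"] == "1.1.1.1"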
tests/test_proxy_stats.py
CHANGED
@@ -1,20 +1,20 @@
Lines 1–19 are removed and re-added with identical text (likely a whitespace- or line-ending-only change); only line 20 is unchanged context. The resulting file:

# tests/test_proxy_stats.py
from crawlo.proxy.stats import ProxyStats


def test_proxy_stats():
    """Test proxy statistics collection."""
    stats = ProxyStats()
    url = 'http://proxy1:8080'

    stats.record(url, 'success')
    stats.record(url, 'success')
    stats.record(url, 'failure')

    assert stats.get(url)['success'] == 2
    assert stats.get(url)['failure'] == 1
    assert stats.get(url)['total'] == 3

    all_data = stats.all()
    assert url in all_data
    assert all_data[url]['success'] == 2
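The test only pins down the interface of ProxyStats: record(url, outcome), get(url) returning per-proxy counters, and all() returning the whole mapping. A minimal stand-in with that shape (an illustration consistent with the assertions above, not the package's actual implementation):

from collections import defaultdict

class ProxyStatsSketch:
    """Hypothetical counter with the record/get/all shape exercised by the test."""

    def __init__(self):
        self._data = defaultdict(lambda: {'success': 0, 'failure': 0, 'total': 0})

    def record(self, url, outcome):
        # outcome is 'success' or 'failure', as in the test above
        self._data[url][outcome] += 1
        self._data[url]['total'] += 1

    def get(self, url):
        return self._data[url]

    def all(self):
        return dict(self._data)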
tests/test_proxy_strategies.py
CHANGED
@@ -1,60 +1,60 @@
Lines 1–59 are removed and re-added with identical text (likely a whitespace- or line-ending-only change); only line 60 is unchanged context. The resulting file:

# tests/test_proxy_strategies.py
import pytest
from crawlo import Request
from crawlo.proxy.strategies import STRATEGIES


@pytest.fixture
def mock_proxies():
    """Proxy list used by the tests."""
    return [
        {'url': 'http://p1:8080'},
        {'url': 'http://p2:8080'},
        {'url': 'http://p3:8080'},
    ]


@pytest.fixture
def mock_stats():
    """Usage statistics used by the tests."""
    return {
        'http://p1:8080': {'total': 10},
        'http://p2:8080': {'total': 5},
        'http://p3:8080': {'total': 1},
    }


@pytest.fixture
def mock_request():
    """Request object used by the tests."""
    return Request("https://example.com")


def test_random_strategy(mock_proxies, mock_request, mock_stats):
    """Test the random strategy."""
    strategy = STRATEGIES['random']
    chosen = strategy(mock_proxies, mock_request, mock_stats)
    assert chosen in [p['url'] for p in mock_proxies]


def test_least_used_strategy(mock_proxies, mock_request, mock_stats):
    """Test the least-used strategy."""
    strategy = STRATEGIES['least_used']
    chosen = strategy(mock_proxies, mock_request, mock_stats)
    assert chosen == 'http://p3:8080'  # total=1


def test_domain_rule_strategy(mock_proxies, mock_request, mock_stats):
    """Test the domain-rule strategy."""
    from crawlo.proxy.strategies.domain_rule import domain_rule_strategy
    request = Request("https://taobao.com/item/123")
    rules = {'taobao.com': 'http://special:8080'}

    # Monkey-patch to guarantee a fallback strategy exists
    old_strategy = STRATEGIES['least_used']
    try:
        STRATEGIES['least_used'] = lambda p, r, s: 'http://fallback:8080'
        chosen = domain_rule_strategy(mock_proxies, request, mock_stats, rules)
        assert chosen == 'http://special:8080'
    finally:
        STRATEGIES['least_used'] = old_strategy
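These tests also fix the strategy call signature: a strategy receives (proxies, request, stats) and returns a proxy URL, and 'least_used' picks the proxy with the smallest recorded total. A sketch of that selection rule (illustrative only, not the package's code):

def least_used_sketch(proxies, request, stats):
    # proxies: list of {'url': ...} dicts; stats: {url: {'total': n}} as in the fixtures above
    return min(proxies, key=lambda p: stats.get(p['url'], {}).get('total', 0))['url']

# With the fixtures above this returns 'http://p3:8080', matching test_least_used_strategy.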
tests/test_redis_config.py
CHANGED
@@ -1,29 +1,29 @@
Lines 1–28 are removed and re-added with identical text (likely a whitespace- or line-ending-only change); only line 29 is unchanged context. The resulting file:

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Quick test of the Redis connection configuration fix.
"""
import asyncio
from crawlo.queue.redis_priority_queue import RedisPriorityQueue
from crawlo.settings.default_settings import REDIS_URL

async def test_redis_config():
    """Test the fixed Redis configuration."""
    print(f"🔍 Testing Redis configuration: {REDIS_URL}")

    try:
        queue = RedisPriorityQueue(redis_url=REDIS_URL)
        await queue.connect()
        print("✅ Redis connection succeeded!")
        await queue.close()
        return True
    except Exception as e:
        print(f"❌ Redis connection failed: {e}")
        return False

if __name__ == "__main__":
    success = asyncio.run(test_redis_config())
    if success:
        print("🎉 Configuration fix succeeded! You can now run your spider.")
    else:
        print("❌ The configuration still has problems; check the Redis service status.")
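Because of the __main__ guard, this file doubles as a standalone connectivity check; assuming a reachable Redis at the project's default REDIS_URL, it can be run directly:

python tests/test_redis_config.py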