crawlo-1.1.1-py3-none-any.whl → crawlo-1.1.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (128)
  1. crawlo/__init__.py +34 -33
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +152 -126
  7. crawlo/commands/list.py +156 -147
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -111
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +187 -0
  12. crawlo/config.py +280 -0
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -158
  15. crawlo/core/enhanced_engine.py +190 -0
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +166 -57
  18. crawlo/crawler.py +1028 -495
  19. crawlo/downloader/__init__.py +242 -78
  20. crawlo/downloader/aiohttp_downloader.py +212 -199
  21. crawlo/downloader/cffi_downloader.py +251 -241
  22. crawlo/downloader/httpx_downloader.py +259 -246
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +82 -78
  25. crawlo/extension/__init__.py +31 -31
  26. crawlo/extension/log_interval.py +49 -49
  27. crawlo/extension/log_stats.py +44 -44
  28. crawlo/extension/logging_extension.py +34 -34
  29. crawlo/filters/__init__.py +154 -37
  30. crawlo/filters/aioredis_filter.py +242 -150
  31. crawlo/filters/memory_filter.py +269 -202
  32. crawlo/items/__init__.py +23 -23
  33. crawlo/items/base.py +21 -21
  34. crawlo/items/fields.py +53 -53
  35. crawlo/items/items.py +104 -104
  36. crawlo/middleware/__init__.py +21 -21
  37. crawlo/middleware/default_header.py +32 -32
  38. crawlo/middleware/download_delay.py +28 -28
  39. crawlo/middleware/middleware_manager.py +135 -135
  40. crawlo/middleware/proxy.py +248 -245
  41. crawlo/middleware/request_ignore.py +30 -30
  42. crawlo/middleware/response_code.py +18 -18
  43. crawlo/middleware/response_filter.py +26 -26
  44. crawlo/middleware/retry.py +125 -90
  45. crawlo/mode_manager.py +201 -0
  46. crawlo/network/__init__.py +21 -7
  47. crawlo/network/request.py +311 -203
  48. crawlo/network/response.py +271 -166
  49. crawlo/pipelines/__init__.py +22 -13
  50. crawlo/pipelines/bloom_dedup_pipeline.py +157 -0
  51. crawlo/pipelines/console_pipeline.py +39 -39
  52. crawlo/pipelines/csv_pipeline.py +317 -0
  53. crawlo/pipelines/database_dedup_pipeline.py +225 -0
  54. crawlo/pipelines/json_pipeline.py +219 -0
  55. crawlo/pipelines/memory_dedup_pipeline.py +116 -0
  56. crawlo/pipelines/mongo_pipeline.py +116 -116
  57. crawlo/pipelines/mysql_pipeline.py +195 -195
  58. crawlo/pipelines/pipeline_manager.py +56 -56
  59. crawlo/pipelines/redis_dedup_pipeline.py +163 -0
  60. crawlo/project.py +153 -153
  61. crawlo/queue/__init__.py +0 -0
  62. crawlo/queue/pqueue.py +37 -0
  63. crawlo/queue/queue_manager.py +308 -0
  64. crawlo/queue/redis_priority_queue.py +209 -0
  65. crawlo/settings/__init__.py +7 -7
  66. crawlo/settings/default_settings.py +245 -167
  67. crawlo/settings/setting_manager.py +99 -99
  68. crawlo/spider/__init__.py +639 -129
  69. crawlo/stats_collector.py +59 -59
  70. crawlo/subscriber.py +106 -106
  71. crawlo/task_manager.py +30 -27
  72. crawlo/templates/crawlo.cfg.tmpl +10 -10
  73. crawlo/templates/project/__init__.py.tmpl +3 -3
  74. crawlo/templates/project/items.py.tmpl +17 -17
  75. crawlo/templates/project/middlewares.py.tmpl +87 -76
  76. crawlo/templates/project/pipelines.py.tmpl +342 -64
  77. crawlo/templates/project/run.py.tmpl +252 -0
  78. crawlo/templates/project/settings.py.tmpl +251 -54
  79. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  80. crawlo/templates/spider/spider.py.tmpl +178 -32
  81. crawlo/utils/__init__.py +7 -7
  82. crawlo/utils/controlled_spider_mixin.py +440 -0
  83. crawlo/utils/date_tools.py +233 -233
  84. crawlo/utils/db_helper.py +343 -343
  85. crawlo/utils/func_tools.py +82 -82
  86. crawlo/utils/large_scale_config.py +287 -0
  87. crawlo/utils/large_scale_helper.py +344 -0
  88. crawlo/utils/log.py +128 -128
  89. crawlo/utils/queue_helper.py +176 -0
  90. crawlo/utils/request.py +267 -267
  91. crawlo/utils/request_serializer.py +220 -0
  92. crawlo/utils/spider_loader.py +62 -62
  93. crawlo/utils/system.py +11 -11
  94. crawlo/utils/tools.py +4 -4
  95. crawlo/utils/url.py +39 -39
  96. crawlo-1.1.3.dist-info/METADATA +635 -0
  97. crawlo-1.1.3.dist-info/RECORD +113 -0
  98. examples/__init__.py +7 -7
  99. examples/controlled_spider_example.py +205 -0
  100. tests/__init__.py +7 -7
  101. tests/test_final_validation.py +154 -0
  102. tests/test_proxy_health_check.py +32 -32
  103. tests/test_proxy_middleware_integration.py +136 -136
  104. tests/test_proxy_providers.py +56 -56
  105. tests/test_proxy_stats.py +19 -19
  106. tests/test_proxy_strategies.py +59 -59
  107. tests/test_redis_config.py +29 -0
  108. tests/test_redis_queue.py +225 -0
  109. tests/test_request_serialization.py +71 -0
  110. tests/test_scheduler.py +242 -0
  111. crawlo/pipelines/mysql_batch_pipline.py +0 -273
  112. crawlo/utils/pqueue.py +0 -174
  113. crawlo-1.1.1.dist-info/METADATA +0 -220
  114. crawlo-1.1.1.dist-info/RECORD +0 -100
  115. examples/baidu_spider/__init__.py +0 -7
  116. examples/baidu_spider/demo.py +0 -94
  117. examples/baidu_spider/items.py +0 -46
  118. examples/baidu_spider/middleware.py +0 -49
  119. examples/baidu_spider/pipeline.py +0 -55
  120. examples/baidu_spider/run.py +0 -27
  121. examples/baidu_spider/settings.py +0 -121
  122. examples/baidu_spider/spiders/__init__.py +0 -7
  123. examples/baidu_spider/spiders/bai_du.py +0 -61
  124. examples/baidu_spider/spiders/miit.py +0 -159
  125. examples/baidu_spider/spiders/sina.py +0 -79
  126. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/WHEEL +0 -0
  127. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/entry_points.txt +0 -0
  128. {crawlo-1.1.1.dist-info → crawlo-1.1.3.dist-info}/top_level.txt +0 -0
tests/test_proxy_middleware_integration.py CHANGED
@@ -1,137 +1,137 @@
- # tests/test_proxy_middleware_integration.py
- import pytest
- import asyncio
- import time
- from unittest.mock import Mock, AsyncMock, patch
- from crawlo import Request, Response, Spider
- from crawlo.proxy.middleware import ProxyMiddleware
- from crawlo.proxy.stats import ProxyStats
-
-
- @pytest.fixture
- def crawler():
-     class MockSettings:
-         def get(self, key, default=None):
-             defaults = {
-                 'PROXY_ENABLED': True,
-                 'PROXIES': ['http://p1:8080', 'http://p2:8080'],
-                 'PROXY_SELECTION_STRATEGY': 'random',
-                 'PROXY_REQUEST_DELAY_ENABLED': False,
-                 'PROXY_MAX_RETRY_COUNT': 1,
-             }
-             return defaults.get(key, default)
-
-         def get_bool(self, key, default=None):
-             return self.get(key, default)
-
-         def get_int(self, key, default=None):
-             return self.get(key, default)
-
-         def get_float(self, key, default=None):
-             return self.get(key, default)
-
-         def get_list(self, key, default=None):
-             return self.get(key, default)
-
-     class MockCrawler:
-         def __init__(self):
-             self.settings = MockSettings()
-
-     return MockCrawler()
-
-
- @pytest.fixture
- def middleware(crawler):
-     mw = ProxyMiddleware.create_instance(crawler)
-     mw._load_providers = Mock()
-     mw._update_proxy_pool = AsyncMock()
-     mw._health_check = AsyncMock()
-     mw.scheduler = None
-
-     mw.proxies = [
-         {
-             'url': 'http://p1:8080',
-             'healthy': True,
-             'failures': 0,
-             'last_health_check': 0,
-             'unhealthy_since': 0
-         },
-         {
-             'url': 'http://p2:8080',
-             'healthy': True,
-             'failures': 0,
-             'last_health_check': 0,
-             'unhealthy_since': 0
-         },
-     ]
-     mw.stats = ProxyStats()
-     for p in mw.proxies:
-         mw.stats.record(p['url'], 'total')
-
-     asyncio.get_event_loop().run_until_complete(mw._initial_setup())
-     return mw
-
-
- @pytest.fixture
- def spider():
-     return Mock(spec=Spider, logger=Mock())
-
-
- def test_process_request_sets_proxy(middleware, spider):
-     request = Request("https://example.com")
-     result = asyncio.get_event_loop().run_until_complete(
-         middleware.process_request(request, spider)
-     )
-     assert result is None
-     assert hasattr(request, 'proxy')
-     assert request.proxy in ['http://p1:8080', 'http://p2:8080']
-
-
- def test_process_response_records_success(middleware, spider):
-     request = Request("https://example.com")
-     request.proxy = 'http://p1:8080'
-     response = Response("https://example.com", body=b"ok", headers={})
-     middleware.stats.record(request.proxy, 'total')
-     middleware.process_response(request, response, spider)
-     assert middleware.stats.get(request.proxy)['success'] == 1
-
-
- def test_process_exception_switches_proxy(middleware, spider):
-     request = Request("https://example.com")
-     request.proxy = 'http://p1:8080'
-     request.meta['proxy_retry_count'] = 0
-
-     result = middleware.process_exception(request, Exception("Timeout"), spider)
-     assert result is not None
-     assert result.proxy != 'http://p1:8080'
-     assert result.meta['proxy_retry_count'] == 1
-
-     final = middleware.process_exception(result, Exception("Timeout"), spider)
-     assert final is None
-
-
- def test_mark_failure_disables_proxy(middleware):
-     proxy_url = 'http://p1:8080'
-     p = next(p for p in middleware.proxies if p['url'] == proxy_url)
-     p['failures'] = 2
-
-     middleware._mark_failure(proxy_url)
-     assert p['failures'] == 3
-     assert p['healthy'] is False
-     assert p['unhealthy_since'] > 0
-
-
- @pytest.mark.asyncio
- async def test_request_delay(middleware, spider):
-     """测试请求延迟功能:验证是否调用了 asyncio.sleep"""
-     with patch("crawlo.proxy.middleware.asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
-         middleware.delay_enabled = True # 注意:这里应该是 delay_enabled 而不是 request_delay_enabled
-         middleware.request_delay = 0.1
-         middleware._last_req_time = time.time() - 0.05 # 50ms 前
-
-         request = Request("https://a.com")
-         await middleware.process_request(request, spider)
-
-         mock_sleep.assert_called_once()
-         delay = mock_sleep.call_args[0][0]
+ # tests/test_proxy_middleware_integration.py
+ import pytest
+ import asyncio
+ import time
+ from unittest.mock import Mock, AsyncMock, patch
+ from crawlo import Request, Response, Spider
+ from crawlo.proxy.middleware import ProxyMiddleware
+ from crawlo.proxy.stats import ProxyStats
+
+
+ @pytest.fixture
+ def crawler():
+     class MockSettings:
+         def get(self, key, default=None):
+             defaults = {
+                 'PROXY_ENABLED': True,
+                 'PROXIES': ['http://p1:8080', 'http://p2:8080'],
+                 'PROXY_SELECTION_STRATEGY': 'random',
+                 'PROXY_REQUEST_DELAY_ENABLED': False,
+                 'PROXY_MAX_RETRY_COUNT': 1,
+             }
+             return defaults.get(key, default)
+
+         def get_bool(self, key, default=None):
+             return self.get(key, default)
+
+         def get_int(self, key, default=None):
+             return self.get(key, default)
+
+         def get_float(self, key, default=None):
+             return self.get(key, default)
+
+         def get_list(self, key, default=None):
+             return self.get(key, default)
+
+     class MockCrawler:
+         def __init__(self):
+             self.settings = MockSettings()
+
+     return MockCrawler()
+
+
+ @pytest.fixture
+ def middleware(crawler):
+     mw = ProxyMiddleware.create_instance(crawler)
+     mw._load_providers = Mock()
+     mw._update_proxy_pool = AsyncMock()
+     mw._health_check = AsyncMock()
+     mw.scheduler = None
+
+     mw.proxies = [
+         {
+             'url': 'http://p1:8080',
+             'healthy': True,
+             'failures': 0,
+             'last_health_check': 0,
+             'unhealthy_since': 0
+         },
+         {
+             'url': 'http://p2:8080',
+             'healthy': True,
+             'failures': 0,
+             'last_health_check': 0,
+             'unhealthy_since': 0
+         },
+     ]
+     mw.stats = ProxyStats()
+     for p in mw.proxies:
+         mw.stats.record(p['url'], 'total')
+
+     asyncio.get_event_loop().run_until_complete(mw._initial_setup())
+     return mw
+
+
+ @pytest.fixture
+ def spider():
+     return Mock(spec=Spider, logger=Mock())
+
+
+ def test_process_request_sets_proxy(middleware, spider):
+     request = Request("https://example.com")
+     result = asyncio.get_event_loop().run_until_complete(
+         middleware.process_request(request, spider)
+     )
+     assert result is None
+     assert hasattr(request, 'proxy')
+     assert request.proxy in ['http://p1:8080', 'http://p2:8080']
+
+
+ def test_process_response_records_success(middleware, spider):
+     request = Request("https://example.com")
+     request.proxy = 'http://p1:8080'
+     response = Response("https://example.com", body=b"ok", headers={})
+     middleware.stats.record(request.proxy, 'total')
+     middleware.process_response(request, response, spider)
+     assert middleware.stats.get(request.proxy)['success'] == 1
+
+
+ def test_process_exception_switches_proxy(middleware, spider):
+     request = Request("https://example.com")
+     request.proxy = 'http://p1:8080'
+     request.meta['proxy_retry_count'] = 0
+
+     result = middleware.process_exception(request, Exception("Timeout"), spider)
+     assert result is not None
+     assert result.proxy != 'http://p1:8080'
+     assert result.meta['proxy_retry_count'] == 1
+
+     final = middleware.process_exception(result, Exception("Timeout"), spider)
+     assert final is None
+
+
+ def test_mark_failure_disables_proxy(middleware):
+     proxy_url = 'http://p1:8080'
+     p = next(p for p in middleware.proxies if p['url'] == proxy_url)
+     p['failures'] = 2
+
+     middleware._mark_failure(proxy_url)
+     assert p['failures'] == 3
+     assert p['healthy'] is False
+     assert p['unhealthy_since'] > 0
+
+
+ @pytest.mark.asyncio
+ async def test_request_delay(middleware, spider):
+     """测试请求延迟功能:验证是否调用了 asyncio.sleep"""
+     with patch("crawlo.proxy.middleware.asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
+         middleware.delay_enabled = True # 注意:这里应该是 delay_enabled 而不是 request_delay_enabled
+         middleware.request_delay = 0.1
+         middleware._last_req_time = time.time() - 0.05 # 50ms 前
+
+         request = Request("https://a.com")
+         await middleware.process_request(request, spider)
+
+         mock_sleep.assert_called_once()
+         delay = mock_sleep.call_args[0][0]
          assert 0.04 <= delay <= 0.06
tests/test_proxy_providers.py CHANGED
@@ -1,57 +1,57 @@
- # tests/test_proxy_providers.py
- import pytest
- import pytest
- import respx
- from httpx import Response
- from crawlo.proxy.providers import StaticProxyProvider, FileProxyProvider, APIProxyProvider
- import tempfile
- import os
-
-
- @pytest.mark.asyncio
- async def test_static_provider():
-     """测试静态代理提供者"""
-     provider = StaticProxyProvider(['http://1.1.1.1:8080', 'http://2.2.2.2:8080'])
-     proxies = await provider.fetch_proxies()
-     assert len(proxies) == 2
-     assert 'http://1.1.1.1:8080' in proxies
-     assert 'http://2.2.2.2:8080' in proxies
-
-
- @pytest.mark.asyncio
- async def test_file_provider():
-     """测试文件代理提供者"""
-     with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
-         f.write("http://a.com:8080\nhttp://b.com:8080\n")
-         temp_path = f.name
-     try:
-         provider = FileProxyProvider(temp_path)
-         proxies = await provider.fetch_proxies()
-         assert len(proxies) == 2
-         assert 'http://a.com:8080' in proxies
-         assert 'http://b.com:8080' in proxies
-     finally:
-         os.unlink(temp_path)
-
-
- @pytest.mark.asyncio
- @respx.mock
- async def test_api_provider():
-     """使用 respx 拦截 HTTP 请求,更简洁可靠"""
-     # 拦截 GET 请求
-     respx.get("https://api.example.com").mock(
-         return_value=Response(
-             200,
-             json=[
-                 {"ip": "1.1.1.1", "port": 8080},
-                 {"ip": "2.2.2.2", "port": 8080}
-             ]
-         )
-     )
-
-     provider = APIProxyProvider(url="https://api.example.com")
-     proxies = await provider.fetch_proxies()
-
-     assert len(proxies) == 2
-     assert "http://1.1.1.1:8080" in proxies
+ # tests/test_proxy_providers.py
+ import pytest
+ import pytest
+ import respx
+ from httpx import Response
+ from crawlo.proxy.providers import StaticProxyProvider, FileProxyProvider, APIProxyProvider
+ import tempfile
+ import os
+
+
+ @pytest.mark.asyncio
+ async def test_static_provider():
+     """测试静态代理提供者"""
+     provider = StaticProxyProvider(['http://1.1.1.1:8080', 'http://2.2.2.2:8080'])
+     proxies = await provider.fetch_proxies()
+     assert len(proxies) == 2
+     assert 'http://1.1.1.1:8080' in proxies
+     assert 'http://2.2.2.2:8080' in proxies
+
+
+ @pytest.mark.asyncio
+ async def test_file_provider():
+     """测试文件代理提供者"""
+     with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
+         f.write("http://a.com:8080\nhttp://b.com:8080\n")
+         temp_path = f.name
+     try:
+         provider = FileProxyProvider(temp_path)
+         proxies = await provider.fetch_proxies()
+         assert len(proxies) == 2
+         assert 'http://a.com:8080' in proxies
+         assert 'http://b.com:8080' in proxies
+     finally:
+         os.unlink(temp_path)
+
+
+ @pytest.mark.asyncio
+ @respx.mock
+ async def test_api_provider():
+     """使用 respx 拦截 HTTP 请求,更简洁可靠"""
+     # 拦截 GET 请求
+     respx.get("https://api.example.com").mock(
+         return_value=Response(
+             200,
+             json=[
+                 {"ip": "1.1.1.1", "port": 8080},
+                 {"ip": "2.2.2.2", "port": 8080}
+             ]
+         )
+     )
+
+     provider = APIProxyProvider(url="https://api.example.com")
+     proxies = await provider.fetch_proxies()
+
+     assert len(proxies) == 2
+     assert "http://1.1.1.1:8080" in proxies
      assert "http://2.2.2.2:8080" in proxies
tests/test_proxy_stats.py CHANGED
@@ -1,20 +1,20 @@
- # tests/test_proxy_stats.py
- from crawlo.proxy.stats import ProxyStats
-
-
- def test_proxy_stats():
-     """测试代理统计功能"""
-     stats = ProxyStats()
-     url = 'http://proxy1:8080'
-
-     stats.record(url, 'success')
-     stats.record(url, 'success')
-     stats.record(url, 'failure')
-
-     assert stats.get(url)['success'] == 2
-     assert stats.get(url)['failure'] == 1
-     assert stats.get(url)['total'] == 3
-
-     all_data = stats.all()
-     assert url in all_data
+ # tests/test_proxy_stats.py
+ from crawlo.proxy.stats import ProxyStats
+
+
+ def test_proxy_stats():
+     """测试代理统计功能"""
+     stats = ProxyStats()
+     url = 'http://proxy1:8080'
+
+     stats.record(url, 'success')
+     stats.record(url, 'success')
+     stats.record(url, 'failure')
+
+     assert stats.get(url)['success'] == 2
+     assert stats.get(url)['failure'] == 1
+     assert stats.get(url)['total'] == 3
+
+     all_data = stats.all()
+     assert url in all_data
      assert all_data[url]['success'] == 2
tests/test_proxy_strategies.py CHANGED
@@ -1,60 +1,60 @@
- # tests/test_proxy_strategies.py
- import pytest
- from crawlo import Request
- from crawlo.proxy.strategies import STRATEGIES
-
-
- @pytest.fixture
- def mock_proxies():
-     """提供测试用的代理列表"""
-     return [
-         {'url': 'http://p1:8080'},
-         {'url': 'http://p2:8080'},
-         {'url': 'http://p3:8080'},
-     ]
-
-
- @pytest.fixture
- def mock_stats():
-     """提供测试用的统计信息"""
-     return {
-         'http://p1:8080': {'total': 10},
-         'http://p2:8080': {'total': 5},
-         'http://p3:8080': {'total': 1},
-     }
-
-
- @pytest.fixture
- def mock_request():
-     """提供测试用的请求对象"""
-     return Request("https://example.com")
-
-
- def test_random_strategy(mock_proxies, mock_request, mock_stats):
-     """测试随机策略"""
-     strategy = STRATEGIES['random']
-     chosen = strategy(mock_proxies, mock_request, mock_stats)
-     assert chosen in [p['url'] for p in mock_proxies]
-
-
- def test_least_used_strategy(mock_proxies, mock_request, mock_stats):
-     """测试最少使用策略"""
-     strategy = STRATEGIES['least_used']
-     chosen = strategy(mock_proxies, mock_request, mock_stats)
-     assert chosen == 'http://p3:8080' # total=1
-
-
- def test_domain_rule_strategy(mock_proxies, mock_request, mock_stats):
-     """测试域名规则策略"""
-     from crawlo.proxy.strategies.domain_rule import domain_rule_strategy
-     request = Request("https://taobao.com/item/123")
-     rules = {'taobao.com': 'http://special:8080'}
-
-     # Monkey patch 确保有回退策略
-     old_strategy = STRATEGIES['least_used']
-     try:
-         STRATEGIES['least_used'] = lambda p, r, s: 'http://fallback:8080'
-         chosen = domain_rule_strategy(mock_proxies, request, mock_stats, rules)
-         assert chosen == 'http://special:8080'
-     finally:
+ # tests/test_proxy_strategies.py
+ import pytest
+ from crawlo import Request
+ from crawlo.proxy.strategies import STRATEGIES
+
+
+ @pytest.fixture
+ def mock_proxies():
+     """提供测试用的代理列表"""
+     return [
+         {'url': 'http://p1:8080'},
+         {'url': 'http://p2:8080'},
+         {'url': 'http://p3:8080'},
+     ]
+
+
+ @pytest.fixture
+ def mock_stats():
+     """提供测试用的统计信息"""
+     return {
+         'http://p1:8080': {'total': 10},
+         'http://p2:8080': {'total': 5},
+         'http://p3:8080': {'total': 1},
+     }
+
+
+ @pytest.fixture
+ def mock_request():
+     """提供测试用的请求对象"""
+     return Request("https://example.com")
+
+
+ def test_random_strategy(mock_proxies, mock_request, mock_stats):
+     """测试随机策略"""
+     strategy = STRATEGIES['random']
+     chosen = strategy(mock_proxies, mock_request, mock_stats)
+     assert chosen in [p['url'] for p in mock_proxies]
+
+
+ def test_least_used_strategy(mock_proxies, mock_request, mock_stats):
+     """测试最少使用策略"""
+     strategy = STRATEGIES['least_used']
+     chosen = strategy(mock_proxies, mock_request, mock_stats)
+     assert chosen == 'http://p3:8080' # total=1
+
+
+ def test_domain_rule_strategy(mock_proxies, mock_request, mock_stats):
+     """测试域名规则策略"""
+     from crawlo.proxy.strategies.domain_rule import domain_rule_strategy
+     request = Request("https://taobao.com/item/123")
+     rules = {'taobao.com': 'http://special:8080'}
+
+     # Monkey patch 确保有回退策略
+     old_strategy = STRATEGIES['least_used']
+     try:
+         STRATEGIES['least_used'] = lambda p, r, s: 'http://fallback:8080'
+         chosen = domain_rule_strategy(mock_proxies, request, mock_stats, rules)
+         assert chosen == 'http://special:8080'
+     finally:
          STRATEGIES['least_used'] = old_strategy
tests/test_redis_config.py ADDED
@@ -0,0 +1,29 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ 快速测试 Redis 连接配置修复
+ """
+ import asyncio
+ from crawlo.queue.redis_priority_queue import RedisPriorityQueue
+ from crawlo.settings.default_settings import REDIS_URL
+
+ async def test_redis_config():
+     """测试修复后的 Redis 配置"""
+     print(f"🔍 测试 Redis 配置: {REDIS_URL}")
+
+     try:
+         queue = RedisPriorityQueue(redis_url=REDIS_URL)
+         await queue.connect()
+         print("✅ Redis 连接成功!")
+         await queue.close()
+         return True
+     except Exception as e:
+         print(f"❌ Redis 连接失败: {e}")
+         return False
+
+ if __name__ == "__main__":
+     success = asyncio.run(test_redis_config())
+     if success:
+         print("🎉 配置修复成功!现在可以运行你的爬虫了。")
+     else:
+         print("❌ 配置仍有问题,请检查 Redis 服务状态。")