crawlo-1.1.3-py3-none-any.whl → crawlo-1.1.4-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (118)
  1. crawlo/__init__.py +34 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +40 -40
  4. crawlo/commands/__init__.py +13 -13
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/list.py +155 -155
  8. crawlo/commands/run.py +285 -285
  9. crawlo/commands/startproject.py +196 -196
  10. crawlo/commands/stats.py +188 -188
  11. crawlo/commands/utils.py +186 -186
  12. crawlo/config.py +279 -279
  13. crawlo/core/__init__.py +2 -2
  14. crawlo/core/engine.py +171 -171
  15. crawlo/core/enhanced_engine.py +189 -189
  16. crawlo/core/processor.py +40 -40
  17. crawlo/core/scheduler.py +165 -165
  18. crawlo/crawler.py +1027 -1027
  19. crawlo/downloader/__init__.py +242 -242
  20. crawlo/downloader/aiohttp_downloader.py +212 -212
  21. crawlo/downloader/cffi_downloader.py +251 -251
  22. crawlo/downloader/httpx_downloader.py +259 -259
  23. crawlo/event.py +11 -11
  24. crawlo/exceptions.py +81 -81
  25. crawlo/extension/__init__.py +38 -31
  26. crawlo/extension/health_check.py +142 -0
  27. crawlo/extension/log_interval.py +58 -49
  28. crawlo/extension/log_stats.py +82 -44
  29. crawlo/extension/logging_extension.py +44 -35
  30. crawlo/extension/memory_monitor.py +89 -0
  31. crawlo/extension/performance_profiler.py +118 -0
  32. crawlo/extension/request_recorder.py +108 -0
  33. crawlo/filters/__init__.py +154 -154
  34. crawlo/filters/aioredis_filter.py +241 -241
  35. crawlo/filters/memory_filter.py +269 -269
  36. crawlo/items/__init__.py +23 -23
  37. crawlo/items/base.py +21 -21
  38. crawlo/items/fields.py +53 -53
  39. crawlo/items/items.py +104 -104
  40. crawlo/middleware/__init__.py +21 -21
  41. crawlo/middleware/default_header.py +32 -32
  42. crawlo/middleware/download_delay.py +28 -28
  43. crawlo/middleware/middleware_manager.py +135 -135
  44. crawlo/middleware/proxy.py +248 -248
  45. crawlo/middleware/request_ignore.py +30 -30
  46. crawlo/middleware/response_code.py +18 -18
  47. crawlo/middleware/response_filter.py +26 -26
  48. crawlo/middleware/retry.py +124 -124
  49. crawlo/mode_manager.py +200 -200
  50. crawlo/network/__init__.py +21 -21
  51. crawlo/network/request.py +311 -311
  52. crawlo/network/response.py +271 -271
  53. crawlo/pipelines/__init__.py +21 -21
  54. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  55. crawlo/pipelines/console_pipeline.py +39 -39
  56. crawlo/pipelines/csv_pipeline.py +316 -316
  57. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  58. crawlo/pipelines/json_pipeline.py +218 -218
  59. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  60. crawlo/pipelines/mongo_pipeline.py +132 -117
  61. crawlo/pipelines/mysql_pipeline.py +317 -195
  62. crawlo/pipelines/pipeline_manager.py +56 -56
  63. crawlo/pipelines/redis_dedup_pipeline.py +162 -162
  64. crawlo/project.py +153 -153
  65. crawlo/queue/pqueue.py +37 -37
  66. crawlo/queue/queue_manager.py +307 -307
  67. crawlo/queue/redis_priority_queue.py +208 -208
  68. crawlo/settings/__init__.py +7 -7
  69. crawlo/settings/default_settings.py +278 -244
  70. crawlo/settings/setting_manager.py +99 -99
  71. crawlo/spider/__init__.py +639 -639
  72. crawlo/stats_collector.py +59 -59
  73. crawlo/subscriber.py +131 -106
  74. crawlo/task_manager.py +30 -30
  75. crawlo/templates/crawlo.cfg.tmpl +10 -10
  76. crawlo/templates/project/__init__.py.tmpl +3 -3
  77. crawlo/templates/project/items.py.tmpl +17 -17
  78. crawlo/templates/project/middlewares.py.tmpl +111 -87
  79. crawlo/templates/project/pipelines.py.tmpl +97 -341
  80. crawlo/templates/project/run.py.tmpl +251 -251
  81. crawlo/templates/project/settings.py.tmpl +279 -250
  82. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  83. crawlo/templates/spider/spider.py.tmpl +142 -178
  84. crawlo/utils/__init__.py +7 -7
  85. crawlo/utils/controlled_spider_mixin.py +439 -439
  86. crawlo/utils/date_tools.py +233 -233
  87. crawlo/utils/db_helper.py +343 -343
  88. crawlo/utils/func_tools.py +82 -82
  89. crawlo/utils/large_scale_config.py +286 -286
  90. crawlo/utils/large_scale_helper.py +343 -343
  91. crawlo/utils/log.py +128 -128
  92. crawlo/utils/queue_helper.py +175 -175
  93. crawlo/utils/request.py +267 -267
  94. crawlo/utils/request_serializer.py +219 -219
  95. crawlo/utils/spider_loader.py +62 -62
  96. crawlo/utils/system.py +11 -11
  97. crawlo/utils/tools.py +4 -4
  98. crawlo/utils/url.py +39 -39
  99. crawlo-1.1.4.dist-info/METADATA +403 -0
  100. crawlo-1.1.4.dist-info/RECORD +117 -0
  101. examples/__init__.py +7 -7
  102. examples/controlled_spider_example.py +205 -205
  103. tests/__init__.py +7 -7
  104. tests/test_final_validation.py +153 -153
  105. tests/test_proxy_health_check.py +32 -32
  106. tests/test_proxy_middleware_integration.py +136 -136
  107. tests/test_proxy_providers.py +56 -56
  108. tests/test_proxy_stats.py +19 -19
  109. tests/test_proxy_strategies.py +59 -59
  110. tests/test_redis_config.py +28 -28
  111. tests/test_redis_queue.py +224 -224
  112. tests/test_request_serialization.py +70 -70
  113. tests/test_scheduler.py +241 -241
  114. crawlo-1.1.3.dist-info/METADATA +0 -635
  115. crawlo-1.1.3.dist-info/RECORD +0 -113
  116. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/WHEEL +0 -0
  117. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/entry_points.txt +0 -0
  118. {crawlo-1.1.3.dist-info → crawlo-1.1.4.dist-info}/top_level.txt +0 -0
tests/test_final_validation.py
@@ -1,154 +1,154 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- """
- Final validation test: confirm that the distributed queue's logger serialization issue has been fully resolved
- """
- import asyncio
- import pickle
- import sys
- sys.path.insert(0, "..")
-
- from crawlo.network.request import Request
- from crawlo.spider import Spider
- from crawlo.core.scheduler import Scheduler
- from crawlo.queue.redis_priority_queue import RedisPriorityQueue
- from crawlo.utils.log import get_logger
- from unittest.mock import Mock
-
-
- class TestSpider(Spider):
-     """Test spider"""
-     name = "validation_spider"
-
-     def __init__(self):
-         super().__init__()
-         # Deliberately add multiple loggers to test the cleanup
-         self.custom_logger = get_logger("custom")
-         self.debug_logger = get_logger("debug")
-         self.nested_data = {
-             'logger': get_logger("nested"),
-             'sub': {
-                 'logger_ref': get_logger("sub_logger")
-             }
-         }
-
-     def parse(self, response):
-         # Verify the main logger is still there
-         self.logger.info(f"✅ Main logger is working: {response.url}")
-         return {"url": response.url, "status": "success"}
-
-
- def test_scheduler_cleaning():
-     """Test the scheduler's logger cleanup"""
-     print("🔍 Testing scheduler logger cleanup...")
-
-     spider = TestSpider()
-     request = Request(
-         url="https://scheduler-test.com",
-         callback=spider.parse,
-         meta={"logger": get_logger("meta_logger")}
-     )
-
-     # Mock crawler and scheduler
-     class MockCrawler:
-         def __init__(self):
-             self.spider = spider
-
-     class MockScheduler(Scheduler):
-         def __init__(self):
-             self.crawler = MockCrawler()
-             self.logger = get_logger("MockScheduler")
-
-     scheduler = MockScheduler()
-
-     # Checks before cleanup
-     print(f" 🔧 Before cleanup - spider.logger: {spider.logger is not None}")
-     print(f" 🔧 Before cleanup - spider.custom_logger: {spider.custom_logger is not None}")
-     print(f" 🔧 Before cleanup - request.callback: {request.callback is not None}")
-
-     # Perform the cleanup
-     cleaned_request = scheduler._deep_clean_loggers(request)
-
-     # Checks after cleanup
-     print(f" ✅ After cleanup - spider.logger: {spider.logger is not None}")
-     print(f" ✅ After cleanup - spider.custom_logger: {spider.custom_logger is None}")
-     print(f" ✅ After cleanup - request.callback: {cleaned_request.callback is None}")
-
-     # Serialization test
-     try:
-         serialized = pickle.dumps(cleaned_request)
-         print(f" ✅ Serialization after scheduler cleanup succeeded, size: {len(serialized)} bytes")
-         return True
-     except Exception as e:
-         print(f" ❌ Serialization after scheduler cleanup failed: {e}")
-         return False
-
-
- async def test_redis_queue_cleaning():
-     """Test the Redis queue's logger cleanup"""
-     print("\n🔍 Testing Redis queue logger cleanup...")
-
-     spider = TestSpider()
-     request = Request(
-         url="https://redis-test.com",
-         callback=spider.parse,
-         meta={"logger": get_logger("meta_logger")}
-     )
-
-     try:
-         queue = RedisPriorityQueue(redis_url="redis://127.0.0.1:6379/0")
-         await queue.connect()
-
-         # Enqueue test
-         success = await queue.put(request, priority=0)
-         print(f" ✅ Redis queue enqueue succeeded: {success}")
-
-         if success:
-             # Dequeue test
-             retrieved = await queue.get(timeout=2.0)
-             if retrieved:
-                 print(f" ✅ Redis queue dequeue succeeded: {retrieved.url}")
-                 print(f" ✅ callback info preserved: {'_callback_info' in retrieved.meta}")
-                 await queue.close()
-                 return True
-             else:
-                 print(" ❌ Dequeue failed")
-                 await queue.close()
-                 return False
-         else:
-             await queue.close()
-             return False
-
-     except Exception as e:
-         print(f" ❌ Redis queue test failed: {e}")
-         return False
-
-
- async def main():
-     """Main test function"""
-     print("🚀 Starting final validation tests...")
-     print("=" * 60)
-
-     # Test 1: scheduler cleanup
-     scheduler_ok = test_scheduler_cleaning()
-
-     # Test 2: Redis queue cleanup
-     redis_ok = await test_redis_queue_cleaning()
-
-     print("\n" + "=" * 60)
-     print("📊 Test results summary:")
-     print(f" Scheduler logger cleanup: {'✅ passed' if scheduler_ok else '❌ failed'}")
-     print(f" Redis queue cleanup: {'✅ passed' if redis_ok else '❌ failed'}")
-
-     if scheduler_ok and redis_ok:
-         print("\n🎉 All tests passed!")
-         print("✅ The distributed queue's logger serialization issue has been fully fixed!")
-         print("✅ Crawlo can now use the Redis distributed queue normally!")
-         return True
-     else:
-         print("\n❌ Some tests failed; further fixes are needed")
-         return False
-
-
- if __name__ == "__main__":
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Final validation test: confirm that the distributed queue's logger serialization issue has been fully resolved
+ """
+ import asyncio
+ import pickle
+ import sys
+ sys.path.insert(0, "..")
+
+ from crawlo.network.request import Request
+ from crawlo.spider import Spider
+ from crawlo.core.scheduler import Scheduler
+ from crawlo.queue.redis_priority_queue import RedisPriorityQueue
+ from crawlo.utils.log import get_logger
+ from unittest.mock import Mock
+
+
+ class TestSpider(Spider):
+     """Test spider"""
+     name = "validation_spider"
+
+     def __init__(self):
+         super().__init__()
+         # Deliberately add multiple loggers to test the cleanup
+         self.custom_logger = get_logger("custom")
+         self.debug_logger = get_logger("debug")
+         self.nested_data = {
+             'logger': get_logger("nested"),
+             'sub': {
+                 'logger_ref': get_logger("sub_logger")
+             }
+         }
+
+     def parse(self, response):
+         # Verify the main logger is still there
+         self.logger.info(f"✅ Main logger is working: {response.url}")
+         return {"url": response.url, "status": "success"}
+
+
+ def test_scheduler_cleaning():
+     """Test the scheduler's logger cleanup"""
+     print("🔍 Testing scheduler logger cleanup...")
+
+     spider = TestSpider()
+     request = Request(
+         url="https://scheduler-test.com",
+         callback=spider.parse,
+         meta={"logger": get_logger("meta_logger")}
+     )
+
+     # Mock crawler and scheduler
+     class MockCrawler:
+         def __init__(self):
+             self.spider = spider
+
+     class MockScheduler(Scheduler):
+         def __init__(self):
+             self.crawler = MockCrawler()
+             self.logger = get_logger("MockScheduler")
+
+     scheduler = MockScheduler()
+
+     # Checks before cleanup
+     print(f" 🔧 Before cleanup - spider.logger: {spider.logger is not None}")
+     print(f" 🔧 Before cleanup - spider.custom_logger: {spider.custom_logger is not None}")
+     print(f" 🔧 Before cleanup - request.callback: {request.callback is not None}")
+
+     # Perform the cleanup
+     cleaned_request = scheduler._deep_clean_loggers(request)
+
+     # Checks after cleanup
+     print(f" ✅ After cleanup - spider.logger: {spider.logger is not None}")
+     print(f" ✅ After cleanup - spider.custom_logger: {spider.custom_logger is None}")
+     print(f" ✅ After cleanup - request.callback: {cleaned_request.callback is None}")
+
+     # Serialization test
+     try:
+         serialized = pickle.dumps(cleaned_request)
+         print(f" ✅ Serialization after scheduler cleanup succeeded, size: {len(serialized)} bytes")
+         return True
+     except Exception as e:
+         print(f" ❌ Serialization after scheduler cleanup failed: {e}")
+         return False
+
+
+ async def test_redis_queue_cleaning():
+     """Test the Redis queue's logger cleanup"""
+     print("\n🔍 Testing Redis queue logger cleanup...")
+
+     spider = TestSpider()
+     request = Request(
+         url="https://redis-test.com",
+         callback=spider.parse,
+         meta={"logger": get_logger("meta_logger")}
+     )
+
+     try:
+         queue = RedisPriorityQueue(redis_url="redis://127.0.0.1:6379/0")
+         await queue.connect()
+
+         # Enqueue test
+         success = await queue.put(request, priority=0)
+         print(f" ✅ Redis queue enqueue succeeded: {success}")
+
+         if success:
+             # Dequeue test
+             retrieved = await queue.get(timeout=2.0)
+             if retrieved:
+                 print(f" ✅ Redis queue dequeue succeeded: {retrieved.url}")
+                 print(f" ✅ callback info preserved: {'_callback_info' in retrieved.meta}")
+                 await queue.close()
+                 return True
+             else:
+                 print(" ❌ Dequeue failed")
+                 await queue.close()
+                 return False
+         else:
+             await queue.close()
+             return False
+
+     except Exception as e:
+         print(f" ❌ Redis queue test failed: {e}")
+         return False
+
+
+ async def main():
+     """Main test function"""
+     print("🚀 Starting final validation tests...")
+     print("=" * 60)
+
+     # Test 1: scheduler cleanup
+     scheduler_ok = test_scheduler_cleaning()
+
+     # Test 2: Redis queue cleanup
+     redis_ok = await test_redis_queue_cleaning()
+
+     print("\n" + "=" * 60)
+     print("📊 Test results summary:")
+     print(f" Scheduler logger cleanup: {'✅ passed' if scheduler_ok else '❌ failed'}")
+     print(f" Redis queue cleanup: {'✅ passed' if redis_ok else '❌ failed'}")
+
+     if scheduler_ok and redis_ok:
+         print("\n🎉 All tests passed!")
+         print("✅ The distributed queue's logger serialization issue has been fully fixed!")
+         print("✅ Crawlo can now use the Redis distributed queue normally!")
+         return True
+     else:
+         print("\n❌ Some tests failed; further fixes are needed")
+         return False
+
+
+ if __name__ == "__main__":
      asyncio.run(main())
tests/test_proxy_health_check.py
@@ -1,33 +1,33 @@
- # tests/test_proxy_health_check.py
- import pytest
- from unittest.mock import AsyncMock, patch
- from crawlo.proxy.health_check import check_single_proxy
- import httpx
-
-
- @pytest.mark.asyncio
- @patch('httpx.AsyncClient')
- async def test_health_check_success(mock_client_class):
-     """Health check test: success"""
-     mock_resp = AsyncMock()
-     mock_resp.status_code = 200
-     mock_client_class.return_value.__aenter__.return_value.get.return_value = mock_resp
-
-     proxy_info = {'url': 'http://good:8080', 'healthy': False}
-     await check_single_proxy(proxy_info)
-
-     assert proxy_info['healthy'] is True
-     assert proxy_info['failures'] == 0
-
-
- @pytest.mark.asyncio
- @patch('httpx.AsyncClient')
- async def test_health_check_failure(mock_client_class):
-     """Health check test: failure"""
-     mock_client_class.return_value.__aenter__.return_value.get.side_effect = httpx.ConnectError("Failed")
-
-     proxy_info = {'url': 'http://bad:8080', 'healthy': True, 'failures': 0}
-     await check_single_proxy(proxy_info)
-
-     assert proxy_info['healthy'] is False
+ # tests/test_proxy_health_check.py
+ import pytest
+ from unittest.mock import AsyncMock, patch
+ from crawlo.proxy.health_check import check_single_proxy
+ import httpx
+
+
+ @pytest.mark.asyncio
+ @patch('httpx.AsyncClient')
+ async def test_health_check_success(mock_client_class):
+     """Health check test: success"""
+     mock_resp = AsyncMock()
+     mock_resp.status_code = 200
+     mock_client_class.return_value.__aenter__.return_value.get.return_value = mock_resp
+
+     proxy_info = {'url': 'http://good:8080', 'healthy': False}
+     await check_single_proxy(proxy_info)
+
+     assert proxy_info['healthy'] is True
+     assert proxy_info['failures'] == 0
+
+
+ @pytest.mark.asyncio
+ @patch('httpx.AsyncClient')
+ async def test_health_check_failure(mock_client_class):
+     """Health check test: failure"""
+     mock_client_class.return_value.__aenter__.return_value.get.side_effect = httpx.ConnectError("Failed")
+
+     proxy_info = {'url': 'http://bad:8080', 'healthy': True, 'failures': 0}
+     await check_single_proxy(proxy_info)
+
+     assert proxy_info['healthy'] is False
      assert proxy_info['failures'] == 1
tests/test_proxy_middleware_integration.py
@@ -1,137 +1,137 @@
- # tests/test_proxy_middleware_integration.py
- import pytest
- import asyncio
- import time
- from unittest.mock import Mock, AsyncMock, patch
- from crawlo import Request, Response, Spider
- from crawlo.proxy.middleware import ProxyMiddleware
- from crawlo.proxy.stats import ProxyStats
-
-
- @pytest.fixture
- def crawler():
-     class MockSettings:
-         def get(self, key, default=None):
-             defaults = {
-                 'PROXY_ENABLED': True,
-                 'PROXIES': ['http://p1:8080', 'http://p2:8080'],
-                 'PROXY_SELECTION_STRATEGY': 'random',
-                 'PROXY_REQUEST_DELAY_ENABLED': False,
-                 'PROXY_MAX_RETRY_COUNT': 1,
-             }
-             return defaults.get(key, default)
-
-         def get_bool(self, key, default=None):
-             return self.get(key, default)
-
-         def get_int(self, key, default=None):
-             return self.get(key, default)
-
-         def get_float(self, key, default=None):
-             return self.get(key, default)
-
-         def get_list(self, key, default=None):
-             return self.get(key, default)
-
-     class MockCrawler:
-         def __init__(self):
-             self.settings = MockSettings()
-
-     return MockCrawler()
-
-
- @pytest.fixture
- def middleware(crawler):
-     mw = ProxyMiddleware.create_instance(crawler)
-     mw._load_providers = Mock()
-     mw._update_proxy_pool = AsyncMock()
-     mw._health_check = AsyncMock()
-     mw.scheduler = None
-
-     mw.proxies = [
-         {
-             'url': 'http://p1:8080',
-             'healthy': True,
-             'failures': 0,
-             'last_health_check': 0,
-             'unhealthy_since': 0
-         },
-         {
-             'url': 'http://p2:8080',
-             'healthy': True,
-             'failures': 0,
-             'last_health_check': 0,
-             'unhealthy_since': 0
-         },
-     ]
-     mw.stats = ProxyStats()
-     for p in mw.proxies:
-         mw.stats.record(p['url'], 'total')
-
-     asyncio.get_event_loop().run_until_complete(mw._initial_setup())
-     return mw
-
-
- @pytest.fixture
- def spider():
-     return Mock(spec=Spider, logger=Mock())
-
-
- def test_process_request_sets_proxy(middleware, spider):
-     request = Request("https://example.com")
-     result = asyncio.get_event_loop().run_until_complete(
-         middleware.process_request(request, spider)
-     )
-     assert result is None
-     assert hasattr(request, 'proxy')
-     assert request.proxy in ['http://p1:8080', 'http://p2:8080']
-
-
- def test_process_response_records_success(middleware, spider):
-     request = Request("https://example.com")
-     request.proxy = 'http://p1:8080'
-     response = Response("https://example.com", body=b"ok", headers={})
-     middleware.stats.record(request.proxy, 'total')
-     middleware.process_response(request, response, spider)
-     assert middleware.stats.get(request.proxy)['success'] == 1
-
-
- def test_process_exception_switches_proxy(middleware, spider):
-     request = Request("https://example.com")
-     request.proxy = 'http://p1:8080'
-     request.meta['proxy_retry_count'] = 0
-
-     result = middleware.process_exception(request, Exception("Timeout"), spider)
-     assert result is not None
-     assert result.proxy != 'http://p1:8080'
-     assert result.meta['proxy_retry_count'] == 1
-
-     final = middleware.process_exception(result, Exception("Timeout"), spider)
-     assert final is None
-
-
- def test_mark_failure_disables_proxy(middleware):
-     proxy_url = 'http://p1:8080'
-     p = next(p for p in middleware.proxies if p['url'] == proxy_url)
-     p['failures'] = 2
-
-     middleware._mark_failure(proxy_url)
-     assert p['failures'] == 3
-     assert p['healthy'] is False
-     assert p['unhealthy_since'] > 0
-
-
- @pytest.mark.asyncio
- async def test_request_delay(middleware, spider):
-     """Test the request-delay feature: verify that asyncio.sleep is called"""
-     with patch("crawlo.proxy.middleware.asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
-         middleware.delay_enabled = True  # note: this should be delay_enabled, not request_delay_enabled
-         middleware.request_delay = 0.1
-         middleware._last_req_time = time.time() - 0.05  # 50 ms ago
-
-         request = Request("https://a.com")
-         await middleware.process_request(request, spider)
-
-         mock_sleep.assert_called_once()
-         delay = mock_sleep.call_args[0][0]
+ # tests/test_proxy_middleware_integration.py
+ import pytest
+ import asyncio
+ import time
+ from unittest.mock import Mock, AsyncMock, patch
+ from crawlo import Request, Response, Spider
+ from crawlo.proxy.middleware import ProxyMiddleware
+ from crawlo.proxy.stats import ProxyStats
+
+
+ @pytest.fixture
+ def crawler():
+     class MockSettings:
+         def get(self, key, default=None):
+             defaults = {
+                 'PROXY_ENABLED': True,
+                 'PROXIES': ['http://p1:8080', 'http://p2:8080'],
+                 'PROXY_SELECTION_STRATEGY': 'random',
+                 'PROXY_REQUEST_DELAY_ENABLED': False,
+                 'PROXY_MAX_RETRY_COUNT': 1,
+             }
+             return defaults.get(key, default)
+
+         def get_bool(self, key, default=None):
+             return self.get(key, default)
+
+         def get_int(self, key, default=None):
+             return self.get(key, default)
+
+         def get_float(self, key, default=None):
+             return self.get(key, default)
+
+         def get_list(self, key, default=None):
+             return self.get(key, default)
+
+     class MockCrawler:
+         def __init__(self):
+             self.settings = MockSettings()
+
+     return MockCrawler()
+
+
+ @pytest.fixture
+ def middleware(crawler):
+     mw = ProxyMiddleware.create_instance(crawler)
+     mw._load_providers = Mock()
+     mw._update_proxy_pool = AsyncMock()
+     mw._health_check = AsyncMock()
+     mw.scheduler = None
+
+     mw.proxies = [
+         {
+             'url': 'http://p1:8080',
+             'healthy': True,
+             'failures': 0,
+             'last_health_check': 0,
+             'unhealthy_since': 0
+         },
+         {
+             'url': 'http://p2:8080',
+             'healthy': True,
+             'failures': 0,
+             'last_health_check': 0,
+             'unhealthy_since': 0
+         },
+     ]
+     mw.stats = ProxyStats()
+     for p in mw.proxies:
+         mw.stats.record(p['url'], 'total')
+
+     asyncio.get_event_loop().run_until_complete(mw._initial_setup())
+     return mw
+
+
+ @pytest.fixture
+ def spider():
+     return Mock(spec=Spider, logger=Mock())
+
+
+ def test_process_request_sets_proxy(middleware, spider):
+     request = Request("https://example.com")
+     result = asyncio.get_event_loop().run_until_complete(
+         middleware.process_request(request, spider)
+     )
+     assert result is None
+     assert hasattr(request, 'proxy')
+     assert request.proxy in ['http://p1:8080', 'http://p2:8080']
+
+
+ def test_process_response_records_success(middleware, spider):
+     request = Request("https://example.com")
+     request.proxy = 'http://p1:8080'
+     response = Response("https://example.com", body=b"ok", headers={})
+     middleware.stats.record(request.proxy, 'total')
+     middleware.process_response(request, response, spider)
+     assert middleware.stats.get(request.proxy)['success'] == 1
+
+
+ def test_process_exception_switches_proxy(middleware, spider):
+     request = Request("https://example.com")
+     request.proxy = 'http://p1:8080'
+     request.meta['proxy_retry_count'] = 0
+
+     result = middleware.process_exception(request, Exception("Timeout"), spider)
+     assert result is not None
+     assert result.proxy != 'http://p1:8080'
+     assert result.meta['proxy_retry_count'] == 1
+
+     final = middleware.process_exception(result, Exception("Timeout"), spider)
+     assert final is None
+
+
+ def test_mark_failure_disables_proxy(middleware):
+     proxy_url = 'http://p1:8080'
+     p = next(p for p in middleware.proxies if p['url'] == proxy_url)
+     p['failures'] = 2
+
+     middleware._mark_failure(proxy_url)
+     assert p['failures'] == 3
+     assert p['healthy'] is False
+     assert p['unhealthy_since'] > 0
+
+
+ @pytest.mark.asyncio
+ async def test_request_delay(middleware, spider):
+     """Test the request-delay feature: verify that asyncio.sleep is called"""
+     with patch("crawlo.proxy.middleware.asyncio.sleep", new_callable=AsyncMock) as mock_sleep:
+         middleware.delay_enabled = True  # note: this should be delay_enabled, not request_delay_enabled
+         middleware.request_delay = 0.1
+         middleware._last_req_time = time.time() - 0.05  # 50 ms ago
+
+         request = Request("https://a.com")
+         await middleware.process_request(request, spider)
+
+         mock_sleep.assert_called_once()
+         delay = mock_sleep.call_args[0][0]
          assert 0.04 <= delay <= 0.06