crawlo 1.2.5-py3-none-any.whl → 1.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic.

Files changed (209)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +75 -88
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +138 -144
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -323
  14. crawlo/commands/startproject.py +436 -436
  15. crawlo/commands/stats.py +187 -187
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -251
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +365 -354
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +251 -143
  23. crawlo/crawler.py +1099 -1110
  24. crawlo/data/__init__.py +5 -5
  25. crawlo/data/user_agents.py +107 -107
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +228 -221
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -212
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +39 -38
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +234 -281
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -52
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +136 -135
  54. crawlo/middleware/offsite.py +114 -114
  55. crawlo/middleware/proxy.py +367 -367
  56. crawlo/middleware/request_ignore.py +86 -86
  57. crawlo/middleware/response_code.py +163 -163
  58. crawlo/middleware/response_filter.py +136 -136
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -317
  73. crawlo/pipelines/pipeline_manager.py +62 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +166 -165
  75. crawlo/project.py +314 -279
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +377 -337
  78. crawlo/queue/redis_priority_queue.py +306 -299
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +219 -217
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -129
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/settings.py.tmpl +288 -324
  92. crawlo/templates/project/settings_distributed.py.tmpl +157 -154
  93. crawlo/templates/project/settings_gentle.py.tmpl +101 -128
  94. crawlo/templates/project/settings_high_performance.py.tmpl +135 -150
  95. crawlo/templates/project/settings_simple.py.tmpl +99 -103
  96. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  97. crawlo/templates/run.py.tmpl +45 -47
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -259
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -356
  113. crawlo/utils/env_config.py +143 -106
  114. crawlo/utils/error_handler.py +123 -123
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -344
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -285
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +351 -334
  122. crawlo/utils/redis_key_validator.py +198 -198
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -218
  125. crawlo/utils/spider_loader.py +61 -61
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/METADATA +764 -764
  130. crawlo-1.2.7.dist-info/RECORD +209 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_consistency.py +81 -0
  152. tests/test_config_validator.py +193 -193
  153. tests/test_crawlo_proxy_integration.py +172 -172
  154. tests/test_date_tools.py +123 -123
  155. tests/test_default_header_middleware.py +158 -158
  156. tests/test_double_crawlo_fix.py +207 -207
  157. tests/test_double_crawlo_fix_simple.py +124 -124
  158. tests/test_download_delay_middleware.py +221 -221
  159. tests/test_downloader_proxy_compatibility.py +268 -268
  160. tests/test_dynamic_downloaders_proxy.py +124 -124
  161. tests/test_dynamic_proxy.py +92 -92
  162. tests/test_dynamic_proxy_config.py +146 -146
  163. tests/test_dynamic_proxy_real.py +109 -109
  164. tests/test_edge_cases.py +303 -303
  165. tests/test_enhanced_error_handler.py +270 -270
  166. tests/test_env_config.py +121 -121
  167. tests/test_error_handler_compatibility.py +112 -112
  168. tests/test_final_validation.py +153 -153
  169. tests/test_framework_env_usage.py +103 -103
  170. tests/test_integration.py +356 -356
  171. tests/test_item_dedup_redis_key.py +122 -122
  172. tests/test_mode_consistency.py +52 -0
  173. tests/test_offsite_middleware.py +221 -221
  174. tests/test_parsel.py +29 -29
  175. tests/test_performance.py +327 -327
  176. tests/test_proxy_api.py +264 -264
  177. tests/test_proxy_health_check.py +32 -32
  178. tests/test_proxy_middleware.py +121 -121
  179. tests/test_proxy_middleware_enhanced.py +216 -216
  180. tests/test_proxy_middleware_integration.py +136 -136
  181. tests/test_proxy_providers.py +56 -56
  182. tests/test_proxy_stats.py +19 -19
  183. tests/test_proxy_strategies.py +59 -59
  184. tests/test_queue_manager_double_crawlo.py +173 -173
  185. tests/test_queue_manager_redis_key.py +176 -176
  186. tests/test_real_scenario_proxy.py +195 -195
  187. tests/test_redis_config.py +28 -28
  188. tests/test_redis_connection_pool.py +294 -294
  189. tests/test_redis_key_naming.py +181 -181
  190. tests/test_redis_key_validator.py +123 -123
  191. tests/test_redis_queue.py +224 -224
  192. tests/test_request_ignore_middleware.py +182 -182
  193. tests/test_request_serialization.py +70 -70
  194. tests/test_response_code_middleware.py +349 -349
  195. tests/test_response_filter_middleware.py +427 -427
  196. tests/test_response_improvements.py +152 -152
  197. tests/test_retry_middleware.py +241 -241
  198. tests/test_scheduler.py +252 -241
  199. tests/test_scheduler_config_update.py +134 -0
  200. tests/test_simple_response.py +61 -61
  201. tests/test_telecom_spider_redis_key.py +205 -205
  202. tests/test_template_content.py +87 -87
  203. tests/test_template_redis_key.py +134 -134
  204. tests/test_tools.py +153 -153
  205. tests/tools_example.py +257 -257
  206. crawlo-1.2.5.dist-info/RECORD +0 -206
  207. {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/WHEEL +0 -0
  208. {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/entry_points.txt +0 -0
  209. {crawlo-1.2.5.dist-info → crawlo-1.2.7.dist-info}/top_level.txt +0 -0
@@ -1,269 +1,269 @@

The hunk removes lines 1-268 and re-adds them with line-for-line identical content; only line 269 (asyncio.run(main())) appears as unchanged context. The affected file, tests/test_downloader_proxy_compatibility.py, reads:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test the compatibility of the proxy middleware with the three main downloaders in the Crawlo framework:
- aiohttp_downloader
- httpx_downloader
- curl_cffi_downloader
"""

import asyncio
import sys
import os

# Add the project root directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from crawlo.downloader.aiohttp_downloader import AioHttpDownloader
from crawlo.downloader.httpx_downloader import HttpXDownloader
from crawlo.downloader.cffi_downloader import CurlCffiDownloader
from crawlo.middleware.proxy import ProxyMiddleware
from crawlo.network.request import Request
from crawlo.settings.setting_manager import SettingManager


class MockSpider:
    """Mock spider class"""
    def __init__(self, crawler):
        self.crawler = crawler


class MockCrawler:
    """Mock crawler instance"""
    def __init__(self, settings):
        self.settings = settings
        self.spider = MockSpider(self)  # attach a spider attribute


def create_test_settings(proxy_url=None):
    """Create test settings"""
    settings = SettingManager()
    settings.set("LOG_LEVEL", "DEBUG")
    settings.set("DOWNLOAD_TIMEOUT", 30)
    settings.set("CONNECTION_POOL_LIMIT", 100)
    settings.set("CONNECTION_POOL_LIMIT_PER_HOST", 20)
    settings.set("DOWNLOAD_MAXSIZE", 10 * 1024 * 1024)
    settings.set("VERIFY_SSL", True)

    # Proxy-related settings
    if proxy_url:
        settings.set("PROXY_ENABLED", True)
        settings.set("PROXY_API_URL", proxy_url)
        settings.set("PROXY_REFRESH_INTERVAL", 60)
        settings.set("PROXY_POOL_SIZE", 5)
    else:
        settings.set("PROXY_ENABLED", False)

    return settings


async def test_aiohttp_with_proxy(proxy_url, target_url):
    """Test the aiohttp downloader with the proxy middleware"""
    print(f"\n=== Testing aiohttp downloader with proxy ===")
    print(f"Proxy URL: {proxy_url}")
    print(f"Target URL: {target_url}")

    try:
        # Create settings
        settings = create_test_settings(proxy_url)
        crawler = MockCrawler(settings)

        # Create the downloader
        downloader = AioHttpDownloader(crawler)
        downloader.open()

        # Create the proxy middleware
        proxy_middleware = ProxyMiddleware(settings, "DEBUG")

        # Create the request
        request = Request(url=target_url)

        # Create a mock spider
        spider = MockSpider(crawler)

        # Process the request through the proxy middleware
        await proxy_middleware.process_request(request, spider)

        if request.proxy:
            print(f"✓ Proxy set successfully: {request.proxy}")
        else:
            print("⚠ Proxy not set")

        # Attempt the download
        try:
            response = await downloader.download(request)
            if response and response.status_code:
                print(f"✓ Download succeeded, status code: {response.status_code}")
                # Only check the status code to avoid encoding issues
                return True
            else:
                print("✗ Download failed: empty response")
                return False
        except Exception as e:
            print(f"✗ Error during download: {e}")
            return False

    except Exception as e:
        print(f"✗ Error while testing aiohttp: {e}")
        return False
    finally:
        # Clean up resources
        try:
            await downloader.close()
            await proxy_middleware.close()
        except:
            pass


async def test_httpx_with_proxy_async(proxy_url, target_url):
    """Test the httpx downloader with the proxy middleware"""
    print(f"\n=== Testing httpx downloader with proxy ===")
    print(f"Proxy URL: {proxy_url}")
    print(f"Target URL: {target_url}")

    try:
        # Create settings
        settings = create_test_settings(proxy_url)
        crawler = MockCrawler(settings)

        # Create the downloader
        downloader = HttpXDownloader(crawler)
        downloader.open()

        # Create the proxy middleware
        proxy_middleware = ProxyMiddleware(settings, "DEBUG")

        # Create the request
        request = Request(url=target_url)

        # Create a mock spider
        spider = MockSpider(crawler)

        # Process the request through the proxy middleware
        await proxy_middleware.process_request(request, spider)

        if request.proxy:
            print(f"✓ Proxy set successfully: {request.proxy}")
        else:
            print("⚠ Proxy not set")

        # Attempt the download
        try:
            response = await downloader.download(request)
            if response and response.status_code:
                print(f"✓ Download succeeded, status code: {response.status_code}")
                # Only check the status code to avoid encoding issues
                return True
            else:
                print("✗ Download failed: empty response")
                return False
        except Exception as e:
            print(f"✗ Error during download: {e}")
            return False

    except Exception as e:
        print(f"✗ Error while testing httpx: {e}")
        return False
    finally:
        # Clean up resources
        try:
            await downloader.close()
            await proxy_middleware.close()
        except:
            pass


async def test_curl_cffi_with_proxy_async(proxy_url, target_url):
    """Test the curl-cffi downloader with the proxy middleware"""
    print(f"\n=== Testing curl-cffi downloader with proxy ===")
    print(f"Proxy URL: {proxy_url}")
    print(f"Target URL: {target_url}")

    try:
        # Create settings
        settings = create_test_settings(proxy_url)
        crawler = MockCrawler(settings)

        # Create the downloader
        downloader = CurlCffiDownloader(crawler)
        downloader.open()

        # Create the proxy middleware
        proxy_middleware = ProxyMiddleware(settings, "DEBUG")

        # Create the request
        request = Request(url=target_url)

        # Create a mock spider
        spider = MockSpider(crawler)

        # Process the request through the proxy middleware
        await proxy_middleware.process_request(request, spider)

        if request.proxy:
            print(f"✓ Proxy set successfully: {request.proxy}")
        else:
            print("⚠ Proxy not set")

        # Attempt the download
        try:
            response = await downloader.download(request)
            if response and response.status_code:
                print(f"✓ Download succeeded, status code: {response.status_code}")
                # Only check the status code to avoid encoding issues
                return True
            else:
                print("✗ Download failed: empty response")
                return False
        except Exception as e:
            print(f"✗ Error during download: {e}")
            return False

    except Exception as e:
        print(f"✗ Error while testing curl-cffi: {e}")
        return False
    finally:
        # Clean up resources
        try:
            await downloader.close()
            await proxy_middleware.close()
        except:
            pass


async def main():
    """Main test function"""
    print("Starting compatibility tests of the proxy middleware with the three downloaders...")

    # Use a test proxy URL (a public test proxy is used here)
    # Note: in real use, replace this with a valid proxy URL
    test_proxy_url = "http://test.proxy.api:8080/proxy/getitem/"
    test_target_url = "https://httpbin.org/ip"  # a test site that returns IP information

    print(f"Test proxy API: {test_proxy_url}")
    print(f"Test target URL: {test_target_url}")

    # Test the aiohttp downloader
    aiohttp_result = await test_aiohttp_with_proxy(test_proxy_url, test_target_url)

    # Test the httpx downloader
    httpx_result = await test_httpx_with_proxy_async(test_proxy_url, test_target_url)

    # Test the curl-cffi downloader
    curl_cffi_result = await test_curl_cffi_with_proxy_async(test_proxy_url, test_target_url)

    # Summarize the results
    print("\n" + "=" * 50)
    print("Test result summary:")
    print(f"aiohttp downloader: {'✓ passed' if aiohttp_result else '✗ failed'}")
    print(f"httpx downloader: {'✓ passed' if httpx_result else '✗ failed'}")
    print(f"curl-cffi downloader: {'✓ passed' if curl_cffi_result else '✗ failed'}")

    overall_result = all([aiohttp_result, httpx_result, curl_cffi_result])
    print(f"\nOverall result: {'✓ all downloaders work with the proxy middleware' if overall_result else '✗ some downloaders are incompatible'}")

    return overall_result


if __name__ == "__main__":
    asyncio.run(main())
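
For reference, the harness above is driven by main(), which hard-codes a placeholder proxy API endpoint. A minimal sketch of invoking a single check directly with your own endpoint might look like the following; the proxy URL is a placeholder, the import path assumes the script runs from the repository root next to the tests package, and a reachable proxy API plus network access are required.

# Minimal sketch: run one downloader/proxy compatibility check on its own.
# Assumptions: executed from the repository root (so the tests package is
# importable) and the proxy API URL below is replaced with a real endpoint.
import asyncio

from tests.test_downloader_proxy_compatibility import test_aiohttp_with_proxy

ok = asyncio.run(test_aiohttp_with_proxy(
    proxy_url="http://127.0.0.1:8080/proxy/getitem/",  # placeholder proxy API
    target_url="https://httpbin.org/ip",               # echoes the caller's IP
))
print("aiohttp + proxy middleware:", "passed" if ok else "failed")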