crawlo-1.1.9-py3-none-any.whl → crawlo-1.2.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (221)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +65 -65
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +142 -132
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +292 -292
  14. crawlo/commands/startproject.py +418 -418
  15. crawlo/commands/stats.py +188 -188
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +252 -252
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +354 -345
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +143 -136
  23. crawlo/crawler.py +1027 -1027
  24. crawlo/downloader/__init__.py +266 -266
  25. crawlo/downloader/aiohttp_downloader.py +220 -220
  26. crawlo/downloader/cffi_downloader.py +256 -256
  27. crawlo/downloader/httpx_downloader.py +259 -259
  28. crawlo/downloader/hybrid_downloader.py +213 -213
  29. crawlo/downloader/playwright_downloader.py +402 -402
  30. crawlo/downloader/selenium_downloader.py +472 -472
  31. crawlo/event.py +11 -11
  32. crawlo/exceptions.py +81 -81
  33. crawlo/extension/__init__.py +37 -37
  34. crawlo/extension/health_check.py +141 -141
  35. crawlo/extension/log_interval.py +57 -57
  36. crawlo/extension/log_stats.py +81 -81
  37. crawlo/extension/logging_extension.py +43 -43
  38. crawlo/extension/memory_monitor.py +104 -104
  39. crawlo/extension/performance_profiler.py +133 -133
  40. crawlo/extension/request_recorder.py +107 -107
  41. crawlo/filters/__init__.py +154 -154
  42. crawlo/filters/aioredis_filter.py +280 -280
  43. crawlo/filters/memory_filter.py +269 -269
  44. crawlo/items/__init__.py +23 -23
  45. crawlo/items/base.py +21 -21
  46. crawlo/items/fields.py +53 -53
  47. crawlo/items/items.py +104 -104
  48. crawlo/middleware/__init__.py +21 -21
  49. crawlo/middleware/default_header.py +132 -32
  50. crawlo/middleware/download_delay.py +105 -28
  51. crawlo/middleware/middleware_manager.py +135 -135
  52. crawlo/middleware/offsite.py +116 -0
  53. crawlo/middleware/proxy.py +366 -272
  54. crawlo/middleware/request_ignore.py +88 -30
  55. crawlo/middleware/response_code.py +164 -18
  56. crawlo/middleware/response_filter.py +138 -26
  57. crawlo/middleware/retry.py +124 -124
  58. crawlo/mode_manager.py +211 -211
  59. crawlo/network/__init__.py +21 -21
  60. crawlo/network/request.py +338 -338
  61. crawlo/network/response.py +359 -359
  62. crawlo/pipelines/__init__.py +21 -21
  63. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  64. crawlo/pipelines/console_pipeline.py +39 -39
  65. crawlo/pipelines/csv_pipeline.py +316 -316
  66. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  67. crawlo/pipelines/json_pipeline.py +218 -218
  68. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  69. crawlo/pipelines/mongo_pipeline.py +131 -131
  70. crawlo/pipelines/mysql_pipeline.py +316 -316
  71. crawlo/pipelines/pipeline_manager.py +61 -61
  72. crawlo/pipelines/redis_dedup_pipeline.py +167 -167
  73. crawlo/project.py +187 -187
  74. crawlo/queue/pqueue.py +37 -37
  75. crawlo/queue/queue_manager.py +337 -334
  76. crawlo/queue/redis_priority_queue.py +298 -298
  77. crawlo/settings/__init__.py +7 -7
  78. crawlo/settings/default_settings.py +226 -219
  79. crawlo/settings/setting_manager.py +122 -122
  80. crawlo/spider/__init__.py +639 -639
  81. crawlo/stats_collector.py +59 -59
  82. crawlo/subscriber.py +130 -130
  83. crawlo/task_manager.py +30 -30
  84. crawlo/templates/crawlo.cfg.tmpl +10 -10
  85. crawlo/templates/project/__init__.py.tmpl +3 -3
  86. crawlo/templates/project/items.py.tmpl +17 -17
  87. crawlo/templates/project/middlewares.py.tmpl +118 -109
  88. crawlo/templates/project/pipelines.py.tmpl +96 -96
  89. crawlo/templates/project/run.py.tmpl +45 -45
  90. crawlo/templates/project/settings.py.tmpl +327 -326
  91. crawlo/templates/project/settings_distributed.py.tmpl +119 -119
  92. crawlo/templates/project/settings_gentle.py.tmpl +94 -94
  93. crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
  94. crawlo/templates/project/settings_simple.py.tmpl +68 -68
  95. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  96. crawlo/templates/spider/spider.py.tmpl +143 -141
  97. crawlo/tools/__init__.py +182 -182
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/tools/data_validator.py +180 -180
  101. crawlo/tools/date_tools.py +35 -35
  102. crawlo/tools/distributed_coordinator.py +386 -386
  103. crawlo/tools/retry_mechanism.py +220 -220
  104. crawlo/tools/scenario_adapter.py +262 -262
  105. crawlo/utils/__init__.py +35 -35
  106. crawlo/utils/batch_processor.py +260 -260
  107. crawlo/utils/controlled_spider_mixin.py +439 -439
  108. crawlo/utils/date_tools.py +290 -290
  109. crawlo/utils/db_helper.py +343 -343
  110. crawlo/utils/enhanced_error_handler.py +359 -359
  111. crawlo/utils/env_config.py +105 -105
  112. crawlo/utils/error_handler.py +125 -125
  113. crawlo/utils/func_tools.py +82 -82
  114. crawlo/utils/large_scale_config.py +286 -286
  115. crawlo/utils/large_scale_helper.py +343 -343
  116. crawlo/utils/log.py +128 -128
  117. crawlo/utils/performance_monitor.py +284 -284
  118. crawlo/utils/queue_helper.py +175 -175
  119. crawlo/utils/redis_connection_pool.py +334 -334
  120. crawlo/utils/redis_key_validator.py +199 -199
  121. crawlo/utils/request.py +267 -267
  122. crawlo/utils/request_serializer.py +219 -219
  123. crawlo/utils/spider_loader.py +62 -62
  124. crawlo/utils/system.py +11 -11
  125. crawlo/utils/tools.py +4 -4
  126. crawlo/utils/url.py +39 -39
  127. crawlo-1.2.1.dist-info/METADATA +692 -0
  128. crawlo-1.2.1.dist-info/RECORD +220 -0
  129. examples/__init__.py +7 -7
  130. examples/aiohttp_settings.py +42 -0
  131. examples/curl_cffi_settings.py +41 -0
  132. examples/default_header_middleware_example.py +107 -0
  133. examples/default_header_spider_example.py +129 -0
  134. examples/download_delay_middleware_example.py +160 -0
  135. examples/httpx_settings.py +42 -0
  136. examples/multi_downloader_proxy_example.py +81 -0
  137. examples/offsite_middleware_example.py +55 -0
  138. examples/offsite_spider_example.py +107 -0
  139. examples/proxy_spider_example.py +166 -0
  140. examples/request_ignore_middleware_example.py +51 -0
  141. examples/request_ignore_spider_example.py +99 -0
  142. examples/response_code_middleware_example.py +52 -0
  143. examples/response_filter_middleware_example.py +67 -0
  144. examples/tong_hua_shun_settings.py +62 -0
  145. examples/tong_hua_shun_spider.py +170 -0
  146. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  147. tests/__init__.py +7 -7
  148. tests/advanced_tools_example.py +275 -275
  149. tests/authenticated_proxy_example.py +236 -236
  150. tests/cleaners_example.py +160 -160
  151. tests/config_validation_demo.py +102 -102
  152. tests/controlled_spider_example.py +205 -205
  153. tests/date_tools_example.py +180 -180
  154. tests/dynamic_loading_example.py +523 -523
  155. tests/dynamic_loading_test.py +104 -104
  156. tests/env_config_example.py +133 -133
  157. tests/error_handling_example.py +171 -171
  158. tests/redis_key_validation_demo.py +130 -130
  159. tests/response_improvements_example.py +144 -144
  160. tests/test_advanced_tools.py +148 -148
  161. tests/test_all_redis_key_configs.py +145 -145
  162. tests/test_authenticated_proxy.py +141 -141
  163. tests/test_cleaners.py +54 -54
  164. tests/test_comprehensive.py +146 -146
  165. tests/test_config_validator.py +193 -193
  166. tests/test_crawlo_proxy_integration.py +173 -0
  167. tests/test_date_tools.py +123 -123
  168. tests/test_default_header_middleware.py +159 -0
  169. tests/test_double_crawlo_fix.py +207 -207
  170. tests/test_double_crawlo_fix_simple.py +124 -124
  171. tests/test_download_delay_middleware.py +222 -0
  172. tests/test_downloader_proxy_compatibility.py +269 -0
  173. tests/test_dynamic_downloaders_proxy.py +124 -124
  174. tests/test_dynamic_proxy.py +92 -92
  175. tests/test_dynamic_proxy_config.py +146 -146
  176. tests/test_dynamic_proxy_real.py +109 -109
  177. tests/test_edge_cases.py +303 -303
  178. tests/test_enhanced_error_handler.py +270 -270
  179. tests/test_env_config.py +121 -121
  180. tests/test_error_handler_compatibility.py +112 -112
  181. tests/test_final_validation.py +153 -153
  182. tests/test_framework_env_usage.py +103 -103
  183. tests/test_integration.py +356 -356
  184. tests/test_item_dedup_redis_key.py +122 -122
  185. tests/test_offsite_middleware.py +222 -0
  186. tests/test_parsel.py +29 -29
  187. tests/test_performance.py +327 -327
  188. tests/test_proxy_api.py +265 -0
  189. tests/test_proxy_health_check.py +32 -32
  190. tests/test_proxy_middleware.py +122 -0
  191. tests/test_proxy_middleware_enhanced.py +217 -0
  192. tests/test_proxy_middleware_integration.py +136 -136
  193. tests/test_proxy_providers.py +56 -56
  194. tests/test_proxy_stats.py +19 -19
  195. tests/test_proxy_strategies.py +59 -59
  196. tests/test_queue_manager_double_crawlo.py +174 -231
  197. tests/test_queue_manager_redis_key.py +176 -176
  198. tests/test_real_scenario_proxy.py +196 -0
  199. tests/test_redis_config.py +28 -28
  200. tests/test_redis_connection_pool.py +294 -294
  201. tests/test_redis_key_naming.py +181 -181
  202. tests/test_redis_key_validator.py +123 -123
  203. tests/test_redis_queue.py +224 -224
  204. tests/test_request_ignore_middleware.py +183 -0
  205. tests/test_request_serialization.py +70 -70
  206. tests/test_response_code_middleware.py +350 -0
  207. tests/test_response_filter_middleware.py +428 -0
  208. tests/test_response_improvements.py +152 -152
  209. tests/test_retry_middleware.py +242 -0
  210. tests/test_scheduler.py +241 -241
  211. tests/test_simple_response.py +61 -61
  212. tests/test_telecom_spider_redis_key.py +205 -205
  213. tests/test_template_content.py +87 -87
  214. tests/test_template_redis_key.py +134 -134
  215. tests/test_tools.py +153 -153
  216. tests/tools_example.py +257 -257
  217. crawlo-1.1.9.dist-info/METADATA +0 -626
  218. crawlo-1.1.9.dist-info/RECORD +0 -190
  219. {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/WHEEL +0 -0
  220. {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/entry_points.txt +0 -0
  221. {crawlo-1.1.9.dist-info → crawlo-1.2.1.dist-info}/top_level.txt +0 -0
tests/test_downloader_proxy_compatibility.py
@@ -0,0 +1,269 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Test the compatibility of the proxy middleware with the three main downloaders in the Crawlo framework:
+ - aiohttp_downloader
+ - httpx_downloader
+ - curl_cffi_downloader
+ """
+
+ import asyncio
+ import sys
+ import os
+
+ # Add the project root directory to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.downloader.aiohttp_downloader import AioHttpDownloader
+ from crawlo.downloader.httpx_downloader import HttpXDownloader
+ from crawlo.downloader.cffi_downloader import CurlCffiDownloader
+ from crawlo.middleware.proxy import ProxyMiddleware
+ from crawlo.network.request import Request
+ from crawlo.settings.setting_manager import SettingManager
+
+
+ class MockSpider:
+     """Mock spider class"""
+     def __init__(self, crawler):
+         self.crawler = crawler
+
+
+ class MockCrawler:
+     """Mock crawler instance"""
+     def __init__(self, settings):
+         self.settings = settings
+         self.spider = MockSpider(self)  # Attach a spider attribute
+
+
+ def create_test_settings(proxy_url=None):
+     """Create test settings"""
+     settings = SettingManager()
+     settings.set("LOG_LEVEL", "DEBUG")
+     settings.set("DOWNLOAD_TIMEOUT", 30)
+     settings.set("CONNECTION_POOL_LIMIT", 100)
+     settings.set("CONNECTION_POOL_LIMIT_PER_HOST", 20)
+     settings.set("DOWNLOAD_MAXSIZE", 10 * 1024 * 1024)
+     settings.set("VERIFY_SSL", True)
+
+     # Proxy-related settings
+     if proxy_url:
+         settings.set("PROXY_ENABLED", True)
+         settings.set("PROXY_API_URL", proxy_url)
+         settings.set("PROXY_REFRESH_INTERVAL", 60)
+         settings.set("PROXY_POOL_SIZE", 5)
+     else:
+         settings.set("PROXY_ENABLED", False)
+
+     return settings
+
+
+ async def test_aiohttp_with_proxy(proxy_url, target_url):
+     """Test the aiohttp downloader's compatibility with the proxy"""
+     print(f"\n=== Testing the aiohttp downloader with a proxy ===")
+     print(f"Proxy URL: {proxy_url}")
+     print(f"Target URL: {target_url}")
+
+     try:
+         # Create settings
+         settings = create_test_settings(proxy_url)
+         crawler = MockCrawler(settings)
+
+         # Create the downloader
+         downloader = AioHttpDownloader(crawler)
+         downloader.open()
+
+         # Create the proxy middleware
+         proxy_middleware = ProxyMiddleware(settings, "DEBUG")
+
+         # Create a request
+         request = Request(url=target_url)
+
+         # Create a mock spider
+         spider = MockSpider(crawler)
+
+         # Run the request through the proxy middleware
+         await proxy_middleware.process_request(request, spider)
+
+         if request.proxy:
+             print(f"✓ Proxy set successfully: {request.proxy}")
+         else:
+             print("⚠ Proxy not set")
+
+         # Attempt the download
+         try:
+             response = await downloader.download(request)
+             if response and response.status_code:
+                 print(f"✓ Download succeeded, status code: {response.status_code}")
+                 # Only check the status code, to avoid encoding issues
+                 return True
+             else:
+                 print("✗ Download failed, empty response")
+                 return False
+         except Exception as e:
+             print(f"✗ Error during download: {e}")
+             return False
+
+     except Exception as e:
+         print(f"✗ Error testing aiohttp: {e}")
+         return False
+     finally:
+         # Clean up resources
+         try:
+             await downloader.close()
+             await proxy_middleware.close()
+         except:
+             pass
+
+
+ async def test_httpx_with_proxy_async(proxy_url, target_url):
+     """Test the httpx downloader's compatibility with the proxy"""
+     print(f"\n=== Testing the httpx downloader with a proxy ===")
+     print(f"Proxy URL: {proxy_url}")
+     print(f"Target URL: {target_url}")
+
+     try:
+         # Create settings
+         settings = create_test_settings(proxy_url)
+         crawler = MockCrawler(settings)
+
+         # Create the downloader
+         downloader = HttpXDownloader(crawler)
+         downloader.open()
+
+         # Create the proxy middleware
+         proxy_middleware = ProxyMiddleware(settings, "DEBUG")
+
+         # Create a request
+         request = Request(url=target_url)
+
+         # Create a mock spider
+         spider = MockSpider(crawler)
+
+         # Run the request through the proxy middleware
+         await proxy_middleware.process_request(request, spider)
+
+         if request.proxy:
+             print(f"✓ Proxy set successfully: {request.proxy}")
+         else:
+             print("⚠ Proxy not set")
+
+         # Attempt the download
+         try:
+             response = await downloader.download(request)
+             if response and response.status_code:
+                 print(f"✓ Download succeeded, status code: {response.status_code}")
+                 # Only check the status code, to avoid encoding issues
+                 return True
+             else:
+                 print("✗ Download failed, empty response")
+                 return False
+         except Exception as e:
+             print(f"✗ Error during download: {e}")
+             return False
+
+     except Exception as e:
+         print(f"✗ Error testing httpx: {e}")
+         return False
+     finally:
+         # Clean up resources
+         try:
+             await downloader.close()
+             await proxy_middleware.close()
+         except:
+             pass
+
+
+ async def test_curl_cffi_with_proxy_async(proxy_url, target_url):
+     """Test the curl-cffi downloader's compatibility with the proxy"""
+     print(f"\n=== Testing the curl-cffi downloader with a proxy ===")
+     print(f"Proxy URL: {proxy_url}")
+     print(f"Target URL: {target_url}")
+
+     try:
+         # Create settings
+         settings = create_test_settings(proxy_url)
+         crawler = MockCrawler(settings)
+
+         # Create the downloader
+         downloader = CurlCffiDownloader(crawler)
+         downloader.open()
+
+         # Create the proxy middleware
+         proxy_middleware = ProxyMiddleware(settings, "DEBUG")
+
+         # Create a request
+         request = Request(url=target_url)
+
+         # Create a mock spider
+         spider = MockSpider(crawler)
+
+         # Run the request through the proxy middleware
+         await proxy_middleware.process_request(request, spider)
+
+         if request.proxy:
+             print(f"✓ Proxy set successfully: {request.proxy}")
+         else:
+             print("⚠ Proxy not set")
+
+         # Attempt the download
+         try:
+             response = await downloader.download(request)
+             if response and response.status_code:
+                 print(f"✓ Download succeeded, status code: {response.status_code}")
+                 # Only check the status code, to avoid encoding issues
+                 return True
+             else:
+                 print("✗ Download failed, empty response")
+                 return False
+         except Exception as e:
+             print(f"✗ Error during download: {e}")
+             return False
+
+     except Exception as e:
+         print(f"✗ Error testing curl-cffi: {e}")
+         return False
+     finally:
+         # Clean up resources
+         try:
+             await downloader.close()
+             await proxy_middleware.close()
+         except:
+             pass
+
+
+ async def main():
+     """Main test function"""
+     print("Starting compatibility tests of the proxy middleware with the three downloaders...")
+
+     # Use a test proxy URL (a public test proxy is used here)
+     # Note: in real use, you must replace this with a valid proxy URL
+     test_proxy_url = "http://test.proxy.api:8080/proxy/getitem/"
+     test_target_url = "https://httpbin.org/ip"  # A test site that returns IP information
+
+     print(f"Test proxy API: {test_proxy_url}")
+     print(f"Test target URL: {test_target_url}")
+
+     # Test the aiohttp downloader
+     aiohttp_result = await test_aiohttp_with_proxy(test_proxy_url, test_target_url)
+
+     # Test the httpx downloader
+     httpx_result = await test_httpx_with_proxy_async(test_proxy_url, test_target_url)
+
+     # Test the curl-cffi downloader
+     curl_cffi_result = await test_curl_cffi_with_proxy_async(test_proxy_url, test_target_url)
+
+     # Summarize the results
+     print("\n" + "="*50)
+     print("Test result summary:")
+     print(f"aiohttp downloader: {'✓ passed' if aiohttp_result else '✗ failed'}")
+     print(f"httpx downloader: {'✓ passed' if httpx_result else '✗ failed'}")
+     print(f"curl-cffi downloader: {'✓ passed' if curl_cffi_result else '✗ failed'}")
+
+     overall_result = all([aiohttp_result, httpx_result, curl_cffi_result])
+     print(f"\nOverall result: {'✓ all downloaders work with the proxy middleware' if overall_result else '✗ some downloaders are incompatible'}")
+
+     return overall_result
+
+
+ if __name__ == "__main__":
+     asyncio.run(main())
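
The three test coroutines in this file are identical apart from the downloader class they construct. For illustration only (this helper is not part of the package), the same flow could be driven by one parametrized coroutine, assuming the constructor and method signatures shown above:

async def run_proxy_test(downloader_cls, proxy_url, target_url):
    # Hypothetical helper mirroring the per-downloader tests above.
    settings = create_test_settings(proxy_url)
    crawler = MockCrawler(settings)
    downloader = downloader_cls(crawler)
    downloader.open()
    proxy_middleware = ProxyMiddleware(settings, "DEBUG")
    request = Request(url=target_url)
    try:
        # Let the middleware attach a proxy, then attempt the download.
        await proxy_middleware.process_request(request, MockSpider(crawler))
        response = await downloader.download(request)
        return bool(response and response.status_code)
    finally:
        await downloader.close()
        await proxy_middleware.close()

# Usage sketch: run the same check against all three downloaders.
# for cls in (AioHttpDownloader, HttpXDownloader, CurlCffiDownloader):
#     print(cls.__name__, await run_proxy_test(cls, test_proxy_url, test_target_url))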
tests/test_dynamic_downloaders_proxy.py
@@ -1,125 +1,125 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- Test the proxy functionality of the dynamic downloaders (Selenium and Playwright)
- """
-
- import asyncio
- from crawlo.tools import AuthenticatedProxy
-
-
- def test_selenium_proxy_configuration():
-     """Test the Selenium downloader's proxy configuration"""
-     print("=== Testing the Selenium downloader's proxy configuration ===")
-
-     # Proxy configuration
-     proxy_config = {
-         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
-         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
-     }
-
-     # Create the proxy object
-     proxy_url = proxy_config["http"]
-     proxy = AuthenticatedProxy(proxy_url)
-
-     print(f"Original proxy URL: {proxy_url}")
-     print(f"Clean URL: {proxy.clean_url}")
-     print(f"Auth credentials: {proxy.get_auth_credentials()}")
-
-     # How Selenium configures a proxy
-     print(f"\nSelenium proxy configuration:")
-     print(f" 1. Configure in the spider settings:")
-     print(f"    settings = {{")
-     print(f"        'SELENIUM_PROXY': '{proxy.clean_url}',")
-     print(f"    }}")
-
-     # Authenticated proxies need special handling
-     if proxy.username and proxy.password:
-         print(f"\n 2. Handling an authenticated proxy:")
-         print(f"    - Username: {proxy.username}")
-         print(f"    - Password: {proxy.password}")
-         print(f"    - Auth header: {proxy.get_auth_header()}")
-         print(f"    - Note: Selenium must handle authentication via an extension or other means")
-
-     print("\nSelenium test complete!")
-
-
- async def test_playwright_proxy_configuration():
-     """Test the Playwright downloader's proxy configuration"""
-     print("\n=== Testing the Playwright downloader's proxy configuration ===")
-
-     # Proxy configuration
-     proxy_config = {
-         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
-         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
-     }
-
-     # Create the proxy object
-     proxy_url = proxy_config["http"]
-     proxy = AuthenticatedProxy(proxy_url)
-
-     print(f"Original proxy URL: {proxy_url}")
-     print(f"Clean URL: {proxy.clean_url}")
-     print(f"Auth credentials: {proxy.get_auth_credentials()}")
-
-     # How Playwright configures a proxy
-     print(f"\nPlaywright proxy configuration:")
-     print(f" 1. Simple proxy configuration:")
-     print(f"    settings = {{")
-     print(f"        'PLAYWRIGHT_PROXY': '{proxy.clean_url}',")
-     print(f"    }}")
-
-     # For an authenticated proxy, Playwright can include the credentials directly in the proxy configuration
-     if proxy.username and proxy.password:
-         print(f"\n 2. Authenticated proxy configuration:")
-         print(f"    settings = {{")
-         print(f"        'PLAYWRIGHT_PROXY': {{")
-         print(f"            'server': '{proxy.clean_url}',")
-         print(f"            'username': '{proxy.username}',")
-         print(f"            'password': '{proxy.password}'")
-         print(f"        }}")
-         print(f"    }}")
-
-     print("\nPlaywright test complete!")
-
-
- def show_proxy_usage_examples():
-     """Show proxy usage examples"""
-     print("\n=== Proxy usage examples ===")
-
-     # Example proxy configurations
-     proxy_examples = [
-         "http://username:password@proxy.example.com:8080",  # Authenticated HTTP proxy
-         "https://user:pass@secure-proxy.example.com:443",  # Authenticated HTTPS proxy
-         "http://proxy.example.com:8080",  # Proxy without authentication
-         "socks5://username:password@socks-proxy.example.com:1080"  # SOCKS5 proxy
-     ]
-
-     for i, proxy_url in enumerate(proxy_examples, 1):
-         print(f"\nExample {i}: {proxy_url}")
-         try:
-             proxy = AuthenticatedProxy(proxy_url)
-             print(f"  Clean URL: {proxy.clean_url}")
-             print(f"  Username: {proxy.username or 'none'}")
-             print(f"  Password: {proxy.password or 'none'}")
-             print(f"  Valid: {proxy.is_valid()}")
-             if proxy.username and proxy.password:
-                 print(f"  Auth header: {proxy.get_auth_header()}")
-         except Exception as e:
-             print(f"  Error: {e}")
-
-
- async def main():
-     """Main test function"""
-     print("Starting tests of the dynamic downloaders' proxy functionality...\n")
-
-     # Test each downloader
-     test_selenium_proxy_configuration()
-     await test_playwright_proxy_configuration()
-     show_proxy_usage_examples()
-
-     print("\nAll tests complete!")
-
-
- if __name__ == "__main__":
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ Test the proxy functionality of the dynamic downloaders (Selenium and Playwright)
+ """
+
+ import asyncio
+ from crawlo.tools import AuthenticatedProxy
+
+
+ def test_selenium_proxy_configuration():
+     """Test the Selenium downloader's proxy configuration"""
+     print("=== Testing the Selenium downloader's proxy configuration ===")
+
+     # Proxy configuration
+     proxy_config = {
+         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
+         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
+     }
+
+     # Create the proxy object
+     proxy_url = proxy_config["http"]
+     proxy = AuthenticatedProxy(proxy_url)
+
+     print(f"Original proxy URL: {proxy_url}")
+     print(f"Clean URL: {proxy.clean_url}")
+     print(f"Auth credentials: {proxy.get_auth_credentials()}")
+
+     # How Selenium configures a proxy
+     print(f"\nSelenium proxy configuration:")
+     print(f" 1. Configure in the spider settings:")
+     print(f"    settings = {{")
+     print(f"        'SELENIUM_PROXY': '{proxy.clean_url}',")
+     print(f"    }}")
+
+     # Authenticated proxies need special handling
+     if proxy.username and proxy.password:
+         print(f"\n 2. Handling an authenticated proxy:")
+         print(f"    - Username: {proxy.username}")
+         print(f"    - Password: {proxy.password}")
+         print(f"    - Auth header: {proxy.get_auth_header()}")
+         print(f"    - Note: Selenium must handle authentication via an extension or other means")
+
+     print("\nSelenium test complete!")
+
+
+ async def test_playwright_proxy_configuration():
+     """Test the Playwright downloader's proxy configuration"""
+     print("\n=== Testing the Playwright downloader's proxy configuration ===")
+
+     # Proxy configuration
+     proxy_config = {
+         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
+         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
+     }
+
+     # Create the proxy object
+     proxy_url = proxy_config["http"]
+     proxy = AuthenticatedProxy(proxy_url)
+
+     print(f"Original proxy URL: {proxy_url}")
+     print(f"Clean URL: {proxy.clean_url}")
+     print(f"Auth credentials: {proxy.get_auth_credentials()}")
+
+     # How Playwright configures a proxy
+     print(f"\nPlaywright proxy configuration:")
+     print(f" 1. Simple proxy configuration:")
+     print(f"    settings = {{")
+     print(f"        'PLAYWRIGHT_PROXY': '{proxy.clean_url}',")
+     print(f"    }}")
+
+     # For an authenticated proxy, Playwright can include the credentials directly in the proxy configuration
+     if proxy.username and proxy.password:
+         print(f"\n 2. Authenticated proxy configuration:")
+         print(f"    settings = {{")
+         print(f"        'PLAYWRIGHT_PROXY': {{")
+         print(f"            'server': '{proxy.clean_url}',")
+         print(f"            'username': '{proxy.username}',")
+         print(f"            'password': '{proxy.password}'")
+         print(f"        }}")
+         print(f"    }}")
+
+     print("\nPlaywright test complete!")
+
+
+ def show_proxy_usage_examples():
+     """Show proxy usage examples"""
+     print("\n=== Proxy usage examples ===")
+
+     # Example proxy configurations
+     proxy_examples = [
+         "http://username:password@proxy.example.com:8080",  # Authenticated HTTP proxy
+         "https://user:pass@secure-proxy.example.com:443",  # Authenticated HTTPS proxy
+         "http://proxy.example.com:8080",  # Proxy without authentication
+         "socks5://username:password@socks-proxy.example.com:1080"  # SOCKS5 proxy
+     ]
+
+     for i, proxy_url in enumerate(proxy_examples, 1):
+         print(f"\nExample {i}: {proxy_url}")
+         try:
+             proxy = AuthenticatedProxy(proxy_url)
+             print(f"  Clean URL: {proxy.clean_url}")
+             print(f"  Username: {proxy.username or 'none'}")
+             print(f"  Password: {proxy.password or 'none'}")
+             print(f"  Valid: {proxy.is_valid()}")
+             if proxy.username and proxy.password:
+                 print(f"  Auth header: {proxy.get_auth_header()}")
+         except Exception as e:
+             print(f"  Error: {e}")
+
+
+ async def main():
+     """Main test function"""
+     print("Starting tests of the dynamic downloaders' proxy functionality...\n")
+
+     # Test each downloader
+     test_selenium_proxy_configuration()
+     await test_playwright_proxy_configuration()
+     show_proxy_usage_examples()
+
+     print("\nAll tests complete!")
+
+
+ if __name__ == "__main__":
  asyncio.run(main())
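
For reference, a settings dictionary matching the guidance these tests print would look roughly like the sketch below. It assumes the SELENIUM_PROXY and PLAYWRIGHT_PROXY keys and the AuthenticatedProxy attributes shown in this diff, with placeholder credentials:

from crawlo.tools import AuthenticatedProxy

# Placeholder proxy URL; substitute a real endpoint and credentials.
proxy = AuthenticatedProxy("http://username:password@proxy.example.com:8080")

settings = {
    # Selenium takes the credential-free URL; authentication must be handled
    # separately (e.g. via a browser extension), as the test output notes.
    "SELENIUM_PROXY": proxy.clean_url,
    # Playwright accepts the credentials directly alongside the server.
    "PLAYWRIGHT_PROXY": {
        "server": proxy.clean_url,
        "username": proxy.username,
        "password": proxy.password,
    },
}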