crawlo 1.1.4-py3-none-any.whl → 1.1.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (186)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +285 -285
  13. crawlo/commands/startproject.py +300 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +309 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +280 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +206 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +56 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +166 -162
  71. crawlo/project.py +153 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +320 -307
  74. crawlo/queue/redis_priority_queue.py +277 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +216 -278
  77. crawlo/settings/setting_manager.py +99 -99
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
  126. crawlo-1.1.5.dist-info/RECORD +185 -0
  127. examples/__init__.py +7 -7
  128. tests/__init__.py +7 -7
  129. tests/advanced_tools_example.py +276 -0
  130. tests/authenticated_proxy_example.py +237 -0
  131. tests/cleaners_example.py +161 -0
  132. tests/config_validation_demo.py +103 -0
  133. {examples → tests}/controlled_spider_example.py +205 -205
  134. tests/date_tools_example.py +181 -0
  135. tests/dynamic_loading_example.py +524 -0
  136. tests/dynamic_loading_test.py +105 -0
  137. tests/env_config_example.py +134 -0
  138. tests/error_handling_example.py +172 -0
  139. tests/redis_key_validation_demo.py +131 -0
  140. tests/response_improvements_example.py +145 -0
  141. tests/test_advanced_tools.py +149 -0
  142. tests/test_all_redis_key_configs.py +146 -0
  143. tests/test_authenticated_proxy.py +142 -0
  144. tests/test_cleaners.py +55 -0
  145. tests/test_comprehensive.py +147 -0
  146. tests/test_config_validator.py +194 -0
  147. tests/test_date_tools.py +124 -0
  148. tests/test_dynamic_downloaders_proxy.py +125 -0
  149. tests/test_dynamic_proxy.py +93 -0
  150. tests/test_dynamic_proxy_config.py +147 -0
  151. tests/test_dynamic_proxy_real.py +110 -0
  152. tests/test_edge_cases.py +304 -0
  153. tests/test_enhanced_error_handler.py +271 -0
  154. tests/test_env_config.py +122 -0
  155. tests/test_error_handler_compatibility.py +113 -0
  156. tests/test_final_validation.py +153 -153
  157. tests/test_framework_env_usage.py +104 -0
  158. tests/test_integration.py +357 -0
  159. tests/test_item_dedup_redis_key.py +123 -0
  160. tests/test_parsel.py +30 -0
  161. tests/test_performance.py +328 -0
  162. tests/test_proxy_health_check.py +32 -32
  163. tests/test_proxy_middleware_integration.py +136 -136
  164. tests/test_proxy_providers.py +56 -56
  165. tests/test_proxy_stats.py +19 -19
  166. tests/test_proxy_strategies.py +59 -59
  167. tests/test_queue_manager_redis_key.py +177 -0
  168. tests/test_redis_config.py +28 -28
  169. tests/test_redis_connection_pool.py +295 -0
  170. tests/test_redis_key_naming.py +182 -0
  171. tests/test_redis_key_validator.py +124 -0
  172. tests/test_redis_queue.py +224 -224
  173. tests/test_request_serialization.py +70 -70
  174. tests/test_response_improvements.py +153 -0
  175. tests/test_scheduler.py +241 -241
  176. tests/test_simple_response.py +62 -0
  177. tests/test_telecom_spider_redis_key.py +206 -0
  178. tests/test_template_content.py +88 -0
  179. tests/test_template_redis_key.py +135 -0
  180. tests/test_tools.py +154 -0
  181. tests/tools_example.py +258 -0
  182. crawlo/core/enhanced_engine.py +0 -190
  183. crawlo-1.1.4.dist-info/RECORD +0 -117
  184. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  185. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  186. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
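The new test files below are shown in full, since they are pure additions. They pin down the behaviour of the new crawlo.cleaners and crawlo.tools helpers. For orientation, the clean_text contract they assert (strip HTML tags, then decode entities such as &nbsp; and &amp;) can be reproduced with the standard library alone. This is a minimal sketch of that observed contract, not crawlo's actual implementation:

    import html
    import re

    def clean_text_sketch(raw: str) -> str:
        # Strip HTML tags first, then decode entities such as &nbsp; and &amp;
        without_tags = re.sub(r"<[^>]+>", "", raw)
        decoded = html.unescape(without_tags)
        # &nbsp; decodes to a non-breaking space (U+00A0); normalise it to a plain space
        return decoded.replace("\u00a0", " ")

    print(clean_text_sketch("<p>这是一个&nbsp;<b>测试</b>&amp;文本</p>"))  # 这是一个 测试&文本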
tests/test_advanced_tools.py ADDED
@@ -0,0 +1,149 @@
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ Advanced tools tests
+ """
+ import unittest
+ from crawlo.tools import (
+     # Data processing tools
+     clean_text,
+     format_currency,
+     validate_email,
+     validate_url,
+     check_data_integrity,
+
+     # Retry mechanism
+     RetryMechanism,
+     should_retry,
+     exponential_backoff,
+
+     # Anti-crawler countermeasure tools
+     AntiCrawler,
+     rotate_proxy,
+     handle_captcha,
+     detect_rate_limiting,
+
+     # Distributed coordination tools
+     generate_pagination_tasks,
+     distribute_tasks,
+     DistributedCoordinator,
+     TaskDistributor,
+     DeduplicationTool
+ )
+
+
+ class TestAdvancedTools(unittest.TestCase):
+     """Test suite for the advanced tools."""
+
+     def test_data_processing_tools(self):
+         """Test the data processing tools."""
+         # Text cleaning
+         dirty_text = "<p>这是一个&nbsp;<b>测试</b>&amp;文本</p>"
+         clean_result = clean_text(dirty_text)
+         self.assertEqual(clean_result, "这是一个 测试&文本")
+
+         # Data formatting
+         price = 1234.567
+         formatted_price = format_currency(price, "¥", 2)
+         self.assertEqual(formatted_price, "¥1,234.57")
+
+         # Field validation
+         self.assertTrue(validate_email("test@example.com"))
+         self.assertFalse(validate_email("invalid-email"))
+
+         self.assertTrue(validate_url("https://example.com"))
+         self.assertFalse(validate_url("invalid-url"))
+
+         # Data integrity check
+         data = {
+             "name": "张三",
+             "email": "test@example.com",
+             "phone": "13812345678"
+         }
+         required_fields = ["name", "email", "phone"]
+         integrity_result = check_data_integrity(data, required_fields)
+         self.assertTrue(integrity_result["is_valid"])
+
+     def test_retry_mechanism(self):
+         """Test the retry mechanism."""
+         # Exponential backoff
+         delay = exponential_backoff(0)
+         self.assertGreater(delay, 0)
+
+         # Whether a retry should happen
+         self.assertTrue(should_retry(status_code=500))
+         self.assertTrue(should_retry(exception=ConnectionError()))
+         self.assertFalse(should_retry(status_code=200))
+
+     def test_anti_crawler_tools(self):
+         """Test the anti-crawler countermeasure tools."""
+         anti_crawler = AntiCrawler()
+
+         # Random User-Agent
+         user_agent = anti_crawler.get_random_user_agent()
+         self.assertIsInstance(user_agent, str)
+         self.assertGreater(len(user_agent), 0)
+
+         # Proxy rotation
+         proxy = anti_crawler.rotate_proxy()
+         self.assertIsInstance(proxy, dict)
+
+         # Captcha detection
+         self.assertTrue(anti_crawler.handle_captcha("请输入验证码进行验证"))
+         self.assertFalse(anti_crawler.handle_captcha("正常页面内容"))
+
+         # Rate-limit detection
+         self.assertTrue(anti_crawler.detect_rate_limiting(429, {}))
+         self.assertFalse(anti_crawler.detect_rate_limiting(200, {}))
+
+     def test_distributed_coordinator_tools(self):
+         """Test the distributed coordination tools."""
+         # Task distributor
+         distributor = TaskDistributor()
+
+         # Pagination task generation
+         base_url = "https://example.com/products"
+         pagination_tasks = distributor.generate_pagination_tasks(base_url, 1, 5)
+         self.assertEqual(len(pagination_tasks), 5)
+
+         # Task distribution
+         tasks = list(range(1, 21))  # 20 tasks
+         distributed = distributor.distribute_tasks(tasks, 4)  # spread across 4 workers
+         self.assertEqual(len(distributed), 4)
+         self.assertEqual(sum(len(worker_tasks) for worker_tasks in distributed), 20)
+
+         # Deduplication tool
+         dedup_tool = DeduplicationTool()
+
+         # Fingerprint generation
+         fingerprint1 = dedup_tool.generate_fingerprint({"name": "test", "value": 123})
+         fingerprint2 = dedup_tool.generate_fingerprint({"name": "test", "value": 123})
+         self.assertEqual(fingerprint1, fingerprint2)
+
+         # Deduplication behaviour
+         self.assertFalse(dedup_tool.is_duplicate({"name": "test", "value": 123}))
+         self.assertTrue(dedup_tool.add_to_dedup({"name": "test", "value": 123}))
+         self.assertTrue(dedup_tool.is_duplicate({"name": "test", "value": 123}))
+         self.assertFalse(dedup_tool.add_to_dedup({"name": "test", "value": 123}))
+
+         # Distributed coordinator
+         coordinator = DistributedCoordinator()
+
+         # Task ID generation
+         task_id = coordinator.generate_task_id("https://example.com", "test_spider")
+         self.assertIsInstance(task_id, str)
+         self.assertEqual(len(task_id), 32)  # length of an MD5 hex digest
+
+         # Pagination task generation
+         pagination_tasks = coordinator.generate_pagination_tasks("https://example.com/products", 1, 5)
+         self.assertEqual(len(pagination_tasks), 5)
+
+         # Task distribution
+         tasks = list(range(1, 21))  # 20 tasks
+         distributed = coordinator.distribute_tasks(tasks, 4)  # spread across 4 workers
+         self.assertEqual(len(distributed), 4)
+
+
+ if __name__ == '__main__':
+     unittest.main()
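The DeduplicationTool asserts above imply a specific contract: fingerprints are deterministic for equal dicts, is_duplicate only reports items previously recorded, and add_to_dedup returns True exactly once per fingerprint. A minimal in-memory sketch that satisfies those asserts (crawlo's real class may differ, e.g. in how it serialises or hashes) could look like:

    import hashlib
    import json

    class DeduplicationToolSketch:
        """Illustration of the contract the asserts above imply, not crawlo's class."""

        def __init__(self):
            self._seen = set()

        def generate_fingerprint(self, item: dict) -> str:
            # Serialise with sorted keys so equal dicts always hash identically
            payload = json.dumps(item, sort_keys=True, ensure_ascii=False)
            return hashlib.md5(payload.encode("utf-8")).hexdigest()

        def is_duplicate(self, item: dict) -> bool:
            return self.generate_fingerprint(item) in self._seen

        def add_to_dedup(self, item: dict) -> bool:
            # True if the item was newly recorded, False if it was already known
            fp = self.generate_fingerprint(item)
            if fp in self._seen:
                return False
            self._seen.add(fp)
            return True

An MD5 hex digest is also consistent with the coordinator assert above, which expects generate_task_id to return a 32-character string.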
tests/test_all_redis_key_configs.py ADDED
@@ -0,0 +1,146 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Test script for all Redis key configurations.
+ Verifies that every configuration file follows the new Redis key naming convention.
+ """
+ import sys
+ import os
+ import re
+
+ # Add the project root to the path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+
+ def test_all_redis_key_configs():
+     """Test all Redis key configurations."""
+     print("🔍 Testing all Redis key configurations...")
+
+     try:
+         # Check the example project configuration files
+         example_projects = [
+             "examples/books_distributed/books_distributed/settings.py",
+             "examples/api_data_collection/api_data_collection/settings.py",
+             "examples/telecom_licenses_distributed/telecom_licenses_distributed/settings.py"
+         ]
+
+         for project_config in example_projects:
+             print(f"  Checking {project_config}...")
+             if not os.path.exists(project_config):
+                 print(f"❌ Configuration file not found: {project_config}")
+                 return False
+
+             with open(project_config, 'r', encoding='utf-8') as f:
+                 content = f.read()
+
+             # Verify the legacy REDIS_KEY setting has been removed
+             if re.search(r'REDIS_KEY\s*=', content) and 'crawlo:{PROJECT_NAME}:filter:fingerprint' not in content:
+                 print(f"❌ Legacy REDIS_KEY setting still present in {project_config}")
+                 return False
+
+             # Verify the new comment has been added
+             if 'crawlo:{PROJECT_NAME}:filter:fingerprint' not in content:
+                 print(f"❌ {project_config} is missing the new Redis key naming convention comment")
+                 return False
+
+             print(f"  ✅ {project_config} follows the new Redis key naming convention")
+
+         # Check the template file
+         template_file = "crawlo/templates/project/settings.py.tmpl"
+         print(f"  Checking {template_file}...")
+         if not os.path.exists(template_file):
+             print(f"❌ Template file not found: {template_file}")
+             return False
+
+         with open(template_file, 'r', encoding='utf-8') as f:
+             template_content = f.read()
+
+         # Verify the legacy REDIS_KEY setting has been removed
+         if "REDIS_KEY = f'{{project_name}}:fingerprint'" in template_content:
+             print("❌ Legacy REDIS_KEY setting still present in the template file")
+             return False
+
+         # Verify the new comments have been added
+         if '# crawlo:{project_name}:filter:fingerprint (请求去重)' not in template_content:
+             print("❌ Template file is missing the request-dedup Redis key naming convention comment")
+             return False
+
+         if '# crawlo:{project_name}:item:fingerprint (数据项去重)' not in template_content:
+             print("❌ Template file is missing the item-dedup Redis key naming convention comment")
+             return False
+
+         print(f"  ✅ {template_file} follows the new Redis key naming convention")
+
+         # Check mode_manager.py
+         mode_manager_file = "crawlo/mode_manager.py"
+         print(f"  Checking {mode_manager_file}...")
+         if not os.path.exists(mode_manager_file):
+             print(f"❌ File not found: {mode_manager_file}")
+             return False
+
+         with open(mode_manager_file, 'r', encoding='utf-8') as f:
+             mode_manager_content = f.read()
+
+         # Verify the legacy REDIS_KEY setting has been removed
+         if "'REDIS_KEY': f'{project_name}:fingerprint'" in mode_manager_content:
+             print("❌ Legacy REDIS_KEY setting still present in mode_manager.py")
+             return False
+
+         # Verify the new comment has been added
+         if 'crawlo:{project_name}:filter:fingerprint (请求去重)' not in mode_manager_content:
+             print("❌ mode_manager.py is missing the new Redis key naming convention comment")
+             return False
+
+         print(f"  ✅ {mode_manager_file} follows the new Redis key naming convention")
+
+         # Check the default settings file
+         default_settings_file = "crawlo/settings/default_settings.py"
+         print(f"  Checking {default_settings_file}...")
+         if not os.path.exists(default_settings_file):
+             print(f"❌ File not found: {default_settings_file}")
+             return False
+
+         with open(default_settings_file, 'r', encoding='utf-8') as f:
+             default_settings_content = f.read()
+
+         # Verify the legacy REDIS_KEY setting has been removed
+         if re.search(r'REDIS_KEY\s*=\s*.*fingerprint', default_settings_content):
+             print("❌ Legacy REDIS_KEY setting still present in the default settings file")
+             return False
+
+         print(f"  ✅ {default_settings_file} follows the new Redis key naming convention")
+
+         print("✅ All Redis key configuration tests passed!")
+         return True
+
+     except Exception as e:
+         print(f"❌ Error during testing: {e}")
+         return False
+
+
+ def main():
+     """Main test entry point."""
+     print("🚀 Starting all Redis key configuration tests...")
+     print("=" * 50)
+
+     try:
+         success = test_all_redis_key_configs()
+
+         print("=" * 50)
+         if success:
+             print("🎉 All tests passed! Every configuration file follows the new Redis key naming convention")
+         else:
+             print("❌ Tests failed, please check the configuration files")
+             return 1
+
+     except Exception as e:
+         print("=" * 50)
+         print(f"❌ Exception during testing: {e}")
+         return 1
+
+     return 0
+
+
+ if __name__ == "__main__":
+     exit_code = main()
+     sys.exit(exit_code)
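The convention this script enforces is a shared crawlo:{project_name}:{component}:{suffix} layout, with filter:fingerprint used for request deduplication and item:fingerprint for item deduplication. As a hypothetical helper (this diff does not show such a function in crawlo's API; the shipped crawlo/utils/redis_key_validator.py presumably handles validation), building conforming keys is a one-liner:

    def make_redis_key(project_name: str, component: str, suffix: str) -> str:
        # e.g. make_redis_key("books_distributed", "filter", "fingerprint")
        #   -> "crawlo:books_distributed:filter:fingerprint"
        return f"crawlo:{project_name}:{component}:{suffix}"

    assert make_redis_key("books_distributed", "filter", "fingerprint") == \
        "crawlo:books_distributed:filter:fingerprint"
    assert make_redis_key("books_distributed", "item", "fingerprint") == \
        "crawlo:books_distributed:item:fingerprint"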
tests/test_authenticated_proxy.py ADDED
@@ -0,0 +1,142 @@
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ Tests for authenticated proxy support
+ """
+
+ import asyncio
+ import aiohttp
+ import httpx
+ from crawlo.network.request import Request
+ from crawlo.tools import AuthenticatedProxy
+
+
+ async def test_proxy_with_aiohttp():
+     """Test aiohttp with an authenticated proxy."""
+     print("=== Testing aiohttp with an authenticated proxy ===")
+
+     # Proxy configuration
+     proxy_config = {
+         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
+         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
+     }
+
+     # Create the proxy object
+     proxy_url = proxy_config["http"]
+     proxy = AuthenticatedProxy(proxy_url)
+
+     print(f"Original proxy URL: {proxy_url}")
+     print(f"Clean URL: {proxy.clean_url}")
+     print(f"Auth credentials: {proxy.get_auth_credentials()}")
+
+     # Test directly with aiohttp
+     try:
+         auth = proxy.get_auth_credentials()
+         if auth:
+             basic_auth = aiohttp.BasicAuth(auth['username'], auth['password'])
+         else:
+             basic_auth = None
+
+         async with aiohttp.ClientSession() as session:
+             async with session.get(
+                 "https://httpbin.org/ip",
+                 proxy=proxy.clean_url,
+                 proxy_auth=basic_auth
+             ) as response:
+                 print("aiohttp test succeeded!")
+                 print(f"Status code: {response.status}")
+                 content = await response.text()
+                 print(f"Response body: {content[:200]}...")
+
+     except Exception as e:
+         print(f"aiohttp test failed: {e}")
+         import traceback
+         traceback.print_exc()
+
+
+ def test_proxy_with_httpx():
+     """Test httpx with an authenticated proxy."""
+     print("\n=== Testing httpx with an authenticated proxy ===")
+
+     # Proxy configuration
+     proxy_config = {
+         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
+         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
+     }
+
+     # Test directly with httpx
+     try:
+         # httpx accepts the credential-bearing URL directly as the proxy parameter
+         proxy_url = proxy_config["http"]
+
+         with httpx.Client(proxy=proxy_url) as client:
+             response = client.get("https://httpbin.org/ip")
+             print("httpx test succeeded!")
+             print(f"Status code: {response.status_code}")
+             print(f"Response body: {response.text[:200]}...")
+
+     except Exception as e:
+         print(f"httpx test failed: {e}")
+         import traceback
+         traceback.print_exc()
+
+
+ async def test_proxy_with_curl_cffi():
+     """Test curl_cffi with an authenticated proxy."""
+     print("\n=== Testing curl_cffi with an authenticated proxy ===")
+
+     # Proxy configuration
+     proxy_config = {
+         "http": "http://dwe20241014:Dwe0101014@182.201.243.186:58111",
+         "https": "http://dwe20241014:Dwe0101014@182.201.243.186:58111"
+     }
+
+     # Create the proxy object
+     proxy_url = proxy_config["http"]
+     proxy = AuthenticatedProxy(proxy_url)
+
+     print(f"Original proxy URL: {proxy_url}")
+     print(f"Proxy dict: {proxy.proxy_dict}")
+     print(f"Auth header: {proxy.get_auth_header()}")
+
+     # Test directly with curl-cffi
+     try:
+         from curl_cffi import requests as curl_requests
+
+         # Set up the proxies and the authentication header
+         proxies = proxy.proxy_dict
+         headers = {}
+         auth_header = proxy.get_auth_header()
+         if auth_header:
+             headers["Proxy-Authorization"] = auth_header
+
+         response = curl_requests.get(
+             "https://httpbin.org/ip",
+             proxies=proxies,
+             headers=headers
+         )
+
+         print("curl_cffi test succeeded!")
+         print(f"Status code: {response.status_code}")
+         print(f"Response body: {response.text[:200]}...")
+
+     except Exception as e:
+         print(f"curl_cffi test failed: {e}")
+         import traceback
+         traceback.print_exc()
+
+
+ async def main():
+     """Main test entry point."""
+     print("Starting authenticated proxy tests...\n")
+
+     # Exercise each HTTP library
+     await test_proxy_with_aiohttp()
+     test_proxy_with_httpx()
+     await test_proxy_with_curl_cffi()
+
+     print("\nAll tests finished!")
+
+
+ if __name__ == "__main__":
+     asyncio.run(main())
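The test relies on AuthenticatedProxy exposing clean_url, get_auth_credentials(), proxy_dict, and get_auth_header(). This diff does not show how the class performs the split internally; assuming standard URL semantics, the same decomposition can be sketched with urllib.parse and base64:

    import base64
    from urllib.parse import urlsplit, urlunsplit

    def split_proxy_auth(proxy_url: str):
        """Return (clean_url, credentials, auth_header) for a user:pass@host proxy URL."""
        parts = urlsplit(proxy_url)
        host_port = parts.hostname + (f":{parts.port}" if parts.port else "")
        clean_url = urlunsplit((parts.scheme, host_port, parts.path, parts.query, parts.fragment))
        credentials = None
        auth_header = None
        if parts.username:
            credentials = {"username": parts.username, "password": parts.password or ""}
            token = base64.b64encode(
                f"{parts.username}:{parts.password or ''}".encode()
            ).decode()
            auth_header = f"Basic {token}"
        return clean_url, credentials, auth_header

    clean, creds, header = split_proxy_auth("http://user:secret@proxy.example.com:8080")
    # clean  -> "http://proxy.example.com:8080"
    # creds  -> {"username": "user", "password": "secret"}
    # header -> "Basic dXNlcjpzZWNyZXQ="

The header value is what the curl_cffi branch above sends as Proxy-Authorization, while the aiohttp branch passes the same credentials through aiohttp.BasicAuth instead.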
tests/test_cleaners.py ADDED
@@ -0,0 +1,55 @@
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ Data cleaning tools tests
+ """
+ import unittest
+ from crawlo.cleaners import (
+     TextCleaner,
+     DataFormatter,
+     remove_html_tags,
+     decode_html_entities,
+     clean_text,
+     format_number,
+     format_currency,
+     format_phone_number
+ )
+
+
+ class TestCleaners(unittest.TestCase):
+     """Test suite for the data cleaning tools."""
+
+     def test_text_cleaner(self):
+         """Test the text cleaning helpers."""
+         # Strip HTML tags
+         html_text = "<p>这是一个<b>测试</b>文本</p>"
+         clean_text_result = remove_html_tags(html_text)
+         self.assertEqual(clean_text_result, "这是一个测试文本")
+
+         # Decode HTML entities
+         entity_text = "这是一个&nbsp;测试&amp;文本"
+         decoded_text = decode_html_entities(entity_text)
+         self.assertEqual(decoded_text, "这是一个 测试&文本")
+
+         # Combined cleaning
+         complex_text = "<p>这是一个&nbsp;<b>测试</b>&amp;文本</p>"
+         cleaned = clean_text(complex_text)
+         self.assertEqual(cleaned, "这是一个 测试&文本")
+
+     def test_data_formatter(self):
+         """Test the data formatting helpers."""
+         # Number formatting
+         formatted_num = format_number(1234.567, precision=2, thousand_separator=True)
+         self.assertEqual(formatted_num, "1,234.57")
+
+         # Currency formatting
+         formatted_currency = format_currency(1234.567, "¥", 2)
+         self.assertEqual(formatted_currency, "¥1,234.57")
+
+         # Phone number formatting
+         formatted_phone = format_phone_number("13812345678", "+86", "international")
+         self.assertEqual(formatted_phone, "+86 138 1234 5678")
+
+
+ if __name__ == '__main__':
+     unittest.main()
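The phone assertion fixes a 3-4-4 grouping for 11-digit mainland-China numbers in the "international" style. A sketch of just that observed case (the shipped format_phone_number presumably handles more styles and number lengths):

    def format_phone_number_sketch(number: str, country_code: str = "+86",
                                   style: str = "international") -> str:
        # Grouping observed in the test above: 138 1234 5678
        digits = "".join(ch for ch in number if ch.isdigit())
        if style == "international" and len(digits) == 11:
            return f"{country_code} {digits[:3]} {digits[3:7]} {digits[7:]}"
        return number

    assert format_phone_number_sketch("13812345678") == "+86 138 1234 5678"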
tests/test_comprehensive.py ADDED
@@ -0,0 +1,147 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Comprehensive test
+ Verifies how all of the improvements integrate
+ """
+ import sys
+ import os
+ import asyncio
+ import unittest
+ from unittest.mock import patch, MagicMock
+
+ # Add the project root to the Python path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.utils.env_config import get_env_var, get_redis_config, get_runtime_config
+ from crawlo.utils.error_handler import ErrorHandler, handle_exception
+ from crawlo.core.engine import Engine
+ from crawlo.settings.setting_manager import SettingManager
+ from crawlo.settings import default_settings
+ from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
+
+
+ class TestComprehensiveIntegration(unittest.TestCase):
+     """Comprehensive integration tests."""
+
+     def setUp(self):
+         """Prepare the test environment."""
+         # Set the test environment variables
+         self.test_env = {
+             'PROJECT_NAME': 'test_project',
+             'CONCURRENCY': '4',
+             'REDIS_HOST': 'localhost',
+             'REDIS_PORT': '6379'
+         }
+         self.original_env = {}
+         for key, value in self.test_env.items():
+             self.original_env[key] = os.environ.get(key)
+             os.environ[key] = value
+
+     def tearDown(self):
+         """Clean up after the test."""
+         # Restore the original environment variables
+         for key, value in self.original_env.items():
+             if value is None:
+                 os.environ.pop(key, None)
+             else:
+                 os.environ[key] = value
+
+     def test_env_config_integration(self):
+         """Test the environment variable configuration integration."""
+         # Verify the env-var helpers work
+         project_name = get_env_var('PROJECT_NAME', 'default', str)
+         self.assertEqual(project_name, 'test_project')
+
+         concurrency = get_env_var('CONCURRENCY', 1, int)
+         self.assertEqual(concurrency, 4)
+
+         # Verify the Redis configuration helper
+         redis_config = get_redis_config()
+         self.assertEqual(redis_config['REDIS_HOST'], 'localhost')
+         self.assertEqual(redis_config['REDIS_PORT'], 6379)
+
+     def test_error_handler_integration(self):
+         """Test the error handling integration."""
+         # Verify the error handling module works
+         error_handler = ErrorHandler("test")
+
+         # Error handling
+         try:
+             error_handler.handle_error(ValueError("Test error"), raise_error=False)
+         except Exception:
+             self.fail("Error handler should not raise exception when raise_error=False")
+
+         # Safe calls
+         result = error_handler.safe_call(lambda x: x*2, 5, default_return=0)
+         self.assertEqual(result, 10)
+
+         # Decorator
+         @handle_exception(raise_error=False)
+         def failing_function():
+             raise RuntimeError("Test")
+
+         try:
+             failing_function()
+         except Exception:
+             self.fail("Decorated function should not raise exception")
+
+     def test_settings_integration(self):
+         """Test the settings manager integration."""
+         # Reload the default settings to pick up the environment variables
+         import importlib
+         import crawlo.settings.default_settings
+         importlib.reload(crawlo.settings.default_settings)
+
+         # Create the settings manager
+         settings = SettingManager()
+         settings.set_settings(crawlo.settings.default_settings)
+
+         # Verify the settings loaded correctly
+         self.assertEqual(settings.get('PROJECT_NAME'), 'test_project')
+         self.assertEqual(settings.get_int('CONCURRENCY'), 4)
+         self.assertEqual(settings.get('REDIS_HOST'), 'localhost')
+
+     def test_queue_manager_config(self):
+         """Test the queue manager configuration."""
+         # Reload the default settings
+         import importlib
+         import crawlo.settings.default_settings
+         importlib.reload(crawlo.settings.default_settings)
+
+         # Create the settings manager
+         settings = SettingManager()
+         settings.set_settings(crawlo.settings.default_settings)
+
+         # Build the queue configuration from the settings
+         queue_config = QueueConfig.from_settings(settings)
+
+         # Verify the configuration
+         self.assertEqual(queue_config.queue_type, QueueType.AUTO)
+         self.assertIn('test_project', queue_config.queue_name)
+
+     async def test_async_components(self):
+         """Test the asynchronous components."""
+         # Async error handling decorator
+         @handle_exception(raise_error=False)
+         async def async_failing_function():
+             raise RuntimeError("Async test")
+
+         try:
+             await async_failing_function()
+         except Exception:
+             self.fail("Async decorated function should not raise exception")
+
+
+ if __name__ == '__main__':
+     # Run the synchronous tests
+     unittest.main(exit=False)
+
+     # Run the asynchronous test manually
+     async def run_async_tests():
+         test_instance = TestComprehensiveIntegration()
+         test_instance.setUp()
+         await test_instance.test_async_components()
+         test_instance.tearDown()
+
+     asyncio.run(run_async_tests())
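The module above drives its one async test manually with asyncio.run after unittest.main(exit=False), because a plain unittest.TestCase cannot await coroutine test methods. Since Python 3.8, unittest.IsolatedAsyncioTestCase runs them natively; an equivalent sketch (assuming the handle_exception decorator swallows the error as the suite above expects) would be:

    import unittest
    from crawlo.utils.error_handler import handle_exception

    class TestAsyncComponents(unittest.IsolatedAsyncioTestCase):
        async def test_async_decorator(self):
            @handle_exception(raise_error=False)
            async def async_failing_function():
                raise RuntimeError("Async test")

            # Should be swallowed by the decorator rather than raised;
            # an escaping exception fails the test automatically
            await async_failing_function()

    if __name__ == "__main__":
        unittest.main()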