crawlo 1.3.2-py3-none-any.whl → 1.3.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic.

Files changed (219)
  1. crawlo/__init__.py +63 -63
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +322 -314
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +365 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -256
  19. crawlo/crawler.py +1166 -1168
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +226 -226
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +52 -45
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +135 -135
  50. crawlo/middleware/offsite.py +123 -123
  51. crawlo/middleware/proxy.py +386 -386
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -65
  57. crawlo/mode_manager.py +187 -187
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -379
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +318 -318
  70. crawlo/pipelines/pipeline_manager.py +75 -75
  71. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  72. crawlo/project.py +325 -297
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -379
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +225 -225
  78. crawlo/settings/setting_manager.py +198 -198
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +266 -266
  89. crawlo/templates/project/settings_distributed.py.tmpl +179 -179
  90. crawlo/templates/project/settings_gentle.py.tmpl +60 -60
  91. crawlo/templates/project/settings_high_performance.py.tmpl +130 -130
  92. crawlo/templates/project/settings_minimal.py.tmpl +34 -34
  93. crawlo/templates/project/settings_simple.py.tmpl +101 -101
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +38 -38
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -200
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/tools/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +289 -289
  103. crawlo/tools/distributed_coordinator.py +388 -388
  104. crawlo/tools/encoding_converter.py +127 -127
  105. crawlo/tools/request_tools.py +82 -82
  106. crawlo/tools/retry_mechanism.py +224 -224
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/tools/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +34 -34
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +199 -146
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/METADATA +1020 -1020
  131. crawlo-1.3.3.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -107
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +142 -142
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +66 -66
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +150 -150
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +152 -152
  156. tests/test_config_validator.py +182 -182
  157. tests/test_crawlo_proxy_integration.py +108 -108
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -65
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -169
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +184 -184
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +72 -72
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +111 -111
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -159
  213. tests/test_user_agents.py +96 -96
  214. tests/tools_example.py +260 -260
  215. tests/verify_distributed.py +117 -117
  216. crawlo-1.3.2.dist-info/RECORD +0 -219
  217. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/WHEEL +0 -0
  218. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/entry_points.txt +0 -0
  219. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/top_level.txt +0 -0
tests/test_template_redis_key.py CHANGED
@@ -1,135 +1,135 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Template-project Redis key test script.
Verifies that a project generated from the templates follows the new Redis key naming convention.
"""
import sys
import os
import tempfile
import shutil
import subprocess
from pathlib import Path

# Add the project root directory to the path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))


def test_template_project_redis_key():
    """Test the template project's Redis key naming convention."""
    print("🔍 Testing the template project's Redis key naming convention...")

    # Create a temporary directory
    with tempfile.TemporaryDirectory() as temp_dir:
        try:
            # Create the project in the original working directory, then move it into the temporary directory
            original_cwd = os.getcwd()

            # Create the test project (in the original working directory)
            print("  1. Creating the test project...")
            cmd_path = os.path.join(original_cwd, "crawlo", "commands", "startproject.py")
            result = subprocess.run([
                sys.executable, cmd_path, "test_project"
            ], cwd=original_cwd, capture_output=True, text=True)

            if result.returncode != 0:
                print(f"Failed to create the project: {result.stderr}")
                return False

            print("  Project created successfully")

            # Check the generated files
            project_dir = Path(original_cwd) / "test_project"
            if not project_dir.exists():
                print("Project directory was not created")
                return False

            # Move the project into the temporary directory
            target_dir = Path(temp_dir) / "test_project"
            shutil.move(str(project_dir), str(target_dir))
            project_dir = target_dir

            settings_file = project_dir / "test_project" / "settings.py"
            if not settings_file.exists():
                print("settings.py was not created")
                return False

            # Read the contents of settings.py
            with open(settings_file, 'r', encoding='utf-8') as f:
                settings_content = f.read()

            # Check that the old REDIS_KEY setting has been removed
            if "REDIS_KEY = f'{{project_name}}:fingerprint'" in settings_content:
                print("The old REDIS_KEY setting is still present")
                return False

            # Check that the new comments have been added
            if "# crawlo:{project_name}:filter:fingerprint (请求去重)" not in settings_content:
                print("Missing the new Redis key naming convention comment")
                return False

            if "# crawlo:{project_name}:item:fingerprint (数据项去重)" not in settings_content:
                print("Missing the Redis key naming convention comment for item deduplication")
                return False

            print("  settings.py follows the new Redis key naming convention")

            # Check crawlo.cfg
            cfg_file = project_dir / "crawlo.cfg"
            if not cfg_file.exists():
                print("crawlo.cfg was not created")
                return False

            with open(cfg_file, 'r', encoding='utf-8') as f:
                cfg_content = f.read()

            if "default = test_project.settings" not in cfg_content:
                print("crawlo.cfg is not configured correctly")
                return False

            print("  crawlo.cfg is configured correctly")

            print("Template project Redis key naming convention test passed!")
            return True

        except Exception as e:
            print(f"An error occurred during the test: {e}")
            import traceback
            traceback.print_exc()
            return False
        finally:
            # Clean up the created project directory
            project_dir = Path(original_cwd) / "test_project"
            if project_dir.exists():
                shutil.rmtree(str(project_dir), ignore_errors=True)

            # Restore the original working directory
            os.chdir(original_cwd)


def main():
    """Main test entry point."""
    print("Starting the template project Redis key naming convention test...")
    print("=" * 50)

    try:
        success = test_template_project_redis_key()

        print("=" * 50)
        if success:
            print("All tests passed! The template project follows the new Redis key naming convention")
        else:
            print("Test failed; please check the template files")
            return 1

    except Exception as e:
        print("=" * 50)
        print(f"An exception occurred during the test: {e}")
        return 1

    return 0


if __name__ == "__main__":
    exit_code = main()
    sys.exit(exit_code)
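
For reference, the string checks above imply the shape of the comment block the generated settings.py is expected to carry. The sketch below shows such a block; the two "# crawlo:..." comment lines are exactly the strings the test asserts on (请求去重 = request deduplication, 数据项去重 = item deduplication), while the surrounding assignment is purely illustrative and not taken from the actual template.

# --- Redis key naming convention (sketch of the comment block checked by the test) ---
PROJECT_NAME = 'test_project'  # illustrative only; not asserted by the test
# crawlo:{project_name}:filter:fingerprint (请求去重)
# crawlo:{project_name}:item:fingerprint (数据项去重)
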
tests/test_tools.py CHANGED
@@ -1,160 +1,160 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Tests for the tools package.
"""
import asyncio
import unittest
from crawlo.tools import (
    # Date tools
    parse_time,
    format_time,
    time_diff,

    # Data cleaning tools
    clean_text,
    format_currency,
    extract_emails,

    # Data validation tools
    validate_email,
    validate_url,
    validate_phone,
    validate_chinese_id_card,
    validate_date,
    validate_number_range,

    # Request handling tools
    build_url,
    add_query_params,
    merge_headers,

    # Anti-crawling tools
    get_random_user_agent,
    rotate_proxy,

    # Distributed coordination tools
    generate_task_id,
    get_cluster_info
)


class TestTools(unittest.TestCase):
    """Test suite for the tools package."""

    def test_date_tools(self):
        """Test the date tools."""
        # Time parsing
        time_str = "2025-09-10 14:30:00"
        parsed_time = parse_time(time_str)
        self.assertIsNotNone(parsed_time)

        # Time formatting
        formatted_time = format_time(parsed_time, "%Y-%m-%d")
        self.assertEqual(formatted_time, "2025-09-10")

        # Time difference calculation
        time_str2 = "2025-09-11 14:30:00"
        parsed_time2 = parse_time(time_str2)
        diff = time_diff(parsed_time2, parsed_time)
        self.assertEqual(diff, 86400)  # 24 hours = 86400 seconds

    def test_data_cleaning_tools(self):
        """Test the data cleaning tools."""
        # Text cleaning
        dirty_text = "<p>这是一个&nbsp;<b>测试</b>&amp;文本</p>"
        clean_result = clean_text(dirty_text)
        self.assertEqual(clean_result, "这是一个 测试&文本")

        # Currency formatting
        price = 1234.567
        formatted_price = format_currency(price, "¥", 2)
        self.assertEqual(formatted_price, "¥1,234.57")

        # Email extraction
        text_with_email = "联系邮箱: test@example.com, support@crawler.com"
        emails = extract_emails(text_with_email)
        self.assertIn("test@example.com", emails)
        self.assertIn("support@crawler.com", emails)

    def test_data_validation_tools(self):
        """Test the data validation tools."""
        # Email validation
        self.assertTrue(validate_email("test@example.com"))
        self.assertFalse(validate_email("invalid-email"))

        # URL validation
        self.assertTrue(validate_url("https://example.com"))
        self.assertFalse(validate_url("invalid-url"))

        # Phone number validation
        self.assertTrue(validate_phone("13812345678"))
        self.assertFalse(validate_phone("12345"))

        # Chinese ID card validation
        self.assertTrue(validate_chinese_id_card("110101199001011234"))
        self.assertFalse(validate_chinese_id_card("invalid-id"))

        # Date validation
        self.assertTrue(validate_date("2025-09-10"))
        self.assertFalse(validate_date("invalid-date"))

        # Numeric range validation
        self.assertTrue(validate_number_range(50, 1, 100))
        self.assertFalse(validate_number_range(150, 1, 100))

    def test_request_handling_tools(self):
        """Test the request handling tools."""
        # URL building
        base_url = "https://api.example.com"
        path = "/v1/users"
        query_params = {"page": 1, "limit": 10}
        full_url = build_url(base_url, path, query_params)
        self.assertIn("https://api.example.com/v1/users", full_url)
        self.assertIn("page=1", full_url)
        self.assertIn("limit=10", full_url)

        # Adding query parameters
        existing_url = "https://api.example.com/v1/users?page=1"
        new_params = {"sort": "name"}
        updated_url = add_query_params(existing_url, new_params)
        self.assertIn("sort=name", updated_url)

        # Merging request headers
        base_headers = {"Content-Type": "application/json"}
        additional_headers = {"Authorization": "Bearer token123"}
        merged_headers = merge_headers(base_headers, additional_headers)
        self.assertEqual(merged_headers["Content-Type"], "application/json")
        self.assertEqual(merged_headers["Authorization"], "Bearer token123")

    def test_anti_crawler_tools(self):
        """Test the anti-crawling tools."""
        # Random User-Agent
        user_agent = get_random_user_agent()
        self.assertIsInstance(user_agent, str)
        self.assertGreater(len(user_agent), 0)

        # Proxy rotation
        proxy = rotate_proxy()
        self.assertIsInstance(proxy, dict)

    def test_distributed_coordinator_tools(self):
        """Test the distributed coordination tools."""
        # Task ID generation
        task_id = generate_task_id("https://example.com", "test_spider")
        self.assertIsInstance(task_id, str)
        self.assertEqual(len(task_id), 32)  # length of an MD5 hash

        # Fetching cluster info (an async function needs special handling)
        async def test_cluster_info():
            cluster_info = await get_cluster_info()
            self.assertIsInstance(cluster_info, dict)
            return cluster_info

        # Run the async test
        cluster_info = asyncio.run(test_cluster_info())
        self.assertIsInstance(cluster_info, dict)


if __name__ == '__main__':
    unittest.main()
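
The last test above drives the awaitable get_cluster_info through asyncio.run() from inside a synchronous TestCase. A minimal alternative sketch, assuming get_cluster_info keeps the awaitable, dict-returning behaviour exercised above, is to host the coroutine in unittest.IsolatedAsyncioTestCase (available since Python 3.8), which creates and tears down the event loop for you:

import unittest

from crawlo.tools import get_cluster_info


class TestClusterInfoAsync(unittest.IsolatedAsyncioTestCase):
    """Sketch: let unittest manage the event loop instead of calling asyncio.run() manually."""

    async def test_cluster_info(self):
        # Assumes get_cluster_info() is awaitable and returns a dict, as in test_distributed_coordinator_tools.
        cluster_info = await get_cluster_info()
        self.assertIsInstance(cluster_info, dict)


if __name__ == '__main__':
    unittest.main()
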