crawlo-1.2.6-py3-none-any.whl → crawlo-1.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (209)
  1. crawlo/__init__.py +61 -61
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +60 -60
  4. crawlo/cleaners/data_formatter.py +225 -225
  5. crawlo/cleaners/encoding_converter.py +125 -125
  6. crawlo/cleaners/text_cleaner.py +232 -232
  7. crawlo/cli.py +75 -88
  8. crawlo/commands/__init__.py +14 -14
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/help.py +138 -144
  12. crawlo/commands/list.py +155 -155
  13. crawlo/commands/run.py +323 -323
  14. crawlo/commands/startproject.py +436 -436
  15. crawlo/commands/stats.py +187 -187
  16. crawlo/commands/utils.py +186 -186
  17. crawlo/config.py +312 -312
  18. crawlo/config_validator.py +251 -251
  19. crawlo/core/__init__.py +2 -2
  20. crawlo/core/engine.py +365 -356
  21. crawlo/core/processor.py +40 -40
  22. crawlo/core/scheduler.py +251 -239
  23. crawlo/crawler.py +1099 -1110
  24. crawlo/data/__init__.py +5 -5
  25. crawlo/data/user_agents.py +107 -107
  26. crawlo/downloader/__init__.py +266 -266
  27. crawlo/downloader/aiohttp_downloader.py +228 -221
  28. crawlo/downloader/cffi_downloader.py +256 -256
  29. crawlo/downloader/httpx_downloader.py +259 -259
  30. crawlo/downloader/hybrid_downloader.py +212 -212
  31. crawlo/downloader/playwright_downloader.py +402 -402
  32. crawlo/downloader/selenium_downloader.py +472 -472
  33. crawlo/event.py +11 -11
  34. crawlo/exceptions.py +81 -81
  35. crawlo/extension/__init__.py +39 -38
  36. crawlo/extension/health_check.py +141 -141
  37. crawlo/extension/log_interval.py +57 -57
  38. crawlo/extension/log_stats.py +81 -81
  39. crawlo/extension/logging_extension.py +43 -43
  40. crawlo/extension/memory_monitor.py +104 -104
  41. crawlo/extension/performance_profiler.py +133 -133
  42. crawlo/extension/request_recorder.py +107 -107
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +234 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/items/__init__.py +23 -23
  47. crawlo/items/base.py +21 -21
  48. crawlo/items/fields.py +52 -52
  49. crawlo/items/items.py +104 -104
  50. crawlo/middleware/__init__.py +21 -21
  51. crawlo/middleware/default_header.py +131 -131
  52. crawlo/middleware/download_delay.py +104 -104
  53. crawlo/middleware/middleware_manager.py +136 -135
  54. crawlo/middleware/offsite.py +114 -114
  55. crawlo/middleware/proxy.py +367 -367
  56. crawlo/middleware/request_ignore.py +86 -86
  57. crawlo/middleware/response_code.py +163 -163
  58. crawlo/middleware/response_filter.py +136 -136
  59. crawlo/middleware/retry.py +124 -124
  60. crawlo/mode_manager.py +211 -211
  61. crawlo/network/__init__.py +21 -21
  62. crawlo/network/request.py +338 -338
  63. crawlo/network/response.py +359 -359
  64. crawlo/pipelines/__init__.py +21 -21
  65. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  66. crawlo/pipelines/console_pipeline.py +39 -39
  67. crawlo/pipelines/csv_pipeline.py +316 -316
  68. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  69. crawlo/pipelines/json_pipeline.py +218 -218
  70. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  71. crawlo/pipelines/mongo_pipeline.py +131 -131
  72. crawlo/pipelines/mysql_pipeline.py +317 -317
  73. crawlo/pipelines/pipeline_manager.py +62 -61
  74. crawlo/pipelines/redis_dedup_pipeline.py +166 -165
  75. crawlo/project.py +314 -279
  76. crawlo/queue/pqueue.py +37 -37
  77. crawlo/queue/queue_manager.py +377 -376
  78. crawlo/queue/redis_priority_queue.py +306 -306
  79. crawlo/settings/__init__.py +7 -7
  80. crawlo/settings/default_settings.py +219 -215
  81. crawlo/settings/setting_manager.py +122 -122
  82. crawlo/spider/__init__.py +639 -639
  83. crawlo/stats_collector.py +59 -59
  84. crawlo/subscriber.py +129 -129
  85. crawlo/task_manager.py +30 -30
  86. crawlo/templates/crawlo.cfg.tmpl +10 -10
  87. crawlo/templates/project/__init__.py.tmpl +3 -3
  88. crawlo/templates/project/items.py.tmpl +17 -17
  89. crawlo/templates/project/middlewares.py.tmpl +118 -118
  90. crawlo/templates/project/pipelines.py.tmpl +96 -96
  91. crawlo/templates/project/settings.py.tmpl +288 -288
  92. crawlo/templates/project/settings_distributed.py.tmpl +157 -157
  93. crawlo/templates/project/settings_gentle.py.tmpl +100 -100
  94. crawlo/templates/project/settings_high_performance.py.tmpl +134 -134
  95. crawlo/templates/project/settings_simple.py.tmpl +98 -98
  96. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  97. crawlo/templates/run.py.tmpl +45 -45
  98. crawlo/templates/spider/spider.py.tmpl +143 -143
  99. crawlo/tools/__init__.py +182 -182
  100. crawlo/tools/anti_crawler.py +268 -268
  101. crawlo/tools/authenticated_proxy.py +240 -240
  102. crawlo/tools/data_validator.py +180 -180
  103. crawlo/tools/date_tools.py +35 -35
  104. crawlo/tools/distributed_coordinator.py +386 -386
  105. crawlo/tools/retry_mechanism.py +220 -220
  106. crawlo/tools/scenario_adapter.py +262 -262
  107. crawlo/utils/__init__.py +35 -35
  108. crawlo/utils/batch_processor.py +259 -259
  109. crawlo/utils/controlled_spider_mixin.py +439 -439
  110. crawlo/utils/date_tools.py +290 -290
  111. crawlo/utils/db_helper.py +343 -343
  112. crawlo/utils/enhanced_error_handler.py +356 -356
  113. crawlo/utils/env_config.py +143 -106
  114. crawlo/utils/error_handler.py +123 -123
  115. crawlo/utils/func_tools.py +82 -82
  116. crawlo/utils/large_scale_config.py +286 -286
  117. crawlo/utils/large_scale_helper.py +344 -344
  118. crawlo/utils/log.py +128 -128
  119. crawlo/utils/performance_monitor.py +285 -285
  120. crawlo/utils/queue_helper.py +175 -175
  121. crawlo/utils/redis_connection_pool.py +351 -351
  122. crawlo/utils/redis_key_validator.py +198 -198
  123. crawlo/utils/request.py +267 -267
  124. crawlo/utils/request_serializer.py +218 -218
  125. crawlo/utils/spider_loader.py +61 -61
  126. crawlo/utils/system.py +11 -11
  127. crawlo/utils/tools.py +4 -4
  128. crawlo/utils/url.py +39 -39
  129. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/METADATA +764 -764
  130. crawlo-1.2.7.dist-info/RECORD +209 -0
  131. examples/__init__.py +7 -7
  132. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  133. tests/__init__.py +7 -7
  134. tests/advanced_tools_example.py +275 -275
  135. tests/authenticated_proxy_example.py +236 -236
  136. tests/cleaners_example.py +160 -160
  137. tests/config_validation_demo.py +102 -102
  138. tests/controlled_spider_example.py +205 -205
  139. tests/date_tools_example.py +180 -180
  140. tests/dynamic_loading_example.py +523 -523
  141. tests/dynamic_loading_test.py +104 -104
  142. tests/env_config_example.py +133 -133
  143. tests/error_handling_example.py +171 -171
  144. tests/redis_key_validation_demo.py +130 -130
  145. tests/response_improvements_example.py +144 -144
  146. tests/test_advanced_tools.py +148 -148
  147. tests/test_all_redis_key_configs.py +145 -145
  148. tests/test_authenticated_proxy.py +141 -141
  149. tests/test_cleaners.py +54 -54
  150. tests/test_comprehensive.py +146 -146
  151. tests/test_config_consistency.py +81 -0
  152. tests/test_config_validator.py +193 -193
  153. tests/test_crawlo_proxy_integration.py +172 -172
  154. tests/test_date_tools.py +123 -123
  155. tests/test_default_header_middleware.py +158 -158
  156. tests/test_double_crawlo_fix.py +207 -207
  157. tests/test_double_crawlo_fix_simple.py +124 -124
  158. tests/test_download_delay_middleware.py +221 -221
  159. tests/test_downloader_proxy_compatibility.py +268 -268
  160. tests/test_dynamic_downloaders_proxy.py +124 -124
  161. tests/test_dynamic_proxy.py +92 -92
  162. tests/test_dynamic_proxy_config.py +146 -146
  163. tests/test_dynamic_proxy_real.py +109 -109
  164. tests/test_edge_cases.py +303 -303
  165. tests/test_enhanced_error_handler.py +270 -270
  166. tests/test_env_config.py +121 -121
  167. tests/test_error_handler_compatibility.py +112 -112
  168. tests/test_final_validation.py +153 -153
  169. tests/test_framework_env_usage.py +103 -103
  170. tests/test_integration.py +356 -356
  171. tests/test_item_dedup_redis_key.py +122 -122
  172. tests/test_mode_consistency.py +52 -0
  173. tests/test_offsite_middleware.py +221 -221
  174. tests/test_parsel.py +29 -29
  175. tests/test_performance.py +327 -327
  176. tests/test_proxy_api.py +264 -264
  177. tests/test_proxy_health_check.py +32 -32
  178. tests/test_proxy_middleware.py +121 -121
  179. tests/test_proxy_middleware_enhanced.py +216 -216
  180. tests/test_proxy_middleware_integration.py +136 -136
  181. tests/test_proxy_providers.py +56 -56
  182. tests/test_proxy_stats.py +19 -19
  183. tests/test_proxy_strategies.py +59 -59
  184. tests/test_queue_manager_double_crawlo.py +173 -173
  185. tests/test_queue_manager_redis_key.py +176 -176
  186. tests/test_real_scenario_proxy.py +195 -195
  187. tests/test_redis_config.py +28 -28
  188. tests/test_redis_connection_pool.py +294 -294
  189. tests/test_redis_key_naming.py +181 -181
  190. tests/test_redis_key_validator.py +123 -123
  191. tests/test_redis_queue.py +224 -224
  192. tests/test_request_ignore_middleware.py +182 -182
  193. tests/test_request_serialization.py +70 -70
  194. tests/test_response_code_middleware.py +349 -349
  195. tests/test_response_filter_middleware.py +427 -427
  196. tests/test_response_improvements.py +152 -152
  197. tests/test_retry_middleware.py +241 -241
  198. tests/test_scheduler.py +252 -241
  199. tests/test_scheduler_config_update.py +134 -0
  200. tests/test_simple_response.py +61 -61
  201. tests/test_telecom_spider_redis_key.py +205 -205
  202. tests/test_template_content.py +87 -87
  203. tests/test_template_redis_key.py +134 -134
  204. tests/test_tools.py +153 -153
  205. tests/tools_example.py +257 -257
  206. crawlo-1.2.6.dist-info/RECORD +0 -206
  207. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/WHEEL +0 -0
  208. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/entry_points.txt +0 -0
  209. {crawlo-1.2.6.dist-info → crawlo-1.2.7.dist-info}/top_level.txt +0 -0
tests/test_simple_response.py
@@ -1,62 +1,62 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- Simple functional test for Response
- """
- import sys
- import os
-
- # Add the project root directory to the path
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
- from crawlo.network.response import Response
-
-
- def test_basic_functionality():
-     """Test basic functionality"""
-     print("Testing basic functionality...")
-
-     # Create a simple HTML response
-     html_content = """
-     <html>
-     <head>
-         <title>Test Page</title>
-     </head>
-     <body>
-         <div class="content">
-             <h1>Main Heading</h1>
-             <p class="intro">This is the intro paragraph</p>
-         </div>
-     </body>
-     </html>
-     """
-
-     response = Response(
-         url="https://example.com/test",
-         body=html_content.encode('utf-8'),
-         headers={"content-type": "text/html; charset=utf-8"}
-     )
-
-     # Test basic attributes
-     print(f"URL: {response.url}")
-     print(f"Status code: {response.status_code}")
-
-     # Test text extraction (using the new method)
-     title = response.extract_text('title')
-     print(f"Title: {title}")
-
-     h1_text = response.extract_text('.content h1')
-     print(f"H1 text: {h1_text}")
-
-     intro_text = response.extract_text('.intro')
-     print(f"Intro text: {intro_text}")
-
-     # Test XPath (using the new method)
-     title_xpath = response.extract_text('//title')
-     print(f"XPath title: {title_xpath}")
-
-     print("Basic functionality test complete")
-
-
- if __name__ == '__main__':
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Simple functional test for Response
+ """
+ import sys
+ import os
+
+ # Add the project root directory to the path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ from crawlo.network.response import Response
+
+
+ def test_basic_functionality():
+     """Test basic functionality"""
+     print("Testing basic functionality...")
+
+     # Create a simple HTML response
+     html_content = """
+     <html>
+     <head>
+         <title>Test Page</title>
+     </head>
+     <body>
+         <div class="content">
+             <h1>Main Heading</h1>
+             <p class="intro">This is the intro paragraph</p>
+         </div>
+     </body>
+     </html>
+     """
+
+     response = Response(
+         url="https://example.com/test",
+         body=html_content.encode('utf-8'),
+         headers={"content-type": "text/html; charset=utf-8"}
+     )
+
+     # Test basic attributes
+     print(f"URL: {response.url}")
+     print(f"Status code: {response.status_code}")
+
+     # Test text extraction (using the new method)
+     title = response.extract_text('title')
+     print(f"Title: {title}")
+
+     h1_text = response.extract_text('.content h1')
+     print(f"H1 text: {h1_text}")
+
+     intro_text = response.extract_text('.intro')
+     print(f"Intro text: {intro_text}")
+
+     # Test XPath (using the new method)
+     title_xpath = response.extract_text('//title')
+     print(f"XPath title: {title_xpath}")
+
+     print("Basic functionality test complete")
+
+
+ if __name__ == '__main__':
      test_basic_functionality()
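
For reference, the test above exercises Response.extract_text, which accepts both CSS selectors and XPath expressions. A minimal standalone sketch of that usage, with the constructor arguments copied from the test (the exact return values depend on crawlo's extraction behavior):

from crawlo.network.response import Response

# Build a Response directly, exactly as the test does.
resp = Response(
    url="https://example.com/test",
    body=b"<html><head><title>Test Page</title></head><body></body></html>",
    headers={"content-type": "text/html; charset=utf-8"},
)

print(resp.extract_text('title'))    # CSS selector
print(resp.extract_text('//title'))  # XPath expression targeting the same element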
tests/test_telecom_spider_redis_key.py
@@ -1,206 +1,206 @@
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- """
- Telecom equipment license spider Redis key test script
- Verifies that the distributed spider follows the new Redis key naming convention
- """
- import sys
- import os
- import asyncio
- import tempfile
- import shutil
- from pathlib import Path
-
- # Add the project root directory to the path
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
- # Import the relevant modules
- from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
- from crawlo.filters.aioredis_filter import AioRedisFilter
- from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline
-
-
- class MockSettings:
-     """Mock settings class"""
-     def __init__(self, project_name="telecom_licenses_distributed"):
-         self.project_name = project_name
-         self.REDIS_HOST = '127.0.0.1'
-         self.REDIS_PORT = 6379
-         self.REDIS_PASSWORD = ''
-         self.REDIS_DB = 2
-         self.REDIS_URL = f'redis://127.0.0.1:6379/{self.REDIS_DB}'
-         self.REDIS_TTL = 0
-         self.CLEANUP_FP = 0
-         self.FILTER_DEBUG = True
-         self.LOG_LEVEL = "INFO"
-         self.DECODE_RESPONSES = True
-         self.SCHEDULER_QUEUE_NAME = f'crawlo:{project_name}:queue:requests'
-
-     def get(self, key, default=None):
-         if key == 'PROJECT_NAME':
-             return self.project_name
-         elif key == 'REDIS_HOST':
-             return self.REDIS_HOST
-         elif key == 'REDIS_PASSWORD':
-             return self.REDIS_PASSWORD
-         elif key == 'REDIS_URL':
-             return self.REDIS_URL
-         elif key == 'FILTER_DEBUG':
-             return self.FILTER_DEBUG
-         elif key == 'LOG_LEVEL':
-             return self.LOG_LEVEL
-         elif key == 'DECODE_RESPONSES':
-             return self.DECODE_RESPONSES
-         elif key == 'SCHEDULER_QUEUE_NAME':
-             return self.SCHEDULER_QUEUE_NAME
-         return default
-
-     def get_bool(self, key, default=False):
-         if key == 'FILTER_DEBUG':
-             return self.FILTER_DEBUG
-         elif key == 'DECODE_RESPONSES':
-             return self.DECODE_RESPONSES
-         elif key == 'CLEANUP_FP':
-             return self.CLEANUP_FP
-         return default
-
-     def get_int(self, key, default=0):  # fixed method name
-         if key == 'REDIS_TTL':
-             return self.REDIS_TTL
-         elif key == 'REDIS_PORT':
-             return self.REDIS_PORT
-         elif key == 'REDIS_DB':
-             return self.REDIS_DB
-         elif key == 'SCHEDULER_MAX_QUEUE_SIZE':
-             return 1000
-         elif key == 'QUEUE_MAX_RETRIES':
-             return 3
-         elif key == 'QUEUE_TIMEOUT':
-             return 300
-         return default
-
-
- class MockCrawler:
-     """Mock crawler class"""
-     def __init__(self, project_name="telecom_licenses_distributed"):
-         self.settings = MockSettings(project_name)
-         self.stats = {}
-
-
- async def test_telecom_spider_redis_key():
-     """Test the telecom equipment license spider's Redis key naming convention"""
-     print("🔍 Testing the telecom equipment license spider's Redis key naming convention...")
-
-     project_name = "telecom_licenses_distributed"
-     expected_prefix = f"crawlo:{project_name}"
-
-     try:
-         # 1. Test QueueManager and RedisPriorityQueue
-         print(" 1. Testing the queue manager...")
-         queue_config = QueueConfig(
-             queue_type=QueueType.REDIS,
-             redis_url="redis://127.0.0.1:6379/2",
-             queue_name=f"crawlo:{project_name}:queue:requests",  # use the unified naming convention
-             max_queue_size=1000,
-             max_retries=3,
-             timeout=300
-         )
-
-         queue_manager = QueueManager(queue_config)
-         queue = await queue_manager._create_queue(QueueType.REDIS)
-
-         # Verify the queue names follow the convention
-         expected_queue_name = f"{expected_prefix}:queue:requests"
-         expected_processing_queue = f"{expected_prefix}:queue:processing"
-         expected_failed_queue = f"{expected_prefix}:queue:failed"
-
-         assert queue.queue_name == expected_queue_name, f"Queue name mismatch: {queue.queue_name} != {expected_queue_name}"
-         assert queue.processing_queue == expected_processing_queue, f"Processing queue name mismatch: {queue.processing_queue} != {expected_processing_queue}"
-         assert queue.failed_queue == expected_failed_queue, f"Failed queue name mismatch: {queue.failed_queue} != {expected_failed_queue}"
-
-         print(f" ✅ Request queue: {queue.queue_name}")
-         print(f" ✅ Processing queue: {queue.processing_queue}")
-         print(f" ✅ Failed queue: {queue.failed_queue}")
-
-         # 2. Test AioRedisFilter
-         print(" 2. Testing the request dedup filter...")
-         mock_crawler = MockCrawler(project_name)
-         filter_instance = AioRedisFilter.create_instance(mock_crawler)
-
-         expected_filter_key = f"{expected_prefix}:filter:fingerprint"
-         assert filter_instance.redis_key == expected_filter_key, f"Filter key mismatch: {filter_instance.redis_key} != {expected_filter_key}"
-
-         print(f" ✅ Request dedup key: {filter_instance.redis_key}")
-
-         # 3. Test RedisDedupPipeline
-         print(" 3. Testing the item dedup pipeline...")
-         dedup_pipeline = RedisDedupPipeline.from_crawler(mock_crawler)
-
-         expected_item_key = f"{expected_prefix}:item:fingerprint"
-         assert dedup_pipeline.redis_key == expected_item_key, f"Item dedup key mismatch: {dedup_pipeline.redis_key} != {expected_item_key}"
-
-         print(f" ✅ Item dedup key: {dedup_pipeline.redis_key}")
-
-         # 4. Verify that all keys use the unified prefix
-         print(" 4. Verifying the unified prefix...")
-         all_keys = [
-             queue.queue_name,
-             queue.processing_queue,
-             queue.failed_queue,
-             filter_instance.redis_key,
-             dedup_pipeline.redis_key
-         ]
-
-         for key in all_keys:
-             assert key.startswith(expected_prefix), f"Key does not use the unified prefix: {key}"
-             print(f" ✅ {key}")
-
-         print("✅ Telecom equipment license spider Redis key naming convention test passed!")
-         return True
-
-     except Exception as e:
-         print(f"❌ Test failed: {e}")
-         import traceback
-         traceback.print_exc()
-         return False
-     finally:
-         # Clean up resources
-         try:
-             if 'queue' in locals():
-                 await queue.close()
-             if 'filter_instance' in locals() and hasattr(filter_instance, 'redis'):
-                 await filter_instance.redis.close()
-             if 'dedup_pipeline' in locals() and hasattr(dedup_pipeline, 'redis_client'):
-                 dedup_pipeline.redis_client.close()
-         except:
-             pass
-
-
- async def main():
-     """Main test function"""
-     print("🚀 Starting the telecom equipment license spider Redis key naming convention test...")
-     print("=" * 60)
-
-     try:
-         success = await test_telecom_spider_redis_key()
-
-         print("=" * 60)
-         if success:
-             print("🎉 All tests passed! The telecom equipment license spider follows the new Redis key naming convention")
-         else:
-             print("❌ Test failed, please check the implementation")
-             return 1
-
-     except Exception as e:
-         print("=" * 60)
-         print(f"❌ An exception occurred during the test: {e}")
-         import traceback
-         traceback.print_exc()
-         return 1
-
-     return 0
-
-
- if __name__ == "__main__":
-     exit_code = asyncio.run(main())
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ """
+ Telecom equipment license spider Redis key test script
+ Verifies that the distributed spider follows the new Redis key naming convention
+ """
+ import sys
+ import os
+ import asyncio
+ import tempfile
+ import shutil
+ from pathlib import Path
+
+ # Add the project root directory to the path
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
+
+ # Import the relevant modules
+ from crawlo.queue.queue_manager import QueueManager, QueueConfig, QueueType
+ from crawlo.filters.aioredis_filter import AioRedisFilter
+ from crawlo.pipelines.redis_dedup_pipeline import RedisDedupPipeline
+
+
+ class MockSettings:
+     """Mock settings class"""
+     def __init__(self, project_name="telecom_licenses_distributed"):
+         self.project_name = project_name
+         self.REDIS_HOST = '127.0.0.1'
+         self.REDIS_PORT = 6379
+         self.REDIS_PASSWORD = ''
+         self.REDIS_DB = 2
+         self.REDIS_URL = f'redis://127.0.0.1:6379/{self.REDIS_DB}'
+         self.REDIS_TTL = 0
+         self.CLEANUP_FP = 0
+         self.FILTER_DEBUG = True
+         self.LOG_LEVEL = "INFO"
+         self.DECODE_RESPONSES = True
+         self.SCHEDULER_QUEUE_NAME = f'crawlo:{project_name}:queue:requests'
+
+     def get(self, key, default=None):
+         if key == 'PROJECT_NAME':
+             return self.project_name
+         elif key == 'REDIS_HOST':
+             return self.REDIS_HOST
+         elif key == 'REDIS_PASSWORD':
+             return self.REDIS_PASSWORD
+         elif key == 'REDIS_URL':
+             return self.REDIS_URL
+         elif key == 'FILTER_DEBUG':
+             return self.FILTER_DEBUG
+         elif key == 'LOG_LEVEL':
+             return self.LOG_LEVEL
+         elif key == 'DECODE_RESPONSES':
+             return self.DECODE_RESPONSES
+         elif key == 'SCHEDULER_QUEUE_NAME':
+             return self.SCHEDULER_QUEUE_NAME
+         return default
+
+     def get_bool(self, key, default=False):
+         if key == 'FILTER_DEBUG':
+             return self.FILTER_DEBUG
+         elif key == 'DECODE_RESPONSES':
+             return self.DECODE_RESPONSES
+         elif key == 'CLEANUP_FP':
+             return self.CLEANUP_FP
+         return default
+
+     def get_int(self, key, default=0):  # fixed method name
+         if key == 'REDIS_TTL':
+             return self.REDIS_TTL
+         elif key == 'REDIS_PORT':
+             return self.REDIS_PORT
+         elif key == 'REDIS_DB':
+             return self.REDIS_DB
+         elif key == 'SCHEDULER_MAX_QUEUE_SIZE':
+             return 1000
+         elif key == 'QUEUE_MAX_RETRIES':
+             return 3
+         elif key == 'QUEUE_TIMEOUT':
+             return 300
+         return default
+
+
+ class MockCrawler:
+     """Mock crawler class"""
+     def __init__(self, project_name="telecom_licenses_distributed"):
+         self.settings = MockSettings(project_name)
+         self.stats = {}
+
+
+ async def test_telecom_spider_redis_key():
+     """Test the telecom equipment license spider's Redis key naming convention"""
+     print("🔍 Testing the telecom equipment license spider's Redis key naming convention...")
+
+     project_name = "telecom_licenses_distributed"
+     expected_prefix = f"crawlo:{project_name}"
+
+     try:
+         # 1. Test QueueManager and RedisPriorityQueue
+         print(" 1. Testing the queue manager...")
+         queue_config = QueueConfig(
+             queue_type=QueueType.REDIS,
+             redis_url="redis://127.0.0.1:6379/2",
+             queue_name=f"crawlo:{project_name}:queue:requests",  # use the unified naming convention
+             max_queue_size=1000,
+             max_retries=3,
+             timeout=300
+         )
+
+         queue_manager = QueueManager(queue_config)
+         queue = await queue_manager._create_queue(QueueType.REDIS)
+
+         # Verify the queue names follow the convention
+         expected_queue_name = f"{expected_prefix}:queue:requests"
+         expected_processing_queue = f"{expected_prefix}:queue:processing"
+         expected_failed_queue = f"{expected_prefix}:queue:failed"
+
+         assert queue.queue_name == expected_queue_name, f"Queue name mismatch: {queue.queue_name} != {expected_queue_name}"
+         assert queue.processing_queue == expected_processing_queue, f"Processing queue name mismatch: {queue.processing_queue} != {expected_processing_queue}"
+         assert queue.failed_queue == expected_failed_queue, f"Failed queue name mismatch: {queue.failed_queue} != {expected_failed_queue}"
+
+         print(f" ✅ Request queue: {queue.queue_name}")
+         print(f" ✅ Processing queue: {queue.processing_queue}")
+         print(f" ✅ Failed queue: {queue.failed_queue}")
+
+         # 2. Test AioRedisFilter
+         print(" 2. Testing the request dedup filter...")
+         mock_crawler = MockCrawler(project_name)
+         filter_instance = AioRedisFilter.create_instance(mock_crawler)
+
+         expected_filter_key = f"{expected_prefix}:filter:fingerprint"
+         assert filter_instance.redis_key == expected_filter_key, f"Filter key mismatch: {filter_instance.redis_key} != {expected_filter_key}"
+
+         print(f" ✅ Request dedup key: {filter_instance.redis_key}")
+
+         # 3. Test RedisDedupPipeline
+         print(" 3. Testing the item dedup pipeline...")
+         dedup_pipeline = RedisDedupPipeline.from_crawler(mock_crawler)
+
+         expected_item_key = f"{expected_prefix}:item:fingerprint"
+         assert dedup_pipeline.redis_key == expected_item_key, f"Item dedup key mismatch: {dedup_pipeline.redis_key} != {expected_item_key}"
+
+         print(f" ✅ Item dedup key: {dedup_pipeline.redis_key}")
+
+         # 4. Verify that all keys use the unified prefix
+         print(" 4. Verifying the unified prefix...")
+         all_keys = [
+             queue.queue_name,
+             queue.processing_queue,
+             queue.failed_queue,
+             filter_instance.redis_key,
+             dedup_pipeline.redis_key
+         ]
+
+         for key in all_keys:
+             assert key.startswith(expected_prefix), f"Key does not use the unified prefix: {key}"
+             print(f" ✅ {key}")
+
+         print("✅ Telecom equipment license spider Redis key naming convention test passed!")
+         return True
+
+     except Exception as e:
+         print(f"❌ Test failed: {e}")
+         import traceback
+         traceback.print_exc()
+         return False
+     finally:
+         # Clean up resources
+         try:
+             if 'queue' in locals():
+                 await queue.close()
+             if 'filter_instance' in locals() and hasattr(filter_instance, 'redis'):
+                 await filter_instance.redis.close()
+             if 'dedup_pipeline' in locals() and hasattr(dedup_pipeline, 'redis_client'):
+                 dedup_pipeline.redis_client.close()
+         except:
+             pass
+
+
+ async def main():
+     """Main test function"""
+     print("🚀 Starting the telecom equipment license spider Redis key naming convention test...")
+     print("=" * 60)
+
+     try:
+         success = await test_telecom_spider_redis_key()
+
+         print("=" * 60)
+         if success:
+             print("🎉 All tests passed! The telecom equipment license spider follows the new Redis key naming convention")
+         else:
+             print("❌ Test failed, please check the implementation")
+             return 1
+
+     except Exception as e:
+         print("=" * 60)
+         print(f"❌ An exception occurred during the test: {e}")
+         import traceback
+         traceback.print_exc()
+         return 1
+
+     return 0
+
+
+ if __name__ == "__main__":
+     exit_code = asyncio.run(main())
      sys.exit(exit_code)
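
Taken together, the assertions above pin down one convention: every Redis key the framework creates is rooted at the crawlo:<project_name> prefix. A hypothetical helper (illustrative only, not part of crawlo's API) that derives the five keys the test expects:

# Hypothetical helper, not part of crawlo: derives the Redis key names
# asserted by the test above, all rooted at crawlo:<project_name>.
def expected_redis_keys(project_name: str) -> dict:
    prefix = f"crawlo:{project_name}"
    return {
        "requests_queue": f"{prefix}:queue:requests",
        "processing_queue": f"{prefix}:queue:processing",
        "failed_queue": f"{prefix}:queue:failed",
        "request_fingerprints": f"{prefix}:filter:fingerprint",
        "item_fingerprints": f"{prefix}:item:fingerprint",
    }

keys = expected_redis_keys("telecom_licenses_distributed")
assert keys["requests_queue"] == "crawlo:telecom_licenses_distributed:queue:requests"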