crawlo-1.3.4-py3-none-any.whl → crawlo-1.3.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
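
A diff like this can be reproduced locally. The sketch below is illustrative rather than the tool used to generate this page: it assumes both wheels have already been downloaded into the current directory (for example with pip download crawlo==1.3.4 --no-deps and pip download crawlo==1.3.6 --no-deps), opens them as zip archives, and prints a unified diff for every file present in both.

import difflib
import zipfile

OLD = "crawlo-1.3.4-py3-none-any.whl"
NEW = "crawlo-1.3.6-py3-none-any.whl"

# Wheel files are ordinary zip archives, so the standard library is enough here.
with zipfile.ZipFile(OLD) as old_whl, zipfile.ZipFile(NEW) as new_whl:
    shared = sorted(set(old_whl.namelist()) & set(new_whl.namelist()))
    for name in shared:
        old_lines = old_whl.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
        new_lines = new_whl.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
        changes = list(difflib.unified_diff(old_lines, new_lines, fromfile=name, tofile=name))
        if changes:
            print("".join(changes), end="")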

Potentially problematic release.

This version of crawlo might be problematic.

Files changed (289)
  1. crawlo/__init__.py +87 -87
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -341
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +45 -45
  16. crawlo/core/engine.py +439 -439
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -257
  19. crawlo/crawler.py +638 -638
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -228
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +103 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -257
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -291
  47. crawlo/initialization/__init__.py +39 -39
  48. crawlo/initialization/built_in.py +425 -425
  49. crawlo/initialization/context.py +141 -141
  50. crawlo/initialization/core.py +193 -193
  51. crawlo/initialization/phases.py +148 -148
  52. crawlo/initialization/registry.py +145 -145
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -23
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +37 -37
  58. crawlo/logging/config.py +96 -96
  59. crawlo/logging/factory.py +128 -128
  60. crawlo/logging/manager.py +111 -111
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -212
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +325 -318
  85. crawlo/pipelines/pipeline_manager.py +76 -76
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -327
  88. crawlo/queue/pqueue.py +42 -42
  89. crawlo/queue/queue_manager.py +503 -503
  90. crawlo/queue/redis_priority_queue.py +326 -326
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -321
  93. crawlo/settings/setting_manager.py +214 -214
  94. crawlo/spider/__init__.py +657 -657
  95. crawlo/stats_collector.py +73 -73
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +138 -138
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +167 -167
  104. crawlo/templates/project/settings_distributed.py.tmpl +166 -166
  105. crawlo/templates/project/settings_gentle.py.tmpl +166 -166
  106. crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
  107. crawlo/templates/project/settings_minimal.py.tmpl +65 -65
  108. crawlo/templates/project/settings_simple.py.tmpl +164 -164
  109. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  110. crawlo/templates/run.py.tmpl +34 -34
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +9 -9
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +364 -364
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +25 -25
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -165
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +80 -44
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -388
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -225
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.4.dist-info → crawlo-1.3.6.dist-info}/METADATA +1126 -1126
  149. crawlo-1.3.6.dist-info/RECORD +290 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +106 -106
  154. tests/baidu_performance_test.py +108 -108
  155. tests/baidu_test.py +59 -59
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +212 -212
  158. tests/comprehensive_test.py +81 -81
  159. tests/comprehensive_testing_summary.md +186 -186
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +69 -69
  164. tests/debug_framework_logger.py +84 -84
  165. tests/debug_log_config.py +127 -0
  166. tests/debug_log_levels.py +63 -63
  167. tests/debug_pipelines.py +66 -66
  168. tests/detailed_log_test.py +234 -0
  169. tests/distributed_test.py +66 -66
  170. tests/distributed_test_debug.py +76 -76
  171. tests/dynamic_loading_example.py +523 -523
  172. tests/dynamic_loading_test.py +104 -104
  173. tests/env_config_example.py +133 -133
  174. tests/error_handling_example.py +171 -171
  175. tests/final_comprehensive_test.py +151 -151
  176. tests/final_log_test.py +261 -0
  177. tests/final_validation_test.py +182 -182
  178. tests/fix_log_test.py +143 -0
  179. tests/framework_performance_test.py +202 -202
  180. tests/log_buffering_test.py +112 -0
  181. tests/log_generation_timing_test.py +154 -0
  182. tests/optimized_performance_test.py +211 -211
  183. tests/performance_comparison.py +245 -245
  184. tests/queue_blocking_test.py +113 -113
  185. tests/queue_test.py +89 -89
  186. tests/redis_key_validation_demo.py +130 -130
  187. tests/request_params_example.py +150 -150
  188. tests/response_improvements_example.py +144 -144
  189. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  190. tests/scrapy_comparison/scrapy_test.py +133 -133
  191. tests/simple_command_test.py +119 -119
  192. tests/simple_crawlo_test.py +127 -127
  193. tests/simple_log_test.py +57 -57
  194. tests/simple_log_test2.py +138 -0
  195. tests/simple_optimization_test.py +128 -128
  196. tests/simple_queue_type_test.py +42 -0
  197. tests/simple_spider_test.py +49 -49
  198. tests/simple_test.py +47 -47
  199. tests/spider_log_timing_test.py +178 -0
  200. tests/test_advanced_tools.py +148 -148
  201. tests/test_all_commands.py +230 -230
  202. tests/test_all_redis_key_configs.py +145 -145
  203. tests/test_authenticated_proxy.py +141 -141
  204. tests/test_batch_processor.py +178 -178
  205. tests/test_cleaners.py +54 -54
  206. tests/test_component_factory.py +174 -174
  207. tests/test_comprehensive.py +146 -146
  208. tests/test_config_consistency.py +80 -80
  209. tests/test_config_merge.py +152 -152
  210. tests/test_config_validator.py +182 -182
  211. tests/test_controlled_spider_mixin.py +79 -79
  212. tests/test_crawlo_proxy_integration.py +108 -108
  213. tests/test_date_tools.py +123 -123
  214. tests/test_default_header_middleware.py +158 -158
  215. tests/test_distributed.py +65 -65
  216. tests/test_double_crawlo_fix.py +207 -207
  217. tests/test_double_crawlo_fix_simple.py +124 -124
  218. tests/test_download_delay_middleware.py +221 -221
  219. tests/test_downloader_proxy_compatibility.py +268 -268
  220. tests/test_dynamic_downloaders_proxy.py +124 -124
  221. tests/test_dynamic_proxy.py +92 -92
  222. tests/test_dynamic_proxy_config.py +146 -146
  223. tests/test_dynamic_proxy_real.py +109 -109
  224. tests/test_edge_cases.py +303 -303
  225. tests/test_enhanced_error_handler.py +270 -270
  226. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  227. tests/test_env_config.py +121 -121
  228. tests/test_error_handler_compatibility.py +112 -112
  229. tests/test_factories.py +252 -252
  230. tests/test_final_validation.py +153 -153
  231. tests/test_framework_env_usage.py +103 -103
  232. tests/test_framework_logger.py +66 -66
  233. tests/test_framework_startup.py +64 -64
  234. tests/test_get_component_logger.py +84 -0
  235. tests/test_integration.py +169 -169
  236. tests/test_item_dedup_redis_key.py +122 -122
  237. tests/test_large_scale_config.py +112 -112
  238. tests/test_large_scale_helper.py +235 -235
  239. tests/test_logging_system.py +283 -0
  240. tests/test_mode_change.py +72 -72
  241. tests/test_mode_consistency.py +51 -51
  242. tests/test_offsite_middleware.py +221 -221
  243. tests/test_parsel.py +29 -29
  244. tests/test_performance.py +327 -327
  245. tests/test_performance_monitor.py +115 -115
  246. tests/test_proxy_api.py +264 -264
  247. tests/test_proxy_health_check.py +32 -32
  248. tests/test_proxy_middleware.py +121 -121
  249. tests/test_proxy_middleware_enhanced.py +216 -216
  250. tests/test_proxy_middleware_integration.py +136 -136
  251. tests/test_proxy_middleware_refactored.py +184 -184
  252. tests/test_proxy_providers.py +56 -56
  253. tests/test_proxy_stats.py +19 -19
  254. tests/test_proxy_strategies.py +59 -59
  255. tests/test_queue_empty_check.py +41 -41
  256. tests/test_queue_manager_double_crawlo.py +173 -173
  257. tests/test_queue_manager_redis_key.py +176 -176
  258. tests/test_queue_type.py +107 -0
  259. tests/test_random_user_agent.py +72 -72
  260. tests/test_real_scenario_proxy.py +195 -195
  261. tests/test_redis_config.py +28 -28
  262. tests/test_redis_connection_pool.py +294 -294
  263. tests/test_redis_key_naming.py +181 -181
  264. tests/test_redis_key_validator.py +123 -123
  265. tests/test_redis_queue.py +224 -224
  266. tests/test_request_ignore_middleware.py +182 -182
  267. tests/test_request_params.py +111 -111
  268. tests/test_request_serialization.py +70 -70
  269. tests/test_response_code_middleware.py +349 -349
  270. tests/test_response_filter_middleware.py +427 -427
  271. tests/test_response_improvements.py +152 -152
  272. tests/test_retry_middleware.py +241 -241
  273. tests/test_scheduler.py +252 -252
  274. tests/test_scheduler_config_update.py +133 -133
  275. tests/test_simple_response.py +61 -61
  276. tests/test_telecom_spider_redis_key.py +205 -205
  277. tests/test_template_content.py +87 -87
  278. tests/test_template_redis_key.py +134 -134
  279. tests/test_tools.py +159 -159
  280. tests/test_user_agents.py +96 -96
  281. tests/tools_example.py +260 -260
  282. tests/untested_features_report.md +138 -138
  283. tests/verify_debug.py +51 -51
  284. tests/verify_distributed.py +117 -117
  285. tests/verify_log_fix.py +111 -111
  286. crawlo-1.3.4.dist-info/RECORD +0 -278
  287. {crawlo-1.3.4.dist-info → crawlo-1.3.6.dist-info}/WHEEL +0 -0
  288. {crawlo-1.3.4.dist-info → crawlo-1.3.6.dist-info}/entry_points.txt +0 -0
  289. {crawlo-1.3.4.dist-info → crawlo-1.3.6.dist-info}/top_level.txt +0 -0
crawlo/tools/distributed_coordinator.py
@@ -1,388 +1,388 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- # @Time : 2025-09-10 22:00
- # @Author : crawl-coder
- # @Desc : Distributed coordination tools
- """
-
- import hashlib
- import time
- import urllib.parse
- from datetime import datetime
- from typing import Dict, Any, Optional, Tuple, List, Set
- from urllib.parse import urlparse
-
-
- class TaskDistributor:
-     """Task distribution utility class"""
-
-     @staticmethod
-     def generate_pagination_tasks(base_url: str, start_page: int = 1,
-                                   end_page: int = 100, page_param: str = "page") -> List[str]:
-         """
-         Generate a list of paginated task URLs
-
-         Args:
-             base_url (str): Base URL
-             start_page (int): Starting page number
-             end_page (int): Ending page number
-             page_param (str): Pagination parameter name
-
-         Returns:
-             List[str]: List of paginated URLs
-         """
-         tasks = []
-         parsed = urlparse(base_url)
-         query_dict = dict([q.split('=') for q in parsed.query.split('&') if q]) if parsed.query else {}
-
-         for page in range(start_page, end_page + 1):
-             query_dict[page_param] = str(page)
-             query_string = '&'.join([f"{k}={v}" for k, v in query_dict.items()])
-             new_parsed = parsed._replace(query=query_string)
-             tasks.append(urllib.parse.urlunparse(new_parsed))
-
-         return tasks
-
-     @staticmethod
-     def distribute_tasks(tasks: List[Any], num_workers: int) -> List[List[Any]]:
-         """
-         Distribute tasks across multiple worker nodes
-
-         Args:
-             tasks (List[Any]): Task list
-             num_workers (int): Number of worker nodes
-
-         Returns:
-             List[List[Any]]: Distributed task lists
-         """
-         if num_workers <= 0:
-             raise ValueError("The number of worker nodes must be greater than 0")
-
-         if not tasks:
-             return [[] for _ in range(num_workers)]
-
-         # Work out how many tasks each worker node should get
-         tasks_per_worker = len(tasks) // num_workers
-         remaining_tasks = len(tasks) % num_workers
-
-         distributed_tasks = []
-         task_index = 0
-
-         for i in range(num_workers):
-             # Assign the base number of tasks
-             worker_tasks_count = tasks_per_worker
-             # Hand out the remaining tasks
-             if i < remaining_tasks:
-                 worker_tasks_count += 1
-
-             worker_tasks = tasks[task_index:task_index + worker_tasks_count]
-             distributed_tasks.append(worker_tasks)
-             task_index += worker_tasks_count
-
-         return distributed_tasks
-
-
- class DeduplicationTool:
-     """Data deduplication utility class"""
-
-     def __init__(self):
-         self.memory_set: Set[str] = set()
-         self.bloom_filter = None  # A Bloom filter could be integrated in a real deployment
-
-     @staticmethod
-     def generate_fingerprint(data: Any) -> str:
-         """
-         Generate a data fingerprint
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             str: Data fingerprint (MD5 hash)
-         """
-         if isinstance(data, dict):
-             # For dicts, sort the keys to ensure consistency
-             data_str = str(sorted(data.items()))
-         else:
-             data_str = str(data)
-
-         return hashlib.md5(data_str.encode('utf-8')).hexdigest()
-
-     def is_duplicate(self, data: Any) -> bool:
-         """
-         Check whether the data is a duplicate (in-memory deduplication)
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             bool: Whether it is a duplicate
-         """
-         fingerprint = self.generate_fingerprint(data)
-         return fingerprint in self.memory_set
-
-     def add_to_dedup(self, data: Any) -> bool:
-         """
-         Add the data to the deduplication set
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             bool: Whether it was added successfully (True if it did not exist before, False if it already existed)
-         """
-         fingerprint = self.generate_fingerprint(data)
-         if fingerprint in self.memory_set:
-             return False
-         else:
-             self.memory_set.add(fingerprint)
-             return True
-
-     async def async_is_duplicate(self, data: Any) -> bool:
-         """
-         Asynchronously check whether the data is a duplicate
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             bool: Whether it is a duplicate
-         """
-         return self.is_duplicate(data)
-
-     async def async_add_to_dedup(self, data: Any) -> bool:
-         """
-         Asynchronously add the data to the deduplication set
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             bool: Whether it was added successfully
-         """
-         return self.add_to_dedup(data)
-
-
- class DistributedCoordinator:
-     """Distributed coordination utility class"""
-
-     def __init__(self, redis_client: Any = None):
-         """
-         Initialize the distributed coordination tool
-
-         Args:
-             redis_client (Any): Redis client
-         """
-         self.redis_client = redis_client
-         self.task_distributor = TaskDistributor()
-         self.deduplication_tool = DeduplicationTool()
-
-     @staticmethod
-     def generate_task_id(url: str, spider_name: str) -> str:
-         """
-         Generate a task ID
-
-         Args:
-             url (str): URL
-             spider_name (str): Spider name
-
-         Returns:
-             str: Task ID
-         """
-         # Generate a unique task ID from the URL and spider name
-         unique_string = f"{url}_{spider_name}_{int(time.time() * 1000)}"
-         return hashlib.md5(unique_string.encode('utf-8')).hexdigest()
-
-     async def claim_task(self, task_id: str, worker_id: str,
-                          timeout: int = 300) -> Tuple[bool, Optional[str]]:
-         """
-         Claim a task (distributed lock)
-
-         Args:
-             task_id (str): Task ID
-             worker_id (str): Worker node ID
-             timeout (int): Lock timeout in seconds
-
-         Returns:
-             Tuple[bool, Optional[str]]: (whether the claim succeeded, error message)
-         """
-         # Without a Redis client, fall back to an in-memory simulation
-         if self.redis_client is None:
-             # Simulate a successful claim
-             return True, None
-
-         try:
-             # A real implementation should use the Redis SET command with the NX and EX options
-             # result = await self.redis_client.set(f"task_lock:{task_id}", worker_id, nx=True, ex=timeout)
-             # return bool(result), None if result else "Task already claimed by another node"
-             return True, None
-         except Exception as e:
-             return False, str(e)
-
-     async def report_task_status(self, task_id: str, status: str, worker_id: str) -> bool:
-         """
-         Report task status
-
-         Args:
-             task_id (str): Task ID
-             status (str): Task status (pending, processing, completed, failed)
-             worker_id (str): Worker node ID
-
-         Returns:
-             bool: Whether the report succeeded
-         """
-         try:
-             status_info = {
-                 "task_id": task_id,
-                 "status": status,
-                 "worker_id": worker_id,
-                 "timestamp": datetime.now().isoformat()
-             }
-
-             if self.redis_client is None:
-                 # Simulate a successful report
-                 print(f"Reporting task status: {status_info}")
-                 return True
-
-             # A real implementation should store the status info in Redis
-             # await self.redis_client.hset(f"task_status:{task_id}", mapping=status_info)
-             return True
-         except Exception:
-             return False
-
-     async def get_cluster_info(self) -> Dict[str, Any]:
-         """
-         Get cluster information
-
-         Returns:
-             Dict[str, Any]: Cluster information
-         """
-         try:
-             if self.redis_client is None:
-                 # Return simulated cluster information
-                 return {
-                     "worker_count": 3,
-                     "active_workers": ["worker_1", "worker_2", "worker_3"],
-                     "task_queue_size": 100,
-                     "processed_tasks": 500,
-                     "failed_tasks": 5,
-                     "timestamp": datetime.now().isoformat()
-                 }
-
-             # A real implementation should fetch cluster information from Redis
-             # Simulated data is returned here
-             return {
-                 "worker_count": 3,
-                 "active_workers": ["worker_1", "worker_2", "worker_3"],
-                 "task_queue_size": 100,
-                 "processed_tasks": 500,
-                 "failed_tasks": 5,
-                 "timestamp": datetime.now().isoformat()
-             }
-         except Exception as e:
-             return {"error": str(e)}
-
-     def generate_pagination_tasks(self, base_url: str, start_page: int = 1,
-                                   end_page: int = 100, page_param: str = "page") -> List[str]:
-         """
-         Generate a list of paginated task URLs
-
-         Args:
-             base_url (str): Base URL
-             start_page (int): Starting page number
-             end_page (int): Ending page number
-             page_param (str): Pagination parameter name
-
-         Returns:
-             List[str]: List of paginated URLs
-         """
-         return self.task_distributor.generate_pagination_tasks(base_url, start_page, end_page, page_param)
-
-     def distribute_tasks(self, tasks: List[Any], num_workers: int) -> List[List[Any]]:
-         """
-         Distribute tasks across multiple worker nodes
-
-         Args:
-             tasks (List[Any]): Task list
-             num_workers (int): Number of worker nodes
-
-         Returns:
-             List[List[Any]]: Distributed task lists
-         """
-         return self.task_distributor.distribute_tasks(tasks, num_workers)
-
-     async def is_duplicate(self, data: Any) -> bool:
-         """
-         Check whether the data is a duplicate
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             bool: Whether it is a duplicate
-         """
-         # With a Redis client, a Bloom filter or Redis set could be used for deduplication
-         if self.redis_client is not None:
-             # Redis-based deduplication logic could be implemented here
-             pass
-
-         # Fall back to in-memory deduplication
-         return await self.deduplication_tool.async_is_duplicate(data)
-
-     async def add_to_dedup(self, data: Any) -> bool:
-         """
-         Add the data to the deduplication set
-
-         Args:
-             data (Any): Data
-
-         Returns:
-             bool: Whether it was added successfully
-         """
-         # With a Redis client, a Bloom filter or Redis set could be used for deduplication
-         if self.redis_client is not None:
-             # Redis-based deduplication logic could be implemented here
-             pass
-
-         # Fall back to in-memory deduplication
-         return await self.deduplication_tool.async_add_to_dedup(data)
-
-
- # Convenience functions
- def generate_task_id(url: str, spider_name: str) -> str:
-     """Generate a task ID"""
-     return DistributedCoordinator.generate_task_id(url, spider_name)
-
-
- async def claim_task(task_id: str, worker_id: str,
-                      redis_client: Any = None, timeout: int = 300) -> Tuple[bool, Optional[str]]:
-     """Claim a task"""
-     coordinator = DistributedCoordinator(redis_client)
-     return await coordinator.claim_task(task_id, worker_id, timeout)
-
-
- async def report_task_status(task_id: str, status: str, worker_id: str,
-                              redis_client: Any = None) -> bool:
-     """Report task status"""
-     coordinator = DistributedCoordinator(redis_client)
-     return await coordinator.report_task_status(task_id, status, worker_id)
-
-
- async def get_cluster_info(redis_client: Any = None) -> Dict[str, Any]:
-     """Get cluster information"""
-     coordinator = DistributedCoordinator(redis_client)
-     return await coordinator.get_cluster_info()
-
-
- def generate_pagination_tasks(base_url: str, start_page: int = 1,
-                               end_page: int = 100, page_param: str = "page") -> List[str]:
-     """Generate a list of paginated task URLs"""
-     coordinator = DistributedCoordinator()
-     return coordinator.generate_pagination_tasks(base_url, start_page, end_page, page_param)
-
-
- def distribute_tasks(tasks: List[Any], num_workers: int) -> List[List[Any]]:
-     """Distribute tasks across multiple worker nodes"""
-     coordinator = DistributedCoordinator()
-     return coordinator.distribute_tasks(tasks, num_workers)
+ #!/usr/bin/python
+ # -*- coding: UTF-8 -*-
+ """
+ # @Time : 2025-09-10 22:00
+ # @Author : crawl-coder
+ # @Desc : Distributed coordination tools
+ """
+
+ import hashlib
+ import time
+ import urllib.parse
+ from datetime import datetime
+ from typing import Dict, Any, Optional, Tuple, List, Set
+ from urllib.parse import urlparse
+
+
+ class TaskDistributor:
+     """Task distribution utility class"""
+
+     @staticmethod
+     def generate_pagination_tasks(base_url: str, start_page: int = 1,
+                                   end_page: int = 100, page_param: str = "page") -> List[str]:
+         """
+         Generate a list of paginated task URLs
+
+         Args:
+             base_url (str): Base URL
+             start_page (int): Starting page number
+             end_page (int): Ending page number
+             page_param (str): Pagination parameter name
+
+         Returns:
+             List[str]: List of paginated URLs
+         """
+         tasks = []
+         parsed = urlparse(base_url)
+         query_dict = dict([q.split('=') for q in parsed.query.split('&') if q]) if parsed.query else {}
+
+         for page in range(start_page, end_page + 1):
+             query_dict[page_param] = str(page)
+             query_string = '&'.join([f"{k}={v}" for k, v in query_dict.items()])
+             new_parsed = parsed._replace(query=query_string)
+             tasks.append(urllib.parse.urlunparse(new_parsed))
+
+         return tasks
+
+     @staticmethod
+     def distribute_tasks(tasks: List[Any], num_workers: int) -> List[List[Any]]:
+         """
+         Distribute tasks across multiple worker nodes
+
+         Args:
+             tasks (List[Any]): Task list
+             num_workers (int): Number of worker nodes
+
+         Returns:
+             List[List[Any]]: Distributed task lists
+         """
+         if num_workers <= 0:
+             raise ValueError("The number of worker nodes must be greater than 0")
+
+         if not tasks:
+             return [[] for _ in range(num_workers)]
+
+         # Work out how many tasks each worker node should get
+         tasks_per_worker = len(tasks) // num_workers
+         remaining_tasks = len(tasks) % num_workers
+
+         distributed_tasks = []
+         task_index = 0
+
+         for i in range(num_workers):
+             # Assign the base number of tasks
+             worker_tasks_count = tasks_per_worker
+             # Hand out the remaining tasks
+             if i < remaining_tasks:
+                 worker_tasks_count += 1
+
+             worker_tasks = tasks[task_index:task_index + worker_tasks_count]
+             distributed_tasks.append(worker_tasks)
+             task_index += worker_tasks_count
+
+         return distributed_tasks
+
+
+ class DeduplicationTool:
+     """Data deduplication utility class"""
+
+     def __init__(self):
+         self.memory_set: Set[str] = set()
+         self.bloom_filter = None  # A Bloom filter could be integrated in a real deployment
+
+     @staticmethod
+     def generate_fingerprint(data: Any) -> str:
+         """
+         Generate a data fingerprint
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             str: Data fingerprint (MD5 hash)
+         """
+         if isinstance(data, dict):
+             # For dicts, sort the keys to ensure consistency
+             data_str = str(sorted(data.items()))
+         else:
+             data_str = str(data)
+
+         return hashlib.md5(data_str.encode('utf-8')).hexdigest()
+
+     def is_duplicate(self, data: Any) -> bool:
+         """
+         Check whether the data is a duplicate (in-memory deduplication)
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             bool: Whether it is a duplicate
+         """
+         fingerprint = self.generate_fingerprint(data)
+         return fingerprint in self.memory_set
+
+     def add_to_dedup(self, data: Any) -> bool:
+         """
+         Add the data to the deduplication set
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             bool: Whether it was added successfully (True if it did not exist before, False if it already existed)
+         """
+         fingerprint = self.generate_fingerprint(data)
+         if fingerprint in self.memory_set:
+             return False
+         else:
+             self.memory_set.add(fingerprint)
+             return True
+
+     async def async_is_duplicate(self, data: Any) -> bool:
+         """
+         Asynchronously check whether the data is a duplicate
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             bool: Whether it is a duplicate
+         """
+         return self.is_duplicate(data)
+
+     async def async_add_to_dedup(self, data: Any) -> bool:
+         """
+         Asynchronously add the data to the deduplication set
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             bool: Whether it was added successfully
+         """
+         return self.add_to_dedup(data)
+
+
+ class DistributedCoordinator:
+     """Distributed coordination utility class"""
+
+     def __init__(self, redis_client: Any = None):
+         """
+         Initialize the distributed coordination tool
+
+         Args:
+             redis_client (Any): Redis client
+         """
+         self.redis_client = redis_client
+         self.task_distributor = TaskDistributor()
+         self.deduplication_tool = DeduplicationTool()
+
+     @staticmethod
+     def generate_task_id(url: str, spider_name: str) -> str:
+         """
+         Generate a task ID
+
+         Args:
+             url (str): URL
+             spider_name (str): Spider name
+
+         Returns:
+             str: Task ID
+         """
+         # Generate a unique task ID from the URL and spider name
+         unique_string = f"{url}_{spider_name}_{int(time.time() * 1000)}"
+         return hashlib.md5(unique_string.encode('utf-8')).hexdigest()
+
+     async def claim_task(self, task_id: str, worker_id: str,
+                          timeout: int = 300) -> Tuple[bool, Optional[str]]:
+         """
+         Claim a task (distributed lock)
+
+         Args:
+             task_id (str): Task ID
+             worker_id (str): Worker node ID
+             timeout (int): Lock timeout in seconds
+
+         Returns:
+             Tuple[bool, Optional[str]]: (whether the claim succeeded, error message)
+         """
+         # Without a Redis client, fall back to an in-memory simulation
+         if self.redis_client is None:
+             # Simulate a successful claim
+             return True, None
+
+         try:
+             # A real implementation should use the Redis SET command with the NX and EX options
+             # result = await self.redis_client.set(f"task_lock:{task_id}", worker_id, nx=True, ex=timeout)
+             # return bool(result), None if result else "Task already claimed by another node"
+             return True, None
+         except Exception as e:
+             return False, str(e)
+
+     async def report_task_status(self, task_id: str, status: str, worker_id: str) -> bool:
+         """
+         Report task status
+
+         Args:
+             task_id (str): Task ID
+             status (str): Task status (pending, processing, completed, failed)
+             worker_id (str): Worker node ID
+
+         Returns:
+             bool: Whether the report succeeded
+         """
+         try:
+             status_info = {
+                 "task_id": task_id,
+                 "status": status,
+                 "worker_id": worker_id,
+                 "timestamp": datetime.now().isoformat()
+             }
+
+             if self.redis_client is None:
+                 # Simulate a successful report
+                 print(f"Reporting task status: {status_info}")
+                 return True
+
+             # A real implementation should store the status info in Redis
+             # await self.redis_client.hset(f"task_status:{task_id}", mapping=status_info)
+             return True
+         except Exception:
+             return False
+
+     async def get_cluster_info(self) -> Dict[str, Any]:
+         """
+         Get cluster information
+
+         Returns:
+             Dict[str, Any]: Cluster information
+         """
+         try:
+             if self.redis_client is None:
+                 # Return simulated cluster information
+                 return {
+                     "worker_count": 3,
+                     "active_workers": ["worker_1", "worker_2", "worker_3"],
+                     "task_queue_size": 100,
+                     "processed_tasks": 500,
+                     "failed_tasks": 5,
+                     "timestamp": datetime.now().isoformat()
+                 }
+
+             # A real implementation should fetch cluster information from Redis
+             # Simulated data is returned here
+             return {
+                 "worker_count": 3,
+                 "active_workers": ["worker_1", "worker_2", "worker_3"],
+                 "task_queue_size": 100,
+                 "processed_tasks": 500,
+                 "failed_tasks": 5,
+                 "timestamp": datetime.now().isoformat()
+             }
+         except Exception as e:
+             return {"error": str(e)}
+
+     def generate_pagination_tasks(self, base_url: str, start_page: int = 1,
+                                   end_page: int = 100, page_param: str = "page") -> List[str]:
+         """
+         Generate a list of paginated task URLs
+
+         Args:
+             base_url (str): Base URL
+             start_page (int): Starting page number
+             end_page (int): Ending page number
+             page_param (str): Pagination parameter name
+
+         Returns:
+             List[str]: List of paginated URLs
+         """
+         return self.task_distributor.generate_pagination_tasks(base_url, start_page, end_page, page_param)
+
+     def distribute_tasks(self, tasks: List[Any], num_workers: int) -> List[List[Any]]:
+         """
+         Distribute tasks across multiple worker nodes
+
+         Args:
+             tasks (List[Any]): Task list
+             num_workers (int): Number of worker nodes
+
+         Returns:
+             List[List[Any]]: Distributed task lists
+         """
+         return self.task_distributor.distribute_tasks(tasks, num_workers)
+
+     async def is_duplicate(self, data: Any) -> bool:
+         """
+         Check whether the data is a duplicate
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             bool: Whether it is a duplicate
+         """
+         # With a Redis client, a Bloom filter or Redis set could be used for deduplication
+         if self.redis_client is not None:
+             # Redis-based deduplication logic could be implemented here
+             pass
+
+         # Fall back to in-memory deduplication
+         return await self.deduplication_tool.async_is_duplicate(data)
+
+     async def add_to_dedup(self, data: Any) -> bool:
+         """
+         Add the data to the deduplication set
+
+         Args:
+             data (Any): Data
+
+         Returns:
+             bool: Whether it was added successfully
+         """
+         # With a Redis client, a Bloom filter or Redis set could be used for deduplication
+         if self.redis_client is not None:
+             # Redis-based deduplication logic could be implemented here
+             pass
+
+         # Fall back to in-memory deduplication
+         return await self.deduplication_tool.async_add_to_dedup(data)
+
+
+ # Convenience functions
+ def generate_task_id(url: str, spider_name: str) -> str:
+     """Generate a task ID"""
+     return DistributedCoordinator.generate_task_id(url, spider_name)
+
+
+ async def claim_task(task_id: str, worker_id: str,
+                      redis_client: Any = None, timeout: int = 300) -> Tuple[bool, Optional[str]]:
+     """Claim a task"""
+     coordinator = DistributedCoordinator(redis_client)
+     return await coordinator.claim_task(task_id, worker_id, timeout)
+
+
+ async def report_task_status(task_id: str, status: str, worker_id: str,
+                              redis_client: Any = None) -> bool:
+     """Report task status"""
+     coordinator = DistributedCoordinator(redis_client)
+     return await coordinator.report_task_status(task_id, status, worker_id)
+
+
+ async def get_cluster_info(redis_client: Any = None) -> Dict[str, Any]:
+     """Get cluster information"""
+     coordinator = DistributedCoordinator(redis_client)
+     return await coordinator.get_cluster_info()
+
+
+ def generate_pagination_tasks(base_url: str, start_page: int = 1,
+                               end_page: int = 100, page_param: str = "page") -> List[str]:
+     """Generate a list of paginated task URLs"""
+     coordinator = DistributedCoordinator()
+     return coordinator.generate_pagination_tasks(base_url, start_page, end_page, page_param)
+
+
+ def distribute_tasks(tasks: List[Any], num_workers: int) -> List[List[Any]]:
+     """Distribute tasks across multiple worker nodes"""
+     coordinator = DistributedCoordinator()
+     return coordinator.distribute_tasks(tasks, num_workers)
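
The module shown above is importable on its own. A minimal usage sketch follows; the URL, spider name, and worker ID are illustrative values, and without a redis_client the coordinator falls back to its in-memory simulation.

import asyncio

from crawlo.tools.distributed_coordinator import (
    DistributedCoordinator,
    TaskDistributor,
    generate_task_id,
)

# Build five paginated URLs and split them across two workers.
urls = TaskDistributor.generate_pagination_tasks(
    "https://example.com/list?sort=new", start_page=1, end_page=5
)
batches = TaskDistributor.distribute_tasks(urls, num_workers=2)  # first worker gets 3 URLs, second gets 2


async def main() -> None:
    coordinator = DistributedCoordinator()  # no Redis client: claims and reports are simulated
    task_id = generate_task_id(urls[0], "example_spider")
    claimed, error = await coordinator.claim_task(task_id, worker_id="worker_1")
    print(batches)
    print(claimed, error)
    print(await coordinator.is_duplicate({"url": urls[0]}))  # False the first time the item is seen


asyncio.run(main())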