crawlo-1.3.2-py3-none-any.whl → crawlo-1.3.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (219)
  1. crawlo/__init__.py +63 -63
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +322 -314
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +2 -2
  16. crawlo/core/engine.py +365 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +256 -256
  19. crawlo/crawler.py +1166 -1168
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +226 -226
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +52 -45
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/filters/__init__.py +154 -154
  40. crawlo/filters/aioredis_filter.py +234 -234
  41. crawlo/filters/memory_filter.py +269 -269
  42. crawlo/items/__init__.py +23 -23
  43. crawlo/items/base.py +21 -21
  44. crawlo/items/fields.py +52 -52
  45. crawlo/items/items.py +104 -104
  46. crawlo/middleware/__init__.py +21 -21
  47. crawlo/middleware/default_header.py +132 -132
  48. crawlo/middleware/download_delay.py +104 -104
  49. crawlo/middleware/middleware_manager.py +135 -135
  50. crawlo/middleware/offsite.py +123 -123
  51. crawlo/middleware/proxy.py +386 -386
  52. crawlo/middleware/request_ignore.py +86 -86
  53. crawlo/middleware/response_code.py +163 -163
  54. crawlo/middleware/response_filter.py +136 -136
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/middleware/simple_proxy.py +65 -65
  57. crawlo/mode_manager.py +187 -187
  58. crawlo/network/__init__.py +21 -21
  59. crawlo/network/request.py +379 -379
  60. crawlo/network/response.py +359 -359
  61. crawlo/pipelines/__init__.py +21 -21
  62. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  63. crawlo/pipelines/console_pipeline.py +39 -39
  64. crawlo/pipelines/csv_pipeline.py +316 -316
  65. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  66. crawlo/pipelines/json_pipeline.py +218 -218
  67. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  68. crawlo/pipelines/mongo_pipeline.py +131 -131
  69. crawlo/pipelines/mysql_pipeline.py +318 -318
  70. crawlo/pipelines/pipeline_manager.py +75 -75
  71. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  72. crawlo/project.py +325 -297
  73. crawlo/queue/pqueue.py +37 -37
  74. crawlo/queue/queue_manager.py +379 -379
  75. crawlo/queue/redis_priority_queue.py +306 -306
  76. crawlo/settings/__init__.py +7 -7
  77. crawlo/settings/default_settings.py +225 -225
  78. crawlo/settings/setting_manager.py +198 -198
  79. crawlo/spider/__init__.py +639 -639
  80. crawlo/stats_collector.py +59 -59
  81. crawlo/subscriber.py +129 -129
  82. crawlo/task_manager.py +30 -30
  83. crawlo/templates/crawlo.cfg.tmpl +10 -10
  84. crawlo/templates/project/__init__.py.tmpl +3 -3
  85. crawlo/templates/project/items.py.tmpl +17 -17
  86. crawlo/templates/project/middlewares.py.tmpl +118 -118
  87. crawlo/templates/project/pipelines.py.tmpl +96 -96
  88. crawlo/templates/project/settings.py.tmpl +266 -266
  89. crawlo/templates/project/settings_distributed.py.tmpl +179 -179
  90. crawlo/templates/project/settings_gentle.py.tmpl +60 -60
  91. crawlo/templates/project/settings_high_performance.py.tmpl +130 -130
  92. crawlo/templates/project/settings_minimal.py.tmpl +34 -34
  93. crawlo/templates/project/settings_simple.py.tmpl +101 -101
  94. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  95. crawlo/templates/run.py.tmpl +38 -38
  96. crawlo/templates/spider/spider.py.tmpl +143 -143
  97. crawlo/tools/__init__.py +200 -200
  98. crawlo/tools/anti_crawler.py +268 -268
  99. crawlo/tools/authenticated_proxy.py +240 -240
  100. crawlo/tools/data_formatter.py +225 -225
  101. crawlo/tools/data_validator.py +180 -180
  102. crawlo/tools/date_tools.py +289 -289
  103. crawlo/tools/distributed_coordinator.py +388 -388
  104. crawlo/tools/encoding_converter.py +127 -127
  105. crawlo/tools/request_tools.py +82 -82
  106. crawlo/tools/retry_mechanism.py +224 -224
  107. crawlo/tools/scenario_adapter.py +262 -262
  108. crawlo/tools/text_cleaner.py +232 -232
  109. crawlo/utils/__init__.py +34 -34
  110. crawlo/utils/batch_processor.py +259 -259
  111. crawlo/utils/controlled_spider_mixin.py +439 -439
  112. crawlo/utils/db_helper.py +343 -343
  113. crawlo/utils/enhanced_error_handler.py +356 -356
  114. crawlo/utils/env_config.py +142 -142
  115. crawlo/utils/error_handler.py +123 -123
  116. crawlo/utils/func_tools.py +82 -82
  117. crawlo/utils/large_scale_config.py +286 -286
  118. crawlo/utils/large_scale_helper.py +344 -344
  119. crawlo/utils/log.py +199 -146
  120. crawlo/utils/performance_monitor.py +285 -285
  121. crawlo/utils/queue_helper.py +175 -175
  122. crawlo/utils/redis_connection_pool.py +351 -351
  123. crawlo/utils/redis_key_validator.py +198 -198
  124. crawlo/utils/request.py +267 -267
  125. crawlo/utils/request_serializer.py +218 -218
  126. crawlo/utils/spider_loader.py +61 -61
  127. crawlo/utils/system.py +11 -11
  128. crawlo/utils/tools.py +4 -4
  129. crawlo/utils/url.py +39 -39
  130. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/METADATA +1020 -1020
  131. crawlo-1.3.3.dist-info/RECORD +219 -0
  132. examples/__init__.py +7 -7
  133. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
  134. tests/__init__.py +7 -7
  135. tests/advanced_tools_example.py +275 -275
  136. tests/authenticated_proxy_example.py +107 -107
  137. tests/cleaners_example.py +160 -160
  138. tests/config_validation_demo.py +142 -142
  139. tests/controlled_spider_example.py +205 -205
  140. tests/date_tools_example.py +180 -180
  141. tests/debug_pipelines.py +66 -66
  142. tests/dynamic_loading_example.py +523 -523
  143. tests/dynamic_loading_test.py +104 -104
  144. tests/env_config_example.py +133 -133
  145. tests/error_handling_example.py +171 -171
  146. tests/redis_key_validation_demo.py +130 -130
  147. tests/request_params_example.py +150 -150
  148. tests/response_improvements_example.py +144 -144
  149. tests/test_advanced_tools.py +148 -148
  150. tests/test_all_redis_key_configs.py +145 -145
  151. tests/test_authenticated_proxy.py +141 -141
  152. tests/test_cleaners.py +54 -54
  153. tests/test_comprehensive.py +146 -146
  154. tests/test_config_consistency.py +80 -80
  155. tests/test_config_merge.py +152 -152
  156. tests/test_config_validator.py +182 -182
  157. tests/test_crawlo_proxy_integration.py +108 -108
  158. tests/test_date_tools.py +123 -123
  159. tests/test_default_header_middleware.py +158 -158
  160. tests/test_distributed.py +65 -65
  161. tests/test_double_crawlo_fix.py +207 -207
  162. tests/test_double_crawlo_fix_simple.py +124 -124
  163. tests/test_download_delay_middleware.py +221 -221
  164. tests/test_downloader_proxy_compatibility.py +268 -268
  165. tests/test_dynamic_downloaders_proxy.py +124 -124
  166. tests/test_dynamic_proxy.py +92 -92
  167. tests/test_dynamic_proxy_config.py +146 -146
  168. tests/test_dynamic_proxy_real.py +109 -109
  169. tests/test_edge_cases.py +303 -303
  170. tests/test_enhanced_error_handler.py +270 -270
  171. tests/test_env_config.py +121 -121
  172. tests/test_error_handler_compatibility.py +112 -112
  173. tests/test_final_validation.py +153 -153
  174. tests/test_framework_env_usage.py +103 -103
  175. tests/test_integration.py +169 -169
  176. tests/test_item_dedup_redis_key.py +122 -122
  177. tests/test_mode_consistency.py +51 -51
  178. tests/test_offsite_middleware.py +221 -221
  179. tests/test_parsel.py +29 -29
  180. tests/test_performance.py +327 -327
  181. tests/test_proxy_api.py +264 -264
  182. tests/test_proxy_health_check.py +32 -32
  183. tests/test_proxy_middleware.py +121 -121
  184. tests/test_proxy_middleware_enhanced.py +216 -216
  185. tests/test_proxy_middleware_integration.py +136 -136
  186. tests/test_proxy_middleware_refactored.py +184 -184
  187. tests/test_proxy_providers.py +56 -56
  188. tests/test_proxy_stats.py +19 -19
  189. tests/test_proxy_strategies.py +59 -59
  190. tests/test_queue_manager_double_crawlo.py +173 -173
  191. tests/test_queue_manager_redis_key.py +176 -176
  192. tests/test_random_user_agent.py +72 -72
  193. tests/test_real_scenario_proxy.py +195 -195
  194. tests/test_redis_config.py +28 -28
  195. tests/test_redis_connection_pool.py +294 -294
  196. tests/test_redis_key_naming.py +181 -181
  197. tests/test_redis_key_validator.py +123 -123
  198. tests/test_redis_queue.py +224 -224
  199. tests/test_request_ignore_middleware.py +182 -182
  200. tests/test_request_params.py +111 -111
  201. tests/test_request_serialization.py +70 -70
  202. tests/test_response_code_middleware.py +349 -349
  203. tests/test_response_filter_middleware.py +427 -427
  204. tests/test_response_improvements.py +152 -152
  205. tests/test_retry_middleware.py +241 -241
  206. tests/test_scheduler.py +252 -252
  207. tests/test_scheduler_config_update.py +133 -133
  208. tests/test_simple_response.py +61 -61
  209. tests/test_telecom_spider_redis_key.py +205 -205
  210. tests/test_template_content.py +87 -87
  211. tests/test_template_redis_key.py +134 -134
  212. tests/test_tools.py +159 -159
  213. tests/test_user_agents.py +96 -96
  214. tests/tools_example.py +260 -260
  215. tests/verify_distributed.py +117 -117
  216. crawlo-1.3.2.dist-info/RECORD +0 -219
  217. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/WHEEL +0 -0
  218. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/entry_points.txt +0 -0
  219. {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/top_level.txt +0 -0
crawlo/pipelines/database_dedup_pipeline.py
@@ -1,223 +1,223 @@
- #!/usr/bin/python
- # -*- coding:UTF-8 -*-
- """
- Database-backed item deduplication pipeline
- ===========================================
- Provides persistent deduplication for long-running crawls or crawls that resume from a checkpoint.
-
- Features:
- - Persistent storage: deduplication state survives spider restarts
- - High reliability: database transactions guarantee consistency
- - Broad applicability: supports multiple database backends
- - Extensible: custom table structures and fields are supported
- """
- import hashlib
- import aiomysql
-
- from crawlo import Item
- from crawlo.exceptions import DropItem
- from crawlo.spider import Spider
- from crawlo.utils.log import get_logger
-
-
- class DatabaseDedupPipeline:
-     """Database-backed item deduplication pipeline"""
-
-     def __init__(
-         self,
-         db_host: str = 'localhost',
-         db_port: int = 3306,
-         db_user: str = 'root',
-         db_password: str = '',
-         db_name: str = 'crawlo',
-         table_name: str = 'item_fingerprints',
-         log_level: str = "INFO"
-     ):
-         """
-         Initialize the database deduplication pipeline
-
-         :param db_host: database host address
-         :param db_port: database port
-         :param db_user: database username
-         :param db_password: database password
-         :param db_name: database name
-         :param table_name: table used to store fingerprints
-         :param log_level: log level
-         """
-         self.logger = get_logger(self.__class__.__name__, log_level)
-
-         # Database connection parameters
-         self.db_config = {
-             'host': db_host,
-             'port': db_port,
-             'user': db_user,
-             'password': db_password,
-             'db': db_name,
-             'autocommit': False
-         }
-
-         self.table_name = table_name
-         self.dropped_count = 0
-         self.connection = None
-         self.pool = None
-
-     @classmethod
-     def from_crawler(cls, crawler):
-         """Create a pipeline instance from the crawler settings"""
-         settings = crawler.settings
-
-         return cls(
-             db_host=settings.get('DB_HOST', 'localhost'),
-             db_port=settings.getint('DB_PORT', 3306),
-             db_user=settings.get('DB_USER', 'root'),
-             db_password=settings.get('DB_PASSWORD', ''),
-             db_name=settings.get('DB_NAME', 'crawlo'),
-             table_name=settings.get('DB_DEDUP_TABLE', 'item_fingerprints'),
-             log_level=settings.get('LOG_LEVEL', 'INFO')
-         )
-
-     async def open_spider(self, spider: Spider) -> None:
-         """
-         Initialize the database connection when the spider starts
-
-         :param spider: spider instance
-         """
-         try:
-             # Create the connection pool
-             self.pool = await aiomysql.create_pool(
-                 **self.db_config,
-                 minsize=2,
-                 maxsize=10
-             )
-
-             # Create the deduplication table (if it does not exist)
-             await self._create_dedup_table()
-
-             self.logger.info(f"Database deduplication pipeline initialized: {self.db_config['host']}:{self.db_config['port']}/{self.db_config['db']}.{self.table_name}")
-         except Exception as e:
-             self.logger.error(f"Database deduplication pipeline initialization failed: {e}")
-             raise RuntimeError(f"Database deduplication pipeline initialization failed: {e}")
-
-     async def _create_dedup_table(self) -> None:
-         """Create the deduplication table"""
-         create_table_sql = f"""
-         CREATE TABLE IF NOT EXISTS `{self.table_name}` (
-             `id` BIGINT AUTO_INCREMENT PRIMARY KEY,
-             `fingerprint` VARCHAR(64) NOT NULL UNIQUE,
-             `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-             INDEX `idx_fingerprint` (`fingerprint`)
-         ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
-         """
-
-         async with self.pool.acquire() as conn:
-             async with conn.cursor() as cursor:
-                 await cursor.execute(create_table_sql)
-                 await conn.commit()
-
-     async def process_item(self, item: Item, spider: Spider) -> Item:
-         """
-         Process an item and run the deduplication check
-
-         :param item: item to process
-         :param spider: spider instance
-         :return: the processed item, or raises DropItem
-         """
-         try:
-             # Generate the item fingerprint
-             fingerprint = self._generate_item_fingerprint(item)
-
-             # Check whether the fingerprint already exists
-             exists = await self._check_fingerprint_exists(fingerprint)
-
-             if exists:
-                 # Already seen: drop this item
-                 self.dropped_count += 1
-                 self.logger.debug(f"Dropping duplicate item: {fingerprint[:20]}...")
-                 raise DropItem(f"Duplicate item: {fingerprint}")
-             else:
-                 # Record the fingerprint of the new item
-                 await self._insert_fingerprint(fingerprint)
-                 self.logger.debug(f"Processing new item: {fingerprint[:20]}...")
-                 return item
-
-         except Exception as e:
-             self.logger.error(f"Error processing item: {e}")
-             # Keep processing on errors to avoid losing data
-             return item
-
-     async def _check_fingerprint_exists(self, fingerprint: str) -> bool:
-         """
-         Check whether a fingerprint already exists
-
-         :param fingerprint: item fingerprint
-         :return: whether it exists
-         """
-         check_sql = f"SELECT 1 FROM `{self.table_name}` WHERE `fingerprint` = %s LIMIT 1"
-
-         async with self.pool.acquire() as conn:
-             async with conn.cursor() as cursor:
-                 await cursor.execute(check_sql, (fingerprint,))
-                 result = await cursor.fetchone()
-                 return result is not None
-
-     async def _insert_fingerprint(self, fingerprint: str) -> None:
-         """
-         Insert a new fingerprint
-
-         :param fingerprint: item fingerprint
-         """
-         insert_sql = f"INSERT INTO `{self.table_name}` (`fingerprint`) VALUES (%s)"
-
-         async with self.pool.acquire() as conn:
-             async with conn.cursor() as cursor:
-                 try:
-                     await cursor.execute(insert_sql, (fingerprint,))
-                     await conn.commit()
-                 except aiomysql.IntegrityError:
-                     # Fingerprint already exists (can happen under concurrency)
-                     await conn.rollback()
-                     raise DropItem(f"Duplicate item: {fingerprint}")
-                 except Exception:
-                     await conn.rollback()
-                     raise
-
-     def _generate_item_fingerprint(self, item: Item) -> str:
-         """
-         Generate an item fingerprint
-
-         Builds a unique fingerprint from all fields of the item, used for deduplication.
-
-         :param item: the item
-         :return: fingerprint string
-         """
-         # Convert the item into a serializable dict
-         try:
-             item_dict = item.to_dict()
-         except AttributeError:
-             # Compatibility with Item implementations that lack to_dict
-             item_dict = dict(item)
-
-         # Sort the dict to keep the fingerprint deterministic
-         sorted_items = sorted(item_dict.items())
-
-         # Build the fingerprint string
-         fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
-
-         # Use SHA256 to produce a fixed-length fingerprint
-         return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
-
-     async def close_spider(self, spider: Spider) -> None:
-         """
-         Cleanup when the spider closes
-
-         :param spider: spider instance
-         """
-         try:
-             if self.pool:
-                 self.pool.close()
-                 await self.pool.wait_closed()
-
-             self.logger.info(f"Spider {spider.name} closed:")
-             self.logger.info(f" - Dropped duplicate items: {self.dropped_count}")
-         except Exception as e:
+ #!/usr/bin/python
+ # -*- coding:UTF-8 -*-
+ """
+ Database-backed item deduplication pipeline
+ ===========================================
+ Provides persistent deduplication for long-running crawls or crawls that resume from a checkpoint.
+
+ Features:
+ - Persistent storage: deduplication state survives spider restarts
+ - High reliability: database transactions guarantee consistency
+ - Broad applicability: supports multiple database backends
+ - Extensible: custom table structures and fields are supported
+ """
+ import hashlib
+ import aiomysql
+
+ from crawlo import Item
+ from crawlo.exceptions import DropItem
+ from crawlo.spider import Spider
+ from crawlo.utils.log import get_logger
+
+
+ class DatabaseDedupPipeline:
+     """Database-backed item deduplication pipeline"""
+
+     def __init__(
+         self,
+         db_host: str = 'localhost',
+         db_port: int = 3306,
+         db_user: str = 'root',
+         db_password: str = '',
+         db_name: str = 'crawlo',
+         table_name: str = 'item_fingerprints',
+         log_level: str = "INFO"
+     ):
+         """
+         Initialize the database deduplication pipeline
+
+         :param db_host: database host address
+         :param db_port: database port
+         :param db_user: database username
+         :param db_password: database password
+         :param db_name: database name
+         :param table_name: table used to store fingerprints
+         :param log_level: log level
+         """
+         self.logger = get_logger(self.__class__.__name__, log_level)
+
+         # Database connection parameters
+         self.db_config = {
+             'host': db_host,
+             'port': db_port,
+             'user': db_user,
+             'password': db_password,
+             'db': db_name,
+             'autocommit': False
+         }
+
+         self.table_name = table_name
+         self.dropped_count = 0
+         self.connection = None
+         self.pool = None
+
+     @classmethod
+     def from_crawler(cls, crawler):
+         """Create a pipeline instance from the crawler settings"""
+         settings = crawler.settings
+
+         return cls(
+             db_host=settings.get('DB_HOST', 'localhost'),
+             db_port=settings.getint('DB_PORT', 3306),
+             db_user=settings.get('DB_USER', 'root'),
+             db_password=settings.get('DB_PASSWORD', ''),
+             db_name=settings.get('DB_NAME', 'crawlo'),
+             table_name=settings.get('DB_DEDUP_TABLE', 'item_fingerprints'),
+             log_level=settings.get('LOG_LEVEL', 'INFO')
+         )
+
+     async def open_spider(self, spider: Spider) -> None:
+         """
+         Initialize the database connection when the spider starts
+
+         :param spider: spider instance
+         """
+         try:
+             # Create the connection pool
+             self.pool = await aiomysql.create_pool(
+                 **self.db_config,
+                 minsize=2,
+                 maxsize=10
+             )
+
+             # Create the deduplication table (if it does not exist)
+             await self._create_dedup_table()
+
+             self.logger.info(f"Database deduplication pipeline initialized: {self.db_config['host']}:{self.db_config['port']}/{self.db_config['db']}.{self.table_name}")
+         except Exception as e:
+             self.logger.error(f"Database deduplication pipeline initialization failed: {e}")
+             raise RuntimeError(f"Database deduplication pipeline initialization failed: {e}")
+
+     async def _create_dedup_table(self) -> None:
+         """Create the deduplication table"""
+         create_table_sql = f"""
+         CREATE TABLE IF NOT EXISTS `{self.table_name}` (
+             `id` BIGINT AUTO_INCREMENT PRIMARY KEY,
+             `fingerprint` VARCHAR(64) NOT NULL UNIQUE,
+             `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+             INDEX `idx_fingerprint` (`fingerprint`)
+         ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
+         """
+
+         async with self.pool.acquire() as conn:
+             async with conn.cursor() as cursor:
+                 await cursor.execute(create_table_sql)
+                 await conn.commit()
+
+     async def process_item(self, item: Item, spider: Spider) -> Item:
+         """
+         Process an item and run the deduplication check
+
+         :param item: item to process
+         :param spider: spider instance
+         :return: the processed item, or raises DropItem
+         """
+         try:
+             # Generate the item fingerprint
+             fingerprint = self._generate_item_fingerprint(item)
+
+             # Check whether the fingerprint already exists
+             exists = await self._check_fingerprint_exists(fingerprint)
+
+             if exists:
+                 # Already seen: drop this item
+                 self.dropped_count += 1
+                 self.logger.debug(f"Dropping duplicate item: {fingerprint[:20]}...")
+                 raise DropItem(f"Duplicate item: {fingerprint}")
+             else:
+                 # Record the fingerprint of the new item
+                 await self._insert_fingerprint(fingerprint)
+                 self.logger.debug(f"Processing new item: {fingerprint[:20]}...")
+                 return item
+
+         except Exception as e:
+             self.logger.error(f"Error processing item: {e}")
+             # Keep processing on errors to avoid losing data
+             return item
+
+     async def _check_fingerprint_exists(self, fingerprint: str) -> bool:
+         """
+         Check whether a fingerprint already exists
+
+         :param fingerprint: item fingerprint
+         :return: whether it exists
+         """
+         check_sql = f"SELECT 1 FROM `{self.table_name}` WHERE `fingerprint` = %s LIMIT 1"
+
+         async with self.pool.acquire() as conn:
+             async with conn.cursor() as cursor:
+                 await cursor.execute(check_sql, (fingerprint,))
+                 result = await cursor.fetchone()
+                 return result is not None
+
+     async def _insert_fingerprint(self, fingerprint: str) -> None:
+         """
+         Insert a new fingerprint
+
+         :param fingerprint: item fingerprint
+         """
+         insert_sql = f"INSERT INTO `{self.table_name}` (`fingerprint`) VALUES (%s)"
+
+         async with self.pool.acquire() as conn:
+             async with conn.cursor() as cursor:
+                 try:
+                     await cursor.execute(insert_sql, (fingerprint,))
+                     await conn.commit()
+                 except aiomysql.IntegrityError:
+                     # Fingerprint already exists (can happen under concurrency)
+                     await conn.rollback()
+                     raise DropItem(f"Duplicate item: {fingerprint}")
+                 except Exception:
+                     await conn.rollback()
+                     raise
+
+     def _generate_item_fingerprint(self, item: Item) -> str:
+         """
+         Generate an item fingerprint
+
+         Builds a unique fingerprint from all fields of the item, used for deduplication.
+
+         :param item: the item
+         :return: fingerprint string
+         """
+         # Convert the item into a serializable dict
+         try:
+             item_dict = item.to_dict()
+         except AttributeError:
+             # Compatibility with Item implementations that lack to_dict
+             item_dict = dict(item)
+
+         # Sort the dict to keep the fingerprint deterministic
+         sorted_items = sorted(item_dict.items())
+
+         # Build the fingerprint string
+         fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
+
+         # Use SHA256 to produce a fixed-length fingerprint
+         return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
+
+     async def close_spider(self, spider: Spider) -> None:
+         """
+         Cleanup when the spider closes
+
+         :param spider: spider instance
+         """
+         try:
+             if self.pool:
+                 self.pool.close()
+                 await self.pool.wait_closed()
+
+             self.logger.info(f"Spider {spider.name} closed:")
+             self.logger.info(f" - Dropped duplicate items: {self.dropped_count}")
+         except Exception as e:
+
+              self.logger.error(f"Error closing spider: {e}")