crawlo 1.1.0-py3-none-any.whl → 1.1.1-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +33 -24
- crawlo/__version__.py +1 -1
- crawlo/cli.py +40 -40
- crawlo/commands/__init__.py +13 -13
- crawlo/commands/check.py +594 -155
- crawlo/commands/genspider.py +125 -110
- crawlo/commands/list.py +147 -119
- crawlo/commands/run.py +285 -170
- crawlo/commands/startproject.py +111 -101
- crawlo/commands/stats.py +188 -167
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -57
- crawlo/crawler.py +494 -492
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +199 -199
- crawlo/downloader/cffi_downloader.py +242 -277
- crawlo/downloader/httpx_downloader.py +246 -246
- crawlo/event.py +11 -11
- crawlo/exceptions.py +78 -78
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +34 -34
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +150 -150
- crawlo/filters/memory_filter.py +202 -202
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/proxy.py +245 -245
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -90
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +203 -203
- crawlo/network/response.py +166 -166
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +272 -272
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/project.py +153 -0
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +166 -168
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +129 -129
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +27 -27
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +75 -75
- crawlo/templates/project/pipelines.py.tmpl +63 -63
- crawlo/templates/project/settings.py.tmpl +54 -54
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +31 -31
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/date_tools.py +233 -233
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +128 -128
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/request.py +267 -267
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.1.1.dist-info/METADATA +220 -0
- crawlo-1.1.1.dist-info/RECORD +100 -0
- examples/__init__.py +7 -0
- examples/baidu_spider/__init__.py +7 -0
- examples/baidu_spider/demo.py +94 -0
- examples/baidu_spider/items.py +46 -0
- examples/baidu_spider/middleware.py +49 -0
- examples/baidu_spider/pipeline.py +55 -0
- examples/baidu_spider/run.py +27 -0
- examples/baidu_spider/settings.py +121 -0
- examples/baidu_spider/spiders/__init__.py +7 -0
- examples/baidu_spider/spiders/bai_du.py +61 -0
- examples/baidu_spider/spiders/miit.py +159 -0
- examples/baidu_spider/spiders/sina.py +79 -0
- tests/__init__.py +7 -7
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- crawlo/utils/concurrency_manager.py +0 -125
- crawlo/utils/project.py +0 -197
- crawlo-1.1.0.dist-info/METADATA +0 -49
- crawlo-1.1.0.dist-info/RECORD +0 -97
- examples/gxb/__init__.py +0 -0
- examples/gxb/items.py +0 -36
- examples/gxb/run.py +0 -16
- examples/gxb/settings.py +0 -72
- examples/gxb/spider/__init__.py +0 -2
- examples/gxb/spider/miit_spider.py +0 -180
- examples/gxb/spider/telecom_device.py +0 -129
- {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/WHEEL +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.0.dist-info → crawlo-1.1.1.dist-info}/top_level.txt +0 -0
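
A comparison like the one summarized above can be reproduced locally from the two published wheels. The sketch below is a minimal example using only the Python standard library; the wheel filenames are assumptions about files downloaded beforehand (for example with pip download crawlo==1.1.0 and pip download crawlo==1.1.1), not paths taken from this page.

# Minimal sketch: diff two locally downloaded wheels using only the standard library.
# The wheel filenames are assumptions; adjust them to the files you actually have.
import difflib
import zipfile

OLD_WHEEL = "crawlo-1.1.0-py3-none-any.whl"  # assumed local path
NEW_WHEEL = "crawlo-1.1.1-py3-none-any.whl"  # assumed local path


def read_members(path: str) -> dict:
    """Return {member name: decoded lines} for every file in the wheel (a zip archive)."""
    with zipfile.ZipFile(path) as zf:
        return {
            name: zf.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
            for name in zf.namelist()
        }


def main() -> None:
    old, new = read_members(OLD_WHEEL), read_members(NEW_WHEEL)
    # Walk the union of member names so added and removed files both show up.
    for name in sorted(set(old) | set(new)):
        diff = difflib.unified_diff(
            old.get(name, []), new.get(name, []),
            fromfile=f"{OLD_WHEEL}/{name}", tofile=f"{NEW_WHEEL}/{name}",
        )
        for line in diff:
            print(line, end="" if line.endswith("\n") else "\n")


if __name__ == "__main__":
    main()
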
@@ -1,273 +1,273 @@
This hunk covers crawlo/pipelines/mysql_batch_pipline.py (the +272 -272 entry above): lines 1-272 are removed and re-added with content that is identical as rendered here, and line 273 is unchanged context. The file content:

# -*- coding: utf-8 -*-
import asyncio
import aiomysql
from typing import Optional, List, Dict
from asyncmy import create_pool
from crawlo.utils.log import get_logger
from crawlo.exceptions import ItemDiscard
from crawlo.utils.tools import make_insert_sql, logger


class AsyncmyMySQLPipeline:
    def __init__(self, crawler):
        self.crawler = crawler
        self.settings = crawler.settings
        self.logger = get_logger(self.__class__.__name__, self.settings.get('LOG_LEVEL'))

        # Configuration
        self.table_name = (
            self.settings.get('MYSQL_TABLE') or
            getattr(crawler.spider, 'mysql_table', None) or
            f"{crawler.spider.name}_items"
        )
        self.batch_size = self.settings.getint('MYSQL_BATCH_SIZE', 100)
        self.flush_interval = self.settings.getfloat('MYSQL_FLUSH_INTERVAL', 3.0)  # seconds

        # Connection pool state
        self._pool_lock = asyncio.Lock()
        self._pool_initialized = False
        self.pool = None

        # Buffer and its lock
        self.items_buffer: List[Dict] = []
        self.buffer_lock = asyncio.Lock()

        # Background task
        self.flush_task: Optional[asyncio.Task] = None

        # Register the shutdown event
        crawler.subscriber.subscribe(self.spider_closed, event='spider_closed')

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler)

    async def _ensure_pool(self):
        """Ensure the connection pool is initialized (thread-safe)."""
        if self._pool_initialized:
            return

        async with self._pool_lock:
            if not self._pool_initialized:
                try:
                    self.pool = await create_pool(
                        host=self.settings.get('MYSQL_HOST', 'localhost'),
                        port=self.settings.get_int('MYSQL_PORT', 3306),
                        user=self.settings.get('MYSQL_USER', 'root'),
                        password=self.settings.get('MYSQL_PASSWORD', ''),
                        db=self.settings.get('MYSQL_DB', 'scrapy_db'),
                        minsize=self.settings.get_int('MYSQL_POOL_MIN', 3),
                        maxsize=self.settings.get_int('MYSQL_POOL_MAX', 10),
                        echo=self.settings.get_bool('MYSQL_ECHO', False)
                    )
                    self._pool_initialized = True
                    self.logger.debug(f"MySQL连接池初始化完成(表: {self.table_name})")
                except Exception as e:
                    self.logger.error(f"MySQL连接池初始化失败: {e}")
                    raise

    async def open_spider(self, spider):
        """Start the background flush task when the spider opens."""
        await self._ensure_pool()
        self.flush_task = asyncio.create_task(self._flush_loop())

    async def _flush_loop(self):
        """Background loop: periodically check whether the buffer needs flushing."""
        while True:
            await asyncio.sleep(self.flush_interval)
            if len(self.items_buffer) > 0:
                await self._flush_buffer()

    async def _flush_buffer(self):
        """Write the buffered items to the database in one batch."""
        async with self.buffer_lock:
            if not self.items_buffer:
                return

            items_to_insert = self.items_buffer.copy()
            self.items_buffer.clear()

        try:
            await self._ensure_pool()
            first_item = items_to_insert[0]
            sql = make_insert_sql(table=self.table_name, data=first_item, many=True)

            values = [list(item.values()) for item in items_to_insert]

            async with self.pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    affected_rows = await cursor.executemany(sql, values)
                    await conn.commit()

            spider_name = getattr(self.crawler.spider, 'name', 'unknown')
            self.logger.info(f"批量插入 {affected_rows} 条记录到 {self.table_name}")
            self.crawler.stats.inc_value('mysql/insert_success_batch', len(items_to_insert))

        except Exception as e:
            self.logger.error(f"批量插入失败: {e}")
            self.crawler.stats.inc_value('mysql/insert_failed_batch', len(items_to_insert))
            # Optional: retry or discard
            raise ItemDiscard(f"批量插入失败: {e}")

    async def process_item(self, item, spider, kwargs=None) -> dict:
        """Add the item to the buffer and trigger a batch insert when full."""
        item_dict = dict(item)

        async with self.buffer_lock:
            self.items_buffer.append(item_dict)
            if len(self.items_buffer) >= self.batch_size:
                # Batch threshold reached, flush immediately
                await self._flush_buffer()

        return item

    async def spider_closed(self):
        """On spider close, make sure all remaining data is written."""
        if self.flush_task:
            self.flush_task.cancel()
            try:
                await self.flush_task
            except asyncio.CancelledError:
                pass

        # Flush the last batch
        if self.items_buffer:
            await self._flush_buffer()

        # Close the connection pool
        if self.pool:
            self.pool.close()
            await self.pool.wait_closed()
            self.logger.info("MySQL连接池已关闭")


class AiomysqlMySQLPipeline:
    def __init__(self, crawler):
        self.crawler = crawler
        self.settings = crawler.settings
        self.logger = get_logger(self.__class__.__name__, self.settings.get('LOG_LEVEL'))

        # Configuration
        self.table_name = (
            self.settings.get('MYSQL_TABLE') or
            getattr(crawler.spider, 'mysql_table', None) or
            f"{crawler.spider.name}_items"
        )
        self.batch_size = self.settings.getint('MYSQL_BATCH_SIZE', 100)
        self.flush_interval = self.settings.getfloat('MYSQL_FLUSH_INTERVAL', 3.0)

        # Connection pool
        self._pool_lock = asyncio.Lock()
        self._pool_initialized = False
        self.pool = None

        # Buffer
        self.items_buffer: List[Dict] = []
        self.buffer_lock = asyncio.Lock()

        # Background task
        self.flush_task: Optional[asyncio.Task] = None

        crawler.subscriber.subscribe(self.spider_closed, event='spider_closed')

    @classmethod
    def create_instance(cls, crawler):
        return cls(crawler)

    async def _init_pool(self):
        """Lazily initialize the connection pool (thread-safe)."""
        if self._pool_initialized:
            return

        async with self._pool_lock:
            if not self._pool_initialized:
                try:
                    self.pool = await aiomysql.create_pool(
                        host=self.settings.get('MYSQL_HOST', 'localhost'),
                        port=self.settings.getint('MYSQL_PORT', 3306),
                        user=self.settings.get('MYSQL_USER', 'root'),
                        password=self.settings.get('MYSQL_PASSWORD', ''),
                        db=self.settings.get('MYSQL_DB', 'scrapy_db'),
                        minsize=self.settings.getint('MYSQL_POOL_MIN', 3),
                        maxsize=self.settings.getint('MYSQL_POOL_MAX', 10),
                        cursorclass=aiomysql.DictCursor,
                        autocommit=False
                    )
                    self._pool_initialized = True
                    self.logger.debug(f"aiomysql连接池已初始化(表: {self.table_name})")
                except Exception as e:
                    self.logger.error(f"aiomysql连接池初始化失败: {e}")
                    raise

    async def open_spider(self, spider):
        """Create the background flush task when the spider opens."""
        await self._init_pool()
        self.flush_task = asyncio.create_task(self._flush_loop())

    async def _flush_loop(self):
        """Flush the buffer at a fixed interval."""
        while True:
            await asyncio.sleep(self.flush_interval)
            if len(self.items_buffer) > 0:
                await self._flush_buffer()

    async def _flush_buffer(self):
        """Perform the batch insert."""
        async with self.buffer_lock:
            if not self.items_buffer:
                return
            items_to_insert = self.items_buffer.copy()
            self.items_buffer.clear()

        try:
            await self._init_pool()
            keys = items_to_insert[0].keys()
            placeholders = ', '.join(['%s'] * len(keys))
            columns = ', '.join([f'`{k}`' for k in keys])
            sql = f"INSERT INTO `{self.table_name}` ({columns}) VALUES ({placeholders})"

            values = [list(item.values()) for item in items_to_insert]

            async with self.pool.acquire() as conn:
                async with conn.cursor() as cursor:
                    result = await cursor.executemany(sql, values)
                    await conn.commit()

            spider_name = getattr(self.crawler.spider, 'name', 'unknown')
            self.logger.info(f"【{spider_name}】批量插入 {result} 条记录到 {self.table_name}")
            self.crawler.stats.inc_value('mysql/insert_success_batch', len(items_to_insert))

        except aiomysql.Error as e:
            self.logger.error(f"aiomysql批量插入失败: {e}")
            self.crawler.stats.inc_value('mysql/insert_failed_batch', len(items_to_insert))
            raise ItemDiscard(f"MySQL错误: {e.args[1]}")
        except Exception as e:
            self.logger.error(f"未知错误: {e}")
            raise ItemDiscard(f"处理失败: {e}")

    async def process_item(self, item, spider) -> dict:
        item_dict = dict(item)

        async with self.buffer_lock:
            self.items_buffer.append(item_dict)
            if len(self.items_buffer) >= self.batch_size:
                await self._flush_buffer()

        return item

    async def spider_closed(self):
        """Clean up resources and commit any remaining data."""
        if self.flush_task:
            self.flush_task.cancel()
            try:
                await self.flush_task
            except asyncio.CancelledError:
                pass

        if self.items_buffer:
            await self._flush_buffer()

        if self.pool:
            self.pool.close()
            await self.pool.wait_closed()
            self.logger.info("aiomysql连接池已释放")
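
For context on how the pipelines above are driven: both classes read their connection and batching parameters from crawler.settings, buffer items in memory, and flush either when MYSQL_BATCH_SIZE items are buffered or when MYSQL_FLUSH_INTERVAL seconds elapse, whichever comes first. The settings sketch below illustrates this; the MYSQL_* keys and their defaults come from the code above, while the PIPELINES registration list is an assumption modeled on Scrapy-style frameworks and may not match crawlo's actual key.

# Hypothetical project settings sketch. The PIPELINES key is an assumption
# (Scrapy-style registration); the MYSQL_* keys are the ones the pipelines read.
PIPELINES = [
    "crawlo.pipelines.mysql_batch_pipline.AsyncmyMySQLPipeline",  # or AiomysqlMySQLPipeline
]

# Connection settings, read via crawler.settings in the pipeline constructors.
MYSQL_HOST = "127.0.0.1"
MYSQL_PORT = 3306
MYSQL_USER = "root"
MYSQL_PASSWORD = ""          # assumption: supply your own credentials
MYSQL_DB = "scrapy_db"
MYSQL_TABLE = "my_items"     # falls back to the spider's mysql_table, then "<spider name>_items"

# Batching behaviour: flush at 100 buffered items or every 3 seconds, whichever comes first.
MYSQL_BATCH_SIZE = 100
MYSQL_FLUSH_INTERVAL = 3.0

# Pool sizing; MYSQL_ECHO is read only by AsyncmyMySQLPipeline.
MYSQL_POOL_MIN = 3
MYSQL_POOL_MAX = 10
MYSQL_ECHO = False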