crawlo 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +25 -9
- crawlo/__version__.py +1 -1
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +158 -158
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +57 -57
- crawlo/crawler.py +424 -242
- crawlo/downloader/__init__.py +78 -78
- crawlo/downloader/aiohttp_downloader.py +200 -259
- crawlo/downloader/cffi_downloader.py +277 -0
- crawlo/downloader/httpx_downloader.py +246 -187
- crawlo/event.py +11 -11
- crawlo/exceptions.py +73 -64
- crawlo/extension/__init__.py +31 -31
- crawlo/extension/log_interval.py +49 -49
- crawlo/extension/log_stats.py +44 -44
- crawlo/extension/logging_extension.py +35 -0
- crawlo/filters/__init__.py +37 -37
- crawlo/filters/aioredis_filter.py +150 -158
- crawlo/filters/memory_filter.py +202 -202
- crawlo/items/__init__.py +62 -62
- crawlo/items/items.py +115 -119
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +32 -32
- crawlo/middleware/download_delay.py +28 -28
- crawlo/middleware/middleware_manager.py +135 -140
- crawlo/middleware/proxy.py +246 -0
- crawlo/middleware/request_ignore.py +30 -30
- crawlo/middleware/response_code.py +18 -18
- crawlo/middleware/response_filter.py +26 -26
- crawlo/middleware/retry.py +90 -90
- crawlo/network/__init__.py +7 -7
- crawlo/network/request.py +203 -204
- crawlo/network/response.py +166 -166
- crawlo/pipelines/__init__.py +13 -13
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/mongo_pipeline.py +116 -116
- crawlo/pipelines/mysql_batch_pipline.py +273 -134
- crawlo/pipelines/mysql_pipeline.py +195 -195
- crawlo/pipelines/pipeline_manager.py +56 -56
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +169 -93
- crawlo/settings/setting_manager.py +99 -99
- crawlo/spider/__init__.py +41 -36
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +106 -106
- crawlo/task_manager.py +27 -27
- crawlo/templates/item_template.tmpl +21 -21
- crawlo/templates/project_template/main.py +32 -32
- crawlo/templates/project_template/setting.py +189 -189
- crawlo/templates/spider_template.tmpl +30 -30
- crawlo/utils/__init__.py +7 -7
- crawlo/utils/concurrency_manager.py +124 -124
- crawlo/utils/date_tools.py +233 -177
- crawlo/utils/db_helper.py +344 -0
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/log.py +129 -39
- crawlo/utils/pqueue.py +173 -173
- crawlo/utils/project.py +59 -59
- crawlo/utils/request.py +267 -122
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +5 -303
- crawlo/utils/url.py +39 -39
- {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/METADATA +49 -48
- crawlo-1.0.5.dist-info/RECORD +84 -0
- {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/top_level.txt +1 -0
- examples/__init__.py +0 -0
- examples/gxb/__init__.py +0 -0
- examples/gxb/items.py +36 -0
- examples/gxb/run.py +15 -0
- examples/gxb/settings.py +71 -0
- examples/gxb/spider/__init__.py +0 -0
- examples/gxb/spider/miit_spider.py +180 -0
- examples/gxb/spider/telecom_device_licenses.py +129 -0
- tests/__init__.py +7 -7
- tests/test_proxy_health_check.py +33 -0
- tests/test_proxy_middleware_integration.py +137 -0
- tests/test_proxy_providers.py +57 -0
- tests/test_proxy_stats.py +20 -0
- tests/test_proxy_strategies.py +60 -0
- crawlo/downloader/playwright_downloader.py +0 -161
- crawlo/filters/redis_filter.py +0 -120
- crawlo-1.0.3.dist-info/RECORD +0 -80
- tests/baidu_spider/__init__.py +0 -7
- tests/baidu_spider/demo.py +0 -94
- tests/baidu_spider/items.py +0 -25
- tests/baidu_spider/middleware.py +0 -49
- tests/baidu_spider/pipeline.py +0 -55
- tests/baidu_spider/request_fingerprints.txt +0 -9
- tests/baidu_spider/run.py +0 -27
- tests/baidu_spider/settings.py +0 -78
- tests/baidu_spider/spiders/__init__.py +0 -7
- tests/baidu_spider/spiders/bai_du.py +0 -61
- tests/baidu_spider/spiders/sina.py +0 -79
- {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/WHEEL +0 -0
- {crawlo-1.0.3.dist-info → crawlo-1.0.5.dist-info}/entry_points.txt +0 -0
crawlo/crawler.py
CHANGED
@@ -1,242 +1,424 @@
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import asyncio
import signal
from typing import Type, Optional, Set, List

from crawlo.spider import Spider
from crawlo.core.engine import Engine
from crawlo.utils.log import get_logger
from crawlo.subscriber import Subscriber
from crawlo.extension import ExtensionManager
from crawlo.exceptions import SpiderTypeError
from crawlo.stats_collector import StatsCollector
from crawlo.event import spider_opened, spider_closed
from crawlo.settings.setting_manager import SettingManager
from crawlo.utils.project import merge_settings, get_settings


logger = get_logger(__name__)


class Crawler:
    """A single crawl run, binding one Spider instance to its Engine."""

    def __init__(self, spider_cls: Type[Spider], settings: SettingManager):
        self.spider_cls = spider_cls
        self.spider: Optional[Spider] = None
        self.engine: Optional[Engine] = None
        self.stats: Optional[StatsCollector] = None
        self.subscriber: Optional[Subscriber] = None
        self.extension: Optional[ExtensionManager] = None
        self.settings: SettingManager = settings.copy()
        self._closed = False  # newly added state flag
        self._close_lock = asyncio.Lock()

    async def crawl(self):
        """Core start-up flow for the spider."""
        self.subscriber = self._create_subscriber()
        self.spider = self._create_spider()
        self.engine = self._create_engine()
        self.stats = self._create_stats()
        self.extension = self._create_extension()

        await self.engine.start_spider(self.spider)

    @staticmethod
    def _create_subscriber() -> Subscriber:
        return Subscriber()

    def _create_spider(self) -> Spider:
        spider = self.spider_cls.create_instance(self)

        # --- checks for required attributes ---
        if not getattr(spider, 'name', None):
            raise AttributeError(f"Spider class '{self.spider_cls.__name__}' must define a 'name' attribute.")

        if not callable(getattr(spider, 'start_requests', None)):
            raise AttributeError(f"Spider '{spider.name}' must implement a callable 'start_requests' method.")

        start_urls = getattr(spider, 'start_urls', [])
        if isinstance(start_urls, str):
            raise TypeError(f"'start_urls' of spider '{spider.name}' must be a list or tuple, not a string.")

        if not callable(getattr(spider, 'parse', None)):
            logger.warning(
                f"Spider '{spider.name}' does not define a 'parse' method. Make sure every Request sets a callback, otherwise its response will be ignored.")

        self._set_spider(spider)
        return spider

    def _create_engine(self) -> Engine:
        engine = Engine(self)
        engine.engine_start()
        return engine

    def _create_stats(self) -> StatsCollector:
        return StatsCollector(self)

    def _create_extension(self) -> ExtensionManager:
        return ExtensionManager.create_instance(self)

    def _set_spider(self, spider: Spider):
        self.subscriber.subscribe(spider.spider_opened, event=spider_opened)
        self.subscriber.subscribe(spider.spider_closed, event=spider_closed)
        merge_settings(spider, self.settings)

    async def close(self, reason='finished') -> None:
        async with self._close_lock:
            if self._closed:
                return
            self._closed = True
            await self.subscriber.notify(spider_closed)
            if self.stats and self.spider:
                self.stats.close_spider(spider=self.spider, reason=reason)


class CrawlerProcess:
    """
    Crawler process manager: concurrent scheduling of multiple spiders,
    semaphore-based limiting, live progress logging, and graceful shutdown.
    """

    def __init__(self, settings: Optional[SettingManager] = None, max_concurrency: Optional[int] = None):
        self.settings: SettingManager = settings or self._get_default_settings()
        self.crawlers: Set[Crawler] = set()
        self._active_tasks: Set[asyncio.Task] = set()

        # Prefer the dedicated setting, falling back to CONCURRENCY
        self.max_concurrency: int = (
            max_concurrency
            or self.settings.get('MAX_RUNNING_SPIDERS')
            or self.settings.get('CONCURRENCY', 3)
        )
        self.semaphore = asyncio.Semaphore(self.max_concurrency)

        # Register signal handlers
        signal.signal(signal.SIGINT, self._shutdown)
        signal.signal(signal.SIGTERM, self._shutdown)
        logger.info(f"CrawlerProcess initialised, max spiders running in parallel: {self.max_concurrency}")

    async def crawl(self, spiders):
        """
        Start one or more spiders with streaming scheduling and live progress feedback.
        """
        spider_classes = self._normalize_spiders(spiders)
        total = len(spider_classes)

        if total == 0:
            raise ValueError("At least one spider class must be provided")

        # Sort by class name
        spider_classes.sort(key=lambda cls: cls.__name__.lower())

        logger.info(f"Starting {total} spider(s).")

        # Launch every spider task
        tasks = [
            asyncio.create_task(self._run_spider_with_limit(spider_cls, index + 1, total))
            for index, spider_cls in enumerate(spider_classes)
        ]

        # Wait for all tasks (a failure does not abort the rest)
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Collect failures
        failed = [i for i, r in enumerate(results) if isinstance(r, Exception)]
        if failed:
            logger.error(f"{len(failed)} spider(s) raised exceptions: {[spider_classes[i].__name__ for i in failed]}")

    @staticmethod
    def _normalize_spiders(spiders) -> List[Type[Spider]]:
        """Normalise the input into a list of spider classes."""
        if isinstance(spiders, type) and issubclass(spiders, Spider):
            return [spiders]
        elif isinstance(spiders, (list, tuple)):
            return list(spiders)
        else:
            raise TypeError("spiders must be a spider class or a list/tuple of spider classes")

    async def _run_spider_with_limit(self, spider_cls: Type[Spider], seq: int, total: int):
        """
        Run a spider under the semaphore limit, with progress logging.
        """
        task = asyncio.current_task()
        self._active_tasks.add(task)

        try:
            # Acquire a concurrency slot
            await self.semaphore.acquire()

            start_msg = f"[{seq}/{total}] Starting spider: {spider_cls.__name__}"
            logger.info(start_msg)

            # Create and run the crawler
            crawler = self._create_crawler(spider_cls)
            self.crawlers.add(crawler)
            await crawler.crawl()

            end_msg = f"[{seq}/{total}] Spider finished: {spider_cls.__name__}"
            logger.info(end_msg)

        except Exception as e:
            logger.error(f"Spider {spider_cls.__name__} failed: {e}", exc_info=True)
            raise
        finally:
            if task in self._active_tasks:
                self._active_tasks.remove(task)
            self.semaphore.release()  # must always be released

    def _create_crawler(self, spider_cls: Type[Spider]) -> Crawler:
        """Create a Crawler instance."""
        if isinstance(spider_cls, str):
            raise SpiderTypeError(f"String spider references are not supported: {spider_cls}")
        return Crawler(spider_cls, self.settings)

    def _shutdown(self, _signum, _frame):
        """Signal handler for graceful shutdown."""
        logger.warning("Shutdown signal received, stopping all spiders...")
        for crawler in list(self.crawlers):
            if crawler.engine:
                crawler.engine.running = False
                crawler.engine.normal = False
        asyncio.create_task(self._wait_for_shutdown())

    async def _wait_for_shutdown(self):
        """Wait for all active tasks to finish."""
        pending = [t for t in self._active_tasks if not t.done()]
        if pending:
            logger.info(f"Waiting for {len(pending)} active task(s) to finish...")
            await asyncio.gather(*pending, return_exceptions=True)
        logger.info("All spiders have been shut down gracefully")

    @classmethod
    def _get_default_settings(cls) -> SettingManager:
        """Load the default settings."""
        try:
            return get_settings()
        except Exception as e:
            logger.warning(f"Could not load the default settings: {e}")
            return SettingManager()


# #!/usr/bin/python
# # -*- coding:UTF-8 -*
# import signal
# import asyncio
# from typing import Final, Set, Optional
#
# from crawlo.spider import Spider
# from crawlo.core.engine import Engine
# from crawlo.utils.log import get_logger
# from crawlo.subscriber import Subscriber
# from crawlo.extension import ExtensionManager
# from crawlo.exceptions import SpiderTypeError
# from crawlo.stats_collector import StatsCollector
# from crawlo.event import spider_opened, spider_closed
# from crawlo.settings.setting_manager import SettingManager
# from crawlo.utils.project import merge_settings, get_settings
#
# logger = get_logger(__name__)
#
#
# class Crawler:
#
#     def __init__(self, spider_cls, settings):
#         self.spider_cls = spider_cls
#         self.spider: Optional[Spider] = None
#         self.engine: Optional[Engine] = None
#         self.stats: Optional[StatsCollector] = None
#         self.subscriber: Optional[Subscriber] = None
#         self.extension: Optional[ExtensionManager] = None
#         self.settings: SettingManager = settings.copy()
#
#     async def crawl(self):
#         self.subscriber = self._create_subscriber()
#         self.spider = self._create_spider()
#         self.engine = self._create_engine()
#         self.stats = self._create_stats()
#         self.extension = self._create_extension()
#
#         await self.engine.start_spider(self.spider)
#
#     @staticmethod
#     def _create_subscriber():
#         return Subscriber()
#
#     def _create_spider(self) -> Spider:
#         spider = self.spider_cls.create_instance(self)
#
#         # --- checks for required attributes ---
#         # 1. check name
#         if not getattr(spider, 'name', None):
#             raise AttributeError(f"Spider class '{self.spider_cls.__name__}' must have a 'name' attribute.")
#
#         # 2. check that start_requests is callable
#         if not callable(getattr(spider, 'start_requests', None)):
#             raise AttributeError(f"Spider '{spider.name}' must have a callable 'start_requests' method.")
#
#         # 3. check the type of start_urls
#         start_urls = getattr(spider, 'start_urls', [])
#         if isinstance(start_urls, str):
#             raise TypeError(f"'{spider.name}.start_urls' must be a list or tuple, not a string.")
#
#         # --- logging hints ---
#         # remind the user to define a parse method
#         if not callable(getattr(spider, 'parse', None)):
#             logger.warning(f"Spider '{spider.name}' lacks a 'parse' method. Ensure all Requests have callbacks.")
#
#         self._set_spider(spider)
#         return spider
#
#     def _create_engine(self) -> Engine:
#         engine = Engine(self)
#         engine.engine_start()
#         return engine
#
#     def _create_stats(self) -> StatsCollector:
#         stats = StatsCollector(self)
#         return stats
#
#     def _create_extension(self) -> ExtensionManager:
#         extension = ExtensionManager.create_instance(self)
#         return extension
#
#     def _set_spider(self, spider):
#         self.subscriber.subscribe(spider.spider_opened, event=spider_opened)
#         self.subscriber.subscribe(spider.spider_closed, event=spider_closed)
#         merge_settings(spider, self.settings)
#
#     async def close(self, reason='finished') -> None:
#         await asyncio.create_task(self.subscriber.notify(spider_closed))
#         self.stats.close_spider(spider=self.spider, reason=reason)
#
#
# class CrawlerProcess:
#     """Crawler process class with cross-platform dynamic concurrency control and fine-grained logging"""
#
#     def __init__(self, settings=None, max_concurrency: Optional[int] = None, batch_size: int = 10):
#         self.crawlers: Final[Set] = set()
#         self._active_spiders: Final[Set] = set()
#         self.settings = settings or self._get_default_settings()
#         self.batch_size = batch_size
#
#         # Prefer the dedicated setting, fall back to CONCURRENCY, then to the default
#         self.max_concurrency = (
#             max_concurrency or
#             self.settings.get('MAX_RUNNING_SPIDERS') or
#             self.settings.get('CONCURRENCY', 5)
#         )
#         self.semaphore = asyncio.Semaphore(self.max_concurrency)
#
#         signal.signal(signal.SIGINT, self._shutdown)
#         logger.debug(f"Initialising the crawler process, max concurrency: {self.max_concurrency}")
#
#     async def crawl(self, spiders):
#         """Batch handling of one or many spiders, with tidier log output"""
#         if not spiders:
#             raise ValueError("At least one spider class must be provided")
#
#         # Normalise to a list
#         if isinstance(spiders, type) and issubclass(spiders, Spider):
#             spiders = [spiders]
#         elif isinstance(spiders, (list, tuple)):
#             spiders = list(spiders)
#         else:
#             raise TypeError("spiders must be a spider class or a list/tuple of spider classes")
#
#         # Sort by class name (ascending)
#         spiders.sort(key=lambda x: x.__name__.lower())
#
#         if len(spiders) == 1:
#             logger.info(f"Starting spider: {spiders[0].__name__}")
#         else:
#             logger.info(f"Starting {len(spiders)} spiders, sorted by name and processed in batches")
#
#         batches = [spiders[i:i + self.batch_size] for i in range(0, len(spiders), self.batch_size)]
#
#         for batch_idx, batch in enumerate(batches):
#             batch_tasks = set()
#
#             for spider_cls in batch:
#                 crawler = self._create_crawler(spider_cls)
#                 self.crawlers.add(crawler)
#
#                 await self.semaphore.acquire()
#                 task = asyncio.create_task(self._run_crawler_with_semaphore(crawler))
#                 batch_tasks.add(task)
#                 self._active_spiders.add(task)
#
#             if len(spiders) > 1:  # only show batch info for multiple spiders
#                 logger.info(f"Starting batch {batch_idx + 1}/{len(batches)}, {len(batch)} spider(s)")
#
#             await asyncio.gather(*batch_tasks)
#
#             if len(spiders) > 1:  # only show batch completion for multiple spiders
#                 logger.info(f"Batch {batch_idx + 1} finished")
#
#     async def _run_crawler_with_semaphore(self, crawler):
#         """Run a crawler under semaphore control"""
#         try:
#             await crawler.crawl()
#         finally:
#             self.semaphore.release()  # make sure the slot is released
#
#     async def start(self):
#         """Start all crawler tasks"""
#         if self._active_spiders:
#             logger.info(f"Starting {len(self._active_spiders)} crawler task(s), computed max concurrency for this device: {self.max_concurrency}")
#             await asyncio.gather(*self._active_spiders)
#
#     def _create_crawler(self, spider_cls) -> Crawler:
#         """Create a crawler instance"""
#         if isinstance(spider_cls, str):
#             raise SpiderTypeError(f"{type(self)}.crawl args: String is not supported.")
#         crawler: Crawler = Crawler(spider_cls, self.settings)
#         return crawler
#
#     def _shutdown(self, _signum, _frame):
#         """Gracefully shut down all crawlers"""
#         logger.warning(f"Shutdown signal received, gracefully closing {len(self.crawlers)} crawler(s)...")
#         for crawler in self.crawlers:
#             if crawler.engine:
#                 crawler.engine.running = False
#                 crawler.engine.normal = False
#                 crawler.stats.close_spider(crawler.spider, 'shutdown signal')
#
#         # wait for all tasks to finish
#         asyncio.create_task(self._wait_for_tasks())
#
#     async def _wait_for_tasks(self):
#         """Wait for all active tasks to finish"""
#         pending = [task for task in self._active_spiders if not task.done()]
#         if pending:
#             logger.info(f"Waiting for {len(pending)} active task(s) to finish...")
#             await asyncio.gather(*pending)
#         logger.info("All spiders have been shut down gracefully")
#
#     @classmethod
#     def _get_default_settings(cls):
#         """Automatically load the framework's default settings"""
#         try:
#             return get_settings()
#         except ImportError:
#             return {}