crawlo 1.4.3__py3-none-any.whl → 1.4.5__py3-none-any.whl
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release.
- crawlo/__init__.py +11 -15
- crawlo/__version__.py +1 -1
- crawlo/commands/genspider.py +52 -17
- crawlo/commands/startproject.py +24 -0
- crawlo/core/engine.py +2 -2
- crawlo/core/scheduler.py +4 -4
- crawlo/crawler.py +13 -6
- crawlo/downloader/__init__.py +5 -2
- crawlo/extension/__init__.py +2 -2
- crawlo/filters/aioredis_filter.py +8 -1
- crawlo/filters/memory_filter.py +8 -1
- crawlo/initialization/built_in.py +13 -4
- crawlo/initialization/core.py +5 -4
- crawlo/interfaces.py +24 -0
- crawlo/middleware/__init__.py +7 -4
- crawlo/middleware/middleware_manager.py +15 -8
- crawlo/mode_manager.py +45 -11
- crawlo/network/response.py +374 -69
- crawlo/pipelines/mysql_pipeline.py +6 -6
- crawlo/pipelines/pipeline_manager.py +2 -2
- crawlo/project.py +2 -4
- crawlo/queue/pqueue.py +2 -6
- crawlo/queue/queue_manager.py +1 -2
- crawlo/settings/default_settings.py +15 -30
- crawlo/task_manager.py +2 -2
- crawlo/templates/project/items.py.tmpl +2 -2
- crawlo/templates/project/middlewares.py.tmpl +9 -89
- crawlo/templates/project/pipelines.py.tmpl +8 -68
- crawlo/templates/project/settings.py.tmpl +51 -65
- crawlo/templates/project/settings_distributed.py.tmpl +59 -67
- crawlo/templates/project/settings_gentle.py.tmpl +45 -40
- crawlo/templates/project/settings_high_performance.py.tmpl +45 -40
- crawlo/templates/project/settings_minimal.py.tmpl +37 -26
- crawlo/templates/project/settings_simple.py.tmpl +45 -40
- crawlo/templates/run.py.tmpl +3 -7
- crawlo/tools/__init__.py +0 -11
- crawlo/utils/__init__.py +17 -1
- crawlo/utils/db_helper.py +220 -319
- crawlo/utils/error_handler.py +313 -67
- crawlo/utils/fingerprint.py +3 -4
- crawlo/utils/misc.py +82 -0
- crawlo/utils/request.py +55 -66
- crawlo/utils/selector_helper.py +138 -0
- crawlo/utils/spider_loader.py +185 -45
- crawlo/utils/text_helper.py +95 -0
- crawlo-1.4.5.dist-info/METADATA +329 -0
- {crawlo-1.4.3.dist-info → crawlo-1.4.5.dist-info}/RECORD +89 -68
- tests/bug_check_test.py +251 -0
- tests/direct_selector_helper_test.py +97 -0
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -0
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -0
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -0
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +85 -0
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -0
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +162 -0
- tests/ofweek_scrapy/scrapy.cfg +11 -0
- tests/performance_comparison.py +4 -5
- tests/simple_crawlo_test.py +1 -2
- tests/simple_follow_test.py +39 -0
- tests/simple_response_selector_test.py +95 -0
- tests/simple_selector_helper_test.py +155 -0
- tests/simple_selector_test.py +208 -0
- tests/simple_url_test.py +74 -0
- tests/test_crawler_process_import.py +39 -0
- tests/test_crawler_process_spider_modules.py +48 -0
- tests/test_edge_cases.py +7 -5
- tests/test_encoding_core.py +57 -0
- tests/test_encoding_detection.py +127 -0
- tests/test_factory_compatibility.py +197 -0
- tests/test_multi_directory.py +68 -0
- tests/test_multiple_spider_modules.py +81 -0
- tests/test_optimized_selector_naming.py +101 -0
- tests/test_priority_behavior.py +18 -18
- tests/test_response_follow.py +105 -0
- tests/test_response_selector_methods.py +93 -0
- tests/test_response_url_methods.py +71 -0
- tests/test_response_urljoin.py +87 -0
- tests/test_scrapy_style_encoding.py +113 -0
- tests/test_selector_helper.py +101 -0
- tests/test_selector_optimizations.py +147 -0
- tests/test_spider_loader.py +50 -0
- tests/test_spider_loader_comprehensive.py +70 -0
- tests/test_spider_modules.py +85 -0
- tests/test_spiders/__init__.py +1 -0
- tests/test_spiders/test_spider.py +10 -0
- crawlo/tools/anti_crawler.py +0 -269
- crawlo/utils/class_loader.py +0 -26
- crawlo/utils/enhanced_error_handler.py +0 -357
- crawlo-1.4.3.dist-info/METADATA +0 -190
- examples/test_project/__init__.py +0 -7
- examples/test_project/run.py +0 -35
- examples/test_project/test_project/__init__.py +0 -4
- examples/test_project/test_project/items.py +0 -18
- examples/test_project/test_project/middlewares.py +0 -119
- examples/test_project/test_project/pipelines.py +0 -97
- examples/test_project/test_project/settings.py +0 -170
- examples/test_project/test_project/spiders/__init__.py +0 -10
- examples/test_project/test_project/spiders/of_week_dis.py +0 -144
- tests/simple_log_test.py +0 -58
- tests/simple_test.py +0 -48
- tests/test_framework_logger.py +0 -67
- tests/test_framework_startup.py +0 -65
- tests/test_mode_change.py +0 -73
- {crawlo-1.4.3.dist-info → crawlo-1.4.5.dist-info}/WHEEL +0 -0
- {crawlo-1.4.3.dist-info → crawlo-1.4.5.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.3.dist-info → crawlo-1.4.5.dist-info}/top_level.txt +0 -0
- /tests/{final_command_test_report.md → ofweek_scrapy/ofweek_scrapy/__init__.py} +0 -0
crawlo/pipelines/mysql_pipeline.py
CHANGED
@@ -5,7 +5,7 @@ from asyncmy import create_pool
 from typing import Optional, List, Dict
 
 from crawlo.exceptions import ItemDiscard
-from crawlo.utils.db_helper import
+from crawlo.utils.db_helper import SQLBuilder
 from crawlo.utils.log import get_logger
 from . import BasePipeline
 
@@ -88,7 +88,7 @@ class AsyncmyMySQLPipeline:
         try:
             await self._ensure_pool()
             item_dict = dict(item)
-            sql =
+            sql = SQLBuilder.make_insert(table=self.table_name, data=item_dict, **kwargs)
 
             rowcount = await self._execute_sql(sql=sql)
             if rowcount > 1:
@@ -141,7 +141,7 @@ class AsyncmyMySQLPipeline:
         await self._ensure_pool()
 
         # Use the batch SQL generation function
-        batch_result =
+        batch_result = SQLBuilder.make_batch(table=self.table_name, datas=self.batch_buffer)
         if batch_result is None:
             self.logger.warning("Batch insert data is empty")
             self.batch_buffer.clear()
@@ -254,8 +254,8 @@ class AiomysqlMySQLPipeline:
         await self._init_pool()
 
         item_dict = dict(item)
-        # Use
-        sql =
+        # Use SQLBuilder to generate the SQL
+        sql = SQLBuilder.make_insert(table=self.table_name, data=item_dict)
 
         async with self.pool.acquire() as conn:
             async with conn.cursor() as cursor:
@@ -283,7 +283,7 @@ class AiomysqlMySQLPipeline:
         await self._init_pool()
 
         # Use the batch SQL generation function
-        batch_result =
+        batch_result = SQLBuilder.make_batch(table=self.table_name, datas=self.batch_buffer)
         if batch_result is None:
             self.logger.warning("Batch insert data is empty")
             self.batch_buffer.clear()
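Across both pipeline classes the pattern is the same: previously truncated call sites now go through SQLBuilder from crawlo/utils/db_helper.py (rewritten in this release, +220 -319). The diff reveals only the call shape: make_insert(table=..., data=..., **kwargs) and make_batch(table=..., datas=...), with make_batch returning None for an empty buffer. A minimal sketch consistent with those call sites, with all internals assumed rather than taken from crawlo:

    # Hypothetical sketch of a builder matching the call sites above; the real
    # implementation lives in crawlo/utils/db_helper.py and may differ.
    class SQLBuilder:
        @staticmethod
        def _quote(value) -> str:
            # Naive literal quoting for illustration; production code should
            # prefer parameterized queries or a proper escaping routine.
            if value is None:
                return 'NULL'
            if isinstance(value, (int, float)):
                return str(value)
            return "'" + str(value).replace("'", "''") + "'"

        @staticmethod
        def make_insert(table: str, data: dict, **kwargs) -> str:
            # Build a single-row INSERT from a dict of column -> value.
            cols = ', '.join(f'`{c}`' for c in data)
            vals = ', '.join(SQLBuilder._quote(v) for v in data.values())
            return f'INSERT INTO `{table}` ({cols}) VALUES ({vals})'

        @staticmethod
        def make_batch(table: str, datas: list):
            # Mirrors the pipelines' `if batch_result is None` guard above.
            if not datas:
                return None
            cols = ', '.join(f'`{c}`' for c in datas[0])
            rows = ', '.join(
                '(' + ', '.join(SQLBuilder._quote(v) for v in row.values()) + ')'
                for row in datas
            )
            return f'INSERT INTO `{table}` ({cols}) VALUES {rows}'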
crawlo/pipelines/pipeline_manager.py
CHANGED
@@ -6,7 +6,7 @@ from asyncio import create_task
 
 from crawlo.utils.log import get_logger
 from crawlo.event import item_successful, item_discard
-from crawlo.utils.
+from crawlo.utils.misc import load_object
 from crawlo.project import common_call
 from crawlo.exceptions import PipelineInitError, ItemDiscard, InvalidOutputError, DropItem
 
@@ -61,7 +61,7 @@ class PipelineManager:
     def _add_pipelines(self, pipelines):
         for pipeline in pipelines:
             try:
-                pipeline_cls =
+                pipeline_cls = load_object(pipeline)
                 if not hasattr(pipeline_cls, 'from_crawler'):
                     raise PipelineInitError(
                         f"Pipeline init failed, must inherit from `BasePipeline` or have a `from_crawler` method"
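load_object now comes from the new crawlo/utils/misc.py (+82 lines), replacing the deleted crawlo/utils/class_loader.py. The diff does not show its body; a dotted-path loader of the usual shape, offered here purely as an assumption, looks like this:

    from importlib import import_module

    def load_object(path: str):
        # Resolve 'package.module.Name' to the object it names (sketch;
        # crawlo's actual helper in crawlo/utils/misc.py may differ).
        module_path, _, name = path.rpartition('.')
        if not module_path:
            raise ValueError(f'not a full dotted path: {path!r}')
        module = import_module(module_path)
        try:
            return getattr(module, name)
        except AttributeError:
            raise NameError(f'module {module_path!r} defines no {name!r}')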
crawlo/project.py
CHANGED
@@ -1,8 +1,6 @@
+import configparser
 import os
 import sys
-import importlib
-import configparser
-from importlib import import_module
 from inspect import iscoroutinefunction
 from typing import Callable, Optional, Any
 
@@ -42,7 +40,7 @@ def load_class(path: str) -> Any:
         The loaded class object
     """
     # Use the utils module implementation to avoid circular imports
-    from crawlo.utils.
+    from crawlo.utils.misc import load_object as _load_class
     return _load_class(path)
 
 
crawlo/queue/pqueue.py
CHANGED
@@ -1,12 +1,8 @@
 # -*- coding:UTF-8 -*-
-import json
-import sys
 import asyncio
+import sys
 from asyncio import PriorityQueue
-from typing import Optional,
-
-
-from crawlo import Request
+from typing import Optional, Any
 
 
 class SpiderPriorityQueue(PriorityQueue):
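After the cleanup, SpiderPriorityQueue depends only on asyncio and typing; the crawlo.Request import is gone, so the queue is agnostic about what it stores. The diff shows only the class line; purely as orientation, a priority queue of this shape with a timeout-aware get (the timeout behavior is an assumption, not visible in the diff) could look like:

    import asyncio
    from asyncio import PriorityQueue
    from typing import Optional, Any

    class SpiderPriorityQueue(PriorityQueue):
        # Sketch: a get() that returns None on timeout instead of blocking
        # forever (assumed behavior; the real class may differ).
        async def get(self, timeout: float = 5.0) -> Optional[Any]:
            try:
                return await asyncio.wait_for(super().get(), timeout=timeout)
            except asyncio.TimeoutError:
                return None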
crawlo/queue/queue_manager.py
CHANGED
@@ -5,11 +5,10 @@
 Provides a clean, consistent queue interface and automatically handles differences between queue types
 """
 import asyncio
+import time
 import traceback
 from enum import Enum
 from typing import Optional, Dict, Any, Union, TYPE_CHECKING
-import time
-import random
 
 if TYPE_CHECKING:
     from crawlo import Request
crawlo/settings/default_settings.py
CHANGED
@@ -6,9 +6,7 @@
 # Import environment variable configuration helpers
 from crawlo.utils.env_config import get_redis_config, get_runtime_config, get_version
 
-#
-# 1. Framework base configuration
-# ===========================================================================
+# --------------------------------- 1. Framework base configuration ------------------------------------
 
 # Framework initialization control
 FRAMEWORK_INIT_ORDER = [
@@ -27,9 +25,11 @@ VERSION = get_version()  # Project version, read from the framework's __version__.py
 RUN_MODE = runtime_config['CRAWLO_MODE']  # Run mode: standalone/distributed/auto
 CONCURRENCY = runtime_config['CONCURRENCY']  # Concurrency setting
 
-#
-#
-#
+# Spider module configuration
+SPIDER_MODULES = []  # List of spider modules
+SPIDER_LOADER_WARN_ONLY = False  # Whether the spider loader only warns instead of raising
+
+# --------------------------------- 2. Crawler core configuration ------------------------------------
 
 # Downloader configuration
 DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # Default downloader
@@ -53,9 +53,7 @@ QUEUE_TYPE = 'auto'  # Queue type: memory/redis/auto
 QUEUE_MAX_RETRIES = 3  # Maximum retries for queue operations
 QUEUE_TIMEOUT = 300  # Queue operation timeout (seconds)
 
-#
-# 3. Database and filter configuration
-# ===========================================================================
+# --------------------------------- 3. Database and filter configuration ------------------------------------
 
 # MySQL configuration
 MYSQL_HOST = '127.0.0.1'
@@ -100,9 +98,7 @@ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
 BLOOM_FILTER_CAPACITY = 1000000  # Bloom filter capacity
 BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom filter error rate
 
-#
-# 4. Middleware configuration
-# ===========================================================================
+# --------------------------------- 4. Middleware configuration ------------------------------------
 
 # Framework middleware list (framework defaults + user-defined middleware)
 MIDDLEWARES = [
@@ -118,18 +114,14 @@ MIDDLEWARES = [
     'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. Response content filtering
 ]
 
-#
-# 5. Pipeline configuration
-# ===========================================================================
+# --------------------------------- 5. Pipeline configuration ------------------------------------
 
 # Framework data-processing pipeline list (framework defaults + user-defined pipelines)
 PIPELINES = [
     'crawlo.pipelines.console_pipeline.ConsolePipeline',
 ]
 
-#
-# 6. Extension configuration
-# ===========================================================================
+# --------------------------------- 6. Extension configuration ------------------------------------
 
 # Framework extension list (framework defaults + user-defined extensions)
 EXTENSIONS = [
@@ -138,9 +130,7 @@ EXTENSIONS = [
     'crawlo.extension.logging_extension.CustomLoggerExtension',  # Custom logging
 ]
 
-#
-# 7. Logging and monitoring configuration
-# ===========================================================================
+# --------------------------------- 7. Logging and monitoring configuration ------------------------------------
 
 # Logging configuration
 LOG_LEVEL = None  # Log level: DEBUG/INFO/WARNING/ERROR; defaults to None, set by the user in project settings
@@ -148,13 +138,12 @@ STATS_DUMP = True  # Whether to periodically dump statistics
 LOG_FILE = None  # Log file path, set in project configuration
 LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
 LOG_ENCODING = 'utf-8'
+LOG_MAX_BYTES = 10 * 1024 * 1024  # Log rotation size (bytes)
+LOG_BACKUP_COUNT = 5  # Number of rotated log files to keep
 
 # Log interval configuration
 INTERVAL = 60  # Log output interval (seconds)
 
-# Custom logging configuration
-LOG_ENABLE_CUSTOM = False  # Whether to enable custom logging
-
 # Memory monitoring configuration
 MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
 MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)
@@ -169,9 +158,7 @@ PERFORMANCE_PROFILER_INTERVAL = 300  # Profiling interval (seconds)
 # Health check configuration
 HEALTH_CHECK_ENABLED = True  # Whether to enable health checks
 
-#
-# 8. Network request configuration
-# ===========================================================================
+# --------------------------------- 8. Network request configuration ------------------------------------
 
 # Default request header configuration
 DEFAULT_REQUEST_HEADERS = {
@@ -262,9 +249,7 @@ PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # Max pages per browser
 CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
 CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
 
-#
-# 9. Data storage configuration
-# ===========================================================================
+# --------------------------------- 9. Data storage configuration ------------------------------------
 
 # CSV pipeline configuration
 CSV_DELIMITER = ','  # CSV delimiter
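The new SPIDER_MODULES / SPIDER_LOADER_WARN_ONLY defaults and the log-rotation knobs are ordinary module-level settings, so a project overrides them in its own settings.py. For example (names taken from the diff above, values illustrative):

    # Project-level overrides of the new 1.4.5 defaults
    SPIDER_MODULES = ['myproject.spiders']  # where the spider loader searches
    SPIDER_LOADER_WARN_ONLY = True          # warn instead of raising on load errors
    LOG_MAX_BYTES = 50 * 1024 * 1024        # rotate the log file at 50 MB
    LOG_BACKUP_COUNT = 10                   # keep ten rotated files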
crawlo/task_manager.py
CHANGED
@@ -1,10 +1,10 @@
 #!/usr/bin/python
 # -*- coding:UTF-8 -*-
+import time
 import asyncio
-from asyncio import Task, Future, Semaphore
 from typing import Set, Final
 from collections import deque
-import
+from asyncio import Task, Future, Semaphore
 from crawlo.utils.log import get_logger
 
 
crawlo/templates/project/middlewares.py.tmpl
CHANGED
@@ -2,118 +2,38 @@
 """
 {{project_name}}.middlewares
 ============================
-
-
-This is a simple example middleware; you can add more middleware as needed.
+Custom middleware example
 """
 
-import
-from crawlo import Request, Response
+from crawlo.network import Request, Response
 from crawlo.utils.log import get_logger
 
 
-class
+class {{project_name|title}}Middleware:
     """
-
-
-    This middleware will:
-    1. Add a random User-Agent to each request
-    2. Log request and response information
-    3. Handle exceptions
+    Middleware for the {{project_name}} project
     """
 
     def __init__(self):
         self.logger = get_logger(self.__class__.__name__)
-        self.user_agents = [
-            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36',
-            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:135.0) Gecko/20100101 Firefox/135.0',
-            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/135.0',
-        ]
 
     def process_request(self, request, spider):
         """
-
-
-        Args:
-            request: the request object
-            spider: the spider instance
-
-        Returns:
-            None: continue processing the request
-            Response: return a response object (short-circuits processing)
-            Request: return a new request object (replaces the original)
+        Called before the request is executed by the downloader
         """
-
-        if 'User-Agent' not in request.headers:
-            ua = random.choice(self.user_agents)
-            request.headers['User-Agent'] = ua
-            self.logger.debug(f"Set User-Agent for request {request.url}: {ua[:50]}...")
-
+        self.logger.info(f"Processing request: {request.url}")
         return None
 
     def process_response(self, request, response, spider):
         """
-        Called before the response is processed by the Spider
-
-        Args:
-            request: the original request object
-            response: the response object
-            spider: the spider instance
-
-        Returns:
-            Response: the processed response object
+        Called before the response is processed by the Spider
         """
-        # Log response information
         self.logger.info(f"Received response: {request.url} - status code: {response.status_code}")
-
-        # Special status codes can be handled here
-        if response.status_code == 403:
-            self.logger.warning(f"Access denied: {request.url}")
-
         return response
 
     def process_exception(self, request, exception, spider):
         """
-
-
-        Args:
-            request: the request object
-            exception: the exception object
-            spider: the spider instance
-
-        Returns:
-            None: the exception continues to propagate
-            Response: return a response object (handles the exception)
-            Request: return a new request object (retries the request)
+        Called when an exception occurs during download or processing
         """
         self.logger.error(f"Request exception: {request.url} - {exception}")
-        return None
-
-
-# ======================== Usage notes ========================
-#
-# Enable the middleware in settings.py:
-# MIDDLEWARES = [
-#     '{{project_name}}.middlewares.ExampleMiddleware',
-# ]
-#
-# You can add more middleware as needed, for example:
-# 1. Request-processing middleware (modify headers, set proxies, etc.)
-# 2. Response-processing middleware (parsing, filtering, etc.)
-# 3. Exception-handling middleware (retries, logging, etc.)
-#
-# Each middleware may implement the following methods:
-# - process_request: handle requests
-# - process_response: handle responses
-# - process_exception: handle exceptions
-#
-# Note: the Crawlo framework ships many built-in middlewares you can use directly:
-# - DownloadDelayMiddleware: control request delays
-# - ResponseCodeMiddleware: handle HTTP status codes and record statistics
-# - ResponseFilterMiddleware: filter responses with specific status codes
-# - DefaultHeaderMiddleware: add default request headers
-# - ProxyMiddleware: set proxies
-# - RetryMiddleware: handle retry logic
-# - OffsiteMiddleware: filter off-site requests
-# ======================== Usage notes ========================
+        return None
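The generated class name now derives from the project name via {{project_name|title}}, so for a project named myproject (an assumed example) the template yields MyprojectMiddleware, enabled the usual way in the project's settings.py:

    # settings.py of a project generated from this template
    MIDDLEWARES = [
        'myproject.middlewares.MyprojectMiddleware',
    ]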
crawlo/templates/project/pipelines.py.tmpl
CHANGED
@@ -2,96 +2,36 @@
 """
 {{project_name}}.pipelines
 ==========================
-
-For example: cleaning, validation, deduplication, saving to a database, etc.
-
-This is a simple example pipeline; you can add more pipelines as needed.
+Data pipeline example
 """
 
-from datetime import datetime
 from crawlo.exceptions import DropItem
 from crawlo.utils.log import get_logger
 
 
-class
+class {{project_name|title}}Pipeline:
     """
-
-
-    This pipeline will:
-    1. Validate required fields
-    2. Clean the data
-    3. Add a timestamp
-    4. Log its processing
+    Data pipeline for the {{project_name}} project
     """
 
     def __init__(self):
         self.logger = get_logger(self.__class__.__name__)
-        self.item_count = 0
 
     def process_item(self, item, spider):
         """
-
-
-        Args:
-            item: the data item to process
-            spider: the spider instance
-
-        Returns:
-            the processed data item
-
-        Raises:
-            DropItem: raised if the data item is invalid
+        Process a data item
         """
-
-        if not item.get('title') or not item.get('url'):
-            raise DropItem("Missing required field: title or url")
-
-        # Clean the data
-        item['title'] = str(item['title']).strip()
-
-        # Add a processing timestamp
-        item['processed_at'] = datetime.now().isoformat()
-
-        # Counter
-        self.item_count += 1
-
-        # Logging
-        self.logger.info(f"Processing item #{self.item_count}: {item['title']}")
-
+        self.logger.info(f"Processing item: {item}")
         return item
 
     def open_spider(self, spider):
         """
-
-
-        Args:
-            spider: the spider instance
+        Called when the spider starts
         """
         self.logger.info(f"Pipeline started, ready to process data for spider '{spider.name}'")
 
     def close_spider(self, spider):
         """
-
-
-        Args:
-            spider: the spider instance
+        Called when the spider closes
         """
-        self.logger.info(
-
-
-# ======================== Usage notes ========================
-#
-# Enable the pipeline in settings.py:
-# PIPELINES = [
-#     '{{project_name}}.pipelines.ExamplePipeline',
-# ]
-#
-# You can add more pipelines as needed, for example:
-# 1. Data-validation pipelines
-# 2. Deduplication pipelines
-# 3. Storage pipelines (database, file, etc.)
-# 4. Data-transformation pipelines
-#
-# Each pipeline should implement process_item,
-# and may optionally implement open_spider and close_spider.
-# ======================== Usage notes ========================
+        self.logger.info("Pipeline closed")
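The template keeps the DropItem import even though the slimmed-down example no longer raises it, so generated projects can still veto items. A hedged sketch of how a project might register the generated pipeline and add a validating one (myproject and the url field are assumed examples; the from_crawler hook matches the check in pipeline_manager.py above):

    # settings.py: register the generated pipeline (and any custom ones)
    PIPELINES = [
        'myproject.pipelines.MyprojectPipeline',
    ]

    # A pipeline may still drop invalid items by raising DropItem:
    from crawlo.exceptions import DropItem

    class ValidatingPipeline:
        @classmethod
        def from_crawler(cls, crawler):
            # The pipeline manager requires a from_crawler hook;
            # a minimal one just instantiates the class.
            return cls()

        def process_item(self, item, spider):
            if not item.get('url'):  # illustrative required field
                raise DropItem('missing url')
            return item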
crawlo/templates/project/settings.py.tmpl
CHANGED
@@ -3,84 +3,69 @@
 {{project_name}} project configuration file
 =============================
 Crawler project configuration based on the Crawlo framework.
-"""
-
-import os
-
-# ============================== Project basics ==============================
-PROJECT_NAME = '{{project_name}}'
-
-# ============================== Run mode ==============================
-# Options: 'standalone', 'distributed', 'auto'
-RUN_MODE = 'standalone'
 
-
-
-
-DOWNLOAD_DELAY = 1.0
+This configuration is created with the CrawloConfig.standalone() factory method
+and is suited to development, testing, and small-to-medium scale collection jobs.
+"""
 
-
-# Available downloaders:
-# DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
-# DOWNLOADER = 'crawlo.downloader.httpx_downloader.HttpXDownloader'
-# DOWNLOADER = 'crawlo.downloader.cffi_downloader.CurlCffiDownloader'
-DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
+from crawlo.config import CrawloConfig
 
-#
-
-
-
-
+# Create the configuration with the standalone config factory
+config = CrawloConfig.auto(
+    project_name='{{project_name}}',
+    concurrency=8,
+    download_delay=1.0
+)
 
-#
-
+# Expose the configuration as module-level globals
+locals().update(config.to_dict())
 
-#
-DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+# =================================== Spider configuration ===================================
 
-#
+# Spider module configuration
 SPIDER_MODULES = ['{{project_name}}.spiders']
 
-#
-# MIDDLEWARES = [
-#     'crawlo.middleware.simple_proxy.SimpleProxyMiddleware',
-# ]
-
-# ============================== Default request headers ==============================
+# Default request header configuration
 # Default request headers for DefaultHeaderMiddleware
-DEFAULT_REQUEST_HEADERS = {
-    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
-    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
-    'Accept-Encoding': 'gzip, deflate, br',
-}
+# DEFAULT_REQUEST_HEADERS = {}
 
-#
+# Allowed domains
 # Allowed domains for OffsiteMiddleware
-# ALLOWED_DOMAINS = [
+# ALLOWED_DOMAINS = []
 
-#
+# Data pipelines
+# To add custom pipelines, uncomment and extend
 # PIPELINES = [
-#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage (async via asyncmy)
+#     # '{{project_name}}.pipelines.CustomPipeline',  # user-defined pipeline example
 # ]
 
-#
+# =================================== System configuration ===================================
+
+# Extensions
+# To add custom extensions, uncomment and extend
 # EXTENSIONS = [
-#     '
-#     'crawlo.extension.log_stats.LogStats',
-#     'crawlo.extension.logging_extension.CustomLoggerExtension',
+#     # '{{project_name}}.extensions.CustomExtension',  # user-defined extension example
 # ]
 
-#
+# Middleware
+# To add custom middleware, uncomment and extend
+# MIDDLEWARES = [
+#     # '{{project_name}}.middlewares.CustomMiddleware',  # user-defined middleware example
+# ]
+
+# Logging configuration
 LOG_LEVEL = 'INFO'
 LOG_FILE = 'logs/{{project_name}}.log'
 LOG_ENCODING = 'utf-8'  # Explicit log file encoding
 STATS_DUMP = True
 
-#
+# Output configuration
 OUTPUT_DIR = 'output'
 
-#
+# =================================== Database configuration ===================================
+
+# Redis configuration
 REDIS_HOST = '127.0.0.1'
 REDIS_PORT = 6379
 REDIS_PASSWORD = ''
@@ -92,18 +77,18 @@ if REDIS_PASSWORD:
 else:
     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
 
-#
-MYSQL_HOST =
-MYSQL_PORT =
-MYSQL_USER =
-MYSQL_PASSWORD =
-MYSQL_DB =
+# MySQL configuration
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = '{{project_name}}'
 MYSQL_TABLE = '{{project_name}}_data'
 MYSQL_BATCH_SIZE = 100
 MYSQL_USE_BATCH = False  # Whether to enable batch inserts
 
-#
-MONGO_URI =
+# MongoDB configuration
+MONGO_URI = 'mongodb://localhost:27017'
 MONGO_DATABASE = '{{project_name}}_db'
 MONGO_COLLECTION = '{{project_name}}_items'
 MONGO_MAX_POOL_SIZE = 200
@@ -111,7 +96,9 @@ MONGO_MIN_POOL_SIZE = 20
 MONGO_BATCH_SIZE = 100  # Batch insert size
 MONGO_USE_BATCH = False  # Whether to enable batch inserts
 
-#
+# =================================== Network configuration ===================================
+
+# Proxy configuration
 # Proxying is disabled by default; enable and configure it in the project settings if needed
 PROXY_ENABLED = False  # Whether to enable proxying
@@ -130,7 +117,6 @@ PROXY_EXTRACTOR = "proxy"
 PROXY_REFRESH_INTERVAL = 60  # Proxy refresh interval (seconds)
 PROXY_API_TIMEOUT = 10  # Proxy API request timeout
 
-# ============================== Curl-Cffi specific configuration ==============================
 # Browser fingerprint emulation (CurlCffi downloader only)
 CURL_BROWSER_TYPE = "chrome"  # Options: chrome, edge, safari, firefox, or a version such as chrome136
 
@@ -142,7 +128,7 @@ CURL_BROWSER_VERSION_MAP = {
     "firefox": "firefox135",
 }
 
-#
+# Downloader optimization configuration
 # Downloader health checks
 DOWNLOADER_HEALTH_CHECK = True  # Whether to enable downloader health checks
 HEALTH_CHECK_INTERVAL = 60  # Health check interval (seconds)
@@ -163,7 +149,7 @@ AIOHTTP_FORCE_CLOSE = False  # Whether to force-close connections
 CONNECTION_TTL_DNS_CACHE = 300  # DNS cache TTL (seconds)
 CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive timeout (seconds)
 
-#
+# Memory monitoring configuration
 # The memory monitor extension is disabled by default; enable it in the project settings if needed
 MEMORY_MONITOR_ENABLED = False  # Whether to enable memory monitoring
 MEMORY_MONITOR_INTERVAL = 60  # Memory monitor check interval (seconds)