crawlo 1.4.3-py3-none-any.whl → 1.4.4-py3-none-any.whl
This diff compares the contents of publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__version__.py +1 -1
- crawlo/commands/genspider.py +52 -17
- crawlo/crawler.py +6 -0
- crawlo/queue/pqueue.py +2 -6
- crawlo/queue/queue_manager.py +1 -2
- crawlo/settings/default_settings.py +11 -30
- crawlo/templates/project/settings.py.tmpl +51 -65
- crawlo/templates/project/settings_distributed.py.tmpl +59 -67
- crawlo/templates/project/settings_gentle.py.tmpl +45 -40
- crawlo/templates/project/settings_high_performance.py.tmpl +45 -40
- crawlo/templates/project/settings_minimal.py.tmpl +37 -26
- crawlo/templates/project/settings_simple.py.tmpl +45 -40
- crawlo/templates/run.py.tmpl +3 -7
- {crawlo-1.4.3.dist-info → crawlo-1.4.4.dist-info}/METADATA +1 -1
- {crawlo-1.4.3.dist-info → crawlo-1.4.4.dist-info}/RECORD +21 -27
- tests/test_multi_directory.py +68 -0
- tests/test_multiple_spider_modules.py +81 -0
- tests/test_spider_modules.py +85 -0
- examples/test_project/__init__.py +0 -7
- examples/test_project/run.py +0 -35
- examples/test_project/test_project/__init__.py +0 -4
- examples/test_project/test_project/items.py +0 -18
- examples/test_project/test_project/middlewares.py +0 -119
- examples/test_project/test_project/pipelines.py +0 -97
- examples/test_project/test_project/settings.py +0 -170
- examples/test_project/test_project/spiders/__init__.py +0 -10
- examples/test_project/test_project/spiders/of_week_dis.py +0 -144
- {crawlo-1.4.3.dist-info → crawlo-1.4.4.dist-info}/WHEEL +0 -0
- {crawlo-1.4.3.dist-info → crawlo-1.4.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.3.dist-info → crawlo-1.4.4.dist-info}/top_level.txt +0 -0
crawlo/__version__.py
CHANGED
@@ -1 +1 @@
-__version__ = '1.4.3'
+__version__ = '1.4.4'
crawlo/commands/genspider.py
CHANGED
@@ -6,15 +6,16 @@
 # @Desc : 命令行入口:crawlo genspider baidu,创建爬虫。
 """
 import sys
+import re
 from pathlib import Path
 import configparser
 import importlib
 from rich.console import Console
 
 from .utils import (
-    get_project_root,
-    validate_project_environment,
-    show_error_panel,
+    get_project_root,
+    validate_project_environment,
+    show_error_panel,
     show_success_panel,
     validate_spider_name,
     is_valid_domain
@@ -35,6 +36,39 @@ def _render_template(tmpl_path, context):
     return content
 
 
+def generate_class_name(spider_name):
+    """
+    根据爬虫名称生成类名
+    规则:蛇形命名 → 大驼峰命名 + 'Spider'
+    示例:
+        'news_spider' → 'NewsSpider'
+        'ofweek_standalone' → 'OfweekStandaloneSpider'
+        'baidu' → 'BaiduSpider'
+    """
+    # 如果名称已包含 'spider' 后缀,先去除
+    name_clean = spider_name
+
+    # 定义要移除的后缀列表
+    spider_suffixes = ['_spider', 'spider']
+
+    # 检查并移除后缀
+    for suffix in spider_suffixes:
+        if spider_name.endswith(suffix):
+            name_clean = spider_name[:-len(suffix)]
+            break
+
+    # 按分隔符拆分单词
+    words = re.split(r'[_-]', name_clean)
+
+    # 将每个单词首字母大写
+    capitalized_words = [word.capitalize() for word in words if word]
+
+    # 组合成类名
+    class_name = ''.join(capitalized_words) + 'Spider'
+
+    return class_name
+
+
 def main(args):
     if len(args) < 2:
         console.print("[bold red]错误:[/bold red] 用法: [blue]crawlo genspider[/blue] <爬虫名称> <域名>")
@@ -45,11 +79,11 @@ def main(args):
 
     spider_name = args[0]
     domain = args[1]
-
+
     # 验证爬虫名称
     if not validate_spider_name(spider_name):
         show_error_panel(
-            "无效的爬虫名称",
+            "无效的爬虫名称",
             f"爬虫名称 '[cyan]{spider_name}[/cyan]' 无效。\n"
             "爬虫名称应:\n"
             " • 以小写字母开头\n"
@@ -57,11 +91,11 @@ def main(args):
             " • 是有效的Python标识符"
         )
         return 1
-
+
     # 验证域名格式
     if not is_valid_domain(domain):
         show_error_panel(
-            "无效的域名",
+            "无效的域名",
             f"域名 '[cyan]{domain}[/cyan]' 格式无效。\n"
             "请提供有效的域名,如 'example.com'"
         )
@@ -72,7 +106,7 @@ def main(args):
     if not is_valid:
         show_error_panel("非Crawlo项目", error_msg)
         return 1
-
+
     project_root = get_project_root()
 
     # 确定 items 模块的路径
@@ -91,7 +125,8 @@ def main(args):
         if item_classes:
             default_item_class = item_classes[0].__name__
         else:
-            console.print(
+            console.print(
+                "[yellow]警告:[/yellow] 在 [cyan]items.py[/cyan] 中未找到项目类,使用 [green]ExampleItem[/green]。")
 
     except ImportError as e:
         console.print(f"[yellow]警告:[/yellow] 导入 [cyan]{items_module_path}[/cyan] 失败: {e}")
@@ -104,7 +139,7 @@ def main(args):
     spider_file = spiders_dir / f'{spider_name}.py'
     if spider_file.exists():
         show_error_panel(
-            "爬虫已存在",
+            "爬虫已存在",
             f"爬虫 '[cyan]{spider_name}[/cyan]' 已存在于\n[green]{spider_file}[/green]"
         )
         return 1
@@ -113,13 +148,13 @@ def main(args):
     tmpl_path = TEMPLATES_DIR / 'spider' / 'spider.py.tmpl'
     if not tmpl_path.exists():
         show_error_panel(
-            "模板未找到",
+            "模板未找到",
             f"模板文件未找到于 [cyan]{tmpl_path}[/cyan]"
         )
         return 1
 
-    #
-    class_name =
+    # 生成类名(使用新的转换函数)
+    class_name = generate_class_name(spider_name)
 
     context = {
         'spider_name': spider_name,
@@ -133,7 +168,7 @@ def main(args):
         content = _render_template(tmpl_path, context)
         with open(spider_file, 'w', encoding='utf-8') as f:
             f.write(content)
-
+
         console.print(f"[green]爬虫 '[bold]{spider_name}[/bold]' 创建成功![/green]")
         console.print(f"  → 位置: [cyan]{spider_file}[/cyan]")
         console.print(f"  → 类名: [yellow]{class_name}[/yellow]")
@@ -141,12 +176,12 @@ def main(args):
         console.print("\n[bold]下一步操作:[/bold]")
         console.print(f"  [blue]crawlo run[/blue] {spider_name}")
         console.print(f"  [blue]crawlo check[/blue] {spider_name}")
-
+
         return 0
-
+
     except Exception as e:
         show_error_panel(
-            "创建失败",
+            "创建失败",
             f"创建爬虫失败: {e}"
        )
        return 1
crawlo/crawler.py
CHANGED
@@ -350,6 +350,12 @@ class CrawlerProcess:
         self._crawlers: List[ModernCrawler] = []
         self._semaphore = asyncio.Semaphore(max_concurrency)
         self._logger = get_logger('crawler.process')
+
+        # 如果没有显式提供spider_modules,则从settings中获取
+        if spider_modules is None and self._settings:
+            spider_modules = self._settings.get('SPIDER_MODULES')
+            self._logger.debug(f"从settings中获取SPIDER_MODULES: {spider_modules}")
+
         self._spider_modules = spider_modules  # 保存spider_modules
 
         # 如果提供了spider_modules,自动注册这些模块中的爬虫
crawlo/queue/pqueue.py
CHANGED
@@ -1,12 +1,8 @@
 # -*- coding:UTF-8 -*-
-import json
-import sys
 import asyncio
+import sys
 from asyncio import PriorityQueue
-from typing import Optional,
-
-
-from crawlo import Request
+from typing import Optional, Any
 
 
 class SpiderPriorityQueue(PriorityQueue):
crawlo/queue/queue_manager.py
CHANGED
@@ -5,11 +5,10 @@
 提供简洁、一致的队列接口,自动处理不同队列类型的差异
 """
 import asyncio
+import time
 import traceback
 from enum import Enum
 from typing import Optional, Dict, Any, Union, TYPE_CHECKING
-import time
-import random
 
 if TYPE_CHECKING:
     from crawlo import Request
crawlo/settings/default_settings.py
CHANGED
@@ -6,9 +6,7 @@
 # 添加环境变量配置工具导入
 from crawlo.utils.env_config import get_redis_config, get_runtime_config, get_version
 
-#
-# 1. 框架基础配置
-# ===========================================================================
+# --------------------------------- 1. 框架基础配置 ------------------------------------
 
 # 框架初始化控制
 FRAMEWORK_INIT_ORDER = [
@@ -27,9 +25,7 @@ VERSION = get_version()  # 项目版本号 - 从框架的__version__.py文件中
 RUN_MODE = runtime_config['CRAWLO_MODE']  # 运行模式:standalone/distributed/auto
 CONCURRENCY = runtime_config['CONCURRENCY']  # 并发数配置
 
-#
-# 2. 爬虫核心配置
-# ===========================================================================
+# --------------------------------- 2. 爬虫核心配置 ------------------------------------
 
 # 下载器配置
 DOWNLOADER = "crawlo.downloader.httpx_downloader.HttpXDownloader"  # 默认下载器
@@ -53,9 +49,7 @@ QUEUE_TYPE = 'auto'  # 队列类型:memory/redis/auto
 QUEUE_MAX_RETRIES = 3  # 队列操作最大重试次数
 QUEUE_TIMEOUT = 300  # 队列操作超时时间(秒)
 
-#
-# 3. 数据库和过滤器配置
-# ===========================================================================
+# --------------------------------- 3. 数据库和过滤器配置 ------------------------------------
 
 # MySQL配置
 MYSQL_HOST = '127.0.0.1'
@@ -100,9 +94,7 @@ FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
 BLOOM_FILTER_CAPACITY = 1000000  # Bloom过滤器容量
 BLOOM_FILTER_ERROR_RATE = 0.001  # Bloom过滤器错误率
 
-#
-# 4. 中间件配置
-# ===========================================================================
+# --------------------------------- 4. 中间件配置 ------------------------------------
 
 # 框架中间件列表(框架默认中间件 + 用户自定义中间件)
 MIDDLEWARES = [
@@ -118,18 +110,14 @@ MIDDLEWARES = [
     'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. 响应内容过滤
 ]
 
-#
-# 5. 管道配置
-# ===========================================================================
+# --------------------------------- 5. 管道配置 ------------------------------------
 
 # 框架数据处理管道列表(框架默认管道 + 用户自定义管道)
 PIPELINES = [
     'crawlo.pipelines.console_pipeline.ConsolePipeline',
 ]
 
-#
-# 6. 扩展配置
-# ===========================================================================
+# --------------------------------- 6. 扩展配置 ------------------------------------
 
 # 框架扩展组件列表(框架默认扩展 + 用户自定义扩展)
 EXTENSIONS = [
@@ -138,9 +126,7 @@ EXTENSIONS = [
     'crawlo.extension.logging_extension.CustomLoggerExtension',  # 自定义日志
 ]
 
-#
-# 7. 日志与监控配置
-# ===========================================================================
+# --------------------------------- 7. 日志与监控配置 ------------------------------------
 
 # 日志配置
 LOG_LEVEL = None  # 日志级别: DEBUG/INFO/WARNING/ERROR,默认为None,由用户在项目settings中设置
@@ -148,13 +134,12 @@ STATS_DUMP = True  # 是否周期性输出统计信息
 LOG_FILE = None  # 日志文件路径,将在项目配置中设置
 LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
 LOG_ENCODING = 'utf-8'
+LOG_MAX_BYTES = 10 * 1024 * 1024  # 日志轮转大小(字节)
+LOG_BACKUP_COUNT = 5  # 日志备份数量
 
 # 日志间隔配置
 INTERVAL = 60  # 日志输出间隔(秒)
 
-# 自定义日志配置
-LOG_ENABLE_CUSTOM = False  # 是否启用自定义日志
-
 # 内存监控配置
 MEMORY_MONITOR_ENABLED = False  # 是否启用内存监控
 MEMORY_MONITOR_INTERVAL = 60  # 内存监控检查间隔(秒)
@@ -169,9 +154,7 @@ PERFORMANCE_PROFILER_INTERVAL = 300  # 性能分析间隔(秒)
 # 健康检查配置
 HEALTH_CHECK_ENABLED = True  # 是否启用健康检查
 
-#
-# 8. 网络请求配置
-# ===========================================================================
+# --------------------------------- 8. 网络请求配置 ------------------------------------
 
 # 默认请求头配置
 DEFAULT_REQUEST_HEADERS = {
@@ -262,9 +245,7 @@ PLAYWRIGHT_MAX_PAGES_PER_BROWSER = 10  # 单浏览器最大页面数量
 CONNECTION_TTL_DNS_CACHE = 300  # DNS缓存TTL(秒)
 CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive超时(秒)
 
-#
-# 9. 数据存储配置
-# ===========================================================================
+# --------------------------------- 9. 数据存储配置 ------------------------------------
 
 # CSV管道配置
 CSV_DELIMITER = ','  # CSV分隔符
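Besides the section-header reformatting, default_settings.py gains LOG_MAX_BYTES and LOG_BACKUP_COUNT defaults, which point at size-based log rotation. Assuming they map onto Python's standard RotatingFileHandler parameters (an assumption; the framework's own wiring is not part of this diff), the equivalent standard-library setup looks like this, reusing the LOG_FORMAT default from above and a placeholder log path:

import logging
import os
from logging.handlers import RotatingFileHandler

LOG_MAX_BYTES = 10 * 1024 * 1024   # rotate after 10 MiB
LOG_BACKUP_COUNT = 5               # keep 5 rotated files

os.makedirs('logs', exist_ok=True)
handler = RotatingFileHandler(
    'logs/example.log',            # placeholder path
    maxBytes=LOG_MAX_BYTES,
    backupCount=LOG_BACKUP_COUNT,
    encoding='utf-8',
)
handler.setFormatter(logging.Formatter('%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'))
logging.getLogger('crawlo').addHandler(handler)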
crawlo/templates/project/settings.py.tmpl
CHANGED
@@ -3,84 +3,69 @@
 {{project_name}} 项目配置文件
 =============================
 基于 Crawlo 框架的爬虫项目配置。
-"""
-
-import os
-
-# ============================== 项目基本信息 ==============================
-PROJECT_NAME = '{{project_name}}'
-
-# ============================== 运行模式 ==============================
-# 可选值: 'standalone', 'distributed', 'auto'
-RUN_MODE = 'standalone'
 
-
-
-
-DOWNLOAD_DELAY = 1.0
+此配置使用 CrawloConfig.standalone() 工厂方法创建单机模式配置,
+适用于开发测试和中小规模数据采集任务。
+"""
 
-
-# 可选下载器:
-# DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
-# DOWNLOADER = 'crawlo.downloader.httpx_downloader.HttpXDownloader'
-# DOWNLOADER = 'crawlo.downloader.cffi_downloader.CurlCffiDownloader'
-DOWNLOADER = 'crawlo.downloader.aiohttp_downloader.AioHttpDownloader'
+from crawlo.config import CrawloConfig
 
-#
-
-
-
-
-
+# 使用单机模式配置工厂创建配置
+config = CrawloConfig.auto(
+    project_name='{{project_name}}',
+    concurrency=8,
+    download_delay=1.0
+)
 
-#
-
+# 将配置转换为当前模块的全局变量
+locals().update(config.to_dict())
 
-#
-DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
+# =================================== 爬虫配置 ===================================
 
-#
+# 爬虫模块配置
 SPIDER_MODULES = ['{{project_name}}.spiders']
 
-#
-# MIDDLEWARES = [
-#     'crawlo.middleware.simple_proxy.SimpleProxyMiddleware',
-# ]
-
-# ============================== 默认请求头配置 ==============================
+# 默认请求头配置
 # 为DefaultHeaderMiddleware配置默认请求头
-DEFAULT_REQUEST_HEADERS = {
-    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
-    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
-    'Accept-Encoding': 'gzip, deflate, br',
-}
+# DEFAULT_REQUEST_HEADERS = {}
 
-#
+# 允许的域名
 # 为OffsiteMiddleware配置允许的域名
-# ALLOWED_DOMAINS = [
+# ALLOWED_DOMAINS = []
 
-#
+# 数据管道
+# 如需添加自定义管道,请取消注释并添加
 # PIPELINES = [
-#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',
+#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL 存储(使用asyncmy异步库)
+#     # '{{project_name}}.pipelines.CustomPipeline',  # 用户自定义管道示例
 # ]
 
-#
+# =================================== 系统配置 ===================================
+
+# 扩展组件
+# 如需添加自定义扩展,请取消注释并添加
 # EXTENSIONS = [
-#     '
-#     'crawlo.extension.log_stats.LogStats',
-#     'crawlo.extension.logging_extension.CustomLoggerExtension',
+#     # '{{project_name}}.extensions.CustomExtension',  # 用户自定义扩展示例
 # ]
 
-#
+# 中间件
+# 如需添加自定义中间件,请取消注释并添加
+# MIDDLEWARES = [
+#     # '{{project_name}}.middlewares.CustomMiddleware',  # 用户自定义中间件示例
+# ]
+
+# 日志配置
 LOG_LEVEL = 'INFO'
 LOG_FILE = 'logs/{{project_name}}.log'
 LOG_ENCODING = 'utf-8'  # 明确指定日志文件编码
 STATS_DUMP = True
 
-#
+# 输出配置
 OUTPUT_DIR = 'output'
 
-#
+# =================================== 数据库配置 ===================================
+
+# Redis配置
 REDIS_HOST = '127.0.0.1'
 REDIS_PORT = 6379
 REDIS_PASSWORD = ''
@@ -92,18 +77,18 @@ if REDIS_PASSWORD:
 else:
     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
 
-#
-MYSQL_HOST =
-MYSQL_PORT =
-MYSQL_USER =
-MYSQL_PASSWORD =
-MYSQL_DB =
+# MySQL配置
+MYSQL_HOST = '127.0.0.1'
+MYSQL_PORT = 3306
+MYSQL_USER = 'root'
+MYSQL_PASSWORD = '123456'
+MYSQL_DB = '{{project_name}}'
 MYSQL_TABLE = '{{project_name}}_data'
 MYSQL_BATCH_SIZE = 100
 MYSQL_USE_BATCH = False  # 是否启用批量插入
 
-#
-MONGO_URI =
+# MongoDB配置
+MONGO_URI = 'mongodb://localhost:27017'
 MONGO_DATABASE = '{{project_name}}_db'
 MONGO_COLLECTION = '{{project_name}}_items'
 MONGO_MAX_POOL_SIZE = 200
@@ -111,7 +96,9 @@ MONGO_MIN_POOL_SIZE = 20
 MONGO_BATCH_SIZE = 100  # 批量插入条数
 MONGO_USE_BATCH = False  # 是否启用批量插入
 
-#
+# =================================== 网络配置 ===================================
+
+# 代理配置
 # 代理功能默认不启用,如需使用请在项目配置文件中启用并配置相关参数
 PROXY_ENABLED = False  # 是否启用代理
 
@@ -130,7 +117,6 @@ PROXY_EXTRACTOR = "proxy"
 PROXY_REFRESH_INTERVAL = 60  # 代理刷新间隔(秒)
 PROXY_API_TIMEOUT = 10  # 请求代理 API 超时时间
 
-# ============================== Curl-Cffi 特有配置 ==============================
 # 浏览器指纹模拟(仅 CurlCffi 下载器有效)
 CURL_BROWSER_TYPE = "chrome"  # 可选: chrome, edge, safari, firefox 或版本如 chrome136
 
@@ -142,7 +128,7 @@ CURL_BROWSER_VERSION_MAP = {
     "firefox": "firefox135",
 }
 
-#
+# 下载器优化配置
 # 下载器健康检查
 DOWNLOADER_HEALTH_CHECK = True  # 是否启用下载器健康检查
 HEALTH_CHECK_INTERVAL = 60  # 健康检查间隔(秒)
@@ -163,7 +149,7 @@ AIOHTTP_FORCE_CLOSE = False  # 是否强制关闭连接
 CONNECTION_TTL_DNS_CACHE = 300  # DNS缓存TTL(秒)
 CONNECTION_KEEPALIVE_TIMEOUT = 15  # Keep-Alive超时(秒)
 
-#
+# 内存监控配置
 # 内存监控扩展默认不启用,如需使用请在项目配置文件中启用
 MEMORY_MONITOR_ENABLED = False  # 是否启用内存监控
 MEMORY_MONITOR_INTERVAL = 60  # 内存监控检查间隔(秒)
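The rewritten settings.py.tmpl replaces most hand-written constants with a CrawloConfig factory call followed by locals().update(config.to_dict()), which promotes every generated setting to a module-level name that later lines (LOG_LEVEL, REDIS_HOST, and so on) can then override or extend. A generic sketch of that pattern — FakeConfig and its uppercase-key convention are illustrative assumptions; the real factory is crawlo.config.CrawloConfig as shown in the diff:

class FakeConfig:
    """Stand-in for a config factory that exposes its settings as a dict."""

    def __init__(self, **kwargs):
        self._values = {key.upper(): value for key, value in kwargs.items()}

    def to_dict(self):
        return dict(self._values)


config = FakeConfig(project_name='demo', concurrency=8, download_delay=1.0)

# At module level locals() is globals(), so this defines PROJECT_NAME,
# CONCURRENCY and DOWNLOAD_DELAY as module attributes, exactly like the
# template's locals().update(config.to_dict()) line.
globals().update(config.to_dict())

print(PROJECT_NAME, CONCURRENCY, DOWNLOAD_DELAY)  # demo 8 1.0

Note that this pattern only works because the template code runs at module level, where locals() and globals() are the same dictionary; inside a function, locals().update() would not create new local variables.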