crawlo 1.1.6__py3-none-any.whl → 1.1.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__version__.py +1 -1
- crawlo/cli.py +30 -5
- crawlo/commands/__init__.py +2 -1
- crawlo/commands/help.py +133 -0
- crawlo/templates/project/middlewares.py.tmpl +0 -1
- crawlo/templates/project/pipelines.py.tmpl +0 -1
- crawlo/templates/project/run.py.tmpl +25 -231
- crawlo/templates/project/settings.py.tmpl +2 -2
- {crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/METADATA +237 -12
- {crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/RECORD +13 -12
- {crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/WHEEL +0 -0
- {crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/top_level.txt +0 -0
crawlo/__version__.py
CHANGED

@@ -1 +1 @@
-__version__ = "1.1.6"
+__version__ = "1.1.8"
crawlo/cli.py
CHANGED

@@ -10,24 +10,49 @@ def main():
     # Collect all available commands
     commands = get_commands()
 
+    # Create the top-level parser
     parser = argparse.ArgumentParser(
         description="Crawlo: A lightweight web crawler framework.",
-        usage="crawlo <command> [options]"
+        usage="crawlo <command> [options]",
+        add_help=False  # disable the default help; we handle it ourselves
     )
-
-    #
-
-
+
+    # Register the help arguments
+    parser.add_argument('-h', '--help', action='store_true', help='Show help information')
+    parser.add_argument('command', nargs='?', help='Available commands: ' + ', '.join(commands.keys()))
+
+    # Parse only the arguments we know about
     args, unknown = parser.parse_known_args()
 
+    # Handle the help flag
+    if args.help or (args.command is None and not unknown):
+        # Import and run the help command
+        try:
+            module = __import__(commands['help'], fromlist=['main'])
+            sys.exit(module.main([]))
+        except ImportError as e:
+            print(f"Failed to load help command: {e}")
+            sys.exit(1)
+        except Exception as e:
+            print(f"Help command failed: {e}")
+            sys.exit(1)
+
+    # Make sure the command exists
     if args.command not in commands:
         print(f"Unknown command: {args.command}")
         print(f"Available commands: {', '.join(commands.keys())}")
+        # Show the help screen as well
+        try:
+            module = __import__(commands['help'], fromlist=['main'])
+            module.main([])
+        except:
+            pass
         sys.exit(1)
 
     # Dynamically import and execute the command
     try:
         module = __import__(commands[args.command], fromlist=['main'])
+        # Forward unknown arguments to the subcommand
         sys.exit(module.main(unknown))
     except ImportError as e:
         print(f"Failed to load command '{args.command}': {e}")
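The heart of this change is `parse_known_args()`: everything the top-level parser does not recognize is collected and forwarded to the subcommand's `main()`. A minimal standalone sketch of that splitting behavior, independent of Crawlo itself:

```python
import argparse

# Mirror the top-level parser from cli.py above.
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('-h', '--help', action='store_true')
parser.add_argument('command', nargs='?')

# Simulate: crawlo run myspider --json
args, unknown = parser.parse_known_args(['run', 'myspider', '--json'])
print(args.command)  # 'run'
print(unknown)       # ['myspider', '--json'] — forwarded to the subcommand
```

Note that `add_help=False` is what lets the custom `-h` flag reach this parser instead of triggering argparse's built-in help and exiting early.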
crawlo/commands/__init__.py
CHANGED
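The body of this +2/−1 change is not rendered by the diff viewer. Since cli.py now looks up `commands['help']`, the change presumably registers the new help module in the command table; a hypothetical sketch of what that registry might look like (the actual shape of `get_commands()` is not shown in this diff, though every module listed below does appear in the RECORD):

```python
# crawlo/commands/__init__.py — hypothetical sketch, not the rendered change
def get_commands():
    # cli.py calls __import__(commands[name], fromlist=['main']) on these
    # values, so each entry must be an importable module exposing main(args).
    return {
        'startproject': 'crawlo.commands.startproject',
        'genspider': 'crawlo.commands.genspider',
        'run': 'crawlo.commands.run',
        'check': 'crawlo.commands.check',
        'list': 'crawlo.commands.list',
        'stats': 'crawlo.commands.stats',
        'help': 'crawlo.commands.help',  # presumably the new registration
    }
```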
crawlo/commands/help.py
ADDED

@@ -0,0 +1,133 @@
+#!/usr/bin/python
+# -*- coding: UTF-8 -*-
+"""
+# @Time   : 2025-09-12
+# @Author : crawl-coder
+# @Desc   : Command-line entry point for `crawlo -h|--help`; displays help information.
+"""
+import sys
+from rich.console import Console
+from rich.table import Table
+from rich.panel import Panel
+from rich.text import Text
+from rich import box
+
+console = Console()
+
+
+def main(args):
+    """
+    Entry point: display help information.
+    Usage:
+        crawlo -h|--help
+    """
+    # Reject invalid arguments
+    if args and args[0] not in ['-h', '--help', 'help']:
+        console.print("[bold red]❌ Invalid argument:[/bold red] [yellow]{}[/yellow]".format(args[0]))
+        console.print("[bold blue]💡 Hint:[/bold blue] use [green]crawlo -h[/green] or [green]crawlo --help[/green] to view the help")
+        return 1
+
+    # Show the help screen
+    show_help()
+    return 0
+
+
+def show_help():
+    """Render the complete help screen."""
+    # Framework banner and version
+    console.print(Panel(
+        Text.from_markup(":spider_web: [bold blue]Crawlo[/bold blue] [bold white]v1.1.7[/bold white] - async crawler framework"),
+        expand=False,
+        border_style="blue"
+    ))
+
+    # Basic usage
+    console.print("[bold green]Basic usage:[/bold green]")
+    console.print("  [blue]crawlo[/blue] [cyan]<command>[/cyan] [options]")
+    console.print()
+
+    # Available commands
+    console.print("[bold green]Available commands:[/bold green]")
+    table = Table(box=box.SIMPLE, show_header=True, header_style="bold magenta")
+    table.add_column("Command", style="cyan", width=15)
+    table.add_column("Description", style="white")
+    table.add_column("Usage", style="yellow")
+
+    table.add_row("startproject", "Create a new project", "crawlo startproject <project_name>")
+    table.add_row("genspider", "Generate a spider template", "crawlo genspider <spider_name> [domain]")
+    table.add_row("run", "Run spiders", "crawlo run <spider_name>|all [options]")
+    table.add_row("check", "Check spider code", "crawlo check [options]")
+    table.add_row("list", "List all spiders", "crawlo list")
+    table.add_row("stats", "Show run statistics", "crawlo stats [spider_name]")
+    table.add_row("help", "Show help information", "crawlo -h|--help")
+
+    console.print(table)
+    console.print()
+
+    # Global options
+    console.print("[bold green]Global options:[/bold green]")
+    table = Table(box=box.SIMPLE, show_header=False)
+    table.add_column("Option", style="cyan", width=15)
+    table.add_column("Description", style="white")
+
+    table.add_row("-h, --help", "Show help information")
+    table.add_row("-v, --version", "Show version information")
+
+    console.print(table)
+    console.print()
+
+    # Detailed usage per command
+    console.print("[bold green]Command details:[/bold green]")
+
+    # run
+    console.print("[bold cyan]run[/bold cyan] - run spiders")
+    console.print("  Usage: crawlo run <spider_name>|all [--json] [--no-stats]")
+    console.print("  Examples:")
+    console.print("    crawlo run myspider")
+    console.print("    crawlo run all")
+    console.print("    crawlo run all --json --no-stats")
+    console.print()
+
+    # check
+    console.print("[bold cyan]check[/bold cyan] - check spider code")
+    console.print("  Usage: crawlo check [--fix] [--ci] [--json] [--watch]")
+    console.print("  Examples:")
+    console.print("    crawlo check")
+    console.print("    crawlo check --fix")
+    console.print("    crawlo check --ci --json")
+    console.print()
+
+    # startproject
+    console.print("[bold cyan]startproject[/bold cyan] - create a new project")
+    console.print("  Usage: crawlo startproject <project_name>")
+    console.print("  Examples:")
+    console.print("    crawlo startproject myproject")
+    console.print()
+
+    # genspider
+    console.print("[bold cyan]genspider[/bold cyan] - generate a spider template")
+    console.print("  Usage: crawlo genspider <spider_name> [domain]")
+    console.print("  Examples:")
+    console.print("    crawlo genspider myspider example.com")
+    console.print()
+
+    # list
+    console.print("[bold cyan]list[/bold cyan] - list all spiders")
+    console.print("  Usage: crawlo list")
+    console.print("  Examples:")
+    console.print("    crawlo list")
+    console.print()
+
+    # stats
+    console.print("[bold cyan]stats[/bold cyan] - show run statistics")
+    console.print("  Usage: crawlo stats [spider_name]")
+    console.print("  Examples:")
+    console.print("    crawlo stats")
+    console.print("    crawlo stats myspider")
+    console.print()
+
+    # More information
+    console.print("[bold green]More information:[/bold green]")
+    console.print("  Docs:   https://crawlo.readthedocs.io/")
+    console.print("  Source: https://github.com/crawl-coder/Crawlo")
+    console.print("  Issues: https://github.com/crawl-coder/Crawlo/issues")
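One wrinkle worth noting: the banner hardcodes `v1.1.7` even though this release is 1.1.8, so the help screen will drift from the real version. A sketch of deriving it from `crawlo/__version__.py` (which this release does update) — a suggested fix, not what help.py ships:

```python
# Hypothetical alternative to the hardcoded banner string in show_help().
from crawlo.__version__ import __version__

banner = f":spider_web: [bold blue]Crawlo[/bold blue] [bold white]v{__version__}[/bold white] - async crawler framework"
```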
crawlo/templates/project/run.py.tmpl
CHANGED

@@ -3,250 +3,44 @@
 """
 {{project_name}} project run script
 ============================
-Based on Crawlo
-Supports standalone/distributed modes; flexible configuration, works out of the box.
-
-🎯 Quick start:
-    python run.py spider_name                   # run in standalone mode
-    python run.py spider_name --distributed     # run in distributed mode
-    python run.py spider_name --env production  # use a preset configuration
-    python run.py all                           # run all spiders
-
-🔧 Advanced options:
-    python run.py spider_name --dry-run         # dry run (no actual crawling)
-    python run.py spider_name --concurrency 16  # custom concurrency
-    python run.py spider_name --mode gentle     # gentle mode (low load)
-    python run.py spider1 spider2 --distributed # multiple spiders, distributed
-
-📦 Configuration modes:
-    --standalone   standalone mode (default) - in-memory queue, no external dependencies
-    --distributed  distributed mode - Redis queue, multi-node support
-    --auto         auto mode - detects Redis availability
-
-🎛️ Preset configurations:
-    --env development  development (debug-friendly)
-    --env production   production (high performance)
-    --env large-scale  large-scale crawling (memory-optimized)
-    --env gentle       gentle mode (low load)
+A simplified spider launcher based on the Crawlo framework.
 """
 
-import os
 import sys
+import os
 import asyncio
-import argparse
-from pathlib import Path
-from crawlo.crawler import CrawlerProcess
-from crawlo.config import CrawloConfig
-from crawlo.mode_manager import standalone_mode, distributed_mode, auto_mode
-
-
-def create_parser():
-    """Build the command-line argument parser."""
-    parser = argparse.ArgumentParser(
-        description='{{project_name}} spider launcher - based on the Crawlo framework',
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog="""
-Examples:
-  python run.py my_spider                    # standalone mode (default)
-  python run.py my_spider --distributed      # distributed mode
-  python run.py my_spider --env production   # production configuration
-  python run.py spider1 spider2              # run multiple spiders
-  python run.py all                          # run all spiders
-  python run.py my_spider --dry-run          # test mode
-        """
-    )
-
-    # Spider names (positional)
-    parser.add_argument(
-        'spiders',
-        nargs='*',
-        help='Names of the spiders to run (multiple allowed; "all" runs every spider)'
-    )
-
-    # Run-mode selection
-    mode_group = parser.add_mutually_exclusive_group()
-    mode_group.add_argument(
-        '--standalone',
-        action='store_true',
-        help='Standalone mode (default) - in-memory queue, no external dependencies'
-    )
-    mode_group.add_argument(
-        '--distributed',
-        action='store_true',
-        help='Distributed mode - Redis queue, multi-node crawling'
-    )
-    mode_group.add_argument(
-        '--auto',
-        action='store_true',
-        help='Auto mode - pick the queue type based on Redis availability'
-    )
-
-    # Preset environment configuration
-    parser.add_argument(
-        '--env',
-        choices=['development', 'production', 'large-scale', 'gentle'],
-        help='Preset environment configuration (takes precedence over mode flags)'
-    )
-
-    # Performance tuning
-    parser.add_argument(
-        '--concurrency',
-        type=int,
-        help='Number of concurrent requests (overrides the default)'
-    )
-
-    parser.add_argument(
-        '--delay',
-        type=float,
-        help='Delay between requests (seconds)'
-    )
-
-    # Feature flags
-    parser.add_argument(
-        '--dry-run',
-        action='store_true',
-        help='Dry-run mode - parse pages without performing actual crawling'
-    )
-
-    parser.add_argument(
-        '--debug',
-        action='store_true',
-        help='Enable debug mode - verbose logging'
-    )
-
-    parser.add_argument(
-        '--config-file',
-        type=str,
-        help='Path to a custom configuration file'
-    )
-
-    # Environment-variable support
-    parser.add_argument(
-        '--from-env',
-        action='store_true',
-        help='Load configuration from environment variables (CRAWLO_*)'
-    )
-
-    return parser
 
+# Add the project root to the Python path
+project_root = os.path.dirname(os.path.abspath(__file__))
+sys.path.insert(0, project_root)
 
-
-
-    config = None
-
-    # 1. Environment-variable configuration takes priority
-    if args.from_env:
-        config = CrawloConfig.from_env()
-        print("📋 Using environment-variable configuration")
-
-    # 2. Preset environment configuration
-    elif args.env:
-        presets = {
-            'development': CrawloConfig.presets().development(),
-            'production': CrawloConfig.presets().production(),
-            'large-scale': CrawloConfig.presets().large_scale(),
-            'gentle': CrawloConfig.presets().gentle()
-        }
-        config = presets[args.env]
-        print(f"🎛️ Using preset configuration: {args.env}")
-
-    # 3. Mode-based configuration
-    elif args.distributed:
-        config = CrawloConfig.distributed()
-        print("🌐 Distributed mode enabled")
-    elif args.auto:
-        config = CrawloConfig.auto()
-        print("🤖 Auto-detection mode enabled")
-    else:
-        # Standalone mode by default
-        config = CrawloConfig.standalone()
-        print("💻 Using standalone mode (default)")
-
-    # 4. Apply command-line overrides
-    if args.concurrency:
-        config.set('CONCURRENCY', args.concurrency)
-        print(f"⚡ Concurrency set to: {args.concurrency}")
-
-    if args.delay:
-        config.set('DOWNLOAD_DELAY', args.delay)
-        print(f"⏱️ Request delay set to: {args.delay}s")
-
-    if args.debug:
-        config.set('LOG_LEVEL', 'DEBUG')
-        print("🐛 Debug mode enabled")
-
-    if args.dry_run:
-        # Dry-run configuration (adjust as needed)
-        config.set('DOWNLOAD_DELAY', 0.1)  # speed things up
-        config.set('CONCURRENCY', 1)       # reduce concurrency
-        print("🧪 Dry-run mode enabled")
-
-    return config
+# Change into the project root directory
+os.chdir(project_root)
 
+from crawlo.crawler import CrawlerProcess
 
-
-    """
-
-    # Parse command-line arguments
-    parser = create_parser()
-    args = parser.parse_args()
-
-    # Make sure at least one spider was named
-    if not args.spiders:
-        print("❌ Please specify the spider(s) to run")
-        print("\nAvailable spiders:")
-        print("  # TODO: list your spiders here")
-        print("  # from {{project_name}}.spiders import MySpider")
-        print("\nUsage: python run.py <spider_name>")
-        parser.print_help()
-        return
-
-    # Build the configuration
-    config = build_config(args)
-
-    # Create the crawler process
-    print(f"\n🚀 Starting spiders: {', '.join(args.spiders)}")
-
-    if args.dry_run:
-        print("  🧪 [dry run] pages will be parsed but not actually crawled")
+def main():
+    """Entry point: run a fixed spider."""
+    print("🚀 Starting the {{project_name}} spider")
 
+    # Create the crawler process (loads the default configuration automatically)
     try:
-        #
-
+        # Make sure the spider modules get imported
+        spider_modules = ['{{project_name}}.spiders']
+        process = CrawlerProcess(spider_modules=spider_modules)
+        print("✅ Crawler process initialized")
 
-        #
-
-
-        spider_names = process.get_spider_names()
-        if not spider_names:
-            print("❌ No spiders found")
-            print("💡 Make sure that:")
-            print("   • spiders are defined under the 'spiders/' directory")
-            print("   • spider classes have a 'name' attribute")
-            return 1
-
-        print(f"📋 Found {len(spider_names)} spiders: {', '.join(spider_names)}")
-        # Run all spiders
-        await process.crawl(spider_names)
-    else:
-        # Run the specified spiders
-        await process.crawl(args.spiders)
+        # Run the fixed spider
+        # TODO: replace 'your_spider_name' with the actual spider name
+        asyncio.run(process.crawl('your_spider_name'))
 
-        print("
+        print("✅ Spider run finished")
 
-    except ImportError as e:
-        print(f"❌ Failed to import spider: {e}")
-        print("   Check that the spider file exists and update the imports in run.py")
     except Exception as e:
-        print(f"❌
-
-
+        print(f"❌ Run failed: {e}")
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)
 
 if __name__ == '__main__':
-
-    asyncio.run(main())
-except KeyboardInterrupt:
-    print("\n⏹️ Spider execution interrupted by user")
-except Exception as e:
-    print(f"❌ Runtime error: {e}")
-    sys.exit(1)
+    main()
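The simplified template drops the old `all` handling along with the argument parser. If a project still wants a run-everything script, the calls the removed code relied on (`process.get_spider_names()` and a coroutine-returning `process.crawl(...)`) suggest a minimal replacement; a sketch under that assumption, with `myproject` standing in for the rendered `{{project_name}}`:

```python
# run_all.py — sketch based only on APIs used by the old template above
import asyncio
from crawlo.crawler import CrawlerProcess

process = CrawlerProcess(spider_modules=['myproject.spiders'])
spider_names = process.get_spider_names()     # discovery call from the old template
if spider_names:
    asyncio.run(process.crawl(spider_names))  # the old template awaited process.crawl(...)
else:
    print("No spiders found under 'spiders/'")
```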
crawlo/templates/project/settings.py.tmpl
CHANGED

@@ -149,10 +149,10 @@ REQUEST_DIR = '.'
 # Distributed mode uses the Redis dedup pipeline by default
 if RUN_MODE == 'distributed':
     # In distributed mode, default to the Redis dedup pipeline
-    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.RedisDedupPipeline'
+    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
 else:
     # In standalone mode, default to the in-memory dedup pipeline
-    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.MemoryDedupPipeline'
+    DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
 
 # Dedup filter (the Redis filter is recommended for distributed projects)
 FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'
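This two-line fix matters because pipeline entries are strings that get resolved to classes at runtime. Frameworks typically do that with an importlib-based loader, which needs the full `package.module.ClassName` path unless the class is re-exported from the package `__init__`. A sketch of the usual resolution logic (Crawlo's actual loader is not part of this diff):

```python
import importlib

def load_object(path: str):
    # 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
    # -> import crawlo.pipelines.redis_dedup_pipeline, then grab the class.
    module_path, _, name = path.rpartition('.')
    module = importlib.import_module(module_path)
    return getattr(module, name)

# The old value 'crawlo.pipelines.RedisDedupPipeline' only resolves if
# crawlo/pipelines/__init__.py re-exports RedisDedupPipeline; the new
# fully qualified default does not depend on that re-export.
```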
{crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: crawlo
-Version: 1.1.6
+Version: 1.1.8
 Summary: Crawlo is a high-performance Python crawler framework built on asynchronous IO, with support for distributed crawling.
 Home-page: https://github.com/crawl-coder/Crawlo.git
 Author: crawl-coder

@@ -80,10 +80,25 @@ pip install crawlo
 ### Create a project
 
 ```bash
+# Create a default project
 crawlo startproject myproject
+
+# Create a project from the distributed template
+crawlo startproject myproject distributed
+
+# Create a project with selected modules
+crawlo startproject myproject --modules mysql,redis,proxy
+
 cd myproject
 ```
 
+### Generate a spider
+
+```bash
+# Generate a spider inside the project directory
+crawlo genspider news_spider news.example.com
+```
+
 ### Write a spider
 
 ```python

@@ -109,9 +124,158 @@ class MySpider(Spider):
 ### Run spiders
 
 ```bash
-
+# Run a spider with the command-line tool (recommended)
+crawlo run myspider
+
+# Run via the project's own run.py script
+python run.py
+
+# Run all spiders
+crawlo run all
+
+# Works correctly from project subdirectories too
+cd subdirectory
+crawlo run myspider
+```
+
+## 📜 Command-line tools
+
+Crawlo ships a rich set of command-line tools for developing and managing crawler projects:
+
+### Getting help
+
+```bash
+# Show help information
+crawlo -h
+crawlo --help
+crawlo help
+```
+
+### crawlo startproject
+
+Create a new crawler project.
+
+```bash
+# Basic usage
+crawlo startproject <project_name> [template_type] [--modules module1,module2]
+
+# Examples
+crawlo startproject my_spider_project
+crawlo startproject news_crawler simple
+crawlo startproject ecommerce_spider distributed --modules mysql,proxy
+```
+
+**Arguments:**
+- `project_name`: project name (must be a valid Python identifier)
+- `template_type`: template type (optional)
+  - `default`: default template - general-purpose configuration, suitable for most projects
+  - `simple`: simple template - minimal configuration for a quick start
+  - `distributed`: distributed template - optimized for distributed crawling
+  - `high-performance`: high-performance template - optimized for large-scale, high-concurrency crawls
+  - `gentle`: gentle template - low-load configuration, friendly to target sites
+- `--modules`: module components to include (optional)
+  - `mysql`: MySQL database support
+  - `mongodb`: MongoDB database support
+  - `redis`: Redis support (distributed queue and deduplication)
+  - `proxy`: proxy support
+  - `monitoring`: monitoring and performance profiling
+  - `dedup`: deduplication support
+  - `httpx`: HttpX downloader
+  - `aiohttp`: AioHttp downloader
+  - `curl`: CurlCffi downloader
+
+### crawlo genspider
+
+Generate a new spider inside an existing project.
+
+```bash
+# Basic usage
+crawlo genspider <spider_name> <domain>
+
+# Examples
+crawlo genspider news_spider news.example.com
+crawlo genspider product_spider shop.example.com
+```
+
+**Arguments:**
+- `spider_name`: spider name (must be a valid Python identifier)
+- `domain`: target domain
+
+### crawlo run
+
+Run spiders.
+
+```bash
+# Basic usage
+crawlo run <spider_name>|all [--json] [--no-stats]
+
+# Examples
+crawlo run myspider
+crawlo run all
+crawlo run all --json --no-stats
 ```
 
+**Arguments:**
+- `spider_name`: name of the spider to run
+- `all`: run all spiders
+- `--json`: output results in JSON format
+- `--no-stats`: do not record statistics
+
+### crawlo list
+
+List all spiders available in the project.
+
+```bash
+# Basic usage
+crawlo list [--json]
+
+# Examples
+crawlo list
+crawlo list --json
+```
+
+**Arguments:**
+- `--json`: output results in JSON format
+
+### crawlo check
+
+Check spider definitions for compliance.
+
+```bash
+# Basic usage
+crawlo check [--fix] [--ci] [--json] [--watch]
+
+# Examples
+crawlo check
+crawlo check --fix
+crawlo check --ci
+crawlo check --watch
+```
+
+**Arguments:**
+- `--fix`: automatically fix common issues
+- `--ci`: CI-mode output (terse format)
+- `--json`: output results in JSON format
+- `--watch`: watch mode; re-check automatically on file changes
+
+### crawlo stats
+
+Show spider run statistics.
+
+```bash
+# Basic usage
+crawlo stats [spider_name] [--all]
+
+# Examples
+crawlo stats
+crawlo stats myspider
+crawlo stats myspider --all
+```
+
+**Arguments:**
+- `spider_name`: spider whose statistics to display
+- `--all`: show all historical runs for the given spider
+
 ## 🏗️ Architecture
 
 ### Component interaction diagram

@@ -176,6 +340,7 @@ crawlo crawl myspider
 │  │  │  - ValidationPipeline   │  │  │
 │  │  │  - ProcessingPipeline   │  │  │
 │  │  │  - StoragePipeline      │  │  │
+│  │  │  - DeduplicationPipeline│  │  │
 │  │  └─────────────────────────┘  │  │
 │  └──────────────────────────────┘  │
 └─────────────────────────────────────┘

@@ -229,12 +394,13 @@
         ▼                                   ▼
 ┌─────────────────┐   7. yield data    ┌─────────────┐
 │    Processor    ├───────────────────►│  Pipeline   │
-└─────────────────┘
-        │ 8. store data
-        ▼
-┌─────────────────┐
-│      Items      │
-└─────────────────┘
+└─────────────────┘                    └──────┬──────┘
+        │ 8. store data                       │ 9. deduplicate
+        ▼                                     ▼
+┌─────────────────┐                  ┌─────────────────┐
+│      Items      │◄─────────────────┤  Deduplication  │
+└─────────────────┘                  │    Pipeline     │
+                                     └─────────────────┘
 ```
 
 ### Module hierarchy

@@ -298,6 +464,8 @@ crawlo/
 │   ├── pipeline_manager.py     # pipeline manager
 │   ├── base_pipeline.py        # pipeline base class
 │   ├── console_pipeline.py     # console output pipeline
+│   ├── json_pipeline.py        # JSON storage pipeline
+│   ├── redis_dedup_pipeline.py # Redis dedup pipeline
 │   └── mysql_pipeline.py       # MySQL storage pipeline
 │
 ├── extension/                  # extension components

@@ -335,7 +503,7 @@
 - **QueueManager**: unified queue manager with automatic switching between in-memory and Redis queues
 - **Filter**: request deduplication filter with in-memory and Redis implementations
 - **Middleware**: middleware system for pre- and post-processing of requests/responses
-- **Pipeline**: data-processing pipelines with multiple storage backends (console, databases, etc.)
+- **Pipeline**: data-processing pipelines with multiple storage backends (console, databases, etc.) and deduplication
 - **Spider**: spider base class defining the crawling logic
 
 ### Run modes

@@ -356,12 +524,64 @@ CONCURRENCY = 16
 DOWNLOAD_DELAY = 1.0
 QUEUE_TYPE = 'memory'  # standalone mode
 # QUEUE_TYPE = 'redis'  # distributed mode
+
+# Redis configuration (used in distributed mode)
+REDIS_HOST = 'localhost'
+REDIS_PORT = 6379
+REDIS_DB = 0
+REDIS_PASSWORD = ''
+
+# Data pipeline configuration
+PIPELINES = [
+    'crawlo.pipelines.console_pipeline.ConsolePipeline',
+    'crawlo.pipelines.json_pipeline.JsonPipeline',
+    'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline',  # Redis dedup pipeline
+    'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',      # MySQL storage pipeline
+]
 ```
 
-###
+### MySQL pipeline configuration
 
+Crawlo ships a ready-made MySQL pipeline implementation that makes it easy to store crawled data in a MySQL database:
+
+```python
+# Enable the MySQL pipeline in settings.py
+PIPELINES = [
+    'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',
+]
+
+# MySQL database configuration
+MYSQL_HOST = 'localhost'
+MYSQL_PORT = 3306
+MYSQL_USER = 'your_username'
+MYSQL_PASSWORD = 'your_password'
+MYSQL_DB = 'your_database'
+MYSQL_TABLE = 'your_table_name'
+
+# Optional batch-insert configuration
+MYSQL_BATCH_SIZE = 100
+MYSQL_USE_BATCH = True
 ```
-
+
+MySQL pipeline features:
+- **Async operation**: built on the asyncmy driver for high-performance asynchronous database access
+- **Connection pooling**: connections are managed automatically for efficiency
+- **Batch inserts**: supports batched insertion for higher throughput
+- **Transaction support**: keeps data consistent
+- **Flexible configuration**: custom table name, batch size, and other parameters
+
+### Command-line configuration
+
+```bash
+# Run a single spider
+crawlo run myspider
+
+# Run all spiders
+crawlo run all
+
+# Works correctly from project subdirectories too
+cd subdirectory
+crawlo run myspider
+```
 ```
 
 ## 🧩 Core components
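The batch settings in the MySQL section above (`MYSQL_BATCH_SIZE`, `MYSQL_USE_BATCH`) imply a buffer-and-flush pattern: rows accumulate in memory and are written in one round trip once the buffer fills. The pipeline's internals are not part of this diff, so the following is only a sketch of that pattern with hypothetical names:

```python
# Hypothetical sketch of batched inserts — not AsyncmyMySQLPipeline's actual code.
class BatchBuffer:
    def __init__(self, batch_size: int = 100):
        self.batch_size = batch_size
        self.rows: list[dict] = []

    async def add(self, row: dict, flush) -> None:
        # Accumulate rows; flush once the configured batch size is reached,
        # trading per-row latency for far fewer round trips to MySQL.
        self.rows.append(row)
        if len(self.rows) >= self.batch_size:
            await flush(self.rows)  # e.g. one executemany() inside a transaction
            self.rows.clear()
```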
@@ -370,7 +590,11 @@ crawlo crawl myspider --concurrency=32 --delay=0.5
 A flexible middleware system supporting request preprocessing, response handling, and exception handling.
 
 ### Pipeline system
-
+An extensible data-processing pipeline with multiple storage backends (console, databases, etc.) and deduplication support:
+- **ConsolePipeline**: console output pipeline
+- **JsonPipeline**: JSON file storage pipeline
+- **RedisDedupPipeline**: Redis dedup pipeline; distributed deduplication backed by a Redis set
+- **AsyncmyMySQLPipeline**: MySQL storage pipeline built on the asyncmy driver
 
 ### Extension components
 Feature-enhancing extensions, including logging, monitoring, and performance profiling.
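For readers new to set-based deduplication: the RedisDedupPipeline described above maps naturally onto Redis's `SADD`, which returns 1 only the first time a member is added, so every node in a distributed crawl gets the same atomic answer. A minimal sketch of the idea using the redis-py async client (the pipeline's real key scheme and fingerprinting are not shown in this diff):

```python
import asyncio
import redis.asyncio as redis

async def seen_before(r: redis.Redis, fingerprint: str) -> bool:
    # SADD returns 1 if the member was newly added, 0 if it already existed:
    # an atomic check-and-record shared by all crawler nodes.
    added = await r.sadd("myproject:item_fingerprints", fingerprint)  # hypothetical key
    return added == 0

async def demo():
    r = redis.Redis(host="localhost", port=6379, db=0)
    print(await seen_before(r, "fp-123"))  # False on first sight
    print(await seen_before(r, "fp-123"))  # True on repeat

asyncio.run(demo())
```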
@@ -382,6 +606,7 @@ crawlo crawl myspider --concurrency=32 --delay=0.5
 
 - [API data collection](examples/api_data_collection/) - a simple API data-collection example
 - [Telecom equipment licenses](examples/telecom_licenses_distributed/) - a distributed crawling example
+- [OFweek distributed crawler](examples/ofweek_distributed/) - a more involved distributed crawler example with Redis deduplication
 
 ## 📚 Documentation
 

{crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 crawlo/__init__.py,sha256=jSOsZbDJ_Q5wZV8onSXx5LgNM7Z1q3zCROGdImBDr2I,1373
-crawlo/__version__.py,sha256=
-crawlo/cli.py,sha256=
+crawlo/__version__.py,sha256=jro_fYFaWTpMLcXlRQe53nd8yxygCDFL1sK3eYFcmKI,23
+crawlo/cli.py,sha256=ZrrOKAvqgGJgqoyakzItt-Jroa9JBF9vQG-1lz4wBPM,2094
 crawlo/config.py,sha256=_pORwcVEgjEhrqVaApu51X0Il3TBK6w9aezGnnqYu8Y,9847
 crawlo/config_validator.py,sha256=M118EATR-tITzRSe2oSinV5oh2QsooMCkEJ5WS8ma_0,10155
 crawlo/crawler.py,sha256=24EE7zFPByeYLJnf1K_R9fhJMqaFUjBSa6TuUhlY4TI,37398

@@ -15,9 +15,10 @@ crawlo/cleaners/__init__.py,sha256=lxL-ZWDKW-DdobdgKUQ27wNmBiUhGnD0CVG6HWkX3_o,1
 crawlo/cleaners/data_formatter.py,sha256=iBDHpZBZvn9O7pLkTQilE1TzYJQEc3z3f6HXoVus0f0,7808
 crawlo/cleaners/encoding_converter.py,sha256=G3khLlk0uBeTwIutsWxVUeSuyc1GMC1BDNJDwsU9ryg,4238
 crawlo/cleaners/text_cleaner.py,sha256=16e6WqIIb9qANMiK-vCEl4TvgkId19Aa2W1NMLU-jFQ,6707
-crawlo/commands/__init__.py,sha256=
+crawlo/commands/__init__.py,sha256=orvY6wLOBwGUEJKeF3h_T1fxj8AaQLjngBDd-3xKOE4,392
 crawlo/commands/check.py,sha256=jW8SgfkOS35j4VS7nRZBZdFCBX9CVFez5LR2sfP_H1U,23437
 crawlo/commands/genspider.py,sha256=_3GwFMYK79BuKk__5L0ljuwWwOzN80MeuhRkL4Ql11A,5201
+crawlo/commands/help.py,sha256=jJ8GbFJcJVQytPIYsEAMT6v58fNo62qd-G3G3elB-1Q,5011
 crawlo/commands/list.py,sha256=octTk0QZhapiyM7WgCPersP2v3MesthbJeG9vMqVFOs,5936
 crawlo/commands/run.py,sha256=Go9hAEUMuG3GphBgemG5S5W4MF39XOxp7-E06rX-pTU,11043
 crawlo/commands/startproject.py,sha256=UYGelGY4dM6Zu3U4G5m8snKqbsfgszhvfpAJLl5b5tM,15772

@@ -83,10 +84,10 @@ crawlo/spider/__init__.py,sha256=xAH6NfE_6K2aY_VSL9DoGjcmMHJDd5Nxr7TG1Y8vQAE,210
 crawlo/templates/crawlo.cfg.tmpl,sha256=lwiUVe5sFixJgHFEjn1OtbAeyWsECOrz37uheuVtulk,240
 crawlo/templates/project/__init__.py.tmpl,sha256=aQnHaOjMSkTviOC8COUX0fKymuyf8lx2tGduxkMkXEE,61
 crawlo/templates/project/items.py.tmpl,sha256=8_3DBA8HrS2XbfHzsMZNJiZbFY6fDJUUMFoFti_obJk,314
-crawlo/templates/project/middlewares.py.tmpl,sha256=
-crawlo/templates/project/pipelines.py.tmpl,sha256=
-crawlo/templates/project/run.py.tmpl,sha256=
-crawlo/templates/project/settings.py.tmpl,sha256=
+crawlo/templates/project/middlewares.py.tmpl,sha256=Em7KdWxF3FE5OzXwYxRkQtWr74YvapqhrI8Kij7J6dc,3840
+crawlo/templates/project/pipelines.py.tmpl,sha256=j9oqEhCezmmHlBhMWgYtlgup4jhWnMlv6AEiAOHODkg,2704
+crawlo/templates/project/run.py.tmpl,sha256=yOxpPkyffXLwa7NDx2Y96c8U-QN81_3mqZSuB526DNs,1271
+crawlo/templates/project/settings.py.tmpl,sha256=M2tga44ki-BpgdKY506mcJ-iEw4AMx4pWQKUdJyRAkM,12034
 crawlo/templates/project/settings_distributed.py.tmpl,sha256=Nli-qR-UB4TXAq4mXjx17y-yAv46NwwpcVidjGeM00A,4321
 crawlo/templates/project/settings_gentle.py.tmpl,sha256=ljXf9vKV2c-cN8yvf5U4UWsPWrMVmJUfbXladIdS2mg,3320
 crawlo/templates/project/settings_high_performance.py.tmpl,sha256=3wf8fFYZ5EVE2742JlcwwrPF794vEIEmbxFSbqyGnJQ,5434

@@ -182,8 +183,8 @@ tests/test_template_content.py,sha256=URwjlAzMCdUN0sW_OupUcuSNMxp1OKgW79JOpkLPXn
 tests/test_template_redis_key.py,sha256=dOFutic8CL3tOzGbYhWbMrYiXZ8R3fhNoF5VKax5Iy0,4946
 tests/test_tools.py,sha256=fgzXL2L7eBV_nGjeMxH8IMhfc0dviQ80XgzZkJp_4dA,5266
 tests/tools_example.py,sha256=uXNS4xXJ-OD_xInAn2zjKLG_nlbgVGXZLoJtfhaG9lI,7926
-crawlo-1.1.
-crawlo-1.1.
-crawlo-1.1.
-crawlo-1.1.
-crawlo-1.1.
+crawlo-1.1.8.dist-info/METADATA,sha256=nszYOx5Hd5KbjBikYameYCyotd_pDcGLMSsGSxV_YpU,26174
+crawlo-1.1.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+crawlo-1.1.8.dist-info/entry_points.txt,sha256=5HoVoTSPxI8SCa5B7pQYxLSrkOdiunyO9tqNsLMv52g,43
+crawlo-1.1.8.dist-info/top_level.txt,sha256=keG_67pbZ_wZL2dmDRA9RMaNHTaV_x_oxZ9DKNgwvR0,22
+crawlo-1.1.8.dist-info/RECORD,,

{crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/WHEEL
File without changes

{crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/entry_points.txt
File without changes

{crawlo-1.1.6.dist-info → crawlo-1.1.8.dist-info}/top_level.txt
File without changes