crawlo 1.3.2__py3-none-any.whl → 1.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +63 -63
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +322 -314
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +365 -365
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +256 -256
- crawlo/crawler.py +1166 -1168
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +226 -226
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +39 -39
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +52 -45
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +234 -234
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +187 -187
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +318 -318
- crawlo/pipelines/pipeline_manager.py +75 -75
- crawlo/pipelines/redis_dedup_pipeline.py +166 -166
- crawlo/project.py +325 -297
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +379 -379
- crawlo/queue/redis_priority_queue.py +306 -306
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +225 -225
- crawlo/settings/setting_manager.py +198 -198
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +266 -266
- crawlo/templates/project/settings_distributed.py.tmpl +179 -179
- crawlo/templates/project/settings_gentle.py.tmpl +60 -60
- crawlo/templates/project/settings_high_performance.py.tmpl +130 -130
- crawlo/templates/project/settings_minimal.py.tmpl +34 -34
- crawlo/templates/project/settings_simple.py.tmpl +101 -101
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/run.py.tmpl +38 -38
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +388 -388
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +199 -146
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +351 -351
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/METADATA +1020 -1020
- crawlo-1.3.3.dist-info/RECORD +219 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +107 -107
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_pipelines.py +66 -66
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/verify_distributed.py +117 -117
- crawlo-1.3.2.dist-info/RECORD +0 -219
- {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/WHEEL +0 -0
- {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.3.2.dist-info → crawlo-1.3.3.dist-info}/top_level.txt +0 -0
crawlo/templates/project/settings_high_performance.py.tmpl
CHANGED

@@ -1,131 +1,131 @@ (lines 1-130 are removed and re-added with identical content; shown once below, followed by the unchanged context line 131)

# -*- coding: UTF-8 -*-
"""
High-performance mode configuration template
Optimized for large-scale, high-concurrency crawling
"""

# ============================== Project basics ==============================
PROJECT_NAME = '{{project_name}}'

# ============================== High-performance run mode ==============================
# Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
RUN_MODE = 'standalone'  # Standalone mode, suited to development and small-scale crawls

# Concurrency settings
CONCURRENCY = 32  # High concurrency to make full use of system resources
DOWNLOAD_DELAY = 0.1  # Minimal delay for higher throughput
RANDOMNESS = False  # Disable randomized delay for consistent performance

# ============================== Queue settings ==============================

# Queue type: 'auto' (auto-select), 'memory' (in-memory), 'redis' (distributed)
QUEUE_TYPE = 'auto'  # Auto-detect; uses the Redis queue when Redis is available
SCHEDULER_MAX_QUEUE_SIZE = 5000
SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
QUEUE_MAX_RETRIES = 3
QUEUE_TIMEOUT = 300

# ============================== Deduplication settings ==============================

# In high-performance mode, use Redis-based dedup when Redis is available, in-memory dedup otherwise
DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.redis_dedup_pipeline.RedisDedupPipeline'
FILTER_CLASS = 'crawlo.filters.aioredis_filter.AioRedisFilter'

# --- Redis settings (for distributed dedup and queues) ---
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
REDIS_PASSWORD = ''  # Fill in if a password is set

# Build the URL depending on whether a password is set
if REDIS_PASSWORD:
    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
else:
    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'

# Redis key settings now live in the individual components, under a unified naming scheme:
# crawlo:{project_name}:filter:fingerprint (request dedup)
# crawlo:{project_name}:item:fingerprint (item dedup)
# crawlo:{project_name}:queue:requests (request queue)
# crawlo:{project_name}:queue:processing (in-flight queue)
# crawlo:{project_name}:queue:failed (failed queue)

REDIS_TTL = 0
CLEANUP_FP = 0
FILTER_DEBUG = True
DECODE_RESPONSES = True

# ============================== User-defined middleware ==============================
# Note: the framework's default middlewares load automatically; add to or override them here

# Middleware list (framework defaults + user-defined middlewares)
# MIDDLEWARES = [
#     '{{project_name}}.middlewares.CustomMiddleware',  # Example custom middleware
# ]

# ============================== User-defined pipelines ==============================
# Note: the framework's default pipelines load automatically; add to or override them here

# Pipeline list (framework defaults + user-defined pipelines)
# PIPELINES = [
#     '{{project_name}}.pipelines.DatabasePipeline',  # Custom database pipeline
#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
#     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
# ]

# Explicitly put the default dedup pipeline at the head of the pipeline list
# PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)

# ============================== User-defined extensions ==============================
# Note: the framework's default extensions load automatically; add to or override them here

# Extension list (framework defaults + user-defined extensions)
# EXTENSIONS = [
#     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # Memory monitoring
#     'crawlo.extension.request_recorder.RequestRecorderExtension',  # Request recording
#     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # Performance profiling
#     'crawlo.extension.health_check.HealthCheckExtension',  # Health checks
# ]

# ============================== Domain filtering ==============================
# OffsiteMiddleware settings, restricting the crawler to the listed domains
# To enable domain filtering, uncomment and set the allowed-domain list
# ALLOWED_DOMAINS = ['example.com', 'www.example.com']

# ============================== Logging ==============================

LOG_LEVEL = 'INFO'
STATS_DUMP = True
LOG_FILE = f'logs/{{project_name}}.log'
LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
LOG_ENCODING = 'utf-8'

# ============================== Performance tuning ==============================

# Connection pool settings
CONNECTION_POOL_LIMIT = 100
DOWNLOAD_MAXSIZE = 10 * 1024 * 1024  # 10MB
DOWNLOAD_WARN_SIZE = 1024 * 1024  # 1MB

# Downloader tuning
DOWNLOADER_HEALTH_CHECK = True
HEALTH_CHECK_INTERVAL = 30

# Request statistics
REQUEST_STATS_ENABLED = True
STATS_RESET_ON_START = False

# HttpX downloader specifics
HTTPX_HTTP2 = True
HTTPX_FOLLOW_REDIRECTS = True

# AioHttp downloader specifics
AIOHTTP_AUTO_DECOMPRESS = True
AIOHTTP_FORCE_CLOSE = False

# General tuning
CONNECTION_TTL_DNS_CACHE = 300
CONNECTION_KEEPALIVE_TIMEOUT = 15

# Performance monitoring
ENABLE_PERFORMANCE_MONITORING = True
MEMORY_USAGE_WARNING_THRESHOLD = 800  # MB
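The only executable logic in this template is the conditional REDIS_URL build. A standalone sketch of the same pattern, with an illustrative password filled in (the template itself ships with REDIS_PASSWORD = ''):

# Same URL-building pattern as the template above; the password value here
# is illustrative, not part of the shipped template.
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
REDIS_PASSWORD = 's3cret'

if REDIS_PASSWORD:
    # Password set: redis://:<password>@<host>:<port>/0
    REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
else:
    # No password: redis://<host>:<port>/0
    REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'

print(REDIS_URL)  # redis://:s3cret@127.0.0.1:6379/0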
crawlo/templates/project/settings_minimal.py.tmpl
CHANGED

@@ -1,35 +1,35 @@ (lines 1-34 are removed and re-added with identical content; shown once below, followed by the unchanged context line 35)

# -*- coding: UTF-8 -*-
"""
{{project_name}} minimal configuration file
=============================
Contains only the most basic, commonly used settings; suited to quick starts and simple projects.
"""

# ============================== Project basics ==============================
PROJECT_NAME = '{{project_name}}'

# ============================== Core settings ==============================
# Concurrency
CONCURRENCY = 4

# Download delay (seconds)
DOWNLOAD_DELAY = 1.0

# ============================== Data storage ==============================
# JSON file storage (enabled by default)
PIPELINES = [
    'crawlo.pipelines.json_pipeline.JsonPipeline',
]

# ============================== Domain filtering ==============================
# OffsiteMiddleware settings, restricting the crawler to the listed domains
# To enable domain filtering, uncomment and set the allowed-domain list
# ALLOWED_DOMAINS = ['example.com', 'www.example.com']

# ============================== Logging ==============================
LOG_LEVEL = 'INFO'
LOG_FILE = f'logs/{{project_name}}.log'
STATS_DUMP = True

# ============================== Custom settings ==============================
# Add project-specific settings here
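The minimal template keeps PIPELINES explicit, so extra pipelines are appended to the same list. A sketch of an extended list; CustomPipeline is a hypothetical user class, following the '{{project_name}}.pipelines.*' convention used in the other templates' commented examples:

PIPELINES = [
    'crawlo.pipelines.json_pipeline.JsonPipeline',  # default from this template
    '{{project_name}}.pipelines.CustomPipeline',    # hypothetical user pipeline
]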
crawlo/templates/project/settings_simple.py.tmpl
CHANGED

@@ -1,102 +1,102 @@ (lines 1-101 are removed and re-added with identical content; shown once below, followed by the unchanged context line 102)

# -*- coding: UTF-8 -*-
"""
Simple-mode configuration template
Minimal settings, suited to quick starts and simple projects
"""

# ============================== Project basics ==============================
PROJECT_NAME = '{{project_name}}'

# ============================== Simple run mode ==============================
# Run mode: 'standalone', 'distributed', or 'auto' (auto-detect)
RUN_MODE = 'standalone'  # Standalone mode, suited to development and small-scale crawls

# Concurrency settings
CONCURRENCY = 4  # Low concurrency to reduce resource usage
DOWNLOAD_DELAY = 1.0  # Longer delay to reduce load on the target site

# ============================== Queue settings ==============================

# Note: the framework ships default queue settings; the items below rarely need changes
# To customize, uncomment and adjust the values

# Queue type: 'auto' (auto-select), 'memory' (in-memory), 'redis' (distributed)
# QUEUE_TYPE = 'auto'  # Auto-detect; uses the Redis queue when Redis is available
# SCHEDULER_MAX_QUEUE_SIZE = 1000
# SCHEDULER_QUEUE_NAME = f'crawlo:{{project_name}}:queue:requests'
# QUEUE_MAX_RETRIES = 3
# QUEUE_TIMEOUT = 300

# ============================== Deduplication settings ==============================

# Note: the framework ships default dedup settings; the items below rarely need changes
# To customize, uncomment and adjust the values

# Simple mode uses the in-memory dedup pipeline and filter
# DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'
# FILTER_CLASS = 'crawlo.filters.memory_filter.MemoryFilter'

# --- Redis settings (for distributed dedup and queues) ---
# REDIS_HOST = '127.0.0.1'
# REDIS_PORT = 6379
# REDIS_PASSWORD = ''  # Fill in if a password is set

# Build the URL depending on whether a password is set
# if REDIS_PASSWORD:
#     REDIS_URL = f'redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/0'
# else:
#     REDIS_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}/0'

# Redis key settings now live in the individual components, under a unified naming scheme:
# crawlo:{project_name}:filter:fingerprint (request dedup)
# crawlo:{project_name}:item:fingerprint (item dedup)
# crawlo:{project_name}:queue:requests (request queue)
# crawlo:{project_name}:queue:processing (in-flight queue)
# crawlo:{project_name}:queue:failed (failed queue)

# REDIS_TTL = 0
# CLEANUP_FP = 0
# FILTER_DEBUG = True
# DECODE_RESPONSES = True

# ============================== Domain filtering ==============================
# OffsiteMiddleware settings, restricting the crawler to the listed domains
# To enable domain filtering, uncomment and set the allowed-domain list
# ALLOWED_DOMAINS = ['example.com', 'www.example.com']

# ============================== User-defined middleware ==============================
# Note: the framework's default middlewares load automatically; add to or override them here

# Middleware list (framework defaults + user-defined middlewares)
# MIDDLEWARES = [
#     '{{project_name}}.middlewares.CustomMiddleware',  # Example custom middleware
# ]

# ============================== User-defined pipelines ==============================
# Note: the framework's default pipelines load automatically; add to or override them here

# Pipeline list (framework defaults + user-defined pipelines)
# PIPELINES = [
#     '{{project_name}}.pipelines.DatabasePipeline',  # Custom database pipeline
#     'crawlo.pipelines.mysql_pipeline.AsyncmyMySQLPipeline',  # MySQL storage
#     'crawlo.pipelines.mongo_pipeline.MongoPipeline',  # MongoDB storage
# ]

# ============================== User-defined extensions ==============================
# Note: the framework's default extensions load automatically; add to or override them here

# Extension list (framework defaults + user-defined extensions)
# EXTENSIONS = [
#     'crawlo.extension.memory_monitor.MemoryMonitorExtension',  # Memory monitoring
#     'crawlo.extension.request_recorder.RequestRecorderExtension',  # Request recording
#     'crawlo.extension.performance_profiler.PerformanceProfilerExtension',  # Performance profiling
#     'crawlo.extension.health_check.HealthCheckExtension',  # Health checks
# ]

# ============================== Logging ==============================

LOG_LEVEL = 'INFO'
STATS_DUMP = True
LOG_FILE = f'logs/{{project_name}}.log'
LOG_FORMAT = '%(asctime)s - [%(name)s] - %(levelname)s: %(message)s'
LOG_ENCODING = 'utf-8'
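The simple template leaves dedup commented out; wiring it up by hand can follow the hint from the high-performance template (PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)). A sketch under that assumption; the dotted paths are copied from the commented blocks above, and the insert-first ordering is an assumption about how crawlo consumes the list:

DEFAULT_DEDUP_PIPELINE = 'crawlo.pipelines.memory_dedup_pipeline.MemoryDedupPipeline'

PIPELINES = [
    '{{project_name}}.pipelines.DatabasePipeline',  # custom pipeline from the commented example
]

# Put dedup first so duplicate items are dropped before storage
PIPELINES.insert(0, DEFAULT_DEDUP_PIPELINE)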
crawlo/templates/project/spiders/__init__.py.tmpl
CHANGED

@@ -1,6 +1,6 @@ (lines 1-5 are removed and re-added with identical content; shown once below, followed by the unchanged context line 6)

# -*- coding: UTF-8 -*-
"""
{{project_name}}.spiders
========================
Holds all of the project's spiders.
"""
crawlo/templates/run.py.tmpl
CHANGED
@@ -1,39 +1,39 @@ (lines 1-38 are removed and re-added with identical content; shown once below, followed by the unchanged context line 39)

#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
{{project_name}} project run script
============================
A simplified spider launcher based on the Crawlo framework.
"""
import sys
import asyncio

from crawlo.crawler import CrawlerProcess


def main():
    """Entry point: run a fixed spider."""
    print("🚀 Starting the {{project_name}} spider")

    # Create the crawler process (loads the default configuration automatically)
    try:
        # Make sure the spider modules get imported correctly
        spider_modules = ['{{project_name}}.spiders']
        process = CrawlerProcess(spider_modules=spider_modules)
        print("✅ Crawler process initialized successfully")

        # Run the fixed spider
        # TODO: replace 'your_spider_name' with the actual spider name
        asyncio.run(process.crawl('your_spider_name'))

        print("✅ Spider run finished")

    except Exception as e:
        print(f"❌ Run failed: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == '__main__':
    main()
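Once the TODO is filled in, the generated run.py reduces to two calls. A minimal sketch, assuming a rendered project name of 'myproject' and a spider registered as 'news_spider' (both hypothetical placeholders; the two calls mirror the template verbatim):

import asyncio
from crawlo.crawler import CrawlerProcess

# 'myproject' stands in for the rendered {{project_name}};
# 'news_spider' stands in for a real spider name under myproject.spiders.
process = CrawlerProcess(spider_modules=['myproject.spiders'])
asyncio.run(process.crawl('news_spider'))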