crawlo-1.2.0-py3-none-any.whl → crawlo-1.2.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +81 -65
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +143 -133
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +292 -292
- crawlo/commands/startproject.py +418 -418
- crawlo/commands/stats.py +188 -188
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +252 -252
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +354 -354
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +143 -143
- crawlo/crawler.py +1027 -1027
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +220 -220
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +213 -213
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -280
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +53 -53
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -32
- crawlo/middleware/download_delay.py +105 -28
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +116 -0
- crawlo/middleware/proxy.py +366 -272
- crawlo/middleware/request_ignore.py +88 -30
- crawlo/middleware/response_code.py +164 -18
- crawlo/middleware/response_filter.py +138 -26
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +224 -224
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +316 -316
- crawlo/pipelines/pipeline_manager.py +61 -61
- crawlo/pipelines/redis_dedup_pipeline.py +167 -167
- crawlo/project.py +187 -187
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +337 -337
- crawlo/queue/redis_priority_queue.py +298 -298
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +226 -219
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +130 -130
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -109
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/run.py.tmpl +45 -45
- crawlo/templates/project/settings.py.tmpl +327 -326
- crawlo/templates/project/settings_distributed.py.tmpl +119 -119
- crawlo/templates/project/settings_gentle.py.tmpl +94 -94
- crawlo/templates/project/settings_high_performance.py.tmpl +151 -151
- crawlo/templates/project/settings_simple.py.tmpl +68 -68
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +143 -141
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +260 -260
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +359 -359
- crawlo/utils/env_config.py +105 -105
- crawlo/utils/error_handler.py +125 -125
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +343 -343
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +284 -284
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +334 -334
- crawlo/utils/redis_key_validator.py +199 -199
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +219 -219
- crawlo/utils/spider_loader.py +62 -62
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/METADATA +692 -697
- crawlo-1.2.2.dist-info/RECORD +220 -0
- examples/__init__.py +7 -7
- examples/aiohttp_settings.py +42 -0
- examples/curl_cffi_settings.py +41 -0
- examples/default_header_middleware_example.py +107 -0
- examples/default_header_spider_example.py +129 -0
- examples/download_delay_middleware_example.py +160 -0
- examples/httpx_settings.py +42 -0
- examples/multi_downloader_proxy_example.py +81 -0
- examples/offsite_middleware_example.py +55 -0
- examples/offsite_spider_example.py +107 -0
- examples/proxy_spider_example.py +166 -0
- examples/request_ignore_middleware_example.py +51 -0
- examples/request_ignore_spider_example.py +99 -0
- examples/response_code_middleware_example.py +52 -0
- examples/response_filter_middleware_example.py +67 -0
- examples/tong_hua_shun_settings.py +62 -0
- examples/tong_hua_shun_spider.py +170 -0
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +173 -0
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +159 -0
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +222 -0
- tests/test_downloader_proxy_compatibility.py +269 -0
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_offsite_middleware.py +222 -0
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +265 -0
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +122 -0
- tests/test_proxy_middleware_enhanced.py +217 -0
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +196 -0
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +183 -0
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +350 -0
- tests/test_response_filter_middleware.py +428 -0
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +242 -0
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.2.0.dist-info/RECORD +0 -190
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/WHEEL +0 -0
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.0.dist-info → crawlo-1.2.2.dist-info}/top_level.txt +0 -0
crawlo/project.py
CHANGED
@@ -1,188 +1,188 @@
(The diff viewer marked lines 1-187 as removed and re-added with identical visible content; line 188, the final return, is unchanged context. This usually indicates a whitespace or line-ending rewrite. The file is shown once below, with its Chinese docstrings, comments, and log messages translated to English.)

#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Crawlo project initialization module.

Responsibilities:
1. Search upward for the project root (via crawlo.cfg or settings.py)
2. Add the project root to sys.path
3. Load the settings module
4. Return a SettingManager instance
"""
import os
import sys
import configparser
from importlib import import_module
from inspect import iscoroutinefunction
from typing import Callable, Optional, Tuple

from crawlo.utils.log import get_logger
from crawlo.settings.setting_manager import SettingManager

logger = get_logger(__name__)


def _find_project_root(start_path: str = ".") -> Optional[str]:
    """
    Find the project root directory by searching upward from the given path.
    A directory qualifies if:
    1. it contains 'crawlo.cfg', or
    2. it contains both '__init__.py' and 'settings.py' (i.e. it is a Python package)
    """
    path = os.path.abspath(start_path)

    # First check the current directory and its subdirectories
    for root, dirs, files in os.walk(path):
        if "crawlo.cfg" in files:
            cfg_path = os.path.join(root, "crawlo.cfg")
            logger.info(f"✅ Found project config file: {cfg_path}")
            return root

    # If nothing was found in the subdirectories, walk upward
    while True:
        cfg_file = os.path.join(path, "crawlo.cfg")
        if os.path.isfile(cfg_file):
            logger.info(f"✅ Found project config file: {cfg_file}")
            return path

        settings_file = os.path.join(path, "settings.py")
        init_file = os.path.join(path, "__init__.py")
        if os.path.isfile(settings_file) and os.path.isfile(init_file):
            logger.info(f"✅ Found project module: {path}")
            return path

        parent = os.path.dirname(path)
        if parent == path:
            break
        path = parent

    logger.warning("❌ Crawlo project root not found. Make sure you run from a directory containing 'crawlo.cfg' or 'settings.py'.")
    return None


def _get_settings_module_from_cfg(cfg_path: str) -> str:
    """Read the settings module path from crawlo.cfg"""
    config = configparser.ConfigParser()
    try:
        config.read(cfg_path, encoding="utf-8")
        if config.has_section("settings") and config.has_option("settings", "default"):
            module_path = config.get("settings", "default")
            logger.info(f"📄 Loading settings module from crawlo.cfg: {module_path}")
            return module_path
        else:
            raise RuntimeError(f"Config file is missing the [settings] section or the 'default' option: {cfg_path}")
    except Exception as e:
        raise RuntimeError(f"Failed to parse crawlo.cfg: {e}")


def get_settings(custom_settings: Optional[dict] = None) -> SettingManager:
    """
    Get the settings manager instance (main entry point).

    Args:
        custom_settings: runtime overrides that take precedence over settings.py

    Returns:
        SettingManager: instance with all settings loaded
    """
    logger.info("🚀 Initializing Crawlo project configuration...")

    # 1. Locate the project root
    project_root = _find_project_root()
    if not project_root:
        raise RuntimeError("Crawlo project not found; please check the project structure")

    # 2. Determine the settings module
    settings_module_path = None
    cfg_file = os.path.join(project_root, "crawlo.cfg")

    if os.path.isfile(cfg_file):
        settings_module_path = _get_settings_module_from_cfg(cfg_file)
    else:
        # Infer: <project directory name>.settings
        project_name = os.path.basename(project_root)
        settings_module_path = f"{project_name}.settings"
        logger.warning(f"⚠️ crawlo.cfg not found; inferred settings module: {settings_module_path}")

    # 3. Inject into sys.path
    project_root_str = os.path.abspath(project_root)
    if project_root_str not in sys.path:
        sys.path.insert(0, project_root_str)
        logger.info(f"📁 Project root added to sys.path: {project_root_str}")

    # 4. Load the SettingManager
    logger.info(f"⚙️ Loading settings module: {settings_module_path}")
    settings = SettingManager()

    try:
        settings.set_settings(settings_module_path)
        logger.info("✅ Settings module loaded successfully")
    except Exception as e:
        raise ImportError(f"Failed to load settings module '{settings_module_path}': {e}")

    # 5. Merge runtime overrides
    if custom_settings:
        settings.update_attributes(custom_settings)
        logger.info(f"🔧 Applied runtime custom settings: {list(custom_settings.keys())}")

    logger.info("🎉 Crawlo project configuration initialized!")
    return settings


def load_class(_path):
    if not isinstance(_path, str):
        if callable(_path):
            return _path
        else:
            raise TypeError(f"args expect str or object, got {_path}")

    module_name, class_name = _path.rsplit('.', 1)

    try:
        module = import_module(module_name)
    except ImportError as e:
        # Try an alternative import strategy
        try:
            # Try importing the full dotted path directly
            module = import_module(_path)
            return module
        except ImportError:
            pass
        raise ImportError(f"Cannot import module {module_name}: {e}")

    try:
        cls = getattr(module, class_name)
    except AttributeError:
        # Provide a more detailed error message
        available_attrs = [attr for attr in dir(module) if not attr.startswith('_')]
        raise NameError(f"Module {module_name!r} has no class named {class_name!r}. Available attributes: {available_attrs}")
    return cls


def merge_settings(spider, settings):
    spider_name = getattr(spider, 'name', 'UnknownSpider')
    # Verify that settings is a SettingManager instance
    if not hasattr(settings, 'update_attributes'):
        logger.error(f"merge_settings received settings that are not a SettingManager instance: {type(settings)}")
        # If it is a dict, build a new SettingManager from it
        if isinstance(settings, dict):
            from crawlo.settings.setting_manager import SettingManager
            new_settings = SettingManager()
            new_settings.update_attributes(settings)
            settings = new_settings
        else:
            logger.error("Unsupported settings type")
            return

    if hasattr(spider, 'custom_settings'):
        custom_settings = getattr(spider, 'custom_settings')
        settings.update_attributes(custom_settings)
    else:
        logger.debug(f"Spider '{spider_name}' has no custom_settings; skipping merge")  # added logging


async def common_call(func: Callable, *args, **kwargs):
    if iscoroutinefunction(func):
        return await func(*args, **kwargs)
    else:
        return func(*args, **kwargs)
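For orientation, a minimal usage sketch of this module's entry points follows. It assumes it runs inside a generated Crawlo project (a directory containing crawlo.cfg); the setting names CONCURRENCY and DOWNLOAD_DELAY and the ConsolePipeline class name are illustrative assumptions, not confirmed API.

import asyncio
from crawlo.project import get_settings, load_class, common_call

# Load <project>/settings.py, then apply runtime overrides on top of it.
settings = get_settings(custom_settings={
    "CONCURRENCY": 8,        # assumed setting name, for illustration
    "DOWNLOAD_DELAY": 1.0,   # assumed setting name, for illustration
})

# Resolve a dotted path to an attribute; callables pass through unchanged.
ConsolePipeline = load_class("crawlo.pipelines.console_pipeline.ConsolePipeline")  # assumed class name

# common_call awaits coroutine functions and calls plain functions directly,
# so callers do not need to know which kind they were given.
async def demo():
    print(await common_call(lambda: "called synchronously"))

asyncio.run(demo())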
crawlo/queue/pqueue.py
CHANGED
@@ -1,37 +1,37 @@
(As with project.py above, all 37 lines were removed and re-added with identical visible content, which usually indicates a whitespace or line-ending rewrite. The file is shown once below, translated to English.)

# -*- coding:UTF-8 -*-
import json
import sys
import asyncio
from asyncio import PriorityQueue
from typing import Optional


from crawlo import Request


class SpiderPriorityQueue(PriorityQueue):
    """Async priority queue with timeout support"""

    def __init__(self, maxsize: int = 0) -> None:
        """Initialize the queue; a maxsize of 0 means no size limit"""
        super().__init__(maxsize)

    async def get(self, timeout: float = 0.1) -> Optional[Request]:
        """
        Asynchronously get an item from the queue, with a timeout.

        Args:
            timeout: timeout in seconds, 0.1 by default

        Returns:
            A queue item (priority, value), or None on timeout
        """
        try:
            # Pick the timeout implementation based on the Python version
            if sys.version_info >= (3, 11):
                async with asyncio.timeout(timeout):
                    return await super().get()
            else:
                return await asyncio.wait_for(super().get(), timeout=timeout)
        except asyncio.TimeoutError:
            return None
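A minimal sketch of the timeout behaviour shown above, assuming queue items are (priority, value) tuples as the docstring describes; plain strings stand in for the Request objects the framework would normally store.

import asyncio
from crawlo.queue.pqueue import SpiderPriorityQueue

async def demo():
    q = SpiderPriorityQueue()

    # Empty queue: get() returns None after the timeout instead of raising.
    assert await q.get(timeout=0.1) is None

    await q.put((5, "low priority"))
    await q.put((0, "high priority"))

    # Items with lower priority values are dequeued first.
    print(await q.get(timeout=0.1))  # (0, 'high priority')
    print(await q.get(timeout=0.1))  # (5, 'low priority')

asyncio.run(demo())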