crawlo 1.2.3__py3-none-any.whl → 1.2.4__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +61 -61
- crawlo/__version__.py +1 -1
- crawlo/cleaners/__init__.py +60 -60
- crawlo/cleaners/data_formatter.py +225 -225
- crawlo/cleaners/encoding_converter.py +125 -125
- crawlo/cleaners/text_cleaner.py +232 -232
- crawlo/cli.py +81 -81
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +144 -142
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +323 -292
- crawlo/commands/startproject.py +420 -417
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +186 -186
- crawlo/config.py +312 -312
- crawlo/config_validator.py +251 -251
- crawlo/core/__init__.py +2 -2
- crawlo/core/engine.py +354 -354
- crawlo/core/processor.py +40 -40
- crawlo/core/scheduler.py +143 -143
- crawlo/crawler.py +1110 -1027
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +107 -107
- crawlo/downloader/__init__.py +266 -266
- crawlo/downloader/aiohttp_downloader.py +220 -220
- crawlo/downloader/cffi_downloader.py +256 -256
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +37 -37
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +57 -57
- crawlo/extension/log_stats.py +81 -81
- crawlo/extension/logging_extension.py +43 -43
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +280 -280
- crawlo/filters/memory_filter.py +269 -269
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +21 -21
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +131 -131
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +114 -114
- crawlo/middleware/proxy.py +367 -367
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +163 -163
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +211 -211
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +338 -338
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +222 -222
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +115 -115
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +317 -317
- crawlo/pipelines/pipeline_manager.py +61 -61
- crawlo/pipelines/redis_dedup_pipeline.py +165 -165
- crawlo/project.py +279 -187
- crawlo/queue/pqueue.py +37 -37
- crawlo/queue/queue_manager.py +337 -337
- crawlo/queue/redis_priority_queue.py +298 -298
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +217 -226
- crawlo/settings/setting_manager.py +122 -122
- crawlo/spider/__init__.py +639 -639
- crawlo/stats_collector.py +59 -59
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +30 -30
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/run.py.tmpl +47 -45
- crawlo/templates/project/settings.py.tmpl +350 -325
- crawlo/templates/project/settings_distributed.py.tmpl +160 -121
- crawlo/templates/project/settings_gentle.py.tmpl +133 -94
- crawlo/templates/project/settings_high_performance.py.tmpl +155 -151
- crawlo/templates/project/settings_simple.py.tmpl +108 -68
- crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/tools/__init__.py +182 -182
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +35 -35
- crawlo/tools/distributed_coordinator.py +386 -386
- crawlo/tools/retry_mechanism.py +220 -220
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/utils/__init__.py +35 -35
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/date_tools.py +290 -290
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +105 -105
- crawlo/utils/error_handler.py +123 -123
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +128 -128
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +334 -334
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +218 -218
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/METADATA +764 -692
- crawlo-1.2.4.dist-info/RECORD +206 -0
- examples/__init__.py +7 -7
- tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +81 -81
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +236 -236
- tests/cleaners_example.py +160 -160
- tests/config_validation_demo.py +102 -102
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/redis_key_validation_demo.py +130 -130
- tests/response_improvements_example.py +144 -144
- tests/test_advanced_tools.py +148 -148
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_cleaners.py +54 -54
- tests/test_comprehensive.py +146 -146
- tests/test_config_validator.py +193 -193
- tests/test_crawlo_proxy_integration.py +172 -172
- tests/test_date_tools.py +123 -123
- tests/test_default_header_middleware.py +158 -158
- tests/test_double_crawlo_fix.py +207 -207
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_final_validation.py +153 -153
- tests/test_framework_env_usage.py +103 -103
- tests/test_integration.py +356 -356
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_offsite_middleware.py +221 -221
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +176 -176
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +241 -241
- tests/test_scheduler.py +241 -241
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +153 -153
- tests/tools_example.py +257 -257
- crawlo-1.2.3.dist-info/RECORD +0 -222
- examples/aiohttp_settings.py +0 -42
- examples/curl_cffi_settings.py +0 -41
- examples/default_header_middleware_example.py +0 -107
- examples/default_header_spider_example.py +0 -129
- examples/download_delay_middleware_example.py +0 -160
- examples/httpx_settings.py +0 -42
- examples/multi_downloader_proxy_example.py +0 -81
- examples/offsite_middleware_example.py +0 -55
- examples/offsite_spider_example.py +0 -107
- examples/proxy_spider_example.py +0 -166
- examples/request_ignore_middleware_example.py +0 -51
- examples/request_ignore_spider_example.py +0 -99
- examples/response_code_middleware_example.py +0 -52
- examples/response_filter_middleware_example.py +0 -67
- examples/tong_hua_shun_settings.py +0 -62
- examples/tong_hua_shun_spider.py +0 -170
- {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/WHEEL +0 -0
- {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/entry_points.txt +0 -0
- {crawlo-1.2.3.dist-info → crawlo-1.2.4.dist-info}/top_level.txt +0 -0
examples/request_ignore_spider_example.py
DELETED

@@ -1,99 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-Spider example using RequestIgnoreMiddleware
-Shows how a real spider can rely on RequestIgnoreMiddleware to handle ignored requests
-"""
-
-from crawlo.spider import Spider
-from crawlo.network.request import Request
-from crawlo.exceptions import IgnoreRequestError
-
-
-class IgnoreExampleSpider(Spider):
-    """
-    Example spider demonstrating how RequestIgnoreMiddleware is used
-    """
-
-    # Spider name
-    name = "ignore_example_spider"
-
-    # Custom settings
-    custom_settings = {
-        # Download delay (seconds)
-        'DOWNLOAD_DELAY': 1,
-
-        # Concurrency
-        'CONCURRENCY': 4,
-
-        # Log level
-        'LOG_LEVEL': 'INFO',
-    }
-
-    def start_requests(self):
-        """
-        Generate the initial requests
-        """
-        urls = [
-            'https://httpbin.org/status/200',  # normal request
-            'https://httpbin.org/status/404',  # 404 request
-            'https://httpbin.org/status/500',  # 500 request
-        ]
-
-        for url in urls:
-            yield Request(url=url, callback=self.parse_response)
-
-        # Generate a few requests that will be ignored
-        yield Request(url='https://example.com/ignore1', callback=self.parse_response)
-        yield Request(url='https://example.com/ignore2', callback=self.parse_response)
-
-    async def parse_response(self, response):
-        """
-        Handle the response
-        """
-        self.logger.info(f"Received response: {response.url} - status code: {response.status_code}")
-
-        # Simulate a condition under which the request should be ignored
-        if "ignore" in response.url:
-            self.logger.info(f"Simulating an ignored request: {response.url}")
-            # Raise IgnoreRequestError to ignore this request
-            raise IgnoreRequestError(f"Simulated ignore: {response.url}")
-
-        # Process the response normally
-        return None
-
-    def handle_ignore_request(self, request, reason):
-        """
-        Handle an ignored request
-        A custom method that can host spider-specific ignore logic
-        """
-        self.logger.info(f"Handling ignored request: {request.url} - reason: {reason}")
-
-
-# Sample code for running the spider
-if __name__ == "__main__":
-    """
-    How to run:
-
-    1. Make sure crawlo is installed from the project root:
-       pip install -e .
-
-    2. Run the spider:
-       crawlo run ignore_example_spider
-
-    3. Watch the log output:
-       - normal requests being processed
-       - ignored requests being recorded
-       - the ignore counters in the statistics
-
-    Advantages of RequestIgnoreMiddleware:
-    ✓ Automatically records every ignored request
-    ✓ Provides detailed statistics for analysing spider behaviour
-    ✓ Supports per-reason and per-domain breakdowns
-    ✓ No extra code needed: IgnoreRequestError is handled automatically
-    """
-    print("RequestIgnoreSpider example")
-    print("=" * 30)
-    print("This spider demonstrates how to use RequestIgnoreMiddleware")
-    print("Run it with:")
-    print("  crawlo run ignore_example_spider")
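The docstring above says the middleware automatically handles IgnoreRequestError and keeps per-reason and per-domain counts. As a rough, framework-agnostic sketch of that bookkeeping (stdlib only; the IgnoreRequestError class below is a local stand-in, and none of this is crawlo's actual implementation):

from collections import Counter
from typing import Optional
from urllib.parse import urlparse


class IgnoreRequestError(Exception):
    """Local stand-in for crawlo.exceptions.IgnoreRequestError."""


class IgnoreStats:
    """Per-reason and per-domain counters for ignored requests."""

    def __init__(self) -> None:
        self.by_reason: Counter = Counter()
        self.by_domain: Counter = Counter()

    def record(self, url: str, reason: str) -> None:
        self.by_reason[reason] += 1
        self.by_domain[urlparse(url).netloc] += 1


def handle(url: str, stats: IgnoreStats) -> Optional[str]:
    """A callback that may raise IgnoreRequestError, plus the
    middleware-style handler that swallows it and records stats."""
    try:
        if "ignore" in url:
            raise IgnoreRequestError("simulated ignore")
        return url  # normal processing result
    except IgnoreRequestError as exc:
        stats.record(url, str(exc))
        return None


stats = IgnoreStats()
for u in ("https://example.com/ok", "https://example.com/ignore1"):
    handle(u, stats)
print(dict(stats.by_reason))  # {'simulated ignore': 1}
print(dict(stats.by_domain))  # {'example.com': 1}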
examples/response_code_middleware_example.py
DELETED

@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-ResponseCodeMiddleware usage example
-Shows how to use ResponseCodeMiddleware to handle HTTP response status codes
-"""
-
-# ResponseCodeMiddleware is enabled by default and needs no special configuration
-# It automatically records status-code statistics for every response
-
-# Middleware configuration (ResponseCodeMiddleware is already enabled by default)
-SETTINGS = {
-    # Middleware configuration (ResponseCodeMiddleware is already enabled by default)
-    'MIDDLEWARES': [
-        # === Request preprocessing stage ===
-        'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',    # 1. drop invalid requests
-        'crawlo.middleware.download_delay.DownloadDelayMiddleware',    # 2. throttle request frequency
-        'crawlo.middleware.default_header.DefaultHeaderMiddleware',    # 3. add default request headers
-        'crawlo.middleware.proxy.ProxyMiddleware',                     # 4. set proxies
-        'crawlo.middleware.offsite.OffsiteMiddleware',                 # 5. filter offsite requests
-
-        # === Response processing stage ===
-        'crawlo.middleware.retry.RetryMiddleware',                     # 6. retry failed requests
-        'crawlo.middleware.response_code.ResponseCodeMiddleware',      # 7. handle special status codes
-        'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. filter response content
-    ],
-
-    # Other common settings
-    'DOWNLOAD_DELAY': 1,
-    'CONCURRENCY': 8,
-    'LOG_LEVEL': 'INFO',
-}
-
-def get_settings():
-    """Return the settings"""
-    return SETTINGS
-
-if __name__ == "__main__":
-    print("ResponseCodeMiddleware configuration example:")
-    print("=" * 40)
-    print("Middleware list:")
-    for i, middleware in enumerate(SETTINGS['MIDDLEWARES'], 1):
-        print(f"  {i}. {middleware}")
-
-    print("\n" + "=" * 40)
-    print("What ResponseCodeMiddleware does:")
-    print("✓ Automatically records every HTTP response status code")
-    print("✓ Groups statistics by status-code class (2xx, 3xx, 4xx, 5xx)")
-    print("✓ Counts successful and error responses")
-    print("✓ Breaks down status codes per domain")
-    print("✓ Provides detailed log messages")
-    print("✓ Enabled by default, no special configuration required")
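The feature list above mentions grouping status codes by class (2xx, 3xx, 4xx, 5xx) with per-domain breakdowns. A minimal sketch of that grouping logic, assuming nothing about the internals of crawlo's real ResponseCodeMiddleware:

from collections import Counter


def status_class(code: int) -> str:
    """Map an HTTP status code to its class bucket, e.g. 404 -> '4xx'."""
    return f"{code // 100}xx"


by_class = Counter()
by_domain = Counter()
for domain, code in [("httpbin.org", 200), ("httpbin.org", 404), ("httpbin.org", 500)]:
    by_class[status_class(code)] += 1              # 2xx / 3xx / 4xx / 5xx totals
    by_domain[(domain, status_class(code))] += 1   # per-domain breakdown

print(dict(by_class))  # {'2xx': 1, '4xx': 1, '5xx': 1}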
examples/response_filter_middleware_example.py
DELETED

@@ -1,67 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-ResponseFilterMiddleware usage example
-Shows how to use ResponseFilterMiddleware to filter HTTP responses
-"""
-
-# ResponseFilterMiddleware allows 2xx status codes by default
-# Custom filter rules can be set via ALLOWED_RESPONSE_CODES and DENIED_RESPONSE_CODES
-
-# Example middleware configuration
-SETTINGS = {
-    # Allowed response status codes (in addition to the default 2xx)
-    'ALLOWED_RESPONSE_CODES': [
-        301,  # permanent redirect
-        302,  # temporary redirect
-        404,  # page not found (may need special handling)
-    ],
-
-    # Denied response status codes (take precedence over ALLOWED_RESPONSE_CODES)
-    'DENIED_RESPONSE_CODES': [
-        200,  # explicitly deny normal responses (for demonstration only)
-        403,  # forbidden
-    ],
-
-    # Middleware configuration (ResponseFilterMiddleware is already enabled by default)
-    'MIDDLEWARES': [
-        # === Request preprocessing stage ===
-        'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',    # 1. drop invalid requests
-        'crawlo.middleware.download_delay.DownloadDelayMiddleware',    # 2. throttle request frequency
-        'crawlo.middleware.default_header.DefaultHeaderMiddleware',    # 3. add default request headers
-        'crawlo.middleware.proxy.ProxyMiddleware',                     # 4. set proxies
-        'crawlo.middleware.offsite.OffsiteMiddleware',                 # 5. filter offsite requests
-
-        # === Response processing stage ===
-        'crawlo.middleware.retry.RetryMiddleware',                     # 6. retry failed requests
-        'crawlo.middleware.response_code.ResponseCodeMiddleware',      # 7. handle special status codes
-        'crawlo.middleware.response_filter.ResponseFilterMiddleware',  # 8. filter response content
-    ],
-
-    # Other common settings
-    'DOWNLOAD_DELAY': 1,
-    'CONCURRENCY': 8,
-    'LOG_LEVEL': 'INFO',
-}
-
-def get_settings():
-    """Return the settings"""
-    return SETTINGS
-
-if __name__ == "__main__":
-    print("ResponseFilterMiddleware configuration example:")
-    print("=" * 40)
-    print(f"Allowed status codes: {SETTINGS['ALLOWED_RESPONSE_CODES']}")
-    print(f"Denied status codes: {SETTINGS['DENIED_RESPONSE_CODES']}")
-    print("\nMiddleware list:")
-    for i, middleware in enumerate(SETTINGS['MIDDLEWARES'], 1):
-        print(f"  {i}. {middleware}")
-
-    print("\n" + "=" * 40)
-    print("What ResponseFilterMiddleware does:")
-    print("✓ Allows 2xx status codes by default")
-    print("✓ Supports a custom list of allowed status codes")
-    print("✓ Supports a custom list of denied status codes")
-    print("✓ The deny list takes precedence over the allow list")
-    print("✓ Automatically filters out non-conforming responses")
-    print("✓ Provides detailed log messages")
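The example documents a precedence rule: the deny list beats the allow list, and 2xx is allowed by default. A small sketch of that decision rule in plain Python (an illustration of the documented behaviour, not crawlo's actual code):

def response_allowed(code, allowed=(), denied=()):
    """Deny list wins; then the explicit allow list; then the 2xx default."""
    if code in denied:
        return False
    if code in allowed:
        return True
    return 200 <= code < 300


assert response_allowed(301, allowed=(301, 302, 404), denied=(200, 403))
assert not response_allowed(200, allowed=(), denied=(200, 403))  # deny beats the 2xx default
assert response_allowed(204)  # 2xx allowed by default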
examples/tong_hua_shun_settings.py
DELETED

@@ -1,62 +0,0 @@
-# Tonghuashun (10jqka) crawler settings example
-# ==================
-
-# Basic project information
-PROJECT_NAME = 'tong_hua_shun_crawler'
-
-# Concurrency
-CONCURRENCY = 1
-
-# Logging
-LOG_LEVEL = 'INFO'
-LOG_FILE = 'logs/tong_hua_shun.log'
-
-# Download delay
-DOWNLOAD_DELAY = 2
-RANDOMNESS = True
-
-# Request headers
-DEFAULT_REQUEST_HEADERS = {
-    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
-    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
-    "cache-control": "no-cache",
-    "pragma": "no-cache",
-    "priority": "u=0, i",
-    "sec-ch-ua": '"Chromium";v="140", "Not=A?Brand";v="24", "Google Chrome";v="140"',
-    "sec-ch-ua-mobile": "?0",
-    "sec-ch-ua-platform": '"Windows"',
-    "sec-fetch-dest": "document",
-    "sec-fetch-mode": "navigate",
-    "sec-fetch-site": "none",
-    "sec-fetch-user": "?1",
-    "upgrade-insecure-requests": "1",
-    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36"
-}
-
-# Proxy configuration
-PROXY_ENABLED = True
-PROXY_API_URL = 'http://test.proxy.api:8080/proxy/getitem/'
-PROXY_EXTRACTOR = 'proxy'
-PROXY_REFRESH_INTERVAL = 60
-PROXY_API_TIMEOUT = 10
-PROXY_POOL_SIZE = 3
-PROXY_HEALTH_CHECK_THRESHOLD = 0.5
-
-# Middleware configuration
-MIDDLEWARES = [
-    'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-    'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-    'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-    'crawlo.middleware.proxy.ProxyMiddleware',
-    'crawlo.middleware.retry.RetryMiddleware',
-    'crawlo.middleware.response_code.ResponseCodeMiddleware',
-    'crawlo.middleware.response_filter.ResponseFilterMiddleware',
-]
-
-# Pipeline configuration
-PIPELINES = [
-    'crawlo.pipelines.console_pipeline.ConsolePipeline',
-]
-
-# Other settings
-DOWNLOAD_TIMEOUT = 30
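A settings file like this is just a module of upper-case constants. One generic way to collect such a module into a dict (crawlo ships its own setting_manager, so this only illustrates the pattern; the module path in the comment is the example file above):

import importlib


def load_settings(module_path: str) -> dict:
    """Collect a module's upper-case top-level names into a settings dict."""
    module = importlib.import_module(module_path)
    return {name: getattr(module, name) for name in dir(module) if name.isupper()}


# e.g. settings = load_settings("examples.tong_hua_shun_settings")
# settings["CONCURRENCY"] -> 1, settings["DOWNLOAD_DELAY"] -> 2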
examples/tong_hua_shun_spider.py
DELETED
@@ -1,170 +0,0 @@
-#!/usr/bin/python
-# -*- coding: UTF-8 -*-
-"""
-Tonghuashun (10jqka) spider example
-==============
-Crawls the Tonghuashun website using user-supplied headers and cookies
-"""
-
-import sys
-import os
-
-# Add the project root to the Python path
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
-
-from crawlo import Spider, Request
-
-
-class TongHuaShunSpider(Spider):
-    """Spider for the Tonghuashun (10jqka) website"""
-    name = 'tong_hua_shun_spider'
-
-    # User-supplied request headers
-    custom_headers = {
-        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
-        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
-        "cache-control": "no-cache",
-        "pragma": "no-cache",
-        "priority": "u=0, i",
-        "sec-ch-ua": "\"Chromium\";v=\"140\", \"Not=A?Brand\";v=\"24\", \"Google Chrome\";v=\"140\"",
-        "sec-ch-ua-mobile": "?0",
-        "sec-ch-ua-platform": "\"Windows\"",
-        "sec-fetch-dest": "document",
-        "sec-fetch-mode": "navigate",
-        "sec-fetch-site": "none",
-        "sec-fetch-user": "?1",
-        "upgrade-insecure-requests": "1",
-        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36"
-    }
-
-    # User-supplied cookies
-    custom_cookies = {
-        "Hm_lvt_722143063e4892925903024537075d0d": "1758071793",
-        "Hm_lvt_929f8b362150b1f77b477230541dbbc2": "1758071793",
-        "historystock": "600699",
-        "spversion": "20130314",
-        "cid": "f9bc812da2c3a7ddf6d5df1fa2d497091758076438",
-        "u_ukey": "A10702B8689642C6BE607730E11E6E4A",
-        "u_uver": "1.0.0",
-        "u_dpass": "Qk3U07X7SHGKa0AcRUg1R1DVWbPioD9Eg270bdikvlwWWXexbsXnRsQNt%2B04iXwdHi80LrSsTFH9a%2B6rtRvqGg%3D%3D",
-        "u_did": "E3ED337393E1429DA56E380DD00B3CCD",
-        "u_ttype": "WEB",
-        "user_status": "0",
-        "ttype": "WEB",
-        "log": "",
-        "Hm_lvt_69929b9dce4c22a060bd22d703b2a280": "1758079404,1758113068,1758157144",
-        "HMACCOUNT": "08DF0D235A291EAA",
-        "Hm_lvt_78c58f01938e4d85eaf619eae71b4ed1": "1758071793,1758113068,1758157144",
-        "user": "MDpteF9lNXRkY3RpdHo6Ok5vbmU6NTAwOjgxNzYyOTAwNDo3LDExMTExMTExMTExLDQwOzQ0LDExLDQwOzYsMSw0MDs1LDEsNDA7MSwxMDEsNDA7MiwxLDQwOzMsMSw0MDs1LDEsNDA7OCwwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMSw0MDsxMDIsMSw0MDoxNjo6OjgwNzYyOTAwNDoxNzU4MTYxNTE0Ojo6MTc1ODA3MjA2MDo2MDQ4MDA6MDoxYTQ0NmFlNDY4M2VmZWY3YmNjYTczY2U3ODZmZTNiODg6ZGVmYXVsdF81OjA%3D",
-        "userid": "807629004",
-        "u_name": "mx_e5tdctitz",
-        "escapename": "mx_e5tdctitz",
-        "ticket": "85eea709becdd924d7eb975351de629e",
-        "utk": "8959c4c6b6f5fb7628864feab15473f4",
-        "sess_tk": "eyJ0eXAiOiJKV1QiLCJhbGciOiJFUzI1NiIsImtpZCI6InNlc3NfdGtfMSIsImJ0eSI6InNlc3NfdGsifQ.eyJqdGkiOiI4ODNiZmU4NmU3M2NhN2NjN2JlZmVmODM0NmFlNDZhNDEiLCJpYXQiOjE3NTgxNjE1MTQsImV4cCI6MTc1ODc2NjMxNCwic3ViIjoiODA3NjI5MDA0IiwiaXNzIjoidXBhc3MuaXdlbmNhaS5jb20iLCJhdWQiOiIyMDIwMTExODUyODg5MDcyIiwiYWN0Ijoib2ZjIiwiY3VocyI6ImIwNTcyZDVjOWNlNDg0MGFlOWYxYTlhYjU3NGZkNjQyYjgzNmExN2E3Y2NhZjk4ZWRiNzI5ZmJkOWFjOGVkYmYifQ.UBNIzxGvQQtXSiIcB_1JJl-EuAc1S9j2LcTLXjwy4ImhDDbh1oJvyRdDUrXdUpwBpIyx5zgYqgt_3FEhY_iayw",
-        "cuc": "ap2eap3gg99g",
-        "Hm_lvt_f79b64788a4e377c608617fba4c736e2": "1758161692",
-        "v": "A1glI4rWhPCQGqh0MvA0ioufKY3vQbzLHqWQT5JJpBNGLfazOlGMW261YNrh",
-        "Hm_lpvt_78c58f01938e4d85eaf619eae71b4ed1": "1758163145",
-        "Hm_lpvt_f79b64788a4e377c608617fba4c736e2": "1758163145",
-        "Hm_lpvt_69929b9dce4c22a060bd22d703b2a280": "1758163145"
-    }
-
-    def start_requests(self):
-        """Generate the initial request"""
-        # User-supplied URL
-        url = "https://stock.10jqka.com.cn/20240315/c655957791.shtml"
-
-        # Build the request with the custom headers and cookies
-        request = Request(
-            url=url,
-            callback=self.parse,
-            headers=self.custom_headers,
-            cookies=self.custom_cookies
-        )
-        yield request
-
-    def parse(self, response):
-        """Parse the response"""
-        print(f"\nFetched page: {response.url}")
-        print(f"Status code: {response.status_code}")
-
-        # Extract the page title
-        title = response.css('title::text').get()
-        if title:
-            print(f"Page title: {title}")
-
-        # Extract key information from the page,
-        # e.g. the article title and publish time
-        article_title = response.css('h1.main-title::text').get()
-        if article_title:
-            print(f"Article title: {article_title}")
-
-        publish_time = response.css('.time::text').get()
-        if publish_time:
-            print(f"Publish time: {publish_time}")
-
-        # Return the extracted data
-        return {
-            'url': response.url,
-            'status_code': response.status_code,
-            'title': title,
-            'article_title': article_title,
-            'publish_time': publish_time
-        }
-
-
-# Configuration notes
-SETTINGS = {
-    # Basic settings
-    'LOG_LEVEL': 'INFO',
-    'CONCURRENCY': 1,
-
-    # Proxy settings
-    'PROXY_ENABLED': True,
-    'PROXY_API_URL': 'http://test.proxy.api:8080/proxy/getitem/',
-    'PROXY_EXTRACTOR': 'proxy',
-    'PROXY_REFRESH_INTERVAL': 60,
-    'PROXY_API_TIMEOUT': 10,
-    'PROXY_POOL_SIZE': 3,
-    'PROXY_HEALTH_CHECK_THRESHOLD': 0.5,
-
-    # Download delay
-    'DOWNLOAD_DELAY': 2,
-    'RANDOMNESS': True,
-
-    # Middlewares
-    'MIDDLEWARES': [
-        'crawlo.middleware.request_ignore.RequestIgnoreMiddleware',
-        'crawlo.middleware.download_delay.DownloadDelayMiddleware',
-        'crawlo.middleware.default_header.DefaultHeaderMiddleware',
-        'crawlo.middleware.proxy.ProxyMiddleware',
-        'crawlo.middleware.retry.RetryMiddleware',
-        'crawlo.middleware.response_code.ResponseCodeMiddleware',
-        'crawlo.middleware.response_filter.ResponseFilterMiddleware',
-    ],
-
-    # Pipelines
-    'PIPELINES': [
-        'crawlo.pipelines.console_pipeline.ConsolePipeline',
-    ],
-}
-
-
-def main():
-    """Main entry point"""
-    print("Tonghuashun spider example")
-    print("=" * 30)
-    print("This example shows how to, with the Crawlo framework:")
-    print("1. use custom headers and cookies")
-    print("2. integrate proxy support")
-    print("3. crawl Tonghuashun website content")
-    print("=" * 30)
-
-    print("\nUsage:")
-    print("1. configure the proxy parameters in the project's settings.py")
-    print("2. run the spider: crawlo run tong_hua_shun_spider")
-
-
-if __name__ == '__main__':
-    main()