crawlo-1.4.7-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crawlo/__init__.py +90 -90
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -140
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -379
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -320
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -451
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -290
- crawlo/crawler.py +698 -698
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -280
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -250
- crawlo/downloader/httpx_downloader.py +265 -265
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -425
- crawlo/downloader/selenium_downloader.py +486 -486
- crawlo/event.py +45 -45
- crawlo/exceptions.py +214 -214
- crawlo/extension/__init__.py +64 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -53
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -104
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +134 -134
- crawlo/filters/__init__.py +170 -170
- crawlo/filters/aioredis_filter.py +347 -347
- crawlo/filters/memory_filter.py +261 -261
- crawlo/framework.py +306 -306
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -391
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -240
- crawlo/initialization/phases.py +229 -229
- crawlo/initialization/registry.py +143 -143
- crawlo/initialization/utils.py +48 -48
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -42
- crawlo/logging/config.py +280 -276
- crawlo/logging/factory.py +175 -175
- crawlo/logging/manager.py +104 -104
- crawlo/middleware/__init__.py +87 -87
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -287
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +408 -376
- crawlo/network/response.py +598 -569
- crawlo/pipelines/__init__.py +52 -52
- crawlo/pipelines/base_pipeline.py +452 -452
- crawlo/pipelines/bloom_dedup_pipeline.py +145 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +196 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +104 -105
- crawlo/pipelines/mongo_pipeline.py +140 -139
- crawlo/pipelines/mysql_pipeline.py +468 -469
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -155
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +9 -9
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -591
- crawlo/queue/redis_priority_queue.py +518 -518
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +287 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +658 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +1 -1
- crawlo/templates/project/items.py.tmpl +13 -13
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -35
- crawlo/templates/project/settings.py.tmpl +113 -109
- crawlo/templates/project/settings_distributed.py.tmpl +160 -156
- crawlo/templates/project/settings_gentle.py.tmpl +174 -170
- crawlo/templates/project/settings_high_performance.py.tmpl +175 -171
- crawlo/templates/project/settings_minimal.py.tmpl +102 -98
- crawlo/templates/project/settings_simple.py.tmpl +172 -168
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -32
- crawlo/templates/spiders_init.py.tmpl +4 -4
- crawlo/tools/__init__.py +86 -86
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +74 -50
- crawlo/utils/batch_processor.py +276 -276
- crawlo/utils/config_manager.py +442 -442
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/encoding_helper.py +190 -0
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -335
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -157
- crawlo/utils/mysql_connection_pool.py +197 -197
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +90 -90
- crawlo/utils/redis_connection_pool.py +578 -578
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -278
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -337
- crawlo/utils/response_helper.py +113 -0
- crawlo/utils/selector_helper.py +138 -137
- crawlo/utils/singleton.py +69 -69
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/METADATA +831 -689
- crawlo-1.4.8.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -217
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -467
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -72
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +54 -54
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +118 -118
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/utils/log.py +0 -80
- crawlo/utils/url_utils.py +0 -40
- crawlo-1.4.7.dist-info/RECORD +0 -347
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
crawlo/utils/request.py
CHANGED
@@ -1,278 +1,278 @@
All 278 lines were removed and re-added; the removed and added content is identical, so the file is shown once:

#!/usr/bin/python
# -*- coding:UTF-8 -*-
"""
# @Time   : 2025-07-08 08:55
# @Author : crawl-coder
# @Desc   : None
"""
import importlib
import json
import hashlib
from typing import Any, Optional, Iterable, Union, Dict
from w3lib.url import canonicalize_url

from crawlo import Request


def to_bytes(data: Any, encoding: str = 'utf-8') -> bytes:
    """
    Convert values of various types to bytes.

    Args:
        data: The data to convert; supports str, bytes, dict, int, float, bool, None, etc.
        encoding: String encoding, defaults to 'utf-8'.

    Returns:
        bytes: The converted byte data.

    Raises:
        TypeError: If the data type cannot be converted.
        UnicodeEncodeError: If encoding fails.
        ValueError: If JSON serialization fails.

    Examples:
        >>> to_bytes("hello")
        b'hello'
        >>> to_bytes({"key": "value"})
        b'{"key": "value"}'
        >>> to_bytes(123)
        b'123'
        >>> to_bytes(None)
        b'null'
    """
    # Validate the encoding argument up front
    if not isinstance(encoding, str):
        raise TypeError(f"encoding must be str, not {type(encoding).__name__}")

    try:
        if isinstance(data, bytes):
            return data
        elif isinstance(data, str):
            return data.encode(encoding)
        elif isinstance(data, dict):
            return json.dumps(data, sort_keys=True, ensure_ascii=False, separators=(',', ':')).encode(encoding)
        elif isinstance(data, (int, float, bool)):
            return str(data).encode(encoding)
        elif data is None:
            return b'null'
        elif hasattr(data, '__str__'):
            # Handle other objects that can be converted to a string
            return str(data).encode(encoding)
        else:
            raise TypeError(
                f"`data` must be str, dict, bytes, int, float, bool, or None, "
                f"not {type(data).__name__}"
            )
    except (UnicodeEncodeError, ValueError) as e:
        raise type(e)(f"Failed to convert {type(data).__name__} to bytes: {str(e)}") from e


def request_fingerprint(
        request: Request,
        include_headers: Optional[Iterable[Union[bytes, str]]] = None
) -> str:
    """
    Generate a request fingerprint from the method, canonicalized URL, body, and optional headers.

    .. deprecated:: 1.0.0
        This function is deprecated. Use :class:`crawlo.utils.fingerprint.FingerprintGenerator` instead:

        .. code-block:: python

            from crawlo.utils.fingerprint import FingerprintGenerator

            fp = FingerprintGenerator.request_fingerprint(
                method=request.method,
                url=request.url,
                body=request.body or b'',
                headers=dict(request.headers) if hasattr(request, 'headers') else {}
            )

        It is kept only for backward compatibility and will be removed in 2.0.0.

    :param request: Request object (must expose method, url, body, headers)
    :param include_headers: Header names (str or bytes) to include in the fingerprint
    :return: Request fingerprint (hex string)
    """
    import warnings
    warnings.warn(
        "request_fingerprint() is deprecated. "
        "Use FingerprintGenerator.request_fingerprint() instead.",
        DeprecationWarning,
        stacklevel=2
    )
    from crawlo.utils.fingerprint import FingerprintGenerator

    # Prepare the request data
    method = request.method
    url = request.url
    body = request.body or b''
    headers = None

    # Handle headers
    if include_headers and hasattr(request, 'headers'):
        headers = {}
        for header_name in include_headers:
            name_str = str(header_name).lower()  # normalize to lowercase for matching
            value = ''

            # Support the different header access styles (e.g. MultiDict or plain dict)
            if hasattr(request.headers, 'get_all'):
                # e.g. the get_all method of scrapy.http.Headers
                values = request.headers.get_all(name_str)
                value = ';'.join(str(v) for v in values) if values else ''
            elif hasattr(request.headers, '__getitem__'):
                # e.g. a plain dict
                try:
                    raw_value = request.headers[name_str]
                    if isinstance(raw_value, list):
                        value = ';'.join(str(v) for v in raw_value)
                    else:
                        value = str(raw_value)
                except (KeyError, TypeError):
                    value = ''
            else:
                value = ''

            headers[name_str] = value

    # Delegate to the unified fingerprint generator
    return FingerprintGenerator.request_fingerprint(method, url, body, headers)


def set_request(request: Request, priority: int) -> None:
    """
    Set the depth and priority of a request.

    :param request: Request object
    :param priority: Priority step value
    """
    # Increase the request depth
    request.meta['depth'] = request.meta.setdefault('depth', 0) + 1

    # Adjust priority by depth: the deeper the request, the lower its priority
    if priority:
        request.priority -= request.meta['depth'] * priority


def request_to_dict(request: Request, spider=None) -> Dict[str, Any]:
    """
    Convert a Request object into a JSON-serializable dict, used to serialize
    requests in distributed crawls.

    Args:
        request: The Request object to serialize.
        spider: Optional; assists serialization (e.g. resolving which object a callback belongs to).

    Returns:
        A dict containing all key information of the Request.
    """
    # Basic attributes
    d = {
        'url': request.url,
        'method': request.method,
        'headers': dict(request.headers),
        'body': request.body,
        'meta': request.meta.copy(),  # copy to avoid aliasing
        'flags': request.flags.copy(),
        'cb_kwargs': request.cb_kwargs.copy(),
    }

    # 1. Handle the callback
    # Functions cannot be serialized directly, so store the import path instead
    if callable(getattr(request, 'callback', None)):
        d['_callback'] = _get_function_path(request.callback)

    # 2. Handle the errback
    if callable(getattr(request, 'err_back', None)):
        d['_errback'] = _get_function_path(request.err_back)

    # 3. Record the original class name so deserialization creates the right type
    d['_class'] = request.__class__.__module__ + '.' + request.__class__.__name__

    return d


def request_from_dict(d: Dict[str, Any], spider=None) -> Request:
    """
    Rebuild a Request object from a dict, used to deserialize requests in
    distributed crawls.

    Args:
        d: A dict produced by request_to_dict.
        spider: Optional; used to resolve callback functions.

    Returns:
        The reconstructed Request object.
    """
    # 1. Resolve the class name and import it dynamically
    cls_path = d.pop('_class', None)
    if cls_path:
        module_path, cls_name = cls_path.rsplit('.', 1)
        module = importlib.import_module(module_path)
        cls = getattr(module, cls_name)
    else:
        cls = Request  # default to Request

    # 2. Resolve the callback
    callback_path = d.pop('_callback', None)
    callback = _get_function_from_path(callback_path, spider) if callback_path else None

    # 3. Resolve the errback
    errback_path = d.pop('_errback', None)
    errback = _get_function_from_path(errback_path, spider) if errback_path else None

    # 4. Create the Request instance
    request = cls(
        url=d['url'],
        method=d.get('method', 'GET'),
        headers=d.get('headers', {}),
        body=d.get('body'),
        callback=callback,
        meta=d.get('meta', {}),
        flags=d.get('flags', []),
        cb_kwargs=d.get('cb_kwargs', {}),
    )

    # Set the err_back attribute manually
    if errback is not None:
        request.err_back = errback

    return request


def _get_function_path(func: callable) -> str:
    """
    Get the module path of a function, e.g. 'myproject.spiders.my_spider.parse'.
    """
    if hasattr(func, '__wrapped__'):
        # Unwrap decorated functions
        func = func.__wrapped__
    module = func.__module__
    if module is None or module == str.__class__.__module__:
        raise ValueError(f"Cannot serialize a builtin function or lambda: {func}")
    return f"{module}.{func.__qualname__}"


def _get_function_from_path(path: str, spider=None) -> Optional[callable]:
    """
    Resolve a function object from a path string.
    If the function is a spider method, try to bind it to the spider instance.
    """
    try:
        module_path, func_name = path.rsplit('.', 1)
        module = importlib.import_module(module_path)

        # Walk the attributes step by step to support nested attributes
        func = module
        for attr in func_name.split('.'):
            func = getattr(func, attr)

        # If a spider is given and func is one of its methods, return the bound method
        if spider and hasattr(func, '__name__') and hasattr(spider, func.__name__):
            spider_method = getattr(spider, func.__name__)
            if spider_method is func:
                return spider_method

        return func
    except Exception as e:
        raise ValueError(f"Failed to load function from path '{path}': {e}")
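A quick sketch of the depth/priority arithmetic in set_request above, assuming a fresh Request starts at priority 0 with an empty meta (which matches how the function reads those fields):

from crawlo import Request
from crawlo.utils.request import set_request

req = Request(url='https://example.com/')

set_request(req, priority=1)  # depth 0 -> 1, priority 0 - 1*1 = -1
set_request(req, priority=1)  # depth 1 -> 2, priority -1 - 2*1 = -3

assert req.meta['depth'] == 2
assert req.priority == -3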
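For reference, a minimal round-trip sketch of request_to_dict / request_from_dict as used for distributed queues. The Request keyword arguments follow the call inside request_from_dict above; myproject.spiders.news.NewsSpider is a hypothetical spider used only so the callback has an importable path:

import json

from crawlo import Request
from crawlo.utils.request import request_to_dict, request_from_dict

# Hypothetical spider module; any importable module-level callable works here.
from myproject.spiders.news import NewsSpider

req = Request(
    url='https://example.com/page/1',
    method='GET',
    callback=NewsSpider.parse,  # stored as 'myproject.spiders.news.NewsSpider.parse'
    meta={'depth': 0},
)

# Serialize: callbacks become '_callback' import paths, the class becomes
# '_class', and the result is JSON-safe, e.g. for a Redis-backed request queue.
payload = json.dumps(request_to_dict(req))

# Deserialize on another worker: the class and callback are re-imported.
restored = request_from_dict(json.loads(payload))
assert restored.url == req.url and restored.method == 'GET'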