crawlo-1.4.1-py3-none-any.whl → crawlo-1.4.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crawlo might be problematic.
- crawlo/__init__.py +93 -93
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +151 -151
- crawlo/commands/help.py +138 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +341 -341
- crawlo/commands/startproject.py +436 -436
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +312 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +438 -439
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +291 -257
- crawlo/crawler.py +650 -650
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +273 -273
- crawlo/downloader/aiohttp_downloader.py +233 -228
- crawlo/downloader/cffi_downloader.py +245 -245
- crawlo/downloader/httpx_downloader.py +259 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +402 -402
- crawlo/downloader/selenium_downloader.py +472 -472
- crawlo/event.py +11 -11
- crawlo/exceptions.py +81 -81
- crawlo/extension/__init__.py +63 -63
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +61 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +103 -103
- crawlo/factories/registry.py +84 -84
- crawlo/filters/__init__.py +154 -154
- crawlo/filters/aioredis_filter.py +257 -257
- crawlo/filters/memory_filter.py +269 -269
- crawlo/framework.py +292 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +425 -425
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +193 -193
- crawlo/initialization/phases.py +148 -148
- crawlo/initialization/registry.py +145 -145
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +45 -37
- crawlo/logging/async_handler.py +181 -0
- crawlo/logging/config.py +196 -96
- crawlo/logging/factory.py +171 -128
- crawlo/logging/manager.py +111 -111
- crawlo/logging/monitor.py +153 -0
- crawlo/logging/sampler.py +167 -0
- crawlo/middleware/__init__.py +21 -21
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +135 -135
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +386 -386
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/middleware/simple_proxy.py +65 -65
- crawlo/mode_manager.py +219 -219
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +379 -379
- crawlo/network/response.py +359 -359
- crawlo/pipelines/__init__.py +21 -21
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +131 -131
- crawlo/pipelines/mysql_pipeline.py +325 -325
- crawlo/pipelines/pipeline_manager.py +100 -84
- crawlo/pipelines/redis_dedup_pipeline.py +156 -156
- crawlo/project.py +349 -338
- crawlo/queue/pqueue.py +42 -42
- crawlo/queue/queue_manager.py +526 -522
- crawlo/queue/redis_priority_queue.py +370 -367
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -284
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +73 -73
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +3 -3
- crawlo/templates/project/items.py.tmpl +17 -17
- crawlo/templates/project/middlewares.py.tmpl +118 -118
- crawlo/templates/project/pipelines.py.tmpl +96 -96
- crawlo/templates/project/settings.py.tmpl +170 -170
- crawlo/templates/project/settings_distributed.py.tmpl +169 -169
- crawlo/templates/project/settings_gentle.py.tmpl +166 -166
- crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
- crawlo/templates/project/settings_minimal.py.tmpl +65 -65
- crawlo/templates/project/settings_simple.py.tmpl +164 -164
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +34 -34
- crawlo/templates/spider/spider.py.tmpl +143 -143
- crawlo/templates/spiders_init.py.tmpl +9 -9
- crawlo/tools/__init__.py +200 -200
- crawlo/tools/anti_crawler.py +268 -268
- crawlo/tools/authenticated_proxy.py +240 -240
- crawlo/tools/data_formatter.py +225 -225
- crawlo/tools/data_validator.py +180 -180
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/encoding_converter.py +127 -127
- crawlo/tools/network_diagnostic.py +364 -364
- crawlo/tools/request_tools.py +82 -82
- crawlo/tools/retry_mechanism.py +224 -224
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +34 -34
- crawlo/utils/batch_processor.py +259 -259
- crawlo/utils/class_loader.py +25 -25
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +343 -343
- crawlo/utils/enhanced_error_handler.py +356 -356
- crawlo/utils/env_config.py +142 -142
- crawlo/utils/error_handler.py +165 -165
- crawlo/utils/fingerprint.py +122 -122
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_config.py +286 -286
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/log.py +79 -79
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_connection_pool.py +388 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +267 -267
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/spider_loader.py +61 -61
- crawlo/utils/system.py +11 -11
- crawlo/utils/tools.py +4 -4
- crawlo/utils/url.py +39 -39
- crawlo-1.4.3.dist-info/METADATA +190 -0
- crawlo-1.4.3.dist-info/RECORD +326 -0
- examples/__init__.py +7 -7
- examples/test_project/__init__.py +7 -7
- examples/test_project/run.py +34 -34
- examples/test_project/test_project/__init__.py +3 -3
- examples/test_project/test_project/items.py +17 -17
- examples/test_project/test_project/middlewares.py +118 -118
- examples/test_project/test_project/pipelines.py +96 -96
- examples/test_project/test_project/settings.py +169 -169
- examples/test_project/test_project/spiders/__init__.py +9 -9
- examples/test_project/test_project/spiders/of_week_dis.py +143 -143
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +275 -275
- tests/authenticated_proxy_example.py +106 -106
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/env_config_example.py +133 -133
- tests/error_handling_example.py +171 -171
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +245 -245
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +127 -127
- tests/simple_log_test.py +57 -57
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_spider_test.py +49 -49
- tests/simple_test.py +47 -47
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_authenticated_proxy.py +141 -141
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_component_factory.py +174 -174
- tests/test_comprehensive.py +146 -146
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawlo_proxy_integration.py +108 -108
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +125 -0
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +268 -268
- tests/test_dynamic_downloaders_proxy.py +124 -124
- tests/test_dynamic_proxy.py +92 -92
- tests/test_dynamic_proxy_config.py +146 -146
- tests/test_dynamic_proxy_real.py +109 -109
- tests/test_edge_cases.py +303 -303
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_env_config.py +121 -121
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_framework_env_usage.py +103 -103
- tests/test_framework_logger.py +66 -66
- tests/test_framework_startup.py +64 -64
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_config.py +112 -112
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +375 -0
- tests/test_logging_final.py +185 -0
- tests/test_logging_integration.py +313 -0
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +142 -0
- tests/test_mode_change.py +72 -72
- tests/test_mode_consistency.py +51 -51
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +212 -0
- tests/test_priority_consistency.py +152 -0
- tests/test_priority_consistency_fixed.py +250 -0
- tests/test_proxy_api.py +264 -264
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +121 -121
- tests/test_proxy_middleware_enhanced.py +216 -216
- tests/test_proxy_middleware_integration.py +136 -136
- tests/test_proxy_middleware_refactored.py +184 -184
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +131 -0
- tests/test_random_headers_default.py +323 -0
- tests/test_random_headers_necessity.py +309 -0
- tests/test_random_user_agent.py +72 -72
- tests/test_real_scenario_proxy.py +195 -195
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +130 -0
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_improvements.py +152 -152
- tests/test_retry_middleware.py +334 -242
- tests/test_retry_middleware_realistic.py +274 -0
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_simple_response.py +61 -61
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +177 -0
- tests/test_user_agents.py +96 -96
- tests/tools_example.py +260 -260
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- crawlo-1.4.1.dist-info/METADATA +0 -1199
- crawlo-1.4.1.dist-info/RECORD +0 -309
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/WHEEL +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.1.dist-info → crawlo-1.4.3.dist-info}/top_level.txt +0 -0
crawlo/extension/request_recorder.py
CHANGED

The single hunk (`@@ -1,108 +1,108 @@`) removes and re-adds lines 1–107 with identical content; only line 108 appears as context. The reconstructed file body:

```python
#!/usr/bin/python
# -*- coding:UTF-8 -*-
import os
import json
from typing import Any
from datetime import datetime

from crawlo import event
from crawlo.utils.log import get_logger


class RequestRecorderExtension:
    """
    Request recorder extension.
    Records every outgoing request to a file for debugging and analysis.
    """

    def __init__(self, crawler: Any):
        self.settings = crawler.settings
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))

        # Read configuration parameters
        self.enabled = self.settings.get_bool('REQUEST_RECORDER_ENABLED', False)
        self.output_dir = self.settings.get('REQUEST_RECORDER_OUTPUT_DIR', 'requests_log')
        self.max_file_size = self.settings.get_int('REQUEST_RECORDER_MAX_FILE_SIZE', 10 * 1024 * 1024)  # default: 10 MB

        # Create the output directory
        if self.enabled:
            os.makedirs(self.output_dir, exist_ok=True)

        self.current_file = None
        self.current_file_size = 0

    @classmethod
    def create_instance(cls, crawler: Any) -> 'RequestRecorderExtension':
        # Only create an instance when the extension is enabled in settings
        if not crawler.settings.get_bool('REQUEST_RECORDER_ENABLED', False):
            from crawlo.exceptions import NotConfigured
            raise NotConfigured("RequestRecorderExtension: REQUEST_RECORDER_ENABLED is False")

        o = cls(crawler)
        if o.enabled:
            crawler.subscriber.subscribe(o.request_scheduled, event=event.request_scheduled)
            crawler.subscriber.subscribe(o.response_received, event=event.response_received)
            crawler.subscriber.subscribe(o.spider_closed, event=event.spider_closed)
        return o

    async def request_scheduled(self, request: Any, spider: Any) -> None:
        """Record a scheduled request."""
        if not self.enabled:
            return

        try:
            request_info = {
                'timestamp': datetime.now().isoformat(),
                'type': 'request',
                'url': request.url,
                'method': request.method,
                'headers': dict(request.headers),
                'meta': getattr(request, 'meta', {}),
            }

            await self._write_record(request_info)
        except Exception as e:
            self.logger.error(f"Error recording request: {e}")

    async def response_received(self, response: Any, spider: Any) -> None:
        """Record a received response."""
        if not self.enabled:
            return

        try:
            response_info = {
                'timestamp': datetime.now().isoformat(),
                'type': 'response',
                'url': response.url,
                'status_code': response.status_code,
                'headers': dict(response.headers),
            }

            await self._write_record(response_info)
        except Exception as e:
            self.logger.error(f"Error recording response: {e}")

    async def spider_closed(self, spider: Any) -> None:
        """Release resources when the spider closes."""
        if self.current_file:
            self.current_file.close()
            self.current_file = None
        self.logger.info("Request recorder closed.")

    async def _write_record(self, record: dict) -> None:
        """Write one record to the current file."""
        # Roll over to a new file when needed
        if not self.current_file or self.current_file_size > self.max_file_size:
            if self.current_file:
                self.current_file.close()

            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            filename = os.path.join(self.output_dir, f'requests_{timestamp}.jsonl')
            self.current_file = open(filename, 'a', encoding='utf-8')
            self.current_file_size = 0

        # Append the record as one JSON line
        line = json.dumps(record, ensure_ascii=False) + '\n'
        self.current_file.write(line)
        self.current_file.flush()
        self.current_file_size += len(line.encode('utf-8'))
```
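Since `_write_record` emits one JSON object per line into `requests_<timestamp>.jsonl` files under `REQUEST_RECORDER_OUTPUT_DIR`, the log can be scanned offline. A minimal sketch, assuming the default `requests_log` directory was used; the 4xx/5xx filter is illustrative and not part of the extension:

```python
import json
from pathlib import Path

# Assumption: the extension ran with the default REQUEST_RECORDER_OUTPUT_DIR.
log_dir = Path('requests_log')

for log_file in sorted(log_dir.glob('requests_*.jsonl')):
    with log_file.open(encoding='utf-8') as fh:
        for line in fh:
            record = json.loads(line)
            # 'type' is 'request' or 'response', per the record layout above
            if record['type'] == 'response' and record.get('status_code', 200) >= 400:
                print(record['timestamp'], record['status_code'], record['url'])
```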
crawlo/factories/__init__.py
CHANGED
The single hunk (`@@ -1,28 +1,28 @@`) removes and re-adds lines 1–27 with identical content; only the closing `]` on line 28 is context. The reconstructed file body:

```python
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Crawlo component factory system
===============================

Provides a unified mechanism for component creation and dependency injection.
"""

from .registry import ComponentRegistry, get_component_registry
from .base import ComponentFactory, ComponentSpec
from .crawler import CrawlerComponentFactory

# Public interface
register_component = get_component_registry().register
get_component = get_component_registry().get
create_component = get_component_registry().create

__all__ = [
    'ComponentRegistry',
    'ComponentFactory',
    'ComponentSpec',
    'CrawlerComponentFactory',
    'get_component_registry',
    'register_component',
    'get_component',
    'create_component'
]
```
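The module-level shortcuts bind directly to the shared registry. A hedged sketch of registering and resolving a custom component through them: `ClockService` is a made-up example, and `create_component(name)` assumes `ComponentRegistry.create` resolves specs by name (the registry module, `crawlo/factories/registry.py`, changes in this release but is not expanded in this diff):

```python
from crawlo.factories import ComponentSpec, register_component, create_component


class ClockService:
    """Hypothetical component used only for illustration."""
    def now(self):
        from datetime import datetime
        return datetime.now().isoformat()


# register_component is bound to ComponentRegistry.register, which
# crawler.py below calls with a ComponentSpec.
register_component(ComponentSpec(
    name='clock',
    component_type=ClockService,
    factory_func=lambda **kwargs: ClockService(),
    singleton=True,
))

# Assumption: ComponentRegistry.create resolves a spec by name and forwards
# keyword arguments to its factory_func.
clock = create_component('clock')
print(clock.now())
```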
crawlo/factories/base.py
CHANGED
The single hunk (`@@ -1,69 +1,69 @@`) removes and re-adds lines 1–68 with identical content; only line 69 is context. The reconstructed file body:

```python
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Component factory base classes and specifications.
"""

from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Type, Any, Dict, Optional, Callable


@dataclass
class ComponentSpec:
    """Component spec - defines how a component is created."""

    name: str
    component_type: Type
    factory_func: Callable[..., Any]
    dependencies: list = None
    singleton: bool = False
    config_key: str = None

    def __post_init__(self):
        if self.dependencies is None:
            self.dependencies = []


class ComponentFactory(ABC):
    """Base class for component factories."""

    @abstractmethod
    def create(self, spec: ComponentSpec, **kwargs) -> Any:
        """Create a component instance."""
        pass

    @abstractmethod
    def supports(self, component_type: Type) -> bool:
        """Check whether this factory supports the given component type."""
        pass


class DefaultComponentFactory(ComponentFactory):
    """Default component factory implementation."""

    def __init__(self):
        self._instances: Dict[str, Any] = {}

    def create(self, spec: ComponentSpec, **kwargs) -> Any:
        """Create a component instance."""
        # Singleton check
        if spec.singleton and spec.name in self._instances:
            return self._instances[spec.name]

        # Invoke the factory function to build the instance
        instance = spec.factory_func(**kwargs)

        # Cache singleton instances
        if spec.singleton:
            self._instances[spec.name] = instance

        return instance

    def supports(self, component_type: Type) -> bool:
        """Supports every type."""
        return True

    def clear_singletons(self):
        """Clear cached singleton instances (for tests)."""
        self._instances.clear()
```
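Based only on the `DefaultComponentFactory` code above, a singleton spec returns the cached instance on repeat calls until `clear_singletons()` is invoked. A small sketch with a hypothetical `config` component:

```python
from crawlo.factories.base import ComponentSpec, DefaultComponentFactory

# Hypothetical spec: factory_func builds a plain dict from keyword arguments.
spec = ComponentSpec(
    name='config',
    component_type=dict,
    factory_func=lambda **kwargs: {'retries': 3, **kwargs},
    singleton=True,
)

factory = DefaultComponentFactory()
a = factory.create(spec, timeout=30)
b = factory.create(spec)           # singleton: the cached instance is returned
assert a is b and a['timeout'] == 30

factory.clear_singletons()
c = factory.create(spec)           # fresh instance after the cache is cleared
assert c is not a
```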
crawlo/factories/crawler.py
CHANGED
The single hunk (`@@ -1,104 +1,104 @@`) removes and re-adds lines 1–103 with identical content; only the final `register_crawler_components()` call is context. The reconstructed file body:

```python
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
Crawler component factory - dedicated to creating Crawler-related components.
"""

from typing import Any, Type

from .base import ComponentFactory, ComponentSpec
from .registry import get_component_registry


class CrawlerComponentFactory(ComponentFactory):
    """Crawler component factory."""

    def create(self, spec: ComponentSpec, **kwargs) -> Any:
        """Create a Crawler-related component."""
        # Check whether a crawler dependency is required
        if 'crawler' in spec.dependencies and 'crawler' not in kwargs:
            raise ValueError(f"Crawler instance required for component {spec.name}")

        return spec.factory_func(**kwargs)

    def supports(self, component_type: Type) -> bool:
        """Check whether the given type is supported."""
        # Supported component types can be extended here as needed
        supported_types = [
            'Engine', 'Scheduler', 'StatsCollector',
            'Subscriber', 'ExtensionManager'
        ]
        return component_type.__name__ in supported_types


def register_crawler_components():
    """Register Crawler-related components."""
    registry = get_component_registry()

    # Register the factory
    registry.register_factory(CrawlerComponentFactory())

    # Register component specs

    # Engine component
    def create_engine(crawler, **kwargs):
        from crawlo.core.engine import Engine
        return Engine(crawler)

    registry.register(ComponentSpec(
        name='engine',
        component_type=type('Engine', (), {}),
        factory_func=create_engine,
        dependencies=['crawler']
    ))

    # Scheduler component
    def create_scheduler(crawler, **kwargs):
        from crawlo.core.scheduler import Scheduler
        return Scheduler.create_instance(crawler)

    registry.register(ComponentSpec(
        name='scheduler',
        component_type=type('Scheduler', (), {}),
        factory_func=create_scheduler,
        dependencies=['crawler']
    ))

    # StatsCollector component
    def create_stats(crawler, **kwargs):
        from crawlo.stats_collector import StatsCollector
        return StatsCollector(crawler)

    registry.register(ComponentSpec(
        name='stats',
        component_type=type('StatsCollector', (), {}),
        factory_func=create_stats,
        dependencies=['crawler']
    ))

    # Subscriber component
    def create_subscriber(**kwargs):
        from crawlo.subscriber import Subscriber
        return Subscriber()

    registry.register(ComponentSpec(
        name='subscriber',
        component_type=type('Subscriber', (), {}),
        factory_func=create_subscriber
    ))

    # ExtensionManager component
    def create_extension_manager(crawler, **kwargs):
        from crawlo.extension import ExtensionManager
        return ExtensionManager.create_instance(crawler)

    registry.register(ComponentSpec(
        name='extension_manager',
        component_type=type('ExtensionManager', (), {}),
        factory_func=create_extension_manager,
        dependencies=['crawler']
    ))


# Auto-register on import
register_crawler_components()
```