crawlo 1.4.6-py3-none-any.whl → 1.4.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crawlo might be problematic.
- crawlo/__init__.py +90 -89
- crawlo/__version__.py +1 -1
- crawlo/cli.py +75 -75
- crawlo/commands/__init__.py +14 -14
- crawlo/commands/check.py +594 -594
- crawlo/commands/genspider.py +186 -186
- crawlo/commands/help.py +140 -138
- crawlo/commands/list.py +155 -155
- crawlo/commands/run.py +379 -341
- crawlo/commands/startproject.py +460 -460
- crawlo/commands/stats.py +187 -187
- crawlo/commands/utils.py +196 -196
- crawlo/config.py +320 -312
- crawlo/config_validator.py +277 -277
- crawlo/core/__init__.py +52 -52
- crawlo/core/engine.py +451 -438
- crawlo/core/processor.py +47 -47
- crawlo/core/scheduler.py +290 -291
- crawlo/crawler.py +698 -657
- crawlo/data/__init__.py +5 -5
- crawlo/data/user_agents.py +194 -194
- crawlo/downloader/__init__.py +280 -276
- crawlo/downloader/aiohttp_downloader.py +233 -233
- crawlo/downloader/cffi_downloader.py +250 -247
- crawlo/downloader/httpx_downloader.py +265 -259
- crawlo/downloader/hybrid_downloader.py +212 -212
- crawlo/downloader/playwright_downloader.py +425 -402
- crawlo/downloader/selenium_downloader.py +486 -472
- crawlo/event.py +45 -11
- crawlo/exceptions.py +215 -82
- crawlo/extension/__init__.py +65 -64
- crawlo/extension/health_check.py +141 -141
- crawlo/extension/log_interval.py +94 -94
- crawlo/extension/log_stats.py +70 -70
- crawlo/extension/logging_extension.py +53 -61
- crawlo/extension/memory_monitor.py +104 -104
- crawlo/extension/performance_profiler.py +133 -133
- crawlo/extension/request_recorder.py +107 -107
- crawlo/factories/__init__.py +27 -27
- crawlo/factories/base.py +68 -68
- crawlo/factories/crawler.py +104 -103
- crawlo/factories/registry.py +84 -84
- crawlo/factories/utils.py +135 -0
- crawlo/filters/__init__.py +170 -153
- crawlo/filters/aioredis_filter.py +348 -264
- crawlo/filters/memory_filter.py +261 -276
- crawlo/framework.py +306 -292
- crawlo/initialization/__init__.py +44 -44
- crawlo/initialization/built_in.py +391 -434
- crawlo/initialization/context.py +141 -141
- crawlo/initialization/core.py +240 -194
- crawlo/initialization/phases.py +230 -149
- crawlo/initialization/registry.py +143 -145
- crawlo/initialization/utils.py +49 -0
- crawlo/interfaces.py +23 -23
- crawlo/items/__init__.py +23 -23
- crawlo/items/base.py +23 -23
- crawlo/items/fields.py +52 -52
- crawlo/items/items.py +104 -104
- crawlo/logging/__init__.py +42 -46
- crawlo/logging/config.py +277 -197
- crawlo/logging/factory.py +175 -171
- crawlo/logging/manager.py +104 -112
- crawlo/middleware/__init__.py +87 -24
- crawlo/middleware/default_header.py +132 -132
- crawlo/middleware/download_delay.py +104 -104
- crawlo/middleware/middleware_manager.py +142 -142
- crawlo/middleware/offsite.py +123 -123
- crawlo/middleware/proxy.py +209 -209
- crawlo/middleware/request_ignore.py +86 -86
- crawlo/middleware/response_code.py +150 -150
- crawlo/middleware/response_filter.py +136 -136
- crawlo/middleware/retry.py +124 -124
- crawlo/mode_manager.py +287 -253
- crawlo/network/__init__.py +21 -21
- crawlo/network/request.py +375 -379
- crawlo/network/response.py +569 -664
- crawlo/pipelines/__init__.py +53 -22
- crawlo/pipelines/base_pipeline.py +452 -0
- crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
- crawlo/pipelines/console_pipeline.py +39 -39
- crawlo/pipelines/csv_pipeline.py +316 -316
- crawlo/pipelines/database_dedup_pipeline.py +197 -197
- crawlo/pipelines/json_pipeline.py +218 -218
- crawlo/pipelines/memory_dedup_pipeline.py +105 -105
- crawlo/pipelines/mongo_pipeline.py +140 -132
- crawlo/pipelines/mysql_pipeline.py +469 -476
- crawlo/pipelines/pipeline_manager.py +100 -100
- crawlo/pipelines/redis_dedup_pipeline.py +155 -156
- crawlo/project.py +347 -347
- crawlo/queue/__init__.py +10 -0
- crawlo/queue/pqueue.py +38 -38
- crawlo/queue/queue_manager.py +591 -525
- crawlo/queue/redis_priority_queue.py +519 -370
- crawlo/settings/__init__.py +7 -7
- crawlo/settings/default_settings.py +284 -277
- crawlo/settings/setting_manager.py +219 -219
- crawlo/spider/__init__.py +657 -657
- crawlo/stats_collector.py +81 -81
- crawlo/subscriber.py +129 -129
- crawlo/task_manager.py +138 -138
- crawlo/templates/crawlo.cfg.tmpl +10 -10
- crawlo/templates/project/__init__.py.tmpl +2 -4
- crawlo/templates/project/items.py.tmpl +13 -17
- crawlo/templates/project/middlewares.py.tmpl +38 -38
- crawlo/templates/project/pipelines.py.tmpl +35 -36
- crawlo/templates/project/settings.py.tmpl +109 -111
- crawlo/templates/project/settings_distributed.py.tmpl +156 -159
- crawlo/templates/project/settings_gentle.py.tmpl +170 -176
- crawlo/templates/project/settings_high_performance.py.tmpl +171 -177
- crawlo/templates/project/settings_minimal.py.tmpl +98 -100
- crawlo/templates/project/settings_simple.py.tmpl +168 -174
- crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
- crawlo/templates/run.py.tmpl +23 -23
- crawlo/templates/spider/spider.py.tmpl +32 -40
- crawlo/templates/spiders_init.py.tmpl +5 -10
- crawlo/tools/__init__.py +86 -189
- crawlo/tools/date_tools.py +289 -289
- crawlo/tools/distributed_coordinator.py +384 -384
- crawlo/tools/scenario_adapter.py +262 -262
- crawlo/tools/text_cleaner.py +232 -232
- crawlo/utils/__init__.py +50 -50
- crawlo/utils/batch_processor.py +276 -259
- crawlo/utils/config_manager.py +442 -0
- crawlo/utils/controlled_spider_mixin.py +439 -439
- crawlo/utils/db_helper.py +250 -250
- crawlo/utils/error_handler.py +410 -410
- crawlo/utils/fingerprint.py +121 -121
- crawlo/utils/func_tools.py +82 -82
- crawlo/utils/large_scale_helper.py +344 -344
- crawlo/utils/leak_detector.py +335 -0
- crawlo/utils/log.py +79 -79
- crawlo/utils/misc.py +81 -81
- crawlo/utils/mongo_connection_pool.py +157 -0
- crawlo/utils/mysql_connection_pool.py +197 -0
- crawlo/utils/performance_monitor.py +285 -285
- crawlo/utils/queue_helper.py +175 -175
- crawlo/utils/redis_checker.py +91 -0
- crawlo/utils/redis_connection_pool.py +578 -388
- crawlo/utils/redis_key_validator.py +198 -198
- crawlo/utils/request.py +278 -256
- crawlo/utils/request_serializer.py +225 -225
- crawlo/utils/resource_manager.py +337 -0
- crawlo/utils/selector_helper.py +137 -137
- crawlo/utils/singleton.py +70 -0
- crawlo/utils/spider_loader.py +201 -201
- crawlo/utils/text_helper.py +94 -94
- crawlo/utils/{url.py → url_utils.py} +39 -39
- crawlo-1.4.7.dist-info/METADATA +689 -0
- crawlo-1.4.7.dist-info/RECORD +347 -0
- examples/__init__.py +7 -7
- tests/__init__.py +7 -7
- tests/advanced_tools_example.py +217 -275
- tests/authenticated_proxy_example.py +110 -110
- tests/baidu_performance_test.py +108 -108
- tests/baidu_test.py +59 -59
- tests/bug_check_test.py +250 -250
- tests/cleaners_example.py +160 -160
- tests/comprehensive_framework_test.py +212 -212
- tests/comprehensive_test.py +81 -81
- tests/comprehensive_testing_summary.md +186 -186
- tests/config_validation_demo.py +142 -142
- tests/controlled_spider_example.py +205 -205
- tests/date_tools_example.py +180 -180
- tests/debug_configure.py +69 -69
- tests/debug_framework_logger.py +84 -84
- tests/debug_log_config.py +126 -126
- tests/debug_log_levels.py +63 -63
- tests/debug_pipelines.py +66 -66
- tests/detailed_log_test.py +233 -233
- tests/direct_selector_helper_test.py +96 -96
- tests/distributed_dedup_test.py +467 -0
- tests/distributed_test.py +66 -66
- tests/distributed_test_debug.py +76 -76
- tests/dynamic_loading_example.py +523 -523
- tests/dynamic_loading_test.py +104 -104
- tests/error_handling_example.py +171 -171
- tests/explain_mysql_update_behavior.py +76 -76
- tests/final_comprehensive_test.py +151 -151
- tests/final_log_test.py +260 -260
- tests/final_validation_test.py +182 -182
- tests/fix_log_test.py +142 -142
- tests/framework_performance_test.py +202 -202
- tests/log_buffering_test.py +111 -111
- tests/log_generation_timing_test.py +153 -153
- tests/monitor_redis_dedup.sh +72 -0
- tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
- tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
- tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
- tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
- tests/ofweek_scrapy/scrapy.cfg +11 -11
- tests/optimized_performance_test.py +211 -211
- tests/performance_comparison.py +244 -244
- tests/queue_blocking_test.py +113 -113
- tests/queue_test.py +89 -89
- tests/redis_key_validation_demo.py +130 -130
- tests/request_params_example.py +150 -150
- tests/response_improvements_example.py +144 -144
- tests/scrapy_comparison/ofweek_scrapy.py +138 -138
- tests/scrapy_comparison/scrapy_test.py +133 -133
- tests/simple_cli_test.py +55 -0
- tests/simple_command_test.py +119 -119
- tests/simple_crawlo_test.py +126 -126
- tests/simple_follow_test.py +38 -38
- tests/simple_log_test2.py +137 -137
- tests/simple_optimization_test.py +128 -128
- tests/simple_queue_type_test.py +41 -41
- tests/simple_response_selector_test.py +94 -94
- tests/simple_selector_helper_test.py +154 -154
- tests/simple_selector_test.py +207 -207
- tests/simple_spider_test.py +49 -49
- tests/simple_url_test.py +73 -73
- tests/simulate_mysql_update_test.py +139 -139
- tests/spider_log_timing_test.py +177 -177
- tests/test_advanced_tools.py +148 -148
- tests/test_all_commands.py +230 -230
- tests/test_all_pipeline_fingerprints.py +133 -133
- tests/test_all_redis_key_configs.py +145 -145
- tests/test_asyncmy_usage.py +56 -56
- tests/test_batch_processor.py +178 -178
- tests/test_cleaners.py +54 -54
- tests/test_cli_arguments.py +119 -0
- tests/test_component_factory.py +174 -174
- tests/test_config_consistency.py +80 -80
- tests/test_config_merge.py +152 -152
- tests/test_config_validator.py +182 -182
- tests/test_controlled_spider_mixin.py +79 -79
- tests/test_crawler_process_import.py +38 -38
- tests/test_crawler_process_spider_modules.py +47 -47
- tests/test_crawlo_proxy_integration.py +114 -114
- tests/test_date_tools.py +123 -123
- tests/test_dedup_fix.py +220 -220
- tests/test_dedup_pipeline_consistency.py +124 -124
- tests/test_default_header_middleware.py +313 -313
- tests/test_distributed.py +65 -65
- tests/test_double_crawlo_fix.py +204 -204
- tests/test_double_crawlo_fix_simple.py +124 -124
- tests/test_download_delay_middleware.py +221 -221
- tests/test_downloader_proxy_compatibility.py +272 -272
- tests/test_edge_cases.py +305 -305
- tests/test_encoding_core.py +56 -56
- tests/test_encoding_detection.py +126 -126
- tests/test_enhanced_error_handler.py +270 -270
- tests/test_enhanced_error_handler_comprehensive.py +245 -245
- tests/test_error_handler_compatibility.py +112 -112
- tests/test_factories.py +252 -252
- tests/test_factory_compatibility.py +196 -196
- tests/test_final_validation.py +153 -153
- tests/test_fingerprint_consistency.py +135 -135
- tests/test_fingerprint_simple.py +51 -51
- tests/test_get_component_logger.py +83 -83
- tests/test_hash_performance.py +99 -99
- tests/test_integration.py +169 -169
- tests/test_item_dedup_redis_key.py +122 -122
- tests/test_large_scale_helper.py +235 -235
- tests/test_logging_enhancements.py +374 -374
- tests/test_logging_final.py +184 -184
- tests/test_logging_integration.py +312 -312
- tests/test_logging_system.py +282 -282
- tests/test_middleware_debug.py +141 -141
- tests/test_mode_consistency.py +51 -51
- tests/test_multi_directory.py +67 -67
- tests/test_multiple_spider_modules.py +80 -80
- tests/test_mysql_pipeline_config.py +164 -164
- tests/test_mysql_pipeline_error.py +98 -98
- tests/test_mysql_pipeline_init_log.py +82 -82
- tests/test_mysql_pipeline_integration.py +132 -132
- tests/test_mysql_pipeline_refactor.py +143 -143
- tests/test_mysql_pipeline_refactor_simple.py +85 -85
- tests/test_mysql_pipeline_robustness.py +195 -195
- tests/test_mysql_pipeline_types.py +88 -88
- tests/test_mysql_update_columns.py +93 -93
- tests/test_offsite_middleware.py +244 -244
- tests/test_offsite_middleware_simple.py +203 -203
- tests/test_optimized_selector_naming.py +100 -100
- tests/test_parsel.py +29 -29
- tests/test_performance.py +327 -327
- tests/test_performance_monitor.py +115 -115
- tests/test_pipeline_fingerprint_consistency.py +86 -86
- tests/test_priority_behavior.py +211 -211
- tests/test_priority_consistency.py +151 -151
- tests/test_priority_consistency_fixed.py +249 -249
- tests/test_proxy_health_check.py +32 -32
- tests/test_proxy_middleware.py +217 -217
- tests/test_proxy_middleware_enhanced.py +212 -212
- tests/test_proxy_middleware_integration.py +142 -142
- tests/test_proxy_middleware_refactored.py +207 -207
- tests/test_proxy_only.py +83 -83
- tests/test_proxy_providers.py +56 -56
- tests/test_proxy_stats.py +19 -19
- tests/test_proxy_strategies.py +59 -59
- tests/test_proxy_with_downloader.py +152 -152
- tests/test_queue_empty_check.py +41 -41
- tests/test_queue_manager_double_crawlo.py +173 -173
- tests/test_queue_manager_redis_key.py +179 -179
- tests/test_queue_naming.py +154 -154
- tests/test_queue_type.py +106 -106
- tests/test_queue_type_redis_config_consistency.py +130 -130
- tests/test_random_headers_default.py +322 -322
- tests/test_random_headers_necessity.py +308 -308
- tests/test_random_user_agent.py +72 -72
- tests/test_redis_config.py +28 -28
- tests/test_redis_connection_pool.py +294 -294
- tests/test_redis_key_naming.py +181 -181
- tests/test_redis_key_validator.py +123 -123
- tests/test_redis_queue.py +224 -224
- tests/test_redis_queue_name_fix.py +175 -175
- tests/test_redis_queue_type_fallback.py +129 -129
- tests/test_request_ignore_middleware.py +182 -182
- tests/test_request_params.py +111 -111
- tests/test_request_serialization.py +70 -70
- tests/test_response_code_middleware.py +349 -349
- tests/test_response_filter_middleware.py +427 -427
- tests/test_response_follow.py +104 -104
- tests/test_response_improvements.py +152 -152
- tests/test_response_selector_methods.py +92 -92
- tests/test_response_url_methods.py +70 -70
- tests/test_response_urljoin.py +86 -86
- tests/test_retry_middleware.py +333 -333
- tests/test_retry_middleware_realistic.py +273 -273
- tests/test_scheduler.py +252 -252
- tests/test_scheduler_config_update.py +133 -133
- tests/test_scrapy_style_encoding.py +112 -112
- tests/test_selector_helper.py +100 -100
- tests/test_selector_optimizations.py +146 -146
- tests/test_simple_response.py +61 -61
- tests/test_spider_loader.py +49 -49
- tests/test_spider_loader_comprehensive.py +69 -69
- tests/test_spider_modules.py +84 -84
- tests/test_spiders/test_spider.py +9 -9
- tests/test_telecom_spider_redis_key.py +205 -205
- tests/test_template_content.py +87 -87
- tests/test_template_redis_key.py +134 -134
- tests/test_tools.py +159 -159
- tests/test_user_agent_randomness.py +176 -176
- tests/test_user_agents.py +96 -96
- tests/untested_features_report.md +138 -138
- tests/verify_debug.py +51 -51
- tests/verify_distributed.py +117 -117
- tests/verify_log_fix.py +111 -111
- tests/verify_mysql_warnings.py +109 -109
- crawlo/logging/async_handler.py +0 -181
- crawlo/logging/monitor.py +0 -153
- crawlo/logging/sampler.py +0 -167
- crawlo/tools/authenticated_proxy.py +0 -241
- crawlo/tools/data_formatter.py +0 -226
- crawlo/tools/data_validator.py +0 -181
- crawlo/tools/encoding_converter.py +0 -127
- crawlo/tools/network_diagnostic.py +0 -365
- crawlo/tools/request_tools.py +0 -83
- crawlo/tools/retry_mechanism.py +0 -224
- crawlo/utils/env_config.py +0 -143
- crawlo/utils/large_scale_config.py +0 -287
- crawlo/utils/system.py +0 -11
- crawlo/utils/tools.py +0 -5
- crawlo-1.4.6.dist-info/METADATA +0 -329
- crawlo-1.4.6.dist-info/RECORD +0 -361
- tests/env_config_example.py +0 -134
- tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
- tests/test_authenticated_proxy.py +0 -142
- tests/test_comprehensive.py +0 -147
- tests/test_dynamic_downloaders_proxy.py +0 -125
- tests/test_dynamic_proxy.py +0 -93
- tests/test_dynamic_proxy_config.py +0 -147
- tests/test_dynamic_proxy_real.py +0 -110
- tests/test_env_config.py +0 -122
- tests/test_framework_env_usage.py +0 -104
- tests/test_large_scale_config.py +0 -113
- tests/test_proxy_api.py +0 -265
- tests/test_real_scenario_proxy.py +0 -196
- tests/tools_example.py +0 -261
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
- {crawlo-1.4.6.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0
crawlo/utils/large_scale_helper.py

@@ -1,345 +1,345 @@
 #!/usr/bin/python
 # -*- coding: UTF-8 -*-
 """
 Large-scale crawler optimization helpers
 """
 import asyncio
 import json
 import time
 from typing import Generator, List, Dict, Any

-from crawlo.
+from crawlo.logging import get_logger


 class LargeScaleHelper:
     """Helper class for large-scale crawls"""

     def __init__(self, batch_size: int = 1000, checkpoint_interval: int = 5000):
         self.batch_size = batch_size
         self.checkpoint_interval = checkpoint_interval
         self.logger = get_logger(self.__class__.__name__)

     def batch_iterator(self, data_source, start_offset: int = 0) -> Generator[List[Any], None, None]:
         """
         Batch iterator for processing large amounts of data in chunks

         Args:
             data_source: the data source (several types are supported)
             start_offset: starting offset

         Yields:
             A list containing each batch of data
         """
         if hasattr(data_source, '__iter__') and not isinstance(data_source, (str, bytes)):
             # Iterable object
             yield from self._iterate_batches(data_source, start_offset)
         elif hasattr(data_source, 'get_batch'):
             # Data source that supports batched fetching
             yield from self._get_batches_from_source(data_source, start_offset)
         elif callable(data_source):
             # Callable data source
             yield from self._get_batches_from_function(data_source, start_offset)
         else:
             raise ValueError(f"Unsupported data source type: {type(data_source)}")

     def _iterate_batches(self, iterable, start_offset: int) -> Generator[List[Any], None, None]:
         """Fetch data in batches from an iterable"""
         iterator = iter(iterable)

         # Skip data that has already been processed
         for _ in range(start_offset):
             try:
                 next(iterator)
             except StopIteration:
                 return

         while True:
             batch = []
             for _ in range(self.batch_size):
                 try:
                     batch.append(next(iterator))
                 except StopIteration:
                     if batch:
                         yield batch
                     return

             if batch:
                 yield batch

     def _get_batches_from_source(self, data_source, start_offset: int) -> Generator[List[Any], None, None]:
         """Fetch data from a source that supports batched access"""
         offset = start_offset

         while True:
             try:
                 batch = data_source.get_batch(offset, self.batch_size)
                 if not batch:
                     break

                 yield batch
                 offset += len(batch)

                 if len(batch) < self.batch_size:
                     break  # Reached the end of the data

             except Exception as e:
                 self.logger.error(f"Failed to fetch batch data: {e}")
                 break

     def _get_batches_from_function(self, func, start_offset: int) -> Generator[List[Any], None, None]:
         """Fetch batches of data from a function"""
         offset = start_offset

         while True:
             try:
                 batch = func(offset, self.batch_size)
                 if not batch:
                     break

                 yield batch
                 offset += len(batch)

                 if len(batch) < self.batch_size:
                     break

             except Exception as e:
                 self.logger.error(f"Failed to fetch data from function: {e}")
                 break


 class ProgressManager:
     """Progress manager"""

     def __init__(self, progress_file: str = "spider_progress.json"):
         self.progress_file = progress_file
         self.logger = get_logger(self.__class__.__name__)

     def load_progress(self) -> Dict[str, Any]:
         """Load progress"""
         try:
             with open(self.progress_file, 'r', encoding='utf-8') as f:
                 progress = json.load(f)
                 self.logger.info(f"Loaded progress: {progress}")
                 return progress
         except FileNotFoundError:
             self.logger.info("📄 No progress file found, starting from scratch")
             return self._get_default_progress()
         except Exception as e:
             self.logger.error(f"Failed to load progress: {e}")
             return self._get_default_progress()

     def save_progress(self, **kwargs):
         """Save progress"""
         try:
             progress = {
                 **kwargs,
                 'timestamp': time.time(),
                 'formatted_time': time.strftime('%Y-%m-%d %H:%M:%S')
             }

             with open(self.progress_file, 'w', encoding='utf-8') as f:
                 json.dump(progress, f, indent=2, ensure_ascii=False)

             self.logger.debug(f"💾 Progress saved: {progress}")

         except Exception as e:
             self.logger.error(f"Failed to save progress: {e}")

     def _get_default_progress(self) -> Dict[str, Any]:
         """Return the default progress record"""
         return {
             'batch_num': 0,
             'processed_count': 0,
             'skipped_count': 0,
             'timestamp': time.time()
         }


 class MemoryOptimizer:
     """Memory optimizer"""

     def __init__(self, max_memory_mb: int = 500):
         self.max_memory_mb = max_memory_mb
         self.logger = get_logger(self.__class__.__name__)

     def check_memory_usage(self) -> Dict[str, float]:
         """Check current memory usage"""
         try:
             import psutil
             process = psutil.Process()
             memory_info = process.memory_info()

             memory_mb = memory_info.rss / 1024 / 1024
             memory_percent = process.memory_percent()

             return {
                 'memory_mb': memory_mb,
                 'memory_percent': memory_percent,
                 'threshold_mb': self.max_memory_mb
             }
         except ImportError:
             self.logger.warning("psutil is not installed, memory cannot be monitored")
             return {}
         except Exception as e:
             self.logger.error(f"Memory check failed: {e}")
             return {}

     def should_pause_for_memory(self) -> bool:
         """Check whether processing should pause because memory is running low"""
         memory_info = self.check_memory_usage()

         if not memory_info:
             return False

         memory_mb = memory_info.get('memory_mb', 0)

         if memory_mb > self.max_memory_mb:
             self.logger.warning(f"Memory usage too high: {memory_mb:.1f}MB > {self.max_memory_mb}MB")
             return True

         return False

     def force_garbage_collection(self):
         """Force a garbage collection pass"""
         try:
             import gc
             collected = gc.collect()
             self.logger.debug(f"Garbage collection: reclaimed {collected} objects")
         except Exception as e:
             self.logger.error(f"Garbage collection failed: {e}")


 class DataSourceAdapter:
     """Data source adapter"""

     @staticmethod
     def from_redis_queue(queue, batch_size: int = 1000):
         """Create a batched data source from a Redis queue"""
         def get_batch(offset: int, limit: int) -> List[Dict]:
             try:
                 # If the queue supports range queries
                 if hasattr(queue, 'get_range'):
                     return queue.get_range(offset, offset + limit - 1)

                 # If the queue supports batched fetching
                 if hasattr(queue, 'get_batch'):
                     return queue.get_batch(offset, limit)

                 # Simulate batched fetching
                 results = []
                 for _ in range(limit):
                     item = queue.get_nowait() if hasattr(queue, 'get_nowait') else None
                     if item:
                         results.append(item)
                     else:
                         break

                 return results

             except Exception as e:
                 print(f"Failed to fetch batch: {e}")
                 return []

         return get_batch

     @staticmethod
     def from_database(db_helper, query: str, batch_size: int = 1000):
         """Create a batched data source from a database"""
         def get_batch(offset: int, limit: int) -> List[Dict]:
             try:
                 # Add pagination to the query
                 paginated_query = f"{query} LIMIT {limit} OFFSET {offset}"
                 return db_helper.execute_query(paginated_query)
             except Exception as e:
                 print(f"Database query failed: {e}")
                 return []

         return get_batch

     @staticmethod
     def from_file(file_path: str, batch_size: int = 1000):
         """Create a batched data source from a file"""
         def get_batch(offset: int, limit: int) -> List[str]:
             try:
                 with open(file_path, 'r', encoding='utf-8') as f:
                     # Skip lines that have already been processed
                     for _ in range(offset):
                         f.readline()

                     # Read the current batch
                     batch = []
                     for _ in range(limit):
                         line = f.readline()
                         if not line:
                             break
                         batch.append(line.strip())

                     return batch
             except Exception as e:
                 print(f"Failed to read file: {e}")
                 return []

         return get_batch


 class LargeScaleSpiderMixin:
     """Mixin class for large-scale spiders"""

     def __init__(self):
         super().__init__()
         self.large_scale_helper = LargeScaleHelper(
             batch_size=getattr(self, 'batch_size', 1000),
             checkpoint_interval=getattr(self, 'checkpoint_interval', 5000)
         )
         self.progress_manager = ProgressManager(
             progress_file=getattr(self, 'progress_file', f"{self.name}_progress.json")
         )
         self.memory_optimizer = MemoryOptimizer(
             max_memory_mb=getattr(self, 'max_memory_mb', 500)
         )

     def create_streaming_start_requests(self, data_source):
         """Create a streaming start_requests generator"""
         progress = self.progress_manager.load_progress()
         start_offset = progress.get('processed_count', 0)

         processed_count = start_offset
         skipped_count = progress.get('skipped_count', 0)

         for batch in self.large_scale_helper.batch_iterator(data_source, start_offset):

             # Memory check
             if self.memory_optimizer.should_pause_for_memory():
                 self.memory_optimizer.force_garbage_collection()
                 # A delay or other handling could be added here
                 asyncio.sleep(1)

             for item in batch:
                 processed_count += 1

                 # Check whether a progress checkpoint is due
                 if processed_count % self.large_scale_helper.checkpoint_interval == 0:
                     self.progress_manager.save_progress(
                         processed_count=processed_count,
                         skipped_count=skipped_count
                     )

                 # Generate the request
                 request = self.create_request_from_item(item)
                 if request:
                     yield request
                 else:
                     skipped_count += 1

         # Save final progress
         self.progress_manager.save_progress(
             processed_count=processed_count,
             skipped_count=skipped_count,
             completed=True
         )

         self.logger.info(f"Processing finished! Total: {processed_count}, skipped: {skipped_count}")

     def create_request_from_item(self, item):
         """Create a request from a data item (must be implemented by subclasses)"""
         raise NotImplementedError("Subclasses must implement create_request_from_item")
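For orientation, a minimal usage sketch of the helpers in the file above. It assumes a Scrapy-style Spider base class and Request class exported from the top-level crawlo package (an assumption not confirmed by this diff) and a hypothetical urls.txt seed file; only DataSourceAdapter, LargeScaleSpiderMixin, and their attributes come from the module shown.

# Minimal sketch; `Spider`/`Request` import paths and urls.txt are assumptions.
from crawlo import Request, Spider
from crawlo.utils.large_scale_helper import DataSourceAdapter, LargeScaleSpiderMixin


class UrlFileSpider(LargeScaleSpiderMixin, Spider):
    # Tunables read by LargeScaleSpiderMixin.__init__ via getattr()
    name = 'url_file_spider'
    batch_size = 500            # items pulled from the data source per batch
    checkpoint_interval = 2000  # save progress every 2000 processed items
    max_memory_mb = 800         # pause threshold used by MemoryOptimizer

    def start_requests(self):
        # from_file() returns a get_batch(offset, limit) callable, which
        # batch_iterator() treats as a function-style data source.
        data_source = DataSourceAdapter.from_file('urls.txt')  # hypothetical seed file
        yield from self.create_streaming_start_requests(data_source)

    def create_request_from_item(self, item):
        # `item` is one stripped line of urls.txt; returning None counts it as skipped
        if not item:
            return None
        return Request(url=item, callback=self.parse)

    def parse(self, response):
        ...

Because create_streaming_start_requests reloads processed_count from the progress file, a restarted crawl resumes from the last checkpoint instead of re-reading the whole seed file.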