crawlo 1.4.0__py3-none-any.whl → 1.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (45)
  1. crawlo/__init__.py +9 -4
  2. crawlo/__version__.py +1 -1
  3. crawlo/commands/run.py +1 -1
  4. crawlo/core/__init__.py +8 -2
  5. crawlo/core/scheduler.py +2 -2
  6. crawlo/extension/log_interval.py +44 -7
  7. crawlo/initialization/__init__.py +6 -2
  8. crawlo/middleware/middleware_manager.py +1 -1
  9. crawlo/mode_manager.py +13 -7
  10. crawlo/pipelines/bloom_dedup_pipeline.py +5 -15
  11. crawlo/pipelines/database_dedup_pipeline.py +5 -8
  12. crawlo/pipelines/memory_dedup_pipeline.py +5 -15
  13. crawlo/pipelines/redis_dedup_pipeline.py +2 -15
  14. crawlo/project.py +18 -7
  15. crawlo/settings/default_settings.py +114 -150
  16. crawlo/settings/setting_manager.py +14 -9
  17. crawlo/tools/distributed_coordinator.py +4 -8
  18. crawlo/utils/fingerprint.py +123 -0
  19. {crawlo-1.4.0.dist-info → crawlo-1.4.1.dist-info}/METADATA +1 -1
  20. {crawlo-1.4.0.dist-info → crawlo-1.4.1.dist-info}/RECORD +45 -29
  21. examples/test_project/__init__.py +7 -0
  22. examples/test_project/run.py +35 -0
  23. examples/test_project/test_project/__init__.py +4 -0
  24. examples/test_project/test_project/items.py +18 -0
  25. examples/test_project/test_project/middlewares.py +119 -0
  26. examples/test_project/test_project/pipelines.py +97 -0
  27. examples/test_project/test_project/settings.py +170 -0
  28. examples/test_project/test_project/spiders/__init__.py +10 -0
  29. examples/test_project/test_project/spiders/of_week_dis.py +144 -0
  30. tests/debug_framework_logger.py +1 -1
  31. tests/debug_log_levels.py +1 -1
  32. tests/test_all_pipeline_fingerprints.py +134 -0
  33. tests/test_default_header_middleware.py +242 -87
  34. tests/test_fingerprint_consistency.py +136 -0
  35. tests/test_fingerprint_simple.py +52 -0
  36. tests/test_framework_logger.py +1 -1
  37. tests/test_framework_startup.py +1 -1
  38. tests/test_hash_performance.py +100 -0
  39. tests/test_mode_change.py +1 -1
  40. tests/test_offsite_middleware.py +185 -162
  41. tests/test_offsite_middleware_simple.py +204 -0
  42. tests/test_pipeline_fingerprint_consistency.py +87 -0
  43. {crawlo-1.4.0.dist-info → crawlo-1.4.1.dist-info}/WHEEL +0 -0
  44. {crawlo-1.4.0.dist-info → crawlo-1.4.1.dist-info}/entry_points.txt +0 -0
  45. {crawlo-1.4.0.dist-info → crawlo-1.4.1.dist-info}/top_level.txt +0 -0
crawlo/__init__.py CHANGED
@@ -28,30 +28,35 @@ from crawlo import tools
 
 # Framework core modules - use TYPE_CHECKING to avoid circular imports
 if TYPE_CHECKING:
-    from crawlo.core.framework_initializer import get_framework_initializer, initialize_framework
+    from crawlo.initialization import get_framework_initializer, initialize_framework
 
 # For backward compatibility, import the cleaners-related functionality from tools
 import crawlo.tools as cleaners
 
+
 # Lazy-import helper functions
 def get_framework_initializer():
     """Lazily import get_framework_initializer to avoid circular dependencies"""
-    from crawlo.core.framework_initializer import get_framework_initializer as _get_framework_initializer
+    from crawlo.initialization import get_framework_initializer as _get_framework_initializer
     return _get_framework_initializer()
 
+
 def initialize_framework(custom_settings=None):
     """Lazily import initialize_framework to avoid circular dependencies"""
-    from crawlo.core.framework_initializer import initialize_framework as _initialize_framework
+    from crawlo.initialization import initialize_framework as _initialize_framework
     return _initialize_framework(custom_settings)
 
+
 # Backward-compatible alias
 def get_bootstrap_manager():
     """Backward-compatible alias"""
     return get_framework_initializer()
 
+
 # Version number: prefer reading from package metadata
 try:
     from importlib.metadata import version
+
     __version__ = version("crawlo")
 except Exception:
     # May not be installed in development mode; fall back to __version__.py or dev
@@ -85,4 +90,4 @@ __all__ = [
     'get_framework_initializer',
     'get_bootstrap_manager',
     '__version__',
-]
+]
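
With the lazy-import indirection above, callers keep using the top-level crawlo API while the real implementations now live in crawlo.initialization. A minimal usage sketch (the custom_settings key is illustrative):

    import crawlo

    # Both helpers resolve their targets lazily, so importing crawlo no longer
    # pulls in the initialization machinery at module load time.
    initializer = crawlo.get_framework_initializer()
    settings = crawlo.initialize_framework({'LOG_LEVEL': 'INFO'})  # illustrative settings key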
crawlo/__version__.py CHANGED
@@ -1 +1 @@
-__version__ = '1.4.0'
+__version__ = '1.4.1'
crawlo/commands/run.py CHANGED
@@ -23,7 +23,7 @@ from crawlo.crawler import CrawlerProcess
 from crawlo.project import get_settings, _find_project_root
 # Use the new unified initialization system
 from crawlo.initialization import initialize_framework
-from crawlo.core import get_framework_initializer
+from crawlo.initialization import get_framework_initializer
 from crawlo.utils.log import get_logger
 
 # Fetch the logger lazily, after the logging system has been configured
crawlo/core/__init__.py CHANGED
@@ -10,37 +10,43 @@ from ..initialization import (
10
10
  is_framework_ready
11
11
  )
12
12
 
13
+
13
14
  # 向后兼容的别名
14
15
  def async_initialize_framework(*args, **kwargs):
15
16
  """Async wrapper for framework initialization"""
16
17
  return initialize_framework(*args, **kwargs)
17
18
 
19
+
18
20
  def get_framework_initializer():
19
21
  """Get framework initializer - compatibility function"""
20
22
  from ..initialization.core import CoreInitializer
21
23
  return CoreInitializer()
22
24
 
25
+
23
26
  def get_framework_logger(name='crawlo.core'):
24
27
  """Get framework logger - compatibility function"""
25
28
  from ..logging import get_logger
26
29
  return get_logger(name)
27
30
 
31
+
28
32
  # 向后兼容
29
33
  def bootstrap_framework(*args, **kwargs):
30
34
  """Bootstrap framework - compatibility function"""
31
35
  return initialize_framework(*args, **kwargs)
32
36
 
37
+
33
38
  def get_bootstrap_manager():
34
39
  """Get bootstrap manager - compatibility function"""
35
40
  return get_framework_initializer()
36
41
 
42
+
37
43
  __all__ = [
38
44
  'initialize_framework',
39
- 'async_initialize_framework',
45
+ 'async_initialize_framework',
40
46
  'get_framework_initializer',
41
47
  'is_framework_ready',
42
48
  'get_framework_logger',
43
49
  # 向后兼容
44
50
  'bootstrap_framework',
45
51
  'get_bootstrap_manager'
46
- ]
52
+ ]
crawlo/core/scheduler.py CHANGED
@@ -77,8 +77,8 @@ class Scheduler:
         # Only recreate the filter instance when the configuration actually needs updating
         # Check whether a configuration update really took place
         filter_updated = (
-            (self.queue_manager._queue_type == QueueType.REDIS and 'memory_filter' in self.crawler.settings.get('FILTER_CLASS', '')) or
-            (self.queue_manager._queue_type == QueueType.MEMORY and ('aioredis_filter' in self.crawler.settings.get('FILTER_CLASS', '') or 'redis_filter' in self.crawler.settings.get('FILTER_CLASS', '')))
+            (self.queue_manager._queue_type == QueueType.REDIS and 'aioredis_filter' in self.crawler.settings.get('FILTER_CLASS', '')) or
+            (self.queue_manager._queue_type == QueueType.MEMORY and 'memory_filter' in self.crawler.settings.get('FILTER_CLASS', ''))
         )
 
         if needs_config_update or filter_updated:
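
Read as a consistency check, the rewritten predicate is now true when the configured FILTER_CLASS already matches the queue backend, where the old version flagged the mismatched pairs instead. A self-contained sketch of the new logic (the QueueType stub stands in for crawlo's enum, for illustration only):

    from enum import Enum

    class QueueType(Enum):  # stand-in for crawlo's QueueType
        REDIS = 'redis'
        MEMORY = 'memory'

    def filter_updated(queue_type: QueueType, filter_class: str) -> bool:
        # True when the filter class corresponds to the queue backend
        return ((queue_type is QueueType.REDIS and 'aioredis_filter' in filter_class) or
                (queue_type is QueueType.MEMORY and 'memory_filter' in filter_class))

    assert filter_updated(QueueType.REDIS, 'crawlo.filters.aioredis_filter.AioRedisFilter')
    assert not filter_updated(QueueType.REDIS, 'crawlo.filters.memory_filter.MemoryFilter')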
crawlo/extension/log_interval.py CHANGED
@@ -15,11 +15,23 @@ class LogIntervalExtension(object):
         self.item_count = 0
         self.response_count = 0
         self.seconds = crawler.settings.get('INTERVAL', 60)  # default: 60 seconds
-        self.interval = int(self.seconds / 60) if self.seconds % 60 == 0 else self.seconds
-        self.interval = "" if self.interval == 1 else self.interval
-        self.unit = 'min' if self.seconds % 60 == 0 else 's'
+
+        # Fix the time-unit calculation logic
+        if self.seconds % 60 == 0:
+            self.interval = int(self.seconds / 60)
+            self.unit = 'min'
+        else:
+            self.interval = self.seconds
+            self.unit = 's'
+
+        # Handle the singular case
+        if self.interval == 1 and self.unit == 'min':
+            self.interval_display = ""
+        else:
+            self.interval_display = str(self.interval)
 
         self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+        self.logger.info(f"LogIntervalExtension initialized with interval: {self.seconds} seconds")
 
     @classmethod
     def create_instance(cls, crawler: Any) -> 'LogIntervalExtension':
@@ -29,9 +41,12 @@ class LogIntervalExtension(object):
         return o
 
     async def spider_opened(self) -> None:
+        self.logger.info("Spider opened, starting interval logging task")
         self.task = asyncio.create_task(self.interval_log())
+        self.logger.info("Interval logging task started")
 
     async def spider_closed(self) -> None:
+        self.logger.info("Spider closed, stopping interval logging task")
         if self.task:
             self.task.cancel()
             try:
@@ -41,17 +56,39 @@ class LogIntervalExtension(object):
             self.task = None
 
     async def interval_log(self) -> None:
+        iteration = 0
         while True:
             try:
+                iteration += 1
+                self.logger.debug(f"Interval log iteration {iteration} starting")
                 last_item_count = self.stats.get_value('item_successful_count', default=0)
                 last_response_count = self.stats.get_value('response_received_count', default=0)
                 item_rate = last_item_count - self.item_count
                 response_rate = last_response_count - self.response_count
+
+                # Extra debug information
+                self.logger.debug(f"Debug info - Iteration: {iteration}, Last item count: {last_item_count}, Last response count: {last_response_count}")
+                self.logger.debug(f"Debug info - Previous item count: {self.item_count}, Previous response count: {self.response_count}")
+                self.logger.debug(f"Debug info - Item rate: {item_rate}, Response rate: {response_rate}")
+
                 self.item_count, self.response_count = last_item_count, last_response_count
-                self.logger.info(
-                    f'Crawled {last_response_count} pages (at {response_rate} pages/{self.interval}{self.unit}),'
-                    f' Got {last_item_count} items (at {item_rate} items/{self.interval}{self.unit}).'
-                )
+
+                # Fix the rate calculation so the correct unit is used
+                if self.unit == 'min' and self.seconds > 0:
+                    # Convert to per-minute rates
+                    pages_per_min = response_rate * 60 / self.seconds if self.seconds > 0 else 0
+                    items_per_min = item_rate * 60 / self.seconds if self.seconds > 0 else 0
+                    self.logger.info(
+                        f'Crawled {last_response_count} pages (at {pages_per_min:.0f} pages/min),'
+                        f' Got {last_item_count} items (at {items_per_min:.0f} items/min).'
+                    )
+                else:
+                    # Use the original unit
+                    self.logger.info(
+                        f'Crawled {last_response_count} pages (at {response_rate} pages/{self.interval_display}{self.unit}),'
+                        f' Got {last_item_count} items (at {item_rate} items/{self.interval_display}{self.unit}).'
+                    )
+                self.logger.debug(f"Interval log iteration {iteration} completed, sleeping for {self.seconds} seconds")
                 await asyncio.sleep(self.seconds)
             except Exception as e:
                 self.logger.error(f"Error in interval logging: {e}")
crawlo/initialization/__init__.py CHANGED
@@ -16,25 +16,29 @@ from .context import InitializationContext
 from .core import CoreInitializer
 from .phases import InitializationPhase
 
+
 # Public interface
 def initialize_framework(settings=None, **kwargs):
     """Main entry point for initializing the framework"""
     return CoreInitializer().initialize(settings, **kwargs)
 
+
 def is_framework_ready():
     """Check whether the framework is ready"""
     return CoreInitializer().is_ready
 
+
 def get_framework_context():
     """Get the framework initialization context"""
     return CoreInitializer().context
 
+
 __all__ = [
     'InitializerRegistry',
-    'InitializationContext',
+    'InitializationContext',
     'CoreInitializer',
     'InitializationPhase',
     'initialize_framework',
     'is_framework_ready',
     'get_framework_context'
-]
+]
crawlo/middleware/middleware_manager.py CHANGED
@@ -86,7 +86,7 @@ class MiddlewareManager:
             response = await self._process_exception(request, exp)
         else:
             create_task(self.crawler.subscriber.notify(response_received, response, self.crawler.spider))
-            # self.crawler.stats.inc_value('response_received_count')
+            self._stats.inc_value('response_received_count')
         if isinstance(response, Response):
             response = await self._process_response(request, response)
         if isinstance(response, Request):
crawlo/mode_manager.py CHANGED
@@ -7,7 +7,7 @@
 
 Supported run modes:
 1. standalone - standalone mode (default)
-2. distributed - distributed mode
+2. distributed - distributed mode
 3. auto - auto-detection mode
 """
 import os
@@ -29,7 +29,7 @@ class ModeManager:
         # Initialize the logger lazily to avoid circular dependencies
         self._logger = None
         self._debug("运行模式管理器初始化完成")
-
+
     def _get_logger(self):
         """Get the logger instance lazily"""
         if self._logger is None:
@@ -40,7 +40,7 @@ class ModeManager:
             # If the logging system has not been initialized yet, return None
             pass
         return self._logger
-
+
     def _debug(self, message: str):
         """Debug logging"""
         logger = self._get_logger()
@@ -73,7 +73,7 @@ class ModeManager:
             redis_url = f'redis://:{redis_password}@{redis_host}:{redis_port}/{redis_db}'
         else:
            redis_url = f'redis://{redis_host}:{redis_port}/{redis_db}'
-
+
         return {
             'QUEUE_TYPE': 'redis',
             'FILTER_CLASS': 'crawlo.filters.aioredis_filter.AioRedisFilter',
@@ -119,6 +119,7 @@ class ModeManager:
 
         if mode == RunMode.STANDALONE:
             mode_info = "使用单机模式 - 简单快速,适合开发和中小规模爬取"
+            # For standalone mode, if the user set QUEUE_TYPE to 'auto', keep the user's setting
             settings = self.get_standalone_settings()
             self._debug("应用单机模式配置")
 
@@ -142,8 +143,13 @@ class ModeManager:
             raise ValueError(f"不支持的运行模式: {mode}")
 
         # Merge user-defined configuration
-        user_settings = {k: v for k, v in kwargs.items()
-                         if k not in ['redis_host', 'redis_port', 'redis_password', 'project_name']}
+        user_settings = {
+            k: v for k,
+            v in kwargs.items() if k not in [
+                'redis_host',
+                'redis_port',
+                'redis_password',
+                'project_name']}
         settings.update(user_settings)
         self._debug(f"合并用户自定义配置: {list(user_settings.keys())}")
@@ -210,4 +216,4 @@ def auto_mode(**kwargs) -> Dict[str, Any]:
 def from_env(default_mode: str = 'standalone') -> Dict[str, Any]:
     """Create configuration from environment variables"""
     # Direct use of os.getenv() has been removed; configure these parameters via settings
-    raise RuntimeError("环境变量配置已移除,请在 settings 中配置相关参数")
+    raise RuntimeError("环境变量配置已移除,请在 settings 中配置相关参数")
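
For context, resolve_mode_settings is the entry point that crawlo/project.py calls below. A hedged usage sketch; every concrete value here is illustrative, and the exact return shape is inferred from the hunks above:

    from crawlo.mode_manager import ModeManager

    manager = ModeManager()
    # redis_host/redis_port/redis_password/project_name are consumed by the mode
    # itself; any other kwarg is merged in as a user setting, per the dict
    # comprehension above.
    settings = manager.resolve_mode_settings(
        'distributed',               # run mode, as passed from project.py
        project_name='my_project',   # illustrative
        redis_host='127.0.0.1',      # illustrative
        redis_port=6379,
    )
    print(settings['QUEUE_TYPE'])    # 'redis' for the distributed mode shown above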
crawlo/pipelines/bloom_dedup_pipeline.py CHANGED
@@ -38,6 +38,7 @@ except ImportError:
 
 from crawlo import Item
 from crawlo.spider import Spider
+from crawlo.utils.fingerprint import FingerprintGenerator
 from crawlo.utils.log import get_logger
 from crawlo.exceptions import DropItem, ItemDiscard
@@ -109,6 +110,9 @@ class BloomDedupPipeline:
                 self.logger.debug(f"Processing new item: {fingerprint[:20]}...")
                 return item
 
+        except ItemDiscard:
+            # Re-raise ItemDiscard so the pipeline manager can handle it correctly
+            raise
         except Exception as e:
             self.logger.error(f"Error processing item: {e}")
             # Continue processing on error to avoid losing data
@@ -123,21 +127,7 @@ class BloomDedupPipeline:
         :param item: the data item
         :return: fingerprint string
         """
-        # Convert the item into a serializable dict
-        try:
-            item_dict = item.to_dict()
-        except AttributeError:
-            # Compatibility for Item implementations without to_dict()
-            item_dict = dict(item)
-
-        # Sort the dict entries to ensure consistency
-        sorted_items = sorted(item_dict.items())
-
-        # Build the fingerprint string
-        fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
-
-        # Use SHA256 to produce a fixed-length fingerprint
-        return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
+        return FingerprintGenerator.item_fingerprint(item)
 
     def close_spider(self, spider: Spider) -> None:
         """
crawlo/pipelines/database_dedup_pipeline.py CHANGED
@@ -17,6 +17,7 @@ import aiomysql
 from crawlo import Item
 from crawlo.exceptions import DropItem, ItemDiscard
 from crawlo.spider import Spider
+from crawlo.utils.fingerprint import FingerprintGenerator
 from crawlo.utils.log import get_logger
 
 
@@ -140,6 +141,9 @@ class DatabaseDedupPipeline:
                 self.logger.debug(f"Processing new item: {fingerprint[:20]}...")
                 return item
 
+        except ItemDiscard:
+            # Re-raise ItemDiscard so the pipeline manager can handle it correctly
+            raise
         except Exception as e:
             self.logger.error(f"Error processing item: {e}")
             # Continue processing on error to avoid losing data
@@ -190,11 +194,4 @@ class DatabaseDedupPipeline:
         :param item: the data item
         :return: fingerprint string
         """
-        # Convert the item into a serializable dict
-        try:
-            item_dict = item.to_dict()
-        except AttributeError:
-            # Compatibility for Item implementations without to_dict()
-            item_dict = dict(item)
-
-        # Sort the dict entries to ensure consistency
+        return FingerprintGenerator.item_fingerprint(item)
crawlo/pipelines/memory_dedup_pipeline.py CHANGED
@@ -18,6 +18,7 @@ from typing import Set
 from crawlo import Item
 from crawlo.exceptions import DropItem, ItemDiscard
 from crawlo.spider import Spider
+from crawlo.utils.fingerprint import FingerprintGenerator
 from crawlo.utils.log import get_logger
 
 
@@ -71,6 +72,9 @@ class MemoryDedupPipeline:
                 self.logger.debug(f"Processing new item: {fingerprint[:20]}...")
                 return item
 
+        except ItemDiscard:
+            # Re-raise ItemDiscard so the pipeline manager can handle it correctly
+            raise
         except Exception as e:
             self.logger.error(f"Error processing item: {e}")
             # Continue processing on error to avoid losing data
@@ -85,21 +89,7 @@ class MemoryDedupPipeline:
         :param item: the data item
         :return: fingerprint string
         """
-        # Convert the item into a serializable dict
-        try:
-            item_dict = item.to_dict()
-        except AttributeError:
-            # Compatibility for Item implementations without to_dict()
-            item_dict = dict(item)
-
-        # Sort the dict entries to ensure consistency
-        sorted_items = sorted(item_dict.items())
-
-        # Build the fingerprint string
-        fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
-
-        # Use SHA256 to produce a fixed-length fingerprint
-        return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
+        return FingerprintGenerator.item_fingerprint(item)
 
     def close_spider(self, spider: Spider) -> None:
         """
crawlo/pipelines/redis_dedup_pipeline.py CHANGED
@@ -18,6 +18,7 @@ from typing import Optional
 from crawlo import Item
 from crawlo.spider import Spider
 from crawlo.exceptions import DropItem, ItemDiscard
+from crawlo.utils.fingerprint import FingerprintGenerator
 from crawlo.utils.log import get_logger
 
 
@@ -132,21 +133,7 @@ class RedisDedupPipeline:
         :param item: the data item
         :return: fingerprint string
         """
-        # Convert the item into a serializable dict
-        try:
-            item_dict = item.to_dict()
-        except AttributeError:
-            # Compatibility for Item implementations without to_dict()
-            item_dict = dict(item)
-
-        # Sort the dict entries to ensure consistency
-        sorted_items = sorted(item_dict.items())
-
-        # Build the fingerprint string
-        fingerprint_string = '|'.join([f"{k}={v}" for k, v in sorted_items if v is not None])
-
-        # Use SHA256 to produce a fixed-length fingerprint
-        return hashlib.sha256(fingerprint_string.encode('utf-8')).hexdigest()
+        return FingerprintGenerator.item_fingerprint(item)
 
     def close_spider(self, spider: Spider) -> None:
         """
crawlo/project.py CHANGED
@@ -289,11 +289,22 @@ def _load_project_settings(custom_settings: Optional[dict] = None) -> SettingManager:
     if run_mode:
         from crawlo.mode_manager import ModeManager
         mode_manager = ModeManager()
-        mode_settings = mode_manager.resolve_mode_settings(run_mode)
-        # Merge the mode settings without overriding settings the user already provided
+        # Get the project name and pass it to the mode configuration
+        project_name = settings.get('PROJECT_NAME', 'crawlo')
+        mode_settings = mode_manager.resolve_mode_settings(run_mode, project_name=project_name)
+
+        # Special case: if the user explicitly set QUEUE_TYPE to 'auto' in settings.py,
+        # keep that setting even in standalone mode
+        user_queue_type = settings.get('QUEUE_TYPE')
+        if user_queue_type == 'auto' and run_mode == 'standalone':
+            mode_settings['QUEUE_TYPE'] = 'auto'
+
+        # Merge the mode settings
         for key, value in mode_settings.items():
-            # Apply a mode setting only when the user has not set that key
-            if key not in settings.attributes:
+            # For certain keys the mode configuration takes precedence over user settings,
+            # in particular keys that are tightly coupled to the run mode
+            priority_keys = ['QUEUE_TYPE', 'FILTER_CLASS', 'DEFAULT_DEDUP_PIPELINE']
+            if key in priority_keys or key not in settings.attributes:
                 settings.set(key, value)
         _temp_debug(f"🔧 已应用 {run_mode} 模式配置")
@@ -311,9 +322,9 @@ def get_settings(custom_settings: Optional[dict] = None) -> SettingManager:
     Get the settings manager instance (main entry function)
 
     Note: this function is now a backward-compatible entry point; the actual initialization logic has moved
-    to the crawlo.core.framework_initializer module. The new initialization style is recommended:
+    to the crawlo.initialization module. The new initialization style is recommended:
 
-    >>> from crawlo.core.framework_initializer import initialize_framework
+    >>> from crawlo.initialization import initialize_framework
     >>> settings = initialize_framework(custom_settings)
 
     Args:
@@ -323,5 +334,5 @@ def get_settings(custom_settings: Optional[dict] = None) -> SettingManager:
         SettingManager: the loaded settings instance
     """
     # Use the new unified initialization manager
-    from crawlo.core.framework_initializer import initialize_framework
+    from crawlo.initialization import initialize_framework
     return initialize_framework(custom_settings)
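
Combined with the crawlo/__init__.py changes above, the recommended entry point after this release is the one the docstring now shows. A minimal, illustrative call:

    from crawlo.initialization import initialize_framework

    # QUEUE_TYPE='auto' is exactly the user setting the standalone-mode special
    # case above is designed to preserve; the key choice here is illustrative.
    settings = initialize_framework({'QUEUE_TYPE': 'auto'})
    print(settings.get('QUEUE_TYPE'))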