crawlo 1.1.4-py3-none-any.whl → 1.1.5-py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their public registries; it is provided for informational purposes only.

Potentially problematic release.

Files changed (186)
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +285 -285
  13. crawlo/commands/startproject.py +300 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +309 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +280 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +206 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +56 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +166 -162
  71. crawlo/project.py +153 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +320 -307
  74. crawlo/queue/redis_priority_queue.py +277 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +216 -278
  77. crawlo/settings/setting_manager.py +99 -99
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
  126. crawlo-1.1.5.dist-info/RECORD +185 -0
  127. examples/__init__.py +7 -7
  128. tests/__init__.py +7 -7
  129. tests/advanced_tools_example.py +276 -0
  130. tests/authenticated_proxy_example.py +237 -0
  131. tests/cleaners_example.py +161 -0
  132. tests/config_validation_demo.py +103 -0
  133. {examples → tests}/controlled_spider_example.py +205 -205
  134. tests/date_tools_example.py +181 -0
  135. tests/dynamic_loading_example.py +524 -0
  136. tests/dynamic_loading_test.py +105 -0
  137. tests/env_config_example.py +134 -0
  138. tests/error_handling_example.py +172 -0
  139. tests/redis_key_validation_demo.py +131 -0
  140. tests/response_improvements_example.py +145 -0
  141. tests/test_advanced_tools.py +149 -0
  142. tests/test_all_redis_key_configs.py +146 -0
  143. tests/test_authenticated_proxy.py +142 -0
  144. tests/test_cleaners.py +55 -0
  145. tests/test_comprehensive.py +147 -0
  146. tests/test_config_validator.py +194 -0
  147. tests/test_date_tools.py +124 -0
  148. tests/test_dynamic_downloaders_proxy.py +125 -0
  149. tests/test_dynamic_proxy.py +93 -0
  150. tests/test_dynamic_proxy_config.py +147 -0
  151. tests/test_dynamic_proxy_real.py +110 -0
  152. tests/test_edge_cases.py +304 -0
  153. tests/test_enhanced_error_handler.py +271 -0
  154. tests/test_env_config.py +122 -0
  155. tests/test_error_handler_compatibility.py +113 -0
  156. tests/test_final_validation.py +153 -153
  157. tests/test_framework_env_usage.py +104 -0
  158. tests/test_integration.py +357 -0
  159. tests/test_item_dedup_redis_key.py +123 -0
  160. tests/test_parsel.py +30 -0
  161. tests/test_performance.py +328 -0
  162. tests/test_proxy_health_check.py +32 -32
  163. tests/test_proxy_middleware_integration.py +136 -136
  164. tests/test_proxy_providers.py +56 -56
  165. tests/test_proxy_stats.py +19 -19
  166. tests/test_proxy_strategies.py +59 -59
  167. tests/test_queue_manager_redis_key.py +177 -0
  168. tests/test_redis_config.py +28 -28
  169. tests/test_redis_connection_pool.py +295 -0
  170. tests/test_redis_key_naming.py +182 -0
  171. tests/test_redis_key_validator.py +124 -0
  172. tests/test_redis_queue.py +224 -224
  173. tests/test_request_serialization.py +70 -70
  174. tests/test_response_improvements.py +153 -0
  175. tests/test_scheduler.py +241 -241
  176. tests/test_simple_response.py +62 -0
  177. tests/test_telecom_spider_redis_key.py +206 -0
  178. tests/test_template_content.py +88 -0
  179. tests/test_template_redis_key.py +135 -0
  180. tests/test_tools.py +154 -0
  181. tests/tools_example.py +258 -0
  182. crawlo/core/enhanced_engine.py +0 -190
  183. crawlo-1.1.4.dist-info/RECORD +0 -117
  184. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  185. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  186. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
crawlo/extension/logging_extension.py
@@ -1,44 +1,44 @@
(Lines 1-43 are removed and re-added with identical text, i.e. a whitespace or line-ending-only change; line 44, `pass`, is unchanged context. The content is shown once, with comments translated from Chinese.)

from typing import Any
from crawlo.exceptions import NotConfigured
from crawlo.utils.log import get_logger
from crawlo.utils.log import LoggerManager


class CustomLoggerExtension:
    """
    Logging system initialization extension
    Follows the same interface convention as ExtensionManager: uses create_instance
    """

    def __init__(self, settings: Any):
        self.settings = settings
        # Initialize the global logging configuration
        LoggerManager.configure(settings)

    @classmethod
    def create_instance(cls, crawler: Any, *args: Any, **kwargs: Any) -> 'CustomLoggerExtension':
        """
        Factory method: compatible with how ExtensionManager creates components
        Called by ExtensionManager
        """
        # Whether the extension is enabled can be controlled via settings
        log_file = crawler.settings.get('LOG_FILE')
        log_enable_custom = crawler.settings.get('LOG_ENABLE_CUSTOM', False)

        # Only disable when no log file is configured and custom logging is not enabled
        if not log_file and not log_enable_custom:
            raise NotConfigured("CustomLoggerExtension: LOG_FILE not set and LOG_ENABLE_CUSTOM=False")

        return cls(crawler.settings)

    def spider_opened(self, spider: Any) -> None:
        logger = get_logger(__name__)
        try:
            logger.info(
                f"CustomLoggerExtension: Logging initialized. "
                f"LOG_FILE={self.settings.get('LOG_FILE')}, "
                f"LOG_LEVEL={self.settings.get('LOG_LEVEL')}"
            )
        except Exception as e:
            # Even if the initialization message cannot be logged, it must not affect program execution
            pass
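
The create_instance guard above is the opt-out hook: when neither LOG_FILE nor LOG_ENABLE_CUSTOM is set, the extension raises NotConfigured and is presumably skipped by the ExtensionManager, as the exception name suggests. Below is a minimal sketch of project settings that keep CustomLoggerExtension active, using only the keys this hunk reads; the file path is a placeholder, not taken from the package.

    # settings.py (sketch): either setting alone is enough, since the guard only
    # raises NotConfigured when LOG_FILE is falsy AND LOG_ENABLE_CUSTOM is False.
    LOG_FILE = 'logs/crawlo.log'   # placeholder path; any truthy value enables the extension
    LOG_ENABLE_CUSTOM = True       # explicit opt-in; works even without a log file
    LOG_LEVEL = 'INFO'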
crawlo/extension/memory_monitor.py
@@ -1,89 +1,105 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import asyncio
-import psutil
-from typing import Any, Optional
-
-from crawlo.utils.log import get_logger
-from crawlo.event import spider_opened, spider_closed
-
-
-class MemoryMonitorExtension:
-    """
-    Memory monitor extension
-    Periodically monitors the crawler process's memory usage and warns when thresholds are exceeded
-    """
-
-    def __init__(self, crawler: Any):
-        self.task: Optional[asyncio.Task] = None
-        self.process = psutil.Process()
-        self.settings = crawler.settings
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-
-        # Read configuration parameters
-        self.interval = self.settings.get_int('MEMORY_MONITOR_INTERVAL', 60)  # default: check every 60 seconds
-        self.warning_threshold = self.settings.get_float('MEMORY_WARNING_THRESHOLD', 80.0)  # default: 80% warning threshold
-        self.critical_threshold = self.settings.get_float('MEMORY_CRITICAL_THRESHOLD', 90.0)  # default: 90% critical threshold
-
-    @classmethod
-    def create_instance(cls, crawler: Any) -> 'MemoryMonitorExtension':
-        # Only create an instance when enabled in settings
-        if not crawler.settings.get_bool('MEMORY_MONITOR_ENABLED', False):
-            from crawlo.exceptions import NotConfigured
-            raise NotConfigured("MemoryMonitorExtension: MEMORY_MONITOR_ENABLED is False")
-
-        o = cls(crawler)
-        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
-        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
-        return o
-
-    async def spider_opened(self) -> None:
-        """Start monitoring when the spider opens"""
-        self.task = asyncio.create_task(self._monitor_loop())
-        self.logger.info(
-            f"Memory monitor started. Interval: {self.interval}s, "
-            f"Warning threshold: {self.warning_threshold}%, Critical threshold: {self.critical_threshold}%"
-        )
-
-    async def spider_closed(self) -> None:
-        """Stop monitoring when the spider closes"""
-        if self.task:
-            self.task.cancel()
-            try:
-                await self.task
-            except asyncio.CancelledError:
-                pass
-            self.task = None
-        self.logger.info("Memory monitor stopped.")
-
-    async def _monitor_loop(self) -> None:
-        """Memory monitoring loop"""
-        while True:
-            try:
-                # Collect memory usage information
-                memory_info = self.process.memory_info()
-                memory_percent = self.process.memory_percent()
-
-                # Log memory usage
-                self.logger.debug(
-                    f"Memory usage: {memory_percent:.2f}% "
-                    f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB, "
-                    f"VMS: {memory_info.vms / 1024 / 1024:.2f} MB)"
-                )
-
-                # Check whether thresholds are exceeded
-                if memory_percent >= self.critical_threshold:
-                    self.logger.critical(
-                        f"Memory usage critical: {memory_percent:.2f}% "
-                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
-                    )
-                elif memory_percent >= self.warning_threshold:
-                    self.logger.warning(
-                        f"Memory usage high: {memory_percent:.2f}% "
-                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
-                    )
-
-                await asyncio.sleep(self.interval)
-            except Exception as e:
-                self.logger.error(f"Error in memory monitoring: {e}")
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import asyncio
+import psutil
+from typing import Any, Optional
+
+from crawlo.utils.log import get_logger
+from crawlo.utils.error_handler import ErrorHandler
+from crawlo.event import spider_opened, spider_closed
+
+
+class MemoryMonitorExtension:
+    """
+    Memory monitor extension
+    Periodically monitors the crawler process's memory usage and warns when thresholds are exceeded
+    """
+
+    def __init__(self, crawler: Any):
+        self.task: Optional[asyncio.Task] = None
+        self.process = psutil.Process()
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+        self.error_handler = ErrorHandler(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration parameters
+        self.interval = self.settings.get_int('MEMORY_MONITOR_INTERVAL', 60)  # default: check every 60 seconds
+        self.warning_threshold = self.settings.get_float('MEMORY_WARNING_THRESHOLD', 80.0)  # default: 80% warning threshold
+        self.critical_threshold = self.settings.get_float('MEMORY_CRITICAL_THRESHOLD', 90.0)  # default: 90% critical threshold
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'MemoryMonitorExtension':
+        # Only create an instance when enabled in settings
+        if not crawler.settings.get_bool('MEMORY_MONITOR_ENABLED', False):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("MemoryMonitorExtension: MEMORY_MONITOR_ENABLED is False")
+
+        o = cls(crawler)
+        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+        return o
+
+    async def spider_opened(self) -> None:
+        """Start monitoring when the spider opens"""
+        try:
+            self.task = asyncio.create_task(self._monitor_loop())
+            self.logger.info(
+                f"Memory monitor started. Interval: {self.interval}s, "
+                f"Warning threshold: {self.warning_threshold}%, Critical threshold: {self.critical_threshold}%"
+            )
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to start memory monitoring",
+                raise_error=False
+            )
+
+    async def spider_closed(self) -> None:
+        """Stop monitoring when the spider closes"""
+        try:
+            if self.task:
+                self.task.cancel()
+                try:
+                    await self.task
+                except asyncio.CancelledError:
+                    pass
+                self.task = None
+            self.logger.info("Memory monitor stopped.")
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to stop memory monitoring",
+                raise_error=False
+            )
+
+    async def _monitor_loop(self) -> None:
+        """Memory monitoring loop"""
+        while True:
+            try:
+                # Collect memory usage information
+                memory_info = self.process.memory_info()
+                memory_percent = self.process.memory_percent()
+
+                # Log memory usage
+                self.logger.debug(
+                    f"Memory usage: {memory_percent:.2f}% "
+                    f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB, "
+                    f"VMS: {memory_info.vms / 1024 / 1024:.2f} MB)"
+                )
+
+                # Check whether thresholds are exceeded
+                if memory_percent >= self.critical_threshold:
+                    self.logger.critical(
+                        f"Memory usage critical: {memory_percent:.2f}% "
+                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
+                    )
+                elif memory_percent >= self.warning_threshold:
+                    self.logger.warning(
+                        f"Memory usage high: {memory_percent:.2f}% "
+                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
+                    )
+
+                await asyncio.sleep(self.interval)
+            except Exception as e:
+                self.logger.error(f"Error in memory monitoring: {e}")
                 await asyncio.sleep(self.interval)
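
The only functional change in this hunk is the new ErrorHandler plumbing. crawlo/utils/error_handler.py is a new file in this release (+126 lines, item 110 in the list above) and its body is not shown here, so the API can only be inferred from these call sites. A hypothetical stand-in with the shape those calls assume:

    import logging

    class ErrorHandler:
        # Hypothetical stand-in; the real crawlo.utils.error_handler is not shown in this diff.
        # Shape inferred from the call sites above: log the exception with a context string,
        # and only re-raise when raise_error is True.
        def __init__(self, name: str, log_level=None):
            self.logger = logging.getLogger(name)
            if log_level is not None:
                self.logger.setLevel(log_level)

        def handle_error(self, exc: Exception, context: str = "", raise_error: bool = True) -> None:
            self.logger.error(f"{context}: {exc}" if context else str(exc))
            if raise_error:
                raise exc

Both call sites pass raise_error=False, so a failure while starting or stopping the monitor is logged and swallowed rather than propagated into the crawl.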
crawlo/extension/performance_profiler.py
@@ -1,118 +1,134 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import io
-import os
-import pstats
-import asyncio
-import cProfile
-from typing import Any, Optional
-
-from crawlo.utils.log import get_logger
-from crawlo.event import spider_opened, spider_closed
-
-
-class PerformanceProfilerExtension:
-    """
-    Performance profiling extension
-    Profiles the crawler while it runs to help optimize its performance
-    """
-
-    def __init__(self, crawler: Any):
-        self.settings = crawler.settings
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-
-        # Read configuration parameters
-        self.enabled = self.settings.get_bool('PERFORMANCE_PROFILER_ENABLED', False)
-        self.output_dir = self.settings.get('PERFORMANCE_PROFILER_OUTPUT_DIR', 'profiling')
-        self.interval = self.settings.get_int('PERFORMANCE_PROFILER_INTERVAL', 300)  # default: 5 minutes
-
-        self.profiler: Optional[cProfile.Profile] = None
-        self.task: Optional[asyncio.Task] = None
-
-        # Create the output directory
-        if self.enabled:
-            os.makedirs(self.output_dir, exist_ok=True)
-
-    @classmethod
-    def create_instance(cls, crawler: Any) -> 'PerformanceProfilerExtension':
-        # Only create an instance when enabled in settings
-        if not crawler.settings.get_bool('PERFORMANCE_PROFILER_ENABLED', False):
-            from crawlo.exceptions import NotConfigured
-            raise NotConfigured("PerformanceProfilerExtension: PERFORMANCE_PROFILER_ENABLED is False")
-
-        o = cls(crawler)
-        if o.enabled:
-            crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
-            crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
-        return o
-
-    async def spider_opened(self) -> None:
-        """Start profiling when the spider opens"""
-        if not self.enabled:
-            return
-
-        self.profiler = cProfile.Profile()
-        self.profiler.enable()
-
-        # Start the task that periodically saves profiling results
-        self.task = asyncio.create_task(self._periodic_save())
-
-        self.logger.info("Performance profiler started.")
-
-    async def spider_closed(self) -> None:
-        """Stop profiling and save results when the spider closes"""
-        if not self.enabled or not self.profiler:
-            return
-
-        # Stop the periodic-save task
-        if self.task:
-            self.task.cancel()
-            try:
-                await self.task
-            except asyncio.CancelledError:
-                pass
-
-        # Stop the profiler and save the final results
-        self.profiler.disable()
-
-        # Save the profiling results
-        await self._save_profile("final")
-        self.logger.info("Performance profiler stopped and results saved.")
-
-    async def _periodic_save(self) -> None:
-        """Periodically save profiling results"""
-        counter = 1
-        while True:
-            try:
-                await asyncio.sleep(self.interval)
-                if self.profiler:
-                    # Temporarily disable the profiler to save results
-                    self.profiler.disable()
-                    await self._save_profile(f"periodic_{counter}")
-                    counter += 1
-                    # Re-enable the profiler
-                    self.profiler.enable()
-            except asyncio.CancelledError:
-                break
-            except Exception as e:
-                self.logger.error(f"Error in periodic profiling save: {e}")
-
-    async def _save_profile(self, name: str) -> None:
-        """Save profiling results to a file"""
-        try:
-            # Create an in-memory string stream
-            s = io.StringIO()
-            ps = pstats.Stats(self.profiler, stream=s)
-
-            # Sort and print the statistics
-            ps.sort_stats('cumulative')
-            ps.print_stats()
-
-            # Save to file
-            filename = os.path.join(self.output_dir, f'profile_{name}.txt')
-            with open(filename, 'w', encoding='utf-8') as f:
-                f.write(s.getvalue())
-
-            self.logger.info(f"Performance profile saved to {filename}")
-        except Exception as e:
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import io
+import os
+import pstats
+import asyncio
+import cProfile
+from typing import Any, Optional
+
+from crawlo.utils.log import get_logger
+from crawlo.utils.error_handler import ErrorHandler
+from crawlo.event import spider_opened, spider_closed
+
+
+class PerformanceProfilerExtension:
+    """
+    Performance profiling extension
+    Profiles the crawler while it runs to help optimize its performance
+    """
+
+    def __init__(self, crawler: Any):
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+        self.error_handler = ErrorHandler(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration parameters
+        self.enabled = self.settings.get_bool('PERFORMANCE_PROFILER_ENABLED', False)
+        self.output_dir = self.settings.get('PERFORMANCE_PROFILER_OUTPUT_DIR', 'profiling')
+        self.interval = self.settings.get_int('PERFORMANCE_PROFILER_INTERVAL', 300)  # default: 5 minutes
+
+        self.profiler: Optional[cProfile.Profile] = None
+        self.task: Optional[asyncio.Task] = None
+
+        # Create the output directory
+        if self.enabled:
+            os.makedirs(self.output_dir, exist_ok=True)
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'PerformanceProfilerExtension':
+        # Only create an instance when enabled in settings
+        if not crawler.settings.get_bool('PERFORMANCE_PROFILER_ENABLED', False):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("PerformanceProfilerExtension: PERFORMANCE_PROFILER_ENABLED is False")
+
+        o = cls(crawler)
+        if o.enabled:
+            crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+            crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+        return o
+
+    async def spider_opened(self) -> None:
+        """Start profiling when the spider opens"""
+        if not self.enabled:
+            return
+
+        try:
+            self.profiler = cProfile.Profile()
+            self.profiler.enable()
+
+            # Start the task that periodically saves profiling results
+            self.task = asyncio.create_task(self._periodic_save())
+
+            self.logger.info("Performance profiler started.")
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to start the performance profiler",
+                raise_error=False
+            )
+
+    async def spider_closed(self) -> None:
+        """Stop profiling and save results when the spider closes"""
+        if not self.enabled or not self.profiler:
+            return
+
+        try:
+            # Stop the periodic-save task
+            if self.task:
+                self.task.cancel()
+                try:
+                    await self.task
+                except asyncio.CancelledError:
+                    pass
+
+            # Stop the profiler and save the final results
+            self.profiler.disable()
+
+            # Save the profiling results
+            await self._save_profile("final")
+            self.logger.info("Performance profiler stopped and results saved.")
+        except Exception as e:
+            self.error_handler.handle_error(
+                e,
+                context="Failed to stop the performance profiler",
+                raise_error=False
+            )
+
+    async def _periodic_save(self) -> None:
+        """Periodically save profiling results"""
+        counter = 1
+        while True:
+            try:
+                await asyncio.sleep(self.interval)
+                if self.profiler:
+                    # Temporarily disable the profiler to save results
+                    self.profiler.disable()
+                    await self._save_profile(f"periodic_{counter}")
+                    counter += 1
+                    # Re-enable the profiler
+                    self.profiler.enable()
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                self.logger.error(f"Error in periodic profiling save: {e}")
+
+    async def _save_profile(self, name: str) -> None:
+        """Save profiling results to a file"""
+        try:
+            # Create an in-memory string stream
+            s = io.StringIO()
+            ps = pstats.Stats(self.profiler, stream=s)
+
+            # Sort and print the statistics
+            ps.sort_stats('cumulative')
+            ps.print_stats()
+
+            # Save to file
+            filename = os.path.join(self.output_dir, f'profile_{name}.txt')
+            with open(filename, 'w', encoding='utf-8') as f:
+                f.write(s.getvalue())
+
+            self.logger.info(f"Performance profile saved to {filename}")
+        except Exception as e:
             self.logger.error(f"Error saving performance profile: {e}")