crawlo-1.3.6-py3-none-any.whl → crawlo-1.3.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (291)
  1. crawlo/__init__.py +87 -87
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -341
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +45 -45
  16. crawlo/core/engine.py +439 -439
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -257
  19. crawlo/crawler.py +638 -638
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -228
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +103 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -257
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -292
  47. crawlo/initialization/__init__.py +39 -39
  48. crawlo/initialization/built_in.py +425 -425
  49. crawlo/initialization/context.py +141 -141
  50. crawlo/initialization/core.py +193 -193
  51. crawlo/initialization/phases.py +148 -148
  52. crawlo/initialization/registry.py +145 -145
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -23
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +37 -37
  58. crawlo/logging/config.py +96 -96
  59. crawlo/logging/factory.py +128 -128
  60. crawlo/logging/manager.py +111 -111
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -212
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +325 -325
  85. crawlo/pipelines/pipeline_manager.py +76 -76
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -327
  88. crawlo/queue/pqueue.py +42 -42
  89. crawlo/queue/queue_manager.py +522 -503
  90. crawlo/queue/redis_priority_queue.py +367 -326
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -321
  93. crawlo/settings/setting_manager.py +214 -214
  94. crawlo/spider/__init__.py +657 -657
  95. crawlo/stats_collector.py +73 -73
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +138 -138
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +170 -167
  104. crawlo/templates/project/settings_distributed.py.tmpl +169 -166
  105. crawlo/templates/project/settings_gentle.py.tmpl +166 -166
  106. crawlo/templates/project/settings_high_performance.py.tmpl +167 -167
  107. crawlo/templates/project/settings_minimal.py.tmpl +65 -65
  108. crawlo/templates/project/settings_simple.py.tmpl +164 -164
  109. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  110. crawlo/templates/run.py.tmpl +34 -34
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +9 -9
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +364 -364
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +25 -25
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -165
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +79 -79
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -388
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -225
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.6.dist-info → crawlo-1.3.7.dist-info}/METADATA +1199 -1126
  149. crawlo-1.3.7.dist-info/RECORD +292 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +106 -106
  154. tests/baidu_performance_test.py +108 -108
  155. tests/baidu_test.py +59 -59
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +212 -212
  158. tests/comprehensive_test.py +81 -81
  159. tests/comprehensive_testing_summary.md +186 -186
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +69 -69
  164. tests/debug_framework_logger.py +84 -84
  165. tests/debug_log_config.py +126 -126
  166. tests/debug_log_levels.py +63 -63
  167. tests/debug_pipelines.py +66 -66
  168. tests/detailed_log_test.py +233 -233
  169. tests/distributed_test.py +66 -66
  170. tests/distributed_test_debug.py +76 -76
  171. tests/dynamic_loading_example.py +523 -523
  172. tests/dynamic_loading_test.py +104 -104
  173. tests/env_config_example.py +133 -133
  174. tests/error_handling_example.py +171 -171
  175. tests/final_comprehensive_test.py +151 -151
  176. tests/final_log_test.py +260 -260
  177. tests/final_validation_test.py +182 -182
  178. tests/fix_log_test.py +142 -142
  179. tests/framework_performance_test.py +202 -202
  180. tests/log_buffering_test.py +111 -111
  181. tests/log_generation_timing_test.py +153 -153
  182. tests/optimized_performance_test.py +211 -211
  183. tests/performance_comparison.py +245 -245
  184. tests/queue_blocking_test.py +113 -113
  185. tests/queue_test.py +89 -89
  186. tests/redis_key_validation_demo.py +130 -130
  187. tests/request_params_example.py +150 -150
  188. tests/response_improvements_example.py +144 -144
  189. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  190. tests/scrapy_comparison/scrapy_test.py +133 -133
  191. tests/simple_command_test.py +119 -119
  192. tests/simple_crawlo_test.py +127 -127
  193. tests/simple_log_test.py +57 -57
  194. tests/simple_log_test2.py +137 -137
  195. tests/simple_optimization_test.py +128 -128
  196. tests/simple_queue_type_test.py +41 -41
  197. tests/simple_spider_test.py +49 -49
  198. tests/simple_test.py +47 -47
  199. tests/spider_log_timing_test.py +177 -177
  200. tests/test_advanced_tools.py +148 -148
  201. tests/test_all_commands.py +230 -230
  202. tests/test_all_redis_key_configs.py +145 -145
  203. tests/test_authenticated_proxy.py +141 -141
  204. tests/test_batch_processor.py +178 -178
  205. tests/test_cleaners.py +54 -54
  206. tests/test_component_factory.py +174 -174
  207. tests/test_comprehensive.py +146 -146
  208. tests/test_config_consistency.py +80 -80
  209. tests/test_config_merge.py +152 -152
  210. tests/test_config_validator.py +182 -182
  211. tests/test_controlled_spider_mixin.py +79 -79
  212. tests/test_crawlo_proxy_integration.py +108 -108
  213. tests/test_date_tools.py +123 -123
  214. tests/test_default_header_middleware.py +158 -158
  215. tests/test_distributed.py +65 -65
  216. tests/test_double_crawlo_fix.py +204 -207
  217. tests/test_double_crawlo_fix_simple.py +124 -124
  218. tests/test_download_delay_middleware.py +221 -221
  219. tests/test_downloader_proxy_compatibility.py +268 -268
  220. tests/test_dynamic_downloaders_proxy.py +124 -124
  221. tests/test_dynamic_proxy.py +92 -92
  222. tests/test_dynamic_proxy_config.py +146 -146
  223. tests/test_dynamic_proxy_real.py +109 -109
  224. tests/test_edge_cases.py +303 -303
  225. tests/test_enhanced_error_handler.py +270 -270
  226. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  227. tests/test_env_config.py +121 -121
  228. tests/test_error_handler_compatibility.py +112 -112
  229. tests/test_factories.py +252 -252
  230. tests/test_final_validation.py +153 -153
  231. tests/test_framework_env_usage.py +103 -103
  232. tests/test_framework_logger.py +66 -66
  233. tests/test_framework_startup.py +64 -64
  234. tests/test_get_component_logger.py +83 -83
  235. tests/test_integration.py +169 -169
  236. tests/test_item_dedup_redis_key.py +122 -122
  237. tests/test_large_scale_config.py +112 -112
  238. tests/test_large_scale_helper.py +235 -235
  239. tests/test_logging_system.py +282 -282
  240. tests/test_mode_change.py +72 -72
  241. tests/test_mode_consistency.py +51 -51
  242. tests/test_offsite_middleware.py +221 -221
  243. tests/test_parsel.py +29 -29
  244. tests/test_performance.py +327 -327
  245. tests/test_performance_monitor.py +115 -115
  246. tests/test_proxy_api.py +264 -264
  247. tests/test_proxy_health_check.py +32 -32
  248. tests/test_proxy_middleware.py +121 -121
  249. tests/test_proxy_middleware_enhanced.py +216 -216
  250. tests/test_proxy_middleware_integration.py +136 -136
  251. tests/test_proxy_middleware_refactored.py +184 -184
  252. tests/test_proxy_providers.py +56 -56
  253. tests/test_proxy_stats.py +19 -19
  254. tests/test_proxy_strategies.py +59 -59
  255. tests/test_queue_empty_check.py +41 -41
  256. tests/test_queue_manager_double_crawlo.py +173 -173
  257. tests/test_queue_manager_redis_key.py +179 -176
  258. tests/test_queue_naming.py +155 -0
  259. tests/test_queue_type.py +106 -106
  260. tests/test_random_user_agent.py +72 -72
  261. tests/test_real_scenario_proxy.py +195 -195
  262. tests/test_redis_config.py +28 -28
  263. tests/test_redis_connection_pool.py +294 -294
  264. tests/test_redis_key_naming.py +181 -181
  265. tests/test_redis_key_validator.py +123 -123
  266. tests/test_redis_queue.py +224 -224
  267. tests/test_redis_queue_name_fix.py +176 -0
  268. tests/test_request_ignore_middleware.py +182 -182
  269. tests/test_request_params.py +111 -111
  270. tests/test_request_serialization.py +70 -70
  271. tests/test_response_code_middleware.py +349 -349
  272. tests/test_response_filter_middleware.py +427 -427
  273. tests/test_response_improvements.py +152 -152
  274. tests/test_retry_middleware.py +241 -241
  275. tests/test_scheduler.py +252 -252
  276. tests/test_scheduler_config_update.py +133 -133
  277. tests/test_simple_response.py +61 -61
  278. tests/test_telecom_spider_redis_key.py +205 -205
  279. tests/test_template_content.py +87 -87
  280. tests/test_template_redis_key.py +134 -134
  281. tests/test_tools.py +159 -159
  282. tests/test_user_agents.py +96 -96
  283. tests/tools_example.py +260 -260
  284. tests/untested_features_report.md +138 -138
  285. tests/verify_debug.py +51 -51
  286. tests/verify_distributed.py +117 -117
  287. tests/verify_log_fix.py +111 -111
  288. crawlo-1.3.6.dist-info/RECORD +0 -290
  289. {crawlo-1.3.6.dist-info → crawlo-1.3.7.dist-info}/WHEEL +0 -0
  290. {crawlo-1.3.6.dist-info → crawlo-1.3.7.dist-info}/entry_points.txt +0 -0
  291. {crawlo-1.3.6.dist-info → crawlo-1.3.7.dist-info}/top_level.txt +0 -0
crawlo/extension/log_stats.py (the LogStats extension)
@@ -1,82 +1,82 @@
Every line is removed and re-added with identical text; the file content appears once below.

#!/usr/bin/python
# -*- coding:UTF-8 -*-
from typing import Any

from crawlo import event
from crawlo.tools.date_tools import now, time_diff


class LogStats(object):

    def __init__(self, stats: Any):
        self._stats = stats

    @classmethod
    def create_instance(cls, crawler: Any) -> 'LogStats':
        o = cls(crawler.stats)
        # Subscribe to all required events
        event_subscriptions = [
            (o.spider_opened, event.spider_opened),
            (o.spider_closed, event.spider_closed),
            (o.item_successful, event.item_successful),
            (o.item_discard, event.item_discard),
            (o.response_received, event.response_received),
            (o.request_scheduled, event.request_scheduled),
        ]

        for handler, evt in event_subscriptions:
            try:
                crawler.subscriber.subscribe(handler, event=evt)
            except Exception as e:
                # Get a logger and record the error
                from crawlo.utils.log import get_logger
                logger = get_logger(cls.__name__)
                logger.error(f"Failed to subscribe to event {evt}: {e}")

        return o

    async def spider_opened(self) -> None:
        try:
            self._stats['start_time'] = now(fmt='%Y-%m-%d %H:%M:%S')
        except Exception as e:
            # Swallow silently so the crawl is not affected
            pass

    async def spider_closed(self) -> None:
        try:
            self._stats['end_time'] = now(fmt='%Y-%m-%d %H:%M:%S')
            self._stats['cost_time(s)'] = time_diff(start=self._stats['start_time'], end=self._stats['end_time'])
        except Exception as e:
            # Swallow silently so the crawl is not affected
            pass

    async def item_successful(self, _item: Any, _spider: Any) -> None:
        try:
            self._stats.inc_value('item_successful_count')
        except Exception as e:
            # Swallow silently so the crawl is not affected
            pass

    async def item_discard(self, _item: Any, exc: Any, _spider: Any) -> None:
        try:
            self._stats.inc_value('item_discard_count')
            reason = getattr(exc, 'msg', None)  # safer attribute access
            if reason:
                self._stats.inc_value(f"item_discard/{reason}")
        except Exception as e:
            # Swallow silently so the crawl is not affected
            pass

    async def response_received(self, _response: Any, _spider: Any) -> None:
        try:
            self._stats.inc_value('response_received_count')
        except Exception as e:
            # Swallow silently so the crawl is not affected
            pass

    async def request_scheduled(self, _request: Any, _spider: Any) -> None:
        try:
            self._stats.inc_value('request_scheduler_count')
        except Exception as e:
            # Swallow silently so the crawl is not affected
            pass
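
For readers unfamiliar with crawlo's extension wiring: create_instance above expects a crawler exposing stats and subscriber.subscribe(handler, event=...), and the async handlers are awaited later when the corresponding events fire. The sketch below exercises that contract with stand-in FakeStats / FakeSubscriber / FakeCrawler classes, which are hypothetical test doubles rather than crawlo's own implementations; the import path crawlo.extension.log_stats is taken from the file list above.

import asyncio
from collections import defaultdict

from crawlo import event
from crawlo.extension.log_stats import LogStats


class FakeStats(dict):
    """Stand-in for crawlo's stats collector: dict item access plus inc_value()."""
    def inc_value(self, key, count=1):
        self[key] = self.get(key, 0) + count


class FakeSubscriber:
    """Stand-in subscriber: maps an event object to the async handlers subscribed to it."""
    def __init__(self):
        self._handlers = defaultdict(list)

    def subscribe(self, handler, *, event):
        self._handlers[event].append(handler)

    async def notify(self, evt, *args):
        for handler in self._handlers[evt]:
            await handler(*args)


class FakeCrawler:
    def __init__(self):
        self.stats = FakeStats()
        self.subscriber = FakeSubscriber()


async def main():
    crawler = FakeCrawler()
    LogStats.create_instance(crawler)                              # subscribes all six handlers
    await crawler.subscriber.notify(event.spider_opened)           # records start_time
    await crawler.subscriber.notify(event.response_received, None, None)
    await crawler.subscriber.notify(event.spider_closed)           # records end_time and cost_time(s)
    print(dict(crawler.stats))


asyncio.run(main())

Because every handler swallows its own exceptions, a broken stats backend degrades to missing counters rather than a failed crawl.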
crawlo/extension/logging_extension.py (the CustomLoggerExtension)
@@ -1,62 +1,62 @@
Every line is removed and re-added with identical text; the file content appears once below.

from typing import Any
from crawlo.exceptions import NotConfigured
from crawlo.utils.log import get_logger

# Fetch the logger lazily so it is created only after the logging system has been configured
_logger = None

def logger():
    """Lazily fetch the logger instance, ensuring it is obtained after the logging system is configured."""
    global _logger
    if _logger is None:
        _logger = get_logger(__name__)
    return _logger


class CustomLoggerExtension:
    """
    Logging-system initialization extension.
    Follows the same interface convention as ExtensionManager: instances are created via create_instance.
    """

    def __init__(self, settings: Any):
        self.settings = settings
        # Use the new logging system, but keep the configuration hand-off simple
        try:
            from crawlo.logging import configure_logging
            # Pass the settings object directly and let the logging system handle it internally
            configure_logging(settings)
        except Exception as e:
            # A failure to configure the logging system should not block the extension from loading;
            # report the error via basic logging instead
            import logging
            logging.getLogger(__name__).warning(f"Failed to configure logging system: {e}")
            # Do not raise; let the extension keep loading

    @classmethod
    def create_instance(cls, crawler: Any, *args: Any, **kwargs: Any) -> 'CustomLoggerExtension':
        """
        Factory method compatible with how ExtensionManager creates components.
        Called by ExtensionManager.
        """
        # Whether the extension is enabled can be controlled via settings
        log_file = crawler.settings.get('LOG_FILE')
        log_enable_custom = crawler.settings.get('LOG_ENABLE_CUSTOM', False)

        # Disable only when no log file is configured and custom logging is not enabled
        if not log_file and not log_enable_custom:
            raise NotConfigured("CustomLoggerExtension: LOG_FILE not set and LOG_ENABLE_CUSTOM=False")

        return cls(crawler.settings)

    def spider_opened(self, spider: Any) -> None:
        logger_instance = logger()
        try:
            logger_instance.info(
                f"CustomLoggerExtension: Logging initialized. "
                f"LOG_FILE={self.settings.get('LOG_FILE')}, "
                f"LOG_LEVEL={self.settings.get('LOG_LEVEL')}"
            )
        except Exception as e:
            # Even if this initialization message cannot be printed, it must not affect the program
            pass
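
One detail in create_instance above is easy to misread: the extension stays enabled if either LOG_FILE is set or LOG_ENABLE_CUSTOM is truthy, and NotConfigured is raised only when both are absent. Below is a minimal sketch of that gating, assuming the class is importable from crawlo.extension.logging_extension (as the file list suggests) and using a plain dict plus SimpleNamespace as a stand-in crawler; any logging-configuration error inside __init__ is caught by the class itself, so the stand-in settings are enough here.

from types import SimpleNamespace

from crawlo.exceptions import NotConfigured
from crawlo.extension.logging_extension import CustomLoggerExtension


def try_load(settings: dict) -> str:
    """Attempt to create the extension against a stand-in crawler that carries only settings."""
    crawler = SimpleNamespace(settings=settings)   # dict.get(key, default) matches the calls above
    try:
        CustomLoggerExtension.create_instance(crawler)
        return "loaded"
    except NotConfigured as exc:
        return f"skipped: {exc}"


print(try_load({}))                           # skipped: neither option set
print(try_load({"LOG_FILE": "crawlo.log"}))   # loaded via LOG_FILE
print(try_load({"LOG_ENABLE_CUSTOM": True}))  # loaded via LOG_ENABLE_CUSTOM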
crawlo/extension/memory_monitor.py (the MemoryMonitorExtension)
@@ -1,105 +1,105 @@
Every line is removed and re-added with identical text; the file content appears once below.

#!/usr/bin/python
# -*- coding:UTF-8 -*-
import asyncio
import psutil
from typing import Any, Optional

from crawlo.utils.log import get_logger
from crawlo.utils.error_handler import ErrorHandler
from crawlo.event import spider_opened, spider_closed


class MemoryMonitorExtension:
    """
    Memory monitoring extension.
    Periodically monitors the crawler process's memory usage and warns when it exceeds the configured thresholds.
    """

    def __init__(self, crawler: Any):
        self.task: Optional[asyncio.Task] = None
        self.process = psutil.Process()
        self.settings = crawler.settings
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
        self.error_handler = ErrorHandler(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))

        # Read configuration parameters
        self.interval = self.settings.get_int('MEMORY_MONITOR_INTERVAL', 60)  # default: check every 60 seconds
        self.warning_threshold = self.settings.get_float('MEMORY_WARNING_THRESHOLD', 80.0)  # default: 80% warning threshold
        self.critical_threshold = self.settings.get_float('MEMORY_CRITICAL_THRESHOLD', 90.0)  # default: 90% critical threshold

    @classmethod
    def create_instance(cls, crawler: Any) -> 'MemoryMonitorExtension':
        # Only create an instance when enabled in the configuration
        if not crawler.settings.get_bool('MEMORY_MONITOR_ENABLED', False):
            from crawlo.exceptions import NotConfigured
            raise NotConfigured("MemoryMonitorExtension: MEMORY_MONITOR_ENABLED is False")

        o = cls(crawler)
        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
        return o

    async def spider_opened(self) -> None:
        """Start monitoring when the spider opens."""
        try:
            self.task = asyncio.create_task(self._monitor_loop())
            self.logger.info(
                f"Memory monitor started. Interval: {self.interval}s, "
                f"Warning threshold: {self.warning_threshold}%, Critical threshold: {self.critical_threshold}%"
            )
        except Exception as e:
            self.error_handler.handle_error(
                e,
                context="Failed to start memory monitoring",
                raise_error=False
            )

    async def spider_closed(self) -> None:
        """Stop monitoring when the spider closes."""
        try:
            if self.task:
                self.task.cancel()
                try:
                    await self.task
                except asyncio.CancelledError:
                    pass
                self.task = None
            self.logger.info("Memory monitor stopped.")
        except Exception as e:
            self.error_handler.handle_error(
                e,
                context="Failed to stop memory monitoring",
                raise_error=False
            )

    async def _monitor_loop(self) -> None:
        """Memory monitoring loop."""
        while True:
            try:
                # Collect memory-usage information
                memory_info = self.process.memory_info()
                memory_percent = self.process.memory_percent()

                # Log current memory usage
                self.logger.debug(
                    f"Memory usage: {memory_percent:.2f}% "
                    f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB, "
                    f"VMS: {memory_info.vms / 1024 / 1024:.2f} MB)"
                )

                # Check whether thresholds are exceeded
                if memory_percent >= self.critical_threshold:
                    self.logger.critical(
                        f"Memory usage critical: {memory_percent:.2f}% "
                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
                    )
                elif memory_percent >= self.warning_threshold:
                    self.logger.warning(
                        f"Memory usage high: {memory_percent:.2f}% "
                        f"(RSS: {memory_info.rss / 1024 / 1024:.2f} MB)"
                    )

                await asyncio.sleep(self.interval)
            except Exception as e:
                self.logger.error(f"Error in memory monitoring: {e}")
                await asyncio.sleep(self.interval)
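
The monitor is opt-in: create_instance raises NotConfigured unless MEMORY_MONITOR_ENABLED is true, and the interval and thresholds come from settings via get_int / get_float. A sketch of project settings that would enable it, using the setting names read by the code above; the values are illustrative, not recommendations.

# settings.py (project level) — illustrative values only
MEMORY_MONITOR_ENABLED = True        # gate checked in create_instance
MEMORY_MONITOR_INTERVAL = 30         # seconds between checks (code default: 60)
MEMORY_WARNING_THRESHOLD = 75.0      # percent; logger.warning at or above this (code default: 80.0)
MEMORY_CRITICAL_THRESHOLD = 90.0     # percent; logger.critical at or above this (code default: 90.0)
LOG_LEVEL = "DEBUG"                  # DEBUG makes the periodic usage lines from _monitor_loop visible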