crawlo 1.3.3-py3-none-any.whl → 1.3.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (289)
  1. crawlo/__init__.py +87 -63
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +151 -151
  7. crawlo/commands/help.py +138 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +341 -323
  10. crawlo/commands/startproject.py +436 -436
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +312 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +46 -2
  16. crawlo/core/engine.py +439 -365
  17. crawlo/core/processor.py +40 -40
  18. crawlo/core/scheduler.py +257 -256
  19. crawlo/crawler.py +639 -1167
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +273 -273
  23. crawlo/downloader/aiohttp_downloader.py +228 -226
  24. crawlo/downloader/cffi_downloader.py +245 -245
  25. crawlo/downloader/httpx_downloader.py +259 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +402 -402
  28. crawlo/downloader/selenium_downloader.py +472 -472
  29. crawlo/event.py +11 -11
  30. crawlo/exceptions.py +81 -81
  31. crawlo/extension/__init__.py +39 -39
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +57 -57
  34. crawlo/extension/log_stats.py +81 -81
  35. crawlo/extension/logging_extension.py +61 -52
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +28 -0
  40. crawlo/factories/base.py +69 -0
  41. crawlo/factories/crawler.py +104 -0
  42. crawlo/factories/registry.py +85 -0
  43. crawlo/filters/__init__.py +154 -154
  44. crawlo/filters/aioredis_filter.py +257 -234
  45. crawlo/filters/memory_filter.py +269 -269
  46. crawlo/framework.py +292 -0
  47. crawlo/initialization/__init__.py +40 -0
  48. crawlo/initialization/built_in.py +426 -0
  49. crawlo/initialization/context.py +142 -0
  50. crawlo/initialization/core.py +194 -0
  51. crawlo/initialization/phases.py +149 -0
  52. crawlo/initialization/registry.py +146 -0
  53. crawlo/items/__init__.py +23 -23
  54. crawlo/items/base.py +23 -22
  55. crawlo/items/fields.py +52 -52
  56. crawlo/items/items.py +104 -104
  57. crawlo/logging/__init__.py +38 -0
  58. crawlo/logging/config.py +97 -0
  59. crawlo/logging/factory.py +129 -0
  60. crawlo/logging/manager.py +112 -0
  61. crawlo/middleware/__init__.py +21 -21
  62. crawlo/middleware/default_header.py +132 -132
  63. crawlo/middleware/download_delay.py +104 -104
  64. crawlo/middleware/middleware_manager.py +135 -135
  65. crawlo/middleware/offsite.py +123 -123
  66. crawlo/middleware/proxy.py +386 -386
  67. crawlo/middleware/request_ignore.py +86 -86
  68. crawlo/middleware/response_code.py +163 -163
  69. crawlo/middleware/response_filter.py +136 -136
  70. crawlo/middleware/retry.py +124 -124
  71. crawlo/middleware/simple_proxy.py +65 -65
  72. crawlo/mode_manager.py +212 -187
  73. crawlo/network/__init__.py +21 -21
  74. crawlo/network/request.py +379 -379
  75. crawlo/network/response.py +359 -359
  76. crawlo/pipelines/__init__.py +21 -21
  77. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  78. crawlo/pipelines/console_pipeline.py +39 -39
  79. crawlo/pipelines/csv_pipeline.py +316 -316
  80. crawlo/pipelines/database_dedup_pipeline.py +222 -222
  81. crawlo/pipelines/json_pipeline.py +218 -218
  82. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  83. crawlo/pipelines/mongo_pipeline.py +131 -131
  84. crawlo/pipelines/mysql_pipeline.py +325 -318
  85. crawlo/pipelines/pipeline_manager.py +76 -75
  86. crawlo/pipelines/redis_dedup_pipeline.py +166 -166
  87. crawlo/project.py +327 -325
  88. crawlo/queue/pqueue.py +43 -37
  89. crawlo/queue/queue_manager.py +503 -379
  90. crawlo/queue/redis_priority_queue.py +326 -306
  91. crawlo/settings/__init__.py +7 -7
  92. crawlo/settings/default_settings.py +321 -225
  93. crawlo/settings/setting_manager.py +214 -198
  94. crawlo/spider/__init__.py +657 -639
  95. crawlo/stats_collector.py +73 -59
  96. crawlo/subscriber.py +129 -129
  97. crawlo/task_manager.py +139 -30
  98. crawlo/templates/crawlo.cfg.tmpl +10 -10
  99. crawlo/templates/project/__init__.py.tmpl +3 -3
  100. crawlo/templates/project/items.py.tmpl +17 -17
  101. crawlo/templates/project/middlewares.py.tmpl +118 -118
  102. crawlo/templates/project/pipelines.py.tmpl +96 -96
  103. crawlo/templates/project/settings.py.tmpl +168 -267
  104. crawlo/templates/project/settings_distributed.py.tmpl +167 -180
  105. crawlo/templates/project/settings_gentle.py.tmpl +167 -61
  106. crawlo/templates/project/settings_high_performance.py.tmpl +168 -131
  107. crawlo/templates/project/settings_minimal.py.tmpl +66 -35
  108. crawlo/templates/project/settings_simple.py.tmpl +165 -102
  109. crawlo/templates/project/spiders/__init__.py.tmpl +10 -6
  110. crawlo/templates/run.py.tmpl +34 -38
  111. crawlo/templates/spider/spider.py.tmpl +143 -143
  112. crawlo/templates/spiders_init.py.tmpl +10 -0
  113. crawlo/tools/__init__.py +200 -200
  114. crawlo/tools/anti_crawler.py +268 -268
  115. crawlo/tools/authenticated_proxy.py +240 -240
  116. crawlo/tools/data_formatter.py +225 -225
  117. crawlo/tools/data_validator.py +180 -180
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +388 -388
  120. crawlo/tools/encoding_converter.py +127 -127
  121. crawlo/tools/network_diagnostic.py +365 -0
  122. crawlo/tools/request_tools.py +82 -82
  123. crawlo/tools/retry_mechanism.py +224 -224
  124. crawlo/tools/scenario_adapter.py +262 -262
  125. crawlo/tools/text_cleaner.py +232 -232
  126. crawlo/utils/__init__.py +34 -34
  127. crawlo/utils/batch_processor.py +259 -259
  128. crawlo/utils/class_loader.py +26 -0
  129. crawlo/utils/controlled_spider_mixin.py +439 -439
  130. crawlo/utils/db_helper.py +343 -343
  131. crawlo/utils/enhanced_error_handler.py +356 -356
  132. crawlo/utils/env_config.py +142 -142
  133. crawlo/utils/error_handler.py +165 -124
  134. crawlo/utils/func_tools.py +82 -82
  135. crawlo/utils/large_scale_config.py +286 -286
  136. crawlo/utils/large_scale_helper.py +344 -344
  137. crawlo/utils/log.py +80 -200
  138. crawlo/utils/performance_monitor.py +285 -285
  139. crawlo/utils/queue_helper.py +175 -175
  140. crawlo/utils/redis_connection_pool.py +388 -351
  141. crawlo/utils/redis_key_validator.py +198 -198
  142. crawlo/utils/request.py +267 -267
  143. crawlo/utils/request_serializer.py +225 -218
  144. crawlo/utils/spider_loader.py +61 -61
  145. crawlo/utils/system.py +11 -11
  146. crawlo/utils/tools.py +4 -4
  147. crawlo/utils/url.py +39 -39
  148. {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/METADATA +1126 -1020
  149. crawlo-1.3.5.dist-info/RECORD +288 -0
  150. examples/__init__.py +7 -7
  151. tests/__init__.py +7 -7
  152. tests/advanced_tools_example.py +275 -275
  153. tests/authenticated_proxy_example.py +107 -107
  154. tests/baidu_performance_test.py +109 -0
  155. tests/baidu_test.py +60 -0
  156. tests/cleaners_example.py +160 -160
  157. tests/comprehensive_framework_test.py +213 -0
  158. tests/comprehensive_test.py +82 -0
  159. tests/comprehensive_testing_summary.md +187 -0
  160. tests/config_validation_demo.py +142 -142
  161. tests/controlled_spider_example.py +205 -205
  162. tests/date_tools_example.py +180 -180
  163. tests/debug_configure.py +70 -0
  164. tests/debug_framework_logger.py +85 -0
  165. tests/debug_log_config.py +127 -0
  166. tests/debug_log_levels.py +64 -0
  167. tests/debug_pipelines.py +66 -66
  168. tests/detailed_log_test.py +234 -0
  169. tests/distributed_test.py +67 -0
  170. tests/distributed_test_debug.py +77 -0
  171. tests/dynamic_loading_example.py +523 -523
  172. tests/dynamic_loading_test.py +104 -104
  173. tests/env_config_example.py +133 -133
  174. tests/error_handling_example.py +171 -171
  175. tests/final_command_test_report.md +0 -0
  176. tests/final_comprehensive_test.py +152 -0
  177. tests/final_log_test.py +261 -0
  178. tests/final_validation_test.py +183 -0
  179. tests/fix_log_test.py +143 -0
  180. tests/framework_performance_test.py +203 -0
  181. tests/log_buffering_test.py +112 -0
  182. tests/log_generation_timing_test.py +154 -0
  183. tests/optimized_performance_test.py +212 -0
  184. tests/performance_comparison.py +246 -0
  185. tests/queue_blocking_test.py +114 -0
  186. tests/queue_test.py +90 -0
  187. tests/redis_key_validation_demo.py +130 -130
  188. tests/request_params_example.py +150 -150
  189. tests/response_improvements_example.py +144 -144
  190. tests/scrapy_comparison/ofweek_scrapy.py +139 -0
  191. tests/scrapy_comparison/scrapy_test.py +134 -0
  192. tests/simple_command_test.py +120 -0
  193. tests/simple_crawlo_test.py +128 -0
  194. tests/simple_log_test.py +58 -0
  195. tests/simple_log_test2.py +138 -0
  196. tests/simple_optimization_test.py +129 -0
  197. tests/simple_spider_test.py +50 -0
  198. tests/simple_test.py +48 -0
  199. tests/spider_log_timing_test.py +178 -0
  200. tests/test_advanced_tools.py +148 -148
  201. tests/test_all_commands.py +231 -0
  202. tests/test_all_redis_key_configs.py +145 -145
  203. tests/test_authenticated_proxy.py +141 -141
  204. tests/test_batch_processor.py +179 -0
  205. tests/test_cleaners.py +54 -54
  206. tests/test_component_factory.py +175 -0
  207. tests/test_comprehensive.py +146 -146
  208. tests/test_config_consistency.py +80 -80
  209. tests/test_config_merge.py +152 -152
  210. tests/test_config_validator.py +182 -182
  211. tests/test_controlled_spider_mixin.py +80 -0
  212. tests/test_crawlo_proxy_integration.py +108 -108
  213. tests/test_date_tools.py +123 -123
  214. tests/test_default_header_middleware.py +158 -158
  215. tests/test_distributed.py +65 -65
  216. tests/test_double_crawlo_fix.py +207 -207
  217. tests/test_double_crawlo_fix_simple.py +124 -124
  218. tests/test_download_delay_middleware.py +221 -221
  219. tests/test_downloader_proxy_compatibility.py +268 -268
  220. tests/test_dynamic_downloaders_proxy.py +124 -124
  221. tests/test_dynamic_proxy.py +92 -92
  222. tests/test_dynamic_proxy_config.py +146 -146
  223. tests/test_dynamic_proxy_real.py +109 -109
  224. tests/test_edge_cases.py +303 -303
  225. tests/test_enhanced_error_handler.py +270 -270
  226. tests/test_enhanced_error_handler_comprehensive.py +246 -0
  227. tests/test_env_config.py +121 -121
  228. tests/test_error_handler_compatibility.py +112 -112
  229. tests/test_factories.py +253 -0
  230. tests/test_final_validation.py +153 -153
  231. tests/test_framework_env_usage.py +103 -103
  232. tests/test_framework_logger.py +67 -0
  233. tests/test_framework_startup.py +65 -0
  234. tests/test_get_component_logger.py +84 -0
  235. tests/test_integration.py +169 -169
  236. tests/test_item_dedup_redis_key.py +122 -122
  237. tests/test_large_scale_config.py +113 -0
  238. tests/test_large_scale_helper.py +236 -0
  239. tests/test_logging_system.py +283 -0
  240. tests/test_mode_change.py +73 -0
  241. tests/test_mode_consistency.py +51 -51
  242. tests/test_offsite_middleware.py +221 -221
  243. tests/test_parsel.py +29 -29
  244. tests/test_performance.py +327 -327
  245. tests/test_performance_monitor.py +116 -0
  246. tests/test_proxy_api.py +264 -264
  247. tests/test_proxy_health_check.py +32 -32
  248. tests/test_proxy_middleware.py +121 -121
  249. tests/test_proxy_middleware_enhanced.py +216 -216
  250. tests/test_proxy_middleware_integration.py +136 -136
  251. tests/test_proxy_middleware_refactored.py +184 -184
  252. tests/test_proxy_providers.py +56 -56
  253. tests/test_proxy_stats.py +19 -19
  254. tests/test_proxy_strategies.py +59 -59
  255. tests/test_queue_empty_check.py +42 -0
  256. tests/test_queue_manager_double_crawlo.py +173 -173
  257. tests/test_queue_manager_redis_key.py +176 -176
  258. tests/test_random_user_agent.py +72 -72
  259. tests/test_real_scenario_proxy.py +195 -195
  260. tests/test_redis_config.py +28 -28
  261. tests/test_redis_connection_pool.py +294 -294
  262. tests/test_redis_key_naming.py +181 -181
  263. tests/test_redis_key_validator.py +123 -123
  264. tests/test_redis_queue.py +224 -224
  265. tests/test_request_ignore_middleware.py +182 -182
  266. tests/test_request_params.py +111 -111
  267. tests/test_request_serialization.py +70 -70
  268. tests/test_response_code_middleware.py +349 -349
  269. tests/test_response_filter_middleware.py +427 -427
  270. tests/test_response_improvements.py +152 -152
  271. tests/test_retry_middleware.py +241 -241
  272. tests/test_scheduler.py +252 -252
  273. tests/test_scheduler_config_update.py +133 -133
  274. tests/test_simple_response.py +61 -61
  275. tests/test_telecom_spider_redis_key.py +205 -205
  276. tests/test_template_content.py +87 -87
  277. tests/test_template_redis_key.py +134 -134
  278. tests/test_tools.py +159 -159
  279. tests/test_user_agents.py +96 -96
  280. tests/tools_example.py +260 -260
  281. tests/untested_features_report.md +139 -0
  282. tests/verify_debug.py +52 -0
  283. tests/verify_distributed.py +117 -117
  284. tests/verify_log_fix.py +112 -0
  285. crawlo-1.3.3.dist-info/RECORD +0 -219
  286. tests/DOUBLE_CRAWLO_PREFIX_FIX_REPORT.md +0 -82
  287. {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/WHEEL +0 -0
  288. {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/entry_points.txt +0 -0
  289. {crawlo-1.3.3.dist-info → crawlo-1.3.5.dist-info}/top_level.txt +0 -0
crawlo/event.py CHANGED
@@ -1,11 +1,11 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-
-spider_error = "spider_error"
-spider_opened = "spider_open"
-spider_closed = "spider_closed"
-ignore_request = "ignore_request"
-request_scheduled = "request_scheduled"
-response_received = "request_received"
-item_successful = "item_successful"
-item_discard = "item_discard"
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+
+spider_error = "spider_error"
+spider_opened = "spider_open"
+spider_closed = "spider_closed"
+ignore_request = "ignore_request"
+request_scheduled = "request_scheduled"
+response_received = "request_received"
+item_successful = "item_successful"
+item_discard = "item_discard"
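These events are plain string constants; components subscribe coroutine handlers to them through the crawler's subscriber object, as the extension diffs further down illustrate. A minimal sketch, assuming `crawler` is an already-initialized crawler instance provided by the framework:

from crawlo.event import spider_opened, spider_closed

async def on_open():
    print("spider opened")

async def on_close():
    print("spider closed")

# `crawler` is assumed to have been constructed elsewhere by the framework.
crawler.subscriber.subscribe(on_open, event=spider_opened)
crawler.subscriber.subscribe(on_close, event=spider_closed)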
crawlo/exceptions.py CHANGED
@@ -1,82 +1,82 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-class TransformTypeError(TypeError):
-    pass
-
-
-class OutputError(Exception):
-    pass
-
-
-class SpiderTypeError(TypeError):
-    pass
-
-
-class ItemInitError(Exception):
-    pass
-
-
-class ItemAttributeError(Exception):
-    pass
-
-
-class DecodeError(Exception):
-    pass
-
-
-class MiddlewareInitError(Exception):
-    pass
-
-
-class PipelineInitError(Exception):
-    pass
-
-
-class InvalidOutputError(Exception):
-    pass
-
-
-class RequestMethodError(Exception):
-    pass
-
-
-class IgnoreRequestError(Exception):
-    def __init__(self, msg):
-        self.msg = msg
-        super(IgnoreRequestError, self).__init__(msg)
-
-
-class ItemDiscard(Exception):
-    def __init__(self, msg):
-        self.msg = msg
-        super(ItemDiscard, self).__init__(msg)
-
-
-class NotConfigured(Exception):
-    pass
-
-
-class NotConfiguredError(Exception):
-    pass
-
-
-class ExtensionInitError(Exception):
-    pass
-
-
-class ReceiverTypeError(Exception):
-    pass
-
-
-class SpiderCreationError(Exception):
-    """Raised when spider instantiation fails."""
-    pass
-
-
-class ItemValidationError(Exception):
-    """Raised when Item field validation fails."""
-    pass
-
-
-class DropItem(Exception):
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+class TransformTypeError(TypeError):
+    pass
+
+
+class OutputError(Exception):
+    pass
+
+
+class SpiderTypeError(TypeError):
+    pass
+
+
+class ItemInitError(Exception):
+    pass
+
+
+class ItemAttributeError(Exception):
+    pass
+
+
+class DecodeError(Exception):
+    pass
+
+
+class MiddlewareInitError(Exception):
+    pass
+
+
+class PipelineInitError(Exception):
+    pass
+
+
+class InvalidOutputError(Exception):
+    pass
+
+
+class RequestMethodError(Exception):
+    pass
+
+
+class IgnoreRequestError(Exception):
+    def __init__(self, msg):
+        self.msg = msg
+        super(IgnoreRequestError, self).__init__(msg)
+
+
+class ItemDiscard(Exception):
+    def __init__(self, msg):
+        self.msg = msg
+        super(ItemDiscard, self).__init__(msg)
+
+
+class NotConfigured(Exception):
+    pass
+
+
+class NotConfiguredError(Exception):
+    pass
+
+
+class ExtensionInitError(Exception):
+    pass
+
+
+class ReceiverTypeError(Exception):
+    pass
+
+
+class SpiderCreationError(Exception):
+    """Raised when spider instantiation fails."""
+    pass
+
+
+class ItemValidationError(Exception):
+    """Raised when Item field validation fails."""
+    pass
+
+
+class DropItem(Exception):
     pass
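Most of these classes are simple markers raised by the framework or by user code. A hedged sketch of how a pipeline might discard an item, assuming a Scrapy-style process_item hook (the hook name and signature are not shown in this diff):

from crawlo.exceptions import ItemDiscard

class PriceFilterPipeline:
    # Hypothetical pipeline; crawlo's exact pipeline interface is not part of this diff.
    def process_item(self, item, spider):
        if not item.get("price"):
            raise ItemDiscard("missing price field")
        return item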
crawlo/extension/__init__.py CHANGED
@@ -1,39 +1,39 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-from typing import List, Any
-from pprint import pformat
-
-from crawlo.utils.log import get_logger
-from crawlo.project import load_class
-from crawlo.exceptions import ExtensionInitError
-
-
-class ExtensionManager(object):
-
-    def __init__(self, crawler: Any):
-        self.crawler = crawler
-        self.extensions: List = []
-        extensions = self.crawler.settings.get_list('EXTENSIONS')
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-        self._add_extensions(extensions)
-
-    @classmethod
-    def create_instance(cls, *args: Any, **kwargs: Any) -> 'ExtensionManager':
-        return cls(*args, **kwargs)
-
-    def _add_extensions(self, extensions: List[str]) -> None:
-        for extension_path in extensions:
-            try:
-                extension_cls = load_class(extension_path)
-                if not hasattr(extension_cls, 'create_instance'):
-                    raise ExtensionInitError(
-                        f"Extension '{extension_path}' init failed: Must have method 'create_instance()'"
-                    )
-                self.extensions.append(extension_cls.create_instance(self.crawler))
-            except Exception as e:
-                self.logger.error(f"Failed to load extension '{extension_path}': {e}")
-                raise ExtensionInitError(f"Failed to load extension '{extension_path}': {e}")
-
-        if extensions:
-            # Keep this at INFO level so the key "enabled extensions" message stays visible
-            self.logger.info(f"Enabled extensions: \n{pformat(extensions)}")
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+from typing import List, Any
+from pprint import pformat
+
+from crawlo.utils.log import get_logger
+from crawlo.utils.class_loader import load_class
+from crawlo.exceptions import ExtensionInitError
+
+
+class ExtensionManager(object):
+
+    def __init__(self, crawler: Any):
+        self.crawler = crawler
+        self.extensions: List = []
+        extensions = self.crawler.settings.get_list('EXTENSIONS')
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+        self._add_extensions(extensions)
+
+    @classmethod
+    def create_instance(cls, *args: Any, **kwargs: Any) -> 'ExtensionManager':
+        return cls(*args, **kwargs)
+
+    def _add_extensions(self, extensions: List[str]) -> None:
+        for extension_path in extensions:
+            try:
+                extension_cls = load_class(extension_path)
+                if not hasattr(extension_cls, 'create_instance'):
+                    raise ExtensionInitError(
+                        f"Extension '{extension_path}' init failed: Must have method 'create_instance()'"
+                    )
+                self.extensions.append(extension_cls.create_instance(self.crawler))
+            except Exception as e:
+                self.logger.error(f"Failed to load extension '{extension_path}': {e}")
+                raise ExtensionInitError(f"Failed to load extension '{extension_path}': {e}")
+
+        if extensions:
+            # Keep this at INFO level so the key "enabled extensions" message stays visible
+            self.logger.info(f"Enabled extensions: \n{pformat(extensions)}")
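The manager resolves every dotted path listed in the EXTENSIONS setting via load_class and requires each class to expose a create_instance() factory. A sketch of a conforming extension; the module path myproject.extensions and the notification logic are assumptions, while the settings key, factory hook, and subscribe pattern come from the diff above:

# myproject/extensions.py (hypothetical location)
from crawlo.event import spider_closed
from crawlo.utils.log import get_logger

class CloseNotifier:
    def __init__(self, crawler):
        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))

    @classmethod
    def create_instance(cls, crawler):
        # Required factory hook; ExtensionManager rejects classes without it.
        o = cls(crawler)
        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
        return o

    async def spider_closed(self):
        self.logger.info("spider finished")

# settings.py
EXTENSIONS = [
    'myproject.extensions.CloseNotifier',
]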
crawlo/extension/health_check.py CHANGED
@@ -1,142 +1,142 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import asyncio
-from datetime import datetime
-from typing import Any, Optional, Dict
-
-from crawlo.event import spider_opened, spider_closed, response_received, request_scheduled
-from crawlo.utils.log import get_logger
-
-
-class HealthCheckExtension:
-    """
-    Health check extension
-    Monitors the spider's health, including response time, error rate, and similar metrics
-    """
-
-    def __init__(self, crawler: Any):
-        self.settings = crawler.settings
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-
-        # Read configuration
-        self.enabled = self.settings.get_bool('HEALTH_CHECK_ENABLED', True)
-        self.check_interval = self.settings.get_int('HEALTH_CHECK_INTERVAL', 60)  # default: 60 seconds
-
-        # Health statistics
-        self.stats: Dict[str, Any] = {
-            'start_time': None,
-            'total_requests': 0,
-            'total_responses': 0,
-            'error_responses': 0,
-            'last_check_time': None,
-            'response_times': [],  # most recent response times
-        }
-
-        self.task: Optional[asyncio.Task] = None
-
-    @classmethod
-    def create_instance(cls, crawler: Any) -> 'HealthCheckExtension':
-        # Only create an instance when the feature is enabled in settings
-        if not crawler.settings.get_bool('HEALTH_CHECK_ENABLED', True):
-            from crawlo.exceptions import NotConfigured
-            raise NotConfigured("HealthCheckExtension: HEALTH_CHECK_ENABLED is False")
-
-        o = cls(crawler)
-        if o.enabled:
-            crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
-            crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
-            crawler.subscriber.subscribe(o.response_received, event=response_received)
-            crawler.subscriber.subscribe(o.request_scheduled, event=request_scheduled)
-        return o
-
-    async def spider_opened(self) -> None:
-        """Start health checks when the spider opens"""
-        if not self.enabled:
-            return
-
-        self.stats['start_time'] = datetime.now()
-        self.task = asyncio.create_task(self._health_check_loop())
-        self.logger.info("Health check extension started.")
-
-    async def spider_closed(self) -> None:
-        """Stop health checks when the spider closes"""
-        if not self.enabled:
-            return
-
-        if self.task:
-            self.task.cancel()
-            try:
-                await self.task
-            except asyncio.CancelledError:
-                pass
-
-        # Emit a final health report
-        await self._check_health()
-        self.logger.info("Health check extension stopped.")
-
-    async def request_scheduled(self, request: Any, spider: Any) -> None:
-        """Count scheduled requests"""
-        if not self.enabled:
-            return
-        self.stats['total_requests'] += 1
-
-    async def response_received(self, response: Any, spider: Any) -> None:
-        """Count received responses"""
-        if not self.enabled:
-            return
-
-        self.stats['total_responses'] += 1
-
-        # Count error responses
-        if hasattr(response, 'status_code') and response.status_code >= 400:
-            self.stats['error_responses'] += 1
-
-    async def _health_check_loop(self) -> None:
-        """Periodic health-check loop"""
-        while True:
-            try:
-                await asyncio.sleep(self.check_interval)
-                await self._check_health()
-            except asyncio.CancelledError:
-                break
-            except Exception as e:
-                self.logger.error(f"Error in health check loop: {e}")
-
-    async def _check_health(self) -> None:
-        """Run a health check and emit a report"""
-        try:
-            now_time = datetime.now()
-            self.stats['last_check_time'] = now_time
-
-            # Basic throughput statistics
-            runtime = (now_time - self.stats['start_time']).total_seconds() if self.stats['start_time'] else 0
-            requests_per_second = self.stats['total_requests'] / runtime if runtime > 0 else 0
-            responses_per_second = self.stats['total_responses'] / runtime if runtime > 0 else 0
-
-            # Error rate
-            error_rate = (
-                self.stats['error_responses'] / self.stats['total_responses']
-                if self.stats['total_responses'] > 0 else 0
-            )
-
-            # Build the health report
-            health_report = {
-                'runtime_seconds': round(runtime, 2),
-                'total_requests': self.stats['total_requests'],
-                'total_responses': self.stats['total_responses'],
-                'requests_per_second': round(requests_per_second, 2),
-                'responses_per_second': round(responses_per_second, 2),
-                'error_responses': self.stats['error_responses'],
-                'error_rate': f"{error_rate:.2%}",
-            }
-
-            # Choose the log level based on the error rate
-            if error_rate > 0.1:  # error rate above 10%
-                self.logger.warning(f"Health check report: {health_report}")
-            elif error_rate > 0.05:  # error rate above 5%
-                self.logger.info(f"Health check report: {health_report}")
-            else:
-                self.logger.debug(f"Health check report: {health_report}")
-
-        except Exception as e:
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import asyncio
+from datetime import datetime
+from typing import Any, Optional, Dict
+
+from crawlo.event import spider_opened, spider_closed, response_received, request_scheduled
+from crawlo.utils.log import get_logger
+
+
+class HealthCheckExtension:
+    """
+    Health check extension
+    Monitors the spider's health, including response time, error rate, and similar metrics
+    """
+
+    def __init__(self, crawler: Any):
+        self.settings = crawler.settings
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+        # Read configuration
+        self.enabled = self.settings.get_bool('HEALTH_CHECK_ENABLED', True)
+        self.check_interval = self.settings.get_int('HEALTH_CHECK_INTERVAL', 60)  # default: 60 seconds
+
+        # Health statistics
+        self.stats: Dict[str, Any] = {
+            'start_time': None,
+            'total_requests': 0,
+            'total_responses': 0,
+            'error_responses': 0,
+            'last_check_time': None,
+            'response_times': [],  # most recent response times
+        }
+
+        self.task: Optional[asyncio.Task] = None
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'HealthCheckExtension':
+        # Only create an instance when the feature is enabled in settings
+        if not crawler.settings.get_bool('HEALTH_CHECK_ENABLED', True):
+            from crawlo.exceptions import NotConfigured
+            raise NotConfigured("HealthCheckExtension: HEALTH_CHECK_ENABLED is False")
+
+        o = cls(crawler)
+        if o.enabled:
+            crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+            crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+            crawler.subscriber.subscribe(o.response_received, event=response_received)
+            crawler.subscriber.subscribe(o.request_scheduled, event=request_scheduled)
+        return o
+
+    async def spider_opened(self) -> None:
+        """Start health checks when the spider opens"""
+        if not self.enabled:
+            return
+
+        self.stats['start_time'] = datetime.now()
+        self.task = asyncio.create_task(self._health_check_loop())
+        self.logger.info("Health check extension started.")
+
+    async def spider_closed(self) -> None:
+        """Stop health checks when the spider closes"""
+        if not self.enabled:
+            return
+
+        if self.task:
+            self.task.cancel()
+            try:
+                await self.task
+            except asyncio.CancelledError:
+                pass
+
+        # Emit a final health report
+        await self._check_health()
+        self.logger.info("Health check extension stopped.")
+
+    async def request_scheduled(self, request: Any, spider: Any) -> None:
+        """Count scheduled requests"""
+        if not self.enabled:
+            return
+        self.stats['total_requests'] += 1
+
+    async def response_received(self, response: Any, spider: Any) -> None:
+        """Count received responses"""
+        if not self.enabled:
+            return
+
+        self.stats['total_responses'] += 1
+
+        # Count error responses
+        if hasattr(response, 'status_code') and response.status_code >= 400:
+            self.stats['error_responses'] += 1
+
+    async def _health_check_loop(self) -> None:
+        """Periodic health-check loop"""
+        while True:
+            try:
+                await asyncio.sleep(self.check_interval)
+                await self._check_health()
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                self.logger.error(f"Error in health check loop: {e}")
+
+    async def _check_health(self) -> None:
+        """Run a health check and emit a report"""
+        try:
+            now_time = datetime.now()
+            self.stats['last_check_time'] = now_time
+
+            # Basic throughput statistics
+            runtime = (now_time - self.stats['start_time']).total_seconds() if self.stats['start_time'] else 0
+            requests_per_second = self.stats['total_requests'] / runtime if runtime > 0 else 0
+            responses_per_second = self.stats['total_responses'] / runtime if runtime > 0 else 0
+
+            # Error rate
+            error_rate = (
+                self.stats['error_responses'] / self.stats['total_responses']
+                if self.stats['total_responses'] > 0 else 0
+            )
+
+            # Build the health report
+            health_report = {
+                'runtime_seconds': round(runtime, 2),
+                'total_requests': self.stats['total_requests'],
+                'total_responses': self.stats['total_responses'],
+                'requests_per_second': round(requests_per_second, 2),
+                'responses_per_second': round(responses_per_second, 2),
+                'error_responses': self.stats['error_responses'],
+                'error_rate': f"{error_rate:.2%}",
+            }
+
+            # Choose the log level based on the error rate
+            if error_rate > 0.1:  # error rate above 10%
+                self.logger.warning(f"Health check report: {health_report}")
+            elif error_rate > 0.05:  # error rate above 5%
+                self.logger.info(f"Health check report: {health_report}")
+            else:
+                self.logger.debug(f"Health check report: {health_report}")
+
+        except Exception as e:
             self.logger.error(f"Error in health check: {e}")
crawlo/extension/log_interval.py CHANGED
@@ -1,58 +1,58 @@
-#!/usr/bin/python
-# -*- coding:UTF-8 -*-
-import asyncio
-from typing import Any, Optional
-
-from crawlo.utils.log import get_logger
-from crawlo.event import spider_opened, spider_closed
-
-
-class LogIntervalExtension(object):
-
-    def __init__(self, crawler: Any):
-        self.task: Optional[asyncio.Task] = None
-        self.stats = crawler.stats
-        self.item_count = 0
-        self.response_count = 0
-        self.seconds = crawler.settings.get('INTERVAL', 60)  # default: 60 seconds
-        self.interval = int(self.seconds / 60) if self.seconds % 60 == 0 else self.seconds
-        self.interval = "" if self.interval == 1 else self.interval
-        self.unit = 'min' if self.seconds % 60 == 0 else 's'
-
-        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
-
-    @classmethod
-    def create_instance(cls, crawler: Any) -> 'LogIntervalExtension':
-        o = cls(crawler)
-        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
-        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
-        return o
-
-    async def spider_opened(self) -> None:
-        self.task = asyncio.create_task(self.interval_log())
-
-    async def spider_closed(self) -> None:
-        if self.task:
-            self.task.cancel()
-            try:
-                await self.task
-            except asyncio.CancelledError:
-                pass
-            self.task = None
-
-    async def interval_log(self) -> None:
-        while True:
-            try:
-                last_item_count = self.stats.get_value('item_successful_count', default=0)
-                last_response_count = self.stats.get_value('response_received_count', default=0)
-                item_rate = last_item_count - self.item_count
-                response_rate = last_response_count - self.response_count
-                self.item_count, self.response_count = last_item_count, last_response_count
-                self.logger.info(
-                    f'Crawled {last_response_count} pages (at {response_rate} pages/{self.interval}{self.unit}),'
-                    f' Got {last_item_count} items (at {item_rate} items/{self.interval}{self.unit}).'
-                )
-                await asyncio.sleep(self.seconds)
-            except Exception as e:
-                self.logger.error(f"Error in interval logging: {e}")
+#!/usr/bin/python
+# -*- coding:UTF-8 -*-
+import asyncio
+from typing import Any, Optional
+
+from crawlo.utils.log import get_logger
+from crawlo.event import spider_opened, spider_closed
+
+
+class LogIntervalExtension(object):
+
+    def __init__(self, crawler: Any):
+        self.task: Optional[asyncio.Task] = None
+        self.stats = crawler.stats
+        self.item_count = 0
+        self.response_count = 0
+        self.seconds = crawler.settings.get('INTERVAL', 60)  # default: 60 seconds
+        self.interval = int(self.seconds / 60) if self.seconds % 60 == 0 else self.seconds
+        self.interval = "" if self.interval == 1 else self.interval
+        self.unit = 'min' if self.seconds % 60 == 0 else 's'
+
+        self.logger = get_logger(self.__class__.__name__, crawler.settings.get('LOG_LEVEL'))
+
+    @classmethod
+    def create_instance(cls, crawler: Any) -> 'LogIntervalExtension':
+        o = cls(crawler)
+        crawler.subscriber.subscribe(o.spider_opened, event=spider_opened)
+        crawler.subscriber.subscribe(o.spider_closed, event=spider_closed)
+        return o
+
+    async def spider_opened(self) -> None:
+        self.task = asyncio.create_task(self.interval_log())
+
+    async def spider_closed(self) -> None:
+        if self.task:
+            self.task.cancel()
+            try:
+                await self.task
+            except asyncio.CancelledError:
+                pass
+            self.task = None
+
+    async def interval_log(self) -> None:
+        while True:
+            try:
+                last_item_count = self.stats.get_value('item_successful_count', default=0)
+                last_response_count = self.stats.get_value('response_received_count', default=0)
+                item_rate = last_item_count - self.item_count
+                response_rate = last_response_count - self.response_count
+                self.item_count, self.response_count = last_item_count, last_response_count
+                self.logger.info(
+                    f'Crawled {last_response_count} pages (at {response_rate} pages/{self.interval}{self.unit}),'
+                    f' Got {last_item_count} items (at {item_rate} items/{self.interval}{self.unit}).'
+                )
+                await asyncio.sleep(self.seconds)
+            except Exception as e:
+                self.logger.error(f"Error in interval logging: {e}")
+                await asyncio.sleep(self.seconds)  # keep running even after an error
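The rate label in the log line is derived from the INTERVAL setting: values divisible by 60 are rendered in minutes, anything else in seconds. A short worked example of that mapping:

# settings.py
INTERVAL = 60    # -> "... (at N pages/min)"
# INTERVAL = 120 # -> "... (at N pages/2min)"
# INTERVAL = 30  # -> "... (at N pages/30s)"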