crawlo-1.4.7-py3-none-any.whl → crawlo-1.4.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of crawlo might be problematic.

Files changed (348)
  1. crawlo/__init__.py +90 -90
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +186 -186
  7. crawlo/commands/help.py +140 -140
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +379 -379
  10. crawlo/commands/startproject.py +460 -460
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +320 -320
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +52 -52
  16. crawlo/core/engine.py +451 -451
  17. crawlo/core/processor.py +47 -47
  18. crawlo/core/scheduler.py +290 -290
  19. crawlo/crawler.py +698 -698
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +280 -280
  23. crawlo/downloader/aiohttp_downloader.py +233 -233
  24. crawlo/downloader/cffi_downloader.py +250 -250
  25. crawlo/downloader/httpx_downloader.py +265 -265
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +425 -425
  28. crawlo/downloader/selenium_downloader.py +486 -486
  29. crawlo/event.py +45 -45
  30. crawlo/exceptions.py +214 -214
  31. crawlo/extension/__init__.py +64 -64
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +94 -94
  34. crawlo/extension/log_stats.py +70 -70
  35. crawlo/extension/logging_extension.py +53 -53
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +104 -104
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/factories/utils.py +134 -134
  44. crawlo/filters/__init__.py +170 -170
  45. crawlo/filters/aioredis_filter.py +347 -347
  46. crawlo/filters/memory_filter.py +261 -261
  47. crawlo/framework.py +306 -306
  48. crawlo/initialization/__init__.py +44 -44
  49. crawlo/initialization/built_in.py +391 -391
  50. crawlo/initialization/context.py +141 -141
  51. crawlo/initialization/core.py +240 -240
  52. crawlo/initialization/phases.py +229 -229
  53. crawlo/initialization/registry.py +143 -143
  54. crawlo/initialization/utils.py +48 -48
  55. crawlo/interfaces.py +23 -23
  56. crawlo/items/__init__.py +23 -23
  57. crawlo/items/base.py +23 -23
  58. crawlo/items/fields.py +52 -52
  59. crawlo/items/items.py +104 -104
  60. crawlo/logging/__init__.py +42 -42
  61. crawlo/logging/config.py +280 -276
  62. crawlo/logging/factory.py +175 -175
  63. crawlo/logging/manager.py +104 -104
  64. crawlo/middleware/__init__.py +87 -87
  65. crawlo/middleware/default_header.py +132 -132
  66. crawlo/middleware/download_delay.py +104 -104
  67. crawlo/middleware/middleware_manager.py +142 -142
  68. crawlo/middleware/offsite.py +123 -123
  69. crawlo/middleware/proxy.py +209 -209
  70. crawlo/middleware/request_ignore.py +86 -86
  71. crawlo/middleware/response_code.py +150 -150
  72. crawlo/middleware/response_filter.py +136 -136
  73. crawlo/middleware/retry.py +124 -124
  74. crawlo/mode_manager.py +287 -287
  75. crawlo/network/__init__.py +21 -21
  76. crawlo/network/request.py +408 -376
  77. crawlo/network/response.py +598 -569
  78. crawlo/pipelines/__init__.py +52 -52
  79. crawlo/pipelines/base_pipeline.py +452 -452
  80. crawlo/pipelines/bloom_dedup_pipeline.py +145 -146
  81. crawlo/pipelines/console_pipeline.py +39 -39
  82. crawlo/pipelines/csv_pipeline.py +316 -316
  83. crawlo/pipelines/database_dedup_pipeline.py +196 -197
  84. crawlo/pipelines/json_pipeline.py +218 -218
  85. crawlo/pipelines/memory_dedup_pipeline.py +104 -105
  86. crawlo/pipelines/mongo_pipeline.py +140 -139
  87. crawlo/pipelines/mysql_pipeline.py +468 -469
  88. crawlo/pipelines/pipeline_manager.py +100 -100
  89. crawlo/pipelines/redis_dedup_pipeline.py +155 -155
  90. crawlo/project.py +347 -347
  91. crawlo/queue/__init__.py +9 -9
  92. crawlo/queue/pqueue.py +38 -38
  93. crawlo/queue/queue_manager.py +591 -591
  94. crawlo/queue/redis_priority_queue.py +518 -518
  95. crawlo/settings/__init__.py +7 -7
  96. crawlo/settings/default_settings.py +287 -284
  97. crawlo/settings/setting_manager.py +219 -219
  98. crawlo/spider/__init__.py +658 -657
  99. crawlo/stats_collector.py +81 -81
  100. crawlo/subscriber.py +129 -129
  101. crawlo/task_manager.py +138 -138
  102. crawlo/templates/crawlo.cfg.tmpl +10 -10
  103. crawlo/templates/project/__init__.py.tmpl +1 -1
  104. crawlo/templates/project/items.py.tmpl +13 -13
  105. crawlo/templates/project/middlewares.py.tmpl +38 -38
  106. crawlo/templates/project/pipelines.py.tmpl +35 -35
  107. crawlo/templates/project/settings.py.tmpl +113 -109
  108. crawlo/templates/project/settings_distributed.py.tmpl +160 -156
  109. crawlo/templates/project/settings_gentle.py.tmpl +174 -170
  110. crawlo/templates/project/settings_high_performance.py.tmpl +175 -171
  111. crawlo/templates/project/settings_minimal.py.tmpl +102 -98
  112. crawlo/templates/project/settings_simple.py.tmpl +172 -168
  113. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  114. crawlo/templates/run.py.tmpl +23 -23
  115. crawlo/templates/spider/spider.py.tmpl +32 -32
  116. crawlo/templates/spiders_init.py.tmpl +4 -4
  117. crawlo/tools/__init__.py +86 -86
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +384 -384
  120. crawlo/tools/scenario_adapter.py +262 -262
  121. crawlo/tools/text_cleaner.py +232 -232
  122. crawlo/utils/__init__.py +74 -50
  123. crawlo/utils/batch_processor.py +276 -276
  124. crawlo/utils/config_manager.py +442 -442
  125. crawlo/utils/controlled_spider_mixin.py +439 -439
  126. crawlo/utils/db_helper.py +250 -250
  127. crawlo/utils/encoding_helper.py +190 -0
  128. crawlo/utils/error_handler.py +410 -410
  129. crawlo/utils/fingerprint.py +121 -121
  130. crawlo/utils/func_tools.py +82 -82
  131. crawlo/utils/large_scale_helper.py +344 -344
  132. crawlo/utils/leak_detector.py +335 -335
  133. crawlo/utils/misc.py +81 -81
  134. crawlo/utils/mongo_connection_pool.py +157 -157
  135. crawlo/utils/mysql_connection_pool.py +197 -197
  136. crawlo/utils/performance_monitor.py +285 -285
  137. crawlo/utils/queue_helper.py +175 -175
  138. crawlo/utils/redis_checker.py +90 -90
  139. crawlo/utils/redis_connection_pool.py +578 -578
  140. crawlo/utils/redis_key_validator.py +198 -198
  141. crawlo/utils/request.py +278 -278
  142. crawlo/utils/request_serializer.py +225 -225
  143. crawlo/utils/resource_manager.py +337 -337
  144. crawlo/utils/response_helper.py +113 -0
  145. crawlo/utils/selector_helper.py +138 -137
  146. crawlo/utils/singleton.py +69 -69
  147. crawlo/utils/spider_loader.py +201 -201
  148. crawlo/utils/text_helper.py +94 -94
  149. {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/METADATA +831 -689
  150. crawlo-1.4.8.dist-info/RECORD +347 -0
  151. examples/__init__.py +7 -7
  152. tests/__init__.py +7 -7
  153. tests/advanced_tools_example.py +217 -217
  154. tests/authenticated_proxy_example.py +110 -110
  155. tests/baidu_performance_test.py +108 -108
  156. tests/baidu_test.py +59 -59
  157. tests/bug_check_test.py +250 -250
  158. tests/cleaners_example.py +160 -160
  159. tests/comprehensive_framework_test.py +212 -212
  160. tests/comprehensive_test.py +81 -81
  161. tests/comprehensive_testing_summary.md +186 -186
  162. tests/config_validation_demo.py +142 -142
  163. tests/controlled_spider_example.py +205 -205
  164. tests/date_tools_example.py +180 -180
  165. tests/debug_configure.py +69 -69
  166. tests/debug_framework_logger.py +84 -84
  167. tests/debug_log_config.py +126 -126
  168. tests/debug_log_levels.py +63 -63
  169. tests/debug_pipelines.py +66 -66
  170. tests/detailed_log_test.py +233 -233
  171. tests/direct_selector_helper_test.py +96 -96
  172. tests/distributed_dedup_test.py +467 -467
  173. tests/distributed_test.py +66 -66
  174. tests/distributed_test_debug.py +76 -76
  175. tests/dynamic_loading_example.py +523 -523
  176. tests/dynamic_loading_test.py +104 -104
  177. tests/error_handling_example.py +171 -171
  178. tests/explain_mysql_update_behavior.py +76 -76
  179. tests/final_comprehensive_test.py +151 -151
  180. tests/final_log_test.py +260 -260
  181. tests/final_validation_test.py +182 -182
  182. tests/fix_log_test.py +142 -142
  183. tests/framework_performance_test.py +202 -202
  184. tests/log_buffering_test.py +111 -111
  185. tests/log_generation_timing_test.py +153 -153
  186. tests/monitor_redis_dedup.sh +72 -72
  187. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
  188. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
  189. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
  190. tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
  191. tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
  192. tests/ofweek_scrapy/scrapy.cfg +11 -11
  193. tests/optimized_performance_test.py +211 -211
  194. tests/performance_comparison.py +244 -244
  195. tests/queue_blocking_test.py +113 -113
  196. tests/queue_test.py +89 -89
  197. tests/redis_key_validation_demo.py +130 -130
  198. tests/request_params_example.py +150 -150
  199. tests/response_improvements_example.py +144 -144
  200. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  201. tests/scrapy_comparison/scrapy_test.py +133 -133
  202. tests/simple_cli_test.py +54 -54
  203. tests/simple_command_test.py +119 -119
  204. tests/simple_crawlo_test.py +126 -126
  205. tests/simple_follow_test.py +38 -38
  206. tests/simple_log_test2.py +137 -137
  207. tests/simple_optimization_test.py +128 -128
  208. tests/simple_queue_type_test.py +41 -41
  209. tests/simple_response_selector_test.py +94 -94
  210. tests/simple_selector_helper_test.py +154 -154
  211. tests/simple_selector_test.py +207 -207
  212. tests/simple_spider_test.py +49 -49
  213. tests/simple_url_test.py +73 -73
  214. tests/simulate_mysql_update_test.py +139 -139
  215. tests/spider_log_timing_test.py +177 -177
  216. tests/test_advanced_tools.py +148 -148
  217. tests/test_all_commands.py +230 -230
  218. tests/test_all_pipeline_fingerprints.py +133 -133
  219. tests/test_all_redis_key_configs.py +145 -145
  220. tests/test_asyncmy_usage.py +56 -56
  221. tests/test_batch_processor.py +178 -178
  222. tests/test_cleaners.py +54 -54
  223. tests/test_cli_arguments.py +118 -118
  224. tests/test_component_factory.py +174 -174
  225. tests/test_config_consistency.py +80 -80
  226. tests/test_config_merge.py +152 -152
  227. tests/test_config_validator.py +182 -182
  228. tests/test_controlled_spider_mixin.py +79 -79
  229. tests/test_crawler_process_import.py +38 -38
  230. tests/test_crawler_process_spider_modules.py +47 -47
  231. tests/test_crawlo_proxy_integration.py +114 -114
  232. tests/test_date_tools.py +123 -123
  233. tests/test_dedup_fix.py +220 -220
  234. tests/test_dedup_pipeline_consistency.py +124 -124
  235. tests/test_default_header_middleware.py +313 -313
  236. tests/test_distributed.py +65 -65
  237. tests/test_double_crawlo_fix.py +204 -204
  238. tests/test_double_crawlo_fix_simple.py +124 -124
  239. tests/test_download_delay_middleware.py +221 -221
  240. tests/test_downloader_proxy_compatibility.py +272 -272
  241. tests/test_edge_cases.py +305 -305
  242. tests/test_encoding_core.py +56 -56
  243. tests/test_encoding_detection.py +126 -126
  244. tests/test_enhanced_error_handler.py +270 -270
  245. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  246. tests/test_error_handler_compatibility.py +112 -112
  247. tests/test_factories.py +252 -252
  248. tests/test_factory_compatibility.py +196 -196
  249. tests/test_final_validation.py +153 -153
  250. tests/test_fingerprint_consistency.py +135 -135
  251. tests/test_fingerprint_simple.py +51 -51
  252. tests/test_get_component_logger.py +83 -83
  253. tests/test_hash_performance.py +99 -99
  254. tests/test_integration.py +169 -169
  255. tests/test_item_dedup_redis_key.py +122 -122
  256. tests/test_large_scale_helper.py +235 -235
  257. tests/test_logging_enhancements.py +374 -374
  258. tests/test_logging_final.py +184 -184
  259. tests/test_logging_integration.py +312 -312
  260. tests/test_logging_system.py +282 -282
  261. tests/test_middleware_debug.py +141 -141
  262. tests/test_mode_consistency.py +51 -51
  263. tests/test_multi_directory.py +67 -67
  264. tests/test_multiple_spider_modules.py +80 -80
  265. tests/test_mysql_pipeline_config.py +164 -164
  266. tests/test_mysql_pipeline_error.py +98 -98
  267. tests/test_mysql_pipeline_init_log.py +82 -82
  268. tests/test_mysql_pipeline_integration.py +132 -132
  269. tests/test_mysql_pipeline_refactor.py +143 -143
  270. tests/test_mysql_pipeline_refactor_simple.py +85 -85
  271. tests/test_mysql_pipeline_robustness.py +195 -195
  272. tests/test_mysql_pipeline_types.py +88 -88
  273. tests/test_mysql_update_columns.py +93 -93
  274. tests/test_offsite_middleware.py +244 -244
  275. tests/test_offsite_middleware_simple.py +203 -203
  276. tests/test_optimized_selector_naming.py +100 -100
  277. tests/test_parsel.py +29 -29
  278. tests/test_performance.py +327 -327
  279. tests/test_performance_monitor.py +115 -115
  280. tests/test_pipeline_fingerprint_consistency.py +86 -86
  281. tests/test_priority_behavior.py +211 -211
  282. tests/test_priority_consistency.py +151 -151
  283. tests/test_priority_consistency_fixed.py +249 -249
  284. tests/test_proxy_health_check.py +32 -32
  285. tests/test_proxy_middleware.py +217 -217
  286. tests/test_proxy_middleware_enhanced.py +212 -212
  287. tests/test_proxy_middleware_integration.py +142 -142
  288. tests/test_proxy_middleware_refactored.py +207 -207
  289. tests/test_proxy_only.py +83 -83
  290. tests/test_proxy_providers.py +56 -56
  291. tests/test_proxy_stats.py +19 -19
  292. tests/test_proxy_strategies.py +59 -59
  293. tests/test_proxy_with_downloader.py +152 -152
  294. tests/test_queue_empty_check.py +41 -41
  295. tests/test_queue_manager_double_crawlo.py +173 -173
  296. tests/test_queue_manager_redis_key.py +179 -179
  297. tests/test_queue_naming.py +154 -154
  298. tests/test_queue_type.py +106 -106
  299. tests/test_queue_type_redis_config_consistency.py +130 -130
  300. tests/test_random_headers_default.py +322 -322
  301. tests/test_random_headers_necessity.py +308 -308
  302. tests/test_random_user_agent.py +72 -72
  303. tests/test_redis_config.py +28 -28
  304. tests/test_redis_connection_pool.py +294 -294
  305. tests/test_redis_key_naming.py +181 -181
  306. tests/test_redis_key_validator.py +123 -123
  307. tests/test_redis_queue.py +224 -224
  308. tests/test_redis_queue_name_fix.py +175 -175
  309. tests/test_redis_queue_type_fallback.py +129 -129
  310. tests/test_request_ignore_middleware.py +182 -182
  311. tests/test_request_params.py +111 -111
  312. tests/test_request_serialization.py +70 -70
  313. tests/test_response_code_middleware.py +349 -349
  314. tests/test_response_filter_middleware.py +427 -427
  315. tests/test_response_follow.py +104 -104
  316. tests/test_response_improvements.py +152 -152
  317. tests/test_response_selector_methods.py +92 -92
  318. tests/test_response_url_methods.py +70 -70
  319. tests/test_response_urljoin.py +86 -86
  320. tests/test_retry_middleware.py +333 -333
  321. tests/test_retry_middleware_realistic.py +273 -273
  322. tests/test_scheduler.py +252 -252
  323. tests/test_scheduler_config_update.py +133 -133
  324. tests/test_scrapy_style_encoding.py +112 -112
  325. tests/test_selector_helper.py +100 -100
  326. tests/test_selector_optimizations.py +146 -146
  327. tests/test_simple_response.py +61 -61
  328. tests/test_spider_loader.py +49 -49
  329. tests/test_spider_loader_comprehensive.py +69 -69
  330. tests/test_spider_modules.py +84 -84
  331. tests/test_spiders/test_spider.py +9 -9
  332. tests/test_telecom_spider_redis_key.py +205 -205
  333. tests/test_template_content.py +87 -87
  334. tests/test_template_redis_key.py +134 -134
  335. tests/test_tools.py +159 -159
  336. tests/test_user_agent_randomness.py +176 -176
  337. tests/test_user_agents.py +96 -96
  338. tests/untested_features_report.md +138 -138
  339. tests/verify_debug.py +51 -51
  340. tests/verify_distributed.py +117 -117
  341. tests/verify_log_fix.py +111 -111
  342. tests/verify_mysql_warnings.py +109 -109
  343. crawlo/utils/log.py +0 -80
  344. crawlo/utils/url_utils.py +0 -40
  345. crawlo-1.4.7.dist-info/RECORD +0 -347
  346. {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
  347. {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
  348. {crawlo-1.4.7.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
tests/untested_features_report.md CHANGED
@@ -1,139 +1,139 @@
- # Untested Features Report
-
- ## Overview
-
- After a comprehensive analysis of the Crawlo framework, the following modules were found to lack dedicated test cases. Although parts of their functionality may be covered indirectly by other tests, they have no targeted unit or integration tests.
-
- ## Modules with Completed Tests
-
- ### 1. Factory Pattern Modules
-
- **Module path**: `crawlo/factories/`
-
- **Test file**: `tests/test_factories.py`
-
- **Tested components**:
- - `ComponentRegistry` - component registry
- - `ComponentFactory` - component factory base class
- - `DefaultComponentFactory` - default component factory
- - `CrawlerComponentFactory` - crawler component factory
-
- ### 2. Batch Processing Utilities
-
- **Module path**: `crawlo/utils/batch_processor.py`
-
- **Test file**: `tests/test_batch_processor.py`
-
- **Tested components**:
- - `BatchProcessor` - batch processor
- - `RedisBatchProcessor` - Redis batch processor
- - `batch_process` - convenience batch-processing function
-
- ### 3. Controlled Spider Mixins
-
- **Module path**: `crawlo/utils/controlled_spider_mixin.py`
-
- **Test file**: `tests/test_controlled_spider_mixin.py`
-
- **Tested components**:
- - `ControlledRequestMixin` - controlled request-generation mixin
- - `AsyncControlledRequestMixin` - asynchronous controlled request mixin
-
- ### 4. Large-Scale Configuration Utilities
-
- **Module path**: `crawlo/utils/large_scale_config.py`
-
- **Test file**: `tests/test_large_scale_config.py`
-
- **Tested components**:
- - `LargeScaleConfig` - configuration class for large-scale crawls
- - `apply_large_scale_config` - function that applies the large-scale configuration
-
- ### 5. Large-Scale Crawling Helpers
-
- **Module path**: `crawlo/utils/large_scale_helper.py`
-
- **Test file**: `tests/test_large_scale_helper.py`
-
- **Tested components**:
- - `LargeScaleHelper` - helper class for large-scale crawls
- - `ProgressManager` - progress manager
- - `MemoryOptimizer` - memory optimizer
- - `DataSourceAdapter` - data source adapter
- - `LargeScaleSpiderMixin` - large-scale spider mixin
-
- ### 6. Enhanced Error Handling Utilities
-
- **Module path**: `crawlo/utils/enhanced_error_handler.py`
-
- **Test files**:
- - `tests/test_enhanced_error_handler.py` (basic tests)
- - `tests/test_enhanced_error_handler_comprehensive.py` (comprehensive tests)
-
- **Tested components**:
- - `ErrorContext` - error context information
- - `DetailedException` - detailed exception base class
- - `EnhancedErrorHandler` - enhanced error handler
- - `handle_exception` decorator
-
- ## Untested Modules
-
- ### 1. Performance Monitoring Utilities
-
- **Module path**: `crawlo/utils/performance_monitor.py`
-
- **Test file**: `tests/test_performance_monitor.py` (partial tests, depends on psutil)
-
- **Insufficiently tested components**:
- - `PerformanceMonitor` - performance monitor
- - `PerformanceTimer` - performance timer
- - `performance_monitor_decorator` - performance monitoring decorator
-
- **Risk**: Performance monitoring is an important tool for optimization and diagnostics; without tests, monitoring data may be inaccurate or the monitoring features may fail silently.
-
- ## Recommended Testing Strategy
-
- ### 1. Prioritization
-
- **High priority** (directly affects core functionality):
- - (completed)
-
- **Medium priority** (affects performance and stability):
- - performance monitoring utilities
-
- **Low priority** (auxiliary functionality):
- - (completed)
-
- ### 2. Recommended Test Types
-
- **Unit tests**:
- - test each class's methods in isolation
- - verify boundary conditions and error cases
- - test the validity of configuration parameters
-
- **Integration tests**:
- - test cooperation between modules
- - verify interaction with external services such as Redis
- - test behavior in realistic crawling scenarios
-
- **Performance tests**:
- - verify the performance benefit of the batch-processing utilities
- - test the memory usage of the large-scale processing utilities
- - verify the accuracy of the performance monitoring utilities
-
- ### 3. Coverage Recommendations
-
- **Core functionality coverage**:
- - happy-path tests
- - error-path tests
- - boundary condition tests
- - concurrency safety tests
-
- **Configuration coverage**:
- - tests for different configuration parameters
- - comparison of default and custom configuration
- - dynamic tests of configuration updates
-
- ## Conclusion
-
+ # Untested Features Report
+
+ ## Overview
+
+ After a comprehensive analysis of the Crawlo framework, the following modules were found to lack dedicated test cases. Although parts of their functionality may be covered indirectly by other tests, they have no targeted unit or integration tests.
+
+ ## Modules with Completed Tests
+
+ ### 1. Factory Pattern Modules
+
+ **Module path**: `crawlo/factories/`
+
+ **Test file**: `tests/test_factories.py`
+
+ **Tested components**:
+ - `ComponentRegistry` - component registry
+ - `ComponentFactory` - component factory base class
+ - `DefaultComponentFactory` - default component factory
+ - `CrawlerComponentFactory` - crawler component factory
+
+ ### 2. Batch Processing Utilities
+
+ **Module path**: `crawlo/utils/batch_processor.py`
+
+ **Test file**: `tests/test_batch_processor.py`
+
+ **Tested components**:
+ - `BatchProcessor` - batch processor
+ - `RedisBatchProcessor` - Redis batch processor
+ - `batch_process` - convenience batch-processing function
+
+ ### 3. Controlled Spider Mixins
+
+ **Module path**: `crawlo/utils/controlled_spider_mixin.py`
+
+ **Test file**: `tests/test_controlled_spider_mixin.py`
+
+ **Tested components**:
+ - `ControlledRequestMixin` - controlled request-generation mixin
+ - `AsyncControlledRequestMixin` - asynchronous controlled request mixin
+
+ ### 4. Large-Scale Configuration Utilities
+
+ **Module path**: `crawlo/utils/large_scale_config.py`
+
+ **Test file**: `tests/test_large_scale_config.py`
+
+ **Tested components**:
+ - `LargeScaleConfig` - configuration class for large-scale crawls
+ - `apply_large_scale_config` - function that applies the large-scale configuration
+
+ ### 5. Large-Scale Crawling Helpers
+
+ **Module path**: `crawlo/utils/large_scale_helper.py`
+
+ **Test file**: `tests/test_large_scale_helper.py`
+
+ **Tested components**:
+ - `LargeScaleHelper` - helper class for large-scale crawls
+ - `ProgressManager` - progress manager
+ - `MemoryOptimizer` - memory optimizer
+ - `DataSourceAdapter` - data source adapter
+ - `LargeScaleSpiderMixin` - large-scale spider mixin
+
+ ### 6. Enhanced Error Handling Utilities
+
+ **Module path**: `crawlo/utils/enhanced_error_handler.py`
+
+ **Test files**:
+ - `tests/test_enhanced_error_handler.py` (basic tests)
+ - `tests/test_enhanced_error_handler_comprehensive.py` (comprehensive tests)
+
+ **Tested components**:
+ - `ErrorContext` - error context information
+ - `DetailedException` - detailed exception base class
+ - `EnhancedErrorHandler` - enhanced error handler
+ - `handle_exception` decorator
+
+ ## Untested Modules
+
+ ### 1. Performance Monitoring Utilities
+
+ **Module path**: `crawlo/utils/performance_monitor.py`
+
+ **Test file**: `tests/test_performance_monitor.py` (partial tests, depends on psutil)
+
+ **Insufficiently tested components**:
+ - `PerformanceMonitor` - performance monitor
+ - `PerformanceTimer` - performance timer
+ - `performance_monitor_decorator` - performance monitoring decorator
+
+ **Risk**: Performance monitoring is an important tool for optimization and diagnostics; without tests, monitoring data may be inaccurate or the monitoring features may fail silently.
+
+ ## Recommended Testing Strategy
+
+ ### 1. Prioritization
+
+ **High priority** (directly affects core functionality):
+ - (completed)
+
+ **Medium priority** (affects performance and stability):
+ - performance monitoring utilities
+
+ **Low priority** (auxiliary functionality):
+ - (completed)
+
+ ### 2. Recommended Test Types
+
+ **Unit tests**:
+ - test each class's methods in isolation
+ - verify boundary conditions and error cases
+ - test the validity of configuration parameters
+
+ **Integration tests**:
+ - test cooperation between modules
+ - verify interaction with external services such as Redis
+ - test behavior in realistic crawling scenarios
+
+ **Performance tests**:
+ - verify the performance benefit of the batch-processing utilities
+ - test the memory usage of the large-scale processing utilities
+ - verify the accuracy of the performance monitoring utilities
+
+ ### 3. Coverage Recommendations
+
+ **Core functionality coverage**:
+ - happy-path tests
+ - error-path tests
+ - boundary condition tests
+ - concurrency safety tests
+
+ **Configuration coverage**:
+ - tests for different configuration parameters
+ - comparison of default and custom configuration
+ - dynamic tests of configuration updates
+
+ ## Conclusion
+
  Test cases have been created for the factory pattern modules, batch-processing utilities, controlled spider mixins, large-scale configuration utilities, large-scale crawling helpers, and enhanced error handling utilities, giving these core components basic test coverage. It is recommended to also add test cases for the performance monitoring utilities (once psutil is installed) to ensure the completeness and stability of the framework.
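The report above recommends adding unit tests for the performance monitoring utilities once psutil is available. Below is a minimal sketch of what such a test could look like; the import path and the context-manager `elapsed` API of `PerformanceTimer` are assumptions made for illustration, not crawlo's documented interface, and would need to be adjusted to the real signatures in `crawlo/utils/performance_monitor.py`.

```python
# Hypothetical unit-test sketch for the performance timer named in the report.
# Assumes PerformanceTimer works as a context manager exposing an `elapsed`
# attribute (in seconds); the actual crawlo API may differ.
import time

from crawlo.utils.performance_monitor import PerformanceTimer  # assumed import path


def test_performance_timer_measures_elapsed_time():
    # Time a short sleep and check that the recorded duration is plausible.
    with PerformanceTimer() as timer:
        time.sleep(0.05)
    assert timer.elapsed >= 0.04


def test_nested_timers_are_independent():
    # An inner timer should never report more time than the outer one.
    with PerformanceTimer() as outer:
        with PerformanceTimer() as inner:
            time.sleep(0.01)
    assert inner.elapsed <= outer.elapsed
```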
tests/verify_debug.py CHANGED
@@ -1,52 +1,52 @@
- #!/usr/bin/env python
- # -*- coding: utf-8 -*-
- """
- Simple test verifying the run-mode log level change
- """
- import os
-
- # Remove the old log file
- log_file = 'verify_debug.log'
- if os.path.exists(log_file):
-     os.remove(log_file)
-
- # Simple log level test
- from crawlo.utils.log import LoggerManager
-
- # Configure the logging system
- LoggerManager.configure(
-     LOG_LEVEL='INFO',
-     LOG_FILE=log_file
- )
-
- from crawlo.utils.log import get_logger
-
- # Create a test logger
- test_logger = get_logger('crawlo.framework')
-
- # Test output
- test_logger.info("This is an INFO-level test message")
- test_logger.debug("This is a DEBUG-level test message (should not appear in an INFO-level log)")
- test_logger.debug("Using standalone mode - simple and fast, suitable for development and small to medium crawls")
-
- print("Test finished")
-
- # Check the log file
- if os.path.exists(log_file):
-     with open(log_file, 'r', encoding='utf-8') as f:
-         content = f.read()
-     print(f"Log file content ({len(content)} characters):")
-     print(content)
-
-     # Check for DEBUG messages that should not appear
-     if "DEBUG" in content:
-         print("❌ Found DEBUG-level messages (should not appear)")
-     else:
-         print("✅ No DEBUG-level messages found (correct)")
-
-     if "Using standalone mode" in content:
-         print("❌ Found run-mode message (should not appear at INFO level)")
-     else:
-         print("✅ No run-mode message found (correct)")
- else:
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ """
+ Simple test verifying the run-mode log level change
+ """
+ import os
+
+ # Remove the old log file
+ log_file = 'verify_debug.log'
+ if os.path.exists(log_file):
+     os.remove(log_file)
+
+ # Simple log level test
+ from crawlo.utils.log import LoggerManager
+
+ # Configure the logging system
+ LoggerManager.configure(
+     LOG_LEVEL='INFO',
+     LOG_FILE=log_file
+ )
+
+ from crawlo.utils.log import get_logger
+
+ # Create a test logger
+ test_logger = get_logger('crawlo.framework')
+
+ # Test output
+ test_logger.info("This is an INFO-level test message")
+ test_logger.debug("This is a DEBUG-level test message (should not appear in an INFO-level log)")
+ test_logger.debug("Using standalone mode - simple and fast, suitable for development and small to medium crawls")
+
+ print("Test finished")
+
+ # Check the log file
+ if os.path.exists(log_file):
+     with open(log_file, 'r', encoding='utf-8') as f:
+         content = f.read()
+     print(f"Log file content ({len(content)} characters):")
+     print(content)
+
+     # Check for DEBUG messages that should not appear
+     if "DEBUG" in content:
+         print("❌ Found DEBUG-level messages (should not appear)")
+     else:
+         print("✅ No DEBUG-level messages found (correct)")
+
+     if "Using standalone mode" in content:
+         print("❌ Found run-mode message (should not appear at INFO level)")
+     else:
+         print("✅ No run-mode message found (correct)")
+ else:
      print("❌ Log file was not created")
tests/verify_distributed.py CHANGED
@@ -1,117 +1,117 @@
- #!/usr/bin/env python
- # -*- coding: UTF-8 -*-
- """
- Distributed crawling verification script
- Verifies that the distributed crawling features of the Crawlo framework work correctly
- """
-
- import redis
- import json
- import os
- import sys
-
- # Add the project root directory to the Python path
- project_root = os.path.dirname(os.path.abspath(__file__))
- sys.path.insert(0, project_root)
-
-
- def verify_distributed_functionality():
-     """Verify the distributed crawling functionality"""
-     print("=== Crawlo distributed crawling verification ===\n")
-
-     # 1. Connect to Redis
-     try:
-         r = redis.Redis(host='localhost', port=6379, db=2, decode_responses=False)
-         r.ping()
-         print("✓ Redis connection succeeded")
-     except Exception as e:
-         print(f"✗ Redis connection failed: {e}")
-         return False
-
-     # 2. Check the project configuration
-     try:
-         with open('../examples/ofweek_distributed/crawlo.cfg', 'r') as f:
-             config_content = f.read()
-         if 'ofweek_distributed.settings' in config_content:
-             print("✓ Project configuration file is correct")
-         else:
-             print("✗ Project configuration file is incorrect")
-             return False
-     except Exception as e:
-         print(f"✗ Unable to read the configuration file: {e}")
-         return False
-
-     # 3. Check the settings file
-     try:
-         with open('../examples/ofweek_distributed/ofweek_distributed/settings.py', 'r') as f:
-             settings_content = f.read()
-         checks = [
-             ('RUN_MODE = \'distributed\'', 'run mode set to distributed'),
-             ('QUEUE_TYPE = \'redis\'', 'queue type set to Redis'),
-             ('FILTER_CLASS = \'crawlo.filters.aioredis_filter.AioRedisFilter\'', 'filter set to the Redis filter'),
-             ('REDIS_HOST = \'127.0.0.1\'', 'Redis host configured correctly'),
-         ]
-
-         all_passed = True
-         for check, description in checks:
-             if check in settings_content:
-                 print(f"✓ {description}")
-             else:
-                 print(f"✗ {description}")
-                 all_passed = False
-
-         if not all_passed:
-             return False
-     except Exception as e:
-         print(f"✗ Unable to read the settings file: {e}")
-         return False
-
-     # 4. Check the data in Redis
-     try:
-         # Check request dedup fingerprints
-         request_fingerprints = r.scard("crawlo:ofweek_distributed:filter:fingerprint")
-         print(f"✓ Number of request dedup fingerprints: {request_fingerprints}")
-
-         # Check item dedup fingerprints
-         item_fingerprints = r.scard("crawlo:ofweek_distributed:item:fingerprint")
-         print(f"✓ Number of item dedup fingerprints: {item_fingerprints}")
-
-         # Check the request queue
-         queue_size = r.zcard("crawlo:ofweek_distributed:queue:requests")
-         print(f"✓ Request queue size: {queue_size}")
-
-         # Verify that data exists
-         if request_fingerprints > 0 and item_fingerprints > 0:
-             print("✓ Distributed crawling data exists in Redis")
-         else:
-             print("⚠ Distributed crawling data in Redis is empty")
-
-     except Exception as e:
-         print(f"✗ Redis data check failed: {e}")
-         return False
-
-     # 5. Check output files
-     try:
-         import glob
-         json_files = glob.glob("output/*.json")
-         if json_files:
-             latest_file = max(json_files, key=os.path.getctime)
-             file_size = os.path.getsize(latest_file)
-             print(f"✓ Output file exists: {latest_file} ({file_size} bytes)")
-         else:
-             print("⚠ No output file found")
-     except Exception as e:
-         print(f"✗ Output file check failed: {e}")
-
-     print("\n=== Verification result ===")
-     print("✓ Crawlo distributed crawling works correctly!")
-     print(" - Redis connection OK")
-     print(" - distributed configuration correct")
-     print(" - Redis data storage OK")
-     print(" - crawl task executed successfully")
-
-     return True
-
-
- if __name__ == '__main__':
-     verify_distributed_functionality()
+ #!/usr/bin/env python
+ # -*- coding: UTF-8 -*-
+ """
+ Distributed crawling verification script
+ Verifies that the distributed crawling features of the Crawlo framework work correctly
+ """
+
+ import redis
+ import json
+ import os
+ import sys
+
+ # Add the project root directory to the Python path
+ project_root = os.path.dirname(os.path.abspath(__file__))
+ sys.path.insert(0, project_root)
+
+
+ def verify_distributed_functionality():
+     """Verify the distributed crawling functionality"""
+     print("=== Crawlo distributed crawling verification ===\n")
+
+     # 1. Connect to Redis
+     try:
+         r = redis.Redis(host='localhost', port=6379, db=2, decode_responses=False)
+         r.ping()
+         print("✓ Redis connection succeeded")
+     except Exception as e:
+         print(f"✗ Redis connection failed: {e}")
+         return False
+
+     # 2. Check the project configuration
+     try:
+         with open('../examples/ofweek_distributed/crawlo.cfg', 'r') as f:
+             config_content = f.read()
+         if 'ofweek_distributed.settings' in config_content:
+             print("✓ Project configuration file is correct")
+         else:
+             print("✗ Project configuration file is incorrect")
+             return False
+     except Exception as e:
+         print(f"✗ Unable to read the configuration file: {e}")
+         return False
+
+     # 3. Check the settings file
+     try:
+         with open('../examples/ofweek_distributed/ofweek_distributed/settings.py', 'r') as f:
+             settings_content = f.read()
+         checks = [
+             ('RUN_MODE = \'distributed\'', 'run mode set to distributed'),
+             ('QUEUE_TYPE = \'redis\'', 'queue type set to Redis'),
+             ('FILTER_CLASS = \'crawlo.filters.aioredis_filter.AioRedisFilter\'', 'filter set to the Redis filter'),
+             ('REDIS_HOST = \'127.0.0.1\'', 'Redis host configured correctly'),
+         ]
+
+         all_passed = True
+         for check, description in checks:
+             if check in settings_content:
+                 print(f"✓ {description}")
+             else:
+                 print(f"✗ {description}")
+                 all_passed = False
+
+         if not all_passed:
+             return False
+     except Exception as e:
+         print(f"✗ Unable to read the settings file: {e}")
+         return False
+
+     # 4. Check the data in Redis
+     try:
+         # Check request dedup fingerprints
+         request_fingerprints = r.scard("crawlo:ofweek_distributed:filter:fingerprint")
+         print(f"✓ Number of request dedup fingerprints: {request_fingerprints}")
+
+         # Check item dedup fingerprints
+         item_fingerprints = r.scard("crawlo:ofweek_distributed:item:fingerprint")
+         print(f"✓ Number of item dedup fingerprints: {item_fingerprints}")
+
+         # Check the request queue
+         queue_size = r.zcard("crawlo:ofweek_distributed:queue:requests")
+         print(f"✓ Request queue size: {queue_size}")
+
+         # Verify that data exists
+         if request_fingerprints > 0 and item_fingerprints > 0:
+             print("✓ Distributed crawling data exists in Redis")
+         else:
+             print("⚠ Distributed crawling data in Redis is empty")
+
+     except Exception as e:
+         print(f"✗ Redis data check failed: {e}")
+         return False
+
+     # 5. Check output files
+     try:
+         import glob
+         json_files = glob.glob("output/*.json")
+         if json_files:
+             latest_file = max(json_files, key=os.path.getctime)
+             file_size = os.path.getsize(latest_file)
+             print(f"✓ Output file exists: {latest_file} ({file_size} bytes)")
+         else:
+             print("⚠ No output file found")
+     except Exception as e:
+         print(f"✗ Output file check failed: {e}")
+
+     print("\n=== Verification result ===")
+     print("✓ Crawlo distributed crawling works correctly!")
+     print(" - Redis connection OK")
+     print(" - distributed configuration correct")
+     print(" - Redis data storage OK")
+     print(" - crawl task executed successfully")
+
+     return True
+
+
+ if __name__ == '__main__':
+     verify_distributed_functionality()
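The script above reads three Redis keys (request fingerprints, item fingerprints, and the request queue) to confirm that a distributed run left data behind. The same checks can be wrapped as a pytest test that skips cleanly when Redis is unreachable; this is an illustrative sketch, with the key names copied from the script and assuming its `crawlo:<project>:...` naming scheme.

```python
# Sketch: the Redis checks from verify_distributed.py expressed as a pytest test.
# Host/port/db and key names mirror the script above; adjust for your deployment.
import pytest
import redis


@pytest.fixture
def redis_client():
    client = redis.Redis(host='localhost', port=6379, db=2)
    try:
        client.ping()
    except redis.exceptions.ConnectionError:
        pytest.skip("Redis is not available on localhost:6379 (db 2)")
    return client


def test_distributed_run_left_data_in_redis(redis_client):
    project = "ofweek_distributed"
    # Request and item dedup fingerprints are stored as Redis sets.
    request_fps = redis_client.scard(f"crawlo:{project}:filter:fingerprint")
    item_fps = redis_client.scard(f"crawlo:{project}:item:fingerprint")
    # The request queue is a sorted set ordered by priority.
    queue_size = redis_client.zcard(f"crawlo:{project}:queue:requests")

    assert request_fps > 0, "no request fingerprints found"
    assert item_fps > 0, "no item fingerprints found"
    assert queue_size >= 0  # the queue may legitimately be drained after a run
```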