crawlo 1.4.5-py3-none-any.whl → 1.4.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crawlo might be problematic.

Files changed (375)
  1. crawlo/__init__.py +90 -89
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +75 -75
  4. crawlo/commands/__init__.py +14 -14
  5. crawlo/commands/check.py +594 -594
  6. crawlo/commands/genspider.py +186 -186
  7. crawlo/commands/help.py +140 -138
  8. crawlo/commands/list.py +155 -155
  9. crawlo/commands/run.py +379 -341
  10. crawlo/commands/startproject.py +460 -460
  11. crawlo/commands/stats.py +187 -187
  12. crawlo/commands/utils.py +196 -196
  13. crawlo/config.py +320 -312
  14. crawlo/config_validator.py +277 -277
  15. crawlo/core/__init__.py +52 -52
  16. crawlo/core/engine.py +451 -438
  17. crawlo/core/processor.py +47 -47
  18. crawlo/core/scheduler.py +290 -291
  19. crawlo/crawler.py +698 -657
  20. crawlo/data/__init__.py +5 -5
  21. crawlo/data/user_agents.py +194 -194
  22. crawlo/downloader/__init__.py +280 -276
  23. crawlo/downloader/aiohttp_downloader.py +233 -233
  24. crawlo/downloader/cffi_downloader.py +250 -245
  25. crawlo/downloader/httpx_downloader.py +265 -259
  26. crawlo/downloader/hybrid_downloader.py +212 -212
  27. crawlo/downloader/playwright_downloader.py +425 -402
  28. crawlo/downloader/selenium_downloader.py +486 -472
  29. crawlo/event.py +45 -11
  30. crawlo/exceptions.py +215 -82
  31. crawlo/extension/__init__.py +65 -64
  32. crawlo/extension/health_check.py +141 -141
  33. crawlo/extension/log_interval.py +94 -94
  34. crawlo/extension/log_stats.py +70 -70
  35. crawlo/extension/logging_extension.py +53 -61
  36. crawlo/extension/memory_monitor.py +104 -104
  37. crawlo/extension/performance_profiler.py +133 -133
  38. crawlo/extension/request_recorder.py +107 -107
  39. crawlo/factories/__init__.py +27 -27
  40. crawlo/factories/base.py +68 -68
  41. crawlo/factories/crawler.py +104 -103
  42. crawlo/factories/registry.py +84 -84
  43. crawlo/factories/utils.py +135 -0
  44. crawlo/filters/__init__.py +170 -153
  45. crawlo/filters/aioredis_filter.py +348 -264
  46. crawlo/filters/memory_filter.py +261 -276
  47. crawlo/framework.py +306 -292
  48. crawlo/initialization/__init__.py +44 -44
  49. crawlo/initialization/built_in.py +391 -434
  50. crawlo/initialization/context.py +141 -141
  51. crawlo/initialization/core.py +240 -194
  52. crawlo/initialization/phases.py +230 -149
  53. crawlo/initialization/registry.py +143 -145
  54. crawlo/initialization/utils.py +49 -0
  55. crawlo/interfaces.py +23 -23
  56. crawlo/items/__init__.py +23 -23
  57. crawlo/items/base.py +23 -23
  58. crawlo/items/fields.py +52 -52
  59. crawlo/items/items.py +104 -104
  60. crawlo/logging/__init__.py +42 -46
  61. crawlo/logging/config.py +277 -197
  62. crawlo/logging/factory.py +175 -171
  63. crawlo/logging/manager.py +104 -112
  64. crawlo/middleware/__init__.py +87 -24
  65. crawlo/middleware/default_header.py +132 -132
  66. crawlo/middleware/download_delay.py +104 -104
  67. crawlo/middleware/middleware_manager.py +142 -142
  68. crawlo/middleware/offsite.py +123 -123
  69. crawlo/middleware/proxy.py +209 -386
  70. crawlo/middleware/request_ignore.py +86 -86
  71. crawlo/middleware/response_code.py +150 -150
  72. crawlo/middleware/response_filter.py +136 -136
  73. crawlo/middleware/retry.py +124 -124
  74. crawlo/mode_manager.py +287 -253
  75. crawlo/network/__init__.py +21 -21
  76. crawlo/network/request.py +375 -379
  77. crawlo/network/response.py +569 -664
  78. crawlo/pipelines/__init__.py +53 -22
  79. crawlo/pipelines/base_pipeline.py +452 -0
  80. crawlo/pipelines/bloom_dedup_pipeline.py +146 -146
  81. crawlo/pipelines/console_pipeline.py +39 -39
  82. crawlo/pipelines/csv_pipeline.py +316 -316
  83. crawlo/pipelines/database_dedup_pipeline.py +197 -197
  84. crawlo/pipelines/json_pipeline.py +218 -218
  85. crawlo/pipelines/memory_dedup_pipeline.py +105 -105
  86. crawlo/pipelines/mongo_pipeline.py +140 -132
  87. crawlo/pipelines/mysql_pipeline.py +470 -326
  88. crawlo/pipelines/pipeline_manager.py +100 -100
  89. crawlo/pipelines/redis_dedup_pipeline.py +155 -156
  90. crawlo/project.py +347 -347
  91. crawlo/queue/__init__.py +10 -0
  92. crawlo/queue/pqueue.py +38 -38
  93. crawlo/queue/queue_manager.py +591 -525
  94. crawlo/queue/redis_priority_queue.py +519 -370
  95. crawlo/settings/__init__.py +7 -7
  96. crawlo/settings/default_settings.py +285 -270
  97. crawlo/settings/setting_manager.py +219 -219
  98. crawlo/spider/__init__.py +657 -657
  99. crawlo/stats_collector.py +82 -73
  100. crawlo/subscriber.py +129 -129
  101. crawlo/task_manager.py +138 -138
  102. crawlo/templates/crawlo.cfg.tmpl +10 -10
  103. crawlo/templates/project/__init__.py.tmpl +2 -4
  104. crawlo/templates/project/items.py.tmpl +13 -17
  105. crawlo/templates/project/middlewares.py.tmpl +38 -38
  106. crawlo/templates/project/pipelines.py.tmpl +35 -36
  107. crawlo/templates/project/settings.py.tmpl +110 -157
  108. crawlo/templates/project/settings_distributed.py.tmpl +156 -161
  109. crawlo/templates/project/settings_gentle.py.tmpl +170 -171
  110. crawlo/templates/project/settings_high_performance.py.tmpl +171 -172
  111. crawlo/templates/project/settings_minimal.py.tmpl +99 -77
  112. crawlo/templates/project/settings_simple.py.tmpl +168 -169
  113. crawlo/templates/project/spiders/__init__.py.tmpl +9 -9
  114. crawlo/templates/run.py.tmpl +23 -30
  115. crawlo/templates/spider/spider.py.tmpl +33 -144
  116. crawlo/templates/spiders_init.py.tmpl +5 -10
  117. crawlo/tools/__init__.py +86 -189
  118. crawlo/tools/date_tools.py +289 -289
  119. crawlo/tools/distributed_coordinator.py +384 -384
  120. crawlo/tools/scenario_adapter.py +262 -262
  121. crawlo/tools/text_cleaner.py +232 -232
  122. crawlo/utils/__init__.py +50 -50
  123. crawlo/utils/batch_processor.py +276 -259
  124. crawlo/utils/config_manager.py +442 -0
  125. crawlo/utils/controlled_spider_mixin.py +439 -439
  126. crawlo/utils/db_helper.py +250 -244
  127. crawlo/utils/error_handler.py +410 -410
  128. crawlo/utils/fingerprint.py +121 -121
  129. crawlo/utils/func_tools.py +82 -82
  130. crawlo/utils/large_scale_helper.py +344 -344
  131. crawlo/utils/leak_detector.py +335 -0
  132. crawlo/utils/log.py +79 -79
  133. crawlo/utils/misc.py +81 -81
  134. crawlo/utils/mongo_connection_pool.py +157 -0
  135. crawlo/utils/mysql_connection_pool.py +197 -0
  136. crawlo/utils/performance_monitor.py +285 -285
  137. crawlo/utils/queue_helper.py +175 -175
  138. crawlo/utils/redis_checker.py +91 -0
  139. crawlo/utils/redis_connection_pool.py +578 -388
  140. crawlo/utils/redis_key_validator.py +198 -198
  141. crawlo/utils/request.py +278 -256
  142. crawlo/utils/request_serializer.py +225 -225
  143. crawlo/utils/resource_manager.py +337 -0
  144. crawlo/utils/selector_helper.py +137 -137
  145. crawlo/utils/singleton.py +70 -0
  146. crawlo/utils/spider_loader.py +201 -201
  147. crawlo/utils/text_helper.py +94 -94
  148. crawlo/utils/{url.py → url_utils.py} +39 -39
  149. crawlo-1.4.7.dist-info/METADATA +689 -0
  150. crawlo-1.4.7.dist-info/RECORD +347 -0
  151. examples/__init__.py +7 -7
  152. tests/__init__.py +7 -7
  153. tests/advanced_tools_example.py +217 -275
  154. tests/authenticated_proxy_example.py +110 -106
  155. tests/baidu_performance_test.py +108 -108
  156. tests/baidu_test.py +59 -59
  157. tests/bug_check_test.py +250 -250
  158. tests/cleaners_example.py +160 -160
  159. tests/comprehensive_framework_test.py +212 -212
  160. tests/comprehensive_test.py +81 -81
  161. tests/comprehensive_testing_summary.md +186 -186
  162. tests/config_validation_demo.py +142 -142
  163. tests/controlled_spider_example.py +205 -205
  164. tests/date_tools_example.py +180 -180
  165. tests/debug_configure.py +69 -69
  166. tests/debug_framework_logger.py +84 -84
  167. tests/debug_log_config.py +126 -126
  168. tests/debug_log_levels.py +63 -63
  169. tests/debug_pipelines.py +66 -66
  170. tests/detailed_log_test.py +233 -233
  171. tests/direct_selector_helper_test.py +96 -96
  172. tests/distributed_dedup_test.py +467 -0
  173. tests/distributed_test.py +66 -66
  174. tests/distributed_test_debug.py +76 -76
  175. tests/dynamic_loading_example.py +523 -523
  176. tests/dynamic_loading_test.py +104 -104
  177. tests/error_handling_example.py +171 -171
  178. tests/explain_mysql_update_behavior.py +77 -0
  179. tests/final_comprehensive_test.py +151 -151
  180. tests/final_log_test.py +260 -260
  181. tests/final_validation_test.py +182 -182
  182. tests/fix_log_test.py +142 -142
  183. tests/framework_performance_test.py +202 -202
  184. tests/log_buffering_test.py +111 -111
  185. tests/log_generation_timing_test.py +153 -153
  186. tests/monitor_redis_dedup.sh +72 -0
  187. tests/ofweek_scrapy/ofweek_scrapy/items.py +12 -12
  188. tests/ofweek_scrapy/ofweek_scrapy/middlewares.py +100 -100
  189. tests/ofweek_scrapy/ofweek_scrapy/pipelines.py +13 -13
  190. tests/ofweek_scrapy/ofweek_scrapy/settings.py +84 -84
  191. tests/ofweek_scrapy/scrapy.cfg +11 -11
  192. tests/optimized_performance_test.py +211 -211
  193. tests/performance_comparison.py +244 -244
  194. tests/queue_blocking_test.py +113 -113
  195. tests/queue_test.py +89 -89
  196. tests/redis_key_validation_demo.py +130 -130
  197. tests/request_params_example.py +150 -150
  198. tests/response_improvements_example.py +144 -144
  199. tests/scrapy_comparison/ofweek_scrapy.py +138 -138
  200. tests/scrapy_comparison/scrapy_test.py +133 -133
  201. tests/simple_cli_test.py +55 -0
  202. tests/simple_command_test.py +119 -119
  203. tests/simple_crawlo_test.py +126 -126
  204. tests/simple_follow_test.py +38 -38
  205. tests/simple_log_test2.py +137 -137
  206. tests/simple_optimization_test.py +128 -128
  207. tests/simple_queue_type_test.py +41 -41
  208. tests/simple_response_selector_test.py +94 -94
  209. tests/simple_selector_helper_test.py +154 -154
  210. tests/simple_selector_test.py +207 -207
  211. tests/simple_spider_test.py +49 -49
  212. tests/simple_url_test.py +73 -73
  213. tests/simulate_mysql_update_test.py +140 -0
  214. tests/spider_log_timing_test.py +177 -177
  215. tests/test_advanced_tools.py +148 -148
  216. tests/test_all_commands.py +230 -230
  217. tests/test_all_pipeline_fingerprints.py +133 -133
  218. tests/test_all_redis_key_configs.py +145 -145
  219. tests/test_asyncmy_usage.py +57 -0
  220. tests/test_batch_processor.py +178 -178
  221. tests/test_cleaners.py +54 -54
  222. tests/test_cli_arguments.py +119 -0
  223. tests/test_component_factory.py +174 -174
  224. tests/test_config_consistency.py +80 -80
  225. tests/test_config_merge.py +152 -152
  226. tests/test_config_validator.py +182 -182
  227. tests/test_controlled_spider_mixin.py +79 -79
  228. tests/test_crawler_process_import.py +38 -38
  229. tests/test_crawler_process_spider_modules.py +47 -47
  230. tests/test_crawlo_proxy_integration.py +114 -108
  231. tests/test_date_tools.py +123 -123
  232. tests/test_dedup_fix.py +220 -220
  233. tests/test_dedup_pipeline_consistency.py +124 -124
  234. tests/test_default_header_middleware.py +313 -313
  235. tests/test_distributed.py +65 -65
  236. tests/test_double_crawlo_fix.py +204 -204
  237. tests/test_double_crawlo_fix_simple.py +124 -124
  238. tests/test_download_delay_middleware.py +221 -221
  239. tests/test_downloader_proxy_compatibility.py +272 -268
  240. tests/test_edge_cases.py +305 -305
  241. tests/test_encoding_core.py +56 -56
  242. tests/test_encoding_detection.py +126 -126
  243. tests/test_enhanced_error_handler.py +270 -270
  244. tests/test_enhanced_error_handler_comprehensive.py +245 -245
  245. tests/test_error_handler_compatibility.py +112 -112
  246. tests/test_factories.py +252 -252
  247. tests/test_factory_compatibility.py +196 -196
  248. tests/test_final_validation.py +153 -153
  249. tests/test_fingerprint_consistency.py +135 -135
  250. tests/test_fingerprint_simple.py +51 -51
  251. tests/test_get_component_logger.py +83 -83
  252. tests/test_hash_performance.py +99 -99
  253. tests/test_integration.py +169 -169
  254. tests/test_item_dedup_redis_key.py +122 -122
  255. tests/test_large_scale_helper.py +235 -235
  256. tests/test_logging_enhancements.py +374 -374
  257. tests/test_logging_final.py +184 -184
  258. tests/test_logging_integration.py +312 -312
  259. tests/test_logging_system.py +282 -282
  260. tests/test_middleware_debug.py +141 -141
  261. tests/test_mode_consistency.py +51 -51
  262. tests/test_multi_directory.py +67 -67
  263. tests/test_multiple_spider_modules.py +80 -80
  264. tests/test_mysql_pipeline_config.py +165 -0
  265. tests/test_mysql_pipeline_error.py +99 -0
  266. tests/test_mysql_pipeline_init_log.py +83 -0
  267. tests/test_mysql_pipeline_integration.py +133 -0
  268. tests/test_mysql_pipeline_refactor.py +144 -0
  269. tests/test_mysql_pipeline_refactor_simple.py +86 -0
  270. tests/test_mysql_pipeline_robustness.py +196 -0
  271. tests/test_mysql_pipeline_types.py +89 -0
  272. tests/test_mysql_update_columns.py +94 -0
  273. tests/test_offsite_middleware.py +244 -244
  274. tests/test_offsite_middleware_simple.py +203 -203
  275. tests/test_optimized_selector_naming.py +100 -100
  276. tests/test_parsel.py +29 -29
  277. tests/test_performance.py +327 -327
  278. tests/test_performance_monitor.py +115 -115
  279. tests/test_pipeline_fingerprint_consistency.py +86 -86
  280. tests/test_priority_behavior.py +211 -211
  281. tests/test_priority_consistency.py +151 -151
  282. tests/test_priority_consistency_fixed.py +249 -249
  283. tests/test_proxy_health_check.py +32 -32
  284. tests/test_proxy_middleware.py +217 -121
  285. tests/test_proxy_middleware_enhanced.py +212 -216
  286. tests/test_proxy_middleware_integration.py +142 -137
  287. tests/test_proxy_middleware_refactored.py +207 -184
  288. tests/test_proxy_only.py +84 -0
  289. tests/test_proxy_providers.py +56 -56
  290. tests/test_proxy_stats.py +19 -19
  291. tests/test_proxy_strategies.py +59 -59
  292. tests/test_proxy_with_downloader.py +153 -0
  293. tests/test_queue_empty_check.py +41 -41
  294. tests/test_queue_manager_double_crawlo.py +173 -173
  295. tests/test_queue_manager_redis_key.py +179 -179
  296. tests/test_queue_naming.py +154 -154
  297. tests/test_queue_type.py +106 -106
  298. tests/test_queue_type_redis_config_consistency.py +130 -130
  299. tests/test_random_headers_default.py +322 -322
  300. tests/test_random_headers_necessity.py +308 -308
  301. tests/test_random_user_agent.py +72 -72
  302. tests/test_redis_config.py +28 -28
  303. tests/test_redis_connection_pool.py +294 -294
  304. tests/test_redis_key_naming.py +181 -181
  305. tests/test_redis_key_validator.py +123 -123
  306. tests/test_redis_queue.py +224 -224
  307. tests/test_redis_queue_name_fix.py +175 -175
  308. tests/test_redis_queue_type_fallback.py +129 -129
  309. tests/test_request_ignore_middleware.py +182 -182
  310. tests/test_request_params.py +111 -111
  311. tests/test_request_serialization.py +70 -70
  312. tests/test_response_code_middleware.py +349 -349
  313. tests/test_response_filter_middleware.py +427 -427
  314. tests/test_response_follow.py +104 -104
  315. tests/test_response_improvements.py +152 -152
  316. tests/test_response_selector_methods.py +92 -92
  317. tests/test_response_url_methods.py +70 -70
  318. tests/test_response_urljoin.py +86 -86
  319. tests/test_retry_middleware.py +333 -333
  320. tests/test_retry_middleware_realistic.py +273 -273
  321. tests/test_scheduler.py +252 -252
  322. tests/test_scheduler_config_update.py +133 -133
  323. tests/test_scrapy_style_encoding.py +112 -112
  324. tests/test_selector_helper.py +100 -100
  325. tests/test_selector_optimizations.py +146 -146
  326. tests/test_simple_response.py +61 -61
  327. tests/test_spider_loader.py +49 -49
  328. tests/test_spider_loader_comprehensive.py +69 -69
  329. tests/test_spider_modules.py +84 -84
  330. tests/test_spiders/test_spider.py +9 -9
  331. tests/test_telecom_spider_redis_key.py +205 -205
  332. tests/test_template_content.py +87 -87
  333. tests/test_template_redis_key.py +134 -134
  334. tests/test_tools.py +159 -159
  335. tests/test_user_agent_randomness.py +176 -176
  336. tests/test_user_agents.py +96 -96
  337. tests/untested_features_report.md +138 -138
  338. tests/verify_debug.py +51 -51
  339. tests/verify_distributed.py +117 -117
  340. tests/verify_log_fix.py +111 -111
  341. tests/verify_mysql_warnings.py +110 -0
  342. crawlo/logging/async_handler.py +0 -181
  343. crawlo/logging/monitor.py +0 -153
  344. crawlo/logging/sampler.py +0 -167
  345. crawlo/middleware/simple_proxy.py +0 -65
  346. crawlo/tools/authenticated_proxy.py +0 -241
  347. crawlo/tools/data_formatter.py +0 -226
  348. crawlo/tools/data_validator.py +0 -181
  349. crawlo/tools/encoding_converter.py +0 -127
  350. crawlo/tools/network_diagnostic.py +0 -365
  351. crawlo/tools/request_tools.py +0 -83
  352. crawlo/tools/retry_mechanism.py +0 -224
  353. crawlo/utils/env_config.py +0 -143
  354. crawlo/utils/large_scale_config.py +0 -287
  355. crawlo/utils/system.py +0 -11
  356. crawlo/utils/tools.py +0 -5
  357. crawlo-1.4.5.dist-info/METADATA +0 -329
  358. crawlo-1.4.5.dist-info/RECORD +0 -347
  359. tests/env_config_example.py +0 -134
  360. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
  361. tests/test_authenticated_proxy.py +0 -142
  362. tests/test_comprehensive.py +0 -147
  363. tests/test_dynamic_downloaders_proxy.py +0 -125
  364. tests/test_dynamic_proxy.py +0 -93
  365. tests/test_dynamic_proxy_config.py +0 -147
  366. tests/test_dynamic_proxy_real.py +0 -110
  367. tests/test_env_config.py +0 -122
  368. tests/test_framework_env_usage.py +0 -104
  369. tests/test_large_scale_config.py +0 -113
  370. tests/test_proxy_api.py +0 -265
  371. tests/test_real_scenario_proxy.py +0 -196
  372. tests/tools_example.py +0 -261
  373. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/WHEEL +0 -0
  374. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/entry_points.txt +0 -0
  375. {crawlo-1.4.5.dist-info → crawlo-1.4.7.dist-info}/top_level.txt +0 -0
crawlo/tools/data_validator.py
@@ -1,181 +0,0 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- # @Time : 2025-09-10 22:00
- # @Author : crawl-coder
- # @Desc : Data validation tools
- """
-
- import re
- from typing import Any, Union, Dict, List
- from datetime import datetime
- from urllib.parse import urlparse
-
-
- class DataValidator:
-     """Data validation utility class"""
-
-     @staticmethod
-     def validate_email(email: str) -> bool:
-         """
-         Validate an email address format
-
-         Args:
-             email (str): email address
-
-         Returns:
-             bool: validation result
-         """
-         pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
-         return bool(re.match(pattern, email))
-
-     @staticmethod
-     def validate_phone(phone: str, country_code: str = "CN") -> bool:
-         """
-         Validate a phone number format
-
-         Args:
-             phone (str): phone number
-             country_code (str): country code, defaults to "CN"
-
-         Returns:
-             bool: validation result
-         """
-         if country_code == "CN":
-             # Mainland China mobile number format
-             pattern = r'^1[3-9]\d{9}$'
-             return bool(re.match(pattern, phone))
-         else:
-             # Generic format: digits only, 7-15 characters long
-             pattern = r'^\d{7,15}$'
-             return bool(re.match(pattern, phone))
-
-     @staticmethod
-     def validate_url(url: str) -> bool:
-         """
-         Validate a URL format
-
-         Args:
-             url (str): URL
-
-         Returns:
-             bool: validation result
-         """
-         try:
-             result = urlparse(url)
-             return all([result.scheme, result.netloc])
-         except Exception:
-             return False
-
-     @staticmethod
-     def validate_chinese_id_card(id_card: str) -> bool:
-         """
-         Validate a Chinese ID card number format
-
-         Args:
-             id_card (str): ID card number
-
-         Returns:
-             bool: validation result
-         """
-         # 18-digit ID card number format
-         pattern = r'^[1-9]\d{5}(18|19|20)\d{2}((0[1-9])|(1[0-2]))(([0-2][1-9])|10|20|30|31)\d{3}[0-9Xx]$'
-         return bool(re.match(pattern, id_card))
-
-     @staticmethod
-     def validate_date(date_str: str, date_format: str = "%Y-%m-%d") -> bool:
-         """
-         Validate a date format
-
-         Args:
-             date_str (str): date string
-             date_format (str): date format, defaults to "%Y-%m-%d"
-
-         Returns:
-             bool: validation result
-         """
-         try:
-             datetime.strptime(date_str, date_format)
-             return True
-         except ValueError:
-             return False
-
-     @staticmethod
-     def validate_number_range(value: Union[int, float], min_val: Union[int, float],
-                               max_val: Union[int, float]) -> bool:
-         """
-         Validate that a number falls within the given range
-
-         Args:
-             value (Union[int, float]): value to validate
-             min_val (Union[int, float]): minimum value
-             max_val (Union[int, float]): maximum value
-
-         Returns:
-             bool: validation result
-         """
-         return min_val <= value <= max_val
-
-     @staticmethod
-     def check_data_integrity(data: Dict[str, Any], required_fields: List[str]) -> Dict[str, Any]:
-         """
-         Check data integrity, ensuring that required fields are present and not empty
-
-         Args:
-             data (Dict[str, Any]): data to check
-             required_fields (List[str]): list of required fields
-
-         Returns:
-             Dict[str, Any]: check result, including missing fields and empty fields
-         """
-         missing_fields = []
-         empty_fields = []
-
-         for field in required_fields:
-             if field not in data:
-                 missing_fields.append(field)
-             elif data[field] is None or data[field] == "":
-                 empty_fields.append(field)
-
-         return {
-             "is_valid": len(missing_fields) == 0 and len(empty_fields) == 0,
-             "missing_fields": missing_fields,
-             "empty_fields": empty_fields
-         }
-
-
- # Convenience functions
- def validate_email(email: str) -> bool:
-     """Validate an email address format"""
-     return DataValidator.validate_email(email)
-
-
- def validate_phone(phone: str, country_code: str = "CN") -> bool:
-     """Validate a phone number format"""
-     return DataValidator.validate_phone(phone, country_code)
-
-
- def validate_url(url: str) -> bool:
-     """Validate a URL format"""
-     return DataValidator.validate_url(url)
-
-
- def validate_chinese_id_card(id_card: str) -> bool:
-     """Validate a Chinese ID card number format"""
-     return DataValidator.validate_chinese_id_card(id_card)
-
-
- def validate_date(date_str: str, date_format: str = "%Y-%m-%d") -> bool:
-     """Validate a date format"""
-     return DataValidator.validate_date(date_str, date_format)
-
-
- def validate_number_range(value: Union[int, float], min_val: Union[int, float],
-                           max_val: Union[int, float]) -> bool:
-     """Validate that a number falls within the given range"""
-     return DataValidator.validate_number_range(value, min_val, max_val)
-
-
- def check_data_integrity(data: Dict[str, Any], required_fields: List[str]) -> Dict[str, Any]:
-     """Check data integrity"""
-     return DataValidator.check_data_integrity(data, required_fields)
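For reference, a minimal usage sketch of the module-level convenience helpers removed above, assuming the pre-1.4.7 import path crawlo.tools.data_validator (the module no longer exists in 1.4.7, so this only runs against older wheels):

```python
# Hypothetical usage against crawlo <= 1.4.5; the module was removed in 1.4.7.
from crawlo.tools.data_validator import (
    validate_email,
    validate_url,
    check_data_integrity,
)

print(validate_email("user@example.com"))        # True for a well-formed address
print(validate_url("https://example.com/page"))  # True when scheme and netloc are present
print(check_data_integrity(
    {"title": "demo", "price": ""},
    required_fields=["title", "price", "url"],
))  # reports "url" as missing and "price" as empty
```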
crawlo/tools/encoding_converter.py
@@ -1,127 +0,0 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- # @Time : 2025-09-10 22:00
- # @Author : crawl-coder
- # @Desc : Encoding conversion tools
- """
- try:
-     import chardet
-
-     HAS_CHARDET = True
- except ImportError:
-     HAS_CHARDET = False
- from typing import Optional, Union
-
-
- class EncodingConverter:
-     """
-     Encoding conversion utility class providing various encoding conversion helpers.
-     Particularly useful for handling web page content with different encodings in crawlers.
-     """
-
-     @staticmethod
-     def detect_encoding(data: Union[str, bytes]) -> Optional[str]:
-         """
-         Detect the encoding of the given data
-
-         :param data: data (str or bytes)
-         :return: the detected encoding
-         """
-         if isinstance(data, str):
-             # Already a string, return directly
-             return 'utf-8'
-
-         if not isinstance(data, bytes):
-             return None
-
-         if HAS_CHARDET:
-             try:
-                 # Detect the encoding with chardet
-                 result = chardet.detect(data)
-                 return result['encoding']
-             except Exception:
-                 return None
-         else:
-             # Without chardet, return None
-             return None
-
-     @staticmethod
-     def to_utf8(data: Union[str, bytes], source_encoding: Optional[str] = None) -> Optional[str]:
-         """
-         Convert data to a UTF-8 string
-
-         :param data: data (str or bytes)
-         :param source_encoding: source encoding (auto-detected if None)
-         :return: UTF-8 string
-         """
-         if isinstance(data, str):
-             # Already a string, assume it is already UTF-8
-             return data
-
-         if not isinstance(data, bytes):
-             return None
-
-         try:
-             if source_encoding is None:
-                 # Auto-detect the encoding
-                 source_encoding = EncodingConverter.detect_encoding(data)
-                 if source_encoding is None:
-                     # Detection failed, try common encodings
-                     for encoding in ['utf-8', 'gbk', 'gb2312', 'latin1']:
-                         try:
-                             decoded = data.decode(encoding)
-                             return decoded
-                         except UnicodeDecodeError:
-                             continue
-                     return None
-             else:
-                 # Use the specified encoding
-                 return data.decode(source_encoding)
-
-             # Decode with the detected encoding
-             return data.decode(source_encoding)
-         except Exception:
-             return None
-
-     @staticmethod
-     def convert_encoding(data: Union[str, bytes],
-                          source_encoding: Optional[str] = None,
-                          target_encoding: str = 'utf-8') -> Optional[bytes]:
-         """
-         Convert between encodings
-
-         :param data: data (str or bytes)
-         :param source_encoding: source encoding (auto-detected if None)
-         :param target_encoding: target encoding
-         :return: converted bytes
-         """
-         # First convert to a UTF-8 string
-         utf8_str = EncodingConverter.to_utf8(data, source_encoding)
-         if utf8_str is None:
-             return None
-
-         try:
-             # Then encode into the target encoding
-             return utf8_str.encode(target_encoding)
-         except Exception:
-             return None
-
-
- # ======================= Public interface =======================
-
- def detect_encoding(data: Union[str, bytes]) -> Optional[str]:
-     """Detect the encoding of the given data"""
-     return EncodingConverter.detect_encoding(data)
-
-
- def to_utf8(data: Union[str, bytes], source_encoding: Optional[str] = None) -> Optional[str]:
-     """Convert data to a UTF-8 string"""
-     return EncodingConverter.to_utf8(data, source_encoding)
-
-
- def convert_encoding(data: Union[str, bytes],
-                      source_encoding: Optional[str] = None,
-                      target_encoding: str = 'utf-8') -> Optional[bytes]:
-     """Convert between encodings"""
-     return EncodingConverter.convert_encoding(data, source_encoding, target_encoding)
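A similar sketch for the removed encoding helpers, assuming the old crawlo.tools.encoding_converter path; chardet is optional, and detection falls back to trying utf-8/gbk/gb2312/latin1 when it is absent:

```python
# Hypothetical usage against crawlo <= 1.4.5; the module was removed in 1.4.7.
from crawlo.tools.encoding_converter import detect_encoding, to_utf8, convert_encoding

raw = "爬虫".encode("gbk")           # bytes in a non-UTF-8 encoding
print(detect_encoding(raw))          # encoding name from chardet, or None without it
print(to_utf8(raw))                  # decoded text, via detection or the fallback list
print(convert_encoding(raw, source_encoding="gbk", target_encoding="utf-8"))  # UTF-8 bytes
```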
crawlo/tools/network_diagnostic.py
@@ -1,365 +0,0 @@
- #!/usr/bin/python
- # -*- coding: UTF-8 -*-
- """
- Network diagnostic tool
- Diagnoses network connectivity problems and suggests fixes
- """
-
- import asyncio
- import socket
- import time
- from typing import Dict, List, Optional, Tuple
- from urllib.parse import urlparse
-
- import aiohttp
- from crawlo.utils.log import get_logger
-
-
- class NetworkDiagnostic:
-     """Network diagnostic utility class"""
-
-     def __init__(self):
-         self.logger = get_logger(self.__class__.__name__)
-         self._dns_cache: Dict[str, str] = {}
-
-     async def diagnose_url(self, url: str) -> Dict[str, any]:
-         """
-         Diagnose network connectivity problems for a URL
-
-         Args:
-             url: the URL to diagnose
-
-         Returns:
-             a dict with the diagnostic results
-         """
-         parsed = urlparse(url)
-         hostname = parsed.hostname
-         port = parsed.port or (443 if parsed.scheme == 'https' else 80)
-
-         result = {
-             'url': url,
-             'hostname': hostname,
-             'port': port,
-             'dns_resolution': None,
-             'tcp_connection': None,
-             'http_response': None,
-             'recommendations': []
-         }
-
-         # DNS resolution test
-         dns_result = await self._test_dns_resolution(hostname)
-         result['dns_resolution'] = dns_result
-
-         if dns_result['success']:
-             # TCP connection test
-             tcp_result = await self._test_tcp_connection(hostname, port)
-             result['tcp_connection'] = tcp_result
-
-             if tcp_result['success']:
-                 # HTTP response test
-                 http_result = await self._test_http_response(url)
-                 result['http_response'] = http_result
-
-         # Generate recommendations
-         result['recommendations'] = self._generate_recommendations(result)
-
-         return result
-
-     async def _test_dns_resolution(self, hostname: str) -> Dict[str, any]:
-         """Test DNS resolution"""
-         try:
-             start_time = time.time()
-
-             # Resolve via asyncio's DNS lookup
-             loop = asyncio.get_event_loop()
-             addr_info = await loop.getaddrinfo(hostname, None)
-
-             resolution_time = time.time() - start_time
-             ip_addresses = list(set([addr[4][0] for addr in addr_info]))
-
-             # Cache the DNS result
-             if ip_addresses:
-                 self._dns_cache[hostname] = ip_addresses[0]
-
-             return {
-                 'success': True,
-                 'ip_addresses': ip_addresses,
-                 'resolution_time': resolution_time,
-                 'error': None
-             }
-
-         except socket.gaierror as e:
-             return {
-                 'success': False,
-                 'ip_addresses': [],
-                 'resolution_time': None,
-                 'error': {
-                     'type': 'DNSError',
-                     'code': e.errno,
-                     'message': str(e)
-                 }
-             }
-         except Exception as e:
-             return {
-                 'success': False,
-                 'ip_addresses': [],
-                 'resolution_time': None,
-                 'error': {
-                     'type': type(e).__name__,
-                     'message': str(e)
-                 }
-             }
-
-     async def _test_tcp_connection(self, hostname: str, port: int) -> Dict[str, any]:
-         """Test the TCP connection"""
-         try:
-             start_time = time.time()
-
-             # Attempt the TCP connection
-             reader, writer = await asyncio.wait_for(
-                 asyncio.open_connection(hostname, port),
-                 timeout=10.0
-             )
-
-             connection_time = time.time() - start_time
-
-             # Close the connection
-             writer.close()
-             await writer.wait_closed()
-
-             return {
-                 'success': True,
-                 'connection_time': connection_time,
-                 'error': None
-             }
-
-         except asyncio.TimeoutError:
-             return {
-                 'success': False,
-                 'connection_time': None,
-                 'error': {
-                     'type': 'TimeoutError',
-                     'message': 'Connection timeout'
-                 }
-             }
-         except Exception as e:
-             return {
-                 'success': False,
-                 'connection_time': None,
-                 'error': {
-                     'type': type(e).__name__,
-                     'message': str(e)
-                 }
-             }
-
-     async def _test_http_response(self, url: str) -> Dict[str, any]:
-         """Test the HTTP response"""
-         try:
-             start_time = time.time()
-
-             timeout = aiohttp.ClientTimeout(total=30, connect=10)
-             async with aiohttp.ClientSession(timeout=timeout) as session:
-                 async with session.get(url) as response:
-                     response_time = time.time() - start_time
-
-                     return {
-                         'success': True,
-                         'url': url,
-                         'status_code': response.status_code,  # Fix: use status_code instead of status
-                         'response_time': response_time,
-                         'content_length': len(response.body) if response.body else 0,
-                         'headers': dict(response.headers)
-                     }
-
-         except aiohttp.ClientError as e:
-             return {
-                 'success': False,
-                 'status_code': None,
-                 'response_time': None,
-                 'headers': {},
-                 'error': {
-                     'type': type(e).__name__,
-                     'message': str(e)
-                 }
-             }
-         except Exception as e:
-             return {
-                 'success': False,
-                 'status_code': None,
-                 'response_time': None,
-                 'headers': {},
-                 'error': {
-                     'type': type(e).__name__,
-                     'message': str(e)
-                 }
-             }
-
-     def _generate_recommendations(self, result: Dict[str, any]) -> List[str]:
-         """Generate recommendations from the diagnostic results"""
-         recommendations = []
-
-         dns_result = result.get('dns_resolution', {})
-         tcp_result = result.get('tcp_connection', {})
-         http_result = result.get('http_response', {})
-
-         # DNS problem recommendations
-         if not dns_result.get('success'):
-             error = dns_result.get('error', {})
-             if error.get('code') == 8:  # nodename nor servname provided, or not known
-                 recommendations.extend([
-                     "DNS resolution failed - check that the domain name is correct",
-                     "Check that the network connection is working",
-                     "Try a different DNS server (e.g. 8.8.8.8 or 1.1.1.1)",
-                     "Check the local hosts file for related entries",
-                     "Confirm the domain is reachable from outside"
-                 ])
-             elif error.get('code') == 2:  # Name or service not known
-                 recommendations.extend([
-                     "Domain does not exist or cannot be resolved",
-                     "Check the domain spelling",
-                     "Confirm the domain is registered and has DNS records configured"
-                 ])
-
-         # TCP connection problem recommendations
-         elif not tcp_result.get('success'):
-             error = tcp_result.get('error', {})
-             if error.get('type') == 'TimeoutError':
-                 recommendations.extend([
-                     "TCP connection timed out - the server may be unresponsive",
-                     "Check whether a firewall is blocking the connection",
-                     "Try increasing the connection timeout",
-                     "Check the proxy settings"
-                 ])
-
-         # HTTP problem recommendations
-         elif not http_result.get('success'):
-             error = http_result.get('error', {})
-             recommendations.extend([
-                 f"HTTP request failed: {error.get('message', 'Unknown error')}",
-                 "Check that the URL is correct",
-                 "Confirm the server is running properly"
-             ])
-
-         # Performance recommendations
-         if dns_result.get('success') and dns_result.get('resolution_time', 0) > 1.0:
-             recommendations.append("DNS resolution is slow; consider DNS caching or a faster DNS server")
-
-         if tcp_result.get('success') and tcp_result.get('connection_time', 0) > 2.0:
-             recommendations.append("TCP connection is slow; there may be network latency issues")
-
-         if http_result.get('success') and http_result.get('response_time', 0) > 5.0:
-             recommendations.append("HTTP response is slow; the server may be under heavy load")
-
-         return recommendations
-
-     async def batch_diagnose(self, urls: List[str]) -> Dict[str, Dict[str, any]]:
-         """Diagnose multiple URLs in a batch"""
-         tasks = []
-         for url in urls:
-             task = asyncio.create_task(self.diagnose_url(url))
-             tasks.append((url, task))
-
-         results = {}
-         for url, task in tasks:
-             try:
-                 result = await task
-                 results[url] = result
-             except Exception as e:
-                 results[url] = {
-                     'url': url,
-                     'error': f"Error during diagnosis: {e}",
-                     'recommendations': ["The diagnostic tool itself failed; check the network environment"]
-                 }
-
-         return results
-
-     def format_diagnostic_report(self, result: Dict[str, any]) -> str:
-         """Format the diagnostic report"""
-         lines = [
-             f"=== Network Diagnostic Report ===",
-             f"URL: {result['url']}",
-             f"Host: {result['hostname']}:{result['port']}",
-             ""
-         ]
-
-         # DNS resolution results
-         dns = result.get('dns_resolution', {})
-         if dns.get('success'):
-             lines.extend([
-                 "✅ DNS resolution: success",
-                 f"   IP addresses: {', '.join(dns['ip_addresses'])}",
-                 f"   Resolution time: {dns['resolution_time']:.3f}s"
-             ])
-         else:
-             error = dns.get('error', {})
-             lines.extend([
-                 "❌ DNS resolution: failed",
-                 f"   Error type: {error.get('type', 'Unknown')}",
-                 f"   Error message: {error.get('message', 'Unknown error')}"
-             ])
-
-         lines.append("")
-
-         # TCP connection results
-         tcp = result.get('tcp_connection', {})
-         if tcp and tcp.get('success'):
-             lines.extend([
-                 "✅ TCP connection: success",
-                 f"   Connection time: {tcp['connection_time']:.3f}s"
-             ])
-         elif tcp:
-             error = tcp.get('error', {})
-             lines.extend([
-                 "❌ TCP connection: failed",
-                 f"   Error type: {error.get('type', 'Unknown')}",
-                 f"   Error message: {error.get('message', 'Unknown error')}"
-             ])
-
-         lines.append("")
-
-         # HTTP response results
-         http = result.get('http_response', {})
-         if http and http.get('success'):
-             lines.extend([
-                 "✅ HTTP response: success",
-                 f"   Status code: {http['status_code']}",
-                 f"   Response time: {http['response_time']:.3f}s"
-             ])
-         elif http:
-             error = http.get('error', {})
-             lines.extend([
-                 "❌ HTTP response: failed",
-                 f"   Error type: {error.get('type', 'Unknown')}",
-                 f"   Error message: {error.get('message', 'Unknown error')}"
-             ])
-
-         # Recommendations
-         recommendations = result.get('recommendations', [])
-         if recommendations:
-             lines.extend([
-                 "",
-                 "🔧 Recommendations:",
-             ])
-             for i, rec in enumerate(recommendations, 1):
-                 lines.append(f"   {i}. {rec}")
-
-         return "\n".join(lines)
-
-
- # Convenience functions
- async def diagnose_url(url: str) -> Dict[str, any]:
-     """Diagnose network problems for a single URL"""
-     diagnostic = NetworkDiagnostic()
-     return await diagnostic.diagnose_url(url)
-
-
- async def diagnose_urls(urls: List[str]) -> Dict[str, Dict[str, any]]:
-     """Diagnose network problems for a batch of URLs"""
-     diagnostic = NetworkDiagnostic()
-     return await diagnostic.batch_diagnose(urls)
-
-
- def format_report(result: Dict[str, any]) -> str:
-     """Format a diagnostic report"""
-     diagnostic = NetworkDiagnostic()
-     return diagnostic.format_diagnostic_report(result)
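Finally, a sketch for the removed diagnostic helpers, assuming the old crawlo.tools.network_diagnostic path; the coroutines are driven with asyncio.run here, and real network access is needed for meaningful results:

```python
# Hypothetical usage against crawlo <= 1.4.5; the module was removed in 1.4.7.
import asyncio

from crawlo.tools.network_diagnostic import diagnose_url, diagnose_urls, format_report

async def main():
    # Single-URL diagnosis: DNS, TCP and HTTP checks plus recommendations.
    result = await diagnose_url("https://example.com")
    print(format_report(result))

    # Batch diagnosis returns one result dict per URL.
    results = await diagnose_urls(["https://example.com", "https://example.org"])
    print(list(results))

asyncio.run(main())
```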