crawlo 1.4.6__py3-none-any.whl → 1.4.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic; see the registry's advisory page for more details.

Files changed (162)
  1. crawlo/__init__.py +2 -1
  2. crawlo/__version__.py +1 -1
  3. crawlo/cli.py +2 -2
  4. crawlo/commands/check.py +1 -1
  5. crawlo/commands/help.py +5 -3
  6. crawlo/commands/list.py +1 -1
  7. crawlo/commands/run.py +49 -11
  8. crawlo/commands/stats.py +1 -1
  9. crawlo/config.py +12 -4
  10. crawlo/config_validator.py +1 -1
  11. crawlo/core/engine.py +20 -7
  12. crawlo/core/processor.py +1 -1
  13. crawlo/core/scheduler.py +4 -5
  14. crawlo/crawler.py +51 -10
  15. crawlo/downloader/__init__.py +7 -3
  16. crawlo/downloader/aiohttp_downloader.py +18 -18
  17. crawlo/downloader/cffi_downloader.py +5 -2
  18. crawlo/downloader/httpx_downloader.py +9 -3
  19. crawlo/downloader/hybrid_downloader.py +2 -2
  20. crawlo/downloader/playwright_downloader.py +38 -15
  21. crawlo/downloader/selenium_downloader.py +16 -2
  22. crawlo/event.py +42 -8
  23. crawlo/exceptions.py +157 -24
  24. crawlo/extension/__init__.py +10 -9
  25. crawlo/extension/health_check.py +7 -7
  26. crawlo/extension/log_interval.py +6 -6
  27. crawlo/extension/log_stats.py +2 -2
  28. crawlo/extension/logging_extension.py +4 -12
  29. crawlo/extension/memory_monitor.py +5 -5
  30. crawlo/extension/performance_profiler.py +5 -5
  31. crawlo/extension/request_recorder.py +6 -6
  32. crawlo/factories/base.py +1 -1
  33. crawlo/factories/crawler.py +61 -60
  34. crawlo/factories/utils.py +135 -0
  35. crawlo/filters/__init__.py +19 -2
  36. crawlo/filters/aioredis_filter.py +133 -49
  37. crawlo/filters/memory_filter.py +6 -21
  38. crawlo/framework.py +22 -8
  39. crawlo/initialization/built_in.py +24 -67
  40. crawlo/initialization/core.py +65 -19
  41. crawlo/initialization/phases.py +83 -2
  42. crawlo/initialization/registry.py +5 -7
  43. crawlo/initialization/utils.py +49 -0
  44. crawlo/logging/__init__.py +6 -10
  45. crawlo/logging/config.py +106 -22
  46. crawlo/logging/factory.py +12 -8
  47. crawlo/logging/manager.py +19 -27
  48. crawlo/middleware/__init__.py +72 -9
  49. crawlo/middleware/default_header.py +2 -2
  50. crawlo/middleware/download_delay.py +2 -2
  51. crawlo/middleware/middleware_manager.py +6 -6
  52. crawlo/middleware/offsite.py +2 -2
  53. crawlo/middleware/proxy.py +2 -2
  54. crawlo/middleware/request_ignore.py +4 -4
  55. crawlo/middleware/response_code.py +2 -2
  56. crawlo/middleware/response_filter.py +2 -2
  57. crawlo/middleware/retry.py +1 -1
  58. crawlo/mode_manager.py +38 -4
  59. crawlo/network/request.py +54 -26
  60. crawlo/network/response.py +69 -135
  61. crawlo/pipelines/__init__.py +40 -9
  62. crawlo/pipelines/base_pipeline.py +452 -0
  63. crawlo/pipelines/bloom_dedup_pipeline.py +4 -5
  64. crawlo/pipelines/console_pipeline.py +2 -2
  65. crawlo/pipelines/csv_pipeline.py +4 -4
  66. crawlo/pipelines/database_dedup_pipeline.py +4 -5
  67. crawlo/pipelines/json_pipeline.py +4 -4
  68. crawlo/pipelines/memory_dedup_pipeline.py +4 -5
  69. crawlo/pipelines/mongo_pipeline.py +23 -14
  70. crawlo/pipelines/mysql_pipeline.py +31 -39
  71. crawlo/pipelines/pipeline_manager.py +8 -8
  72. crawlo/pipelines/redis_dedup_pipeline.py +13 -14
  73. crawlo/project.py +1 -1
  74. crawlo/queue/__init__.py +10 -0
  75. crawlo/queue/queue_manager.py +79 -13
  76. crawlo/queue/redis_priority_queue.py +196 -47
  77. crawlo/settings/default_settings.py +16 -6
  78. crawlo/spider/__init__.py +6 -5
  79. crawlo/stats_collector.py +2 -2
  80. crawlo/task_manager.py +1 -1
  81. crawlo/templates/crawlo.cfg.tmpl +3 -3
  82. crawlo/templates/project/__init__.py.tmpl +1 -3
  83. crawlo/templates/project/items.py.tmpl +2 -6
  84. crawlo/templates/project/middlewares.py.tmpl +1 -1
  85. crawlo/templates/project/pipelines.py.tmpl +1 -2
  86. crawlo/templates/project/settings.py.tmpl +12 -10
  87. crawlo/templates/project/settings_distributed.py.tmpl +14 -13
  88. crawlo/templates/project/settings_gentle.py.tmpl +21 -23
  89. crawlo/templates/project/settings_high_performance.py.tmpl +21 -23
  90. crawlo/templates/project/settings_minimal.py.tmpl +10 -8
  91. crawlo/templates/project/settings_simple.py.tmpl +21 -23
  92. crawlo/templates/run.py.tmpl +1 -1
  93. crawlo/templates/spider/spider.py.tmpl +4 -12
  94. crawlo/templates/spiders_init.py.tmpl +3 -8
  95. crawlo/tools/__init__.py +0 -103
  96. crawlo/tools/scenario_adapter.py +1 -1
  97. crawlo/utils/__init__.py +25 -1
  98. crawlo/utils/batch_processor.py +23 -6
  99. crawlo/utils/config_manager.py +442 -0
  100. crawlo/utils/controlled_spider_mixin.py +1 -1
  101. crawlo/utils/db_helper.py +1 -1
  102. crawlo/utils/encoding_helper.py +190 -0
  103. crawlo/utils/error_handler.py +2 -2
  104. crawlo/utils/large_scale_helper.py +1 -1
  105. crawlo/utils/leak_detector.py +335 -0
  106. crawlo/utils/mongo_connection_pool.py +157 -0
  107. crawlo/utils/mysql_connection_pool.py +197 -0
  108. crawlo/utils/performance_monitor.py +1 -1
  109. crawlo/utils/redis_checker.py +91 -0
  110. crawlo/utils/redis_connection_pool.py +260 -70
  111. crawlo/utils/redis_key_validator.py +1 -1
  112. crawlo/utils/request.py +24 -2
  113. crawlo/utils/request_serializer.py +1 -1
  114. crawlo/utils/resource_manager.py +337 -0
  115. crawlo/utils/response_helper.py +113 -0
  116. crawlo/utils/selector_helper.py +3 -2
  117. crawlo/utils/singleton.py +70 -0
  118. crawlo/utils/spider_loader.py +1 -1
  119. crawlo/utils/text_helper.py +1 -1
  120. crawlo-1.4.8.dist-info/METADATA +831 -0
  121. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/RECORD +131 -145
  122. tests/advanced_tools_example.py +10 -68
  123. tests/distributed_dedup_test.py +467 -0
  124. tests/monitor_redis_dedup.sh +72 -0
  125. tests/ofweek_scrapy/ofweek_scrapy/spiders/__init__.py +4 -4
  126. tests/simple_cli_test.py +55 -0
  127. tests/test_cli_arguments.py +119 -0
  128. tests/test_dedup_fix.py +10 -10
  129. crawlo/logging/async_handler.py +0 -181
  130. crawlo/logging/monitor.py +0 -153
  131. crawlo/logging/sampler.py +0 -167
  132. crawlo/tools/authenticated_proxy.py +0 -241
  133. crawlo/tools/data_formatter.py +0 -226
  134. crawlo/tools/data_validator.py +0 -181
  135. crawlo/tools/encoding_converter.py +0 -127
  136. crawlo/tools/network_diagnostic.py +0 -365
  137. crawlo/tools/request_tools.py +0 -83
  138. crawlo/tools/retry_mechanism.py +0 -224
  139. crawlo/utils/env_config.py +0 -143
  140. crawlo/utils/large_scale_config.py +0 -287
  141. crawlo/utils/log.py +0 -80
  142. crawlo/utils/system.py +0 -11
  143. crawlo/utils/tools.py +0 -5
  144. crawlo/utils/url.py +0 -40
  145. crawlo-1.4.6.dist-info/METADATA +0 -329
  146. tests/env_config_example.py +0 -134
  147. tests/ofweek_scrapy/ofweek_scrapy/spiders/ofweek_spider.py +0 -162
  148. tests/test_authenticated_proxy.py +0 -142
  149. tests/test_comprehensive.py +0 -147
  150. tests/test_dynamic_downloaders_proxy.py +0 -125
  151. tests/test_dynamic_proxy.py +0 -93
  152. tests/test_dynamic_proxy_config.py +0 -147
  153. tests/test_dynamic_proxy_real.py +0 -110
  154. tests/test_env_config.py +0 -122
  155. tests/test_framework_env_usage.py +0 -104
  156. tests/test_large_scale_config.py +0 -113
  157. tests/test_proxy_api.py +0 -265
  158. tests/test_real_scenario_proxy.py +0 -196
  159. tests/tools_example.py +0 -261
  160. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/WHEEL +0 -0
  161. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/entry_points.txt +0 -0
  162. {crawlo-1.4.6.dist-info → crawlo-1.4.8.dist-info}/top_level.txt +0 -0
@@ -1,241 +0,0 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- 带认证代理工具
5
- =============
6
- 支持带用户名密码认证的代理和非认证代理的统一处理工具
7
-
8
- 功能特性:
9
- - 支持HTTP/HTTPS/SOCKS代理
10
- - 支持带认证和不带认证的代理
11
- - 统一的代理格式处理
12
- - 代理有效性检测
13
- """
14
-
15
- from typing import Dict, Any, Optional, Union
16
- from urllib.parse import urlparse, urlunparse
17
-
18
-
19
- class AuthenticatedProxy:
20
- """带认证代理处理类"""
21
-
22
- def __init__(self, proxy_url: str):
23
- """
24
- 初始化代理对象
25
-
26
- Args:
27
- proxy_url (str): 代理URL,支持带认证信息的格式
28
- - 带认证: http://username:password@proxy.example.com:8080
29
- - 无认证: http://proxy.example.com:8080
30
- """
31
- self.proxy_url = proxy_url
32
- self.parsed = urlparse(proxy_url)
33
-
34
- # 提取认证信息
35
- self.username = self.parsed.username
36
- self.password = self.parsed.password
37
-
38
- # 构造不带认证信息的URL
39
- self.clean_url = urlunparse((
40
- self.parsed.scheme,
41
- f"{self.parsed.hostname}:{self.parsed.port}" if self.parsed.port else self.parsed.hostname,
42
- self.parsed.path,
43
- self.parsed.params,
44
- self.parsed.query,
45
- self.parsed.fragment
46
- ))
47
-
48
- # 构造下载器兼容的代理字典
49
- self.proxy_dict = {
50
- "http": self.clean_url,
51
- "https": self.clean_url
52
- }
53
-
54
- # 如果有认证信息,构造认证字符串
55
- if self.username and self.password:
56
- self.auth_string = f"{self.username}:{self.password}"
57
- else:
58
- self.auth_string = None
59
-
60
- def get_proxy_for_downloader(self) -> Union[str, Dict[str, str]]:
61
- """
62
- 获取适用于下载器的代理配置
63
-
64
- Returns:
65
- Union[str, Dict[str, str]]: 代理配置
66
- - 对于AioHttp/CurlCffi: 返回字典格式 {"http": "...", "https": "..."}
67
- - 对于HttpX: 可以直接使用字符串或字典格式
68
- """
69
- return self.proxy_dict
70
-
71
- def get_auth_credentials(self) -> Optional[Dict[str, str]]:
72
- """
73
- 获取认证凭据
74
-
75
- Returns:
76
- Optional[Dict[str, str]]: 认证凭据 {"username": "...", "password": "..."}
77
- """
78
- if self.username and self.password:
79
- return {
80
- "username": self.username,
81
- "password": self.password
82
- }
83
- return None
84
-
85
- def get_auth_header(self) -> Optional[str]:
86
- """
87
- 获取Basic Auth认证头
88
-
89
- Returns:
90
- Optional[str]: Basic Auth头信息
91
- """
92
- if self.username and self.password:
93
- import base64
94
- credentials = f"{self.username}:{self.password}"
95
- encoded = base64.b64encode(credentials.encode()).decode()
96
- return f"Basic {encoded}"
97
- return None
98
-
99
- def is_valid(self) -> bool:
100
- """
101
- 检查代理URL是否有效
102
-
103
- Returns:
104
- bool: 代理URL是否有效
105
- """
106
- # 检查协议
107
- if self.parsed.scheme not in ["http", "https", "socks4", "socks5"]:
108
- return False
109
-
110
- # 检查主机名
111
- if not self.parsed.hostname:
112
- return False
113
-
114
- # 检查端口(如果指定)
115
- if self.parsed.port and (self.parsed.port < 1 or self.parsed.port > 65535):
116
- return False
117
-
118
- return True
119
-
120
- def __str__(self) -> str:
121
- return self.proxy_url
122
-
123
- def __repr__(self) -> str:
124
- return f"AuthenticatedProxy(url='{self.proxy_url}', username={self.username is not None})"
125
-
126
-
127
- def create_proxy_config(proxy_url: str) -> Dict[str, Any]:
128
- """
129
- 创建代理配置,兼容各种下载器
130
-
131
- Args:
132
- proxy_url (str): 代理URL
133
-
134
- Returns:
135
- Dict[str, Any]: 代理配置字典
136
- """
137
- proxy = AuthenticatedProxy(proxy_url)
138
-
139
- if not proxy.is_valid():
140
- raise ValueError(f"Invalid proxy URL: {proxy_url}")
141
-
142
- config = {
143
- "url": proxy.clean_url,
144
- "proxy_dict": proxy.proxy_dict,
145
- "has_auth": proxy.auth_string is not None
146
- }
147
-
148
- if proxy.auth_string:
149
- config["auth"] = proxy.get_auth_credentials()
150
- config["auth_header"] = proxy.get_auth_header()
151
-
152
- return config
153
-
154
-
155
- def format_proxy_for_request(proxy_config: Dict[str, Any], downloader_type: str = "aiohttp") -> Dict[str, Any]:
156
- """
157
- 格式化代理配置以适配特定下载器
158
-
159
- Args:
160
- proxy_config (Dict[str, Any]): 代理配置
161
- downloader_type (str): 下载器类型 (aiohttp, httpx, curl_cffi)
162
-
163
- Returns:
164
- Dict[str, Any]: 适配下载器的代理配置
165
- """
166
- formatted = {}
167
-
168
- if downloader_type.lower() == "aiohttp":
169
- # AioHttp使用proxy和proxy_auth参数
170
- formatted["proxy"] = proxy_config["url"]
171
- if proxy_config.get("has_auth") and proxy_config.get("auth"):
172
- from aiohttp import BasicAuth
173
- auth = proxy_config["auth"]
174
- formatted["proxy_auth"] = BasicAuth(auth["username"], auth["password"])
175
-
176
- elif downloader_type.lower() == "httpx":
177
- # HttpX可以直接使用代理URL字符串或字典
178
- formatted["proxy"] = proxy_config["url"]
179
-
180
- elif downloader_type.lower() == "curl_cffi":
181
- # CurlCffi使用proxies字典
182
- formatted["proxies"] = proxy_config["proxy_dict"]
183
- # 认证信息包含在URL中或通过headers传递
184
- if proxy_config.get("auth_header"):
185
- formatted["headers"] = {"Proxy-Authorization": proxy_config["auth_header"]}
186
-
187
- return formatted
188
-
189
-
190
- # 便捷函数
191
- def parse_proxy_url(proxy_url: str) -> Dict[str, Any]:
192
- """
193
- 解析代理URL并返回详细信息
194
-
195
- Args:
196
- proxy_url (str): 代理URL
197
-
198
- Returns:
199
- Dict[str, Any]: 代理详细信息
200
- """
201
- return create_proxy_config(proxy_url)
202
-
203
-
204
- def validate_proxy_url(proxy_url: str) -> bool:
205
- """
206
- 验证代理URL是否有效
207
-
208
- Args:
209
- proxy_url (str): 代理URL
210
-
211
- Returns:
212
- bool: 是否有效
213
- """
214
- try:
215
- proxy = AuthenticatedProxy(proxy_url)
216
- return proxy.is_valid()
217
- except:
218
- return False
219
-
220
-
221
- def get_proxy_info(proxy_url: str) -> Dict[str, Any]:
222
- """
223
- 获取代理详细信息
224
-
225
- Args:
226
- proxy_url (str): 代理URL
227
-
228
- Returns:
229
- Dict[str, Any]: 代理详细信息
230
- """
231
- proxy = AuthenticatedProxy(proxy_url)
232
- return {
233
- "original_url": proxy.proxy_url,
234
- "clean_url": proxy.clean_url,
235
- "scheme": proxy.parsed.scheme,
236
- "hostname": proxy.parsed.hostname,
237
- "port": proxy.parsed.port,
238
- "has_auth": proxy.auth_string is not None,
239
- "username": proxy.username,
240
- "is_valid": proxy.is_valid()
241
- }
@@ -1,226 +0,0 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- # @Time : 2025-09-10 22:00
5
- # @Author : crawl-coder
6
- # @Desc : 数据格式化工具
7
- """
8
- import re
9
- from typing import Any, Optional, Union
10
- from decimal import Decimal, InvalidOperation
11
-
12
-
13
- class DataFormatter:
14
- """
15
- 数据格式化工具类,提供各种数据格式化功能。
16
- 特别适用于爬虫中处理各种数据类型的格式化需求。
17
- """
18
-
19
- @staticmethod
20
- def format_number(value: Any,
21
- precision: int = 2,
22
- thousand_separator: bool = False) -> Optional[str]:
23
- """
24
- 格式化数字
25
-
26
- :param value: 数字值
27
- :param precision: 小数点精度
28
- :param thousand_separator: 是否使用千位分隔符
29
- :return: 格式化后的数字字符串
30
- """
31
- if value is None:
32
- return None
33
-
34
- try:
35
- # 转换为Decimal以避免浮点数精度问题
36
- decimal_value = Decimal(str(value))
37
-
38
- if thousand_separator:
39
- # 使用千位分隔符
40
- formatted = f"{decimal_value:,.{precision}f}"
41
- else:
42
- # 不使用千位分隔符
43
- formatted = f"{decimal_value:.{precision}f}"
44
-
45
- return formatted
46
- except (ValueError, InvalidOperation):
47
- return None
48
-
49
- @staticmethod
50
- def format_currency(value: Any,
51
- currency_symbol: str = "¥",
52
- precision: int = 2) -> Optional[str]:
53
- """
54
- 格式化货币
55
-
56
- :param value: 货币值
57
- :param currency_symbol: 货币符号
58
- :param precision: 小数点精度
59
- :return: 格式化后的货币字符串
60
- """
61
- formatted_number = DataFormatter.format_number(value, precision, thousand_separator=True)
62
- if formatted_number is None:
63
- return None
64
-
65
- return f"{currency_symbol}{formatted_number}"
66
-
67
- @staticmethod
68
- def format_percentage(value: Any,
69
- precision: int = 2,
70
- multiply_100: bool = True) -> Optional[str]:
71
- """
72
- 格式化百分比
73
-
74
- :param value: 百分比值
75
- :param precision: 小数点精度
76
- :param multiply_100: 是否乘以100(如果原始值是小数)
77
- :return: 格式化后的百分比字符串
78
- """
79
- if value is None:
80
- return None
81
-
82
- try:
83
- decimal_value = Decimal(str(value))
84
-
85
- if multiply_100:
86
- decimal_value *= 100
87
-
88
- formatted = f"{decimal_value:.{precision}f}%"
89
- return formatted
90
- except (ValueError, InvalidOperation):
91
- return None
92
-
93
- @staticmethod
94
- def format_phone_number(phone: str,
95
- country_code: str = "+86",
96
- format_type: str = "international") -> Optional[str]:
97
- """
98
- 格式化电话号码
99
-
100
- :param phone: 电话号码
101
- :param country_code: 国家代码
102
- :param format_type: 格式类型 ('international', 'domestic', 'plain')
103
- :return: 格式化后的电话号码
104
- """
105
- if not isinstance(phone, str):
106
- phone = str(phone)
107
-
108
- # 移除所有非数字字符
109
- digits = re.sub(r'\D', '', phone)
110
-
111
- if not digits:
112
- return None
113
-
114
- # 如果是11位中国手机号
115
- if len(digits) == 11 and digits.startswith('1'):
116
- if format_type == "international":
117
- return f"{country_code} {digits[:3]} {digits[3:7]} {digits[7:]}"
118
- elif format_type == "domestic":
119
- return f"{digits[:3]}-{digits[3:7]}-{digits[7:]}"
120
- else: # plain
121
- return digits
122
- else:
123
- # 其他情况简单处理
124
- if format_type == "international" and country_code:
125
- return f"{country_code} {digits}"
126
- else:
127
- return digits
128
-
129
- @staticmethod
130
- def format_chinese_id_card(id_card: str) -> Optional[str]:
131
- """
132
- 格式化中国身份证号码(隐藏中间部分)
133
-
134
- :param id_card: 身份证号码
135
- :return: 格式化后的身份证号码
136
- """
137
- if not isinstance(id_card, str):
138
- id_card = str(id_card)
139
-
140
- # 移除空格
141
- id_card = id_card.replace(" ", "")
142
-
143
- if len(id_card) == 18:
144
- # 18位身份证号
145
- return f"{id_card[:6]}********{id_card[-4:]}"
146
- elif len(id_card) == 15:
147
- # 15位身份证号
148
- return f"{id_card[:6]}******{id_card[-3:]}"
149
- else:
150
- return None
151
-
152
- @staticmethod
153
- def capitalize_words(text: str,
154
- delimiter: str = " ",
155
- preserve_articles: bool = True) -> str:
156
- """
157
- 单词首字母大写
158
-
159
- :param text: 文本
160
- :param delimiter: 单词分隔符
161
- :param preserve_articles: 是否保留冠词小写
162
- :return: 首字母大写后的文本
163
- """
164
- if not isinstance(text, str):
165
- return str(text)
166
-
167
- # 常见的冠词和介词
168
- articles = {'a', 'an', 'the', 'and', 'or', 'but', 'in', 'on', 'at', 'to', 'for', 'of', 'with', 'by'}
169
-
170
- words = text.split(delimiter)
171
- capitalized_words = []
172
-
173
- for i, word in enumerate(words):
174
- if not word:
175
- capitalized_words.append(word)
176
- continue
177
-
178
- # 第一个单词和最后一个单词总是大写
179
- if i == 0 or i == len(words) - 1 or not preserve_articles or word.lower() not in articles:
180
- capitalized_words.append(word.capitalize())
181
- else:
182
- capitalized_words.append(word.lower())
183
-
184
- return delimiter.join(capitalized_words)
185
-
186
-
187
- # =======================对外接口=======================
188
-
189
- def format_number(value: Any,
190
- precision: int = 2,
191
- thousand_separator: bool = False) -> Optional[str]:
192
- """格式化数字"""
193
- return DataFormatter.format_number(value, precision, thousand_separator)
194
-
195
-
196
- def format_currency(value: Any,
197
- currency_symbol: str = "¥",
198
- precision: int = 2) -> Optional[str]:
199
- """格式化货币"""
200
- return DataFormatter.format_currency(value, currency_symbol, precision)
201
-
202
-
203
- def format_percentage(value: Any,
204
- precision: int = 2,
205
- multiply_100: bool = True) -> Optional[str]:
206
- """格式化百分比"""
207
- return DataFormatter.format_percentage(value, precision, multiply_100)
208
-
209
-
210
- def format_phone_number(phone: str,
211
- country_code: str = "+86",
212
- format_type: str = "international") -> Optional[str]:
213
- """格式化电话号码"""
214
- return DataFormatter.format_phone_number(phone, country_code, format_type)
215
-
216
-
217
- def format_chinese_id_card(id_card: str) -> Optional[str]:
218
- """格式化中国身份证号码"""
219
- return DataFormatter.format_chinese_id_card(id_card)
220
-
221
-
222
- def capitalize_words(text: str,
223
- delimiter: str = " ",
224
- preserve_articles: bool = True) -> str:
225
- """单词首字母大写"""
226
- return DataFormatter.capitalize_words(text, delimiter, preserve_articles)
@@ -1,181 +0,0 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- # @Time : 2025-09-10 22:00
5
- # @Author : crawl-coder
6
- # @Desc : 数据验证工具
7
- """
8
-
9
- import re
10
- from typing import Any, Union, Dict, List
11
- from datetime import datetime
12
- from urllib.parse import urlparse
13
-
14
-
15
- class DataValidator:
16
- """数据验证工具类"""
17
-
18
- @staticmethod
19
- def validate_email(email: str) -> bool:
20
- """
21
- 验证邮箱地址格式
22
-
23
- Args:
24
- email (str): 邮箱地址
25
-
26
- Returns:
27
- bool: 验证结果
28
- """
29
- pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
30
- return bool(re.match(pattern, email))
31
-
32
- @staticmethod
33
- def validate_phone(phone: str, country_code: str = "CN") -> bool:
34
- """
35
- 验证电话号码格式
36
-
37
- Args:
38
- phone (str): 电话号码
39
- country_code (str): 国家代码,默认为"CN"
40
-
41
- Returns:
42
- bool: 验证结果
43
- """
44
- if country_code == "CN":
45
- # 中国手机号码格式
46
- pattern = r'^1[3-9]\d{9}$'
47
- return bool(re.match(pattern, phone))
48
- else:
49
- # 通用格式,只检查是否全为数字且长度在7-15之间
50
- pattern = r'^\d{7,15}$'
51
- return bool(re.match(pattern, phone))
52
-
53
- @staticmethod
54
- def validate_url(url: str) -> bool:
55
- """
56
- 验证URL格式
57
-
58
- Args:
59
- url (str): URL地址
60
-
61
- Returns:
62
- bool: 验证结果
63
- """
64
- try:
65
- result = urlparse(url)
66
- return all([result.scheme, result.netloc])
67
- except Exception:
68
- return False
69
-
70
- @staticmethod
71
- def validate_chinese_id_card(id_card: str) -> bool:
72
- """
73
- 验证中国身份证号码格式
74
-
75
- Args:
76
- id_card (str): 身份证号码
77
-
78
- Returns:
79
- bool: 验证结果
80
- """
81
- # 18位身份证号码格式
82
- pattern = r'^[1-9]\d{5}(18|19|20)\d{2}((0[1-9])|(1[0-2]))(([0-2][1-9])|10|20|30|31)\d{3}[0-9Xx]$'
83
- return bool(re.match(pattern, id_card))
84
-
85
- @staticmethod
86
- def validate_date(date_str: str, date_format: str = "%Y-%m-%d") -> bool:
87
- """
88
- 验证日期格式
89
-
90
- Args:
91
- date_str (str): 日期字符串
92
- date_format (str): 日期格式,默认为"%Y-%m-%d"
93
-
94
- Returns:
95
- bool: 验证结果
96
- """
97
- try:
98
- datetime.strptime(date_str, date_format)
99
- return True
100
- except ValueError:
101
- return False
102
-
103
- @staticmethod
104
- def validate_number_range(value: Union[int, float], min_val: Union[int, float],
105
- max_val: Union[int, float]) -> bool:
106
- """
107
- 验证数值是否在指定范围内
108
-
109
- Args:
110
- value (Union[int, float]): 要验证的数值
111
- min_val (Union[int, float]): 最小值
112
- max_val (Union[int, float]): 最大值
113
-
114
- Returns:
115
- bool: 验证结果
116
- """
117
- return min_val <= value <= max_val
118
-
119
- @staticmethod
120
- def check_data_integrity(data: Dict[str, Any], required_fields: List[str]) -> Dict[str, Any]:
121
- """
122
- 检查数据完整性,确保关键字段不为空
123
-
124
- Args:
125
- data (Dict[str, Any]): 要检查的数据
126
- required_fields (List[str]): 必需字段列表
127
-
128
- Returns:
129
- Dict[str, Any]: 检查结果,包含缺失字段和空值字段
130
- """
131
- missing_fields = []
132
- empty_fields = []
133
-
134
- for field in required_fields:
135
- if field not in data:
136
- missing_fields.append(field)
137
- elif data[field] is None or data[field] == "":
138
- empty_fields.append(field)
139
-
140
- return {
141
- "is_valid": len(missing_fields) == 0 and len(empty_fields) == 0,
142
- "missing_fields": missing_fields,
143
- "empty_fields": empty_fields
144
- }
145
-
146
-
147
- # 便捷函数
148
- def validate_email(email: str) -> bool:
149
- """验证邮箱地址格式"""
150
- return DataValidator.validate_email(email)
151
-
152
-
153
- def validate_phone(phone: str, country_code: str = "CN") -> bool:
154
- """验证电话号码格式"""
155
- return DataValidator.validate_phone(phone, country_code)
156
-
157
-
158
- def validate_url(url: str) -> bool:
159
- """验证URL格式"""
160
- return DataValidator.validate_url(url)
161
-
162
-
163
- def validate_chinese_id_card(id_card: str) -> bool:
164
- """验证中国身份证号码格式"""
165
- return DataValidator.validate_chinese_id_card(id_card)
166
-
167
-
168
- def validate_date(date_str: str, date_format: str = "%Y-%m-%d") -> bool:
169
- """验证日期格式"""
170
- return DataValidator.validate_date(date_str, date_format)
171
-
172
-
173
- def validate_number_range(value: Union[int, float], min_val: Union[int, float],
174
- max_val: Union[int, float]) -> bool:
175
- """验证数值是否在指定范围内"""
176
- return DataValidator.validate_number_range(value, min_val, max_val)
177
-
178
-
179
- def check_data_integrity(data: Dict[str, Any], required_fields: List[str]) -> Dict[str, Any]:
180
- """检查数据完整性"""
181
- return DataValidator.check_data_integrity(data, required_fields)