crawlo 1.1.4__py3-none-any.whl → 1.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of crawlo might be problematic. Click here for more details.

Files changed (186) hide show
  1. crawlo/__init__.py +61 -34
  2. crawlo/__version__.py +1 -1
  3. crawlo/cleaners/__init__.py +61 -0
  4. crawlo/cleaners/data_formatter.py +226 -0
  5. crawlo/cleaners/encoding_converter.py +126 -0
  6. crawlo/cleaners/text_cleaner.py +233 -0
  7. crawlo/cli.py +40 -40
  8. crawlo/commands/__init__.py +13 -13
  9. crawlo/commands/check.py +594 -594
  10. crawlo/commands/genspider.py +151 -151
  11. crawlo/commands/list.py +155 -155
  12. crawlo/commands/run.py +285 -285
  13. crawlo/commands/startproject.py +300 -196
  14. crawlo/commands/stats.py +188 -188
  15. crawlo/commands/utils.py +186 -186
  16. crawlo/config.py +309 -279
  17. crawlo/config_validator.py +253 -0
  18. crawlo/core/__init__.py +2 -2
  19. crawlo/core/engine.py +346 -172
  20. crawlo/core/processor.py +40 -40
  21. crawlo/core/scheduler.py +137 -166
  22. crawlo/crawler.py +1027 -1027
  23. crawlo/downloader/__init__.py +266 -242
  24. crawlo/downloader/aiohttp_downloader.py +220 -212
  25. crawlo/downloader/cffi_downloader.py +256 -251
  26. crawlo/downloader/httpx_downloader.py +259 -259
  27. crawlo/downloader/hybrid_downloader.py +214 -0
  28. crawlo/downloader/playwright_downloader.py +403 -0
  29. crawlo/downloader/selenium_downloader.py +473 -0
  30. crawlo/event.py +11 -11
  31. crawlo/exceptions.py +81 -81
  32. crawlo/extension/__init__.py +37 -37
  33. crawlo/extension/health_check.py +141 -141
  34. crawlo/extension/log_interval.py +57 -57
  35. crawlo/extension/log_stats.py +81 -81
  36. crawlo/extension/logging_extension.py +43 -43
  37. crawlo/extension/memory_monitor.py +104 -88
  38. crawlo/extension/performance_profiler.py +133 -117
  39. crawlo/extension/request_recorder.py +107 -107
  40. crawlo/filters/__init__.py +154 -154
  41. crawlo/filters/aioredis_filter.py +280 -242
  42. crawlo/filters/memory_filter.py +269 -269
  43. crawlo/items/__init__.py +23 -23
  44. crawlo/items/base.py +21 -21
  45. crawlo/items/fields.py +53 -53
  46. crawlo/items/items.py +104 -104
  47. crawlo/middleware/__init__.py +21 -21
  48. crawlo/middleware/default_header.py +32 -32
  49. crawlo/middleware/download_delay.py +28 -28
  50. crawlo/middleware/middleware_manager.py +135 -135
  51. crawlo/middleware/proxy.py +272 -248
  52. crawlo/middleware/request_ignore.py +30 -30
  53. crawlo/middleware/response_code.py +18 -18
  54. crawlo/middleware/response_filter.py +26 -26
  55. crawlo/middleware/retry.py +124 -124
  56. crawlo/mode_manager.py +206 -201
  57. crawlo/network/__init__.py +21 -21
  58. crawlo/network/request.py +338 -311
  59. crawlo/network/response.py +360 -271
  60. crawlo/pipelines/__init__.py +21 -21
  61. crawlo/pipelines/bloom_dedup_pipeline.py +156 -156
  62. crawlo/pipelines/console_pipeline.py +39 -39
  63. crawlo/pipelines/csv_pipeline.py +316 -316
  64. crawlo/pipelines/database_dedup_pipeline.py +224 -224
  65. crawlo/pipelines/json_pipeline.py +218 -218
  66. crawlo/pipelines/memory_dedup_pipeline.py +115 -115
  67. crawlo/pipelines/mongo_pipeline.py +131 -131
  68. crawlo/pipelines/mysql_pipeline.py +316 -316
  69. crawlo/pipelines/pipeline_manager.py +56 -56
  70. crawlo/pipelines/redis_dedup_pipeline.py +166 -162
  71. crawlo/project.py +153 -153
  72. crawlo/queue/pqueue.py +37 -37
  73. crawlo/queue/queue_manager.py +320 -307
  74. crawlo/queue/redis_priority_queue.py +277 -209
  75. crawlo/settings/__init__.py +7 -7
  76. crawlo/settings/default_settings.py +216 -278
  77. crawlo/settings/setting_manager.py +99 -99
  78. crawlo/spider/__init__.py +639 -639
  79. crawlo/stats_collector.py +59 -59
  80. crawlo/subscriber.py +130 -130
  81. crawlo/task_manager.py +30 -30
  82. crawlo/templates/crawlo.cfg.tmpl +10 -10
  83. crawlo/templates/project/__init__.py.tmpl +3 -3
  84. crawlo/templates/project/items.py.tmpl +17 -17
  85. crawlo/templates/project/middlewares.py.tmpl +110 -110
  86. crawlo/templates/project/pipelines.py.tmpl +97 -97
  87. crawlo/templates/project/run.py.tmpl +251 -251
  88. crawlo/templates/project/settings.py.tmpl +326 -279
  89. crawlo/templates/project/settings_distributed.py.tmpl +120 -0
  90. crawlo/templates/project/settings_gentle.py.tmpl +95 -0
  91. crawlo/templates/project/settings_high_performance.py.tmpl +152 -0
  92. crawlo/templates/project/settings_simple.py.tmpl +69 -0
  93. crawlo/templates/project/spiders/__init__.py.tmpl +5 -5
  94. crawlo/templates/spider/spider.py.tmpl +141 -141
  95. crawlo/tools/__init__.py +183 -0
  96. crawlo/tools/anti_crawler.py +269 -0
  97. crawlo/tools/authenticated_proxy.py +241 -0
  98. crawlo/tools/data_validator.py +181 -0
  99. crawlo/tools/date_tools.py +36 -0
  100. crawlo/tools/distributed_coordinator.py +387 -0
  101. crawlo/tools/retry_mechanism.py +221 -0
  102. crawlo/tools/scenario_adapter.py +263 -0
  103. crawlo/utils/__init__.py +35 -7
  104. crawlo/utils/batch_processor.py +261 -0
  105. crawlo/utils/controlled_spider_mixin.py +439 -439
  106. crawlo/utils/date_tools.py +290 -233
  107. crawlo/utils/db_helper.py +343 -343
  108. crawlo/utils/enhanced_error_handler.py +360 -0
  109. crawlo/utils/env_config.py +106 -0
  110. crawlo/utils/error_handler.py +126 -0
  111. crawlo/utils/func_tools.py +82 -82
  112. crawlo/utils/large_scale_config.py +286 -286
  113. crawlo/utils/large_scale_helper.py +343 -343
  114. crawlo/utils/log.py +128 -128
  115. crawlo/utils/performance_monitor.py +285 -0
  116. crawlo/utils/queue_helper.py +175 -175
  117. crawlo/utils/redis_connection_pool.py +335 -0
  118. crawlo/utils/redis_key_validator.py +200 -0
  119. crawlo/utils/request.py +267 -267
  120. crawlo/utils/request_serializer.py +219 -219
  121. crawlo/utils/spider_loader.py +62 -62
  122. crawlo/utils/system.py +11 -11
  123. crawlo/utils/tools.py +4 -4
  124. crawlo/utils/url.py +39 -39
  125. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/METADATA +401 -403
  126. crawlo-1.1.5.dist-info/RECORD +185 -0
  127. examples/__init__.py +7 -7
  128. tests/__init__.py +7 -7
  129. tests/advanced_tools_example.py +276 -0
  130. tests/authenticated_proxy_example.py +237 -0
  131. tests/cleaners_example.py +161 -0
  132. tests/config_validation_demo.py +103 -0
  133. {examples → tests}/controlled_spider_example.py +205 -205
  134. tests/date_tools_example.py +181 -0
  135. tests/dynamic_loading_example.py +524 -0
  136. tests/dynamic_loading_test.py +105 -0
  137. tests/env_config_example.py +134 -0
  138. tests/error_handling_example.py +172 -0
  139. tests/redis_key_validation_demo.py +131 -0
  140. tests/response_improvements_example.py +145 -0
  141. tests/test_advanced_tools.py +149 -0
  142. tests/test_all_redis_key_configs.py +146 -0
  143. tests/test_authenticated_proxy.py +142 -0
  144. tests/test_cleaners.py +55 -0
  145. tests/test_comprehensive.py +147 -0
  146. tests/test_config_validator.py +194 -0
  147. tests/test_date_tools.py +124 -0
  148. tests/test_dynamic_downloaders_proxy.py +125 -0
  149. tests/test_dynamic_proxy.py +93 -0
  150. tests/test_dynamic_proxy_config.py +147 -0
  151. tests/test_dynamic_proxy_real.py +110 -0
  152. tests/test_edge_cases.py +304 -0
  153. tests/test_enhanced_error_handler.py +271 -0
  154. tests/test_env_config.py +122 -0
  155. tests/test_error_handler_compatibility.py +113 -0
  156. tests/test_final_validation.py +153 -153
  157. tests/test_framework_env_usage.py +104 -0
  158. tests/test_integration.py +357 -0
  159. tests/test_item_dedup_redis_key.py +123 -0
  160. tests/test_parsel.py +30 -0
  161. tests/test_performance.py +328 -0
  162. tests/test_proxy_health_check.py +32 -32
  163. tests/test_proxy_middleware_integration.py +136 -136
  164. tests/test_proxy_providers.py +56 -56
  165. tests/test_proxy_stats.py +19 -19
  166. tests/test_proxy_strategies.py +59 -59
  167. tests/test_queue_manager_redis_key.py +177 -0
  168. tests/test_redis_config.py +28 -28
  169. tests/test_redis_connection_pool.py +295 -0
  170. tests/test_redis_key_naming.py +182 -0
  171. tests/test_redis_key_validator.py +124 -0
  172. tests/test_redis_queue.py +224 -224
  173. tests/test_request_serialization.py +70 -70
  174. tests/test_response_improvements.py +153 -0
  175. tests/test_scheduler.py +241 -241
  176. tests/test_simple_response.py +62 -0
  177. tests/test_telecom_spider_redis_key.py +206 -0
  178. tests/test_template_content.py +88 -0
  179. tests/test_template_redis_key.py +135 -0
  180. tests/test_tools.py +154 -0
  181. tests/tools_example.py +258 -0
  182. crawlo/core/enhanced_engine.py +0 -190
  183. crawlo-1.1.4.dist-info/RECORD +0 -117
  184. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/WHEEL +0 -0
  185. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/entry_points.txt +0 -0
  186. {crawlo-1.1.4.dist-info → crawlo-1.1.5.dist-info}/top_level.txt +0 -0
crawlo/__init__.py CHANGED
@@ -1,35 +1,62 @@
1
- #!/usr/bin/python
2
- # -*- coding: UTF-8 -*-
3
- """
4
- Crawlo - 一个异步爬虫框架
5
- """
6
- from crawlo.spider import Spider
7
- from crawlo.items import Item, Field
8
- from crawlo.network.request import Request
9
- from crawlo.network.response import Response
10
- from crawlo.downloader import DownloaderBase
11
- from crawlo.middleware import BaseMiddleware
12
-
13
-
14
- # 版本号:优先从元数据读取
15
- try:
16
- from importlib.metadata import version
17
- __version__ = version("crawlo")
18
- except Exception:
19
- # 开发模式下可能未安装,回退到 __version__.py 或 dev
20
- try:
21
- from crawlo.__version__ import __version__
22
- except ImportError:
23
- __version__ = "dev"
24
-
25
- # 定义对外 API
26
- __all__ = [
27
- 'Spider',
28
- 'Item',
29
- 'Field',
30
- 'Request',
31
- 'Response',
32
- 'DownloaderBase',
33
- 'BaseMiddleware',
34
- '__version__',
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ Crawlo - 一个异步爬虫框架
5
+ """
6
+ from crawlo.spider import Spider
7
+ from crawlo.items import Item, Field
8
+ from crawlo.network.request import Request
9
+ from crawlo.network.response import Response
10
+ from crawlo.downloader import DownloaderBase
11
+ from crawlo.middleware import BaseMiddleware
12
+ from crawlo.utils import (
13
+ TimeUtils,
14
+ parse_time,
15
+ format_time,
16
+ time_diff,
17
+ to_timestamp,
18
+ to_datetime,
19
+ now,
20
+ to_timezone,
21
+ to_utc,
22
+ to_local,
23
+ from_timestamp_with_tz
24
+ )
25
+ from crawlo import cleaners
26
+ from crawlo import tools
27
+
28
+ # 版本号:优先从元数据读取
29
+ try:
30
+ from importlib.metadata import version
31
+ __version__ = version("crawlo")
32
+ except Exception:
33
+ # 开发模式下可能未安装,回退到 __version__.py 或 dev
34
+ try:
35
+ from crawlo.__version__ import __version__
36
+ except ImportError:
37
+ __version__ = "dev"
38
+
39
+ # 定义对外 API
40
+ __all__ = [
41
+ 'Spider',
42
+ 'Item',
43
+ 'Field',
44
+ 'Request',
45
+ 'Response',
46
+ 'DownloaderBase',
47
+ 'BaseMiddleware',
48
+ 'TimeUtils',
49
+ 'parse_time',
50
+ 'format_time',
51
+ 'time_diff',
52
+ 'to_timestamp',
53
+ 'to_datetime',
54
+ 'now',
55
+ 'to_timezone',
56
+ 'to_utc',
57
+ 'to_local',
58
+ 'from_timestamp_with_tz',
59
+ 'cleaners',
60
+ 'tools',
61
+ '__version__',
35
62
  ]
crawlo/__version__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "1.1.4"
1
+ __version__ = "1.1.5"
@@ -0,0 +1,61 @@
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-09-10 22:00
5
+ # @Author : crawl-coder
6
+ # @Desc : 数据清洗工具包
7
+ """
8
+
9
+ from .text_cleaner import (
10
+ TextCleaner,
11
+ remove_html_tags,
12
+ decode_html_entities,
13
+ remove_extra_whitespace,
14
+ remove_special_chars,
15
+ normalize_unicode,
16
+ clean_text,
17
+ extract_numbers,
18
+ extract_emails,
19
+ extract_urls
20
+ )
21
+
22
+ from .data_formatter import (
23
+ DataFormatter,
24
+ format_number,
25
+ format_currency,
26
+ format_percentage,
27
+ format_phone_number,
28
+ format_chinese_id_card,
29
+ capitalize_words
30
+ )
31
+
32
+ from .encoding_converter import (
33
+ EncodingConverter,
34
+ detect_encoding,
35
+ to_utf8,
36
+ convert_encoding
37
+ )
38
+
39
+ __all__ = [
40
+ "TextCleaner",
41
+ "DataFormatter",
42
+ "EncodingConverter",
43
+ "remove_html_tags",
44
+ "decode_html_entities",
45
+ "remove_extra_whitespace",
46
+ "remove_special_chars",
47
+ "normalize_unicode",
48
+ "clean_text",
49
+ "extract_numbers",
50
+ "extract_emails",
51
+ "extract_urls",
52
+ "format_number",
53
+ "format_currency",
54
+ "format_percentage",
55
+ "format_phone_number",
56
+ "format_chinese_id_card",
57
+ "capitalize_words",
58
+ "detect_encoding",
59
+ "to_utf8",
60
+ "convert_encoding"
61
+ ]
@@ -0,0 +1,226 @@
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-09-10 22:00
5
+ # @Author : crawl-coder
6
+ # @Desc : 数据格式化工具
7
+ """
8
+ import re
9
+ from typing import Any, Optional, Union
10
+ from decimal import Decimal, InvalidOperation
11
+
12
+
13
class DataFormatter:
    """
    Formatting helpers for scraped data: numbers, currency, percentages,
    phone numbers, Chinese ID-card numbers and word capitalisation.

    All methods are static; unparsable input yields ``None`` rather than
    raising, which suits best-effort cleaning pipelines.
    """

    @staticmethod
    def format_number(value: Any,
                      precision: int = 2,
                      thousand_separator: bool = False) -> Optional[str]:
        """
        Render a numeric value with a fixed number of decimals.

        :param value: number-like value (anything ``Decimal(str(value))`` accepts)
        :param precision: decimal places to keep
        :param thousand_separator: insert ``,`` grouping when True
        :return: formatted string, or None if the value cannot be parsed
        """
        if value is None:
            return None
        try:
            # Route through Decimal so binary-float artefacts don't surface.
            number = Decimal(str(value))
            spec = f",.{precision}f" if thousand_separator else f".{precision}f"
            return format(number, spec)
        except (ValueError, InvalidOperation):
            return None

    @staticmethod
    def format_currency(value: Any,
                        currency_symbol: str = "¥",
                        precision: int = 2) -> Optional[str]:
        """
        Render a monetary amount with thousands grouping and a leading symbol.

        :return: e.g. ``¥1,234.50``, or None if the value cannot be parsed
        """
        amount = DataFormatter.format_number(value, precision, thousand_separator=True)
        return None if amount is None else f"{currency_symbol}{amount}"

    @staticmethod
    def format_percentage(value: Any,
                          precision: int = 2,
                          multiply_100: bool = True) -> Optional[str]:
        """
        Render a percentage string.

        :param precision: decimal places to keep
        :param multiply_100: scale by 100 first (for ratio-style input)
        :return: e.g. ``12.34%``, or None if the value cannot be parsed
        """
        if value is None:
            return None
        try:
            ratio = Decimal(str(value))
            if multiply_100:
                ratio *= 100
            return f"{ratio:.{precision}f}%"
        except (ValueError, InvalidOperation):
            return None

    @staticmethod
    def format_phone_number(phone: str,
                            country_code: str = "+86",
                            format_type: str = "international") -> Optional[str]:
        """
        Normalise a phone number.

        11-digit numbers starting with ``1`` are treated as mainland-China
        mobile numbers and grouped 3-4-4; anything else is reduced to bare
        digits, optionally prefixed with the country code.

        :param format_type: 'international', 'domestic' or 'plain'
        :return: formatted number, or None when the input has no digits
        """
        digits = re.sub(r'\D', '', phone if isinstance(phone, str) else str(phone))
        if not digits:
            return None
        if len(digits) == 11 and digits.startswith('1'):
            groups = (digits[:3], digits[3:7], digits[7:])
            if format_type == "international":
                return f"{country_code} {groups[0]} {groups[1]} {groups[2]}"
            if format_type == "domestic":
                return "-".join(groups)
            return digits  # 'plain'
        if format_type == "international" and country_code:
            return f"{country_code} {digits}"
        return digits

    @staticmethod
    def format_chinese_id_card(id_card: str) -> Optional[str]:
        """
        Mask the middle section of a Chinese ID-card number.

        :return: masked number for 18- or 15-character input, otherwise None
        """
        raw = (id_card if isinstance(id_card, str) else str(id_card)).replace(" ", "")
        if len(raw) == 18:
            return f"{raw[:6]}********{raw[-4:]}"
        if len(raw) == 15:
            return f"{raw[:6]}******{raw[-3:]}"
        return None

    @staticmethod
    def capitalize_words(text: str,
                         delimiter: str = " ",
                         preserve_articles: bool = True) -> str:
        """
        Title-case *text*, optionally keeping short articles/prepositions
        lower-case. The first and last words are always capitalised.

        :param delimiter: word separator to split and rejoin on
        :param preserve_articles: keep common minor words lower-case
        """
        if not isinstance(text, str):
            return str(text)
        # Common English articles / conjunctions / prepositions.
        minor = {'a', 'an', 'the', 'and', 'or', 'but', 'in', 'on', 'at',
                 'to', 'for', 'of', 'with', 'by'}
        words = text.split(delimiter)
        last = len(words) - 1
        result = []
        for idx, word in enumerate(words):
            if not word:
                result.append(word)
            elif idx in (0, last) or not preserve_articles or word.lower() not in minor:
                result.append(word.capitalize())
            else:
                result.append(word.lower())
        return delimiter.join(result)
185
+
186
+
187
+ # =======================对外接口=======================
188
+
189
def format_number(value: Any,
                  precision: int = 2,
                  thousand_separator: bool = False) -> Optional[str]:
    """Module-level shortcut for :meth:`DataFormatter.format_number`."""
    return DataFormatter.format_number(
        value, precision=precision, thousand_separator=thousand_separator
    )
194
+
195
+
196
def format_currency(value: Any,
                    currency_symbol: str = "¥",
                    precision: int = 2) -> Optional[str]:
    """Module-level shortcut for :meth:`DataFormatter.format_currency`."""
    return DataFormatter.format_currency(
        value, currency_symbol=currency_symbol, precision=precision
    )
201
+
202
+
203
def format_percentage(value: Any,
                      precision: int = 2,
                      multiply_100: bool = True) -> Optional[str]:
    """Module-level shortcut for :meth:`DataFormatter.format_percentage`."""
    return DataFormatter.format_percentage(
        value, precision=precision, multiply_100=multiply_100
    )
208
+
209
+
210
def format_phone_number(phone: str,
                        country_code: str = "+86",
                        format_type: str = "international") -> Optional[str]:
    """Module-level shortcut for :meth:`DataFormatter.format_phone_number`."""
    return DataFormatter.format_phone_number(
        phone, country_code=country_code, format_type=format_type
    )
215
+
216
+
217
def format_chinese_id_card(id_card: str) -> Optional[str]:
    """Module-level shortcut for :meth:`DataFormatter.format_chinese_id_card`."""
    return DataFormatter.format_chinese_id_card(id_card)
220
+
221
+
222
def capitalize_words(text: str,
                     delimiter: str = " ",
                     preserve_articles: bool = True) -> str:
    """Module-level shortcut for :meth:`DataFormatter.capitalize_words`."""
    return DataFormatter.capitalize_words(
        text, delimiter=delimiter, preserve_articles=preserve_articles
    )
@@ -0,0 +1,126 @@
1
+ #!/usr/bin/python
2
+ # -*- coding: UTF-8 -*-
3
+ """
4
+ # @Time : 2025-09-10 22:00
5
+ # @Author : crawl-coder
6
+ # @Desc : 编码转换工具
7
+ """
8
+ try:
9
+ import chardet
10
+ HAS_CHARDET = True
11
+ except ImportError:
12
+ HAS_CHARDET = False
13
+ from typing import Optional, Union
14
+
15
+
16
class EncodingConverter:
    """
    Encoding helpers for crawled payloads: charset detection (via chardet
    when installed), decoding bytes to text, and re-encoding text to a
    target charset. Failures are reported as ``None`` rather than raised.
    """

    @staticmethod
    def detect_encoding(data: Union[str, bytes]) -> Optional[str]:
        """
        Guess the charset of *data*.

        str input is reported as 'utf-8' (it is already decoded); bytes are
        fed to chardet when available; any other type — or a failed
        detection — yields None.
        """
        if isinstance(data, str):
            return 'utf-8'
        if not isinstance(data, bytes):
            return None
        if not HAS_CHARDET:
            # No detector installed; caller must supply the encoding.
            return None
        try:
            return chardet.detect(data)['encoding']
        except Exception:
            return None

    @staticmethod
    def to_utf8(data: Union[str, bytes], source_encoding: Optional[str] = None) -> Optional[str]:
        """
        Decode *data* to text.

        :param data: string (passed through) or raw bytes
        :param source_encoding: charset to decode with; auto-detected when
            None, falling back to a short list of common charsets
        :return: decoded string, or None on failure
        """
        if isinstance(data, str):
            return data
        if not isinstance(data, bytes):
            return None
        try:
            if source_encoding is not None:
                return data.decode(source_encoding)
            detected = EncodingConverter.detect_encoding(data)
            if detected is not None:
                return data.decode(detected)
            # Detection unavailable or failed: try the usual suspects in order.
            for candidate in ('utf-8', 'gbk', 'gb2312', 'latin1'):
                try:
                    return data.decode(candidate)
                except UnicodeDecodeError:
                    continue
            return None
        except Exception:
            return None

    @staticmethod
    def convert_encoding(data: Union[str, bytes],
                         source_encoding: Optional[str] = None,
                         target_encoding: str = 'utf-8') -> Optional[bytes]:
        """
        Transcode *data* to *target_encoding* bytes, decoding first via
        :meth:`to_utf8`.

        :return: re-encoded bytes, or None when either step fails
        """
        text = EncodingConverter.to_utf8(data, source_encoding)
        if text is None:
            return None
        try:
            return text.encode(target_encoding)
        except Exception:
            return None
108
+
109
+
110
+ # =======================对外接口=======================
111
+
112
def detect_encoding(data: Union[str, bytes]) -> Optional[str]:
    """Module-level shortcut for :meth:`EncodingConverter.detect_encoding`."""
    return EncodingConverter.detect_encoding(data)
115
+
116
+
117
def to_utf8(data: Union[str, bytes], source_encoding: Optional[str] = None) -> Optional[str]:
    """Module-level shortcut for :meth:`EncodingConverter.to_utf8`."""
    return EncodingConverter.to_utf8(data, source_encoding=source_encoding)
120
+
121
+
122
def convert_encoding(data: Union[str, bytes],
                     source_encoding: Optional[str] = None,
                     target_encoding: str = 'utf-8') -> Optional[bytes]:
    """Module-level shortcut for :meth:`EncodingConverter.convert_encoding`."""
    return EncodingConverter.convert_encoding(
        data, source_encoding=source_encoding, target_encoding=target_encoding
    )