re-common 10.0.39__py3-none-any.whl → 10.0.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (221)
  1. re_common/baselibrary/__init__.py +4 -4
  2. re_common/baselibrary/baseabs/__init__.py +6 -6
  3. re_common/baselibrary/baseabs/baseabs.py +26 -26
  4. re_common/baselibrary/database/mbuilder.py +132 -132
  5. re_common/baselibrary/database/moudle.py +93 -93
  6. re_common/baselibrary/database/msqlite3.py +194 -194
  7. re_common/baselibrary/database/mysql.py +169 -169
  8. re_common/baselibrary/database/sql_factory.py +26 -26
  9. re_common/baselibrary/mthread/MThreadingRun.py +486 -486
  10. re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -349
  11. re_common/baselibrary/mthread/__init__.py +2 -2
  12. re_common/baselibrary/mthread/mythreading.py +695 -695
  13. re_common/baselibrary/pakge_other/socks.py +404 -404
  14. re_common/baselibrary/readconfig/config_factory.py +18 -18
  15. re_common/baselibrary/readconfig/ini_config.py +317 -317
  16. re_common/baselibrary/readconfig/toml_config.py +49 -49
  17. re_common/baselibrary/temporary/envdata.py +36 -36
  18. re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -118
  19. re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -102
  20. re_common/baselibrary/tools/all_requests/mrequest.py +412 -412
  21. re_common/baselibrary/tools/all_requests/requests_request.py +81 -81
  22. re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -31
  23. re_common/baselibrary/tools/contrast_db3.py +123 -123
  24. re_common/baselibrary/tools/copy_file.py +39 -39
  25. re_common/baselibrary/tools/db3_2_sizedb3.py +102 -102
  26. re_common/baselibrary/tools/foreachgz.py +39 -39
  27. re_common/baselibrary/tools/get_attr.py +10 -10
  28. re_common/baselibrary/tools/image_to_pdf.py +61 -61
  29. re_common/baselibrary/tools/java_code_deal.py +139 -139
  30. re_common/baselibrary/tools/javacode.py +79 -79
  31. re_common/baselibrary/tools/mdb_db3.py +48 -48
  32. re_common/baselibrary/tools/merge_file.py +171 -171
  33. re_common/baselibrary/tools/merge_gz_file.py +165 -165
  34. re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -42
  35. re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -42
  36. re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -38
  37. re_common/baselibrary/tools/mongo_tools.py +50 -50
  38. re_common/baselibrary/tools/move_file.py +170 -170
  39. re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -63
  40. re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -354
  41. re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -18
  42. re_common/baselibrary/tools/move_mongo/use_mv.py +93 -93
  43. re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -125
  44. re_common/baselibrary/tools/mpandas/pandas_visualization.py +7 -7
  45. re_common/baselibrary/tools/myparsel.py +104 -104
  46. re_common/baselibrary/tools/rename_dir_file.py +37 -37
  47. re_common/baselibrary/tools/sequoiadb_utils.py +398 -398
  48. re_common/baselibrary/tools/split_line_to_many.py +25 -25
  49. re_common/baselibrary/tools/stringtodicts.py +33 -33
  50. re_common/baselibrary/tools/workwechant_bot.py +84 -84
  51. re_common/baselibrary/utils/baseaiohttp.py +296 -296
  52. re_common/baselibrary/utils/baseaiomysql.py +87 -87
  53. re_common/baselibrary/utils/baseallstep.py +191 -191
  54. re_common/baselibrary/utils/baseavro.py +19 -19
  55. re_common/baselibrary/utils/baseboto3.py +291 -291
  56. re_common/baselibrary/utils/basecsv.py +32 -32
  57. re_common/baselibrary/utils/basedict.py +133 -133
  58. re_common/baselibrary/utils/basedir.py +241 -241
  59. re_common/baselibrary/utils/baseencode.py +351 -351
  60. re_common/baselibrary/utils/baseencoding.py +28 -28
  61. re_common/baselibrary/utils/baseesdsl.py +86 -86
  62. re_common/baselibrary/utils/baseexcel.py +264 -264
  63. re_common/baselibrary/utils/baseexcept.py +109 -109
  64. re_common/baselibrary/utils/basefile.py +654 -654
  65. re_common/baselibrary/utils/baseftp.py +214 -214
  66. re_common/baselibrary/utils/basegzip.py +60 -60
  67. re_common/baselibrary/utils/basehdfs.py +135 -135
  68. re_common/baselibrary/utils/basehttpx.py +268 -268
  69. re_common/baselibrary/utils/baseip.py +87 -87
  70. re_common/baselibrary/utils/basejson.py +2 -2
  71. re_common/baselibrary/utils/baselist.py +32 -32
  72. re_common/baselibrary/utils/basemotor.py +190 -190
  73. re_common/baselibrary/utils/basemssql.py +98 -98
  74. re_common/baselibrary/utils/baseodbc.py +113 -113
  75. re_common/baselibrary/utils/basepandas.py +302 -302
  76. re_common/baselibrary/utils/basepeewee.py +11 -11
  77. re_common/baselibrary/utils/basepika.py +180 -180
  78. re_common/baselibrary/utils/basepydash.py +143 -143
  79. re_common/baselibrary/utils/basepymongo.py +230 -230
  80. re_common/baselibrary/utils/basequeue.py +22 -22
  81. re_common/baselibrary/utils/baserar.py +57 -57
  82. re_common/baselibrary/utils/baserequest.py +279 -279
  83. re_common/baselibrary/utils/baseset.py +8 -8
  84. re_common/baselibrary/utils/basesmb.py +403 -403
  85. re_common/baselibrary/utils/basestring.py +382 -382
  86. re_common/baselibrary/utils/basetime.py +320 -320
  87. re_common/baselibrary/utils/baseurl.py +121 -121
  88. re_common/baselibrary/utils/basezip.py +57 -57
  89. re_common/baselibrary/utils/core/__init__.py +7 -7
  90. re_common/baselibrary/utils/core/bottomutils.py +18 -18
  91. re_common/baselibrary/utils/core/mdeprecated.py +327 -327
  92. re_common/baselibrary/utils/core/mlamada.py +16 -16
  93. re_common/baselibrary/utils/core/msginfo.py +25 -25
  94. re_common/baselibrary/utils/core/requests_core.py +103 -103
  95. re_common/baselibrary/utils/fateadm.py +429 -429
  96. re_common/baselibrary/utils/importfun.py +123 -123
  97. re_common/baselibrary/utils/mfaker.py +57 -57
  98. re_common/baselibrary/utils/my_abc/__init__.py +3 -3
  99. re_common/baselibrary/utils/my_abc/better_abc.py +32 -32
  100. re_common/baselibrary/utils/mylogger.py +414 -414
  101. re_common/baselibrary/utils/myredisclient.py +861 -861
  102. re_common/baselibrary/utils/pipupgrade.py +21 -21
  103. re_common/baselibrary/utils/ringlist.py +85 -85
  104. re_common/baselibrary/utils/version_compare.py +36 -36
  105. re_common/baselibrary/utils/ydmhttp.py +126 -126
  106. re_common/facade/lazy_import.py +11 -11
  107. re_common/facade/loggerfacade.py +25 -25
  108. re_common/facade/mysqlfacade.py +467 -467
  109. re_common/facade/now.py +31 -31
  110. re_common/facade/sqlite3facade.py +257 -257
  111. re_common/facade/use/mq_use_facade.py +83 -83
  112. re_common/facade/use/proxy_use_facade.py +19 -19
  113. re_common/libtest/base_dict_test.py +19 -19
  114. re_common/libtest/baseavro_test.py +13 -13
  115. re_common/libtest/basefile_test.py +14 -14
  116. re_common/libtest/basemssql_test.py +77 -77
  117. re_common/libtest/baseodbc_test.py +7 -7
  118. re_common/libtest/basepandas_test.py +38 -38
  119. re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -14
  120. re_common/libtest/get_attr_test/settings.py +54 -54
  121. re_common/libtest/idencode_test.py +53 -53
  122. re_common/libtest/iniconfig_test.py +35 -35
  123. re_common/libtest/ip_test.py +34 -34
  124. re_common/libtest/merge_file_test.py +20 -20
  125. re_common/libtest/mfaker_test.py +8 -8
  126. re_common/libtest/mm3_test.py +31 -31
  127. re_common/libtest/mylogger_test.py +88 -88
  128. re_common/libtest/myparsel_test.py +27 -27
  129. re_common/libtest/mysql_test.py +151 -151
  130. re_common/libtest/pymongo_test.py +21 -21
  131. re_common/libtest/split_test.py +11 -11
  132. re_common/libtest/sqlite3_merge_test.py +5 -5
  133. re_common/libtest/sqlite3_test.py +34 -34
  134. re_common/libtest/tomlconfig_test.py +30 -30
  135. re_common/libtest/use_tools_test/__init__.py +2 -2
  136. re_common/libtest/user/__init__.py +4 -4
  137. re_common/studio/__init__.py +4 -4
  138. re_common/studio/assignment_expressions.py +36 -36
  139. re_common/studio/mydash/test1.py +18 -18
  140. re_common/studio/pydashstudio/first.py +9 -9
  141. re_common/studio/streamlitstudio/first_app.py +65 -65
  142. re_common/studio/streamlitstudio/uber_pickups.py +23 -23
  143. re_common/studio/test.py +18 -18
  144. re_common/v2/baselibrary/business_utils/BusinessStringUtil.py +235 -220
  145. re_common/v2/baselibrary/business_utils/baseencodeid.py +100 -100
  146. re_common/v2/baselibrary/business_utils/full_doi_path.py +116 -116
  147. re_common/v2/baselibrary/business_utils/rel_tools.py +6 -6
  148. re_common/v2/baselibrary/decorators/utils.py +59 -59
  149. re_common/v2/baselibrary/helpers/search_packge/NearestNeighbors_test.py +105 -105
  150. re_common/v2/baselibrary/helpers/search_packge/fit_text_match.py +253 -253
  151. re_common/v2/baselibrary/helpers/search_packge/scikit_learn_text_matcher.py +260 -260
  152. re_common/v2/baselibrary/helpers/search_packge/test.py +1 -1
  153. re_common/v2/baselibrary/s3object/baseboto3.py +230 -230
  154. re_common/v2/baselibrary/tools/WeChatRobot.py +95 -95
  155. re_common/v2/baselibrary/tools/ac_ahocorasick.py +75 -75
  156. re_common/v2/baselibrary/tools/concurrency.py +35 -35
  157. re_common/v2/baselibrary/tools/data_processer/base.py +53 -53
  158. re_common/v2/baselibrary/tools/data_processer/data_processer.py +497 -508
  159. re_common/v2/baselibrary/tools/data_processer/data_reader.py +187 -187
  160. re_common/v2/baselibrary/tools/data_processer/data_writer.py +38 -38
  161. re_common/v2/baselibrary/tools/dict_tools.py +44 -44
  162. re_common/v2/baselibrary/tools/dolphinscheduler.py +187 -187
  163. re_common/v2/baselibrary/tools/hdfs_base_processor.py +204 -204
  164. re_common/v2/baselibrary/tools/hdfs_bulk_processor.py +67 -67
  165. re_common/v2/baselibrary/tools/hdfs_data_processer.py +338 -338
  166. re_common/v2/baselibrary/tools/hdfs_line_processor.py +74 -74
  167. re_common/v2/baselibrary/tools/list_tools.py +69 -69
  168. re_common/v2/baselibrary/tools/resume_tracker.py +94 -94
  169. re_common/v2/baselibrary/tools/search_hash_tools.py +54 -54
  170. re_common/v2/baselibrary/tools/text_matcher.py +326 -326
  171. re_common/v2/baselibrary/tools/tree_processor/__init__.py +0 -0
  172. re_common/v2/baselibrary/tools/tree_processor/builder.py +25 -0
  173. re_common/v2/baselibrary/tools/tree_processor/node.py +13 -0
  174. re_common/v2/baselibrary/tools/unionfind_tools.py +60 -60
  175. re_common/v2/baselibrary/utils/BusinessStringUtil.py +196 -196
  176. re_common/v2/baselibrary/utils/api_net_utils.py +270 -270
  177. re_common/v2/baselibrary/utils/author_smi.py +361 -361
  178. re_common/v2/baselibrary/utils/base_string_similarity.py +158 -158
  179. re_common/v2/baselibrary/utils/basedict.py +37 -37
  180. re_common/v2/baselibrary/utils/basehdfs.py +163 -163
  181. re_common/v2/baselibrary/utils/basepika.py +180 -180
  182. re_common/v2/baselibrary/utils/basetime.py +94 -77
  183. re_common/v2/baselibrary/utils/db.py +174 -156
  184. re_common/v2/baselibrary/utils/elasticsearch.py +46 -0
  185. re_common/v2/baselibrary/utils/json_cls.py +16 -16
  186. re_common/v2/baselibrary/utils/mq.py +83 -83
  187. re_common/v2/baselibrary/utils/n_ary_expression_tree.py +243 -243
  188. re_common/v2/baselibrary/utils/string_bool.py +187 -186
  189. re_common/v2/baselibrary/utils/string_clear.py +246 -246
  190. re_common/v2/baselibrary/utils/string_smi.py +18 -18
  191. re_common/v2/baselibrary/utils/stringutils.py +312 -271
  192. re_common/vip/base_step_process.py +11 -11
  193. re_common/vip/baseencodeid.py +90 -90
  194. re_common/vip/changetaskname.py +28 -28
  195. re_common/vip/core_var.py +24 -24
  196. re_common/vip/mmh3Hash.py +89 -89
  197. re_common/vip/proxy/allproxys.py +127 -127
  198. re_common/vip/proxy/allproxys_thread.py +159 -159
  199. re_common/vip/proxy/cnki_proxy.py +153 -153
  200. re_common/vip/proxy/kuaidaili.py +87 -87
  201. re_common/vip/proxy/proxy_all.py +113 -113
  202. re_common/vip/proxy/update_kuaidaili_0.py +42 -42
  203. re_common/vip/proxy/wanfang_proxy.py +152 -152
  204. re_common/vip/proxy/wp_proxy_all.py +181 -181
  205. re_common/vip/read_rawid_to_txt.py +91 -91
  206. re_common/vip/title/__init__.py +5 -5
  207. re_common/vip/title/transform/TransformBookTitleToZt.py +125 -125
  208. re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -139
  209. re_common/vip/title/transform/TransformCstadTitleToZt.py +195 -195
  210. re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -203
  211. re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -132
  212. re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -114
  213. re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -135
  214. re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -135
  215. re_common/vip/title/transform/__init__.py +10 -10
  216. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/LICENSE +201 -201
  217. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/METADATA +16 -16
  218. re_common-10.0.41.dist-info/RECORD +252 -0
  219. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/WHEEL +1 -1
  220. re_common-10.0.39.dist-info/RECORD +0 -248
  221. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/top_level.txt +0 -0
re_common/v2/baselibrary/tools/hdfs_base_processor.py
@@ -1,204 +1,204 @@
Every line of this file is removed and re-added with identical content: the 10.0.39 and 10.0.41 copies are line-for-line the same, so the hunk reflects a byte-level rewrite (e.g. line endings) rather than a code change. The file reads:

import abc
import asyncio
import sys
from concurrent.futures import ProcessPoolExecutor
import gzip

import multiprocessing
from pathlib import Path

from io import BytesIO
import time
from typing import Awaitable, Callable, Any, Generator, List, Literal, Union

from hdfs import InsecureClient


from re_common.v2.baselibrary.tools.resume_tracker import ResumeTracker


_pool = None
_loop = None


class HDFSBaseProcessor(abc.ABC):
    def __init__(
        self,
        hdfs_dir: str,
        hdfs_url: str = "http://VIP-DC-MASTER-2:9870",
        hdfs_user: str = "root",
        db_path: Union[str, Path] = "processed_files.db",
        concurrency: int = 50,
        batch_size: int = 50,
        encoding: str = "utf-8",
        read_mode: Literal["all", "stream"] = "all",
        retries: int = 3,
        pool_factory: Callable[[], Awaitable[Any]] = None,
        max_processes: int = None,  # multiprocess support
        result_dir: str = None,
    ):
        self.hdfs_dir = hdfs_dir
        self.hdfs_url = hdfs_url
        self.hdfs_user = hdfs_user
        self.tracker = ResumeTracker(db_path)
        self.concurrency = concurrency
        self.batch_size = batch_size
        self.encoding = encoding
        self.read_mode = read_mode
        self.retries = retries
        self.result_dir = result_dir
        self.pool_factory = pool_factory
        self.max_processes = max_processes or max(multiprocessing.cpu_count() - 1, 1)  # default: CPU cores - 1
        self._client = None

        self.tracker.init_db()

    @property
    def client(self):
        if self._client is None:
            self._client = InsecureClient(self.hdfs_url, user=self.hdfs_user)
        return self._client

    async def _get_pool(self):
        if self.pool_factory is None:
            return None
        global _pool
        if _pool is None:
            _pool = await self.pool_factory()
        return _pool

    def _list_gz_files(self) -> List[str]:
        """List all gzip files in the HDFS directory."""
        return [
            f"{self.hdfs_dir}/{file[0]}"
            for file in self.client.list(self.hdfs_dir, status=True)
            if file[0].endswith(".gz")
        ]

    def _count_total_lines(self, gz_file_path: str) -> int:
        with self.client.read(gz_file_path) as hdfs_file:
            with gzip.GzipFile(fileobj=hdfs_file) as gz:
                return sum(1 for _ in gz)

    def _batch_read_gz_stream(self, gz_file_path: str) -> Generator[List[str], Any, None]:
        """Stream-read a gz file, yielding lines in batches."""
        with self.client.read(gz_file_path) as hdfs_file:
            with gzip.GzipFile(fileobj=hdfs_file) as gz:
                while True:
                    lines = []
                    for _ in range(self.batch_size):
                        try:
                            line = next(gz)
                            if line.strip():  # drop empty lines
                                lines.append(line.decode(self.encoding))  # decode
                        except StopIteration:  # end of file reached
                            break
                    if not lines:
                        break
                    yield lines

    def _batch_read_gz_all(self, gz_file_path: str) -> List[List[str]]:
        """Read the whole gz file at once and return its lines as a 2-D batch list."""
        with self.client.read(gz_file_path) as reader:  # read in binary mode
            compressed_data = reader.read()  # read the compressed bytes
        with gzip.GzipFile(fileobj=BytesIO(compressed_data)) as gz_file:  # decompress
            content = gz_file.read().decode(self.encoding)  # decode to a string
        print(f"File read successfully: {gz_file_path}")
        lines = [i for i in content.split("\n") if i.strip()]
        batch_lines = [lines[i : i + self.batch_size] for i in range(0, len(lines), self.batch_size)]
        return batch_lines

    def _batch_read_gz(self, gz_file_path: str):
        # Pick the file-reading strategy according to the configured mode.
        if self.read_mode == "stream":
            return self._batch_read_gz_stream(gz_file_path)
        else:
            return self._batch_read_gz_all(gz_file_path)

    def _generate_write_data(self, results):
        for res in results:
            yield str(res) + "\n"

    def _print_progress(self, file_path, processed_lines, total_lines, start_time, processing_start_time):
        elapsed_time = time.perf_counter() - start_time  # total elapsed time
        processing_time = time.perf_counter() - processing_start_time  # time spent on this batch
        avg_processing_time = (
            (elapsed_time * 1000) / processed_lines if processed_lines > 0 else float("inf")
        )  # average processing time per line (ms)
        # Estimate the remaining time.
        remaining_time = (
            ((avg_processing_time / 1000) * (total_lines - processed_lines)) if processed_lines > 0 else float("inf")
        )
        # Report progress.
        print(
            f"File: {file_path} progress: {processed_lines}/{total_lines} lines | "
            f"elapsed: {elapsed_time:.2f}s | this batch: {processing_time:.2f}s | "
            f"estimated remaining: {remaining_time:.2f}s | avg per line: {avg_processing_time:.2f}ms"
        )

    def _print_final_progress(self, file_path, processed_lines, total_lines, start_time):
        final_elapsed_time = time.perf_counter() - start_time  # total elapsed time
        print(
            f"File: {file_path} done | progress: {processed_lines}/{total_lines} lines | "
            f"total elapsed: {final_elapsed_time:.2f}s | "
            f"avg per line: {(final_elapsed_time * 1000) / processed_lines:.2f}ms"
            if processed_lines > 0
            else "No data processed"
        )

    @abc.abstractmethod
    async def _process_file(self, hdfs_file_path, process_func):
        pass

    async def _retry_process_file(self, hdfs_file_path, process_func):
        """Process a file with retry support."""
        retry_count = 0
        while retry_count < self.retries:
            try:
                if self.tracker.is_processed(hdfs_file_path):
                    print(f"Skipping already processed file: {hdfs_file_path}")
                    return True
                await self._process_file(hdfs_file_path, process_func)
                self.tracker.mark_processed(hdfs_file_path)  # mark the file as processed
                return True  # exit on success
            except Exception as e:
                retry_count += 1
                print(f"Error processing {hdfs_file_path}: {e}, retrying {retry_count}/{self.retries}")
                await asyncio.sleep(2**retry_count)
        print(f"Giving up on {hdfs_file_path}: retry limit reached")
        return False

    def _process_file_wrapper(self, args):
        """Synchronous wrapper for running inside a worker process."""
        hdfs_file_path, process_func = args
        if sys.platform == "win32":
            loop = asyncio.get_event_loop()
            return loop.run_until_complete(self._retry_process_file(hdfs_file_path, process_func))
        else:
            global _loop
            if _loop is None:
                _loop = asyncio.new_event_loop()
                asyncio.set_event_loop(_loop)
            return _loop.run_until_complete(self._retry_process_file(hdfs_file_path, process_func))

    async def _run_multi_process(self, gz_files, process_func):
        """Run the file-processing tasks across multiple processes."""
        args_list = [(file_path, process_func) for file_path in gz_files]
        with ProcessPoolExecutor(max_workers=self.max_processes) as executor:
            # return executor.map(self._process_file_wrapper, args_list)
            loop = asyncio.get_running_loop()
            self._client = None  # drop the HDFS client: it cannot be pickled and would hang the worker pool
            tasks = [loop.run_in_executor(executor, self._process_file_wrapper, args) for args in args_list]
            results = await asyncio.gather(*tasks)

        if all(results):
            # Clear the resume records once everything has been processed.
            self.tracker.clear_processed_items()
            print(f"Resume records cleared: {self.tracker.db_path}")
            return results
        else:
            raise Exception("Some or all files failed to process")

    @abc.abstractmethod
    async def map(self, process_func: Callable[[Any, Any], Awaitable[Any]]) -> None:
        pass
re_common/v2/baselibrary/tools/hdfs_bulk_processor.py
@@ -1,67 +1,67 @@
As with the base class above, every line is removed and re-added unchanged; the hunk is a byte-level rewrite, not a code change. The file reads:

from pathlib import Path
import time
from typing import Any, Awaitable, Callable, List
from re_common.v2.baselibrary.tools.concurrency import AsyncTaskPool
from re_common.v2.baselibrary.tools.hdfs_base_processor import HDFSBaseProcessor


class HDFSBulkProcessor(HDFSBaseProcessor):
    def _flat_map(self, results):
        # return itertools.chain.from_iterable(chunked_results)
        for res in results:
            if isinstance(res, list):
                yield from res
            else:
                yield res

    async def _process_file(self, hdfs_file_path, process_func):
        start_time = time.perf_counter()
        total_lines = self._count_total_lines(hdfs_file_path)
        processed_lines = 0
        pool = await self._get_pool()

        tasks = []
        for lines in self._batch_read_gz(hdfs_file_path):
            # Handle each batch that was read.
            if lines:
                tasks.append(process_func(lines, pool))  # hand the batch to the handler and collect the coroutine
                processed_lines += len(lines)  # update the processed-line count
        results = await AsyncTaskPool(self.concurrency).run(tasks)

        if self.result_dir is not None:
            self.client.write(
                self.result_dir.rstrip("/") + f"/{Path(hdfs_file_path).stem}",
                data=self._generate_write_data(self._flat_map(results)),
                overwrite=True,
                encoding=self.encoding,
            )

        # Final progress report.
        self._print_final_progress(hdfs_file_path, processed_lines, total_lines, start_time)

    async def map(self, process_func: Callable[[List[str], Any], Awaitable[Any]]) -> None:
        gz_files = self._list_gz_files()
        await self._run_multi_process(gz_files, process_func)


# async def test_func(lines: List[str], pool):
#     pass


# async def main():
#     processor = HDFSBulkProcessor(
#         "/xx/xx",
#         db_path=Path(__file__).parent / "test_bulk.db",
#         concurrency=200,
#         batch_size=1000,
#         pool_factory=get_pool,
#         max_processes=2,
#         result_dir="/xx/xx_res",
#     )
#     # processor.tracker.mark_many_processed(f"/xx/xx/part-{num:05d}.gz" for num in range(0, 6000))

#     await processor.map(test_func)


# if __name__ == "__main__":
#     asyncio.run(main())
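The commented-out driver above sketches the intended entry point. For reference, here is a minimal runnable variant of it — a sketch only, assuming a hypothetical HDFS input directory /data/in of .gz files and no connection pool (pool_factory left unset, so the handler's pool argument arrives as None); the handler name upper_lines is illustrative, not part of the package:

import asyncio
from typing import Any, List

from re_common.v2.baselibrary.tools.hdfs_bulk_processor import HDFSBulkProcessor


async def upper_lines(lines: List[str], pool: Any) -> List[str]:
    # Toy batch handler: must be a module-level coroutine so it can be
    # pickled into the worker processes. Returned lists are flattened by
    # _flat_map and written line by line to result_dir.
    return [line.upper() for line in lines]


async def main():
    processor = HDFSBulkProcessor(
        "/data/in",                 # hypothetical HDFS directory of .gz files
        db_path="bulk_demo.db",     # local SQLite file used by ResumeTracker
        concurrency=50,             # max in-flight batches per file
        batch_size=500,             # lines handed to upper_lines per call
        max_processes=2,            # worker processes, one file at a time each
        result_dir="/data/in_res",  # hypothetical HDFS output directory
    )
    await processor.map(upper_lines)


if __name__ == "__main__":
    asyncio.run(main())

Each .gz file is handled in its own worker process while AsyncTaskPool throttles that file's batches, and ResumeTracker lets a re-run skip files that already completed.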