re-common 10.0.28__py3-none-any.whl → 10.0.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -109,8 +109,8 @@ def doi_to_dir(doi):
      return first_dir + "/" + second_dir


- def get_doi_path(doi):
+ def get_doi_path(doi, case_insensitive=False):
      # currently in use
      dir_path = doi_to_dir(doi)
-     file_name = base_lngid.getDoiid(doi) + ".pdf"
+     file_name = base_lngid.getDoiid(doi, case_insensitive=case_insensitive) + ".pdf"
      return dir_path + "/" + file_name
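
Note on the change above: the new case_insensitive flag is simply forwarded to base_lngid.getDoiid, so the generated file name can be normalized without changing the directory layout produced by doi_to_dir. A minimal sketch of a call, assuming re_common >= 10.0.30 is installed; the import path is taken from the RECORD entry for full_doi_path.py and the DOI value is made up:

    from re_common.v2.baselibrary.business_utils.full_doi_path import get_doi_path

    # hypothetical DOI, for illustration only
    print(get_doi_path("10.1000/Example.123"))                         # previous behaviour
    print(get_doi_path("10.1000/Example.123", case_insensitive=True))  # new optional flag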
@@ -0,0 +1,35 @@
+ import asyncio
+ from asyncio import Semaphore
+ from typing import Awaitable, List, Iterable, Callable
+
+
+ class AsyncTaskPool:
+     def __init__(self, max_workers: int = 10):
+         """
+
+         Args:
+             max_workers: maximum number of tasks running concurrently
+         """
+         self.semaphore = Semaphore(max_workers)
+
+     async def _run_task(self, task: Awaitable):
+         async with self.semaphore:
+             return await task
+
+     async def run(self, tasks: List[Awaitable]):
+         return await asyncio.gather(*[self._run_task(task) for task in tasks])
+
+     async def map(self, fn: Callable[..., Awaitable], *iterables: Iterable):
+         tasks = [fn(*args) for args in zip(*iterables)]
+         return await self.run(tasks)
+
+
+ if __name__ == "__main__":
+
+     async def test(x, y):
+         await asyncio.sleep(1)
+         print(x, y)
+         return x + y
+
+     result = asyncio.run(AsyncTaskPool(2).map(test, [1, 2, 3, 4], [5, 6, 7, 8]))
+     print(result)
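
The new concurrency.py module above bounds concurrency with a single asyncio.Semaphore: every awaitable is wrapped by _run_task, so at most max_workers of them run at once, while asyncio.gather keeps the results in input order. Besides the map() style shown in its __main__ block, run() can be driven with pre-built coroutines; a small sketch (fetch() and the URLs are hypothetical):

    import asyncio

    from re_common.v2.baselibrary.tools.concurrency import AsyncTaskPool

    async def fetch(url: str) -> str:
        await asyncio.sleep(0.1)  # stand-in for real I/O
        return url

    async def main():
        urls = [f"https://example.org/{i}" for i in range(20)]
        # at most 5 coroutines are in flight at any time; results keep input order
        return await AsyncTaskPool(max_workers=5).run([fetch(u) for u in urls])

    print(asyncio.run(main()))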
@@ -0,0 +1,195 @@
+ import abc
+ import asyncio
+ from concurrent.futures import ProcessPoolExecutor
+ import gzip
+
+ import multiprocessing
+ from pathlib import Path
+
+ from io import BytesIO
+ import time
+ from typing import Awaitable, Callable, Any, Generator, List, Literal, Union
+
+ from hdfs import InsecureClient
+
+
+ from re_common.v2.baselibrary.tools.resume_tracker import ResumeTracker
+
+
+ _pool = None
+
+
+ class HDFSBaseProcessor(abc.ABC):
+     def __init__(
+         self,
+         hdfs_dir: str,
+         hdfs_url: str = "http://VIP-DC-MASTER-2:9870",
+         hdfs_user: str = "root",
+         db_path: Union[str, Path] = "processed_files.db",
+         concurrency: int = 50,
+         batch_size: int = 50,
+         encoding: str = "utf-8",
+         read_mode: Literal["all", "stream"] = "all",
+         retries: int = 3,
+         pool_factory: Callable[[], Awaitable[Any]] = None,
+         max_processes: int = None,  # multiprocessing support
+         result_dir: str = None,
+     ):
+         self.hdfs_dir = hdfs_dir
+         self.hdfs_url = hdfs_url
+         self.hdfs_user = hdfs_user
+         self.tracker = ResumeTracker(db_path)
+         self.concurrency = concurrency
+         self.batch_size = batch_size
+         self.encoding = encoding
+         self.read_mode = read_mode
+         self.retries = retries
+         self.result_dir = result_dir
+         self.pool_factory = pool_factory
+         self.max_processes = max_processes or max(multiprocessing.cpu_count() - 1, 1)  # defaults to CPU core count - 1
+         self._client = None
+
+         self.tracker.init_db()
+
+     @property
+     def client(self):
+         if self._client is None:
+             self._client = InsecureClient(self.hdfs_url, user=self.hdfs_user)
+         return self._client
+
+     async def _get_pool(self):
+         if self.pool_factory is None:
+             return None
+         global _pool
+         if _pool is None:
+             _pool = await self.pool_factory()
+         return _pool
+
+     def _list_gz_files(self) -> List[str]:
+         """List all gzip files in the HDFS directory"""
+         return [
+             f"{self.hdfs_dir}/{file[0]}"
+             for file in self.client.list(self.hdfs_dir, status=True)
+             if file[0].endswith(".gz")
+         ]
+
+     def _count_total_lines(self, gz_file_path: str) -> int:
+         with self.client.read(gz_file_path) as hdfs_file:
+             with gzip.GzipFile(fileobj=hdfs_file) as gz:
+                 return sum(1 for _ in gz)
+
+     def _batch_read_gz_stream(self, gz_file_path: str) -> Generator[List[str], Any, None]:
+         """Stream-read the gz file, yielding it batch by batch"""
+         with self.client.read(gz_file_path) as hdfs_file:
+             with gzip.GzipFile(fileobj=hdfs_file) as gz:
+                 while True:
+                     lines = []
+                     for _ in range(self.batch_size):
+                         try:
+                             line = next(gz)
+                             if line.strip():  # skip empty lines
+                                 lines.append(line.decode(self.encoding))  # decode
+                         except StopIteration:  # file exhausted
+                             break
+                     if not lines:
+                         break
+                     yield lines
+
+     def _batch_read_gz_all(self, gz_file_path: str) -> List[List[str]]:
+         """Read the whole gz file at once and return it as batches (a 2-D list)"""
+         with self.client.read(gz_file_path) as reader:  # read in binary mode
+             compressed_data = reader.read()  # read the compressed data
+         with gzip.GzipFile(fileobj=BytesIO(compressed_data)) as gz_file:  # decompress
+             content = gz_file.read().decode(self.encoding)  # decode to a string
+         print(f"File read successfully: {gz_file_path}")
+         lines = [i for i in content.split("\n") if i.strip()]
+         batch_lines = [lines[i : i + self.batch_size] for i in range(0, len(lines), self.batch_size)]
+         return batch_lines
+
+     def _batch_read_gz(self, gz_file_path: str):
+         # choose the reading method according to the configured read_mode
+         if self.read_mode == "stream":
+             return self._batch_read_gz_stream(gz_file_path)
+         else:
+             return self._batch_read_gz_all(gz_file_path)
+
+     def _generate_write_data(self, results):
+         for res in results:
+             yield str(res) + "\n"
+
+     def _print_progress(self, file_path, processed_lines, total_lines, start_time, processing_start_time):
+         elapsed_time = time.perf_counter() - start_time  # elapsed time so far
+         processing_time = time.perf_counter() - processing_start_time  # time spent on this batch
+         avg_processing_time = (
+             (elapsed_time * 1000) / processed_lines if processed_lines > 0 else float("inf")
+         )  # average processing time per record (ms)
+         # estimate the remaining time
+         remaining_time = (
+             ((avg_processing_time / 1000) * (total_lines - processed_lines)) if processed_lines > 0 else float("inf")
+         )
+         # show progress information
+         print(
+             f"File: {file_path} progress: {processed_lines}/{total_lines} lines | "
+             f"elapsed: {elapsed_time:.2f}s | batch time: {processing_time:.2f}s | "
+             f"estimated remaining: {remaining_time:.2f}s | average per record: {avg_processing_time:.2f}ms"
+         )
+
+     def _print_final_progress(self, file_path, processed_lines, total_lines, start_time):
+         final_elapsed_time = time.perf_counter() - start_time  # total elapsed time
+         print(
+             f"File: {file_path} done | progress: {processed_lines}/{total_lines} lines | "
+             f"total elapsed: {final_elapsed_time:.2f}s | "
+             f"average per record: {(final_elapsed_time * 1000) / processed_lines:.2f}ms"
+             if processed_lines > 0
+             else "no data processed"
+         )
+
+     @abc.abstractmethod
+     async def _process_file(self, hdfs_file_path, process_func):
+         pass
+
+     async def _retry_process_file(self, hdfs_file_path, process_func):
+         """Process a file with retries"""
+         retry_count = 0
+         while retry_count < self.retries:
+             try:
+                 if self.tracker.is_processed(hdfs_file_path):
+                     print(f"Skipping already processed file: {hdfs_file_path}")
+                     return True
+                 await self._process_file(hdfs_file_path, process_func)
+                 self.tracker.mark_processed(hdfs_file_path)  # mark the file as processed
+                 return True  # exit after a successful run
+             except Exception as e:
+                 retry_count += 1
+                 print(f"Error while processing file {hdfs_file_path}: {e}, retrying {retry_count}/{self.retries}")
+                 await asyncio.sleep(2**retry_count)
+         print(f"Failed to process file {hdfs_file_path}, retry limit reached")
+         return False
+
+     def _process_file_wrapper(self, args):
+         """Synchronous wrapper used for multiprocess execution"""
+         hdfs_file_path, process_func = args
+         loop = asyncio.get_event_loop()
+         return loop.run_until_complete(self._retry_process_file(hdfs_file_path, process_func))
+
+     async def _run_multi_process(self, gz_files, process_func):
+         """Run the file-processing tasks concurrently across multiple processes"""
+         args_list = [(file_path, process_func) for file_path in gz_files]
+         with ProcessPoolExecutor(max_workers=self.max_processes) as executor:
+             # return executor.map(self._process_file_wrapper, args_list)
+             loop = asyncio.get_running_loop()
+             self._client = None  # drop the connection object, which cannot be pickled and would hang the workers
+             tasks = [loop.run_in_executor(executor, self._process_file_wrapper, args) for args in args_list]
+             results = await asyncio.gather(*tasks)
+
+         if all(results):
+             # clear the checkpoint records once everything has finished
+             self.tracker.clear_processed_items()
+             print(f"Checkpoint records cleared: {self.tracker.db_path}")
+             return results
+         else:
+             raise Exception("Some or all files failed to process")
+
+     @abc.abstractmethod
+     async def map(self, process_func: Callable[[Any, Any], Awaitable[Any]]) -> None:
+         pass
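
hdfs_base_processor.py above is an abstract base: subclasses only have to provide _process_file and map, and inherit HDFS listing, gzip batching, retries, SQLite resume tracking and the multi-process fan-out (each worker process runs its own event loop via _process_file_wrapper, and _get_pool lazily builds one pool per process). A minimal, illustrative subclass sketch, not part of the package; the shipped HDFSBulkProcessor and HDFSLineProcessor below are the real implementations:

    from typing import Any, Awaitable, Callable, List

    from re_common.v2.baselibrary.tools.hdfs_base_processor import HDFSBaseProcessor

    class EchoProcessor(HDFSBaseProcessor):
        async def _process_file(self, hdfs_file_path, process_func):
            pool = await self._get_pool()  # None unless a pool_factory was supplied
            for lines in self._batch_read_gz(hdfs_file_path):
                await process_func(lines, pool)

        async def map(self, process_func: Callable[[List[str], Any], Awaitable[Any]]) -> None:
            await self._run_multi_process(self._list_gz_files(), process_func)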
@@ -0,0 +1,67 @@
+ from pathlib import Path
+ import time
+ from typing import Any, Awaitable, Callable, List
+ from re_common.v2.baselibrary.tools.concurrency import AsyncTaskPool
+ from re_common.v2.baselibrary.tools.hdfs_base_processor import HDFSBaseProcessor
+
+
+ class HDFSBulkProcessor(HDFSBaseProcessor):
+     def _flat_map(self, results):
+         # return itertools.chain.from_iterable(chunked_results)
+         for res in results:
+             if isinstance(res, list):
+                 yield from res
+             else:
+                 yield res
+
+     async def _process_file(self, hdfs_file_path, process_func):
+         start_time = time.perf_counter()
+         total_lines = self._count_total_lines(hdfs_file_path)
+         processed_lines = 0
+         pool = await self._get_pool()
+
+         tasks = []
+         for lines in self._batch_read_gz(hdfs_file_path):
+             # process the batch that was read
+             if lines:
+                 tasks.append(process_func(lines, pool))  # pass the batch to the processing function and collect the task
+                 processed_lines += len(lines)  # update the processed-line count
+         results = await AsyncTaskPool(self.concurrency).run(tasks)
+
+         if self.result_dir is not None:
+             self.client.write(
+                 self.result_dir.rstrip("/") + f"/{Path(hdfs_file_path).stem}",
+                 data=self._generate_write_data(self._flat_map(results)),
+                 overwrite=True,
+                 encoding=self.encoding,
+             )
+
+         # final progress report
+         self._print_final_progress(hdfs_file_path, processed_lines, total_lines, start_time)
+
+     async def map(self, process_func: Callable[[List[str], Any], Awaitable[Any]]) -> None:
+         gz_files = self._list_gz_files()
+         await self._run_multi_process(gz_files, process_func)
+
+
+ # async def test_func(lines: List[str], pool):
+ #     pass
+
+
+ # async def main():
+ #     processor = HDFSBulkProcessor(
+ #         "/xx/xx",
+ #         db_path=Path(__file__).parent / "test_bulk.db",
+ #         concurrency=200,
+ #         batch_size=1000,
+ #         pool_factory=get_pool,
+ #         max_processes=2,
+ #         result_dir="/xx/xx_res",
+ #     )
+ #     # processor.tracker.mark_many_processed(f"/xx/xx/part-{num:05d}.gz" for num in range(0, 6000))
+
+ #     await processor.map(test_func)
+
+
+ # if __name__ == "__main__":
+ #     asyncio.run(main())
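
With HDFSBulkProcessor, process_func receives a whole batch (List[str]) at a time, and a returned list is flattened by _flat_map before being written to result_dir. A tiny, hypothetical batch handler:

    from typing import List

    async def handle_batch(lines: List[str], pool) -> List[str]:
        # one round-trip (e.g. a bulk upsert) per batch instead of per line
        return [line.strip().upper() for line in lines]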
@@ -0,0 +1,74 @@
+ import asyncio
+
+ from pathlib import Path
+ import time
+ from typing import Any, Awaitable, Callable
+ from re_common.v2.baselibrary.tools.concurrency import AsyncTaskPool
+ from re_common.v2.baselibrary.tools.hdfs_base_processor import HDFSBaseProcessor
+
+
+ class HDFSLineProcessor(HDFSBaseProcessor):
+     async def _process_data(self, data, process_func, pool):
+         """Run the processing function on a single record"""
+         retry_count = 0
+         while retry_count < self.retries:
+             try:
+                 return await process_func(data, pool)  # exit after a successful run
+             except Exception as e:
+                 retry_count += 1
+                 print(f"Error while processing data: {e}, retrying {retry_count}/{self.retries}, data: {data}")
+                 await asyncio.sleep(2**retry_count)
+         raise Exception(f"Failed to process data, retry limit reached, data: {data}")
+
+     async def _process_file(self, hdfs_file_path, process_func):
+         """Process a single gz file"""
+         start_time = time.perf_counter()
+         total_lines = self._count_total_lines(hdfs_file_path)
+         processed_lines = 0
+         pool = await self._get_pool()
+         results = []
+
+         for lines in self._batch_read_gz(hdfs_file_path):
+             processing_start_time = time.perf_counter()  # record when this batch started
+
+             tasks = [self._process_data(line, process_func, pool) for line in lines]
+             results.extend(await AsyncTaskPool(self.concurrency).run(tasks))
+
+             processed_lines += len(lines)
+
+             self._print_progress(hdfs_file_path, processed_lines, total_lines, start_time, processing_start_time)
+
+         if self.result_dir is not None:
+             self.client.write(
+                 self.result_dir.rstrip("/") + f"/{Path(hdfs_file_path).stem}",
+                 data=self._generate_write_data(results),
+                 overwrite=True,
+                 encoding=self.encoding,
+             )
+
+         # final progress report
+         self._print_final_progress(hdfs_file_path, processed_lines, total_lines, start_time)
+
+     async def map(self, process_func: Callable[[str, Any], Awaitable[Any]]) -> None:
+         gz_files = self._list_gz_files()
+         await self._run_multi_process(gz_files, process_func)
+
+
+ # async def test_func(line: str, pool):
+ #     pass
+
+
+ # async def main():
+ #     await HDFSLineProcessor(
+ #         "/xx/xx",
+ #         db_path=Path(__file__).parent / "test.db",
+ #         concurrency=200,
+ #         batch_size=1000,
+ #         pool_factory=get_pool,
+ #         max_processes=2,
+ #         result_dir="/xx/xx_res",
+ #     ).map(test_func)
+
+
+ # if __name__ == "__main__":
+ #     asyncio.run(main())
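
HDFSLineProcessor differs from HDFSBulkProcessor only in granularity: process_func is called once per line, with per-record retries in _process_data, instead of once per batch. A sketch of a line-level job, assuming an HDFS directory of .gz files; paths and sizes are placeholders, and with no pool_factory the pool argument is simply None:

    import asyncio
    from pathlib import Path

    from re_common.v2.baselibrary.tools.hdfs_line_processor import HDFSLineProcessor

    async def handle_line(line: str, pool):
        # parse / enrich the record here; return values are written to result_dir
        return line.upper()

    async def main():
        await HDFSLineProcessor(
            "/data/in",
            db_path=Path(__file__).parent / "resume.db",
            concurrency=100,
            batch_size=500,
            max_processes=2,
            result_dir="/data/out",
        ).map(handle_line)

    if __name__ == "__main__":
        asyncio.run(main())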
@@ -0,0 +1,94 @@
+ import logging
+ from pathlib import Path
+ import sqlite3
+ from typing import Iterable, List, Literal, Union
+
+ logger = logging.getLogger(__name__)
+
+
+ class ResumeTracker:
+     def __init__(
+         self,
+         db_path: Union[str, Path] = "processed.db",
+         timeout: float = 10.0,
+         isolation_level: Literal["DEFERRED", "EXCLUSIVE", "IMMEDIATE"] | None = "DEFERRED",
+     ):
+         self.db_path = Path(db_path)
+         self.timeout = timeout
+         self.isolation_level = isolation_level
+         self.init_db()
+
+     def _get_connection(self) -> sqlite3.Connection:
+         """Create a database connection"""
+         return sqlite3.connect(self.db_path, timeout=self.timeout, isolation_level=self.isolation_level)
+
+     def init_db(self):
+         with self._get_connection() as conn:
+             cursor = conn.cursor()
+             cursor.execute("""
+                 CREATE TABLE IF NOT EXISTS processed_items (
+                     item_key TEXT PRIMARY KEY
+                 )
+             """)
+             conn.commit()
+
+     def is_processed(self, item_key: str) -> bool:
+         with self._get_connection() as conn:
+             cursor = conn.cursor()
+             cursor.execute(
+                 "SELECT 1 FROM processed_items WHERE item_key = ?",
+                 (item_key,),
+             )
+             return cursor.fetchone() is not None
+
+     def mark_processed(self, item_key: str):
+         with self._get_connection() as conn:
+             cursor = conn.cursor()
+             cursor.execute(
+                 "INSERT OR IGNORE INTO processed_items (item_key) VALUES (?)",
+                 (item_key,),
+             )
+             conn.commit()
+
+     def mark_many_processed(self, item_keys: Iterable[str]):
+         with self._get_connection() as conn:
+             cursor = conn.cursor()
+             cursor.executemany(
+                 "INSERT OR IGNORE INTO processed_items (item_key) VALUES (?)",
+                 [(key,) for key in item_keys],
+             )
+             conn.commit()
+
+     def get_processed_count(self) -> int:
+         with self._get_connection() as conn:
+             cursor = conn.cursor()
+             cursor.execute("SELECT COUNT(*) FROM processed_items")
+             return cursor.fetchone()[0]
+
+     def get_processed_items(self) -> List[str]:
+         with self._get_connection() as conn:
+             cursor = conn.cursor()
+             cursor.execute("SELECT item_key FROM processed_items")
+             return [row[0] for row in cursor.fetchall()]
+
+     def clear_processed_items(self):
+         with self._get_connection() as conn:
+             conn.execute("DELETE FROM processed_items")
+             logger.info("Cleared all processed items")
+
+
+ if __name__ == "__main__":
+     tracker = ResumeTracker()
+     # test the marking functionality
+     tracker.mark_processed("test_key")
+     print(f"Is 'test_key' processed? {tracker.is_processed('test_key')}")
+
+     # bulk-marking example
+     test_keys = [f"key_{i}" for i in range(1, 10000)]
+     tracker.mark_many_processed(test_keys)
+
+     # show the processed count
+     print(f"Total processed items: {tracker.get_processed_count()}")
+
+     # clean up the test data
+     tracker.clear_processed_items()
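
resume_tracker.py is the checkpoint store behind HDFSBaseProcessor: every successfully processed HDFS file path is recorded in a local SQLite table, _retry_process_file skips paths that are already present, and the table is cleared once the whole run succeeds. A small sketch of pre-seeding a run so parts that are already done get skipped (the paths are placeholders):

    from re_common.v2.baselibrary.tools.resume_tracker import ResumeTracker

    tracker = ResumeTracker("my_job.db")
    # mark parts 0-99 as done so a restarted job will not reprocess them
    tracker.mark_many_processed(f"/data/in/part-{n:05d}.gz" for n in range(100))
    print(tracker.get_processed_count())  # 100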
@@ -33,6 +33,12 @@ DB_CONFIG1 = {
      'echo': False,  # print SQL statements
  }

+ async def get_pool_only(_DB_CONFIG: dict = None):
+     global DB_CONFIG
+     if _DB_CONFIG is not None:
+         DB_CONFIG = _DB_CONFIG
+     pool: Pool = await aiomysql.create_pool(**DB_CONFIG)
+     return pool


  @asynccontextmanager
  async def get_db_pool(_DB_CONFIG: dict = None):
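
Unlike the existing get_db_pool async context manager, the new get_pool_only returns a bare aiomysql pool and leaves its lifetime to the caller, which matches the pool_factory hook of the HDFS processors (one pool created lazily per worker process). A sketch of standalone use with manual cleanup, assuming the module path from the RECORD entry for utils/db.py; the connection settings are placeholders:

    import asyncio

    from re_common.v2.baselibrary.utils.db import get_pool_only

    async def main():
        pool = await get_pool_only({"host": "127.0.0.1", "port": 3306,
                                    "user": "root", "password": "...", "db": "test"})
        try:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute("SELECT 1")
                    print(await cur.fetchone())
        finally:
            pool.close()              # stop handing out new connections
            await pool.wait_closed()  # wait for connections in use to be released

    asyncio.run(main())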
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: re_common
- Version: 10.0.28
+ Version: 10.0.30
  Summary: a library about all python projects
  Home-page: https://gitee.com/xujiangios/re-common
  Author: vic
@@ -11,6 +11,14 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.6
  Description-Content-Type: text/markdown
  License-File: LICENSE
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: requires-python
+ Dynamic: summary


  This is a foundation library that depends on many third-party packages; it wraps the commonly used ones so that projects can be built on top of it quickly
@@ -166,7 +166,7 @@ re_common/v2/baselibrary/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
  re_common/v2/baselibrary/business_utils/BusinessStringUtil.py,sha256=njPcRgeBWpnZr5u2cPAO4qdWBq-CgTn99rJuvWFcChk,6788
  re_common/v2/baselibrary/business_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  re_common/v2/baselibrary/business_utils/baseencodeid.py,sha256=3f52e0jtgCFzPEyReia8TupwiE64t_VyBT-a7uQCXAY,3595
- re_common/v2/baselibrary/business_utils/full_doi_path.py,sha256=PaMIrgDWWt_fzSFyvvDD-8CcYZJTNo6Pj-uR0WafNbY,3319
+ re_common/v2/baselibrary/business_utils/full_doi_path.py,sha256=vsoS1ZGyNzeORon_z1sHt1M41sS22pvJHMgWJH3xZ-M,3378
  re_common/v2/baselibrary/business_utils/rel_tools.py,sha256=LfnGFCkUSxg1SHvOMOQdP1PiHxIKqk7Syuk5YYpjJag,295
  re_common/v2/baselibrary/decorators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  re_common/v2/baselibrary/decorators/utils.py,sha256=Q4D6KKCQxvNBXZkPQQn14keKKJpGtg8TUSakjJU40s0,2056
@@ -176,10 +176,15 @@ re_common/v2/baselibrary/s3object/baseboto3.py,sha256=mXuIFx99pnrPGQ4LJCZwlN1HLb
  re_common/v2/baselibrary/tools/WeChatRobot.py,sha256=sKBt2gPsfj0gzV6KaLSAhIhL-j3qNfHfqE-lII1LVwM,3537
  re_common/v2/baselibrary/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  re_common/v2/baselibrary/tools/ac_ahocorasick.py,sha256=c63y5RtKVLD37nyPCnBqfNygwRj4gTQqyIdDOrC65G0,2847
+ re_common/v2/baselibrary/tools/concurrency.py,sha256=ctKBoeEbq1SGmhPp7oVR_QSXTKVAGLMByAHQKI6dAqU,981
  re_common/v2/baselibrary/tools/dict_tools.py,sha256=eSMwPTLp3oSjuviC_wlXg0I-dnkkmZfUfCRLX5djWV8,1365
  re_common/v2/baselibrary/tools/dolphinscheduler.py,sha256=1m7UGYDiuvJUCI6ik6CGM2fO8U5XteJzn55VRbwB9ts,7978
+ re_common/v2/baselibrary/tools/hdfs_base_processor.py,sha256=WVYBwQ3gUm5jrDV_Ar9Rv0VwJCMODkGByY19HP6fZCA,8405
+ re_common/v2/baselibrary/tools/hdfs_bulk_processor.py,sha256=8FjuZbcBXC_27zEQonLKvb0fMGwcIH9MPRTOmdMAuLU,2396
  re_common/v2/baselibrary/tools/hdfs_data_processer.py,sha256=g0DaNjXM1hIUblFQ6YBwnwEBKIXn48X8Y9Eiok4dVlQ,14824
+ re_common/v2/baselibrary/tools/hdfs_line_processor.py,sha256=h1J_mOPoNvsjw7zYMsD7rr0Q6bXvVzo9tRJZVAbei1s,2732
  re_common/v2/baselibrary/tools/list_tools.py,sha256=1NxGVM4EytSXh4IGAEfZQnvq0Ev-UOF-PGZBg2EQbOg,2132
+ re_common/v2/baselibrary/tools/resume_tracker.py,sha256=h3WyeIX0L2Q-O9AkCvKWJg48HP8Z8qILTowXzbRrB7c,3246
  re_common/v2/baselibrary/tools/search_hash_tools.py,sha256=2ENLtZE8opRsfkwRtTNMzITmpTsjO7wZ1ZkfkqpOH9U,1937
  re_common/v2/baselibrary/tools/text_matcher.py,sha256=cPMoFxaA0-ce3tLRxVSs8_3pTYS1oVIHDnNy_AlPU-4,10756
  re_common/v2/baselibrary/tools/unionfind_tools.py,sha256=VYHZZPXwBYljsm7TjV1B6iCgDn3O3btzNf9hMvQySVU,2965
@@ -197,7 +202,7 @@ re_common/v2/baselibrary/utils/basedict.py,sha256=sH3_RZ8u4649-jX2V1uKNNkjJVUijZ
  re_common/v2/baselibrary/utils/basehdfs.py,sha256=TPwFct_-UrmO1KCbo4gpV77rsnlCQDumNBbQKL0ZI9o,5953
  re_common/v2/baselibrary/utils/basepika.py,sha256=ifOb3UsGj79k40aD9UK6-5BMPw43ZAo0SO3AYD4q4vw,7332
  re_common/v2/baselibrary/utils/basetime.py,sha256=b7U_ho6nE3fjYBxSkdMHXUOd3ClH6KkW_7p7l2Gs4gA,3038
- re_common/v2/baselibrary/utils/db.py,sha256=ouDagXqqY9h4ucK4LDGrYVY-31rOiBQFxXLIlio9AJA,2297
+ re_common/v2/baselibrary/utils/db.py,sha256=ceXTLGI0JfZQm06gl-hnQww6Lw4IuwkgeehYFo7bVlA,2509
  re_common/v2/baselibrary/utils/json_cls.py,sha256=M93piYtmgm_wP8E57culTrd_AhHLoGg6PqeAJYdW2SM,438
  re_common/v2/baselibrary/utils/mq.py,sha256=UHpO8iNIHs91Tgp-BgnSUpZwjWquxrGLdpr3FMMv2zw,2858
  re_common/v2/baselibrary/utils/n_ary_expression_tree.py,sha256=-05kO6G2Rth7CEK-5lfFrthFZ1Q0-0a7cni7mWZ-2gg,9172
@@ -231,8 +236,8 @@ re_common/vip/title/transform/TransformRegulationTitleToZt.py,sha256=LKRdIsWKues
  re_common/vip/title/transform/TransformStandardTitleToZt.py,sha256=-fCKAbSBzXVyQDCE61CalvR9E_QzQMA08QOO_NePFNI,5563
  re_common/vip/title/transform/TransformThesisTitleToZt.py,sha256=QS-uV0cQrpUFAcKucuJQ9Ue2VRQH-inmfn_X3IplfRo,5488
  re_common/vip/title/transform/__init__.py,sha256=m83-CWyRq_VHPYHaALEQlmXrkTdrZ3e4B_kCfBYE-uc,239
- re_common-10.0.28.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
- re_common-10.0.28.dist-info/METADATA,sha256=6LyRvl5fLSmKd4qNyZFc72DkO8_hyJ6FQ27dba4PEvc,582
- re_common-10.0.28.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- re_common-10.0.28.dist-info/top_level.txt,sha256=_H9H23zoLIalm1AIY_KYTVh_H0ZnmjxQIxsvXtLv45o,10
- re_common-10.0.28.dist-info/RECORD,,
+ re_common-10.0.30.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+ re_common-10.0.30.dist-info/METADATA,sha256=DiZvXFGwxsVgDiegqZsCWgUpx1r-KZg51ajXGUso-E4,764
+ re_common-10.0.30.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ re_common-10.0.30.dist-info/top_level.txt,sha256=_H9H23zoLIalm1AIY_KYTVh_H0ZnmjxQIxsvXtLv45o,10
+ re_common-10.0.30.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any