re_common-2.0.1-py3-none-any.whl → re_common-10.0.0-py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- re_common/baselibrary/__init__.py +4 -0
- re_common/baselibrary/baseabs/__init__.py +7 -0
- re_common/baselibrary/baseabs/baseabs.py +26 -0
- re_common/baselibrary/database/__init__.py +0 -0
- re_common/baselibrary/database/mbuilder.py +132 -0
- re_common/baselibrary/database/moudle.py +93 -0
- re_common/baselibrary/database/msqlite3.py +194 -0
- re_common/baselibrary/database/mysql.py +169 -0
- re_common/baselibrary/database/sql_factory.py +26 -0
- re_common/baselibrary/mthread/MThreadingRun.py +486 -0
- re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -0
- re_common/baselibrary/mthread/__init__.py +3 -0
- re_common/baselibrary/mthread/mythreading.py +695 -0
- re_common/baselibrary/pakge_other/__init__.py +0 -0
- re_common/baselibrary/pakge_other/socks.py +404 -0
- re_common/baselibrary/readconfig/__init__.py +0 -0
- re_common/baselibrary/readconfig/config_factory.py +18 -0
- re_common/baselibrary/readconfig/ini_config.py +317 -0
- re_common/baselibrary/readconfig/toml_config.py +49 -0
- re_common/baselibrary/temporary/__init__.py +0 -0
- re_common/baselibrary/temporary/envdata.py +36 -0
- re_common/baselibrary/tools/__init__.py +0 -0
- re_common/baselibrary/tools/all_requests/__init__.py +0 -0
- re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -0
- re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -0
- re_common/baselibrary/tools/all_requests/mrequest.py +412 -0
- re_common/baselibrary/tools/all_requests/requests_request.py +81 -0
- re_common/baselibrary/tools/batch_compre/__init__.py +0 -0
- re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -0
- re_common/baselibrary/tools/contrast_db3.py +123 -0
- re_common/baselibrary/tools/copy_file.py +39 -0
- re_common/baselibrary/tools/db3_2_sizedb3.py +102 -0
- re_common/baselibrary/tools/foreachgz.py +40 -0
- re_common/baselibrary/tools/get_attr.py +11 -0
- re_common/baselibrary/tools/image_to_pdf.py +62 -0
- re_common/baselibrary/tools/java_code_deal.py +139 -0
- re_common/baselibrary/tools/javacode.py +79 -0
- re_common/baselibrary/tools/mdb_db3.py +48 -0
- re_common/baselibrary/tools/merge_file.py +171 -0
- re_common/baselibrary/tools/merge_gz_file.py +165 -0
- re_common/baselibrary/tools/mhdfstools/__init__.py +0 -0
- re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -0
- re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -0
- re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -0
- re_common/baselibrary/tools/mongo_tools.py +50 -0
- re_common/baselibrary/tools/move_file.py +170 -0
- re_common/baselibrary/tools/move_mongo/__init__.py +0 -0
- re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -0
- re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -0
- re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -0
- re_common/baselibrary/tools/move_mongo/use_mv.py +93 -0
- re_common/baselibrary/tools/mpandas/__init__.py +0 -0
- re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -0
- re_common/baselibrary/tools/mpandas/pandas_visualization.py +8 -0
- re_common/baselibrary/tools/myparsel.py +104 -0
- re_common/baselibrary/tools/rename_dir_file.py +37 -0
- re_common/baselibrary/tools/sequoiadb_utils.py +398 -0
- re_common/baselibrary/tools/split_line_to_many.py +25 -0
- re_common/baselibrary/tools/stringtodicts.py +33 -0
- re_common/baselibrary/tools/workwechant_bot.py +84 -0
- re_common/baselibrary/utils/__init__.py +0 -0
- re_common/baselibrary/utils/baseaiohttp.py +296 -0
- re_common/baselibrary/utils/baseaiomysql.py +87 -0
- re_common/baselibrary/utils/baseallstep.py +191 -0
- re_common/baselibrary/utils/baseavro.py +19 -0
- re_common/baselibrary/utils/baseboto3.py +291 -0
- re_common/baselibrary/utils/basecsv.py +32 -0
- re_common/baselibrary/utils/basedict.py +133 -0
- re_common/baselibrary/utils/basedir.py +241 -0
- re_common/baselibrary/utils/baseencode.py +351 -0
- re_common/baselibrary/utils/baseencoding.py +29 -0
- re_common/baselibrary/utils/baseesdsl.py +86 -0
- re_common/baselibrary/utils/baseexcel.py +264 -0
- re_common/baselibrary/utils/baseexcept.py +109 -0
- re_common/baselibrary/utils/basefile.py +654 -0
- re_common/baselibrary/utils/baseftp.py +214 -0
- re_common/baselibrary/utils/basegzip.py +60 -0
- re_common/baselibrary/utils/basehdfs.py +135 -0
- re_common/baselibrary/utils/basehttpx.py +268 -0
- re_common/baselibrary/utils/baseip.py +87 -0
- re_common/baselibrary/utils/basejson.py +2 -0
- re_common/baselibrary/utils/baselist.py +32 -0
- re_common/baselibrary/utils/basemotor.py +190 -0
- re_common/baselibrary/utils/basemssql.py +98 -0
- re_common/baselibrary/utils/baseodbc.py +113 -0
- re_common/baselibrary/utils/basepandas.py +302 -0
- re_common/baselibrary/utils/basepeewee.py +11 -0
- re_common/baselibrary/utils/basepika.py +180 -0
- re_common/baselibrary/utils/basepydash.py +143 -0
- re_common/baselibrary/utils/basepymongo.py +230 -0
- re_common/baselibrary/utils/basequeue.py +22 -0
- re_common/baselibrary/utils/baserar.py +57 -0
- re_common/baselibrary/utils/baserequest.py +279 -0
- re_common/baselibrary/utils/baseset.py +8 -0
- re_common/baselibrary/utils/basesmb.py +403 -0
- re_common/baselibrary/utils/basestring.py +382 -0
- re_common/baselibrary/utils/basetime.py +320 -0
- re_common/baselibrary/utils/basetuple.py +0 -0
- re_common/baselibrary/utils/baseurl.py +121 -0
- re_common/baselibrary/utils/basezip.py +57 -0
- re_common/baselibrary/utils/core/__init__.py +8 -0
- re_common/baselibrary/utils/core/bottomutils.py +18 -0
- re_common/baselibrary/utils/core/mdeprecated.py +327 -0
- re_common/baselibrary/utils/core/mlamada.py +16 -0
- re_common/baselibrary/utils/core/msginfo.py +25 -0
- re_common/baselibrary/utils/core/requests_core.py +103 -0
- re_common/baselibrary/utils/fateadm.py +429 -0
- re_common/baselibrary/utils/importfun.py +123 -0
- re_common/baselibrary/utils/mfaker.py +57 -0
- re_common/baselibrary/utils/my_abc/__init__.py +3 -0
- re_common/baselibrary/utils/my_abc/better_abc.py +32 -0
- re_common/baselibrary/utils/mylogger.py +414 -0
- re_common/baselibrary/utils/myredisclient.py +861 -0
- re_common/baselibrary/utils/pipupgrade.py +21 -0
- re_common/baselibrary/utils/ringlist.py +85 -0
- re_common/baselibrary/utils/version_compare.py +36 -0
- re_common/baselibrary/utils/ydmhttp.py +126 -0
- re_common/facade/__init__.py +1 -0
- re_common/facade/lazy_import.py +11 -0
- re_common/facade/loggerfacade.py +25 -0
- re_common/facade/mysqlfacade.py +467 -0
- re_common/facade/now.py +31 -0
- re_common/facade/sqlite3facade.py +257 -0
- re_common/facade/use/__init__.py +0 -0
- re_common/facade/use/mq_use_facade.py +83 -0
- re_common/facade/use/proxy_use_facade.py +20 -0
- re_common/libtest/__init__.py +0 -0
- re_common/libtest/base_dict_test.py +19 -0
- re_common/libtest/baseavro_test.py +13 -0
- re_common/libtest/basefile_test.py +14 -0
- re_common/libtest/basemssql_test.py +77 -0
- re_common/libtest/baseodbc_test.py +8 -0
- re_common/libtest/basepandas_test.py +38 -0
- re_common/libtest/get_attr_test/__init__.py +0 -0
- re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -0
- re_common/libtest/get_attr_test/settings.py +55 -0
- re_common/libtest/idencode_test.py +54 -0
- re_common/libtest/iniconfig_test.py +35 -0
- re_common/libtest/ip_test.py +35 -0
- re_common/libtest/merge_file_test.py +20 -0
- re_common/libtest/mfaker_test.py +9 -0
- re_common/libtest/mm3_test.py +32 -0
- re_common/libtest/mylogger_test.py +89 -0
- re_common/libtest/myparsel_test.py +28 -0
- re_common/libtest/mysql_test.py +151 -0
- re_common/libtest/pymongo_test.py +21 -0
- re_common/libtest/split_test.py +12 -0
- re_common/libtest/sqlite3_merge_test.py +6 -0
- re_common/libtest/sqlite3_test.py +34 -0
- re_common/libtest/tomlconfig_test.py +30 -0
- re_common/libtest/use_tools_test/__init__.py +3 -0
- re_common/libtest/user/__init__.py +5 -0
- re_common/studio/__init__.py +5 -0
- re_common/studio/assignment_expressions.py +37 -0
- re_common/studio/mydash/__init__.py +0 -0
- re_common/studio/mydash/test1.py +19 -0
- re_common/studio/pydashstudio/__init__.py +0 -0
- re_common/studio/pydashstudio/first.py +9 -0
- re_common/studio/streamlitstudio/__init__.py +0 -0
- re_common/studio/streamlitstudio/first_app.py +66 -0
- re_common/studio/streamlitstudio/uber_pickups.py +24 -0
- re_common/studio/test.py +19 -0
- re_common/vip/__init__.py +0 -0
- re_common/vip/base_step_process.py +11 -0
- re_common/vip/baseencodeid.py +91 -0
- re_common/vip/changetaskname.py +28 -0
- re_common/vip/core_var.py +24 -0
- re_common/vip/mmh3Hash.py +90 -0
- re_common/vip/proxy/__init__.py +0 -0
- re_common/vip/proxy/allproxys.py +127 -0
- re_common/vip/proxy/allproxys_thread.py +159 -0
- re_common/vip/proxy/cnki_proxy.py +153 -0
- re_common/vip/proxy/kuaidaili.py +87 -0
- re_common/vip/proxy/proxy_all.py +113 -0
- re_common/vip/proxy/update_kuaidaili_0.py +42 -0
- re_common/vip/proxy/wanfang_proxy.py +152 -0
- re_common/vip/proxy/wp_proxy_all.py +182 -0
- re_common/vip/read_rawid_to_txt.py +92 -0
- re_common/vip/title/__init__.py +5 -0
- re_common/vip/title/transform/TransformBookTitleToZt.py +125 -0
- re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -0
- re_common/vip/title/transform/TransformCstadTitleToZt.py +196 -0
- re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -0
- re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -0
- re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -0
- re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -0
- re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -0
- re_common/vip/title/transform/__init__.py +11 -0
- {re_common-2.0.1.dist-info → re_common-10.0.0.dist-info}/METADATA +1 -1
- re_common-10.0.0.dist-info/RECORD +213 -0
- re_common-2.0.1.dist-info/RECORD +0 -25
- {re_common-2.0.1.dist-info → re_common-10.0.0.dist-info}/LICENSE +0 -0
- {re_common-2.0.1.dist-info → re_common-10.0.0.dist-info}/WHEEL +0 -0
- {re_common-2.0.1.dist-info → re_common-10.0.0.dist-info}/top_level.txt +0 -0
re_common/baselibrary/tools/merge_gz_file.py
@@ -0,0 +1,165 @@

"""
Merge multiple files into one compressed file.
Method 1: read and write line by line
Method 2: read and write in blocks

"""

###########################################
# make the base package importable when this script is run from inside the project
import os
import sys

filepath = os.path.abspath(__file__)
pathlist = filepath.split(os.sep)
pathlist = pathlist[:-4]
TopPath = os.sep.join(pathlist)
sys.path.insert(0, TopPath)

############################################

import time
import gzip

from re_common.baselibrary.utils.basedir import BaseDir
from re_common.baselibrary.utils.basefile import BaseFile


class MergeGzFile(object):
    def __init__(self):
        self.outPathFile = None
        self.a_num = 1000
        self.b_num = 20000

        self.a_size = 1024 * 1024 * 1024
        self.block_size = 2 * 1024 * 1024 * 1024

    def get_outpathfile(self, dst):
        BaseDir.create_dir(dst)
        now_time = time.strftime('%Y%m%d', time.localtime())
        self.filename = "{}.gz".format(now_time)
        self.outPathFile = BaseFile.get_new_path(dst, self.filename)
        return self.outPathFile

    def line_all_2_one_gz(self, src, dst, dst_size=None, a_num=None, b_num=None):
        """
        :param src: input path
        :param dst: output path
        :param dst_size: maximum size of a single output file
        :param a_num: line-count interval for progress messages (default: every 1000 lines)
        :param b_num: line-count interval for printing the paths (default: every 20000 lines)
        :return:
        """
        # program start time
        StartTime = time.time()
        if a_num is None:
            a_num = self.a_num
        if b_num is None:
            b_num = self.b_num
        count_all = 0
        dst_count = 0
        src_file_num = 0
        src_file_location = 0
        if self.outPathFile is None:
            self.get_outpathfile(dst)

        f = gzip.open(self.outPathFile, "wb")

        for files in BaseDir.get_dir_all_files(src):
            src_file_num += 1

        for files in BaseDir.get_dir_all_files(src):
            # start time for this file
            start_time = time.time()
            src_file_location += 1
            print("next source_path:" + files)

            count = 0
            for line in BaseFile.read_file_rb_mode_yield(files):
                count += 1
                count_all += 1
                dst_count += 1

                f.write(line)

                if count % a_num == 1:
                    print("process_src:%d/%d, Time total:%s, source_count: %d, dst_count: %d, allcount: %d" % (src_file_location, src_file_num, (repr(time.time() - start_time)), count, dst_count, count_all))
                if count % b_num == 1:
                    print("source_path: %s , dst_path:%s" % (files, self.outPathFile))
                if dst_size is not None:
                    if BaseFile.get_file_size(self.outPathFile) >= dst_size:
                        f.close()
                        print("cut dst file, now:source_count: %d,dst_count: %d, allcount: %d" % (count, dst_count, count_all))
                        dst_count = 0
                        self.outPathFile = BaseFile.change_file(self.outPathFile, size=dst_size, ending='gz')
                        # self.outPathFile = self.outPathFile.replace(".big_json", '.gz')
                        f = gzip.open(self.outPathFile, "wb")
                        print("new dst_path: %s" % self.outPathFile)

            print("finish one file;Time total:%s, process_src:%d/%d, source_count: %d, dst_count: %d, allcount: %d" % (repr(time.time() - start_time), src_file_location, src_file_num, count, dst_count, count_all))
        # total elapsed time at the end
        print('\nProce Over\nTime total:' + repr(time.time() - StartTime) + '\n')

    def block_all_2_one_gz(self, src, dst, dst_size=None, block_size=None, a_size=None):
        """
        Read small files block by block and write them into the output file.
        :param src: input path
        :param dst: output path
        :param source_size: input files larger than source_size are copied over directly
        :param dst_size: maximum size of a single output file (only checked after an input file has been fully processed)
        :param block_size: block size used for reading
        :param a_size: size interval for progress messages (default: once per 1 GB)
        :return:
        """
        # program start time
        StartTime = time.time()
        src_file_num = 0
        src_all_size = 0
        src_file_location = 0
        if a_size is None:
            a_size = self.a_size
        if block_size is None:
            block_size = self.block_size

        if self.outPathFile is None:
            self.get_outpathfile(dst)

        f = gzip.open(self.outPathFile, "wb")

        for files in BaseDir.get_dir_all_files(src):
            src_file_num += 1
        for files in BaseDir.get_dir_all_files(src):
            # start time for this file
            src_all_size += BaseFile.get_file_size(files)
            dst_file_size = 0
            start_time = time.time()
            src_file_location += 1
            print("next source_path:" + files)

            for block in BaseFile.read_file_rb_block(files, BLOCK_SIZE=block_size):
                f.write(block)

                if BaseFile.get_file_size(self.outPathFile) % a_size == 1:
                    print("process_src:%d/%d, dst_size:%d, src_size:%d, src_all_size:%d, Time total:%.2f/%.2f" % (src_file_location, src_file_num, dst_file_size, BaseFile.get_file_size(files), src_all_size, (time.time() - start_time), (time.time() - StartTime)))
                    dst_file_size = BaseFile.get_file_size(self.outPathFile)
            print("finish one file;process_src:%d/%d, dst_size:%d, src_size:%d, src_all_size:%d, Time total:%.2f/%.2f" % (src_file_location, src_file_num, dst_file_size, BaseFile.get_file_size(files), src_all_size, (time.time() - start_time), (time.time() - StartTime)))
            if dst_size is not None:
                if BaseFile.get_file_size(self.outPathFile) >= dst_size:
                    f.close()
                    self.outPathFile = BaseFile.change_file(self.outPathFile, size=dst_size, ending='gz')
                    f = gzip.open(self.outPathFile, "wb")
                    print("new dst_path: %s" % self.outPathFile)
                    dst_file_size = 0
        # total elapsed time at the end
        print('\nProce Over\nTime total:' + repr(time.time() - StartTime) + '\n')


if __name__ == '__main__':
    old_path = r'\\192.168.31.188\download\cnki_qk\download\get_ref\input'
    new_path = r'\\192.168.31.188\download\cnki_qk\download\get_ref\output'
    a = MergeGzFile()
    a.line_all_2_one_gz(old_path, new_path, dst_size=1024*1024*100)
    # a.block_all_2_one(old_path,new_path,dst_size=1024*1024*10)
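For orientation, the core pattern of line_all_2_one_gz above (stream every file under an input directory into a rolling .gz, cutting over to a new output once the current file exceeds dst_size) can be sketched with the standard library alone. This is a minimal illustration of the idea and not part of the package; merge_lines_to_gz, the output naming scheme and the default cutoff are invented for the example.

import glob
import gzip
import os


def merge_lines_to_gz(src_dir, dst_dir, dst_size=100 * 1024 * 1024):
    """Minimal sketch: merge every file under src_dir into rolling .gz outputs."""
    os.makedirs(dst_dir, exist_ok=True)
    part = 0
    out_path = os.path.join(dst_dir, "merged_%03d.gz" % part)
    out = gzip.open(out_path, "wb")
    for src in sorted(glob.glob(os.path.join(src_dir, "**", "*"), recursive=True)):
        if not os.path.isfile(src):
            continue
        with open(src, "rb") as fh:
            for line in fh:
                out.write(line)
                # cut over to a new output once the compressed file on disk passes dst_size
                # (the on-disk size can lag slightly behind because of gzip buffering)
                if os.path.getsize(out_path) >= dst_size:
                    out.close()
                    part += 1
                    out_path = os.path.join(dst_dir, "merged_%03d.gz" % part)
                    out = gzip.open(out_path, "wb")
    out.close()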
re_common/baselibrary/tools/mhdfstools/__init__.py (file without changes)
re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py
@@ -0,0 +1,42 @@

import sys

from re_common.baselibrary.baseabs import BaseAbs
from re_common.baselibrary.utils.basehdfs import BaseHDFS


class DownHdfs(object):

    def __init__(self, toml_file):
        self.toml_file = toml_file
        self.dicts = {}
        self.basehdfs = None

    def read_toml(self):
        dicts = BaseAbs.get_config_factory().toml_factory().set_config_path(
            self.toml_file).read_file_remove_bom().get_dicts()
        self.dicts = dicts
        return self

    def set_basehdfs(self, i=0):
        self.basehdfs = BaseHDFS()
        self.basehdfs.hdfsdir = self.dicts["PathPair"][i][0]
        self.basehdfs.localdir = self.dicts["PathPair"][i][1]
        self.basehdfs.user_name = self.dicts["UserName"]
        self.basehdfs.namenode = self.dicts["NameNode"]
        return self

    def down_files(self):
        self.basehdfs.get_client()
        self.basehdfs.get_all_files_num()
        self.basehdfs.get_all_files()

    def use(self):
        self.read_toml()
        for i in range(len(self.dicts["PathPair"])):
            self.set_basehdfs(i)
            self.down_files()


if __name__ == "__main__":
    a = sys.argv[1]
    DownHdfs(a).use()
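The TOML file consumed by read_toml and set_basehdfs is not included in this diff. Judging only from the keys accessed above, it carries a PathPair list of [hdfs_dir, local_dir] pairs plus UserName and NameNode. A hypothetical sketch of the structure read_toml() would need to produce, with every value invented for illustration:

# Hypothetical structure of the dict that read_toml() is expected to produce.
# Every value below is invented for illustration.
hdfs_download_config = {
    # each pair is [hdfs_dir, local_dir]; set_basehdfs(i) selects pair i
    "PathPair": [
        ["/RawData/example/part1", r"D:\data\part1"],
        ["/RawData/example/part2", r"D:\data\part2"],
    ],
    "UserName": "hdfs_user",
    "NameNode": "http://namenode.example:50070",
}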
re_common/baselibrary/tools/mhdfstools/hdfst.py
@@ -0,0 +1,42 @@

from re_common.baselibrary.baseabs import BaseAbs
from re_common.baselibrary.utils.basehdfs import BaseHDFS


class HdfsTools(object):
    def __init__(self):
        self.toml_file = "config_down.toml"
        self.dicts = {}
        self.basehdfs = None

    def read_toml(self):
        dicts = BaseAbs.get_config_factory().toml_factory().set_config_path(
            self.toml_file).read_file_remove_bom().get_dicts()
        self.dicts = dicts
        return self

    def set_basehdfs(self, i=0):
        self.basehdfs = BaseHDFS()
        self.basehdfs.hdfsdir = self.dicts["PathPair"][i][0]
        self.basehdfs.localdir = self.dicts["PathPair"][i][1]
        self.basehdfs.user_name = self.dicts["UserName"]
        self.basehdfs.namenode = self.dicts["NameNode"]
        return self

    def builder(self):
        self.basehdfs.get_client()

    def create_dirs(self):
        self.basehdfs.get_client()
        print(self.basehdfs.mk_hdfs_dirs(r"/RawData/wanfang/qk/ref/big_json/20200421"))

    def exists(self):
        print(self.basehdfs.exists(r"/RawData/wanfang/qk/ref/big_json/20200421"))

    def run(self):
        self.read_toml()
        self.set_basehdfs()
        self.builder()
        self.exists()


HdfsTools().run()
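BaseHDFS is defined elsewhere in the package (re_common/baselibrary/utils/basehdfs.py, not expanded in this diff), so its exact behaviour is not shown here. Purely as an illustration, and assuming a reachable WebHDFS endpoint, the same mkdir-and-exists check could be written directly against the hdfs PyPI client; the URL, user and path below are placeholders.

from hdfs import InsecureClient  # pip install hdfs

# placeholder WebHDFS endpoint and user; adjust to the real namenode address
client = InsecureClient("http://namenode.example:50070", user="hdfs_user")

path = "/RawData/wanfang/qk/ref/big_json/20200421"
client.makedirs(path)                                  # no-op if the directory already exists
print(client.status(path, strict=False) is not None)   # True when the path exists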
re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py
@@ -0,0 +1,38 @@

from re_common.baselibrary.baseabs import BaseAbs
from re_common.baselibrary.utils.basehdfs import BaseHDFS


class DownHdfs(object):

    def __init__(self):
        self.toml_file = "config_up.toml"
        self.dicts = {}
        self.basehdfs = None

    def read_toml(self):
        dicts = BaseAbs.get_config_factory().toml_factory().set_config_path(
            self.toml_file).read_file_remove_bom().get_dicts()
        self.dicts = dicts
        return self

    def set_basehdfs(self, i=0):
        self.basehdfs = BaseHDFS()
        self.basehdfs.localdir = self.dicts["PathPair"][i][0]
        self.basehdfs.hdfsdir = self.dicts["PathPair"][i][1]
        self.basehdfs.user_name = self.dicts["UserName"]
        self.basehdfs.namenode = self.dicts["NameNode"]
        return self

    def up_files(self):
        self.basehdfs.get_client()
        self.basehdfs.get_all_files_num()
        self.basehdfs.up_all_files()

    def use(self):
        self.read_toml()
        for i in range(len(self.dicts["PathPair"])):
            self.set_basehdfs(i)
            self.up_files()


DownHdfs().use()
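Note that the upload script reads its path pairs in the opposite order from the download script: here PathPair[i][0] is the local directory and PathPair[i][1] is the HDFS directory. A hypothetical config_up.toml, again expressed as the dict the loader would return (all values invented):

# Hypothetical structure for config_up.toml, shown as the dict read_toml() would return.
# Every value is invented for illustration.
hdfs_upload_config = {
    # for uploads each pair is [local_dir, hdfs_dir], the reverse of config_down.toml
    "PathPair": [
        [r"D:\data\out\part1", "/RawData/example/upload/part1"],
    ],
    "UserName": "hdfs_user",
    "NameNode": "http://namenode.example:50070",
}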
re_common/baselibrary/tools/mongo_tools.py
@@ -0,0 +1,50 @@

import time

from re_common.baselibrary.utils.basemotor import BaseMotor
from re_common.baselibrary.utils.basepymongo import BasePyMongo


class MongoTools(object):
    def __init__(self, conn_str, db_name, col):
        self.conn_str = conn_str
        self.db_name = db_name
        self.col = col
        self.bs_conn = None

    def conn_mongo_base(self):
        self.bs_conn = BasePyMongo(self.conn_str)
        self.bs_conn.use_db(self.db_name)
        self.bs_conn.create_col(self.col)

    def conn_mongo_motor(self):
        self.bs_conn = BaseMotor()
        self.bs_conn.AsyncIOMotorClient(self.conn_str, self.db_name)
        self.bs_conn.get_col(self.col)

    def get_first_id(self, query):
        for item in self.bs_conn.find({"sub_db_id": "00075", "_id": {"$lt": ""}},
                                      {"_id": 1}).skip(0).limit(1):
            return item["_id"]

    def get_ids(self, ids, query):
        count = 0
        while True:
            lists = []
            query_temp = query.update({"_id": {"$gt": ids}})
            for i in self.bs_conn.find(query_temp, {"_id": 1}).sort([("_id", 1)]).limit(1000000):
                count = count + 1
                ids = i["_id"]
                lists.append((i["_id"]))
                if len(lists) % 10000 == 1:
                    print(len(lists))


if __name__ == "__main__":
    # mt = MongoTools(
    #     conn_str="mongodb://datahouse:vipdatacenter@192.168.31.243:32920/data_warehouse.base_obj_meta_a?authSource=data_warehouse",
    #     db_name="data_warehouse", col="base_obj_meta_a")
    mt = MongoTools(
        conn_str="mongodb://datahouse:vipdatacenter@192.168.31.208:32920,192.168.31.206:32920,192.168.31.243:32920/data_warehouse.base_obj_meta_a?authSource=data_warehouse",
        db_name="data_warehouse", col="base_obj_meta_a")
    mt.conn_mongo_base()
    print(mt.get_first_id({"sub_db_id": "00075"}))
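Two caveats in the code above: get_first_id ignores its query argument and hard-codes the filter, and in get_ids the expression query.update(...) returns None (dict.update mutates in place), so query_temp is always None and the while loop never terminates. A sketch of the same id-walking pattern written directly against pymongo, with the filter copied instead of mutated, might look like the following; the connection string, database and collection names are placeholders.

from pymongo import MongoClient


def walk_ids(conn_str, db_name, col_name, base_query, start_id="", batch=100000):
    """Yield ascending batches of _id values using keyset pagination."""
    col = MongoClient(conn_str)[db_name][col_name]
    last_id = start_id
    while True:
        query = dict(base_query, _id={"$gt": last_id})  # copy the filter instead of dict.update()
        ids = [doc["_id"] for doc in
               col.find(query, {"_id": 1}).sort([("_id", 1)]).limit(batch)]
        if not ids:
            break  # nothing left after last_id
        yield ids
        last_id = ids[-1]


# usage sketch with placeholder connection values:
# for chunk in walk_ids("mongodb://user:pass@host:27017/?authSource=data_warehouse",
#                       "data_warehouse", "base_obj_meta_a", {"sub_db_id": "00075"}):
#     print(len(chunk), chunk[-1])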
re_common/baselibrary/tools/move_file.py
@@ -0,0 +1,170 @@

import time

from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun2
from re_common.baselibrary.mthread.mythreading import ThreadInfo
from re_common.baselibrary.utils.basedir import BaseDir
from re_common.baselibrary.utils.basefile import BaseFile
from re_common.baselibrary.utils.basequeue import BaseQueue
from re_common.facade.now import get_streamlogger, get_filelogger


def copy_one_file(fullname, sources_path, dst_path):
    """
    Copy a file; the source file is not deleted.
    :param fullname:
    :param sources_path:
    :param dst_path:
    :return:
    """
    fullname_size = BaseFile.get_file_size(fullname)
    # replace the source prefix to get the destination file path
    dst_file = fullname.replace(sources_path, dst_path)
    # create the target directory if it does not exist
    dst_dir = BaseDir.get_file_dir_absolute(dst_file)
    BaseDir.create_dir(dst_dir)

    if BaseFile.is_file_exists(dst_file):
        dst_dir_size = BaseFile.get_file_size(dst_file)
        if fullname_size == dst_dir_size:
            return True
    # copy the file
    BaseFile.copy_file_to_file(fullname, dst_file)
    dst_dir_size = BaseFile.get_file_size(dst_file)
    # the copy only counts as successful when the sizes match
    # size comparison is not reliable for transfers between Windows and Linux, where sizes can differ
    print(fullname_size, dst_dir_size)
    if fullname_size == dst_dir_size:
        return True
    else:
        return False


def move_one_file(fullname, sources_path, dst_path):
    """
    Move a file; the source file is deleted.
    :param fullname:
    :param sources_path:
    :param dst_path:
    :return:
    """
    fullname_size = BaseFile.get_file_size(fullname)
    # replace the source prefix to get the destination file path
    dst_file = fullname.replace(sources_path, dst_path)
    # create the target directory if it does not exist
    dst_dir = BaseDir.get_file_dir_absolute(dst_file)
    BaseDir.create_dir(dst_dir)
    if BaseFile.is_file_exists(dst_file):
        dst_dir_size = BaseFile.get_file_size(dst_file)
        if fullname_size == dst_dir_size:
            BaseFile.remove_file(fullname)
    # copy the file
    BaseFile.copy_file_to_file(fullname, dst_file)
    dst_dir_size = BaseFile.get_file_size(dst_file)
    # only delete the source file when the sizes match
    if fullname_size == dst_dir_size:
        BaseFile.remove_file(fullname)


def move_file(sources_path, dst_path):
    """
    Move files; the source files are deleted.
    :param sources_path:
    :param dst_path:
    :return:
    """
    k = 0
    for fullname in BaseDir.get_dir_all_files(sources_path):
        k += 1
        move_one_file(fullname, sources_path, dst_path)
        if k % 1000 == 0:
            print("已经拷贝{}个文件".format(k))
    print("已经拷贝{}个文件".format(k))


class MoveFile(object):

    def __init__(self):
        self.sources_path = ''
        self.dst_path = ''


class MoveFileThread(MThreadingRun2):
    def __init__(self, num, mf):
        super(MoveFileThread, self).__init__(num)
        self.mf = mf
        self.k = 0
        self.file_logger = get_filelogger(r"F:\fun2\log")
        self.thread_pool.work_queue = BaseQueue(1000)

    def create_dir(self):
        """
        Create all directories first, to avoid directory-creation conflicts between threads.
        :return:
        """
        for dir in BaseDir.get_dir_all_dir(self.mf.sources_path):
            new_dir = dir.replace(self.mf.sources_path, self.mf.dst_path)
            print(new_dir)
            BaseDir.create_dir(new_dir)

    def set_task(self, threadval, *args, **kwargs):
        self.create_dir()
        for fullname in BaseDir.get_dir_all_files(self.mf.sources_path):
            self.k = self.k + 1
            self.add_job(self.func, fullname)
            if self.k % 1000 == 0:
                print("已经拷贝{}个文件".format(self.k))
        while self.get_thread_stat():
            time.sleep(1)

    def get_thread_stat(self):
        """
        If this returns False, every thread is waiting for tasks, which means all work is done.
        :return:
        """
        for k, v in self.thread_pool.thread_pool_dicts.items():
            threadinfo = v
            thread = threadinfo.get_thread()
            if thread.runstatus is not False:
                return True
        return False

    def deal_results(self, threadval, *args, **kwargs):
        time.sleep(60)

    def setProxy(self, threadval, proxysList=None):
        time.sleep(60)

    def is_break(self):
        time.sleep(5)
        if self.thread_pool.work_queue.is_empty() and self.thread_pool.result_queue.is_empty() and not self.get_thread_stat():
            for k, v in self.thread_pool.thread_pool_dicts.items():
                if k == self.etn.taskthreadname:
                    threadinfo = v
                    thread = threadinfo.get_thread()
                    if thread.is_alive():
                        return False
            return True
        else:
            return False

    def thread_pool_hook(self, threadinfo: ThreadInfo):
        # set the proxy thread not to restart; by default it would be restarted
        if threadinfo.get_thread_name() == self.etn.taskthreadname:
            threadinfo.set_is_restart(False)
        return {}

    def fun(self, threadval, *args, **kwargs):
        fullname = args[0]
        copy_one_file(fullname, self.mf.sources_path, self.mf.dst_path)
        # self.file_logger.info(fullname + "\n")


if __name__ == "__main__":
    mf = MoveFile()
    mf.sources_path = r"F:\fun2\test_gif"
    # mf.dst_path = r"F:\fun2\test_gif2"
    mf.dst_path = r"\\192.168.31.123\home\cjvip\qinym\soopat\image"
    # mf.dst_path = r"\\192.168.31.177\down_data\test"

    mft = MoveFileThread(30, mf)
    mft.run()
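MoveFileThread drives copy_one_file through the package's own MThreadingRun2 pool, including the proxy and restart hooks that the pool requires. For comparison only, the same copy-and-verify-by-size loop can be expressed with the standard library's concurrent.futures; this sketch is not equivalent to the package code (no restart handling, no work-queue throttling) and the paths in the commented call are placeholders.

import os
import shutil
from concurrent.futures import ThreadPoolExecutor


def copy_verified(src_file, src_root, dst_root):
    """Copy one file, mirroring the directory layout, and verify the copy by size."""
    dst_file = os.path.join(dst_root, os.path.relpath(src_file, src_root))
    os.makedirs(os.path.dirname(dst_file), exist_ok=True)
    if os.path.exists(dst_file) and os.path.getsize(dst_file) == os.path.getsize(src_file):
        return True  # already copied
    shutil.copy2(src_file, dst_file)
    return os.path.getsize(dst_file) == os.path.getsize(src_file)


def copy_tree_threaded(src_root, dst_root, workers=30):
    files = [os.path.join(dirpath, name)
             for dirpath, _, names in os.walk(src_root) for name in names]
    with ThreadPoolExecutor(max_workers=workers) as pool:
        results = list(pool.map(lambda f: copy_verified(f, src_root, dst_root), files))
    print("copied %d/%d files" % (sum(results), len(results)))


# copy_tree_threaded(r"F:\fun2\test_gif", r"\\server\share\target")  # placeholder paths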
re_common/baselibrary/tools/move_mongo/__init__.py (file without changes)
re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py
@@ -0,0 +1,63 @@

import asyncio
import gzip
import json

from re_common.baselibrary.utils.basefile import BaseFile
from re_common.baselibrary.utils.basegzip import BaseGzip
from re_common.baselibrary.utils.basemotor import BaseMotor


class Configs(object):

    def __init__(self):
        self.mgdb_conn_motor = "mongodb://cjrw:vipdatacenter@192.168.31.243:32920,192.168.31.206:32920,192.168.31.208:32920/?authSource=htmljson"
        self.mgdb_db = "htmljson"
        self.mgdb_col = "other"
        self.filepath = r"F:\fun3\up\detail\mbalib.big_json.gz"
        self.query = {}
        self.feild = None
        self.one_file_num = 100000


class MongoToFile(object):
    def __init__(self, conf):
        self.conf = conf
        self.file_open = None
        self.i = 0

    def init_conn_mongodb(self):
        self.bs = BaseMotor()
        self.bs.AsyncIOMotorClient(
            self.conf.mgdb_conn_motor,
            self.conf.mgdb_db)
        self.bs.get_col(self.conf.mgdb_col)

    def open_file(self):
        i = BaseGzip.get_gz_line_num(self.conf.filepath)
        if i >= self.conf.one_file_num:
            self.conf.filepath = BaseFile.get_new_filename(self.conf.filepath)
        self.file_open = gzip.open(self.conf.filepath, "wb")

    def close_file(self):
        self.file_open.close()

    def asyncio_run(self):
        asyncio.get_event_loop().run_until_complete(
            self.bs.find(self.dic_deal, self.conf.query, self.conf.feild))
        self.close_file()

    def hook_doc(self, doc):
        return doc

    async def dic_deal(self, doc):
        doc = self.hook_doc(doc)
        line = json.dumps(doc, ensure_ascii=False) + '\n'
        lines = line.encode()
        self.file_open.write(lines)
        self.i = self.i + 1
        print(self.i)
        if self.i >= self.conf.one_file_num:
            self.close_file()
            self.conf.filepath = BaseFile.get_new_filename(self.conf.filepath)
            self.open_file()
            self.i = 0