re-common 10.0.22-py3-none-any.whl → 10.0.24-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- re_common/baselibrary/__init__.py +4 -4
- re_common/baselibrary/baseabs/__init__.py +6 -6
- re_common/baselibrary/baseabs/baseabs.py +26 -26
- re_common/baselibrary/database/mbuilder.py +132 -132
- re_common/baselibrary/database/moudle.py +93 -93
- re_common/baselibrary/database/msqlite3.py +194 -194
- re_common/baselibrary/database/mysql.py +169 -169
- re_common/baselibrary/database/sql_factory.py +26 -26
- re_common/baselibrary/mthread/MThreadingRun.py +486 -486
- re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -349
- re_common/baselibrary/mthread/__init__.py +2 -2
- re_common/baselibrary/mthread/mythreading.py +695 -695
- re_common/baselibrary/pakge_other/socks.py +404 -404
- re_common/baselibrary/readconfig/config_factory.py +18 -18
- re_common/baselibrary/readconfig/ini_config.py +317 -317
- re_common/baselibrary/readconfig/toml_config.py +49 -49
- re_common/baselibrary/temporary/envdata.py +36 -36
- re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -118
- re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -102
- re_common/baselibrary/tools/all_requests/mrequest.py +412 -412
- re_common/baselibrary/tools/all_requests/requests_request.py +81 -81
- re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -31
- re_common/baselibrary/tools/contrast_db3.py +123 -123
- re_common/baselibrary/tools/copy_file.py +39 -39
- re_common/baselibrary/tools/db3_2_sizedb3.py +102 -102
- re_common/baselibrary/tools/foreachgz.py +39 -39
- re_common/baselibrary/tools/get_attr.py +10 -10
- re_common/baselibrary/tools/image_to_pdf.py +61 -61
- re_common/baselibrary/tools/java_code_deal.py +139 -139
- re_common/baselibrary/tools/javacode.py +79 -79
- re_common/baselibrary/tools/mdb_db3.py +48 -48
- re_common/baselibrary/tools/merge_file.py +171 -171
- re_common/baselibrary/tools/merge_gz_file.py +165 -165
- re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -42
- re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -42
- re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -38
- re_common/baselibrary/tools/mongo_tools.py +50 -50
- re_common/baselibrary/tools/move_file.py +170 -170
- re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -63
- re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -354
- re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -18
- re_common/baselibrary/tools/move_mongo/use_mv.py +93 -93
- re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -125
- re_common/baselibrary/tools/mpandas/pandas_visualization.py +7 -7
- re_common/baselibrary/tools/myparsel.py +104 -104
- re_common/baselibrary/tools/rename_dir_file.py +37 -37
- re_common/baselibrary/tools/sequoiadb_utils.py +398 -398
- re_common/baselibrary/tools/split_line_to_many.py +25 -25
- re_common/baselibrary/tools/stringtodicts.py +33 -33
- re_common/baselibrary/tools/workwechant_bot.py +84 -84
- re_common/baselibrary/utils/baseaiohttp.py +296 -296
- re_common/baselibrary/utils/baseaiomysql.py +87 -87
- re_common/baselibrary/utils/baseallstep.py +191 -191
- re_common/baselibrary/utils/baseavro.py +19 -19
- re_common/baselibrary/utils/baseboto3.py +291 -291
- re_common/baselibrary/utils/basecsv.py +32 -32
- re_common/baselibrary/utils/basedict.py +133 -133
- re_common/baselibrary/utils/basedir.py +241 -241
- re_common/baselibrary/utils/baseencode.py +351 -351
- re_common/baselibrary/utils/baseencoding.py +28 -28
- re_common/baselibrary/utils/baseesdsl.py +86 -86
- re_common/baselibrary/utils/baseexcel.py +264 -264
- re_common/baselibrary/utils/baseexcept.py +109 -109
- re_common/baselibrary/utils/basefile.py +654 -654
- re_common/baselibrary/utils/baseftp.py +214 -214
- re_common/baselibrary/utils/basegzip.py +60 -60
- re_common/baselibrary/utils/basehdfs.py +135 -135
- re_common/baselibrary/utils/basehttpx.py +268 -268
- re_common/baselibrary/utils/baseip.py +87 -87
- re_common/baselibrary/utils/basejson.py +2 -2
- re_common/baselibrary/utils/baselist.py +32 -32
- re_common/baselibrary/utils/basemotor.py +190 -190
- re_common/baselibrary/utils/basemssql.py +98 -98
- re_common/baselibrary/utils/baseodbc.py +113 -113
- re_common/baselibrary/utils/basepandas.py +302 -302
- re_common/baselibrary/utils/basepeewee.py +11 -11
- re_common/baselibrary/utils/basepika.py +180 -180
- re_common/baselibrary/utils/basepydash.py +143 -143
- re_common/baselibrary/utils/basepymongo.py +230 -230
- re_common/baselibrary/utils/basequeue.py +22 -22
- re_common/baselibrary/utils/baserar.py +57 -57
- re_common/baselibrary/utils/baserequest.py +279 -279
- re_common/baselibrary/utils/baseset.py +8 -8
- re_common/baselibrary/utils/basesmb.py +403 -403
- re_common/baselibrary/utils/basestring.py +382 -382
- re_common/baselibrary/utils/basetime.py +320 -320
- re_common/baselibrary/utils/baseurl.py +121 -121
- re_common/baselibrary/utils/basezip.py +57 -57
- re_common/baselibrary/utils/core/__init__.py +7 -7
- re_common/baselibrary/utils/core/bottomutils.py +18 -18
- re_common/baselibrary/utils/core/mdeprecated.py +327 -327
- re_common/baselibrary/utils/core/mlamada.py +16 -16
- re_common/baselibrary/utils/core/msginfo.py +25 -25
- re_common/baselibrary/utils/core/requests_core.py +103 -103
- re_common/baselibrary/utils/fateadm.py +429 -429
- re_common/baselibrary/utils/importfun.py +123 -123
- re_common/baselibrary/utils/mfaker.py +57 -57
- re_common/baselibrary/utils/my_abc/__init__.py +3 -3
- re_common/baselibrary/utils/my_abc/better_abc.py +32 -32
- re_common/baselibrary/utils/mylogger.py +414 -414
- re_common/baselibrary/utils/myredisclient.py +861 -861
- re_common/baselibrary/utils/pipupgrade.py +21 -21
- re_common/baselibrary/utils/ringlist.py +85 -85
- re_common/baselibrary/utils/version_compare.py +36 -36
- re_common/baselibrary/utils/ydmhttp.py +126 -126
- re_common/facade/lazy_import.py +11 -11
- re_common/facade/loggerfacade.py +25 -25
- re_common/facade/mysqlfacade.py +467 -467
- re_common/facade/now.py +31 -31
- re_common/facade/sqlite3facade.py +257 -257
- re_common/facade/use/mq_use_facade.py +83 -83
- re_common/facade/use/proxy_use_facade.py +19 -19
- re_common/libtest/base_dict_test.py +19 -19
- re_common/libtest/baseavro_test.py +13 -13
- re_common/libtest/basefile_test.py +14 -14
- re_common/libtest/basemssql_test.py +77 -77
- re_common/libtest/baseodbc_test.py +7 -7
- re_common/libtest/basepandas_test.py +38 -38
- re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -14
- re_common/libtest/get_attr_test/settings.py +54 -54
- re_common/libtest/idencode_test.py +53 -53
- re_common/libtest/iniconfig_test.py +35 -35
- re_common/libtest/ip_test.py +34 -34
- re_common/libtest/merge_file_test.py +20 -20
- re_common/libtest/mfaker_test.py +8 -8
- re_common/libtest/mm3_test.py +31 -31
- re_common/libtest/mylogger_test.py +88 -88
- re_common/libtest/myparsel_test.py +27 -27
- re_common/libtest/mysql_test.py +151 -151
- re_common/libtest/pymongo_test.py +21 -21
- re_common/libtest/split_test.py +11 -11
- re_common/libtest/sqlite3_merge_test.py +5 -5
- re_common/libtest/sqlite3_test.py +34 -34
- re_common/libtest/tomlconfig_test.py +30 -30
- re_common/libtest/use_tools_test/__init__.py +2 -2
- re_common/libtest/user/__init__.py +4 -4
- re_common/studio/__init__.py +4 -4
- re_common/studio/assignment_expressions.py +36 -36
- re_common/studio/mydash/test1.py +18 -18
- re_common/studio/pydashstudio/first.py +9 -9
- re_common/studio/streamlitstudio/first_app.py +65 -65
- re_common/studio/streamlitstudio/uber_pickups.py +23 -23
- re_common/studio/test.py +18 -18
- re_common/v2/baselibrary/business_utils/BusinessStringUtil.py +195 -0
- re_common/v2/baselibrary/business_utils/__init__.py +0 -0
- re_common/v2/baselibrary/business_utils/rel_tools.py +6 -0
- re_common/v2/baselibrary/decorators/utils.py +59 -59
- re_common/v2/baselibrary/s3object/baseboto3.py +230 -230
- re_common/v2/baselibrary/tools/WeChatRobot.py +95 -79
- re_common/v2/baselibrary/tools/ac_ahocorasick.py +75 -75
- re_common/v2/baselibrary/tools/dict_tools.py +37 -37
- re_common/v2/baselibrary/tools/dolphinscheduler.py +187 -187
- re_common/v2/baselibrary/tools/hdfs_data_processer.py +338 -338
- re_common/v2/baselibrary/tools/list_tools.py +65 -65
- re_common/v2/baselibrary/tools/search_hash_tools.py +54 -54
- re_common/v2/baselibrary/tools/text_matcher.py +326 -326
- re_common/v2/baselibrary/tools/unionfind_tools.py +60 -60
- re_common/v2/baselibrary/utils/BusinessStringUtil.py +196 -196
- re_common/v2/baselibrary/utils/author_smi.py +360 -360
- re_common/v2/baselibrary/utils/base_string_similarity.py +158 -158
- re_common/v2/baselibrary/utils/basedict.py +37 -37
- re_common/v2/baselibrary/utils/basehdfs.py +161 -161
- re_common/v2/baselibrary/utils/basepika.py +180 -180
- re_common/v2/baselibrary/utils/basetime.py +77 -77
- re_common/v2/baselibrary/utils/db.py +38 -38
- re_common/v2/baselibrary/utils/json_cls.py +16 -16
- re_common/v2/baselibrary/utils/mq.py +83 -83
- re_common/v2/baselibrary/utils/n_ary_expression_tree.py +243 -243
- re_common/v2/baselibrary/utils/string_bool.py +186 -149
- re_common/v2/baselibrary/utils/string_clear.py +227 -204
- re_common/v2/baselibrary/utils/string_smi.py +18 -18
- re_common/v2/baselibrary/utils/stringutils.py +213 -213
- re_common/vip/base_step_process.py +11 -11
- re_common/vip/baseencodeid.py +90 -90
- re_common/vip/changetaskname.py +28 -28
- re_common/vip/core_var.py +24 -24
- re_common/vip/mmh3Hash.py +89 -89
- re_common/vip/proxy/allproxys.py +127 -127
- re_common/vip/proxy/allproxys_thread.py +159 -159
- re_common/vip/proxy/cnki_proxy.py +153 -153
- re_common/vip/proxy/kuaidaili.py +87 -87
- re_common/vip/proxy/proxy_all.py +113 -113
- re_common/vip/proxy/update_kuaidaili_0.py +42 -42
- re_common/vip/proxy/wanfang_proxy.py +152 -152
- re_common/vip/proxy/wp_proxy_all.py +181 -181
- re_common/vip/read_rawid_to_txt.py +91 -91
- re_common/vip/title/__init__.py +5 -5
- re_common/vip/title/transform/TransformBookTitleToZt.py +125 -125
- re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -139
- re_common/vip/title/transform/TransformCstadTitleToZt.py +195 -195
- re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -203
- re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -132
- re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -114
- re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -135
- re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -135
- re_common/vip/title/transform/__init__.py +10 -10
- {re_common-10.0.22.dist-info → re_common-10.0.24.dist-info}/LICENSE +201 -201
- {re_common-10.0.22.dist-info → re_common-10.0.24.dist-info}/METADATA +16 -16
- re_common-10.0.24.dist-info/RECORD +230 -0
- {re_common-10.0.22.dist-info → re_common-10.0.24.dist-info}/WHEEL +1 -1
- re_common-10.0.22.dist-info/RECORD +0 -227
- {re_common-10.0.22.dist-info → re_common-10.0.24.dist-info}/top_level.txt +0 -0
re_common/vip/changetaskname.py
CHANGED
@@ -1,28 +1,28 @@
The diff removes and re-adds all 28 lines with identical text (a whole-file rewrite); the file reads:

from re_common.baselibrary.tools.stringtodicts import StringToDicts
from re_common.facade.mysqlfacade import MysqlUtiles

"""
This script is mainly used when a provider changes; sometimes the provider records on 209 need to be updated,
and updating them with this script is quick and convenient.
"""
strings = """
host = 192.168.31.209
user = root
passwd = vipdatacenter
db = data_gather_record
port = 3306
chartset = utf8
"""

dicts_change = {"key为原来的": "values为现在的"}

dicts = StringToDicts().string_to_dicts_by_equal(strings)
mysqlutils = MysqlUtiles("", "", builder="MysqlBuilderForDicts", dicts=dicts)
mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=0;")
for key, values in dicts_change:
    sql1 = "update `data_gather_record`.`task` set `provider` = '{}' WHERE `provider` = '{}';".format(values, key)
    sql2 = "update `data_gather_record`.`updating` set `provider` = '{}' WHERE `provider` = '{}';".format(values, key)
    mysqlutils.ExeSqlToDB(sql1)
    mysqlutils.ExeSqlToDB(sql2)

mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=1;")
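A note on the loop in this script: iterating dicts_change directly yields only its keys, so the two-variable unpacking in "for key, values in dicts_change:" raises a ValueError for any key that is not exactly two characters long. A minimal corrected sketch under the same MysqlUtiles API shown above, with a hypothetical old-to-new provider mapping:

# Hypothetical mapping; keys are the old provider names, values the new ones.
dicts_change = {"old_provider_name": "new_provider_name"}

for old, new in dicts_change.items():  # .items() yields (key, value) pairs
    sql1 = "update `data_gather_record`.`task` set `provider` = '{}' WHERE `provider` = '{}';".format(new, old)
    sql2 = "update `data_gather_record`.`updating` set `provider` = '{}' WHERE `provider` = '{}';".format(new, old)
    mysqlutils.ExeSqlToDB(sql1)
    mysqlutils.ExeSqlToDB(sql2)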
re_common/vip/core_var.py
CHANGED
@@ -1,24 +1,24 @@
The diff removes and re-adds all 24 lines with identical text (a whole-file rewrite); the file defines a single proxy pool:

ALL_SCHOOL_PROXY_LIST = ["192.168.31.176:8119", "192.168.31.176:8120", "192.168.31.176:8104", "192.168.31.176:8018", "192.168.31.176:8076",
                         "192.168.31.176:8160", "192.168.31.176:8240", "192.168.31.176:8241", "192.168.31.176:8195", "192.168.31.176:8243",
                         "192.168.31.176:8062", "192.168.31.176:8019", "192.168.31.176:8034", "192.168.31.176:8103", "192.168.31.176:8181",
                         "192.168.31.176:8211", "192.168.31.123:8081", "192.168.31.176:8032", "192.168.31.176:8231", "192.168.31.176:8189",
                         "192.167.31.176:8058", "192.168.31.36:8135", "192.168.31.176:8057", "192.168.31.176:8017", "192.168.31.36:8033",
                         "192.168.31.176:8184", "192.168.31.176:8207", "192.168.31.176:8196", "192.168.31.176:8041", "192.168.31.176:8087",
                         "192.168.31.176:8117", "192.168.31.36:8098", "192.168.31.176:8165", "192.168.31.36:8039", "192.168.31.176:8159",
                         "192.168.31.176:8051", "192.168.31.176:8180", "192.168.31.176:8148", "192.168.31.176:8021", "192.168.31.176:8008",
                         "192.168.31.176:8035", "192.168.31.36:8004", "192.168.31.176:8131", "192.168.31.176:8127", "192.168.31.176:8052",
                         "192.168.31.36:8011", "192.168.31.36:8082", "192.168.31.36:8182", "192.168.31.176:8031", "192.168.31.176:8171",
                         "192.168.31.176:8012", "192.168.31.176:8002", "192.168.31.176:8140", "192.168.31.36:8149", "192.168.31.176:8074",
                         "192.168.31.3:8080", "192.168.31.4:8080", "192.168.31.179:8129", "192.168.31.179:8130", "192.168.31.179:8132"]
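ALL_SCHOOL_PROXY_LIST is a plain pool of "host:port" HTTP proxy endpoints. A minimal usage sketch, assuming these are reachable plain HTTP proxies (the proxies-dict form mirrors the val() method in allproxys.py below; the target URL is a placeholder):

import random
import requests

from re_common.vip.core_var import ALL_SCHOOL_PROXY_LIST

# Pick one proxy at random and route a request through it.
proxy = random.choice(ALL_SCHOOL_PROXY_LIST)
proxies = {"http": "http://" + proxy, "https": "http://" + proxy}
resp = requests.get("https://example.com", proxies=proxies, timeout=5)
print(proxy, resp.status_code)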
re_common/vip/mmh3Hash.py
CHANGED
@@ -1,90 +1,90 @@
Lines 1-89 are removed and re-added with identical text; line 90 is unchanged context. The file reads:

#!/bin/env python
# -*- coding: utf-8-*-
# author: ganruoxun
# date: 2020-09-25

import mmh3
import os
import binascii


class Mmh3Hash(object):
    def __init__(self, _type, resType):
        self.typeDic = {
            "2": "bs",
            "3": "hy",
            "4": "zl",
            "5": "bz",
            "10": "fg",
        }
        self.typeCode = self.typeDic[_type]
        self.resType = resType
        if self.typeCode == None:
            raise RuntimeError('type 参数无法识别!')

    # ## Take the file name from filePath, upper-casing the name and lower-casing the suffix
    # def normFileName(self, filePath):
    #     lngid = os.path.splitext(os.path.normpath(os.path.basename(filePath)))[0].upper()
    #     prfx = os.path.splitext(os.path.normpath(os.path.basename(filePath)))[1].lower()
    #     return "{0}{1}".format(lngid, prfx)

    ## Normalize the file name: upper-case the name, lower-case the suffix
    def normFileName(self, fileName):
        if not "." in fileName:
            return fileName.upper()
        tmps = fileName.split(".")
        filePrfx = tmps[0].upper()
        fileSufx = tmps[1].lower()
        return "{0}.{1}".format(filePrfx, fileSufx)

    # File names containing extra dots, mainly for images; not part of the standard definition
    def normFileName_image(self, fileName):
        if not "." in fileName:
            return fileName.upper()
        tmps = fileName.split(".")
        fileSufx = tmps[-1].lower()
        tmps.remove(tmps[-1])
        filePrfx = ".".join(tmps).upper()

        return "{0}.{1}".format(filePrfx, fileSufx)

    ## Hash the full-text file name with murmurhash3 and assemble the HASH directory according to the rule
    def generateHashName(self, fileName):
        hashCode = binascii.b2a_hex(mmh3.hash_bytes(fileName)).upper()[0:3]
        firstCode = chr(hashCode[0])
        secondCode = chr(hashCode[1])
        thirdCode = chr(hashCode[2])
        if thirdCode.isdigit():
            return firstCode + secondCode + str(int(thirdCode) % 5)
        elif thirdCode == 'D':
            return firstCode + secondCode + 'A'
        elif thirdCode == 'E':
            return firstCode + secondCode + 'B'
        elif thirdCode == 'F':
            return firstCode + secondCode + 'C'
        else:
            return firstCode + secondCode + thirdCode

    # fileName: file name with suffix, required; for patents it is the publication number plus suffix, for everything else lngid plus suffix
    # years: year, required
    # country: country, defaults to cn when empty
    # type: self-built resource type, required; currently only bs (theses), hy (conference), bz (standard), fg (regulation), zl (patent)
    def generatehashPath(self, fileName, years, country, resType):
        if years == None or len(years) != 4:
            raise RuntimeError('years 参数错误!')
        elif fileName == None or len(fileName) == 0:
            raise RuntimeError('fileName 参数错误!')
        elif country == None or len(country) == 0:
            country = 'cn'
        if resType in ('bs', 'hy', 'fg', 'zl', 'bz'):
            country = "cn"
        country = country.lower()
        intYear = int(years)
        if intYear < 1989:
            years = 'befor1989'
        fileName = self.normFileName(fileName)
        return "\\" + resType + "\\" + years + country + self.typeCode + "\\" + self.generateHashName(fileName) + '\\' + fileName
        # return '\\' + years + country + _type + '\\' + generateHashName(fileName) + '\\' + fileName

print(Mmh3Hash("3","").normFileName_image("aa.bb.jpg"))
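For orientation, the directory rule implemented by generateHashName above takes the first three hex characters of the murmur3 digest of the file name, keeps the first two as-is, and folds the third from 16 possible hex values down to 13 buckets (digits collapse to 0-4 via mod 5, and D/E/F map onto A/B/C). A condensed standalone sketch of that same rule, assuming only the mmh3 package and the standard library ("demo.pdf" is a made-up input):

import binascii
import mmh3

def hash_bucket(file_name: str) -> str:
    # First three hex characters of the murmur3 digest of the name.
    prefix = binascii.b2a_hex(mmh3.hash_bytes(file_name)).upper()[:3]
    first, second, third = (chr(b) for b in prefix)
    if third.isdigit():
        return first + second + str(int(third) % 5)   # digits 0-9 collapse to 0-4
    return first + second + {"D": "A", "E": "B", "F": "C"}.get(third, third)

print(hash_bucket("demo.pdf"))  # prints a 3-character bucket name like those used in the hash path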
re_common/vip/proxy/allproxys.py
CHANGED
@@ -1,127 +1,127 @@
The diff removes and re-adds all 127 lines with identical text (a whole-file rewrite); the file reads:

import json
###########################################
# Make the base package importable when calling from within the same project
import os
import sys
import time

import requests

filepath = os.path.abspath(__file__)
pathlist = filepath.split(os.sep)
pathlist = pathlist[:-4]
TopPath = os.sep.join(pathlist)
sys.path.insert(0, TopPath)
print(TopPath)
############################################

from re_common.baselibrary.utils.basedir import BaseDir
from re_common.baselibrary.utils.basefile import BaseFile
from re_common.baselibrary.utils.baserequest import BaseRequest
from re_common.facade.lazy_import import get_streamlogger
from re_common.facade.mysqlfacade import MysqlUtiles
from re_common.baselibrary.utils.basetime import BaseTime


class Kproxy(object):
    def __init__(self):
        self.cur_path = BaseDir.get_file_dir_absolute(__file__)
        self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
        self.logger = get_streamlogger()
        self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
        self.bsrequest = BaseRequest()
        self.starttime = time.time()
        self.starttime_val = time.time()

    def get_taiyang_proxy(self, num=6):
        """
        Fetch Taiyang proxies (3 per minute)
        :param num:
        :return:
        """
        self.starttime = time.time()
        # url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
        url = "http://http.tiqu.alibabaapi.com/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
        BoolResult, errString, r = self.bsrequest.base_request(url,
                                                               timeout=30
                                                               )
        if BoolResult:
            dicts = json.loads(r.text)
            for item in dicts["data"]:
                proxy = item["ip"] + ":" + item["port"]
                sources = "taiyang"
                expire_time = item["expire_time"]
                sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
                    proxy, sources, expire_time, expire_time)
                self.mysqlutils.ExeSqlToDB(sql)
        else:
            self.logger.error("获取失败")

    def get_taiyang_num(self):
        """
        Fetch Taiyang proxies (3 per minute)
        :param num:
        :return:
        """
        url = "http://ty-http-d.hamir.net/index/index/get_my_package_balance?neek=521821&appkey=1fcba6de94f71561ba3007f4c24ca0b1&ac=59105"
        BoolResult, errString, r = self.bsrequest.base_request(url,
                                                               timeout=30
                                                               )
        if BoolResult:
            dicts = json.loads(r.text)
            return dicts["data"]["package_balance"]
        else:
            self.logger.error("获取失败")

    def val(self, proxy, sources):
        # Request URL
        targetUrl = "https://www.baidu.com"
        proxies = {
            "http": "http://%s" % proxy,
            "https": "http://%s" % proxy
        }
        resp = requests.get(targetUrl, proxies=proxies, timeout=5)
        if resp.status_code == 200:
            print(resp.status_code)
            return True
        else:
            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
            self.mysqlutils.ExeSqlToDB(sql)
            return False

    def val_all(self):
        self.starttime_val = time.time()
        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
        bools, rows = self.mysqlutils.SelectFromDB(sql)
        for row in rows:
            try:
                self.val(row[0], row[1])
            except:
                sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
                self.mysqlutils.ExeSqlToDB(sql)

    def run(self):
        sleep_time = 0
        while True:
            num = 10
            if sleep_time < 0:
                print("time sleep {}".format(str(100 + sleep_time)))
                if 100 + sleep_time > 0:
                    time.sleep(100 + sleep_time)
                num = num + 10
            nowtime = BaseTime().get_beijin_date_strins(format="%H%M%S")
            print(nowtime)
            if "133700" <= nowtime <= "134700":
                num = Kproxy().get_taiyang_num()
            start_time = time.time()
            self.get_taiyang_proxy(num=num)
            self.val_all()
            use_time = int(time.time() - start_time)
            sleep_time = 100 - use_time
            print("time sleep {}".format(str(sleep_time)))
            if sleep_time >= 3:
                time.sleep(sleep_time)


if __name__ == "__main__":
    Kproxy().run()
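The upsert in get_taiyang_proxy builds its SQL with % string formatting from values returned by the proxy API. Where that is a concern, a parameterized query is the safer pattern; a minimal sketch using pymysql directly (connection settings are placeholders, and this bypasses the MysqlUtiles facade used in the script above):

import pymysql

# Placeholder connection settings; substitute real host and credentials.
conn = pymysql.connect(host="127.0.0.1", user="user", password="password",
                       database="proxydb", charset="utf8")

def upsert_proxy(proxy: str, sources: str, expire_time: str) -> None:
    # %s placeholders let the driver escape values pulled from the external API.
    sql = ("insert into proxyall_v1 (proxy, sources, expire_time) "
           "values (%s, %s, %s) "
           "on duplicate key update stat=1, expire_time=%s")
    with conn.cursor() as cur:
        cur.execute(sql, (proxy, sources, expire_time, expire_time))
    conn.commit()

upsert_proxy("1.2.3.4:8080", "taiyang", "2025-01-01 00:00:00")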