re-common 0.2.54__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- re_common/v2/baselibrary/s3object/baseboto3.py +230 -0
- re_common/v2/baselibrary/tools/dict_tools.py +24 -0
- re_common/v2/baselibrary/tools/search_hash_tools.py +33 -0
- re_common/v2/baselibrary/tools/text_matcher.py +223 -0
- re_common/v2/baselibrary/tools/unionfind_tools.py +60 -0
- re_common/v2/baselibrary/utils/BusinessStringUtil.py +74 -0
- re_common/v2/baselibrary/utils/author_smi.py +308 -0
- re_common/v2/baselibrary/utils/basedict.py +26 -0
- re_common/v2/baselibrary/utils/basehdfs.py +127 -0
- re_common/v2/baselibrary/utils/json_cls.py +11 -0
- re_common/v2/baselibrary/utils/string_bool.py +9 -0
- re_common/v2/baselibrary/utils/string_clear.py +98 -0
- re_common/v2/baselibrary/utils/stringutils.py +95 -0
- {re_common-0.2.54.dist-info → re_common-2.0.1.dist-info}/METADATA +1 -1
- re_common-2.0.1.dist-info/RECORD +25 -0
- re_common/baselibrary/__init__.py +0 -4
- re_common/baselibrary/baseabs/__init__.py +0 -7
- re_common/baselibrary/baseabs/baseabs.py +0 -26
- re_common/baselibrary/database/mbuilder.py +0 -132
- re_common/baselibrary/database/moudle.py +0 -93
- re_common/baselibrary/database/msqlite3.py +0 -194
- re_common/baselibrary/database/mysql.py +0 -169
- re_common/baselibrary/database/sql_factory.py +0 -26
- re_common/baselibrary/mthread/MThreadingRun.py +0 -486
- re_common/baselibrary/mthread/MThreadingRunEvent.py +0 -349
- re_common/baselibrary/mthread/__init__.py +0 -3
- re_common/baselibrary/mthread/mythreading.py +0 -695
- re_common/baselibrary/pakge_other/socks.py +0 -404
- re_common/baselibrary/readconfig/config_factory.py +0 -18
- re_common/baselibrary/readconfig/ini_config.py +0 -317
- re_common/baselibrary/readconfig/toml_config.py +0 -49
- re_common/baselibrary/temporary/envdata.py +0 -36
- re_common/baselibrary/tools/all_requests/aiohttp_request.py +0 -118
- re_common/baselibrary/tools/all_requests/httpx_requet.py +0 -102
- re_common/baselibrary/tools/all_requests/mrequest.py +0 -412
- re_common/baselibrary/tools/all_requests/requests_request.py +0 -81
- re_common/baselibrary/tools/batch_compre/__init__.py +0 -0
- re_common/baselibrary/tools/batch_compre/bijiao_batch.py +0 -31
- re_common/baselibrary/tools/contrast_db3.py +0 -123
- re_common/baselibrary/tools/copy_file.py +0 -39
- re_common/baselibrary/tools/db3_2_sizedb3.py +0 -102
- re_common/baselibrary/tools/foreachgz.py +0 -40
- re_common/baselibrary/tools/get_attr.py +0 -11
- re_common/baselibrary/tools/image_to_pdf.py +0 -62
- re_common/baselibrary/tools/java_code_deal.py +0 -139
- re_common/baselibrary/tools/javacode.py +0 -79
- re_common/baselibrary/tools/mdb_db3.py +0 -48
- re_common/baselibrary/tools/merge_file.py +0 -171
- re_common/baselibrary/tools/merge_gz_file.py +0 -165
- re_common/baselibrary/tools/mhdfstools/__init__.py +0 -0
- re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +0 -42
- re_common/baselibrary/tools/mhdfstools/hdfst.py +0 -42
- re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +0 -38
- re_common/baselibrary/tools/mongo_tools.py +0 -50
- re_common/baselibrary/tools/move_file.py +0 -170
- re_common/baselibrary/tools/move_mongo/__init__.py +0 -0
- re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +0 -63
- re_common/baselibrary/tools/move_mongo/move_mongo_table.py +0 -354
- re_common/baselibrary/tools/move_mongo/use_mttf.py +0 -18
- re_common/baselibrary/tools/move_mongo/use_mv.py +0 -93
- re_common/baselibrary/tools/mpandas/__init__.py +0 -0
- re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +0 -125
- re_common/baselibrary/tools/mpandas/pandas_visualization.py +0 -8
- re_common/baselibrary/tools/myparsel.py +0 -104
- re_common/baselibrary/tools/rename_dir_file.py +0 -37
- re_common/baselibrary/tools/sequoiadb_utils.py +0 -398
- re_common/baselibrary/tools/split_line_to_many.py +0 -25
- re_common/baselibrary/tools/stringtodicts.py +0 -33
- re_common/baselibrary/tools/workwechant_bot.py +0 -84
- re_common/baselibrary/utils/__init__.py +0 -0
- re_common/baselibrary/utils/baseaiohttp.py +0 -296
- re_common/baselibrary/utils/baseaiomysql.py +0 -87
- re_common/baselibrary/utils/baseallstep.py +0 -191
- re_common/baselibrary/utils/baseavro.py +0 -19
- re_common/baselibrary/utils/baseboto3.py +0 -291
- re_common/baselibrary/utils/basecsv.py +0 -32
- re_common/baselibrary/utils/basedict.py +0 -133
- re_common/baselibrary/utils/basedir.py +0 -241
- re_common/baselibrary/utils/baseencode.py +0 -351
- re_common/baselibrary/utils/baseencoding.py +0 -29
- re_common/baselibrary/utils/baseesdsl.py +0 -86
- re_common/baselibrary/utils/baseexcel.py +0 -264
- re_common/baselibrary/utils/baseexcept.py +0 -109
- re_common/baselibrary/utils/basefile.py +0 -654
- re_common/baselibrary/utils/baseftp.py +0 -214
- re_common/baselibrary/utils/basegzip.py +0 -60
- re_common/baselibrary/utils/basehdfs.py +0 -135
- re_common/baselibrary/utils/basehttpx.py +0 -268
- re_common/baselibrary/utils/baseip.py +0 -87
- re_common/baselibrary/utils/basejson.py +0 -2
- re_common/baselibrary/utils/baselist.py +0 -32
- re_common/baselibrary/utils/basemotor.py +0 -190
- re_common/baselibrary/utils/basemssql.py +0 -98
- re_common/baselibrary/utils/baseodbc.py +0 -113
- re_common/baselibrary/utils/basepandas.py +0 -302
- re_common/baselibrary/utils/basepeewee.py +0 -11
- re_common/baselibrary/utils/basepika.py +0 -180
- re_common/baselibrary/utils/basepydash.py +0 -143
- re_common/baselibrary/utils/basepymongo.py +0 -230
- re_common/baselibrary/utils/basequeue.py +0 -22
- re_common/baselibrary/utils/baserar.py +0 -57
- re_common/baselibrary/utils/baserequest.py +0 -279
- re_common/baselibrary/utils/baseset.py +0 -8
- re_common/baselibrary/utils/basesmb.py +0 -403
- re_common/baselibrary/utils/basestring.py +0 -382
- re_common/baselibrary/utils/basetime.py +0 -320
- re_common/baselibrary/utils/basetuple.py +0 -0
- re_common/baselibrary/utils/baseurl.py +0 -121
- re_common/baselibrary/utils/basezip.py +0 -57
- re_common/baselibrary/utils/core/__init__.py +0 -8
- re_common/baselibrary/utils/core/bottomutils.py +0 -18
- re_common/baselibrary/utils/core/mdeprecated.py +0 -327
- re_common/baselibrary/utils/core/mlamada.py +0 -16
- re_common/baselibrary/utils/core/msginfo.py +0 -25
- re_common/baselibrary/utils/core/requests_core.py +0 -103
- re_common/baselibrary/utils/fateadm.py +0 -429
- re_common/baselibrary/utils/importfun.py +0 -123
- re_common/baselibrary/utils/mfaker.py +0 -57
- re_common/baselibrary/utils/my_abc/__init__.py +0 -3
- re_common/baselibrary/utils/my_abc/better_abc.py +0 -32
- re_common/baselibrary/utils/mylogger.py +0 -414
- re_common/baselibrary/utils/myredisclient.py +0 -861
- re_common/baselibrary/utils/pipupgrade.py +0 -21
- re_common/baselibrary/utils/ringlist.py +0 -85
- re_common/baselibrary/utils/version_compare.py +0 -36
- re_common/baselibrary/utils/ydmhttp.py +0 -126
- re_common/facade/__init__.py +0 -1
- re_common/facade/lazy_import.py +0 -11
- re_common/facade/loggerfacade.py +0 -25
- re_common/facade/mysqlfacade.py +0 -467
- re_common/facade/now.py +0 -31
- re_common/facade/sqlite3facade.py +0 -257
- re_common/facade/use/__init__.py +0 -0
- re_common/facade/use/mq_use_facade.py +0 -83
- re_common/facade/use/proxy_use_facade.py +0 -20
- re_common/libtest/__init__.py +0 -0
- re_common/libtest/base_dict_test.py +0 -19
- re_common/libtest/baseavro_test.py +0 -13
- re_common/libtest/basefile_test.py +0 -14
- re_common/libtest/basemssql_test.py +0 -77
- re_common/libtest/baseodbc_test.py +0 -8
- re_common/libtest/basepandas_test.py +0 -38
- re_common/libtest/get_attr_test/__init__.py +0 -0
- re_common/libtest/get_attr_test/get_attr_test_settings.py +0 -14
- re_common/libtest/get_attr_test/settings.py +0 -55
- re_common/libtest/idencode_test.py +0 -54
- re_common/libtest/iniconfig_test.py +0 -35
- re_common/libtest/ip_test.py +0 -35
- re_common/libtest/merge_file_test.py +0 -20
- re_common/libtest/mfaker_test.py +0 -9
- re_common/libtest/mm3_test.py +0 -32
- re_common/libtest/mylogger_test.py +0 -89
- re_common/libtest/myparsel_test.py +0 -28
- re_common/libtest/mysql_test.py +0 -151
- re_common/libtest/pymongo_test.py +0 -21
- re_common/libtest/split_test.py +0 -12
- re_common/libtest/sqlite3_merge_test.py +0 -6
- re_common/libtest/sqlite3_test.py +0 -34
- re_common/libtest/tomlconfig_test.py +0 -30
- re_common/libtest/use_tools_test/__init__.py +0 -3
- re_common/libtest/user/__init__.py +0 -5
- re_common/studio/__init__.py +0 -5
- re_common/studio/assignment_expressions.py +0 -37
- re_common/studio/mydash/__init__.py +0 -0
- re_common/studio/mydash/test1.py +0 -19
- re_common/studio/pydashstudio/__init__.py +0 -0
- re_common/studio/pydashstudio/first.py +0 -9
- re_common/studio/streamlitstudio/__init__.py +0 -0
- re_common/studio/streamlitstudio/first_app.py +0 -66
- re_common/studio/streamlitstudio/uber_pickups.py +0 -24
- re_common/studio/test.py +0 -19
- re_common/vip/__init__.py +0 -0
- re_common/vip/base_step_process.py +0 -11
- re_common/vip/baseencodeid.py +0 -91
- re_common/vip/changetaskname.py +0 -28
- re_common/vip/core_var.py +0 -24
- re_common/vip/mmh3Hash.py +0 -90
- re_common/vip/proxy/__init__.py +0 -0
- re_common/vip/proxy/allproxys.py +0 -127
- re_common/vip/proxy/allproxys_thread.py +0 -159
- re_common/vip/proxy/cnki_proxy.py +0 -153
- re_common/vip/proxy/kuaidaili.py +0 -87
- re_common/vip/proxy/proxy_all.py +0 -113
- re_common/vip/proxy/update_kuaidaili_0.py +0 -42
- re_common/vip/proxy/wanfang_proxy.py +0 -152
- re_common/vip/proxy/wp_proxy_all.py +0 -182
- re_common/vip/read_rawid_to_txt.py +0 -92
- re_common/vip/title/__init__.py +0 -5
- re_common/vip/title/transform/TransformBookTitleToZt.py +0 -125
- re_common/vip/title/transform/TransformConferenceTitleToZt.py +0 -139
- re_common/vip/title/transform/TransformCstadTitleToZt.py +0 -196
- re_common/vip/title/transform/TransformJournalTitleToZt.py +0 -203
- re_common/vip/title/transform/TransformPatentTitleToZt.py +0 -132
- re_common/vip/title/transform/TransformRegulationTitleToZt.py +0 -114
- re_common/vip/title/transform/TransformStandardTitleToZt.py +0 -135
- re_common/vip/title/transform/TransformThesisTitleToZt.py +0 -135
- re_common/vip/title/transform/__init__.py +0 -11
- re_common-0.2.54.dist-info/RECORD +0 -194
- /re_common/{baselibrary/database → v2}/__init__.py +0 -0
- /re_common/{baselibrary/pakge_other → v2/baselibrary}/__init__.py +0 -0
- /re_common/{baselibrary/readconfig → v2/baselibrary/s3object}/__init__.py +0 -0
- /re_common/{baselibrary/temporary → v2/baselibrary/tools}/__init__.py +0 -0
- /re_common/{baselibrary/tools/__init__.py → v2/baselibrary/tools/list_tools.py} +0 -0
- /re_common/{baselibrary/tools/all_requests → v2/baselibrary/utils}/__init__.py +0 -0
- {re_common-0.2.54.dist-info → re_common-2.0.1.dist-info}/LICENSE +0 -0
- {re_common-0.2.54.dist-info → re_common-2.0.1.dist-info}/WHEEL +0 -0
- {re_common-0.2.54.dist-info → re_common-2.0.1.dist-info}/top_level.txt +0 -0
@@ -1,24 +0,0 @@
-import streamlit as st
-import pandas as pd
-import numpy as np
-
-st.title('Uber pickups in NYC')
-
-DATE_COLUMN = 'date/time'
-DATA_URL = ('https://s3-us-west-2.amazonaws.com/streamlit-demo-data/uber-raw-data-sep14.csv.gz')
-
-@st.cache
-def load_data(nrows):
-    data = pd.read_csv(DATA_URL, nrows=nrows)
-    lowercase = lambda x: str(x).lower()
-    data.rename(lowercase, axis='columns', inplace=True)
-    data[DATE_COLUMN] = pd.to_datetime(data[DATE_COLUMN])
-    return data
-
-# Create a text element and let the reader know the data is loading.
-data_load_state.text("Done! (using st.cache)")
-
-# Load 10,000 rows of data into the dataframe.
-data = load_data(10000)
-# Notify the reader that the data was successfully loaded.
-data_load_state.text('Loading data...done!')
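Note: the deleted Streamlit demo above calls data_load_state.text(...) without ever creating that element, so it would fail if run as-is. A minimal, self-contained sketch of the same caching pattern with that step restored, assuming only the streamlit and pandas packages (st.cache_data stands in for the older st.cache decorator used in the file):

    import pandas as pd
    import streamlit as st

    DATE_COLUMN = 'date/time'
    DATA_URL = 'https://s3-us-west-2.amazonaws.com/streamlit-demo-data/uber-raw-data-sep14.csv.gz'

    @st.cache_data  # the deleted file used @st.cache, renamed in newer Streamlit releases
    def load_data(nrows):
        data = pd.read_csv(DATA_URL, nrows=nrows)
        data.rename(lambda c: str(c).lower(), axis='columns', inplace=True)
        data[DATE_COLUMN] = pd.to_datetime(data[DATE_COLUMN])
        return data

    st.title('Uber pickups in NYC')
    data_load_state = st.text('Loading data...')  # create the text element before updating it
    data = load_data(10000)                       # load 10,000 rows (cached on reruns)
    data_load_state.text('Done! (using cached data)')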
re_common/studio/test.py
DELETED

@@ -1,19 +0,0 @@
-def test_sq():
-    from re_common.baselibrary.utils.basepymongo import BasePyMongo
-    # basemongo = BasePyMongo("mongodb://sdbadmin:sdbadmin@192.168.72.86:11817/test2?authMechanism=SCRAM-SHA-1")
-    # basemongo = BasePyMongo(
-    #     "mongodb://sdbadmin:sdbadmin@192.168.72.86:11817/dataware_house.base_obj_meta_a")
-    basemongo = BasePyMongo(
-        "mongodb://sdbadmin:sdbadmin@192.168.72.86:11817/test2.test?authMechanism=SCRAM-SHA-1")
-
-    basemongo.use_db("test2")
-    # basemongo.auth("sdbadmin", "sdbadmin", "SCRAM-SHA-1")
-    basemongo.create_col("test")
-    # for items in basemongo.find({}):
-    #     print(items["user"])
-    items = basemongo.find()
-    for item in items:
-        ids = item["id"]
-        print(ids)
-
-test_sq()
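For reference, a plain pymongo sketch of roughly what this deleted smoke test exercised through the BasePyMongo wrapper; it assumes the pymongo package, and the URI below is a placeholder rather than a value to reuse:

    from pymongo import MongoClient

    # Placeholder URI; the deleted test pointed at an internal endpoint.
    client = MongoClient("mongodb://user:password@localhost:27017/test2?authMechanism=SCRAM-SHA-1")
    collection = client["test2"]["test"]   # database "test2", collection "test"
    for item in collection.find():          # iterate all documents
        print(item.get("id"))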
re_common/vip/__init__.py
DELETED

File without changes
re_common/vip/baseencodeid.py
DELETED

@@ -1,91 +0,0 @@
-import base64
-import hashlib
-
-"""
-Generates VIP-encoded lngid values.
-"""
-
-class BaseLngid(object):
-    def __int__(self):
-        pass
-
-    def BaseEncodeID(self, strRaw):
-        r""" Custom base encoding """
-
-        strEncode = base64.b32encode(strRaw.encode('utf8')).decode('utf8')
-
-        if strEncode.endswith('======'):
-            strEncode = '%s%s' % (strEncode[0:-6], '0')
-        elif strEncode.endswith('===='):
-            strEncode = '%s%s' % (strEncode[0:-4], '1')
-        elif strEncode.endswith('==='):
-            strEncode = '%s%s' % (strEncode[0:-3], '8')
-        elif strEncode.endswith('='):
-            strEncode = '%s%s' % (strEncode[0:-1], '9')
-
-        table = str.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ', 'ZYXWVUTSRQPONMLKJIHGFEDCBA9876543210')
-        strEncode = strEncode.translate(table)
-
-        return strEncode
-
-    def BaseDecodeID(self, strEncode):
-        r""" Custom base decoding """
-
-        table = str.maketrans('ZYXWVUTSRQPONMLKJIHGFEDCBA9876543210', '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ')
-        strEncode = strEncode.translate(table)
-
-        if strEncode.endswith('0'):
-            strEncode = '%s%s' % (strEncode[0:-1], '======')
-        elif strEncode.endswith('1'):
-            strEncode = '%s%s' % (strEncode[0:-1], '====')
-        elif strEncode.endswith('8'):
-            strEncode = '%s%s' % (strEncode[0:-1], '===')
-        elif strEncode.endswith('9'):
-            strEncode = '%s%s' % (strEncode[0:-1], '=')
-
-        strRaw = base64.b32decode(strEncode.encode('utf8')).decode('utf8')
-
-        return strRaw
-
-    def GetLngid(self, sub_db_id, rawid, case_insensitive=False):
-        """
-        :param sub_db_id:
-        :param rawid:
-        Derive the lngid from sub_db_id and rawid.
-        :param case_insensitive: whether the source site's rawid is case sensitive
-        :return: lngid
-        """
-        uppercase_rawid = ''  # uppercase version of rawid
-        if case_insensitive:  # the source site's rawid is case sensitive
-            for ch in rawid:
-                if ch.upper() == ch:
-                    uppercase_rawid += ch
-                else:
-                    uppercase_rawid += ch.upper() + '_'
-        else:
-            uppercase_rawid = rawid.upper()
-
-        limited_id = uppercase_rawid  # length-limited ID
-        if len(uppercase_rawid) > 20:
-            limited_id = hashlib.md5(uppercase_rawid.encode('utf8')).hexdigest().upper()
-        else:
-            limited_id = self.BaseEncodeID(uppercase_rawid)
-
-        lngid = sub_db_id + limited_id
-
-        return lngid
-
-    def GetRawid(self, limited_id, case_insensitive=False):
-        try:
-            uppercase_rawid = self.BaseDecodeID(limited_id)
-            if case_insensitive:
-                str_ = "_"
-                uppercase_rawid_list = list(uppercase_rawid)
-                for num, li in enumerate(uppercase_rawid_list):
-                    if li == str_:
-                        old_str = "".join(uppercase_rawid_list[num-1:num+1])
-                        uppercase_rawid = uppercase_rawid.replace(old_str, uppercase_rawid_list[num-1].lower())
-        except Exception as e:
-            raise Exception("长度超过20,不可逆")  # length over 20: not reversible
-
-        return uppercase_rawid
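The codec above base32-encodes the raw ID, replaces the trailing '=' padding run with a single digit so the result stays identifier-safe, then reverses the alphabet. A standalone round-trip sketch of that scheme (encode_id and decode_id are illustrative names, not part of the package):

    import base64

    PAD_TO_DIGIT = {'======': '0', '====': '1', '===': '8', '=': '9'}  # longest pad checked first
    FWD = str.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ', 'ZYXWVUTSRQPONMLKJIHGFEDCBA9876543210')
    REV = str.maketrans('ZYXWVUTSRQPONMLKJIHGFEDCBA9876543210', '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ')

    def encode_id(raw):
        enc = base64.b32encode(raw.encode('utf8')).decode('utf8')
        for pad, digit in PAD_TO_DIGIT.items():     # replace the '=' run with one digit
            if enc.endswith(pad):
                enc = enc[:-len(pad)] + digit
                break
        return enc.translate(FWD)                   # reverse the alphabet

    def decode_id(enc):
        enc = enc.translate(REV)                    # undo the alphabet reversal
        for pad, digit in PAD_TO_DIGIT.items():     # restore the '=' padding
            if enc.endswith(digit):
                enc = enc[:-1] + pad
                break
        return base64.b32decode(enc.encode('utf8')).decode('utf8')

    assert decode_id(encode_id('CN206103553U')) == 'CN206103553U'

Raw IDs longer than 20 characters are replaced by an MD5 digest in GetLngid, which is why GetRawid raises instead of decoding them.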
re_common/vip/changetaskname.py
DELETED

@@ -1,28 +0,0 @@
-from re_common.baselibrary.tools.stringtodicts import StringToDicts
-from re_common.facade.mysqlfacade import MysqlUtiles
-
-"""
-This script is mainly used when a provider changes; sometimes we need to update the provider records on 209.
-Updating them this way is quick and convenient.
-"""
-strings = """
-host = 192.168.31.209
-user = root
-passwd = vipdatacenter
-db = data_gather_record
-port = 3306
-chartset = utf8
-"""
-
-dicts_change = {"key为原来的": "values为现在的"}  # key: old provider, value: new provider
-
-dicts = StringToDicts().string_to_dicts_by_equal(strings)
-mysqlutils = MysqlUtiles("", "", builder="MysqlBuilderForDicts", dicts=dicts)
-mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=0;")
-for key, values in dicts_change:
-    sql1 = "update `data_gather_record`.`task` set `provider` = '{}' WHERE `provider` = '{}';".format(values, key)
-    sql2 = "update `data_gather_record`.`updating` set `provider` = '{}' WHERE `provider` = '{}';".format(values, key)
-    mysqlutils.ExeSqlToDB(sql1)
-    mysqlutils.ExeSqlToDB(sql2)
-
-mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=1;")
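Note: the loop above iterates only the dict's keys and would fail while unpacking them; the pairwise iteration needs .items(). A small sketch of the intended rename loop, reusing the ExeSqlToDB helper that appears elsewhere in this diff (the wrapper function itself is illustrative):

    def rename_providers(mysqlutils, provider_map):
        """provider_map: {old_provider: new_provider}"""
        mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=0;")
        for old, new in provider_map.items():   # .items() yields (old, new) pairs
            for table in ("task", "updating"):
                sql = ("update `data_gather_record`.`{}` set `provider` = '{}' "
                       "WHERE `provider` = '{}';".format(table, new, old))
                mysqlutils.ExeSqlToDB(sql)
        mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=1;")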
re_common/vip/core_var.py
DELETED

@@ -1,24 +0,0 @@
-ALL_SCHOOL_PROXY_LIST = ["192.168.31.176:8119", "192.168.31.176:8120", "192.168.31.176:8104", "192.168.31.176:8018",
-                         "192.168.31.176:8076",
-                         "192.168.31.176:8160", "192.168.31.176:8240", "192.168.31.176:8241", "192.168.31.176:8195",
-                         "192.168.31.176:8243",
-                         "192.168.31.176:8062", "192.168.31.176:8019", "192.168.31.176:8034", "192.168.31.176:8103",
-                         "192.168.31.176:8181",
-                         "192.168.31.176:8211", "192.168.31.123:8081", "192.168.31.176:8032", "192.168.31.176:8231",
-                         "192.168.31.176:8189",
-                         "192.167.31.176:8058", "192.168.31.36:8135", "192.168.31.176:8057", "192.168.31.176:8017",
-                         "192.168.31.36:8033",
-                         "192.168.31.176:8184", "192.168.31.176:8207", "192.168.31.176:8196", "192.168.31.176:8041",
-                         "192.168.31.176:8087",
-                         "192.168.31.176:8117", "192.168.31.36:8098", "192.168.31.176:8165", "192.168.31.36:8039",
-                         "192.168.31.176:8159",
-                         "192.168.31.176:8051", "192.168.31.176:8180", "192.168.31.176:8148", "192.168.31.176:8021",
-                         "192.168.31.176:8008",
-                         "192.168.31.176:8035", "192.168.31.36:8004", "192.168.31.176:8131", "192.168.31.176:8127",
-                         "192.168.31.176:8052",
-                         "192.168.31.36:8011", "192.168.31.36:8082", "192.168.31.36:8182", "192.168.31.176:8031",
-                         "192.168.31.176:8171",
-                         "192.168.31.176:8012", "192.168.31.176:8002", "192.168.31.176:8140", "192.168.31.36:8149",
-                         "192.168.31.176:8074",
-                         "192.168.31.3:8080", "192.168.31.4:8080", "192.168.31.179:8129", "192.168.31.179:8130",
-                         "192.168.31.179:8132"]
re_common/vip/mmh3Hash.py
DELETED

@@ -1,90 +0,0 @@
-#!/bin/env python
-# -*- coding: utf-8-*-
-# author: ganruoxun
-# date: 2020-09-25
-
-import mmh3
-import os
-import binascii
-
-
-class Mmh3Hash(object):
-    def __init__(self, _type, resType):
-        self.typeDic = {
-            "2": "bs",
-            "3": "hy",
-            "4": "zl",
-            "5": "bz",
-            "10": "fg",
-        }
-        self.typeCode = self.typeDic[_type]
-        self.resType = resType
-        if self.typeCode == None:
-            raise RuntimeError('type 参数无法识别!')  # unrecognized type parameter
-
-    # ## Take the file name from filePath, uppercase the name and lowercase the extension
-    # def normFileName(self, filePath):
-    #     lngid = os.path.splitext(os.path.normpath(os.path.basename(filePath)))[0].upper()
-    #     prfx = os.path.splitext(os.path.normpath(os.path.basename(filePath)))[1].lower()
-    #     return "{0}{1}".format(lngid, prfx)
-
-    ## Normalize the file name: uppercase the name, lowercase the extension
-    def normFileName(self, fileName):
-        if not "." in fileName:
-            return fileName.upper()
-        tmps = fileName.split(".")
-        filePrfx = tmps[0].upper()
-        fileSufx = tmps[1].lower()
-        return "{0}.{1}".format(filePrfx, fileSufx)
-
-    # File names containing extra dots, mainly used for images; not part of the standard definition
-    def normFileName_image(self, fileName):
-        if not "." in fileName:
-            return fileName.upper()
-        tmps = fileName.split(".")
-        fileSufx = tmps[-1].lower()
-        tmps.remove(tmps[-1])
-        filePrfx = ".".join(tmps).upper()
-
-        return "{0}.{1}".format(filePrfx, fileSufx)
-
-    ## Hash the full-text file name with MurmurHash3 and assemble the hash directory according to the rules
-    def generateHashName(self, fileName):
-        hashCode = binascii.b2a_hex(mmh3.hash_bytes(fileName)).upper()[0:3]
-        firstCode = chr(hashCode[0])
-        secondCode = chr(hashCode[1])
-        thirdCode = chr(hashCode[2])
-        if thirdCode.isdigit():
-            return firstCode + secondCode + str(int(thirdCode) % 5)
-        elif thirdCode == 'D':
-            return firstCode + secondCode + 'A'
-        elif thirdCode == 'E':
-            return firstCode + secondCode + 'B'
-        elif thirdCode == 'F':
-            return firstCode + secondCode + 'C'
-        else:
-            return firstCode + secondCode + thirdCode
-
-    # fileName: file name with extension, must not be empty; for patents it is the publication number plus extension, otherwise the lngid plus extension
-    # years: year, must not be empty
-    # country: country, defaults to cn if empty
-    # type: self-built resource type, must not be empty; currently only bs (theses), hy (conferences), bz (standards), fg (regulations), zl (patents)
-    def generatehashPath(self, fileName, years, country, resType):
-        if years == None or len(years) != 4:
-            raise RuntimeError('years 参数错误!')  # bad years parameter
-        elif fileName == None or len(fileName) == 0:
-            raise RuntimeError('fileName 参数错误!')  # bad fileName parameter
-        elif country == None or len(country) == 0:
-            country = 'cn'
-        if resType in ('bs', 'hy', 'fg', 'zl', 'bz'):
-            country = "cn"
-        country = country.lower()
-        intYear = int(years)
-        if intYear < 1989:
-            years = 'befor1989'
-        fileName = self.normFileName(fileName)
-        return "\\" + resType + "\\" + years + country + self.typeCode + "\\" + self.generateHashName(fileName) + '\\' + fileName
-        # return '\\' + years + country + _type + '\\' + generateHashName(fileName) + '\\' + fileName
-
-print(Mmh3Hash("3","").normFileName_image("aa.bb.jpg"))
-print(Mmh3Hash("4","zl").generatehashPath("cn206103553u.pdf","2016","cn","zl"))
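generateHashName buckets a file by the first three uppercase hex characters of its MurmurHash3 digest, folding the third character (digits taken modulo 5, D/E/F mapped to A/B/C) so the last directory level has fewer buckets. A standalone sketch of just that step (assumes the mmh3 package; hash_bucket is an illustrative name):

    import binascii
    import mmh3

    def hash_bucket(file_name):
        code = binascii.b2a_hex(mmh3.hash_bytes(file_name)).upper()[:3].decode('ascii')
        first, second, third = code[0], code[1], code[2]
        if third.isdigit():
            third = str(int(third) % 5)                               # 0-9 -> 0-4
        else:
            third = {'D': 'A', 'E': 'B', 'F': 'C'}.get(third, third)  # D/E/F -> A/B/C
        return first + second + third

    print(hash_bucket('CN206103553U.pdf'))  # prints a 3-character bucket name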
re_common/vip/proxy/__init__.py
DELETED

File without changes
re_common/vip/proxy/allproxys.py
DELETED

@@ -1,127 +0,0 @@
-import json
-###########################################
-# make the base package importable when run from inside the project
-import os
-import sys
-import time
-
-import requests
-
-filepath = os.path.abspath(__file__)
-pathlist = filepath.split(os.sep)
-pathlist = pathlist[:-4]
-TopPath = os.sep.join(pathlist)
-sys.path.insert(0, TopPath)
-print(TopPath)
-############################################
-
-from re_common.baselibrary.utils.basedir import BaseDir
-from re_common.baselibrary.utils.basefile import BaseFile
-from re_common.baselibrary.utils.baserequest import BaseRequest
-from re_common.facade.lazy_import import get_streamlogger
-from re_common.facade.mysqlfacade import MysqlUtiles
-from re_common.baselibrary.utils.basetime import BaseTime
-
-
-class Kproxy(object):
-    def __init__(self):
-        self.cur_path = BaseDir.get_file_dir_absolute(__file__)
-        self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
-        self.logger = get_streamlogger()
-        self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
-        self.bsrequest = BaseRequest()
-        self.starttime = time.time()
-        self.starttime_val = time.time()
-
-    def get_taiyang_proxy(self, num=6):
-        """
-        Fetch Taiyang proxies (3 per minute allowed).
-        :param num:
-        :return:
-        """
-        self.starttime = time.time()
-        # url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
-        url = "http://http.tiqu.alibabaapi.com/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
-        BoolResult, errString, r = self.bsrequest.base_request(url,
-                                                               timeout=30
-                                                               )
-        if BoolResult:
-            dicts = json.loads(r.text)
-            for item in dicts["data"]:
-                proxy = item["ip"] + ":" + item["port"]
-                sources = "taiyang"
-                expire_time = item["expire_time"]
-                sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
-                    proxy, sources, expire_time, expire_time)
-                self.mysqlutils.ExeSqlToDB(sql)
-        else:
-            self.logger.error("获取失败")  # fetch failed
-
-    def get_taiyang_num(self):
-        """
-        Fetch Taiyang proxies (3 per minute allowed).
-        :param num:
-        :return:
-        """
-        url = "http://ty-http-d.hamir.net/index/index/get_my_package_balance?neek=521821&appkey=1fcba6de94f71561ba3007f4c24ca0b1&ac=59105"
-        BoolResult, errString, r = self.bsrequest.base_request(url,
-                                                               timeout=30
-                                                               )
-        if BoolResult:
-            dicts = json.loads(r.text)
-            return dicts["data"]["package_balance"]
-        else:
-            self.logger.error("获取失败")  # fetch failed
-
-    def val(self, proxy, sources):
-        # target URL used to validate the proxy
-        targetUrl = "https://www.baidu.com"
-        proxies = {
-            "http": "http://%s" % proxy,
-            "https": "http://%s" % proxy
-        }
-        resp = requests.get(targetUrl, proxies=proxies, timeout=5)
-        if resp.status_code == 200:
-            print(resp.status_code)
-            return True
-        else:
-            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
-            self.mysqlutils.ExeSqlToDB(sql)
-            return False
-
-    def val_all(self):
-        self.starttime_val = time.time()
-        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
-        bools, rows = self.mysqlutils.SelectFromDB(sql)
-        for row in rows:
-            try:
-                self.val(row[0], row[1])
-            except:
-                sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
-                self.mysqlutils.ExeSqlToDB(sql)
-
-    def run(self):
-        sleep_time = 0
-        while True:
-            num = 10
-            if sleep_time < 0:
-                print("time sleep {}".format(str(100 + sleep_time)))
-                if 100 + sleep_time > 0:
-                    time.sleep(100 + sleep_time)
-                num = num + 10
-            nowtime = BaseTime().get_beijin_date_strins(format="%H%M%S")
-            print(nowtime)
-            if "133700" <= nowtime <= "134700":
-                num = Kproxy().get_taiyang_num()
-            start_time = time.time()
-            self.get_taiyang_proxy(num=num)
-            self.val_all()
-            use_time = int(time.time() - start_time)
-            sleep_time = 100 - use_time
-            print("time sleep {}".format(str(sleep_time)))
-            if sleep_time >= 3:
-                time.sleep(sleep_time)
-
-
-if __name__ == "__main__":
-    Kproxy().run()
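The deleted module interpolates proxy strings directly into its SQL. A short sketch of the same proxyall_v1 upsert with bound parameters instead, assuming the pymysql package; the connection values are placeholders, not taken from this package:

    import pymysql

    def upsert_proxy(conn, proxy, sources, expire_time):
        sql = ("INSERT INTO proxyall_v1 (proxy, sources, expire_time) "
               "VALUES (%s, %s, %s) "
               "ON DUPLICATE KEY UPDATE stat = 1, expire_time = %s")
        with conn.cursor() as cur:
            cur.execute(sql, (proxy, sources, expire_time, expire_time))
        conn.commit()

    if __name__ == "__main__":
        # placeholder connection details
        conn = pymysql.connect(host="127.0.0.1", user="user", password="...", database="proxydb")
        upsert_proxy(conn, "10.0.0.1:8080", "taiyang", "2020-01-01 00:00:00")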
re_common/vip/proxy/allproxys_thread.py
DELETED

@@ -1,159 +0,0 @@
-import json
-###########################################
-# make the base package importable when run from inside the project
-import os
-import sys
-import time
-
-import requests
-
-from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun
-from re_common.baselibrary.mthread.mythreading import ThreadInfo, ThreadVal, ThreadPoolManger
-
-filepath = os.path.abspath(__file__)
-pathlist = filepath.split(os.sep)
-pathlist = pathlist[:-4]
-TopPath = os.sep.join(pathlist)
-sys.path.insert(0, TopPath)
-print(TopPath)
-############################################
-
-from re_common.baselibrary.utils.basedir import BaseDir
-from re_common.baselibrary.utils.basefile import BaseFile
-from re_common.baselibrary.utils.baserequest import BaseRequest
-from re_common.facade.lazy_import import get_streamlogger
-from re_common.facade.mysqlfacade import MysqlUtiles
-
-
-class Kproxy(object):
-    def __init__(self):
-        self.cur_path = BaseDir.get_file_dir_absolute(__file__)
-        self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
-        self.logger = get_streamlogger()
-        self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
-        self.bsrequest = BaseRequest()
-        self.starttime = time.time()
-        self.starttime_val = time.time()
-
-    def get_taiyang_proxy(self, num=6):
-        """
-        Fetch Taiyang proxies (3 per minute allowed).
-        :param num:
-        :return:
-        """
-        self.starttime = time.time()
-        url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=56912&port=1&ts=1&lb=1&pb=45&regions=".format(num)
-        BoolResult, errString, r = self.bsrequest.base_request(url,
-                                                               timeout=30
-                                                               )
-        if BoolResult:
-            dicts = json.loads(r.text)
-            for item in dicts["data"]:
-                proxy = item["ip"] + ":" + item["port"]
-                sources = "taiyang"
-                expire_time = item["expire_time"]
-                sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
-                    proxy, sources, expire_time, expire_time)
-                self.mysqlutils.ExeSqlToDB(sql)
-        else:
-            self.logger.error("获取失败")  # fetch failed
-
-    def val(self, proxy, sources, threadval):
-        # target URL used to validate the proxy
-        targetUrl = "https://www.baidu.com"
-        proxies = {
-            "http": "http://%s" % proxy,
-            "https": "http://%s" % proxy
-        }
-        resp = requests.get(targetUrl, proxies=proxies, timeout=5)
-        if resp.status_code == 200:
-            print(resp.status_code)
-            sql = "update proxyall_v1 set stat=1 where proxy='%s' and sources='%s';" % (proxy, sources)
-            threadval.get_result_queue().put(sql)
-            return True
-        else:
-            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
-            # self.mysqlutils.ExeSqlToDB(sql)
-            threadval.get_result_queue().put(sql)
-            return False
-
-    def val_all(self):
-        self.starttime_val = time.time()
-        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
-        bools, rows = self.mysqlutils.SelectFromDB(sql)
-        for row in rows:
-            try:
-                self.val(row[0], row[1])
-            except:
-                sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
-                self.mysqlutils.ExeSqlToDB(sql)
-
-    def run(self):
-        while True:
-            start_time = time.time()
-            self.get_taiyang_proxy()
-            self.val_all()
-            use_time = int(time.time() - start_time)
-            sleep_time = 100 - use_time
-            print("time sleep {}".format(str(sleep_time)))
-            if sleep_time >= 3:
-                time.sleep(sleep_time)
-
-
-class DetailThreadRun(MThreadingRun):
-    def __init__(self, num):
-        self.down = Kproxy()
-        super(DetailThreadRun, self).__init__(num)
-
-    @ThreadPoolManger.thread_lock
-    def getTask(self, *args, **kwargs):
-        sql = "select proxy,sources from proxyall_v1 where stat=0 ORDER BY `expire_time` DESC limit 1000"
-        bools, rows = self.down.mysqlutils.SelectFromDB(sql)
-        return rows
-
-    @ThreadPoolManger.thread_lock
-    def getTask2(self, *args, **kwargs):
-        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `expire_time` DESC limit 1000"
-        bools, rows = self.down.mysqlutils.SelectFromDB(sql)
-        return rows
-
-    def setTask(self, results=None, *args, **kwargs):
-        if not results:
-            return self.BREAK
-        for row in results:
-            self.add_job(self.func, row[0], row[1])
-        rows = self.getTask2()
-        for row in rows:
-            self.add_job(self.func, row[0], row[1])
-        time.sleep(60*2)
-        return self.BREAK
-
-    @ThreadPoolManger.thread_lock
-    def dealresult(self, *args, **kwargs):
-        # for sql in self.results:
-        #     self.down.mysqlutils.ExeSqlToDB(sql)
-        self.down.mysqlutils.ExeSqlListToDB(self.results)
-
-    def setProxy(self, proxysList=None):
-        time.sleep(300)
-
-    def is_break(self):
-        return False
-
-    def thread_pool_hook(self, threadinfo: ThreadInfo):
-        # configure the proxy thread not to restart; it restarts by default
-        return {}
-
-    def fun(self, threadval: ThreadVal, *args, **kwargs):
-        proxy, sources = args[0], args[1]
-        try:
-            self.down.val(proxy, sources, threadval)
-        except:
-            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
-            # self.mysqlutils.ExeSqlToDB(sql)
-            threadval.get_result_queue().put(sql)
-
-
-if __name__ == '__main__':
-    down = DetailThreadRun(30)
-    down.run()
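The validation fan-out above relies on the MThreadingRun framework, which this release also removes. A standard-library sketch of the same check-proxies-in-parallel pattern using concurrent.futures (assumes the requests package; the probe URL and result handling are illustrative):

    from concurrent.futures import ThreadPoolExecutor, as_completed
    import requests

    def check_proxy(proxy, timeout=5.0):
        proxies = {"http": "http://%s" % proxy, "https": "http://%s" % proxy}
        try:
            return requests.get("https://www.baidu.com", proxies=proxies, timeout=timeout).status_code == 200
        except requests.RequestException:
            return False

    def check_all(proxy_rows, workers=30):
        """proxy_rows: iterable of (proxy, source) tuples; returns {(proxy, source): bool}."""
        results = {}
        with ThreadPoolExecutor(max_workers=workers) as pool:
            futures = {pool.submit(check_proxy, proxy): (proxy, source) for proxy, source in proxy_rows}
            for fut in as_completed(futures):
                results[futures[fut]] = fut.result()
        return results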