re-common 10.0.37__py3-none-any.whl → 10.0.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- re_common/baselibrary/__init__.py +4 -4
- re_common/baselibrary/baseabs/__init__.py +6 -6
- re_common/baselibrary/baseabs/baseabs.py +26 -26
- re_common/baselibrary/database/mbuilder.py +132 -132
- re_common/baselibrary/database/moudle.py +93 -93
- re_common/baselibrary/database/msqlite3.py +194 -194
- re_common/baselibrary/database/mysql.py +169 -169
- re_common/baselibrary/database/sql_factory.py +26 -26
- re_common/baselibrary/mthread/MThreadingRun.py +486 -486
- re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -349
- re_common/baselibrary/mthread/__init__.py +2 -2
- re_common/baselibrary/mthread/mythreading.py +695 -695
- re_common/baselibrary/pakge_other/socks.py +404 -404
- re_common/baselibrary/readconfig/config_factory.py +18 -18
- re_common/baselibrary/readconfig/ini_config.py +317 -317
- re_common/baselibrary/readconfig/toml_config.py +49 -49
- re_common/baselibrary/temporary/envdata.py +36 -36
- re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -118
- re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -102
- re_common/baselibrary/tools/all_requests/mrequest.py +412 -412
- re_common/baselibrary/tools/all_requests/requests_request.py +81 -81
- re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -31
- re_common/baselibrary/tools/contrast_db3.py +123 -123
- re_common/baselibrary/tools/copy_file.py +39 -39
- re_common/baselibrary/tools/db3_2_sizedb3.py +102 -102
- re_common/baselibrary/tools/foreachgz.py +39 -39
- re_common/baselibrary/tools/get_attr.py +10 -10
- re_common/baselibrary/tools/image_to_pdf.py +61 -61
- re_common/baselibrary/tools/java_code_deal.py +139 -139
- re_common/baselibrary/tools/javacode.py +79 -79
- re_common/baselibrary/tools/mdb_db3.py +48 -48
- re_common/baselibrary/tools/merge_file.py +171 -171
- re_common/baselibrary/tools/merge_gz_file.py +165 -165
- re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -42
- re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -42
- re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -38
- re_common/baselibrary/tools/mongo_tools.py +50 -50
- re_common/baselibrary/tools/move_file.py +170 -170
- re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -63
- re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -354
- re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -18
- re_common/baselibrary/tools/move_mongo/use_mv.py +93 -93
- re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -125
- re_common/baselibrary/tools/mpandas/pandas_visualization.py +7 -7
- re_common/baselibrary/tools/myparsel.py +104 -104
- re_common/baselibrary/tools/rename_dir_file.py +37 -37
- re_common/baselibrary/tools/sequoiadb_utils.py +398 -398
- re_common/baselibrary/tools/split_line_to_many.py +25 -25
- re_common/baselibrary/tools/stringtodicts.py +33 -33
- re_common/baselibrary/tools/workwechant_bot.py +84 -84
- re_common/baselibrary/utils/baseaiohttp.py +296 -296
- re_common/baselibrary/utils/baseaiomysql.py +87 -87
- re_common/baselibrary/utils/baseallstep.py +191 -191
- re_common/baselibrary/utils/baseavro.py +19 -19
- re_common/baselibrary/utils/baseboto3.py +291 -291
- re_common/baselibrary/utils/basecsv.py +32 -32
- re_common/baselibrary/utils/basedict.py +133 -133
- re_common/baselibrary/utils/basedir.py +241 -241
- re_common/baselibrary/utils/baseencode.py +351 -351
- re_common/baselibrary/utils/baseencoding.py +28 -28
- re_common/baselibrary/utils/baseesdsl.py +86 -86
- re_common/baselibrary/utils/baseexcel.py +264 -264
- re_common/baselibrary/utils/baseexcept.py +109 -109
- re_common/baselibrary/utils/basefile.py +654 -654
- re_common/baselibrary/utils/baseftp.py +214 -214
- re_common/baselibrary/utils/basegzip.py +60 -60
- re_common/baselibrary/utils/basehdfs.py +135 -135
- re_common/baselibrary/utils/basehttpx.py +268 -268
- re_common/baselibrary/utils/baseip.py +87 -87
- re_common/baselibrary/utils/basejson.py +2 -2
- re_common/baselibrary/utils/baselist.py +32 -32
- re_common/baselibrary/utils/basemotor.py +190 -190
- re_common/baselibrary/utils/basemssql.py +98 -98
- re_common/baselibrary/utils/baseodbc.py +113 -113
- re_common/baselibrary/utils/basepandas.py +302 -302
- re_common/baselibrary/utils/basepeewee.py +11 -11
- re_common/baselibrary/utils/basepika.py +180 -180
- re_common/baselibrary/utils/basepydash.py +143 -143
- re_common/baselibrary/utils/basepymongo.py +230 -230
- re_common/baselibrary/utils/basequeue.py +22 -22
- re_common/baselibrary/utils/baserar.py +57 -57
- re_common/baselibrary/utils/baserequest.py +279 -279
- re_common/baselibrary/utils/baseset.py +8 -8
- re_common/baselibrary/utils/basesmb.py +403 -403
- re_common/baselibrary/utils/basestring.py +382 -382
- re_common/baselibrary/utils/basetime.py +320 -320
- re_common/baselibrary/utils/baseurl.py +121 -121
- re_common/baselibrary/utils/basezip.py +57 -57
- re_common/baselibrary/utils/core/__init__.py +7 -7
- re_common/baselibrary/utils/core/bottomutils.py +18 -18
- re_common/baselibrary/utils/core/mdeprecated.py +327 -327
- re_common/baselibrary/utils/core/mlamada.py +16 -16
- re_common/baselibrary/utils/core/msginfo.py +25 -25
- re_common/baselibrary/utils/core/requests_core.py +103 -103
- re_common/baselibrary/utils/fateadm.py +429 -429
- re_common/baselibrary/utils/importfun.py +123 -123
- re_common/baselibrary/utils/mfaker.py +57 -57
- re_common/baselibrary/utils/my_abc/__init__.py +3 -3
- re_common/baselibrary/utils/my_abc/better_abc.py +32 -32
- re_common/baselibrary/utils/mylogger.py +414 -414
- re_common/baselibrary/utils/myredisclient.py +861 -861
- re_common/baselibrary/utils/pipupgrade.py +21 -21
- re_common/baselibrary/utils/ringlist.py +85 -85
- re_common/baselibrary/utils/version_compare.py +36 -36
- re_common/baselibrary/utils/ydmhttp.py +126 -126
- re_common/facade/lazy_import.py +11 -11
- re_common/facade/loggerfacade.py +25 -25
- re_common/facade/mysqlfacade.py +467 -467
- re_common/facade/now.py +31 -31
- re_common/facade/sqlite3facade.py +257 -257
- re_common/facade/use/mq_use_facade.py +83 -83
- re_common/facade/use/proxy_use_facade.py +19 -19
- re_common/libtest/base_dict_test.py +19 -19
- re_common/libtest/baseavro_test.py +13 -13
- re_common/libtest/basefile_test.py +14 -14
- re_common/libtest/basemssql_test.py +77 -77
- re_common/libtest/baseodbc_test.py +7 -7
- re_common/libtest/basepandas_test.py +38 -38
- re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -14
- re_common/libtest/get_attr_test/settings.py +54 -54
- re_common/libtest/idencode_test.py +53 -53
- re_common/libtest/iniconfig_test.py +35 -35
- re_common/libtest/ip_test.py +34 -34
- re_common/libtest/merge_file_test.py +20 -20
- re_common/libtest/mfaker_test.py +8 -8
- re_common/libtest/mm3_test.py +31 -31
- re_common/libtest/mylogger_test.py +88 -88
- re_common/libtest/myparsel_test.py +27 -27
- re_common/libtest/mysql_test.py +151 -151
- re_common/libtest/pymongo_test.py +21 -21
- re_common/libtest/split_test.py +11 -11
- re_common/libtest/sqlite3_merge_test.py +5 -5
- re_common/libtest/sqlite3_test.py +34 -34
- re_common/libtest/tomlconfig_test.py +30 -30
- re_common/libtest/use_tools_test/__init__.py +2 -2
- re_common/libtest/user/__init__.py +4 -4
- re_common/studio/__init__.py +4 -4
- re_common/studio/assignment_expressions.py +36 -36
- re_common/studio/mydash/test1.py +18 -18
- re_common/studio/pydashstudio/first.py +9 -9
- re_common/studio/streamlitstudio/first_app.py +65 -65
- re_common/studio/streamlitstudio/uber_pickups.py +23 -23
- re_common/studio/test.py +18 -18
- re_common/v2/baselibrary/business_utils/BusinessStringUtil.py +219 -219
- re_common/v2/baselibrary/business_utils/baseencodeid.py +100 -100
- re_common/v2/baselibrary/business_utils/full_doi_path.py +116 -116
- re_common/v2/baselibrary/business_utils/rel_tools.py +6 -6
- re_common/v2/baselibrary/decorators/utils.py +59 -59
- re_common/v2/baselibrary/helpers/search_packge/NearestNeighbors_test.py +105 -105
- re_common/v2/baselibrary/helpers/search_packge/fit_text_match.py +253 -253
- re_common/v2/baselibrary/helpers/search_packge/scikit_learn_text_matcher.py +260 -260
- re_common/v2/baselibrary/helpers/search_packge/test.py +1 -1
- re_common/v2/baselibrary/s3object/baseboto3.py +230 -230
- re_common/v2/baselibrary/tools/WeChatRobot.py +95 -95
- re_common/v2/baselibrary/tools/ac_ahocorasick.py +75 -75
- re_common/v2/baselibrary/tools/concurrency.py +35 -35
- re_common/v2/baselibrary/tools/data_processer/base.py +53 -53
- re_common/v2/baselibrary/tools/data_processer/data_processer.py +508 -508
- re_common/v2/baselibrary/tools/data_processer/data_reader.py +187 -187
- re_common/v2/baselibrary/tools/data_processer/data_writer.py +38 -38
- re_common/v2/baselibrary/tools/dict_tools.py +44 -44
- re_common/v2/baselibrary/tools/dolphinscheduler.py +187 -187
- re_common/v2/baselibrary/tools/hdfs_base_processor.py +204 -204
- re_common/v2/baselibrary/tools/hdfs_bulk_processor.py +67 -67
- re_common/v2/baselibrary/tools/hdfs_data_processer.py +338 -338
- re_common/v2/baselibrary/tools/hdfs_line_processor.py +74 -74
- re_common/v2/baselibrary/tools/list_tools.py +69 -69
- re_common/v2/baselibrary/tools/resume_tracker.py +94 -94
- re_common/v2/baselibrary/tools/search_hash_tools.py +54 -54
- re_common/v2/baselibrary/tools/text_matcher.py +326 -326
- re_common/v2/baselibrary/tools/unionfind_tools.py +60 -60
- re_common/v2/baselibrary/utils/BusinessStringUtil.py +196 -196
- re_common/v2/baselibrary/utils/api_net_utils.py +270 -270
- re_common/v2/baselibrary/utils/author_smi.py +361 -361
- re_common/v2/baselibrary/utils/base_string_similarity.py +158 -158
- re_common/v2/baselibrary/utils/basedict.py +37 -37
- re_common/v2/baselibrary/utils/basehdfs.py +163 -163
- re_common/v2/baselibrary/utils/basepika.py +180 -180
- re_common/v2/baselibrary/utils/basetime.py +77 -77
- re_common/v2/baselibrary/utils/db.py +156 -156
- re_common/v2/baselibrary/utils/json_cls.py +16 -16
- re_common/v2/baselibrary/utils/mq.py +83 -83
- re_common/v2/baselibrary/utils/n_ary_expression_tree.py +243 -243
- re_common/v2/baselibrary/utils/string_bool.py +186 -186
- re_common/v2/baselibrary/utils/string_clear.py +246 -246
- re_common/v2/baselibrary/utils/string_smi.py +18 -18
- re_common/v2/baselibrary/utils/stringutils.py +271 -278
- re_common/vip/base_step_process.py +11 -11
- re_common/vip/baseencodeid.py +90 -90
- re_common/vip/changetaskname.py +28 -28
- re_common/vip/core_var.py +24 -24
- re_common/vip/mmh3Hash.py +89 -89
- re_common/vip/proxy/allproxys.py +127 -127
- re_common/vip/proxy/allproxys_thread.py +159 -159
- re_common/vip/proxy/cnki_proxy.py +153 -153
- re_common/vip/proxy/kuaidaili.py +87 -87
- re_common/vip/proxy/proxy_all.py +113 -113
- re_common/vip/proxy/update_kuaidaili_0.py +42 -42
- re_common/vip/proxy/wanfang_proxy.py +152 -152
- re_common/vip/proxy/wp_proxy_all.py +181 -181
- re_common/vip/read_rawid_to_txt.py +91 -91
- re_common/vip/title/__init__.py +5 -5
- re_common/vip/title/transform/TransformBookTitleToZt.py +125 -125
- re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -139
- re_common/vip/title/transform/TransformCstadTitleToZt.py +195 -195
- re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -203
- re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -132
- re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -114
- re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -135
- re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -135
- re_common/vip/title/transform/__init__.py +10 -10
- {re_common-10.0.37.dist-info → re_common-10.0.39.dist-info}/LICENSE +201 -201
- {re_common-10.0.37.dist-info → re_common-10.0.39.dist-info}/METADATA +16 -16
- re_common-10.0.39.dist-info/RECORD +248 -0
- {re_common-10.0.37.dist-info → re_common-10.0.39.dist-info}/WHEEL +1 -1
- re_common-10.0.37.dist-info/RECORD +0 -248
- {re_common-10.0.37.dist-info → re_common-10.0.39.dist-info}/top_level.txt +0 -0
re_common/vip/proxy/proxy_all.py
CHANGED
|
@@ -1,113 +1,113 @@
|
|
|
1
|
-
import re
|
|
2
|
-
|
|
3
|
-
from bs4 import BeautifulSoup
|
|
4
|
-
|
|
5
|
-
from re_common.baselibrary.utils.baserequest import BaseRequest
|
|
6
|
-
from re_common.baselibrary.utils.baseurl import BaseUrl
|
|
7
|
-
from re_common.baselibrary.utils.myredisclient import MyRedis
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
class ProxyAll(object):
|
|
11
|
-
|
|
12
|
-
def __init__(self, config="./db.ini"):
|
|
13
|
-
self.config = config
|
|
14
|
-
self.myredis = MyRedis(self.config)
|
|
15
|
-
self.myredis.set_redis_from_config()
|
|
16
|
-
self.myredis.conn_redis()
|
|
17
|
-
self.Headers = {
|
|
18
|
-
'Accept': '*/*',
|
|
19
|
-
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
|
|
20
|
-
}
|
|
21
|
-
self.UserAgent = 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
|
|
22
|
-
self.baserequest = BaseRequest()
|
|
23
|
-
|
|
24
|
-
def get_redis_all(self):
|
|
25
|
-
return self.myredis.getDataFromRedis()
|
|
26
|
-
|
|
27
|
-
def getProxyFromMimvp(self, num=600):
|
|
28
|
-
r""" 从 http://proxy.mimvp.com/ 获取代理地址 """
|
|
29
|
-
# http://proxy.mimvp.com/api/fetch.php?orderid=860160414114016557&num=100&http_type=1&anonymous=3&ping_time=1&transfer_time=5
|
|
30
|
-
url = 'http://proxy.mimvp.com/api/fetch.php'
|
|
31
|
-
dicts = {
|
|
32
|
-
"orderid": "860161011165812474",
|
|
33
|
-
"num": num,
|
|
34
|
-
"http_type": "1", # 协议类型(http)
|
|
35
|
-
"anonymous": "2,3,5", # 提取透明,匿名+欺骗,高匿
|
|
36
|
-
"ping_time": "1", # 响应时间
|
|
37
|
-
"transfer_time": "5", # 传输速度
|
|
38
|
-
}
|
|
39
|
-
url = BaseUrl.dicts_to_url(dicts, url=url)
|
|
40
|
-
|
|
41
|
-
proxyPool = set()
|
|
42
|
-
|
|
43
|
-
BoolResult, errString, r = self.baserequest.base_request(url, headers=self.Headers, timeout=10)
|
|
44
|
-
if not BoolResult:
|
|
45
|
-
return proxyPool
|
|
46
|
-
|
|
47
|
-
lst = r.text.split('\n')
|
|
48
|
-
for line in lst:
|
|
49
|
-
line = line.strip()
|
|
50
|
-
if line.count('.') == 3:
|
|
51
|
-
proxyPool.add(line)
|
|
52
|
-
return proxyPool
|
|
53
|
-
|
|
54
|
-
# 从 http://tpv.daxiangdaili.com 获取代理地址,<- 大象代理
|
|
55
|
-
# http://tpv.daxiangdaili.com/ip/?tid=556923006054759&num=1000
|
|
56
|
-
def getProxyFromDaxiang(self, num):
|
|
57
|
-
url = 'http://tpv.daxiangdaili.com/ip/'
|
|
58
|
-
dicts = {
|
|
59
|
-
"tid": "556923006054759",
|
|
60
|
-
"num": num,
|
|
61
|
-
# "filter":"on",
|
|
62
|
-
"foreign": "all",
|
|
63
|
-
"delay": "5" # 延迟时间
|
|
64
|
-
}
|
|
65
|
-
url = BaseUrl.dicts_to_url(dicts, url=url)
|
|
66
|
-
|
|
67
|
-
proxyPool = set()
|
|
68
|
-
|
|
69
|
-
BoolResult, errString, r = self.baserequest.base_request(url, headers=self.Headers, timeout=10)
|
|
70
|
-
if not BoolResult:
|
|
71
|
-
return proxyPool
|
|
72
|
-
|
|
73
|
-
# print('daili666:' + repr(r.text))
|
|
74
|
-
lst = r.text.split('\n')
|
|
75
|
-
for line in lst:
|
|
76
|
-
line = line.strip()
|
|
77
|
-
if line.count('.') == 3:
|
|
78
|
-
proxyPool.add(line)
|
|
79
|
-
|
|
80
|
-
return proxyPool
|
|
81
|
-
|
|
82
|
-
# 从http://www.xici.net.co/nn/获取代理地址
|
|
83
|
-
# pageNum表示采集第几页
|
|
84
|
-
def getProxyFromXICIOnePage(self, pageNum):
|
|
85
|
-
ProxyPool = set()
|
|
86
|
-
|
|
87
|
-
url = 'http://www.xicidaili.com/nn/'
|
|
88
|
-
if pageNum > 1:
|
|
89
|
-
url += str(pageNum)
|
|
90
|
-
|
|
91
|
-
proxyPool = set()
|
|
92
|
-
BoolResult, errString, r = self.baserequest.base_request(url, headers=self.Headers, timeout=10)
|
|
93
|
-
if not BoolResult:
|
|
94
|
-
return proxyPool
|
|
95
|
-
|
|
96
|
-
html = r.content.decode('utf-8')
|
|
97
|
-
|
|
98
|
-
soup = BeautifulSoup(html, 'html.parser')
|
|
99
|
-
ipTable = soup.find('table', id='ip_list')
|
|
100
|
-
if not ipTable:
|
|
101
|
-
print('Error: not ipTable')
|
|
102
|
-
return set()
|
|
103
|
-
|
|
104
|
-
for trTag in ipTable.find_all('tr'):
|
|
105
|
-
lst = list(trTag.find_all('td'))
|
|
106
|
-
if len(lst) != 10:
|
|
107
|
-
continue
|
|
108
|
-
ip = ''.join(lst[1].stripped_strings)
|
|
109
|
-
port = ''.join(lst[2].stripped_strings)
|
|
110
|
-
item = ip + ':' + port
|
|
111
|
-
if re.match(r'^[\d\.:]+$', item):
|
|
112
|
-
ProxyPool.add(item)
|
|
113
|
-
return ProxyPool
|
|
1
|
+
import re
|
|
2
|
+
|
|
3
|
+
from bs4 import BeautifulSoup
|
|
4
|
+
|
|
5
|
+
from re_common.baselibrary.utils.baserequest import BaseRequest
|
|
6
|
+
from re_common.baselibrary.utils.baseurl import BaseUrl
|
|
7
|
+
from re_common.baselibrary.utils.myredisclient import MyRedis
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ProxyAll(object):
    """Aggregate HTTP proxies from several public providers and from Redis.

    The instance holds a Redis connection (configured from *config*) plus a
    shared BaseRequest session; each ``getProxyFrom*`` method returns a set
    of ``"ip:port"`` strings, or an empty set on any fetch failure.
    """

    def __init__(self, config="./db.ini"):
        # Path of the ini file holding the Redis connection settings.
        self.config = config
        self.myredis = MyRedis(self.config)
        self.myredis.set_redis_from_config()
        self.myredis.conn_redis()
        self.Headers = {
            'Accept': '*/*',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
        }
        self.UserAgent = 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
        self.baserequest = BaseRequest()

    def get_redis_all(self):
        """Return every proxy currently stored in Redis."""
        return self.myredis.getDataFromRedis()

    def _fetch_ip_lines(self, url):
        """Download *url* and collect lines that look like IPv4 proxies.

        Both the mimvp and daxiang APIs answer with one ``ip:port`` per line;
        a line is accepted when it contains exactly three dots (the cheap
        IPv4 check kept from the original implementation).  Returns an empty
        set when the request fails.
        """
        proxy_pool = set()
        ok, err_msg, resp = self.baserequest.base_request(url, headers=self.Headers, timeout=10)
        if not ok:
            return proxy_pool
        for line in resp.text.split('\n'):
            line = line.strip()
            if line.count('.') == 3:
                proxy_pool.add(line)
        return proxy_pool

    def getProxyFromMimvp(self, num=600):
        r"""Fetch up to *num* proxies from http://proxy.mimvp.com/."""
        # Example: http://proxy.mimvp.com/api/fetch.php?orderid=860160414114016557&num=100&http_type=1&anonymous=3&ping_time=1&transfer_time=5
        url = 'http://proxy.mimvp.com/api/fetch.php'
        dicts = {
            "orderid": "860161011165812474",
            "num": num,
            "http_type": "1",       # protocol type (http)
            "anonymous": "2,3,5",   # transparent, anonymous+distorting, elite
            "ping_time": "1",       # response time
            "transfer_time": "5",   # transfer speed
        }
        url = BaseUrl.dicts_to_url(dicts, url=url)
        return self._fetch_ip_lines(url)

    # Fetch proxies from http://tpv.daxiangdaili.com (the "daxiang" provider).
    # http://tpv.daxiangdaili.com/ip/?tid=556923006054759&num=1000
    def getProxyFromDaxiang(self, num):
        """Fetch *num* proxies from the daxiang proxy API."""
        url = 'http://tpv.daxiangdaili.com/ip/'
        dicts = {
            "tid": "556923006054759",
            "num": num,
            # "filter":"on",
            "foreign": "all",
            "delay": "5",  # maximum acceptable delay
        }
        url = BaseUrl.dicts_to_url(dicts, url=url)
        return self._fetch_ip_lines(url)

    # Scrape proxies from http://www.xici.net.co/nn/ ; pageNum selects the page.
    def getProxyFromXICIOnePage(self, pageNum):
        """Scrape one listing page of the xicidaili site.

        Returns a set of ``"ip:port"`` strings; empty when the request fails
        or the expected ``ip_list`` table is missing from the page.
        """
        url = 'http://www.xicidaili.com/nn/'
        if pageNum > 1:
            url += str(pageNum)

        ok, err_msg, resp = self.baserequest.base_request(url, headers=self.Headers, timeout=10)
        if not ok:
            return set()

        html = resp.content.decode('utf-8')
        soup = BeautifulSoup(html, 'html.parser')
        ip_table = soup.find('table', id='ip_list')
        if not ip_table:
            print('Error: not ipTable')
            return set()

        proxy_pool = set()
        for tr_tag in ip_table.find_all('tr'):
            cells = list(tr_tag.find_all('td'))
            if len(cells) != 10:  # data rows of the listing have exactly 10 cells
                continue
            ip = ''.join(cells[1].stripped_strings)
            port = ''.join(cells[2].stripped_strings)
            item = ip + ':' + port
            if re.match(r'^[\d\.:]+$', item):  # guard against stray markup text
                proxy_pool.add(item)
        return proxy_pool
|
|
@@ -1,42 +1,42 @@
|
|
|
1
|
-
import json
|
|
2
|
-
|
|
3
|
-
###########################################
|
|
4
|
-
# 同项目调用基础包
|
|
5
|
-
import os
|
|
6
|
-
import sys
|
|
7
|
-
import time
|
|
8
|
-
|
|
9
|
-
filepath = os.path.abspath(__file__)
|
|
10
|
-
pathlist = filepath.split(os.sep)
|
|
11
|
-
pathlist = pathlist[:-4]
|
|
12
|
-
TopPath = os.sep.join(pathlist)
|
|
13
|
-
sys.path.insert(0, TopPath)
|
|
14
|
-
print(TopPath)
|
|
15
|
-
############################################
|
|
16
|
-
|
|
17
|
-
from re_common.baselibrary.utils.baserequest import BaseRequest
|
|
18
|
-
from re_common.facade.mysqlfacade import MysqlUtiles
|
|
19
|
-
from re_common.baselibrary.utils.basedir import BaseDir
|
|
20
|
-
from re_common.baselibrary.utils.basefile import BaseFile
|
|
21
|
-
from re_common.facade.lazy_import import get_streamlogger
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
class Kproxy(object):
|
|
25
|
-
def __init__(self):
|
|
26
|
-
self.cur_path = BaseDir.get_file_dir_absolute(__file__)
|
|
27
|
-
self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
|
|
28
|
-
self.logger = get_streamlogger()
|
|
29
|
-
self.mysqlutils = MysqlUtiles(self.configfile, "dbkuaidaili", self.logger)
|
|
30
|
-
self.starttime = time.time()
|
|
31
|
-
self.starttime_val = time.time()
|
|
32
|
-
|
|
33
|
-
def run(self):
|
|
34
|
-
sql = "update `kuaidailiproxy` set val_stat=1 WHERE stat=1"
|
|
35
|
-
self.mysqlutils.ExeSqlToDB(sql)
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
if __name__ == "__main__":
|
|
39
|
-
k = Kproxy()
|
|
40
|
-
while True:
|
|
41
|
-
k.run()
|
|
42
|
-
time.sleep(60*2)
|
|
1
|
+
import json
|
|
2
|
+
|
|
3
|
+
###########################################
|
|
4
|
+
# 同项目调用基础包
|
|
5
|
+
import os
|
|
6
|
+
import sys
|
|
7
|
+
import time
|
|
8
|
+
|
|
9
|
+
filepath = os.path.abspath(__file__)
|
|
10
|
+
pathlist = filepath.split(os.sep)
|
|
11
|
+
pathlist = pathlist[:-4]
|
|
12
|
+
TopPath = os.sep.join(pathlist)
|
|
13
|
+
sys.path.insert(0, TopPath)
|
|
14
|
+
print(TopPath)
|
|
15
|
+
############################################
|
|
16
|
+
|
|
17
|
+
from re_common.baselibrary.utils.baserequest import BaseRequest
|
|
18
|
+
from re_common.facade.mysqlfacade import MysqlUtiles
|
|
19
|
+
from re_common.baselibrary.utils.basedir import BaseDir
|
|
20
|
+
from re_common.baselibrary.utils.basefile import BaseFile
|
|
21
|
+
from re_common.facade.lazy_import import get_streamlogger
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class Kproxy(object):
    """Periodically mark every active kuaidaili proxy as pending validation."""

    def __init__(self):
        # Resolve db.ini next to this script so the working directory does not matter.
        self.cur_path = BaseDir.get_file_dir_absolute(__file__)
        self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
        self.logger = get_streamlogger()
        self.mysqlutils = MysqlUtiles(self.configfile, "dbkuaidaili", self.logger)
        self.starttime = time.time()
        self.starttime_val = time.time()

    def run(self):
        """Flag all enabled proxies (stat=1) for re-validation."""
        self.mysqlutils.ExeSqlToDB("update `kuaidailiproxy` set val_stat=1 WHERE stat=1")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
if __name__ == "__main__":
    # Re-flag proxies forever, pausing two minutes between passes.
    kproxy = Kproxy()
    while True:
        kproxy.run()
        time.sleep(60 * 2)
|
|
@@ -1,152 +1,152 @@
|
|
|
1
|
-
###########################################
|
|
2
|
-
# 同项目调用基础包
|
|
3
|
-
import os
|
|
4
|
-
import sys
|
|
5
|
-
import time
|
|
6
|
-
|
|
7
|
-
filepath = os.path.abspath(__file__)
|
|
8
|
-
pathlist = filepath.split(os.sep)
|
|
9
|
-
pathlist = pathlist[:-4]
|
|
10
|
-
TopPath = os.sep.join(pathlist)
|
|
11
|
-
sys.path.insert(0, TopPath)
|
|
12
|
-
print(TopPath)
|
|
13
|
-
############################################
|
|
14
|
-
|
|
15
|
-
from re_common.facade.loggerfacade import get_streamlogger
|
|
16
|
-
from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun
|
|
17
|
-
from re_common.baselibrary.mthread.mythreading import ThreadPoolManger, ThreadInfo
|
|
18
|
-
from re_common.baselibrary.utils.baserequest import BaseRequest
|
|
19
|
-
from re_common.baselibrary.utils.core.requests_core import set_proxy
|
|
20
|
-
from re_common.baselibrary.utils.myredisclient import MyRedis
|
|
21
|
-
from re_common.facade.mysqlfacade import MysqlUtiles
|
|
22
|
-
|
|
23
|
-
from proxy_all import ProxyAll
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
class WanfangProxy(object):
|
|
27
|
-
|
|
28
|
-
def __init__(self, config="./db.ini"):
|
|
29
|
-
self.config = config
|
|
30
|
-
self.logger = get_streamlogger()
|
|
31
|
-
self.mysqlutils = MysqlUtiles(self.config, "dbwanfang", self.logger)
|
|
32
|
-
self.Headers = {
|
|
33
|
-
'Accept': '*/*',
|
|
34
|
-
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
|
|
35
|
-
}
|
|
36
|
-
self.UserAgent = 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
|
|
37
|
-
self.baserequest = BaseRequest()
|
|
38
|
-
|
|
39
|
-
def checking_proxy(self, proxy):
|
|
40
|
-
url = "http://www.wanfangdata.com.cn/index.html"
|
|
41
|
-
proxies = set_proxy(proxy)
|
|
42
|
-
BoolResult, errString, r = self.baserequest.base_request(url,
|
|
43
|
-
headers=self.Headers,
|
|
44
|
-
proxies=proxies,
|
|
45
|
-
marks=["container"],
|
|
46
|
-
timeout=5)
|
|
47
|
-
if BoolResult:
|
|
48
|
-
return proxy
|
|
49
|
-
return ""
|
|
50
|
-
|
|
51
|
-
def get_mysql_proxy(self):
|
|
52
|
-
sql = "SELECT proxy FROM `proxy_pool`"
|
|
53
|
-
bools, rows = self.mysqlutils.SelectFromDB(sql)
|
|
54
|
-
if not bools:
|
|
55
|
-
return set()
|
|
56
|
-
results = set()
|
|
57
|
-
for row in rows:
|
|
58
|
-
results.add(row[0])
|
|
59
|
-
sql = "delete from proxy_pool"
|
|
60
|
-
self.mysqlutils.ExeSqlToDB(sql)
|
|
61
|
-
return results
|
|
62
|
-
|
|
63
|
-
def get_all_proxy(self):
|
|
64
|
-
proxy_set = set()
|
|
65
|
-
proxyall = ProxyAll()
|
|
66
|
-
redisproxy = proxyall.get_redis_all()
|
|
67
|
-
mimvpproxy = proxyall.getProxyFromMimvp(1000)
|
|
68
|
-
daxiangproxy = proxyall.getProxyFromDaxiang(1000)
|
|
69
|
-
xiciproxy1 = proxyall.getProxyFromXICIOnePage(1)
|
|
70
|
-
xiciproxy2 = proxyall.getProxyFromXICIOnePage(2)
|
|
71
|
-
mysqlproxy = self.get_mysql_proxy()
|
|
72
|
-
proxy_set = proxy_set.union(mysqlproxy, redisproxy, mimvpproxy, daxiangproxy, xiciproxy1, xiciproxy2)
|
|
73
|
-
self.logger.info("all proxy size is:{}".format(len(proxy_set)))
|
|
74
|
-
self.proxy_set = proxy_set
|
|
75
|
-
return proxy_set
|
|
76
|
-
|
|
77
|
-
def get_can_use_proxy(self):
|
|
78
|
-
count = 0
|
|
79
|
-
use_proxy = set()
|
|
80
|
-
for proxy in self.proxy_set:
|
|
81
|
-
proxy_ = self.checking_proxy(proxy)
|
|
82
|
-
use_proxy.add(proxy_)
|
|
83
|
-
if len(use_proxy) > 20:
|
|
84
|
-
count = count + 1
|
|
85
|
-
if count == 1:
|
|
86
|
-
sql = "delete from proxy_pool"
|
|
87
|
-
self.mysqlutils.ExeSqlToDB(sql)
|
|
88
|
-
sql = "insert into proxy_pool(proxy) values ('%s')"
|
|
89
|
-
self.mysqlutils.ExeSqlMany(sql, use_proxy)
|
|
90
|
-
use_proxy.clear()
|
|
91
|
-
if len(use_proxy) > 0:
|
|
92
|
-
sql = "insert into proxy_pool(proxy) values ('%s')"
|
|
93
|
-
self.mysqlutils.ExeSqlMany(sql, use_proxy)
|
|
94
|
-
use_proxy.clear()
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
class DetailThreadRun(MThreadingRun):
|
|
98
|
-
def __init__(self, num):
|
|
99
|
-
self.cnki = WanfangProxy()
|
|
100
|
-
super(DetailThreadRun, self).__init__(num)
|
|
101
|
-
self.config = "./db.ini"
|
|
102
|
-
self.myredisset = MyRedis(self.config)
|
|
103
|
-
self.myredisset.set_redis_from_config(sesc="proxysetwanfangjournal")
|
|
104
|
-
self.myredisset.conn_redis()
|
|
105
|
-
self.myredisset.get_pipeline()
|
|
106
|
-
|
|
107
|
-
@ThreadPoolManger.thread_lock
|
|
108
|
-
def getTask(self, *args, **kwargs):
|
|
109
|
-
self.myredisset.delete(self.myredisset.RedisKey)
|
|
110
|
-
proxy_set = self.cnki.get_all_proxy()
|
|
111
|
-
return proxy_set
|
|
112
|
-
|
|
113
|
-
def setTask(self, results=None, *args, **kwargs):
|
|
114
|
-
for url_tasks in results:
|
|
115
|
-
# 将每一页加入任务队列
|
|
116
|
-
self.add_job(self.func, url_tasks)
|
|
117
|
-
time.sleep(10 * 60)
|
|
118
|
-
|
|
119
|
-
@ThreadPoolManger.thread_lock
|
|
120
|
-
def dealresult(self, *args, **kwargs):
|
|
121
|
-
sql = "replace into proxy_pool(`proxy`) values (%s)"
|
|
122
|
-
self.cnki.mysqlutils.ExeSqlMany(sql, self.results)
|
|
123
|
-
self.myredisset.sadd(self.myredisset.RedisKey, set(self.results))
|
|
124
|
-
|
|
125
|
-
def setProxy(self, proxysList=None):
|
|
126
|
-
pass
|
|
127
|
-
|
|
128
|
-
def is_break(self):
|
|
129
|
-
return False
|
|
130
|
-
|
|
131
|
-
def thread_pool_hook(self, threadinfo: ThreadInfo):
|
|
132
|
-
# 设置代理线程不重启,默认会重启
|
|
133
|
-
if threadinfo.get_thread_name() == self.etn.proxythreadname:
|
|
134
|
-
threadinfo.set_is_restart(False)
|
|
135
|
-
if threadinfo.get_thread_name() == self.etn.taskthreadname:
|
|
136
|
-
threadinfo.set_is_restart(False)
|
|
137
|
-
return {}
|
|
138
|
-
|
|
139
|
-
def fun(self, threadval, *args, **kwargs):
|
|
140
|
-
standardid = args[0]
|
|
141
|
-
proxys = self.cnki.checking_proxy(standardid)
|
|
142
|
-
if proxys != "":
|
|
143
|
-
threadval.result_queue.put(proxys)
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
def main():
|
|
147
|
-
down = DetailThreadRun(40)
|
|
148
|
-
down.run()
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
if __name__ == "__main__":
|
|
152
|
-
main()
|
|
1
|
+
###########################################
|
|
2
|
+
# 同项目调用基础包
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
import time
|
|
6
|
+
|
|
7
|
+
filepath = os.path.abspath(__file__)
|
|
8
|
+
pathlist = filepath.split(os.sep)
|
|
9
|
+
pathlist = pathlist[:-4]
|
|
10
|
+
TopPath = os.sep.join(pathlist)
|
|
11
|
+
sys.path.insert(0, TopPath)
|
|
12
|
+
print(TopPath)
|
|
13
|
+
############################################
|
|
14
|
+
|
|
15
|
+
from re_common.facade.loggerfacade import get_streamlogger
|
|
16
|
+
from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun
|
|
17
|
+
from re_common.baselibrary.mthread.mythreading import ThreadPoolManger, ThreadInfo
|
|
18
|
+
from re_common.baselibrary.utils.baserequest import BaseRequest
|
|
19
|
+
from re_common.baselibrary.utils.core.requests_core import set_proxy
|
|
20
|
+
from re_common.baselibrary.utils.myredisclient import MyRedis
|
|
21
|
+
from re_common.facade.mysqlfacade import MysqlUtiles
|
|
22
|
+
|
|
23
|
+
from proxy_all import ProxyAll
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class WanfangProxy(object):
    """Validate candidate proxies against wanfangdata and keep the good ones.

    Working proxies are written into the MySQL table ``proxy_pool`` (section
    "dbwanfang" of the config file).
    """

    def __init__(self, config="./db.ini"):
        self.config = config
        self.logger = get_streamlogger()
        self.mysqlutils = MysqlUtiles(self.config, "dbwanfang", self.logger)
        self.Headers = {
            'Accept': '*/*',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
        }
        self.UserAgent = 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
        self.baserequest = BaseRequest()

    def checking_proxy(self, proxy):
        """Return *proxy* if it can fetch the wanfang home page, else ""."""
        url = "http://www.wanfangdata.com.cn/index.html"
        proxies = set_proxy(proxy)
        BoolResult, errString, r = self.baserequest.base_request(url,
                                                                 headers=self.Headers,
                                                                 proxies=proxies,
                                                                 marks=["container"],  # page must contain this marker
                                                                 timeout=5)
        if BoolResult:
            return proxy
        return ""

    def get_mysql_proxy(self):
        """Drain the ``proxy_pool`` table: return its proxies and clear it."""
        sql = "SELECT proxy FROM `proxy_pool`"
        bools, rows = self.mysqlutils.SelectFromDB(sql)
        if not bools:
            return set()
        results = {row[0] for row in rows}
        # Empty the table so stale entries do not accumulate.
        self.mysqlutils.ExeSqlToDB("delete from proxy_pool")
        return results

    def get_all_proxy(self):
        """Union proxies from every provider; also cached on self.proxy_set."""
        proxyall = ProxyAll()
        redisproxy = proxyall.get_redis_all()
        mimvpproxy = proxyall.getProxyFromMimvp(1000)
        daxiangproxy = proxyall.getProxyFromDaxiang(1000)
        xiciproxy1 = proxyall.getProxyFromXICIOnePage(1)
        xiciproxy2 = proxyall.getProxyFromXICIOnePage(2)
        mysqlproxy = self.get_mysql_proxy()
        proxy_set = set().union(mysqlproxy, redisproxy, mimvpproxy, daxiangproxy, xiciproxy1, xiciproxy2)
        self.logger.info("all proxy size is:{}".format(len(proxy_set)))
        self.proxy_set = proxy_set
        return proxy_set

    def get_can_use_proxy(self):
        """Re-check self.proxy_set and persist working proxies in batches."""
        count = 0
        use_proxy = set()
        for proxy in self.proxy_set:
            proxy_ = self.checking_proxy(proxy)
            # Bug fix: the old code added the "" failure marker as well, which
            # padded the batch counter and inserted empty rows into proxy_pool.
            if proxy_:
                use_proxy.add(proxy_)
            if len(use_proxy) > 20:
                count = count + 1
                if count == 1:
                    # The first batch replaces the previous pool entirely.
                    self.mysqlutils.ExeSqlToDB("delete from proxy_pool")
                # NOTE(review): placeholder is quoted ('%s') — verify against
                # ExeSqlMany's own quoting before changing it.
                sql = "insert into proxy_pool(proxy) values ('%s')"
                self.mysqlutils.ExeSqlMany(sql, use_proxy)
                use_proxy.clear()
        if len(use_proxy) > 0:
            # Flush the final partial batch.
            sql = "insert into proxy_pool(proxy) values ('%s')"
            self.mysqlutils.ExeSqlMany(sql, use_proxy)
            use_proxy.clear()
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class DetailThreadRun(MThreadingRun):
    """Thread-pool driver that validates wanfang proxies in parallel.

    ``getTask`` collects candidate proxies, ``fun`` checks one candidate per
    job, and ``dealresult`` persists the survivors to MySQL and Redis.
    """

    def __init__(self, num):
        self.cnki = WanfangProxy()
        super(DetailThreadRun, self).__init__(num)
        self.config = "./db.ini"
        self.myredisset = MyRedis(self.config)
        self.myredisset.set_redis_from_config(sesc="proxysetwanfangjournal")
        self.myredisset.conn_redis()
        self.myredisset.get_pipeline()

    @ThreadPoolManger.thread_lock
    def getTask(self, *args, **kwargs):
        """Reset the Redis set and return a fresh batch of candidates."""
        self.myredisset.delete(self.myredisset.RedisKey)
        return self.cnki.get_all_proxy()

    def setTask(self, results=None, *args, **kwargs):
        """Queue one validation job per candidate, then back off 10 minutes."""
        for task in results:
            # enqueue each candidate for the worker pool
            self.add_job(self.func, task)
        time.sleep(10 * 60)

    @ThreadPoolManger.thread_lock
    def dealresult(self, *args, **kwargs):
        """Persist validated proxies to MySQL and mirror them into Redis."""
        self.cnki.mysqlutils.ExeSqlMany("replace into proxy_pool(`proxy`) values (%s)", self.results)
        self.myredisset.sadd(self.myredisset.RedisKey, set(self.results))

    def setProxy(self, proxysList=None):
        """No per-thread proxy setup is needed here."""
        pass

    def is_break(self):
        """Run forever; the framework never stops on its own."""
        return False

    def thread_pool_hook(self, threadinfo: ThreadInfo):
        # Proxy and task threads must not be restarted (the default restarts them).
        name = threadinfo.get_thread_name()
        if name in (self.etn.proxythreadname, self.etn.taskthreadname):
            threadinfo.set_is_restart(False)
        return {}

    def fun(self, threadval, *args, **kwargs):
        """Worker body: keep the candidate only if it passes the live check."""
        candidate = args[0]
        alive = self.cnki.checking_proxy(candidate)
        if alive != "":
            threadval.result_queue.put(alive)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def main():
    """Spin up the 40-thread validation pool and run it."""
    DetailThreadRun(40).run()


if __name__ == "__main__":
    main()
|