re-common 10.0.39__py3-none-any.whl → 10.0.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (221)
  1. re_common/baselibrary/__init__.py +4 -4
  2. re_common/baselibrary/baseabs/__init__.py +6 -6
  3. re_common/baselibrary/baseabs/baseabs.py +26 -26
  4. re_common/baselibrary/database/mbuilder.py +132 -132
  5. re_common/baselibrary/database/moudle.py +93 -93
  6. re_common/baselibrary/database/msqlite3.py +194 -194
  7. re_common/baselibrary/database/mysql.py +169 -169
  8. re_common/baselibrary/database/sql_factory.py +26 -26
  9. re_common/baselibrary/mthread/MThreadingRun.py +486 -486
  10. re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -349
  11. re_common/baselibrary/mthread/__init__.py +2 -2
  12. re_common/baselibrary/mthread/mythreading.py +695 -695
  13. re_common/baselibrary/pakge_other/socks.py +404 -404
  14. re_common/baselibrary/readconfig/config_factory.py +18 -18
  15. re_common/baselibrary/readconfig/ini_config.py +317 -317
  16. re_common/baselibrary/readconfig/toml_config.py +49 -49
  17. re_common/baselibrary/temporary/envdata.py +36 -36
  18. re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -118
  19. re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -102
  20. re_common/baselibrary/tools/all_requests/mrequest.py +412 -412
  21. re_common/baselibrary/tools/all_requests/requests_request.py +81 -81
  22. re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -31
  23. re_common/baselibrary/tools/contrast_db3.py +123 -123
  24. re_common/baselibrary/tools/copy_file.py +39 -39
  25. re_common/baselibrary/tools/db3_2_sizedb3.py +102 -102
  26. re_common/baselibrary/tools/foreachgz.py +39 -39
  27. re_common/baselibrary/tools/get_attr.py +10 -10
  28. re_common/baselibrary/tools/image_to_pdf.py +61 -61
  29. re_common/baselibrary/tools/java_code_deal.py +139 -139
  30. re_common/baselibrary/tools/javacode.py +79 -79
  31. re_common/baselibrary/tools/mdb_db3.py +48 -48
  32. re_common/baselibrary/tools/merge_file.py +171 -171
  33. re_common/baselibrary/tools/merge_gz_file.py +165 -165
  34. re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -42
  35. re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -42
  36. re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -38
  37. re_common/baselibrary/tools/mongo_tools.py +50 -50
  38. re_common/baselibrary/tools/move_file.py +170 -170
  39. re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -63
  40. re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -354
  41. re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -18
  42. re_common/baselibrary/tools/move_mongo/use_mv.py +93 -93
  43. re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -125
  44. re_common/baselibrary/tools/mpandas/pandas_visualization.py +7 -7
  45. re_common/baselibrary/tools/myparsel.py +104 -104
  46. re_common/baselibrary/tools/rename_dir_file.py +37 -37
  47. re_common/baselibrary/tools/sequoiadb_utils.py +398 -398
  48. re_common/baselibrary/tools/split_line_to_many.py +25 -25
  49. re_common/baselibrary/tools/stringtodicts.py +33 -33
  50. re_common/baselibrary/tools/workwechant_bot.py +84 -84
  51. re_common/baselibrary/utils/baseaiohttp.py +296 -296
  52. re_common/baselibrary/utils/baseaiomysql.py +87 -87
  53. re_common/baselibrary/utils/baseallstep.py +191 -191
  54. re_common/baselibrary/utils/baseavro.py +19 -19
  55. re_common/baselibrary/utils/baseboto3.py +291 -291
  56. re_common/baselibrary/utils/basecsv.py +32 -32
  57. re_common/baselibrary/utils/basedict.py +133 -133
  58. re_common/baselibrary/utils/basedir.py +241 -241
  59. re_common/baselibrary/utils/baseencode.py +351 -351
  60. re_common/baselibrary/utils/baseencoding.py +28 -28
  61. re_common/baselibrary/utils/baseesdsl.py +86 -86
  62. re_common/baselibrary/utils/baseexcel.py +264 -264
  63. re_common/baselibrary/utils/baseexcept.py +109 -109
  64. re_common/baselibrary/utils/basefile.py +654 -654
  65. re_common/baselibrary/utils/baseftp.py +214 -214
  66. re_common/baselibrary/utils/basegzip.py +60 -60
  67. re_common/baselibrary/utils/basehdfs.py +135 -135
  68. re_common/baselibrary/utils/basehttpx.py +268 -268
  69. re_common/baselibrary/utils/baseip.py +87 -87
  70. re_common/baselibrary/utils/basejson.py +2 -2
  71. re_common/baselibrary/utils/baselist.py +32 -32
  72. re_common/baselibrary/utils/basemotor.py +190 -190
  73. re_common/baselibrary/utils/basemssql.py +98 -98
  74. re_common/baselibrary/utils/baseodbc.py +113 -113
  75. re_common/baselibrary/utils/basepandas.py +302 -302
  76. re_common/baselibrary/utils/basepeewee.py +11 -11
  77. re_common/baselibrary/utils/basepika.py +180 -180
  78. re_common/baselibrary/utils/basepydash.py +143 -143
  79. re_common/baselibrary/utils/basepymongo.py +230 -230
  80. re_common/baselibrary/utils/basequeue.py +22 -22
  81. re_common/baselibrary/utils/baserar.py +57 -57
  82. re_common/baselibrary/utils/baserequest.py +279 -279
  83. re_common/baselibrary/utils/baseset.py +8 -8
  84. re_common/baselibrary/utils/basesmb.py +403 -403
  85. re_common/baselibrary/utils/basestring.py +382 -382
  86. re_common/baselibrary/utils/basetime.py +320 -320
  87. re_common/baselibrary/utils/baseurl.py +121 -121
  88. re_common/baselibrary/utils/basezip.py +57 -57
  89. re_common/baselibrary/utils/core/__init__.py +7 -7
  90. re_common/baselibrary/utils/core/bottomutils.py +18 -18
  91. re_common/baselibrary/utils/core/mdeprecated.py +327 -327
  92. re_common/baselibrary/utils/core/mlamada.py +16 -16
  93. re_common/baselibrary/utils/core/msginfo.py +25 -25
  94. re_common/baselibrary/utils/core/requests_core.py +103 -103
  95. re_common/baselibrary/utils/fateadm.py +429 -429
  96. re_common/baselibrary/utils/importfun.py +123 -123
  97. re_common/baselibrary/utils/mfaker.py +57 -57
  98. re_common/baselibrary/utils/my_abc/__init__.py +3 -3
  99. re_common/baselibrary/utils/my_abc/better_abc.py +32 -32
  100. re_common/baselibrary/utils/mylogger.py +414 -414
  101. re_common/baselibrary/utils/myredisclient.py +861 -861
  102. re_common/baselibrary/utils/pipupgrade.py +21 -21
  103. re_common/baselibrary/utils/ringlist.py +85 -85
  104. re_common/baselibrary/utils/version_compare.py +36 -36
  105. re_common/baselibrary/utils/ydmhttp.py +126 -126
  106. re_common/facade/lazy_import.py +11 -11
  107. re_common/facade/loggerfacade.py +25 -25
  108. re_common/facade/mysqlfacade.py +467 -467
  109. re_common/facade/now.py +31 -31
  110. re_common/facade/sqlite3facade.py +257 -257
  111. re_common/facade/use/mq_use_facade.py +83 -83
  112. re_common/facade/use/proxy_use_facade.py +19 -19
  113. re_common/libtest/base_dict_test.py +19 -19
  114. re_common/libtest/baseavro_test.py +13 -13
  115. re_common/libtest/basefile_test.py +14 -14
  116. re_common/libtest/basemssql_test.py +77 -77
  117. re_common/libtest/baseodbc_test.py +7 -7
  118. re_common/libtest/basepandas_test.py +38 -38
  119. re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -14
  120. re_common/libtest/get_attr_test/settings.py +54 -54
  121. re_common/libtest/idencode_test.py +53 -53
  122. re_common/libtest/iniconfig_test.py +35 -35
  123. re_common/libtest/ip_test.py +34 -34
  124. re_common/libtest/merge_file_test.py +20 -20
  125. re_common/libtest/mfaker_test.py +8 -8
  126. re_common/libtest/mm3_test.py +31 -31
  127. re_common/libtest/mylogger_test.py +88 -88
  128. re_common/libtest/myparsel_test.py +27 -27
  129. re_common/libtest/mysql_test.py +151 -151
  130. re_common/libtest/pymongo_test.py +21 -21
  131. re_common/libtest/split_test.py +11 -11
  132. re_common/libtest/sqlite3_merge_test.py +5 -5
  133. re_common/libtest/sqlite3_test.py +34 -34
  134. re_common/libtest/tomlconfig_test.py +30 -30
  135. re_common/libtest/use_tools_test/__init__.py +2 -2
  136. re_common/libtest/user/__init__.py +4 -4
  137. re_common/studio/__init__.py +4 -4
  138. re_common/studio/assignment_expressions.py +36 -36
  139. re_common/studio/mydash/test1.py +18 -18
  140. re_common/studio/pydashstudio/first.py +9 -9
  141. re_common/studio/streamlitstudio/first_app.py +65 -65
  142. re_common/studio/streamlitstudio/uber_pickups.py +23 -23
  143. re_common/studio/test.py +18 -18
  144. re_common/v2/baselibrary/business_utils/BusinessStringUtil.py +235 -220
  145. re_common/v2/baselibrary/business_utils/baseencodeid.py +100 -100
  146. re_common/v2/baselibrary/business_utils/full_doi_path.py +116 -116
  147. re_common/v2/baselibrary/business_utils/rel_tools.py +6 -6
  148. re_common/v2/baselibrary/decorators/utils.py +59 -59
  149. re_common/v2/baselibrary/helpers/search_packge/NearestNeighbors_test.py +105 -105
  150. re_common/v2/baselibrary/helpers/search_packge/fit_text_match.py +253 -253
  151. re_common/v2/baselibrary/helpers/search_packge/scikit_learn_text_matcher.py +260 -260
  152. re_common/v2/baselibrary/helpers/search_packge/test.py +1 -1
  153. re_common/v2/baselibrary/s3object/baseboto3.py +230 -230
  154. re_common/v2/baselibrary/tools/WeChatRobot.py +95 -95
  155. re_common/v2/baselibrary/tools/ac_ahocorasick.py +75 -75
  156. re_common/v2/baselibrary/tools/concurrency.py +35 -35
  157. re_common/v2/baselibrary/tools/data_processer/base.py +53 -53
  158. re_common/v2/baselibrary/tools/data_processer/data_processer.py +497 -508
  159. re_common/v2/baselibrary/tools/data_processer/data_reader.py +187 -187
  160. re_common/v2/baselibrary/tools/data_processer/data_writer.py +38 -38
  161. re_common/v2/baselibrary/tools/dict_tools.py +44 -44
  162. re_common/v2/baselibrary/tools/dolphinscheduler.py +187 -187
  163. re_common/v2/baselibrary/tools/hdfs_base_processor.py +204 -204
  164. re_common/v2/baselibrary/tools/hdfs_bulk_processor.py +67 -67
  165. re_common/v2/baselibrary/tools/hdfs_data_processer.py +338 -338
  166. re_common/v2/baselibrary/tools/hdfs_line_processor.py +74 -74
  167. re_common/v2/baselibrary/tools/list_tools.py +69 -69
  168. re_common/v2/baselibrary/tools/resume_tracker.py +94 -94
  169. re_common/v2/baselibrary/tools/search_hash_tools.py +54 -54
  170. re_common/v2/baselibrary/tools/text_matcher.py +326 -326
  171. re_common/v2/baselibrary/tools/tree_processor/__init__.py +0 -0
  172. re_common/v2/baselibrary/tools/tree_processor/builder.py +25 -0
  173. re_common/v2/baselibrary/tools/tree_processor/node.py +13 -0
  174. re_common/v2/baselibrary/tools/unionfind_tools.py +60 -60
  175. re_common/v2/baselibrary/utils/BusinessStringUtil.py +196 -196
  176. re_common/v2/baselibrary/utils/api_net_utils.py +270 -270
  177. re_common/v2/baselibrary/utils/author_smi.py +361 -361
  178. re_common/v2/baselibrary/utils/base_string_similarity.py +158 -158
  179. re_common/v2/baselibrary/utils/basedict.py +37 -37
  180. re_common/v2/baselibrary/utils/basehdfs.py +163 -163
  181. re_common/v2/baselibrary/utils/basepika.py +180 -180
  182. re_common/v2/baselibrary/utils/basetime.py +94 -77
  183. re_common/v2/baselibrary/utils/db.py +174 -156
  184. re_common/v2/baselibrary/utils/elasticsearch.py +46 -0
  185. re_common/v2/baselibrary/utils/json_cls.py +16 -16
  186. re_common/v2/baselibrary/utils/mq.py +83 -83
  187. re_common/v2/baselibrary/utils/n_ary_expression_tree.py +243 -243
  188. re_common/v2/baselibrary/utils/string_bool.py +187 -186
  189. re_common/v2/baselibrary/utils/string_clear.py +246 -246
  190. re_common/v2/baselibrary/utils/string_smi.py +18 -18
  191. re_common/v2/baselibrary/utils/stringutils.py +312 -271
  192. re_common/vip/base_step_process.py +11 -11
  193. re_common/vip/baseencodeid.py +90 -90
  194. re_common/vip/changetaskname.py +28 -28
  195. re_common/vip/core_var.py +24 -24
  196. re_common/vip/mmh3Hash.py +89 -89
  197. re_common/vip/proxy/allproxys.py +127 -127
  198. re_common/vip/proxy/allproxys_thread.py +159 -159
  199. re_common/vip/proxy/cnki_proxy.py +153 -153
  200. re_common/vip/proxy/kuaidaili.py +87 -87
  201. re_common/vip/proxy/proxy_all.py +113 -113
  202. re_common/vip/proxy/update_kuaidaili_0.py +42 -42
  203. re_common/vip/proxy/wanfang_proxy.py +152 -152
  204. re_common/vip/proxy/wp_proxy_all.py +181 -181
  205. re_common/vip/read_rawid_to_txt.py +91 -91
  206. re_common/vip/title/__init__.py +5 -5
  207. re_common/vip/title/transform/TransformBookTitleToZt.py +125 -125
  208. re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -139
  209. re_common/vip/title/transform/TransformCstadTitleToZt.py +195 -195
  210. re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -203
  211. re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -132
  212. re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -114
  213. re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -135
  214. re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -135
  215. re_common/vip/title/transform/__init__.py +10 -10
  216. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/LICENSE +201 -201
  217. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/METADATA +16 -16
  218. re_common-10.0.41.dist-info/RECORD +252 -0
  219. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/WHEEL +1 -1
  220. re_common-10.0.39.dist-info/RECORD +0 -248
  221. {re_common-10.0.39.dist-info → re_common-10.0.41.dist-info}/top_level.txt +0 -0
@@ -1,127 +1,127 @@
- import json
- ###########################################
- # Import the shared base package from the same project
- import os
- import sys
- import time
-
- import requests
-
- filepath = os.path.abspath(__file__)
- pathlist = filepath.split(os.sep)
- pathlist = pathlist[:-4]
- TopPath = os.sep.join(pathlist)
- sys.path.insert(0, TopPath)
- print(TopPath)
- ############################################
-
- from re_common.baselibrary.utils.basedir import BaseDir
- from re_common.baselibrary.utils.basefile import BaseFile
- from re_common.baselibrary.utils.baserequest import BaseRequest
- from re_common.facade.lazy_import import get_streamlogger
- from re_common.facade.mysqlfacade import MysqlUtiles
- from re_common.baselibrary.utils.basetime import BaseTime
-
-
- class Kproxy(object):
-     def __init__(self):
-         self.cur_path = BaseDir.get_file_dir_absolute(__file__)
-         self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
-         self.logger = get_streamlogger()
-         self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
-         self.bsrequest = BaseRequest()
-         self.starttime = time.time()
-         self.starttime_val = time.time()
-
-     def get_taiyang_proxy(self, num=6):
-         """
-         Fetch Taiyang proxies (3 per minute)
-         :param num:
-         :return:
-         """
-         self.starttime = time.time()
-         # url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
-         url = "http://http.tiqu.alibabaapi.com/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
-         BoolResult, errString, r = self.bsrequest.base_request(url,
-                                                                timeout=30
-                                                                )
-         if BoolResult:
-             dicts = json.loads(r.text)
-             for item in dicts["data"]:
-                 proxy = item["ip"] + ":" + item["port"]
-                 sources = "taiyang"
-                 expire_time = item["expire_time"]
-                 sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
-                     proxy, sources, expire_time, expire_time)
-                 self.mysqlutils.ExeSqlToDB(sql)
-         else:
-             self.logger.error("获取失败")
-
-     def get_taiyang_num(self):
-         """
-         Fetch Taiyang proxies (3 per minute)
-         :param num:
-         :return:
-         """
-         url = "http://ty-http-d.hamir.net/index/index/get_my_package_balance?neek=521821&appkey=1fcba6de94f71561ba3007f4c24ca0b1&ac=59105"
-         BoolResult, errString, r = self.bsrequest.base_request(url,
-                                                                timeout=30
-                                                                )
-         if BoolResult:
-             dicts = json.loads(r.text)
-             return dicts["data"]["package_balance"]
-         else:
-             self.logger.error("获取失败")
-
-     def val(self, proxy, sources):
-         # Request target URL
-         targetUrl = "https://www.baidu.com"
-         proxies = {
-             "http": "http://%s" % proxy,
-             "https": "http://%s" % proxy
-         }
-         resp = requests.get(targetUrl, proxies=proxies, timeout=5)
-         if resp.status_code == 200:
-             print(resp.status_code)
-             return True
-         else:
-             sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
-             self.mysqlutils.ExeSqlToDB(sql)
-             return False
-
-     def val_all(self):
-         self.starttime_val = time.time()
-         sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
-         bools, rows = self.mysqlutils.SelectFromDB(sql)
-         for row in rows:
-             try:
-                 self.val(row[0], row[1])
-             except:
-                 sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
-                 self.mysqlutils.ExeSqlToDB(sql)
-
-     def run(self):
-         sleep_time = 0
-         while True:
-             num = 10
-             if sleep_time < 0:
-                 print("time sleep {}".format(str(100 + sleep_time)))
-                 if 100 + sleep_time > 0:
-                     time.sleep(100 + sleep_time)
-                 num = num + 10
-             nowtime = BaseTime().get_beijin_date_strins(format="%H%M%S")
-             print(nowtime)
-             if "133700" <= nowtime <= "134700":
-                 num = Kproxy().get_taiyang_num()
-             start_time = time.time()
-             self.get_taiyang_proxy(num=num)
-             self.val_all()
-             use_time = int(time.time() - start_time)
-             sleep_time = 100 - use_time
-             print("time sleep {}".format(str(sleep_time)))
-             if sleep_time >= 3:
-                 time.sleep(sleep_time)
-
-
- if __name__ == "__main__":
-     Kproxy().run()
+ import json
+ ###########################################
+ # Import the shared base package from the same project
+ import os
+ import sys
+ import time
+
+ import requests
+
+ filepath = os.path.abspath(__file__)
+ pathlist = filepath.split(os.sep)
+ pathlist = pathlist[:-4]
+ TopPath = os.sep.join(pathlist)
+ sys.path.insert(0, TopPath)
+ print(TopPath)
+ ############################################
+
+ from re_common.baselibrary.utils.basedir import BaseDir
+ from re_common.baselibrary.utils.basefile import BaseFile
+ from re_common.baselibrary.utils.baserequest import BaseRequest
+ from re_common.facade.lazy_import import get_streamlogger
+ from re_common.facade.mysqlfacade import MysqlUtiles
+ from re_common.baselibrary.utils.basetime import BaseTime
+
+
+ class Kproxy(object):
+     def __init__(self):
+         self.cur_path = BaseDir.get_file_dir_absolute(__file__)
+         self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
+         self.logger = get_streamlogger()
+         self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
+         self.bsrequest = BaseRequest()
+         self.starttime = time.time()
+         self.starttime_val = time.time()
+
+     def get_taiyang_proxy(self, num=6):
+         """
+         Fetch Taiyang proxies (3 per minute)
+         :param num:
+         :return:
+         """
+         self.starttime = time.time()
+         # url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
+         url = "http://http.tiqu.alibabaapi.com/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
+         BoolResult, errString, r = self.bsrequest.base_request(url,
+                                                                timeout=30
+                                                                )
+         if BoolResult:
+             dicts = json.loads(r.text)
+             for item in dicts["data"]:
+                 proxy = item["ip"] + ":" + item["port"]
+                 sources = "taiyang"
+                 expire_time = item["expire_time"]
+                 sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
+                     proxy, sources, expire_time, expire_time)
+                 self.mysqlutils.ExeSqlToDB(sql)
+         else:
+             self.logger.error("获取失败")
+
+     def get_taiyang_num(self):
+         """
+         Fetch Taiyang proxies (3 per minute)
+         :param num:
+         :return:
+         """
+         url = "http://ty-http-d.hamir.net/index/index/get_my_package_balance?neek=521821&appkey=1fcba6de94f71561ba3007f4c24ca0b1&ac=59105"
+         BoolResult, errString, r = self.bsrequest.base_request(url,
+                                                                timeout=30
+                                                                )
+         if BoolResult:
+             dicts = json.loads(r.text)
+             return dicts["data"]["package_balance"]
+         else:
+             self.logger.error("获取失败")
+
+     def val(self, proxy, sources):
+         # Request target URL
+         targetUrl = "https://www.baidu.com"
+         proxies = {
+             "http": "http://%s" % proxy,
+             "https": "http://%s" % proxy
+         }
+         resp = requests.get(targetUrl, proxies=proxies, timeout=5)
+         if resp.status_code == 200:
+             print(resp.status_code)
+             return True
+         else:
+             sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
+             self.mysqlutils.ExeSqlToDB(sql)
+             return False
+
+     def val_all(self):
+         self.starttime_val = time.time()
+         sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
+         bools, rows = self.mysqlutils.SelectFromDB(sql)
+         for row in rows:
+             try:
+                 self.val(row[0], row[1])
+             except:
+                 sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
+                 self.mysqlutils.ExeSqlToDB(sql)
+
+     def run(self):
+         sleep_time = 0
+         while True:
+             num = 10
+             if sleep_time < 0:
+                 print("time sleep {}".format(str(100 + sleep_time)))
+                 if 100 + sleep_time > 0:
+                     time.sleep(100 + sleep_time)
+                 num = num + 10
+             nowtime = BaseTime().get_beijin_date_strins(format="%H%M%S")
+             print(nowtime)
+             if "133700" <= nowtime <= "134700":
+                 num = Kproxy().get_taiyang_num()
+             start_time = time.time()
+             self.get_taiyang_proxy(num=num)
+             self.val_all()
+             use_time = int(time.time() - start_time)
+             sleep_time = 100 - use_time
+             print("time sleep {}".format(str(sleep_time)))
+             if sleep_time >= 3:
+                 time.sleep(sleep_time)
+
+
+ if __name__ == "__main__":
+     Kproxy().run()
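
For reference, the proxy check in val() above reduces to a single requests call routed through the candidate proxy. A minimal standalone sketch of that check (the function name proxy_is_alive is illustrative and not part of re_common; the target URL and 5-second timeout mirror the code above), with the timeout and connection errors handled explicitly instead of being left to the caller's bare except:

import requests


def proxy_is_alive(proxy: str, target_url: str = "https://www.baidu.com", timeout: int = 5) -> bool:
    # Route both http and https traffic through the candidate proxy ("host:port").
    proxies = {
        "http": "http://%s" % proxy,
        "https": "http://%s" % proxy,
    }
    try:
        resp = requests.get(target_url, proxies=proxies, timeout=timeout)
        return resp.status_code == 200
    except requests.RequestException:
        # Timeouts, refused connections and proxy errors all count as a dead proxy.
        return False
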
@@ -1,159 +1,159 @@
- import json
- ###########################################
- # Import the shared base package from the same project
- import os
- import sys
- import time
-
- import requests
-
- from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun
- from re_common.baselibrary.mthread.mythreading import ThreadInfo, ThreadVal, ThreadPoolManger
-
- filepath = os.path.abspath(__file__)
- pathlist = filepath.split(os.sep)
- pathlist = pathlist[:-4]
- TopPath = os.sep.join(pathlist)
- sys.path.insert(0, TopPath)
- print(TopPath)
- ############################################
-
- from re_common.baselibrary.utils.basedir import BaseDir
- from re_common.baselibrary.utils.basefile import BaseFile
- from re_common.baselibrary.utils.baserequest import BaseRequest
- from re_common.facade.lazy_import import get_streamlogger
- from re_common.facade.mysqlfacade import MysqlUtiles
-
-
- class Kproxy(object):
-     def __init__(self):
-         self.cur_path = BaseDir.get_file_dir_absolute(__file__)
-         self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
-         self.logger = get_streamlogger()
-         self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
-         self.bsrequest = BaseRequest()
-         self.starttime = time.time()
-         self.starttime_val = time.time()
-
-     def get_taiyang_proxy(self, num=6):
-         """
-         Fetch Taiyang proxies (3 per minute)
-         :param num:
-         :return:
-         """
-         self.starttime = time.time()
-         url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=56912&port=1&ts=1&lb=1&pb=45&regions=".format(num)
-         BoolResult, errString, r = self.bsrequest.base_request(url,
-                                                                timeout=30
-                                                                )
-         if BoolResult:
-             dicts = json.loads(r.text)
-             for item in dicts["data"]:
-                 proxy = item["ip"] + ":" + item["port"]
-                 sources = "taiyang"
-                 expire_time = item["expire_time"]
-                 sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
-                     proxy, sources, expire_time, expire_time)
-                 self.mysqlutils.ExeSqlToDB(sql)
-         else:
-             self.logger.error("获取失败")
-
-     def val(self, proxy, sources,threadval):
-         # Request target URL
-         targetUrl = "https://www.baidu.com"
-         proxies = {
-             "http": "http://%s" % proxy,
-             "https": "http://%s" % proxy
-         }
-         resp = requests.get(targetUrl, proxies=proxies, timeout=5)
-         if resp.status_code == 200:
-             print(resp.status_code)
-             sql = "update proxyall_v1 set stat=1 where proxy='%s' and sources='%s';" % (proxy, sources)
-             threadval.get_result_queue().put(sql)
-             return True
-         else:
-             sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
-             # self.mysqlutils.ExeSqlToDB(sql)
-             threadval.get_result_queue().put(sql)
-             return False
-
-     def val_all(self):
-         self.starttime_val = time.time()
-         sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
-         bools, rows = self.mysqlutils.SelectFromDB(sql)
-         for row in rows:
-             try:
-                 self.val(row[0], row[1])
-             except:
-                 sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
-                 self.mysqlutils.ExeSqlToDB(sql)
-
-     def run(self):
-         while True:
-             start_time = time.time()
-             self.get_taiyang_proxy()
-             self.val_all()
-             use_time = int(time.time() - start_time)
-             sleep_time = 100 - use_time
-             print("time sleep {}".format(str(sleep_time)))
-             if sleep_time >= 3:
-                 time.sleep(sleep_time)
-
-
- class DetailThreadRun(MThreadingRun):
-     def __init__(self, num):
-         self.down = Kproxy()
-         super(DetailThreadRun, self).__init__(num)
-
-     @ThreadPoolManger.thread_lock
-     def getTask(self, *args, **kwargs):
-         sql = "select proxy,sources from proxyall_v1 where stat=0 ORDER BY `expire_time` DESC limit 1000"
-         bools, rows = self.down.mysqlutils.SelectFromDB(sql)
-         return rows
-
-     @ThreadPoolManger.thread_lock
-     def getTask2(self, *args, **kwargs):
-         sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `expire_time` DESC limit 1000"
-         bools, rows = self.down.mysqlutils.SelectFromDB(sql)
-         return rows
-
-     def setTask(self, results=None, *args, **kwargs):
-         if not results:
-             return self.BREAK
-         for row in results:
-             self.add_job(self.func, row[0], row[1])
-         rows = self.getTask2()
-         for row in rows:
-             self.add_job(self.func, row[0], row[1])
-         time.sleep(60*2)
-         return self.BREAK
-
-     @ThreadPoolManger.thread_lock
-     def dealresult(self, *args, **kwargs):
-         # for sql in self.results:
-         #     self.down.mysqlutils.ExeSqlToDB(sql)
-         self.down.mysqlutils.ExeSqlListToDB(self.results)
-
-     def setProxy(self, proxysList=None):
-         time.sleep(300)
-
-     def is_break(self):
-         return False
-
-     def thread_pool_hook(self, threadinfo: ThreadInfo):
-         # Do not restart the proxy thread (it restarts by default)
-         return {}
-
-     def fun(self, threadval: ThreadVal, *args, **kwargs):
-         proxy,sources = args[0],args[1]
-         try:
-             self.down.val(proxy, sources, threadval)
-         except:
-             sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
-             # self.mysqlutils.ExeSqlToDB(sql)
-             threadval.get_result_queue().put(sql)
-
-
- if __name__ == '__main__':
-     down = DetailThreadRun(30)
-     down.run()
+ import json
+ ###########################################
+ # Import the shared base package from the same project
+ import os
+ import sys
+ import time
+
+ import requests
+
+ from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun
+ from re_common.baselibrary.mthread.mythreading import ThreadInfo, ThreadVal, ThreadPoolManger
+
+ filepath = os.path.abspath(__file__)
+ pathlist = filepath.split(os.sep)
+ pathlist = pathlist[:-4]
+ TopPath = os.sep.join(pathlist)
+ sys.path.insert(0, TopPath)
+ print(TopPath)
+ ############################################
+
+ from re_common.baselibrary.utils.basedir import BaseDir
+ from re_common.baselibrary.utils.basefile import BaseFile
+ from re_common.baselibrary.utils.baserequest import BaseRequest
+ from re_common.facade.lazy_import import get_streamlogger
+ from re_common.facade.mysqlfacade import MysqlUtiles
+
+
+ class Kproxy(object):
+     def __init__(self):
+         self.cur_path = BaseDir.get_file_dir_absolute(__file__)
+         self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
+         self.logger = get_streamlogger()
+         self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
+         self.bsrequest = BaseRequest()
+         self.starttime = time.time()
+         self.starttime_val = time.time()
+
+     def get_taiyang_proxy(self, num=6):
+         """
+         Fetch Taiyang proxies (3 per minute)
+         :param num:
+         :return:
+         """
+         self.starttime = time.time()
+         url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=56912&port=1&ts=1&lb=1&pb=45&regions=".format(num)
+         BoolResult, errString, r = self.bsrequest.base_request(url,
+                                                                timeout=30
+                                                                )
+         if BoolResult:
+             dicts = json.loads(r.text)
+             for item in dicts["data"]:
+                 proxy = item["ip"] + ":" + item["port"]
+                 sources = "taiyang"
+                 expire_time = item["expire_time"]
+                 sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
+                     proxy, sources, expire_time, expire_time)
+                 self.mysqlutils.ExeSqlToDB(sql)
+         else:
+             self.logger.error("获取失败")
+
+     def val(self, proxy, sources,threadval):
+         # Request target URL
+         targetUrl = "https://www.baidu.com"
+         proxies = {
+             "http": "http://%s" % proxy,
+             "https": "http://%s" % proxy
+         }
+         resp = requests.get(targetUrl, proxies=proxies, timeout=5)
+         if resp.status_code == 200:
+             print(resp.status_code)
+             sql = "update proxyall_v1 set stat=1 where proxy='%s' and sources='%s';" % (proxy, sources)
+             threadval.get_result_queue().put(sql)
+             return True
+         else:
+             sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
+             # self.mysqlutils.ExeSqlToDB(sql)
+             threadval.get_result_queue().put(sql)
+             return False
+
+     def val_all(self):
+         self.starttime_val = time.time()
+         sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
+         bools, rows = self.mysqlutils.SelectFromDB(sql)
+         for row in rows:
+             try:
+                 self.val(row[0], row[1])
+             except:
+                 sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
+                 self.mysqlutils.ExeSqlToDB(sql)
+
+     def run(self):
+         while True:
+             start_time = time.time()
+             self.get_taiyang_proxy()
+             self.val_all()
+             use_time = int(time.time() - start_time)
+             sleep_time = 100 - use_time
+             print("time sleep {}".format(str(sleep_time)))
+             if sleep_time >= 3:
+                 time.sleep(sleep_time)
+
+
+ class DetailThreadRun(MThreadingRun):
+     def __init__(self, num):
+         self.down = Kproxy()
+         super(DetailThreadRun, self).__init__(num)
+
+     @ThreadPoolManger.thread_lock
+     def getTask(self, *args, **kwargs):
+         sql = "select proxy,sources from proxyall_v1 where stat=0 ORDER BY `expire_time` DESC limit 1000"
+         bools, rows = self.down.mysqlutils.SelectFromDB(sql)
+         return rows
+
+     @ThreadPoolManger.thread_lock
+     def getTask2(self, *args, **kwargs):
+         sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `expire_time` DESC limit 1000"
+         bools, rows = self.down.mysqlutils.SelectFromDB(sql)
+         return rows
+
+     def setTask(self, results=None, *args, **kwargs):
+         if not results:
+             return self.BREAK
+         for row in results:
+             self.add_job(self.func, row[0], row[1])
+         rows = self.getTask2()
+         for row in rows:
+             self.add_job(self.func, row[0], row[1])
+         time.sleep(60*2)
+         return self.BREAK
+
+     @ThreadPoolManger.thread_lock
+     def dealresult(self, *args, **kwargs):
+         # for sql in self.results:
+         #     self.down.mysqlutils.ExeSqlToDB(sql)
+         self.down.mysqlutils.ExeSqlListToDB(self.results)
+
+     def setProxy(self, proxysList=None):
+         time.sleep(300)
+
+     def is_break(self):
+         return False
+
+     def thread_pool_hook(self, threadinfo: ThreadInfo):
+         # Do not restart the proxy thread (it restarts by default)
+         return {}
+
+     def fun(self, threadval: ThreadVal, *args, **kwargs):
+         proxy,sources = args[0],args[1]
+         try:
+             self.down.val(proxy, sources, threadval)
+         except:
+             sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
+             # self.mysqlutils.ExeSqlToDB(sql)
+             threadval.get_result_queue().put(sql)
+
+
+ if __name__ == '__main__':
+     down = DetailThreadRun(30)
+     down.run()
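
Both files in these hunks build their insert/update statements by interpolating proxy strings directly into SQL with %. As a hedged aside (a generic DB-API sketch using pymysql, not the MysqlUtiles.ExeSqlToDB interface the package actually uses; the connection values and function name are placeholders), the same status update can be written with bound parameters so the driver handles quoting:

import pymysql

# Placeholder connection settings; substitute real credentials.
conn = pymysql.connect(host="127.0.0.1", user="user", password="password", database="proxydb")


def mark_proxy_stat(proxy: str, sources: str, stat: int) -> None:
    # Let the driver escape the values instead of formatting them into the SQL string.
    sql = "UPDATE proxyall_v1 SET stat=%s WHERE proxy=%s AND sources=%s"
    with conn.cursor() as cur:
        cur.execute(sql, (stat, proxy, sources))
    conn.commit()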