re-common 2.0.1__py3-none-any.whl → 10.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196) hide show
  1. re_common/baselibrary/__init__.py +4 -0
  2. re_common/baselibrary/baseabs/__init__.py +7 -0
  3. re_common/baselibrary/baseabs/baseabs.py +26 -0
  4. re_common/baselibrary/database/__init__.py +0 -0
  5. re_common/baselibrary/database/mbuilder.py +132 -0
  6. re_common/baselibrary/database/moudle.py +93 -0
  7. re_common/baselibrary/database/msqlite3.py +194 -0
  8. re_common/baselibrary/database/mysql.py +169 -0
  9. re_common/baselibrary/database/sql_factory.py +26 -0
  10. re_common/baselibrary/mthread/MThreadingRun.py +486 -0
  11. re_common/baselibrary/mthread/MThreadingRunEvent.py +349 -0
  12. re_common/baselibrary/mthread/__init__.py +3 -0
  13. re_common/baselibrary/mthread/mythreading.py +695 -0
  14. re_common/baselibrary/pakge_other/__init__.py +0 -0
  15. re_common/baselibrary/pakge_other/socks.py +404 -0
  16. re_common/baselibrary/readconfig/__init__.py +0 -0
  17. re_common/baselibrary/readconfig/config_factory.py +18 -0
  18. re_common/baselibrary/readconfig/ini_config.py +317 -0
  19. re_common/baselibrary/readconfig/toml_config.py +49 -0
  20. re_common/baselibrary/temporary/__init__.py +0 -0
  21. re_common/baselibrary/temporary/envdata.py +36 -0
  22. re_common/baselibrary/tools/__init__.py +0 -0
  23. re_common/baselibrary/tools/all_requests/__init__.py +0 -0
  24. re_common/baselibrary/tools/all_requests/aiohttp_request.py +118 -0
  25. re_common/baselibrary/tools/all_requests/httpx_requet.py +102 -0
  26. re_common/baselibrary/tools/all_requests/mrequest.py +412 -0
  27. re_common/baselibrary/tools/all_requests/requests_request.py +81 -0
  28. re_common/baselibrary/tools/batch_compre/__init__.py +0 -0
  29. re_common/baselibrary/tools/batch_compre/bijiao_batch.py +31 -0
  30. re_common/baselibrary/tools/contrast_db3.py +123 -0
  31. re_common/baselibrary/tools/copy_file.py +39 -0
  32. re_common/baselibrary/tools/db3_2_sizedb3.py +102 -0
  33. re_common/baselibrary/tools/foreachgz.py +40 -0
  34. re_common/baselibrary/tools/get_attr.py +11 -0
  35. re_common/baselibrary/tools/image_to_pdf.py +62 -0
  36. re_common/baselibrary/tools/java_code_deal.py +139 -0
  37. re_common/baselibrary/tools/javacode.py +79 -0
  38. re_common/baselibrary/tools/mdb_db3.py +48 -0
  39. re_common/baselibrary/tools/merge_file.py +171 -0
  40. re_common/baselibrary/tools/merge_gz_file.py +165 -0
  41. re_common/baselibrary/tools/mhdfstools/__init__.py +0 -0
  42. re_common/baselibrary/tools/mhdfstools/down_hdfs_files.py +42 -0
  43. re_common/baselibrary/tools/mhdfstools/hdfst.py +42 -0
  44. re_common/baselibrary/tools/mhdfstools/up_hdfs_files.py +38 -0
  45. re_common/baselibrary/tools/mongo_tools.py +50 -0
  46. re_common/baselibrary/tools/move_file.py +170 -0
  47. re_common/baselibrary/tools/move_mongo/__init__.py +0 -0
  48. re_common/baselibrary/tools/move_mongo/mongo_table_to_file.py +63 -0
  49. re_common/baselibrary/tools/move_mongo/move_mongo_table.py +354 -0
  50. re_common/baselibrary/tools/move_mongo/use_mttf.py +18 -0
  51. re_common/baselibrary/tools/move_mongo/use_mv.py +93 -0
  52. re_common/baselibrary/tools/mpandas/__init__.py +0 -0
  53. re_common/baselibrary/tools/mpandas/mpandasreadexcel.py +125 -0
  54. re_common/baselibrary/tools/mpandas/pandas_visualization.py +8 -0
  55. re_common/baselibrary/tools/myparsel.py +104 -0
  56. re_common/baselibrary/tools/rename_dir_file.py +37 -0
  57. re_common/baselibrary/tools/sequoiadb_utils.py +398 -0
  58. re_common/baselibrary/tools/split_line_to_many.py +25 -0
  59. re_common/baselibrary/tools/stringtodicts.py +33 -0
  60. re_common/baselibrary/tools/workwechant_bot.py +84 -0
  61. re_common/baselibrary/utils/__init__.py +0 -0
  62. re_common/baselibrary/utils/baseaiohttp.py +296 -0
  63. re_common/baselibrary/utils/baseaiomysql.py +87 -0
  64. re_common/baselibrary/utils/baseallstep.py +191 -0
  65. re_common/baselibrary/utils/baseavro.py +19 -0
  66. re_common/baselibrary/utils/baseboto3.py +291 -0
  67. re_common/baselibrary/utils/basecsv.py +32 -0
  68. re_common/baselibrary/utils/basedict.py +133 -0
  69. re_common/baselibrary/utils/basedir.py +241 -0
  70. re_common/baselibrary/utils/baseencode.py +351 -0
  71. re_common/baselibrary/utils/baseencoding.py +29 -0
  72. re_common/baselibrary/utils/baseesdsl.py +86 -0
  73. re_common/baselibrary/utils/baseexcel.py +264 -0
  74. re_common/baselibrary/utils/baseexcept.py +109 -0
  75. re_common/baselibrary/utils/basefile.py +654 -0
  76. re_common/baselibrary/utils/baseftp.py +214 -0
  77. re_common/baselibrary/utils/basegzip.py +60 -0
  78. re_common/baselibrary/utils/basehdfs.py +135 -0
  79. re_common/baselibrary/utils/basehttpx.py +268 -0
  80. re_common/baselibrary/utils/baseip.py +87 -0
  81. re_common/baselibrary/utils/basejson.py +2 -0
  82. re_common/baselibrary/utils/baselist.py +32 -0
  83. re_common/baselibrary/utils/basemotor.py +190 -0
  84. re_common/baselibrary/utils/basemssql.py +98 -0
  85. re_common/baselibrary/utils/baseodbc.py +113 -0
  86. re_common/baselibrary/utils/basepandas.py +302 -0
  87. re_common/baselibrary/utils/basepeewee.py +11 -0
  88. re_common/baselibrary/utils/basepika.py +180 -0
  89. re_common/baselibrary/utils/basepydash.py +143 -0
  90. re_common/baselibrary/utils/basepymongo.py +230 -0
  91. re_common/baselibrary/utils/basequeue.py +22 -0
  92. re_common/baselibrary/utils/baserar.py +57 -0
  93. re_common/baselibrary/utils/baserequest.py +279 -0
  94. re_common/baselibrary/utils/baseset.py +8 -0
  95. re_common/baselibrary/utils/basesmb.py +403 -0
  96. re_common/baselibrary/utils/basestring.py +382 -0
  97. re_common/baselibrary/utils/basetime.py +320 -0
  98. re_common/baselibrary/utils/basetuple.py +0 -0
  99. re_common/baselibrary/utils/baseurl.py +121 -0
  100. re_common/baselibrary/utils/basezip.py +57 -0
  101. re_common/baselibrary/utils/core/__init__.py +8 -0
  102. re_common/baselibrary/utils/core/bottomutils.py +18 -0
  103. re_common/baselibrary/utils/core/mdeprecated.py +327 -0
  104. re_common/baselibrary/utils/core/mlamada.py +16 -0
  105. re_common/baselibrary/utils/core/msginfo.py +25 -0
  106. re_common/baselibrary/utils/core/requests_core.py +103 -0
  107. re_common/baselibrary/utils/fateadm.py +429 -0
  108. re_common/baselibrary/utils/importfun.py +123 -0
  109. re_common/baselibrary/utils/mfaker.py +57 -0
  110. re_common/baselibrary/utils/my_abc/__init__.py +3 -0
  111. re_common/baselibrary/utils/my_abc/better_abc.py +32 -0
  112. re_common/baselibrary/utils/mylogger.py +414 -0
  113. re_common/baselibrary/utils/myredisclient.py +861 -0
  114. re_common/baselibrary/utils/pipupgrade.py +21 -0
  115. re_common/baselibrary/utils/ringlist.py +85 -0
  116. re_common/baselibrary/utils/version_compare.py +36 -0
  117. re_common/baselibrary/utils/ydmhttp.py +126 -0
  118. re_common/facade/__init__.py +1 -0
  119. re_common/facade/lazy_import.py +11 -0
  120. re_common/facade/loggerfacade.py +25 -0
  121. re_common/facade/mysqlfacade.py +467 -0
  122. re_common/facade/now.py +31 -0
  123. re_common/facade/sqlite3facade.py +257 -0
  124. re_common/facade/use/__init__.py +0 -0
  125. re_common/facade/use/mq_use_facade.py +83 -0
  126. re_common/facade/use/proxy_use_facade.py +20 -0
  127. re_common/libtest/__init__.py +0 -0
  128. re_common/libtest/base_dict_test.py +19 -0
  129. re_common/libtest/baseavro_test.py +13 -0
  130. re_common/libtest/basefile_test.py +14 -0
  131. re_common/libtest/basemssql_test.py +77 -0
  132. re_common/libtest/baseodbc_test.py +8 -0
  133. re_common/libtest/basepandas_test.py +38 -0
  134. re_common/libtest/get_attr_test/__init__.py +0 -0
  135. re_common/libtest/get_attr_test/get_attr_test_settings.py +14 -0
  136. re_common/libtest/get_attr_test/settings.py +55 -0
  137. re_common/libtest/idencode_test.py +54 -0
  138. re_common/libtest/iniconfig_test.py +35 -0
  139. re_common/libtest/ip_test.py +35 -0
  140. re_common/libtest/merge_file_test.py +20 -0
  141. re_common/libtest/mfaker_test.py +9 -0
  142. re_common/libtest/mm3_test.py +32 -0
  143. re_common/libtest/mylogger_test.py +89 -0
  144. re_common/libtest/myparsel_test.py +28 -0
  145. re_common/libtest/mysql_test.py +151 -0
  146. re_common/libtest/pymongo_test.py +21 -0
  147. re_common/libtest/split_test.py +12 -0
  148. re_common/libtest/sqlite3_merge_test.py +6 -0
  149. re_common/libtest/sqlite3_test.py +34 -0
  150. re_common/libtest/tomlconfig_test.py +30 -0
  151. re_common/libtest/use_tools_test/__init__.py +3 -0
  152. re_common/libtest/user/__init__.py +5 -0
  153. re_common/studio/__init__.py +5 -0
  154. re_common/studio/assignment_expressions.py +37 -0
  155. re_common/studio/mydash/__init__.py +0 -0
  156. re_common/studio/mydash/test1.py +19 -0
  157. re_common/studio/pydashstudio/__init__.py +0 -0
  158. re_common/studio/pydashstudio/first.py +9 -0
  159. re_common/studio/streamlitstudio/__init__.py +0 -0
  160. re_common/studio/streamlitstudio/first_app.py +66 -0
  161. re_common/studio/streamlitstudio/uber_pickups.py +24 -0
  162. re_common/studio/test.py +19 -0
  163. re_common/v2/baselibrary/utils/author_smi.py +14 -3
  164. re_common/v2/baselibrary/utils/stringutils.py +1 -0
  165. re_common/vip/__init__.py +0 -0
  166. re_common/vip/base_step_process.py +11 -0
  167. re_common/vip/baseencodeid.py +91 -0
  168. re_common/vip/changetaskname.py +28 -0
  169. re_common/vip/core_var.py +24 -0
  170. re_common/vip/mmh3Hash.py +90 -0
  171. re_common/vip/proxy/__init__.py +0 -0
  172. re_common/vip/proxy/allproxys.py +127 -0
  173. re_common/vip/proxy/allproxys_thread.py +159 -0
  174. re_common/vip/proxy/cnki_proxy.py +153 -0
  175. re_common/vip/proxy/kuaidaili.py +87 -0
  176. re_common/vip/proxy/proxy_all.py +113 -0
  177. re_common/vip/proxy/update_kuaidaili_0.py +42 -0
  178. re_common/vip/proxy/wanfang_proxy.py +152 -0
  179. re_common/vip/proxy/wp_proxy_all.py +182 -0
  180. re_common/vip/read_rawid_to_txt.py +92 -0
  181. re_common/vip/title/__init__.py +5 -0
  182. re_common/vip/title/transform/TransformBookTitleToZt.py +125 -0
  183. re_common/vip/title/transform/TransformConferenceTitleToZt.py +139 -0
  184. re_common/vip/title/transform/TransformCstadTitleToZt.py +196 -0
  185. re_common/vip/title/transform/TransformJournalTitleToZt.py +203 -0
  186. re_common/vip/title/transform/TransformPatentTitleToZt.py +132 -0
  187. re_common/vip/title/transform/TransformRegulationTitleToZt.py +114 -0
  188. re_common/vip/title/transform/TransformStandardTitleToZt.py +135 -0
  189. re_common/vip/title/transform/TransformThesisTitleToZt.py +135 -0
  190. re_common/vip/title/transform/__init__.py +11 -0
  191. {re_common-2.0.1.dist-info → re_common-10.0.1.dist-info}/METADATA +1 -1
  192. re_common-10.0.1.dist-info/RECORD +213 -0
  193. re_common-2.0.1.dist-info/RECORD +0 -25
  194. {re_common-2.0.1.dist-info → re_common-10.0.1.dist-info}/LICENSE +0 -0
  195. {re_common-2.0.1.dist-info → re_common-10.0.1.dist-info}/WHEEL +0 -0
  196. {re_common-2.0.1.dist-info → re_common-10.0.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,24 @@
1
import streamlit as st
import pandas as pd
import numpy as np

st.title('Uber pickups in NYC')

DATE_COLUMN = 'date/time'
DATA_URL = ('https://s3-us-west-2.amazonaws.com/streamlit-demo-data/uber-raw-data-sep14.csv.gz')


@st.cache
def load_data(nrows):
    """Download the Uber pickups CSV, lowercase the column names and parse
    the date/time column; cached by streamlit so repeat runs are free.

    :param nrows: number of rows to read from the remote CSV
    :return: the prepared pandas DataFrame
    """
    data = pd.read_csv(DATA_URL, nrows=nrows)
    lowercase = lambda x: str(x).lower()
    data.rename(lowercase, axis='columns', inplace=True)
    data[DATE_COLUMN] = pd.to_datetime(data[DATE_COLUMN])
    return data


# Create a text element and let the reader know the data is loading.
# BUG FIX: the original called data_load_state.text(...) without ever
# assigning data_load_state (NameError); create the placeholder first.
data_load_state = st.text('Loading data...')
# Load 10,000 rows of data into the dataframe.
data = load_data(10000)
# Notify the reader that the data was successfully loaded.
data_load_state.text("Done! (using st.cache)")
@@ -0,0 +1,19 @@
1
def test_sq():
    """Smoke-test BasePyMongo against the local SequoiaDB Mongo endpoint:
    connect, select the test2 db, open the test collection and dump ids."""
    from re_common.baselibrary.utils.basepymongo import BasePyMongo

    basemongo = BasePyMongo(
        "mongodb://sdbadmin:sdbadmin@192.168.72.86:11817/test2.test?authMechanism=SCRAM-SHA-1")
    basemongo.use_db("test2")
    basemongo.create_col("test")
    # Print the "id" field of every document in the collection.
    for item in basemongo.find():
        print(item["id"])


test_sq()
@@ -1,3 +1,4 @@
1
+ import copy
1
2
  import re
2
3
  import string
3
4
 
@@ -248,6 +249,16 @@ def AuthorRatio(
248
249
  if len(l1) == len(l2) and (is_same_or_initials_match(l1, l2) or set(l1) == set(l2)):
249
250
  return 1
250
251
 
252
+ # 在这里针对上面一条算法再增加一条算法,先对list 排序在对他进行上面的对比
253
+ # 如果长度相等 简写也是单词的首字母 那么两个名字一致 举例:Guo, Qiang @@ Q. Guo
254
+ sort_l1 = copy.deepcopy(l1)
255
+ sort_l2 = copy.deepcopy(l2)
256
+ sort_l1.sort()
257
+ sort_l2.sort()
258
+ if len(sort_l1) == len(sort_l2) and (is_same_or_initials_match(sort_l1, sort_l2) or set(sort_l1) == set(sort_l2)):
259
+ return 0.99
260
+
261
+
251
262
  ##############################################################
252
263
  # 以上为情况穷举情况,以下为其他情况的相似率计算
253
264
  ##############################################################
@@ -262,7 +273,7 @@ def AuthorRatio(
262
273
  len_ratio = len1 / len2 if len1 > len2 else len2 / len1
263
274
 
264
275
  # 计算归一化的 Indel 相似度。 对于比率<score_cutoff,返回0。
265
- end_ratio = normal_end_ratio = Jaro.normalized_similarity(s1, s2)
276
+ end_ratio = normal_end_ratio = Jaro.normalized_similarity(s1.lower(), s2.lower())
266
277
 
267
278
  # 需要对作者的比率分布进行调研决定哪些是小比率哪些是大比率
268
279
  if len_ratio > 1.5 and len_ratio < 3:
@@ -287,7 +298,7 @@ def AuthorRatio(
287
298
 
288
299
  # 首字母相同提分
289
300
  # if is_contained(extract_initials(s1), extract_initials(s2)):
290
- if is_contained_list([i[:1] for i in l1], [i[:1] for i in l2]):
301
+ if is_contained_list([i[:1].lower() for i in l1], [i[:1].lower() for i in l2]):
291
302
  # 应该提分
292
303
  end_ratio = end_ratio * 1.05
293
304
  else:
@@ -302,7 +313,7 @@ def AuthorRatio(
302
313
  end_ratio = end_ratio * 1.1
303
314
 
304
315
  if l1[0] != l2[0]:
305
- end_ratio = end_ratio * Jaro.normalized_similarity(l1[0], l2[0])
316
+ end_ratio = end_ratio * Jaro.normalized_similarity(l1[0].lower(), l2[0].lower())
306
317
 
307
318
  # 如果字符串本身的相似度高 应该拉上去 否者应该拉下来
308
319
  return min(end_ratio, 1) * 0.5 + normal_end_ratio * 0.5
@@ -63,6 +63,7 @@ def get_diacritic_variant(char1):
63
63
  return base_char1
64
64
 
65
65
  def get_alphabetic_ratio(text: str) -> float:
66
+ # 返回字母型字符所占比例
66
67
  if not text:
67
68
  return 0
68
69
 
File without changes
@@ -0,0 +1,11 @@
1
from abc import ABC, abstractmethod


class BaseStepProcess(ABC):
    """Abstract base for step-style processors: holds a shared statistics
    dict and forces subclasses to provide the actual work unit."""

    def __init__(self):
        # Per-run statistics; concrete steps decide the keys they record.
        self.stat_dicts = {}

    @abstractmethod
    def do_task(self, *args, **kwargs):
        """Execute one unit of work; must be implemented by subclasses."""
        pass
@@ -0,0 +1,91 @@
1
import base64
import hashlib

"""
VIP lngid generation: reversible custom base32 encoding of raw record ids.
"""


class BaseLngid(object):
    """Builds `lngid`s (sub-db prefix + length-limited encoded rawid) and
    reverses them where possible."""

    # BUG FIX: the original declared `__int__` (a typo), so this init was
    # never invoked as the constructor initializer.
    def __init__(self):
        pass

    def BaseEncodeID(self, strRaw):
        r"""Custom base32 encode: standard b32, with the '=' padding run
        replaced by one digit marker and the alphabet reversed.

        :param strRaw: raw string to encode
        :return: encoded string (reversible via BaseDecodeID)
        """
        strEncode = base64.b32encode(strRaw.encode('utf8')).decode('utf8')

        # Collapse the b32 '=' padding into a single reversible marker digit.
        if strEncode.endswith('======'):
            strEncode = '%s%s' % (strEncode[0:-6], '0')
        elif strEncode.endswith('===='):
            strEncode = '%s%s' % (strEncode[0:-4], '1')
        elif strEncode.endswith('==='):
            strEncode = '%s%s' % (strEncode[0:-3], '8')
        elif strEncode.endswith('='):
            strEncode = '%s%s' % (strEncode[0:-1], '9')

        # Reverse the alphabet so ids are not trivially recognizable base32.
        table = str.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ', 'ZYXWVUTSRQPONMLKJIHGFEDCBA9876543210')
        strEncode = strEncode.translate(table)

        return strEncode

    def BaseDecodeID(self, strEncode):
        r"""Inverse of BaseEncodeID.

        :param strEncode: string produced by BaseEncodeID
        :return: the original raw string
        """
        table = str.maketrans('ZYXWVUTSRQPONMLKJIHGFEDCBA9876543210', '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ')
        strEncode = strEncode.translate(table)

        # Restore the b32 '=' padding from the marker digit.
        if strEncode.endswith('0'):
            strEncode = '%s%s' % (strEncode[0:-1], '======')
        elif strEncode.endswith('1'):
            strEncode = '%s%s' % (strEncode[0:-1], '====')
        elif strEncode.endswith('8'):
            strEncode = '%s%s' % (strEncode[0:-1], '===')
        elif strEncode.endswith('9'):
            strEncode = '%s%s' % (strEncode[0:-1], '=')

        strRaw = base64.b32decode(strEncode.encode('utf8')).decode('utf8')

        return strRaw

    def GetLngid(self, sub_db_id, rawid, case_insensitive=False):
        """Derive the lngid from sub_db_id and rawid.

        :param sub_db_id: sub-database prefix prepended verbatim
        :param rawid: raw id from the source site
        :param case_insensitive: True when the source site's rawid is
            case-sensitive; lowercase letters become UPPER + '_' so case
            survives the uppercasing
        :return: lngid string
        """
        uppercase_rawid = ''  # uppercased rawid
        if case_insensitive:  # the source site's rawid is case-sensitive
            for ch in rawid:
                if ch.upper() == ch:
                    uppercase_rawid += ch
                else:
                    # Mark an original lowercase char with a trailing '_'.
                    uppercase_rawid += ch.upper() + '_'
        else:
            uppercase_rawid = rawid.upper()

        # Length-limited id: long ids are md5-hashed (irreversible),
        # short ids are reversibly encoded.
        if len(uppercase_rawid) > 20:
            limited_id = hashlib.md5(uppercase_rawid.encode('utf8')).hexdigest().upper()
        else:
            limited_id = self.BaseEncodeID(uppercase_rawid)

        lngid = sub_db_id + limited_id

        return lngid

    def GetRawid(self, limited_id, case_insensitive=False):
        """Recover the (uppercased) rawid from a limited_id.

        Only possible when the rawid was <= 20 chars (i.e. not md5-hashed).

        :raises Exception: when the id cannot be decoded (e.g. it was hashed)
        """
        # BUG FIX: the original try wrapped the whole method, so *any*
        # error surfaced as the misleading "length > 20" message; only the
        # decode itself can legitimately fail that way.
        try:
            uppercase_rawid = self.BaseDecodeID(limited_id)
        except Exception as e:
            raise Exception("长度超过20,不可逆") from e
        if case_insensitive:
            str_ = "_"
            uppercase_rawid_list = list(uppercase_rawid)
            for num, li in enumerate(uppercase_rawid_list):
                if li == str_:
                    # Fold "X_" back to the original lowercase "x".
                    # NOTE(review): str.replace substitutes *all* occurrences
                    # of the 2-char pattern, not just this position — kept
                    # as in the original; confirm for ids with repeats.
                    old_str = "".join(uppercase_rawid_list[num - 1:num + 1])
                    uppercase_rawid = uppercase_rawid.replace(old_str, uppercase_rawid_list[num - 1].lower())

        return uppercase_rawid
@@ -0,0 +1,28 @@
1
from re_common.baselibrary.tools.stringtodicts import StringToDicts
from re_common.facade.mysqlfacade import MysqlUtiles

"""
Used when a provider name changes: batch-updates the `provider` column in
the 209 data_gather_record tables (task/updating) quickly and conveniently.
"""
strings = """
host = 192.168.31.209
user = root
passwd = vipdatacenter
db = data_gather_record
port = 3306
chartset = utf8
"""

# Mapping of old provider name -> new provider name (fill in before running).
dicts_change = {"key为原来的": "values为现在的"}

dicts = StringToDicts().string_to_dicts_by_equal(strings)
mysqlutils = MysqlUtiles("", "", builder="MysqlBuilderForDicts", dicts=dicts)
# Temporarily disable FK checks so both tables can be updated in any order.
mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=0;")
# BUG FIX: iterating the dict directly yields keys only (and would fail to
# unpack a key string into two names); .items() yields the (old, new) pairs.
for key, values in dicts_change.items():
    sql1 = "update `data_gather_record`.`task` set `provider` = '{}' WHERE `provider` = '{}';".format(values, key)
    sql2 = "update `data_gather_record`.`updating` set `provider` = '{}' WHERE `provider` = '{}';".format(values, key)
    mysqlutils.ExeSqlToDB(sql1)
    mysqlutils.ExeSqlToDB(sql2)

mysqlutils.ExeSqlToDB("SET FOREIGN_KEY_CHECKS=1;")
@@ -0,0 +1,24 @@
1
# Pool of school HTTP proxy endpoints ("host:port") used by the vip crawlers.
# NOTE(review): "192.167.31.176:8058" differs from the dominant 192.168.*
# range — possibly a typo for 192.168.31.176; confirm before relying on it.
ALL_SCHOOL_PROXY_LIST = ["192.168.31.176:8119", "192.168.31.176:8120", "192.168.31.176:8104", "192.168.31.176:8018",
                         "192.168.31.176:8076",
                         "192.168.31.176:8160", "192.168.31.176:8240", "192.168.31.176:8241", "192.168.31.176:8195",
                         "192.168.31.176:8243",
                         "192.168.31.176:8062", "192.168.31.176:8019", "192.168.31.176:8034", "192.168.31.176:8103",
                         "192.168.31.176:8181",
                         "192.168.31.176:8211", "192.168.31.123:8081", "192.168.31.176:8032", "192.168.31.176:8231",
                         "192.168.31.176:8189",
                         "192.167.31.176:8058", "192.168.31.36:8135", "192.168.31.176:8057", "192.168.31.176:8017",
                         "192.168.31.36:8033",
                         "192.168.31.176:8184", "192.168.31.176:8207", "192.168.31.176:8196", "192.168.31.176:8041",
                         "192.168.31.176:8087",
                         "192.168.31.176:8117", "192.168.31.36:8098", "192.168.31.176:8165", "192.168.31.36:8039",
                         "192.168.31.176:8159",
                         "192.168.31.176:8051", "192.168.31.176:8180", "192.168.31.176:8148", "192.168.31.176:8021",
                         "192.168.31.176:8008",
                         "192.168.31.176:8035", "192.168.31.36:8004", "192.168.31.176:8131", "192.168.31.176:8127",
                         "192.168.31.176:8052",
                         "192.168.31.36:8011", "192.168.31.36:8082", "192.168.31.36:8182", "192.168.31.176:8031",
                         "192.168.31.176:8171",
                         "192.168.31.176:8012", "192.168.31.176:8002", "192.168.31.176:8140", "192.168.31.36:8149",
                         "192.168.31.176:8074",
                         "192.168.31.3:8080", "192.168.31.4:8080", "192.168.31.179:8129", "192.168.31.179:8130",
                         "192.168.31.179:8132"]
@@ -0,0 +1,90 @@
1
+ #!/bin/env python
2
+ # -*- coding: utf-8-*-
3
+ # author: ganruoxun
4
+ # date: 2020-09-25
5
+
6
+ import mmh3
7
+ import os
8
+ import binascii
9
+
10
+
11
class Mmh3Hash(object):
    """Builds hashed storage paths for self-built fulltext resources using
    murmurhash3 of the normalized file name.

    :param _type: numeric resource-type code ("2" thesis, "3" conference,
        "4" patent, "5" standard, "10" regulation)
    :param resType: short resource-type name (bs/hy/zl/bz/fg)
    :raises RuntimeError: when _type is not a known code
    """

    def __init__(self, _type, resType):
        # Numeric resource-type code -> short type name.
        self.typeDic = {
            "2": "bs",
            "3": "hy",
            "4": "zl",
            "5": "bz",
            "10": "fg",
        }
        # BUG FIX: the original indexed typeDic directly, so an unknown
        # _type raised KeyError before the None check could ever run;
        # .get() makes the intended RuntimeError reachable.
        self.typeCode = self.typeDic.get(_type)
        self.resType = resType
        if self.typeCode is None:
            raise RuntimeError('type 参数无法识别!')

    # ## Get the file name from filePath, uppercasing the stem and
    # ## lowercasing the extension (kept for reference):
    # def normFileName(self, filePath):
    #     lngid = os.path.splitext(os.path.normpath(os.path.basename(filePath)))[0].upper()
    #     prfx = os.path.splitext(os.path.normpath(os.path.basename(filePath)))[1].lower()
    #     return "{0}{1}".format(lngid, prfx)

    ## Normalize a file name: uppercase the stem, lowercase the suffix.
    # NOTE(review): splits on the FIRST dot only, so "a.b.c" yields "A.b"
    # and drops ".c" — use normFileName_image for multi-dot names.
    def normFileName(self, fileName):
        if not "." in fileName:
            return fileName.upper()
        tmps = fileName.split(".")
        filePrfx = tmps[0].upper()
        fileSufx = tmps[1].lower()
        return "{0}.{1}".format(filePrfx, fileSufx)

    # Multi-dot file names (mainly images); not part of the standard spec.
    # Last segment is the suffix; everything before it is the stem.
    def normFileName_image(self, fileName):
        if not "." in fileName:
            return fileName.upper()
        tmps = fileName.split(".")
        fileSufx = tmps[-1].lower()
        tmps.remove(tmps[-1])
        filePrfx = ".".join(tmps).upper()

        return "{0}.{1}".format(filePrfx, fileSufx)

    ## Hash the fulltext file name with murmurhash3 and build the
    ## 3-character hash directory name from the first hex digits.
    def generateHashName(self, fileName):
        hashCode = binascii.b2a_hex(mmh3.hash_bytes(fileName)).upper()[0:3]
        firstCode = chr(hashCode[0])
        secondCode = chr(hashCode[1])
        thirdCode = chr(hashCode[2])
        # Fold the third hex digit into a smaller alphabet (0-4, A-C).
        if thirdCode.isdigit():
            return firstCode + secondCode + str(int(thirdCode) % 5)
        elif thirdCode == 'D':
            return firstCode + secondCode + 'A'
        elif thirdCode == 'E':
            return firstCode + secondCode + 'B'
        elif thirdCode == 'F':
            return firstCode + secondCode + 'C'
        else:
            return firstCode + secondCode + thirdCode

    # fileName: file name with suffix, required; for patents it is the
    #   publication number + suffix, otherwise lngid + suffix
    # years: 4-digit year, required
    # country: country code; defaults to "cn" when empty
    # resType: self-built resource type, required: bs/hy/bz/fg/zl
    def generatehashPath(self, fileName, years, country, resType):
        if years is None or len(years) != 4:
            raise RuntimeError('years 参数错误!')
        elif fileName is None or len(fileName) == 0:
            raise RuntimeError('fileName 参数错误!')
        elif country is None or len(country) == 0:
            country = 'cn'
        # Domestic-only resource types always use "cn".
        if resType in ('bs', 'hy', 'fg', 'zl', 'bz'):
            country = "cn"
        country = country.lower()
        intYear = int(years)
        if intYear < 1989:
            years = 'befor1989'
        fileName = self.normFileName(fileName)
        return "\\" + resType + "\\" + years + country + self.typeCode + "\\" + self.generateHashName(fileName) + '\\' + fileName
        # return '\\' + years + country + _type + '\\' + generateHashName(fileName) + '\\' + fileName
89
if __name__ == "__main__":
    # Manual demo checks; guarded so importing this module has no side
    # effects (the original ran these prints on import).
    print(Mmh3Hash("3", "").normFileName_image("aa.bb.jpg"))
    print(Mmh3Hash("4", "zl").generatehashPath("cn206103553u.pdf", "2016", "cn", "zl"))
File without changes
@@ -0,0 +1,127 @@
1
+ import json
2
+ ###########################################
3
+ # 同项目调用基础包
4
+ import os
5
+ import sys
6
+ import time
7
+
8
+ import requests
9
+
10
+ filepath = os.path.abspath(__file__)
11
+ pathlist = filepath.split(os.sep)
12
+ pathlist = pathlist[:-4]
13
+ TopPath = os.sep.join(pathlist)
14
+ sys.path.insert(0, TopPath)
15
+ print(TopPath)
16
+ ############################################
17
+
18
+ from re_common.baselibrary.utils.basedir import BaseDir
19
+ from re_common.baselibrary.utils.basefile import BaseFile
20
+ from re_common.baselibrary.utils.baserequest import BaseRequest
21
+ from re_common.facade.lazy_import import get_streamlogger
22
+ from re_common.facade.mysqlfacade import MysqlUtiles
23
+ from re_common.baselibrary.utils.basetime import BaseTime
24
+
25
+
26
class Kproxy(object):
    """Maintains the `proxyall_v1` MySQL table of short-lived HTTP proxies
    bought from the "taiyang" vendor: periodically fetches new proxies and
    re-validates the live ones."""

    def __init__(self):
        self.cur_path = BaseDir.get_file_dir_absolute(__file__)
        # DB settings are read from db.ini next to this file ("allproxy" section).
        self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
        self.logger = get_streamlogger()
        self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
        self.bsrequest = BaseRequest()
        self.starttime = time.time()
        self.starttime_val = time.time()

    def get_taiyang_proxy(self, num=6):
        """Fetch *num* Taiyang proxies (vendor quota ~3 per minute) and
        upsert them as live.

        :param num: number of proxies to request in one API call
        """
        self.starttime = time.time()
        # url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
        url = "http://http.tiqu.alibabaapi.com/getip?num={}&type=2&pack=59105&port=1&ts=1&lb=1&pb=45&regions=".format(num)
        BoolResult, errString, r = self.bsrequest.base_request(url,
                                                              timeout=30
                                                              )
        if BoolResult:
            dicts = json.loads(r.text)
            for item in dicts["data"]:
                proxy = item["ip"] + ":" + item["port"]
                sources = "taiyang"
                expire_time = item["expire_time"]
                # Upsert: revive the row (stat=1) and refresh expire_time on conflict.
                sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
                    proxy, sources, expire_time, expire_time)
                self.mysqlutils.ExeSqlToDB(sql)
        else:
            self.logger.error("获取失败")

    def get_taiyang_num(self):
        """Query the vendor account API for the remaining package balance.

        :return: remaining proxy count on success; logs and implicitly
            returns None on request failure
        """
        url = "http://ty-http-d.hamir.net/index/index/get_my_package_balance?neek=521821&appkey=1fcba6de94f71561ba3007f4c24ca0b1&ac=59105"
        BoolResult, errString, r = self.bsrequest.base_request(url,
                                                              timeout=30
                                                              )
        if BoolResult:
            dicts = json.loads(r.text)
            return dicts["data"]["package_balance"]
        else:
            self.logger.error("获取失败")

    def val(self, proxy, sources):
        """Probe *proxy* against baidu.com; mark it dead (stat=0) on a
        non-200 response. A requests exception propagates to the caller
        (val_all catches it and marks the proxy dead)."""
        # Target URL used for the liveness probe.
        targetUrl = "https://www.baidu.com"
        proxies = {
            "http": "http://%s" % proxy,
            "https": "http://%s" % proxy
        }
        resp = requests.get(targetUrl, proxies=proxies, timeout=5)
        if resp.status_code == 200:
            print(resp.status_code)
            return True
        else:
            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
            self.mysqlutils.ExeSqlToDB(sql)
            return False

    def val_all(self):
        """Re-validate every live proxy (stat=1), oldest update first; any
        probe error marks that proxy dead."""
        self.starttime_val = time.time()
        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
        bools, rows = self.mysqlutils.SelectFromDB(sql)
        for row in rows:
            try:
                self.val(row[0], row[1])
            except:
                sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
                self.mysqlutils.ExeSqlToDB(sql)

    def run(self):
        """Endless fetch + validate loop aiming at roughly one cycle per
        100 seconds."""
        sleep_time = 0
        while True:
            num = 10
            if sleep_time < 0:
                # Previous cycle overran its 100s budget: sleep out whatever
                # remains and fetch extra proxies to compensate.
                print("time sleep {}".format(str(100 + sleep_time)))
                if 100 + sleep_time > 0:
                    time.sleep(100 + sleep_time)
                num = num + 10
            nowtime = BaseTime().get_beijin_date_strins(format="%H%M%S")
            print(nowtime)
            # Once a day (13:37-13:47 Beijing time) request the full
            # remaining package balance in one fetch.
            if "133700" <= nowtime <= "134700":
                num = Kproxy().get_taiyang_num()
            start_time = time.time()
            self.get_taiyang_proxy(num=num)
            self.val_all()
            use_time = int(time.time() - start_time)
            sleep_time = 100 - use_time
            print("time sleep {}".format(str(sleep_time)))
            if sleep_time >= 3:
                time.sleep(sleep_time)
124
+
125
+
126
+ if __name__ == "__main__":
127
+ Kproxy().run()
@@ -0,0 +1,159 @@
1
+ import json
2
+ ###########################################
3
+ # 同项目调用基础包
4
+ import os
5
+ import sys
6
+ import time
7
+
8
+ import requests
9
+
10
+ from re_common.baselibrary.mthread.MThreadingRun import MThreadingRun
11
+ from re_common.baselibrary.mthread.mythreading import ThreadInfo, ThreadVal, ThreadPoolManger
12
+
13
+ filepath = os.path.abspath(__file__)
14
+ pathlist = filepath.split(os.sep)
15
+ pathlist = pathlist[:-4]
16
+ TopPath = os.sep.join(pathlist)
17
+ sys.path.insert(0, TopPath)
18
+ print(TopPath)
19
+ ############################################
20
+
21
+ from re_common.baselibrary.utils.basedir import BaseDir
22
+ from re_common.baselibrary.utils.basefile import BaseFile
23
+ from re_common.baselibrary.utils.baserequest import BaseRequest
24
+ from re_common.facade.lazy_import import get_streamlogger
25
+ from re_common.facade.mysqlfacade import MysqlUtiles
26
+
27
+
28
class Kproxy(object):
    """Taiyang proxy helpers used by the threaded validator below. Same
    vendor/table as allproxys.py, but ``val`` reports its result through a
    per-thread result queue instead of writing to MySQL directly."""

    def __init__(self):
        self.cur_path = BaseDir.get_file_dir_absolute(__file__)
        # DB settings are read from db.ini next to this file ("allproxy" section).
        self.configfile = BaseFile.get_new_path(self.cur_path, "db.ini")
        self.logger = get_streamlogger()
        self.mysqlutils = MysqlUtiles(self.configfile, "allproxy", self.logger)
        self.bsrequest = BaseRequest()
        self.starttime = time.time()
        self.starttime_val = time.time()

    def get_taiyang_proxy(self, num=6):
        """Fetch *num* Taiyang proxies (vendor quota ~3 per minute) and
        upsert them as live.

        :param num: number of proxies to request in one API call
        """
        self.starttime = time.time()
        url = "http://http.tiqu.qingjuhe.cn/getip?num={}&type=2&pack=56912&port=1&ts=1&lb=1&pb=45&regions=".format(num)
        BoolResult, errString, r = self.bsrequest.base_request(url,
                                                              timeout=30
                                                              )
        if BoolResult:
            dicts = json.loads(r.text)
            for item in dicts["data"]:
                proxy = item["ip"] + ":" + item["port"]
                sources = "taiyang"
                expire_time = item["expire_time"]
                # Upsert: revive the row (stat=1) and refresh expire_time on conflict.
                sql = "insert into proxyall_v1 (proxy,sources,expire_time) values ('%s','%s','%s') on DUPLICATE key update stat=1,expire_time='%s'" % (
                    proxy, sources, expire_time, expire_time)
                self.mysqlutils.ExeSqlToDB(sql)
        else:
            self.logger.error("获取失败")

    def val(self, proxy, sources, threadval):
        """Probe *proxy* via baidu.com and queue the resulting UPDATE.

        The SQL goes onto ``threadval``'s result queue so the pool's
        ``dealresult`` hook can batch-execute it; request exceptions
        propagate to the caller.
        """
        # Target URL used for the liveness probe.
        targetUrl = "https://www.baidu.com"
        proxies = {
            "http": "http://%s" % proxy,
            "https": "http://%s" % proxy
        }
        resp = requests.get(targetUrl, proxies=proxies, timeout=5)
        if resp.status_code == 200:
            print(resp.status_code)
            sql = "update proxyall_v1 set stat=1 where proxy='%s' and sources='%s';" % (proxy, sources)
            threadval.get_result_queue().put(sql)
            return True
        else:
            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
            # self.mysqlutils.ExeSqlToDB(sql)
            threadval.get_result_queue().put(sql)
            return False

    def val_all(self):
        """Sequentially re-validate live proxies.

        NOTE(review): ``self.val(row[0], row[1])`` omits the required
        ``threadval`` argument, so every call raises TypeError and each
        proxy gets marked dead in the except branch. Looks vestigial here
        (DetailThreadRun does the real validation) — confirm before use.
        """
        self.starttime_val = time.time()
        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `update_time`"
        bools, rows = self.mysqlutils.SelectFromDB(sql)
        for row in rows:
            try:
                self.val(row[0], row[1])
            except:
                sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (row[0], row[1])
                self.mysqlutils.ExeSqlToDB(sql)

    def run(self):
        """Simple 100-second fetch/validate loop (the threaded
        DetailThreadRun below is the entry point actually used)."""
        while True:
            start_time = time.time()
            self.get_taiyang_proxy()
            self.val_all()
            use_time = int(time.time() - start_time)
            sleep_time = 100 - use_time
            print("time sleep {}".format(str(sleep_time)))
            if sleep_time >= 3:
                time.sleep(sleep_time)
101
+
102
+
103
class DetailThreadRun(MThreadingRun):
    """Thread-pool runner that re-validates proxies from proxyall_v1:
    getTask feeds dead proxies (stat=0), getTask2 feeds live ones; each job
    probes one proxy and queues an UPDATE that dealresult batch-executes."""

    def __init__(self, num):
        # Shared helper holding the DB handle and the probe logic.
        self.down = Kproxy()
        super(DetailThreadRun, self).__init__(num)

    @ThreadPoolManger.thread_lock
    def getTask(self, *args, **kwargs):
        # Dead proxies, newest expiry first — they may have come back alive.
        sql = "select proxy,sources from proxyall_v1 where stat=0 ORDER BY `expire_time` DESC limit 1000"
        bools, rows = self.down.mysqlutils.SelectFromDB(sql)
        return rows

    @ThreadPoolManger.thread_lock
    def getTask2(self, *args, **kwargs):
        # Live proxies, newest expiry first, re-checked every round.
        sql = "select proxy,sources from proxyall_v1 where stat=1 ORDER BY `expire_time` DESC limit 1000"
        bools, rows = self.down.mysqlutils.SelectFromDB(sql)
        return rows

    def setTask(self, results=None, *args, **kwargs):
        # Enqueue one validation job per proxy (dead batch, then live
        # batch), wait two minutes, then end this round.
        if not results:
            return self.BREAK
        for row in results:
            self.add_job(self.func, row[0], row[1])
        rows = self.getTask2()
        for row in rows:
            self.add_job(self.func, row[0], row[1])
        time.sleep(60*2)
        return self.BREAK

    @ThreadPoolManger.thread_lock
    def dealresult(self, *args, **kwargs):
        # Batch-execute the UPDATE statements queued by the workers.
        # for sql in self.results:
        #     self.down.mysqlutils.ExeSqlToDB(sql)
        self.down.mysqlutils.ExeSqlListToDB(self.results)

    def setProxy(self, proxysList=None):
        # No proxy rotation needed for validation work; just idle.
        time.sleep(300)

    def is_break(self):
        # Never stop the outer loop from here.
        return False

    def thread_pool_hook(self, threadinfo: ThreadInfo):
        # Keep the proxy-setting thread from restarting (default restarts).
        return {}

    def fun(self, threadval: ThreadVal, *args, **kwargs):
        # Worker body: probe one proxy; on any error queue a "mark dead"
        # UPDATE instead of touching the DB from the worker thread.
        # NOTE(review): setTask enqueues self.func — presumably the
        # framework binds that to this method; confirm in MThreadingRun.
        proxy,sources = args[0],args[1]
        try:
            self.down.val(proxy, sources, threadval)
        except:
            sql = "update proxyall_v1 set stat=0 where proxy='%s' and sources='%s';" % (proxy, sources)
            # self.mysqlutils.ExeSqlToDB(sql)
            threadval.get_result_queue().put(sql)
155
+
156
+
157
if __name__ == '__main__':
    # Launch the validator with 30 worker threads.
    down = DetailThreadRun(30)
    down.run()