toolkits-0.2.7-py3-none-any.whl
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- toolkits/3des/3des.py +93 -0
- toolkits/3des/__init__.py +0 -0
- toolkits/__init__.py +2 -0
- toolkits/basic/__init__.py +0 -0
- toolkits/basic/list_helper.py +26 -0
- toolkits/config/__init__.py +0 -0
- toolkits/config/config_demo.py +43 -0
- toolkits/databases/__init__.py +0 -0
- toolkits/databases/database_client_util.py +143 -0
- toolkits/databases/es_client.py +88 -0
- toolkits/databases/hive_client.py +72 -0
- toolkits/databases/hive_cmd.py +113 -0
- toolkits/databases/hive_helper.py +220 -0
- toolkits/databases/redis_mgmt.py +95 -0
- toolkits/databases/sql_helper.py +291 -0
- toolkits/databases/sqlalchemy_helper.py +71 -0
- toolkits/databases/status_check.py +162 -0
- toolkits/db_query_demo.py +72 -0
- toolkits/libs_core/__init__.py +0 -0
- toolkits/libs_core/config_groups_helper.py +60 -0
- toolkits/libs_core/config_helper.py +22 -0
- toolkits/libs_core/env_prepare.py +145 -0
- toolkits/libs_core/load_module.py +46 -0
- toolkits/libs_core/mysql_helper.py +151 -0
- toolkits/network/__init__.py +0 -0
- toolkits/network/ip_helper.py +32 -0
- toolkits/network/pdi_helper.py +206 -0
- toolkits/network/send_mail.py +105 -0
- toolkits/system/__init__.py +0 -0
- toolkits/system/aes_cipher.py +44 -0
- toolkits/system/basic_utils.py +20 -0
- toolkits/system/collections_helper.py +72 -0
- toolkits/system/crpyt_helper.py +39 -0
- toolkits/system/dict2xml.py +416 -0
- toolkits/system/dict_helper.py +29 -0
- toolkits/system/excel_helper.py +101 -0
- toolkits/system/file_helper.py +52 -0
- toolkits/system/load_module.py +47 -0
- toolkits/system/priority_tasks.py +199 -0
- toolkits/system/process_monitor/__init__.py +0 -0
- toolkits/system/process_monitor/process_monitor.py +349 -0
- toolkits/system/shell_helper.py +263 -0
- toolkits/system/str_helper.py +187 -0
- toolkits/system/tasks_deamon/__init__.py +0 -0
- toolkits/system/tasks_deamon/tasks_controller.py +70 -0
- toolkits/system/tasks_deamon/tasks_multiprocessing.py +134 -0
- toolkits/system/tasks_deamon/tasks_process.py +137 -0
- toolkits/system/test_shell_helper.py +2 -0
- toolkits/system/time_helper.py +175 -0
- toolkits/system/win32_env.py +49 -0
- toolkits/tookits_app.py +17 -0
- toolkits/tookits_cli.py +126 -0
- toolkits-0.2.7.dist-info/METADATA +35 -0
- toolkits-0.2.7.dist-info/RECORD +56 -0
- toolkits-0.2.7.dist-info/WHEEL +4 -0
- toolkits-0.2.7.dist-info/entry_points.txt +5 -0
toolkits/system/shell_helper.py
@@ -0,0 +1,263 @@
# -*- coding: utf-8 -*-
import _thread
import argparse
import asyncio
import importlib
import os
import pprint
import subprocess
import sys
import tempfile
import time
import traceback
import uuid
from multiprocessing import Process
from threading import Timer

import arrow
from log4python.Log4python import log
from unipath import Path

from toolkits.system.file_helper import read_content

importlib.reload(sys)
logger = log("shellHelper")


def change_the_path_split(file_path):
    path_split_str_win = "\\"
    path_split_str_linux = "/"
    if str(sys.platform).find("win") >= 0:
        path_final = str(file_path).replace(path_split_str_linux, path_split_str_win)
    else:
        path_final = str(file_path).replace(path_split_str_win, path_split_str_linux)
    return path_final


def include_path(target_root_directory, relative_path_list):
    for item in relative_path_list:
        item = change_the_path_split(item)
        path_parent = "%s/%s" % (target_root_directory, item)
        sys.path.append(path_parent)


def get_relative_directory_levels(script_file_path, relative_levels):
    path_cur = os.path.dirname(os.path.realpath(script_file_path))
    directory_target = "/.." * relative_levels
    target_root_path = "%s%s" % (path_cur, directory_target)
    return change_the_path_split(target_root_path)


def run_with_timeout(timeout, default, f, *args, **kwargs):
    if not timeout:
        return f(*args, **kwargs)
    timeout_timer = Timer(timeout, _thread.interrupt_main)
    try:
        timeout_timer.start()
        result = f(*args, **kwargs)
        return result
    except KeyboardInterrupt:
        return default
    finally:
        timeout_timer.cancel()


def exec_cmd(cmd, work_path):
    exec_shell_with_pipe(cmd, work_path=work_path)


def worker(cmd, work_path):
    p = Process(target=exec_cmd, args=(cmd, work_path))
    p.start()
    os._exit(1)


def exec_external_cmd_background(cmd, work_path=""):
    p = Process(target=worker, args=(cmd, work_path))
    p.start()
    p.join()


def file_is_used(monitor_file):
    # fuser or lsof to check file's status
    cmd = "lsof %s" % monitor_file
    ret = exec_shell_with_pipe(cmd)
    if not ret:
        return True
    else:
        return False


def exec_shell_with_pipe(cmd, timeout=0, work_path=""):
    """exeShellWithPipe("grep 'processor' /proc/cpuinfo | sort -u | wc -l")

    return-val
        output-lines-list  # line list ['output_01', 'output_02']

    :param work_path:
    :param timeout:
    :param cmd: exec command
    """
    result = []
    none_num = 0
    if cmd == "" or cmd is None:
        return "No Cmd Input"
    if work_path == "":
        scan_process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        scan_process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=work_path)
    while True:
        if none_num > 3:
            break
        if timeout != 0:
            ret = run_with_timeout(timeout, None, scan_process.stdout.readline)
        else:
            ret = scan_process.stdout.readline()
        if ret == "" or ret is None:
            none_num += 1
        else:
            result.append(ret.strip())
            none_num = 0
    return result


async def run_command_async(command, timeout=5):
    process = None
    try:
        process = await asyncio.create_subprocess_shell(
            command,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=timeout)
        ret = {
            "exit_code": process.returncode,
            "stdout": stdout,
            "stderr": stderr
        }
        return ret
    except asyncio.TimeoutError:
        if process and process.returncode is None:
            process.terminate()
        ret = {
            "exit_code": -1,
            "stdout": None,
            "stderr": f"Command timed out after {timeout} seconds."
        }
        return ret


def exec_shell_async(command, timeout=5):
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(run_command_async(command, timeout=timeout))


def exec_shell(cmd, timeout=0, bufsize=0, executable=None,
               stdin=None, stdout=None, stderr=None,
               preexec_fn=None, close_fds=False, shell=False,
               cwd=None, env=None, universal_newlines=False,
               startupinfo=None, creationflags=0):
    """exec_shell("grep 'processor' /proc/cpuinfo | sort -u | wc -l")

    return-code::

        ret = {
            "exit_code": return_code,  # 0 or -1
            "stdout": stdout_msg,  # line list ['output_01', 'output_02']
            "stderr": stderr_msg   # line list ['output_01', 'output_02']
        }
    code-end

    :param preexec_fn:
    :type stdin: object
    :param executable:
    :param cwd:
    :param timeout:
    :param cmd: exec command
    :return: return a dict to caller

    """
    if cmd == "" or cmd is None:
        return "No Cmd Input"

    fp_out = subprocess.PIPE
    fp_err = subprocess.PIPE
    temp_dir = tempfile.gettempdir()
    stdout = os.path.join(temp_dir, "stdout_%s" % str(uuid.uuid4()).replace("-", "").upper())
    stderr = os.path.join(temp_dir, "stderr_%s" % str(uuid.uuid4()).replace("-", "").upper())
    if stdout:
        fp_out = open(stdout, "w+")
    if stderr:
        fp_err = open(stderr, "w+")

    scan_process = subprocess.Popen(cmd, shell=True, stdout=fp_out, stderr=fp_err, cwd=cwd, bufsize=bufsize,
                                    executable=executable, stdin=stdin,
                                    preexec_fn=preexec_fn, close_fds=close_fds,
                                    env=env, universal_newlines=universal_newlines,
                                    startupinfo=startupinfo, creationflags=creationflags)

    return_code = None
    while True:
        return_code = scan_process.poll()
        if return_code is None:
            time.sleep(1)
        else:
            break

    fp_out.close()
    fp_err.close()
    stdout_msg = read_content(stdout)
    stderr_msg = read_content(stderr)
    Path(stderr).remove()
    Path(stdout).remove()

    ret = {
        "exit_code": return_code,
        "stdout": stdout_msg,
        "stderr": stderr_msg
    }
    return ret


def dump_data(data_to_dump, file_prefix_name="py_dump_data", dump_path=None):
    date_str = arrow.now().format('YYYYMMDD_HHmmss')
    file_name = "%s_%s_%s.dat" % (file_prefix_name, date_str, str(uuid.uuid4()).replace("-", ""))
    tmp_dir = tempfile.gettempdir()
    if dump_path:
        try:
            if not Path(tmp_dir).exists():
                Path(tmp_dir).mkdir(parents=True)
            tmp_dir = dump_path
        except Exception as ex:
            logger.error("Error: %s" % ex)
            logger.error(traceback.format_exc())

    data_dump_file = os.path.join(tmp_dir, file_name)
    fp = open(data_dump_file, "w+")

    if type(data_to_dump) is str or type(data_to_dump) is str:
        fp.write("%s\n" % data_to_dump)
    elif type(data_to_dump) is list:
        for item in data_to_dump:
            str_line = pprint.pformat(item)
            fp.write("%s\n" % str_line)
    else:
        str_line = pprint.pformat(data_to_dump)
        fp.write("%s\n" % str_line)
    fp.close()

    return data_dump_file


if __name__ == '__main__':
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("logFile", type=str, help="specify the log file's path")
        args = parser.parse_args()
        print((args.logFile))
        # exec_shell()
    except Exception as ex:
        logger.debug("Error: %s" % ex)
        logger.debug(traceback.format_exc())
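For context, a minimal usage sketch for the helpers above, assuming the wheel is installed along with its log4python, unipath and arrow dependencies (all imported at module load) and that a POSIX shell is available:

from toolkits.system.shell_helper import exec_shell, exec_shell_async

# Blocking variant: polls the child process and returns a dict of
# {"exit_code": ..., "stdout": ..., "stderr": ...}, with stdout/stderr
# read back from temporary files via read_content().
ret = exec_shell("grep 'processor' /proc/cpuinfo | sort -u | wc -l")
print(ret["exit_code"], ret["stdout"])

# Asyncio variant: wraps run_command_async() with a default 5-second timeout;
# on timeout it returns exit_code -1 and a "Command timed out" message in stderr.
result = exec_shell_async("sleep 1 && echo done", timeout=5)
print(result["exit_code"])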
toolkits/system/str_helper.py
@@ -0,0 +1,187 @@
# -*- coding: utf-8 -*-
import hashlib
import os
import pprint
import re
import sys
import uuid
from log4python.Log4python import log
import importlib
importlib.reload(sys)
logger = log("strHelper")
import esm


class StrHelper:
    def __init__(self):
        self.__base_path = self.__get_script_dir()
        self.__sql_file = "%s/config/driver_query.sql" % self.__base_path

    @staticmethod
    def __element_count(arr, target):
        return arr.count(target)

    @staticmethod
    def is_string(obj):
        try:
            obj.lower() + obj.title() + obj + ""
        except Exception as ex:
            return False
        else:
            return True

    @staticmethod
    def __reg_find_data(reg_pattern, search_data):
        data_search = []
        m = re.finditer(reg_pattern, search_data)
        try:
            while True:
                data_match = m.next().group()
                if data_match:
                    data_search.append(data_match)
        except Exception as ex:
            pass
        return data_search

    @staticmethod
    def multi_search(search_string, search_list):
        """
        Multi-pattern search over a string.

        search_string:
            search_str = u"哎呀,今天在楼下看到了宝马,我老家倒是有养马的,以前的邻居有个奔驰,不对是保时捷,大爷的,都是马"
        search_words:
            search_words = [u"宝马", u"马", u"奔驰", u"保时捷"]
        result::

            [((33, 39), u'\\u5b9d\\u9a6c'),
             ((36, 39), u'\\u9a6c'),
             ((63, 66), u'\\u9a6c'),
             ((93, 99), u'\\u5954\\u9a70'),
             ((111, 120), u'\\u4fdd\\u65f6\\u6377'),
             ((141, 144), u'\\u9a6c')]

        end-code

        :param search_string:
        :param search_list:
        :return:
        """
        index = esm.Index()
        for item in search_list:
            index.enter(item)
        index.fix()
        ret = index.query(search_string)
        return ret

    def __element_repeat_stat(self, data_array):
        if not data_array:
            return []

        uniq_data = list(set(data_array))
        data_list = []
        for item in uniq_data:
            count_num = self.__element_count(data_array, item)
            val_stat = {
                'emlement': item,
                'count': count_num
            }
            data_list.append(val_stat)
        return data_list

    @staticmethod
    def __get_run_dir():
        script_path = sys.argv[0]
        if script_path[0] != "/":
            full_path = "%s/%s" % (os.getcwd(), script_path)
        else:
            full_path = script_path
        return os.path.dirname(full_path)

    @staticmethod
    def __get_script_dir():
        return os.path.dirname(os.path.realpath(__file__))

    @staticmethod
    def md5_str(str_input):
        from md5 import md5
        return md5(str_input).hexdigest().upper()

    @staticmethod
    def md5_file(file_path):
        hash_md5 = hashlib.md5()
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    @staticmethod
    def random_str():
        return str(uuid.uuid4()).replace("-", "")

    def match_phone_info(self, search_data, match_pattern):
        """
        match phone info

        search_data:
            test_str = ' 175****3754 158****5552 158****5552 13412337575 15812311265 user_id-1334321093 '
        match_pattern:
            match_pattern = [
                {
                    "name": "phone",
                    "reg": r'((13[0-9])|(14[5,7])|(15[0-3,5-9])|(17[0,3,5-8])|(18[0-9])|166|190|192|19[6-9]|(147))\d{8}'
                },
                {
                    "name": "phone_mask",
                    "reg": r'[0-9]{3}\*{4}[0-9]{4}'
                },
                {
                    "name": "user_id",
                    "reg": r'user_id-([0-9]+)'
                }]

        return-val:
            {
                "phone": [
                    {
                        "count": 1,
                        "emlement": "13412337575"
                    },
                    {
                        "count": 1,
                        "emlement": "15812311265"
                    }
                ],
                "user_id": [
                    {
                        "count": 1,
                        "emlement": "user_id-1334321093"
                    }
                ],
                "phone_mask": [
                    {
                        "count": 2,
                        "emlement": "158****5552"
                    },
                    {
                        "count": 1,
                        "emlement": "175****3754"
                    }
                ]
            }

        end-code

        :param search_data:
        :param match_pattern:
        :return:
        """
        match_info = {}
        for item_pattern in match_pattern:
            phone_reg = item_pattern['reg']
            pattern_name = item_pattern['name']
            data_match = self.__reg_find_data(phone_reg, search_data)
            data_check = self.__element_repeat_stat(data_match)
            match_info[pattern_name] = data_check

        return match_info
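A brief usage sketch for StrHelper, assuming the wheel and the esm package (imported unconditionally at module load) are installed; the pattern list below follows the shape documented in the match_phone_info docstring:

from toolkits.system.str_helper import StrHelper

helper = StrHelper()
print(StrHelper.random_str())                # 32-character hex string derived from uuid4
print(StrHelper.md5_file("/etc/hostname"))   # streaming MD5 of a file, read in 4 KiB chunks

# Pattern list in the form expected by match_phone_info (copied from its docstring).
patterns = [
    {"name": "phone_mask", "reg": r'[0-9]{3}\*{4}[0-9]{4}'},
    {"name": "user_id", "reg": r'user_id-([0-9]+)'},
]
print(helper.match_phone_info(" 175****3754 user_id-1334321093 ", patterns))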
toolkits/system/tasks_deamon/__init__.py
File without changes
toolkits/system/tasks_deamon/tasks_controller.py
@@ -0,0 +1,70 @@
# -*- coding: utf-8 -*-
import multiprocessing
import sys
import time

from multiprocessing import Pool
import schedule
from log4python.Log4python import log
import traceback
from toolkits.system.time_helper import get_time_range_interval
import importlib

importlib.reload(sys)
logger = log("TasksController")


class TasksController:
    def __init__(self, process_num=10):
        self.process_num = process_num
        self.pool_process = None

    @staticmethod
    def run_schedule(exec_worker, exec_time="3:00"):
        """
        # schedule.every().minutes.do(self_phone_task)
        # schedule.every().minutes.do(quick_region_monitor_week_task)
        # schedule.every().monday.at("5:00").do(quick_region_monitor_week_task)
        # schedule.every().friday.at("17:32").do(self_phone_task)
        :return:
        """
        schedule.every().day.at(exec_time).do(exec_worker)
        while True:
            schedule.run_pending()
            time.sleep(1)

    @staticmethod
    def func(msg):
        print(multiprocessing.current_process().name + '-' + msg)

    def run_redis_tasks(self, exec_worker):
        self.pool_process = Pool(self.process_num)
        results = []
        for range_item in range(0, self.process_num):
            result = self.pool_process.apply_async(exec_worker, (range_item,), callback=None)
            results.append(result)

        # Close the pool so no new tasks can be submitted; must be called before join()
        self.pool_process.close()
        self.pool_process.join()

    def run_time_range(self, exec_worker, time_begin, time_end, time_interval="1d"):
        self.pool_process = Pool(self.process_num)
        time_range_list = get_time_range_interval(time_begin, time_end, time_interval)

        results = []
        for range_item in time_range_list:
            result = self.pool_process.apply_async(exec_worker, (range_item,), callback=None)
            results.append(result)

        # Close the pool so no new tasks can be submitted; must be called before join()
        self.pool_process.close()
        self.pool_process.join()


if __name__ == '__main__':
    try:
        app = TasksController()
    except Exception as ex:
        logger.error("Error: %s" % ex)
        logger.error(traceback.format_exc())
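A minimal sketch of driving TasksController. The timestamp format passed to run_time_range is an assumption, since get_time_range_interval lives in toolkits.system.time_helper and its expected input is not shown here; worker functions must be module-level so the multiprocessing Pool can pickle them:

from toolkits.system.tasks_deamon.tasks_controller import TasksController

def worker(range_item):
    # Each pool process receives one item (a slot index or a time range) to work on.
    print("processing:", range_item)

if __name__ == '__main__':
    controller = TasksController(process_num=4)
    # run_redis_tasks() hands each worker its slot index (0..process_num-1).
    controller.run_redis_tasks(worker)
    # run_time_range() fans the intervals between the two timestamps out to the pool;
    # the "YYYY-MM-DD HH:mm:ss" format below is assumed, not documented.
    controller.run_time_range(worker, "2024-01-01 00:00:00", "2024-01-02 00:00:00", time_interval="1d")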
toolkits/system/tasks_deamon/tasks_multiprocessing.py
@@ -0,0 +1,134 @@
# -*- coding: utf-8 -*-
import os
import pprint
import sys
import fire
from log4python.Log4python import log
import traceback
import importlib

path_cur = os.path.dirname(os.path.realpath(__file__))
path_parent = "%s/../../" % path_cur
sys.path.append(path_parent)

from system.priority_tasks import PriorityTasks
from databases.database_client_util import DatabaseClientUtil
from .tasks_controller import TasksController
importlib.reload(sys)
logger = log("TasksMultiprocessing")


queue_name = None
queue_switch = None
func_task = None
redis_info = None


def exec_redis_tasks(task_item):
    if not queue_name or not queue_switch or not redis_info or not func_task:
        logger.error("Err: parameters is Wrong")
        return None

    try:
        task = PriorityTasks(redis_info, queue_name, work_status_queue_name=queue_switch)
        task.worker(func_task)
    except Exception as ex:
        logger.error("Error: %s" % ex)
        logger.error(traceback.format_exc())


class TasksMultiprocessing:
    def __init__(self, tasks_queue_name, tasks_switch, process_num=10):
        global queue_name, queue_switch
        queue_name = tasks_queue_name
        queue_switch = tasks_switch
        self.__base_path = self.__get_script_dir()
        self.__database_init = None
        self.__redis_cli = None
        self.__db_info = None
        self.__process_num = process_num

    def init_db(self, database_info, process_num=10):
        """
        :param database_info:
        :param process_num:
        :return:
        """
        self.__db_info = database_info
        self.__database_init = DatabaseClientUtil(database_info)
        self.__redis_cli = self.__database_init.get_redis_client(db_num=database_info['redis']['db'])
        self.__process_num = process_num
        self.start_tasks()

    @staticmethod
    def __get_run_dir():
        script_path = sys.argv[0]
        if script_path[0] != "/":
            full_path = "%s/%s" % (os.getcwd(), script_path)
        else:
            full_path = script_path
        return os.path.dirname(full_path)

    @staticmethod
    def __get_script_dir():
        return os.path.dirname(os.path.realpath(__file__))

    def get_tasks_queue_info(self):
        global queue_name, queue_switch
        queue_info = {
            "tasks_redis": {
                "host": self.__db_info['redis']['host'],
                "port": self.__db_info['redis']['port'],
                "db_num": self.__db_info['redis']['db']
            },
            "tasks_queue": ["%s_high" % queue_name,
                            "%s_mid" % queue_name,
                            "%s_low" % queue_name],
            "tasks_switch": queue_switch
        }
        pprint.pprint(queue_info)

    def start_tasks(self):
        global queue_switch
        self.__redis_cli.set(queue_switch, "on")
        logger.debug("Set switch ON [%s]" % queue_switch)

    def __wait_process_stop(self):
        pass

    def __kill_process(self):
        pass

    def set_worker(self, func_process):
        global func_task, redis_info
        func_task = func_process
        redis_info = self.__db_info['redis']

    def stop_tasks(self):
        global queue_switch
        self.__redis_cli.set(queue_switch, "off")
        logger.debug("Set switch OFF [%s]" % queue_switch)
        self.__wait_process_stop()
        self.__kill_process()

    def daemon_start(self, action):
        """
        action = start
        :param action:
        :return:
        """
        if action == "start":
            task_controller = TasksController(self.__process_num)
            task_controller.run_redis_tasks(exec_redis_tasks)
        else:
            print("Please Check the parameter!!")


if __name__ == '__main__':
    try:
        # import pydevd
        # pydevd.settrace('127.0.0.1', port=6868, stdoutToServer=True, stderrToServer=True)
        fire.Fire(TasksMultiprocessing)
    except Exception as ex:
        logger.error("Error: %s" % ex)
        logger.error(traceback.format_exc())
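Finally, a rough sketch of how TasksMultiprocessing appears intended to be wired together. The database_info layout (a 'redis' section with host/port/db) is inferred from init_db and get_tasks_queue_info and should be treated as an assumption, as should the queue and switch names:

from toolkits.system.tasks_deamon.tasks_multiprocessing import TasksMultiprocessing

def handle_task(task_item):
    # Module-level worker handed to PriorityTasks through set_worker().
    print("got task:", task_item)

# Assumed shape: DatabaseClientUtil is given at least a 'redis' section.
database_info = {"redis": {"host": "127.0.0.1", "port": 6379, "db": 0}}

mgr = TasksMultiprocessing("demo_queue", "demo_queue_switch", process_num=4)
mgr.init_db(database_info, process_num=4)  # opens the Redis client and sets the switch key to "on"
mgr.set_worker(handle_task)
mgr.get_tasks_queue_info()                 # prints the demo_queue_high/_mid/_low names and the switch key
mgr.daemon_start("start")                  # spawns the pool; blocks until the workers drain the queues
# stop_tasks() sets the switch key to "off" so the workers exit; typically invoked from another process.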