toolkits 0.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. toolkits/3des/3des.py +93 -0
  2. toolkits/3des/__init__.py +0 -0
  3. toolkits/__init__.py +2 -0
  4. toolkits/basic/__init__.py +0 -0
  5. toolkits/basic/list_helper.py +26 -0
  6. toolkits/config/__init__.py +0 -0
  7. toolkits/config/config_demo.py +43 -0
  8. toolkits/databases/__init__.py +0 -0
  9. toolkits/databases/database_client_util.py +143 -0
  10. toolkits/databases/es_client.py +88 -0
  11. toolkits/databases/hive_client.py +72 -0
  12. toolkits/databases/hive_cmd.py +113 -0
  13. toolkits/databases/hive_helper.py +220 -0
  14. toolkits/databases/redis_mgmt.py +95 -0
  15. toolkits/databases/sql_helper.py +291 -0
  16. toolkits/databases/sqlalchemy_helper.py +71 -0
  17. toolkits/databases/status_check.py +162 -0
  18. toolkits/db_query_demo.py +72 -0
  19. toolkits/libs_core/__init__.py +0 -0
  20. toolkits/libs_core/config_groups_helper.py +60 -0
  21. toolkits/libs_core/config_helper.py +22 -0
  22. toolkits/libs_core/env_prepare.py +145 -0
  23. toolkits/libs_core/load_module.py +46 -0
  24. toolkits/libs_core/mysql_helper.py +151 -0
  25. toolkits/network/__init__.py +0 -0
  26. toolkits/network/ip_helper.py +32 -0
  27. toolkits/network/pdi_helper.py +206 -0
  28. toolkits/network/send_mail.py +105 -0
  29. toolkits/system/__init__.py +0 -0
  30. toolkits/system/aes_cipher.py +44 -0
  31. toolkits/system/basic_utils.py +20 -0
  32. toolkits/system/collections_helper.py +72 -0
  33. toolkits/system/crpyt_helper.py +39 -0
  34. toolkits/system/dict2xml.py +416 -0
  35. toolkits/system/dict_helper.py +29 -0
  36. toolkits/system/excel_helper.py +101 -0
  37. toolkits/system/file_helper.py +52 -0
  38. toolkits/system/load_module.py +47 -0
  39. toolkits/system/priority_tasks.py +199 -0
  40. toolkits/system/process_monitor/__init__.py +0 -0
  41. toolkits/system/process_monitor/process_monitor.py +349 -0
  42. toolkits/system/shell_helper.py +263 -0
  43. toolkits/system/str_helper.py +187 -0
  44. toolkits/system/tasks_deamon/__init__.py +0 -0
  45. toolkits/system/tasks_deamon/tasks_controller.py +70 -0
  46. toolkits/system/tasks_deamon/tasks_multiprocessing.py +134 -0
  47. toolkits/system/tasks_deamon/tasks_process.py +137 -0
  48. toolkits/system/test_shell_helper.py +2 -0
  49. toolkits/system/time_helper.py +175 -0
  50. toolkits/system/win32_env.py +49 -0
  51. toolkits/tookits_app.py +17 -0
  52. toolkits/tookits_cli.py +126 -0
  53. toolkits-0.2.7.dist-info/METADATA +35 -0
  54. toolkits-0.2.7.dist-info/RECORD +56 -0
  55. toolkits-0.2.7.dist-info/WHEEL +4 -0
  56. toolkits-0.2.7.dist-info/entry_points.txt +5 -0
@@ -0,0 +1,220 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import json
4
+ import os
5
+ import sys
6
+
7
+ import time
8
+ import uuid
9
+ from os.path import dirname
10
+
11
+ import arrow
12
+ from log4python.Log4python import log
13
+ import traceback
14
+
15
+ from toolkits.databases.hive_cmd import HiveCmd
16
+ from toolkits.system.excel_helper import ExcelHelper
17
+ from toolkits.system.shell_helper import exec_shell_with_pipe
18
+ from unipath import Path
19
+ import importlib
20
+
21
+ importlib.reload(sys)
22
+ logger = log("HiveHelper")
23
+
24
+
25
class HiveHelper:
    """Helper for bulk-loading data into partitioned Hive tables and for
    running ad-hoc Hive queries.

    Generated work files (HQL scripts, data files, query results) are kept
    under ``work_path_base`` (defaults to the running script's directory).
    """

    def __init__(self, work_path_base=None, yarn_queue="root.db.default", db_name="db_info"):
        """
        :param work_path_base: base directory for logs/config; defaults to
            the directory of the running script
        :param yarn_queue: YARN queue name set before each Hive job
        :param db_name: Hive database the generated HQL operates on
        """
        if not work_path_base:
            self.base_path = self.get_run_dir()
        else:
            self.base_path = work_path_base
        self.log_path = "%s/logs" % self.base_path
        self.yarn_queue = yarn_queue
        self.db_name = db_name
        self.sql_file = "%s/config/driver_query.sql" % self.base_path

    @staticmethod
    def get_run_dir():
        """Return the absolute directory of the currently running script."""
        script_path = sys.argv[0]
        if script_path[0] != "/":
            full_path = "%s/%s" % (os.getcwd(), script_path)
        else:
            full_path = script_path
        return os.path.dirname(full_path)

    def get_table_schema(self, tmp_table_name, target_table_name, table_info, time_range, data_file):
        """Build the HQL snippets used to stage *data_file* in a temporary
        table and insert it into the partitioned target table.

        :param tmp_table_name: name of the temporary staging table
        :param target_table_name: name of the partitioned destination table
        :param table_info: {"fields_schema": '', "partition_type": 'day|hour'}
        :param time_range: dict whose 'time_begin' derives the partition values
        :param data_file: local file loaded into the staging table
        :return: dict_sql = {
            "header": content_header,
            "insert": sql_insert,
            "tmp_table_delete": sql_delete_tmp_table % tmp_table_name
        }
        """
        partition_time = time_range['time_begin']
        year = arrow.get(partition_time).format('YYYY')
        month = arrow.get(partition_time).format('MM')
        day = arrow.get(partition_time).format('DD')
        hour = arrow.get(partition_time).format('HH')

        header = '''set mapred.job.queue.name=%s;\nuse %s;\n''' % (self.yarn_queue, self.db_name)
        sql_create_tmp_table = '''CREATE TABLE %s(
%s
)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\\t'
STORED AS TEXTFILE;\n'''
        sql_delete_tmp_table = 'drop table %s;\n'
        sql_load_data = '''LOAD DATA LOCAL INPATH '%s' INTO TABLE %s;\n'''

        # Hourly tables carry an extra "hour" partition column.
        if table_info['partition_type'] == "hour":
            sql_insert = '''INSERT INTO TABLE %s PARTITION (year='%s', month='%s', day='%s', hour='%s') select * from %s;\n''' % (target_table_name, year, month, day, hour, tmp_table_name)
        else:
            sql_insert = '''INSERT INTO TABLE %s PARTITION (year='%s', month='%s', day='%s') select * from %s;\n''' % (target_table_name, year, month, day, tmp_table_name)

        content_header = "%s\n%s\n%s\n" % (
            header,
            sql_create_tmp_table % (tmp_table_name, table_info['fields_schema']),
            sql_load_data % (data_file, tmp_table_name)
        )

        dict_sql = {
            "header": content_header,
            "insert": sql_insert,
            "tmp_table_delete": sql_delete_tmp_table % tmp_table_name
        }
        return dict_sql

    def get_file_path(self, time_range):
        """Derive unique work-file paths (under a per-month log directory)
        for one load run.

        :param time_range: dict with 'time_begin' and 'time_end'
        :return:
            ret_dict = {
                "table_name": table_name,
                "file_sql": hive_sql,
                "file_err": hive_err,
                "file_out": hive_out,
                "file_data": data_file
            }
        """
        begin = arrow.get(time_range['time_begin']).format('YYYYMMDD_HHmmss')
        table_create_time = arrow.now().format('YYYYMMDDHH')
        end = arrow.get(time_range['time_end']).format('YYYYMMDD_HHmmss')
        month_str = arrow.get(time_range['time_end']).format('YYYYMM')

        # uuid suffix keeps concurrent runs from clashing on file/table names
        random_str = str(uuid.uuid4()).replace("-", "")
        name_post_fix = "%s_%s_%s" % (begin, end, random_str)
        month_dir = "%s/%s" % (self.log_path, month_str)
        if not Path(month_dir).exists():
            Path(month_dir).mkdir(parents=True)
        data_file = "%s/data_%s.dat" % (month_dir, name_post_fix)
        table_name = "tmp_%s_%s" % (table_create_time, random_str)
        hive_sql = "%s/audit_%s.hql" % (month_dir, name_post_fix)
        hive_err = "%s/audit_%s.err" % (month_dir, name_post_fix)
        hive_out = "%s/audit_%s.dat" % (month_dir, name_post_fix)

        ret_dict = {
            "table_name": table_name,
            "file_sql": hive_sql,
            "file_err": hive_err,
            "file_out": hive_out,
            "file_data": data_file
        }
        return ret_dict

    def hive_execute(self, dict_path, content, values):
        """Write the HQL script and data file, then run ``hive -f`` on them.

        stderr/stdout of the hive process are redirected to the paths in
        *dict_path*.
        """
        self.write_file(dict_path['file_sql'], content, "w")
        self.write_file(dict_path['file_data'], values, "w")

        cmd = "hive -f %s 2> %s > %s " % (dict_path['file_sql'], dict_path['file_err'], dict_path['file_out'])
        logger.debug("CMD:[%s]" % cmd)
        ret = exec_shell_with_pipe(cmd)
        logger.debug("CMD_RET:[%s]" % ";".join(ret))

    def hive_insert(self, table_info, data, time_range, action_type=True, event_table_name="empl_access"):
        """Insert *data* (iterable of row tuples) into *event_table_name*
        via a temporary staging table.

        :param action_type: True = full load (stage + insert + drop tmp
            table); False = debug mode, only the staging table is populated.
        """
        rows_warn = []
        for item in data:
            rows_warn.append("\t".join(map(str, item)))

        if rows_warn:
            # de-duplicate rows before loading
            values = "\n".join(list(set(rows_warn)))
            dict_path = self.get_file_path(time_range)
            # logger.debug("FilePath:[%s]" % json.dumps(dict_path, ensure_ascii=False))
            dict_sql = self.get_table_schema(dict_path['table_name'], event_table_name, table_info,
                                             time_range, dict_path['file_data'])

            if action_type:
                content = "%s\n%s\n%s" % (
                    dict_sql['header'],
                    dict_sql['insert'],
                    dict_sql['tmp_table_delete']
                )
            else:
                content = dict_sql['header']
                logger.debug("Mode:Debug!! Just write data to Tmp table[%s]\n Delete Tmp Table[drop table %s]" %
                             (dict_path['table_name'], dict_path['table_name']))
            self.hive_execute(dict_path, content, values)
        else:
            logger.debug("input [rows_warn] is empty!! ")

    @staticmethod
    def write_file(file_path, content, mode="w"):
        """Write *content* to *file_path*; the context manager guarantees
        the handle is closed even if the write fails."""
        with open(file_path, mode) as fp:
            fp.write(content)

    def hive_query(self, sql_query, file_name=""):
        """Run *sql_query* through HiveCmd and block until it finishes.

        :return: path of the file holding the query result
        """
        random_str = str(uuid.uuid4()).replace("-", "")
        query_data_file = "%s/hive_query/hive_query_%s_%s.log" % (self.base_path, file_name, random_str)
        dir_path = dirname(query_data_file)
        if not Path(dir_path).exists():
            try:
                Path(dir_path).mkdir(parents=True)
            except Exception as ex:
                logger.error("Error: %s" % ex)
                logger.error(traceback.format_exc())

        hive_cmd = HiveCmd(query_data_file, sql_query, self.db_name, self.yarn_queue)
        logger.debug("Query:[%s]; ResultFile:[%s]" % (sql_query, query_data_file))
        hive_cmd.query()
        # BUG FIX: the original loop spun at 100% CPU while the query was
        # still running (it only slept once, *after* completion). Poll once
        # per second until the query reports it is finished.
        while not hive_cmd.query_is_finished():
            time.sleep(1)
        return query_data_file

    def query(self, sql_query):
        """Run *sql_query* and return the result rows parsed from the
        tab-separated output file (empty list if no output was produced)."""
        file_name_time = arrow.now().format('YYYY-MM-DDTHHmmss')
        result_file = self.hive_query(sql_query, file_name_time)
        phone_list = []

        if Path(result_file).exists():
            excel = ExcelHelper(result_file, column_split="\t")
            phone_list = excel.data
        return phone_list

    def worker(self):
        """Placeholder entry point for the CLI "start" action."""
        pass
203
+
204
+
205
if __name__ == '__main__':
    try:
        # CLI entry point: the single "start" action runs the worker loop.
        arg_parser = argparse.ArgumentParser()
        arg_parser.add_argument("action", type=str, help="specify the action [start]")
        cli_args = arg_parser.parse_args()

        if cli_args.action != "start":
            print("Please Check the parameter!!")
        else:
            helper = HiveHelper()
            helper.worker()
    except Exception as ex:
        logger.error("Error: %s" % ex)
        logger.error(traceback.format_exc())
219
+
220
+
@@ -0,0 +1,95 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import json
4
+ import sys
5
+ import redis
6
+ from log4python.Log4python import log
7
+ import traceback
8
+ import importlib
9
+
10
+ importlib.reload(sys)
11
+ logger = log("RedisMgmt")
12
+
13
# Default Redis endpoint used by RedisMgmt.
# NOTE(review): host and password are hard-coded here — consider moving
# them to configuration or environment variables.
host_ip = '192.168.100.110'
redis_info = {
    'password': 'password',
    'host': host_ip,
    'port': 6307
}
19
+
20
+
21
class RedisMgmt:
    """Move list data between Redis queues and flat files."""

    def __init__(self, redis_db=8):
        """
        :param redis_db: Redis database index to connect to (coerced to int)
        """
        self.redisCli = redis.StrictRedis(host=redis_info['host'], port=redis_info['port'],
                                          password=redis_info['password'], db=int(redis_db))

    def _drain_queue(self, key_source):
        """Pop every element from the Redis list *key_source* and return
        them as a list of ``str``.

        BUG FIX: redis-py returns ``bytes`` under Python 3; the original
        code later crashed on ``"\\n".join(...)`` / ``json.dumps(...)`` with
        undecoded items, so decode here.
        """
        data_list = []
        while True:
            data = self.redisCli.lpop(key_source)
            if not data:
                break
            if isinstance(data, bytes):
                data = data.decode("utf-8")
            data_list.append(data)
        return data_list

    def dump_queue(self, store_type, key_source, key_destination):
        """Drain the Redis list *key_source* into another Redis list
        (``store_type == "redis"``) or append the items, newline-joined,
        to the file *key_destination* (any other store_type)."""
        data_list = self._drain_queue(key_source)

        if store_type == "redis":
            for data_item in data_list:
                self.redisCli.rpush(key_destination, data_item)
        else:
            # items were decoded to str above, so text-mode append is safe
            with open(key_destination, "a+") as fp:
                fp.write("\n".join(data_list))

        logger.debug("Dumps Success data number; [%s]" % str(len(data_list)))

    def load_to_redis(self, store_type, key_source, key_destination):
        """Push each line of a Redis list (``store_type == "redis"``) or of
        the file *key_source* onto the Redis list *key_destination*;
        items are stripped of surrounding whitespace before the push."""
        if store_type == "redis":
            data_list = self._drain_queue(key_source)
        else:
            with open(key_source, "r") as fp:
                data_list = fp.readlines()

        for data_item in data_list:
            self.redisCli.rpush(key_destination, data_item.strip())
        logger.debug("Loads Success data number; [%s]; Data:[%s]" % (str(len(data_list)),
                                                                     json.dumps(data_list, ensure_ascii=False)))
69
+
70
+
71
if __name__ == '__main__':
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("redis_db", type=str, help="specify the redis_db")
        parser.add_argument("store_type", type=str, help="specify the store type [file|redis]")
        parser.add_argument("action_type", type=str, help="specify the action type [load|dump]")
        parser.add_argument("source", type=str, help="specify the queue_source or file")
        parser.add_argument("destination", type=str, help="specify the queue_destination or file")
        args = parser.parse_args()

        redis_mgmt = RedisMgmt(args.redis_db)
        # BUG FIX: invalid arguments were only logged and execution then
        # proceeded anyway; abort instead (SystemExit bypasses the broad
        # except below).
        if args.store_type not in ("file", "redis"):
            logger.debug("Store Type Error! not in [file, redis]")
            sys.exit(1)

        # BUG FIX: this message was a copy-paste of the store_type one
        # ("Store Type Error! not in [dump, load]").
        if args.action_type not in ("dump", "load"):
            logger.debug("Action Type Error! not in [dump, load]")
            sys.exit(1)

        if args.action_type == "dump":
            redis_mgmt.dump_queue(args.store_type, args.source, args.destination)
        else:
            redis_mgmt.load_to_redis(args.store_type, args.source, args.destination)

    except Exception as ex:
        logger.debug("Error: %s" % ex)
        logger.debug(traceback.format_exc())
@@ -0,0 +1,291 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import pprint
4
+ import re
5
+ import sys
6
+
7
+ import sqlparse
8
+ from log4python.Log4python import log
9
+ from sqlparse.sql import Where, Comparison, Parenthesis, Identifier, Token, Function
10
+ import traceback
11
+
12
+ from sqlparse.tokens import DML
13
+ import importlib
14
+
15
+ importlib.reload(sys)
16
+ logger = log("SqlHelper")
17
+
18
+
19
class SqlWhere:
    """Extract column conditions from the WHERE clause(s) of a SQL statement.

    The statement is tokenized with ``sqlparse``; WHERE clauses are split
    recursively on OR/AND (honouring ``BETWEEN ... AND``) and flattened into
    a list of ``{"key", "op", "val"}`` dicts.
    """

    def __init__(self, sql_dml):
        """
        :param sql_dml: the SQL (DML) text to analyse
        """
        self.sql_dml = sql_dml
        self.where_tokens = None
        # Shape reference for the structures built by parse_where_tokens().
        self.condition_data = {
            "condition": {
                "key": "",
                "op": "",
                "val": []
            },
            "or_list": [],
            "and_list": []
        }
        self.where_conditions = None

    @staticmethod
    def split_or(where_tokens):
        """Split a WHERE token sequence on top-level OR keywords.

        :return: list of token sub-lists, one per OR branch; empty list if
            no OR keyword was present.
        """
        or_list = []
        or_item = []
        found_flag = False

        for item in where_tokens:
            if item.is_keyword and item.value.upper() == 'WHERE':
                continue

            if item.is_keyword and item.value.upper() == 'OR':
                found_flag = True
                # or_item_tmp = copy.deepcopy(or_item)
                or_list.append(or_item)
                or_item = []
            else:
                or_item.append(item)

        if found_flag:
            # trailing branch after the last OR
            or_list.append(or_item)

        return or_list

    @staticmethod
    def split_and(where_tokens):
        """Split a WHERE token sequence on top-level AND keywords, keeping
        the AND that belongs to ``BETWEEN x AND y`` with its expression.

        :return: list of token sub-lists, one per AND operand; empty list
            if no splitting AND was present.
        """
        and_list = []
        and_item = []
        found_flag = False
        between_flag = False

        for item in where_tokens:
            if item.is_keyword and item.value.upper() == 'WHERE':
                continue

            if item.is_keyword and item.value.upper() == 'BETWEEN':
                between_flag = True

            if item.is_keyword and item.value.upper() == 'AND' and not between_flag:
                found_flag = True
                and_list.append(and_item)
                and_item = []
            else:
                if item.is_keyword and item.value.upper() == 'AND' and between_flag:
                    # this AND closes the BETWEEN expression
                    between_flag = False
                and_item.append(item)

        if found_flag:
            and_list.append(and_item)

        return and_list

    @staticmethod
    def get_condition(tokens):
        """Reduce a token sequence holding a single condition to
        ``{"key", "op", "val"}``.

        NOTE(review): for a plain Comparison, ``val`` is the raw right-hand
        string; otherwise it is a list of value strings — callers must
        accept both shapes.
        """
        key_found_flag = False
        val = []
        key = ""
        op = []

        for token in tokens:
            if token.is_whitespace:
                continue

            if token.is_keyword and token.value.upper() == 'WHERE':
                continue

            if isinstance(token, Comparison):
                key = token.left.value
                op = ["eq"]
                val = token.right.value
                break

            if token.is_keyword:
                if key_found_flag:
                    # keywords after the key form the operator, e.g. NOT-IN
                    op.append(token.value)
                else:
                    if not key_found_flag:
                        key = token.value
                        key_found_flag = True
            else:
                if isinstance(token, Parenthesis):
                    # value list such as (1, 2, 3) / ("a", "b")
                    tmp_val = token.value.strip("() ").split(",")
                    val_strip = [item.strip("\"") for item in tmp_val]
                    val.extend(val_strip)
                else:
                    val.append(token.value)
                    # val += token.value
        condition_tmp = {
            "key": key,
            "op": "-".join(op).upper(),
            "val": val
        }

        return condition_tmp

    def check_sub_conditions(self, where_tokens):
        """Return the token list of the first parenthesised group that
        itself contains OR/AND conditions; empty list if none exists."""
        sub_tokens = []
        for token in where_tokens:
            if isinstance(token, Parenthesis) and (self.split_or(token.tokens) or self.split_and(token.tokens)):
                # sub_tokens.append(token.tokens)
                sub_tokens = token.tokens
        return sub_tokens

    def parse_where_tokens(self, where_tokens):
        """Recursively build the condition tree for one WHERE token
        sequence: OR branches first, then AND operands, then parenthesised
        sub-conditions, and finally a leaf condition."""
        where_group_tmp = {
            "condition": None,
            "sub_condition": None,
            "or_list": [],
            "and_list": [],
        }
        or_list = self.split_or(where_tokens)
        if or_list:
            for or_item in or_list:
                where_group_tmp['or_list'].append(self.parse_where_tokens(or_item))
        else:
            and_list = self.split_and(where_tokens)
            if and_list:
                for and_item in and_list:
                    where_group_tmp['and_list'].append(self.parse_where_tokens(and_item))
            else:
                sub_tokens = self.check_sub_conditions(where_tokens)
                if sub_tokens:
                    where_group_tmp['sub_condition'] = self.parse_where_tokens(sub_tokens)
                else:
                    where_group_tmp['condition'] = self.get_condition(where_tokens)

        return where_group_tmp

    def print_where(self):
        """Pretty-print the last stored condition tree (debug aid)."""
        pprint.pprint(self.where_conditions)

    def get_column_condition(self, where_condition):
        """Flatten a condition tree into a list of leaf condition dicts."""
        column_condition = []
        if where_condition['condition']:
            column_condition.append(where_condition['condition'])

        if where_condition['sub_condition']:
            column_condition.extend(self.get_column_condition(where_condition['sub_condition']))

        if where_condition['or_list']:
            for item in where_condition['or_list']:
                column_condition.extend(self.get_column_condition(item))

        if where_condition['and_list']:
            for item in where_condition['and_list']:
                column_condition.extend(self.get_column_condition(item))

        return column_condition

    def get_where_conditions(self):
        """Parse the SQL and return every leaf condition found in every
        WHERE clause, as a flat list of ``{"key", "op", "val"}`` dicts."""
        where_tokens_list = self.get_sql_where()

        column_condition_list = []
        for where in where_tokens_list:
            tmp_where_tokens = self.parse_where_tokens(where)
            tmp_list = self.get_column_condition(tmp_where_tokens)
            column_condition_list.extend(tmp_list)

        return column_condition_list

    def has_dml(self, obj_check, level):
        """Search *obj_check* (a sqlparse token group) up to *level* levels
        deep for a DML keyword; return the containing token group or None.

        NOTE(review): the ``finally: return`` deliberately swallows any
        exception raised during the scan and yields None instead.
        """
        sql_dml = None
        try:
            if level == 0:
                return sql_dml
            else:
                level -= 1
            found_dml = False
            if not isinstance(obj_check, Token):
                for token_item in obj_check.tokens:
                    if token_item.ttype is DML:
                        found_dml = True
                        break
            if found_dml:
                sql_dml = obj_check
            else:
                if not isinstance(obj_check, Token):
                    for token_item in obj_check.tokens:
                        ret = self.has_dml(token_item, level)
                        if ret:
                            sql_dml = token_item
                            break
        except Exception as ex:
            sql_dml = None
            logger.debug("Error: %s" % ex)
            logger.debug(traceback.format_exc())
        finally:
            return sql_dml

    def get_where_tokens(self, tokens):
        """Collect Where token groups from a DML statement's token list,
        descending into sub-queries wrapped in Identifier tokens."""
        tokens_where = []
        sql_dml = False
        for token_item in tokens:
            if token_item.ttype is DML:
                sql_dml = True
                break

        if not sql_dml:
            # not a DML statement: nothing to extract
            return tokens_where

        for token_item in tokens:
            if isinstance(token_item, Where):
                tokens_where.append(token_item)
            if isinstance(token_item, Identifier):
                token_check = self.has_dml(token_item, 2)
                if token_check:
                    sql_where = self.get_where_tokens(token_check.tokens)
                    if sql_where:
                        tokens_where.extend(sql_where)

        return tokens_where

    def search_columns(self, where_list, columns_list):
        """Skeleton for matching *columns_list* against WHERE tokens.

        NOTE(review): every branch is currently ``pass`` — this method is
        an unfinished stub and has no effect.
        """
        for where_item in where_list:
            for token in where_item.tokens:
                for column in columns_list:
                    if token.is_group:
                        pass
                    elif token.is_keyword:
                        pass
                    elif token.is_whitespace:
                        pass
                    if isinstance(token, Identifier):
                        pass
                    if isinstance(token, Comparison):
                        pass
                    if isinstance(token, Function):
                        pass

    def get_sql_where(self):
        """Parse the SQL text and return the Where token groups of every
        contained statement.

        Example inputs::

            select * from test01 where id=1 or (id=2 and id=4) and id=5 or id = 6 and id not in (4,5,6)
            select * from test01 where id=1 or (id=2 and id=4)
        """
        # Strip "-- ...;" comments and statement separators before parsing.
        # NOTE(review): this also removes semicolons inside string literals.
        sql_query = str(re.sub("--.*?;", " ", self.sql_dml)).replace(";", " ")
        sql_list = sqlparse.parse(sql_query)
        where_tokens_list = []

        for sql in sql_list:
            # BUG FIX: the match was case-sensitive, so statements written
            # with an upper-case WHERE were silently skipped.
            where_list = re.findall("where", sql.value, re.IGNORECASE)
            where_num = len(where_list)
            if where_num == 0:
                print("No WHERE clause found.")
                # BUG FIX: was `break`, which aborted processing of all
                # remaining statements after one without a WHERE clause.
                continue
            where_tokens = self.get_where_tokens(sql.tokens)
            where_tokens_list.extend(where_tokens)

        return where_tokens_list
281
+
282
+
283
if __name__ == '__main__':
    try:
        # Smoke test: parse a sample statement and dump its flattened conditions.
        sample_sql = "select * from test01 where id=1 or (id=2 and id=4)"
        where_parser = SqlWhere(sample_sql)
        found_conditions = where_parser.get_where_conditions()
        pprint.pprint(found_conditions)
    except Exception as ex:
        logger.debug("Error: %s" % ex)
        logger.debug(traceback.format_exc())
@@ -0,0 +1,71 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import os
4
+ import pprint
5
+ import sys
6
+ from log4python.Log4python import log
7
+ import traceback
8
+ import importlib
9
+ importlib.reload(sys)
10
+ logger = log("SqlAlchemyHelper")
11
+
12
+
13
class SqlAlchemyHelper:
    """Helpers for converting SQLAlchemy result rows into plain dicts."""

    def __init__(self):
        self.base_path = self.__get_script_dir()
        self.sql_file = "%s/config/driver_query.sql" % self.base_path

    @staticmethod
    def __get_run_dir():
        """Return the absolute directory of the script being executed
        (derived from ``sys.argv[0]``)."""
        script_path = sys.argv[0]
        if script_path[0] != "/":
            full_path = "%s/%s" % (os.getcwd(), script_path)
        else:
            full_path = script_path
        return os.path.dirname(full_path)

    @staticmethod
    def __get_script_dir():
        """Return the directory containing this source file."""
        return os.path.dirname(os.path.realpath(__file__))

    @staticmethod
    def __row2dict(row):
        """Convert a mapping-like row to ``{column: str(value)}``.

        Columns whose values cannot be stringified are logged and mapped to
        the empty string.
        """
        d = {}
        for column in list(row.keys()):
            content = ""
            try:
                # BUG FIX: the original `str(...).encode('utf-8')` produced
                # `bytes` under Python 3 (a py2 leftover); keep text values.
                content = str(row[column])
            except Exception as ex:
                logger.debug("ColumnName:[%s]; ColumnContent:[%s]" % (column, pprint.pformat(row[column])))
                logger.error("Error: %s" % ex)
                logger.error(traceback.format_exc())
            finally:
                d[column] = content
        return d

    def rows2list(self, result):
        """Convert an iterable of mapping-like rows to a list of dicts
        (values stringified)."""
        rows = []
        for item in result:
            rows.append(self.__row2dict(item))
        return rows

    def worker(self):
        """Placeholder entry point for the CLI "start" action."""
        pass
55
+
56
+
57
if __name__ == '__main__':
    try:
        # CLI entry point: the single "start" action runs the worker loop.
        arg_parser = argparse.ArgumentParser()
        arg_parser.add_argument("action", type=str, help="specify the action [start]")
        cli_args = arg_parser.parse_args()

        if cli_args.action != "start":
            print("Please Check the parameter!!")
        else:
            helper = SqlAlchemyHelper()
            helper.worker()
    except Exception as ex:
        logger.error("Error: %s" % ex)
        logger.error(traceback.format_exc())
71
+