toolkits 0.2.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56) hide show
  1. toolkits/3des/3des.py +93 -0
  2. toolkits/3des/__init__.py +0 -0
  3. toolkits/__init__.py +2 -0
  4. toolkits/basic/__init__.py +0 -0
  5. toolkits/basic/list_helper.py +26 -0
  6. toolkits/config/__init__.py +0 -0
  7. toolkits/config/config_demo.py +43 -0
  8. toolkits/databases/__init__.py +0 -0
  9. toolkits/databases/database_client_util.py +143 -0
  10. toolkits/databases/es_client.py +88 -0
  11. toolkits/databases/hive_client.py +72 -0
  12. toolkits/databases/hive_cmd.py +113 -0
  13. toolkits/databases/hive_helper.py +220 -0
  14. toolkits/databases/redis_mgmt.py +95 -0
  15. toolkits/databases/sql_helper.py +291 -0
  16. toolkits/databases/sqlalchemy_helper.py +71 -0
  17. toolkits/databases/status_check.py +162 -0
  18. toolkits/db_query_demo.py +72 -0
  19. toolkits/libs_core/__init__.py +0 -0
  20. toolkits/libs_core/config_groups_helper.py +60 -0
  21. toolkits/libs_core/config_helper.py +22 -0
  22. toolkits/libs_core/env_prepare.py +145 -0
  23. toolkits/libs_core/load_module.py +46 -0
  24. toolkits/libs_core/mysql_helper.py +151 -0
  25. toolkits/network/__init__.py +0 -0
  26. toolkits/network/ip_helper.py +32 -0
  27. toolkits/network/pdi_helper.py +206 -0
  28. toolkits/network/send_mail.py +105 -0
  29. toolkits/system/__init__.py +0 -0
  30. toolkits/system/aes_cipher.py +44 -0
  31. toolkits/system/basic_utils.py +20 -0
  32. toolkits/system/collections_helper.py +72 -0
  33. toolkits/system/crpyt_helper.py +39 -0
  34. toolkits/system/dict2xml.py +416 -0
  35. toolkits/system/dict_helper.py +29 -0
  36. toolkits/system/excel_helper.py +101 -0
  37. toolkits/system/file_helper.py +52 -0
  38. toolkits/system/load_module.py +47 -0
  39. toolkits/system/priority_tasks.py +199 -0
  40. toolkits/system/process_monitor/__init__.py +0 -0
  41. toolkits/system/process_monitor/process_monitor.py +349 -0
  42. toolkits/system/shell_helper.py +263 -0
  43. toolkits/system/str_helper.py +187 -0
  44. toolkits/system/tasks_deamon/__init__.py +0 -0
  45. toolkits/system/tasks_deamon/tasks_controller.py +70 -0
  46. toolkits/system/tasks_deamon/tasks_multiprocessing.py +134 -0
  47. toolkits/system/tasks_deamon/tasks_process.py +137 -0
  48. toolkits/system/test_shell_helper.py +2 -0
  49. toolkits/system/time_helper.py +175 -0
  50. toolkits/system/win32_env.py +49 -0
  51. toolkits/tookits_app.py +17 -0
  52. toolkits/tookits_cli.py +126 -0
  53. toolkits-0.2.7.dist-info/METADATA +35 -0
  54. toolkits-0.2.7.dist-info/RECORD +56 -0
  55. toolkits-0.2.7.dist-info/WHEEL +4 -0
  56. toolkits-0.2.7.dist-info/entry_points.txt +5 -0
toolkits/3des/3des.py ADDED
@@ -0,0 +1,93 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ import pyDes
4
+ import base64
5
+
6
+
7
class TripleDesUtils:
    """3DES encrypt/decrypt helper built on pyDes, with optional base64 I/O."""

    # mode/pad-mode names accepted by __init__, mapped to pyDes constants
    des_mode = {"CBC": pyDes.CBC, "ECB": pyDes.ECB}
    des_pad_mode = {"PAD_PKCS5": pyDes.PAD_PKCS5, "PAD_NORMAL": pyDes.PAD_NORMAL}

    def __init__(self, mode, pad_mode, key, iv, pad=None, trans_base64=False):
        """
        :param mode: DES chaining mode, currently "CBC" or "ECB"
        :param pad_mode: padding scheme, "PAD_PKCS5" or "PAD_NORMAL"
        :param key: secret key; 16 or 24 chars for 3DES (8 chars or fewer
            behaves like plain DES). Longer keys are truncated, shorter
            ones padded with '\\0' (pyDes behavior).
        :param iv: 8-char initialization vector (ignored in ECB mode);
            same truncate/pad rule as the key
        :param pad: explicit padding character for PAD_NORMAL mode
        :param trans_base64: if True, encryption() base64-encodes its result
            and decrypt() base64-decodes its input first
        """
        self.trans_base64 = trans_base64
        # Unknown mode/pad_mode names yield None via .get(); pyDes then
        # rejects them at construction time.
        self.k = pyDes.triple_des(key, TripleDesUtils.des_mode.get(mode), iv, pad,
                                  TripleDesUtils.des_pad_mode.get(pad_mode))

    def encryption(self, data):
        """Encrypt *data*; returns ciphertext bytes (base64 if trans_base64)."""
        _encryption_result = self.k.encrypt(data)
        if self.trans_base64:
            _encryption_result = self._base64encode(_encryption_result)
        return _encryption_result

    def decrypt(self, data):
        """Decrypt *data*, base64-decoding it first when trans_base64 is set."""
        if self.trans_base64:
            data = self._base64decode(data)
        return self.k.decrypt(data)

    @staticmethod
    def _base64encode(data):
        """base64-encode *data*, wrapping any failure with context."""
        try:
            return base64.b64encode(data)
        except Exception as e:
            # BUG FIX: the original string lacked the f-prefix, so the raised
            # message was the literal text "base64 encode error:{e}".
            raise Exception(f"base64 encode error:{e}") from e

    @staticmethod
    def _base64decode(data):
        """base64-decode *data*, wrapping any failure with context."""
        try:
            return base64.b64decode(data)
        except Exception as e:
            # BUG FIX: same missing f-prefix as _base64encode.
            raise Exception(f"base64 decode error:{e}") from e
72
+
73
+
74
if __name__ == "__main__":
    test_data = "12345678a"
    key_a = "uusafeuusafeuusafeuusafe"
    key_b = "jiayufeuusafeuusafeuusaf"
    # [12345678a] 3des Result: 2yjtt0Y/c7xEOa9VGetBVA==
    # Double-encrypt the sample with both key orderings (a→b, then b→a)
    # and print each stage's ciphertext.
    for first_key, second_key in ((key_a, key_b), (key_b, key_a)):
        cipher = TripleDesUtils(mode="CBC", pad_mode="PAD_PKCS5", key=first_key, iv="01234567", trans_base64=True)
        stage_one = cipher.encryption(test_data)
        print("加密结果: %s" % stage_one)
        cipher = TripleDesUtils(mode="CBC", pad_mode="PAD_PKCS5", key=second_key, iv="01234567", trans_base64=True)
        stage_two = cipher.encryption(stage_one)
        print("加密结果: %s" % stage_two)
File without changes
toolkits/__init__.py ADDED
@@ -0,0 +1,2 @@
1
__author__ = 'li_jia_yue'
# BUG FIX: the released 0.2.7 wheel still declared "0.2.6" here; keep the
# module version in sync with the package metadata.
__version__ = "0.2.7"
File without changes
@@ -0,0 +1,26 @@
1
+ # -*- coding: utf-8 -*-
2
+ import importlib
3
+ import sys
4
+ from log4python.Log4python import log
5
+
6
+ importlib.reload(sys)
7
+ logger = log("ListHelper")
8
+
9
+
10
class ListHelper:
    """Static helpers for list manipulation."""

    def __init__(self):
        pass

    @staticmethod
    def list_split_by_length(data_list, length_interval):
        """Split *data_list* into consecutive chunks of at most
        *length_interval* items.

        :param data_list: sequence to split
        :param length_interval: maximum chunk size (must be > 0)
        :return: list of non-empty chunks; e.g. [1,2,3] by 2 -> [[1,2],[3]]
        """
        list_final = []
        data_len = len(data_list)
        # BUG FIX: '/' is float division on Python 3, which made range()
        # raise TypeError; floor division restores the intended chunk count.
        times = data_len // length_interval
        for index in range(0, times + 1):
            begin = index * length_interval
            end = min(begin + length_interval, data_len)
            # skip the empty tail slice produced when data_len divides evenly
            if data_list[begin:end]:
                list_final.append(data_list[begin:end])
        return list_final
File without changes
@@ -0,0 +1,43 @@
1
# -*- coding: utf-8 -*-

# Demo configuration registry: endpoints for databases, Kafka, Redis and
# Zookeeper, keyed by logical environment name.
# NOTE(review): credentials are hard-coded in source — move them to an env
# var / secrets store before real use.
config_global = {
    # MySQL-compatible database endpoints
    "db_list": {
        'dev_ops': {
            'desc': "Home 数据库",
            'db_name': "dev_ops",
            'user_name': 'root',
            'password': '4tKTiDYr81YM',
            'host': "192.168.100.155",
            'port': 6606
        },
        'online_alarm': {
            'desc': "16.15 数据库",
            'user_name': 'db_admin',
            'password': '123QWEas!@#',
            'host': "10.83.16.15",
            'port': 8012,
            'db_name': "sec_admin"
        }
    },
    # Kafka brokers
    "kafka_list": {
        "home": {
            'host': "192.168.100.155",
            'port': 9092
        }
    },
    # Redis endpoints ('db' selects the logical database index)
    "redis_list": {
        "home": {
            'password': 's1hKcWqRj9Se',
            'host': '192.168.100.155',
            'port': 9379,
            'db': 1
        }
    },
    # Zookeeper endpoints ('bin_path' points at a local Kafka install;
    # note 'port' is a string here, unlike the integer ports above)
    "zookeeper_list": {
        "home": {
            "bin_path": "/usr/local/dev/kafka/kafka_2.12-2.3.0",
            "ip": "192.168.100.155",
            "port": "2184"
        }
    }
}
File without changes
@@ -0,0 +1,143 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import os
4
+ import sys
5
+ import redis
6
+ from log4python.Log4python import log
7
+ import traceback
8
+ from sqlalchemy import create_engine
9
+ import importlib
10
+
11
+ path_cur = os.path.dirname(os.path.realpath(__file__))
12
+ path_parent = "%s/../" % path_cur
13
+ sys.path.append(path_parent)
14
+
15
+ from .hive_helper import HiveHelper
16
+ from .hive_client import HiveClient
17
+ importlib.reload(sys)
18
+ logger = log("DatabaseClientUtil")
19
+
20
+
21
class DatabaseClientUtil:
    """Factory for Hive, MySQL (SQLAlchemy engine) and Redis clients built
    from a single configuration dict (see __init__ docstring for shape)."""

    def __init__(self, database_config):
        """
        database_config:
        hive_config = {
            "ip": "192.168.100.36",
            "port": "1627",
            "username": "test_01",
            "password": "",
            "database": "test_db"
        }

        mysql_info_online_alarm = {
            'user': 'db_user',
            'pwd': 'password!@#',
            'host': "192.168.100.36",
            'port': 3306,
            'db_name': "db_name"
        }

        redis_info = {
            'password': 'xxxpassword',
            'host': '192.168.100.36',
            'port': 9027,
            'db': 1
        }

        database_config = {
            "hive": hive_config,
            "mysql": mysql_info_online_alarm,
            "redis": redis_info,
        }
        """
        self.base_path = self.__get_script_dir()
        # NOTE(review): sql_file is computed but not used anywhere in this
        # class — confirm whether external callers rely on the attribute.
        self.sql_file = "%s/config/driver_query.sql" % self.base_path
        self.redis_client = None
        self.conn_mysql = None
        self.hive_client = None
        self.hive_config = None
        self.mysql_config = None
        self.redis_config = None
        self.init_database(database_config)

    def init_database(self, config):
        # A falsy config leaves the three sub-configs as None; the get_*
        # methods then require an explicit per-call config argument.
        if config:
            self.hive_config = config['hive']
            self.mysql_config = config['mysql']
            self.redis_config = config['redis']

    def get_hive_client(self, database=None, config_user=None, hive_mode="jdbc",
                        work_path_base=None, yarn_queue="root.db.default", db_name='db_info'):
        # hive_mode "jdbc" -> HiveClient over HiveServer2;
        # any other value  -> CLI-based HiveHelper (ignores config_init).
        if config_user:
            config_init = config_user
        else:
            config_init = self.hive_config

        if hive_mode != "jdbc":
            hive_client = HiveHelper(work_path_base=work_path_base, yarn_queue=yarn_queue, db_name=db_name)
        else:
            config_database_name = config_init['database']
            if database:
                config_database_name = database  # caller-specified database wins
            hive_client = HiveClient(config_init['ip'], config_init['username'], config_init['password'],
                                     config_database_name, config_init['port'])
        return hive_client

    def get_mysql_client(self, config_user=None):
        '''
        mysql_db = 'mysql://root:***@10.89.189.48:8027/log_etl'
        '''
        # Returns a SQLAlchemy Engine, not a raw connection.
        if config_user:
            config_init = config_user
        else:
            config_init = self.mysql_config
        mysql_db = 'mysql://%s:%s@%s:%s/%s?charset=utf8' % (config_init['user'],
                                                            config_init['pwd'],
                                                            config_init['host'],
                                                            config_init['port'],
                                                            config_init['db_name']
                                                            )
        # pre_ping + hourly recycle guard against MySQL dropping idle connections
        engine = create_engine(mysql_db, echo=False, pool_recycle=3600, pool_pre_ping=True)
        return engine

    def get_redis_client(self, db_num=8, redis_config=None):
        # db_num selects the logical Redis database (default 8, not the
        # configured 'db' entry — NOTE(review): confirm this is intended).
        if redis_config:
            config_init = redis_config
        else:
            config_init = self.redis_config
        redis_client = redis.StrictRedis(host=config_init['host'], port=config_init['port'],
                                         password=config_init['password'], db=db_num)
        return redis_client

    @staticmethod
    def __get_run_dir():
        # Absolute directory the process was launched from (argv[0]-based).
        script_path = sys.argv[0]
        if script_path[0] != "/":
            full_path = "%s/%s" % (os.getcwd(), script_path)
        else:
            full_path = script_path
        return os.path.dirname(full_path)

    @staticmethod
    def __get_script_dir():
        # Directory containing this source file.
        return os.path.dirname(os.path.realpath(__file__))

    def worker(self):
        # Placeholder entry point used by the CLI block below.
        pass
128
+
129
+
130
if __name__ == '__main__':
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("action", type=str, help="specify the action [start]")
        args = parser.parse_args()

        if args.action == "start":
            # BUG FIX: DatabaseClientUtil() was called without the required
            # database_config argument and raised TypeError before doing any
            # work; init_database() accepts a falsy config, so pass None.
            app = DatabaseClientUtil(None)
            app.worker()
        else:
            print("Please Check the parameter!!")
    except Exception as ex:
        logger.error("Error: %s" % ex)
        logger.error(traceback.format_exc())
@@ -0,0 +1,88 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import json
4
+ import sys
5
+ from log4python.Log4python import log
6
+ from elasticsearch import Elasticsearch
7
+ import traceback
8
+ import importlib
9
+ importlib.reload(sys)
10
+ logger = log("AgssQuery")
11
+
12
+
13
class EsClient:
    """Small wrapper over the elasticsearch-py client: plain search plus a
    scroll-API helper that collects every page of a large result set."""

    def __init__(self, es_url):
        self.es_url = es_url  # cluster endpoint, e.g. "http://host:9200"
        self.scroll = "1m"    # scroll-context keep-alive window per request
        self.es = self.es_connect()

    def es_connect(self):
        # One-hour client timeout; TLS verification is disabled —
        # NOTE(review): confirm that is acceptable for the target cluster.
        es = Elasticsearch(
            [
                self.es_url
            ],
            verify_certs=False,
            timeout=60*60
        )
        return es

    def es_search(self, index_name, body_query):
        # Single (non-scrolling) search; returns the raw response.
        res = self.es.search(index=index_name, body=body_query)
        return res

    def es_search_scroll(self, index_name, body_query):
        # Scroll through all hits; returns the list of raw page responses
        # (the first page included).
        # NOTE(review): the loop is seeded with page['hits']['total'] (total
        # hit count) while later iterations use len(hits) — this works on ES
        # versions where 'total' is an int; newer versions return a dict.
        # Confirm the cluster version.
        result = []
        page = self.es.search(index=index_name, body=body_query, scroll=self.scroll)
        sid = page['_scroll_id']
        scroll_size = page['hits']['total']
        result.append(page)

        # Start scrolling
        while scroll_size > 0:
            logger.debug("Scrolling...")
            page = self.es.scroll(scroll_id=sid, scroll=self.scroll)
            # Update the scroll ID
            sid = page['_scroll_id']
            # Get the number of results that we returned in the last scroll
            scroll_size = len(page['hits']['hits'])
            logger.debug("scroll size: " + str(scroll_size))
            result.append(page)
        return result
51
+
52
+
53
class AgssQuery:
    """Runs one configured ES query and hands the raw response to the
    rule's action callback."""

    def __init__(self, config, es_url):
        self.es_client = EsClient(es_url)
        self.config_audit = config  # one rule config; shape per construct_query

    @staticmethod
    def construct_query(config_sys):
        # Unpack one rule config:
        #   desc               -> human-readable system name (logging only)
        #   rule.es_index      -> index to query
        #   rule.search.json   -> ES query body
        #   rule.action        -> callable(config, response) applied to the result
        sys_name = config_sys['desc']
        rule = config_sys['rule']

        index = rule['es_index']
        json_search = rule['search']['json']
        action = rule['action']

        return sys_name, index, json_search, action

    def main_query(self):
        # Execute the configured query, log request/response, and return
        # whatever the configured action produces.
        (sys_name, es_index_name, json_search, action) = self.construct_query(self.config_audit)
        # query
        logger.debug("SYS:[%s]; QueryJSON:%s" % (sys_name, json.dumps(json_search, ensure_ascii=False, default=lambda x: x.__dict__)))
        response = self.es_client.es_search(es_index_name, json_search)
        logger.debug("SYS:[%s]; Result:%s" % (sys_name, json.dumps(response, ensure_ascii=False, default=lambda x: x.__dict__)))
        # process
        ret_process = action(self.config_audit, response)

        return ret_process
79
+
80
if __name__ == '__main__':
    try:
        # Minimal CLI smoke test: parse the log-file path and echo it.
        cli = argparse.ArgumentParser()
        cli.add_argument("logFile", type=str, help="specify the log file's path")
        parsed = cli.parse_args()
        print(parsed.logFile)
    except Exception as ex:
        logger.debug("Error: %s" % ex)
        logger.debug(traceback.format_exc())
@@ -0,0 +1,72 @@
1
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# hive util with hive server2

import xlwt as xlwt
from log4python.Log4python import log
import sys
import importlib

logger = log("HiveClient")

# Python 2 relic: on Python 3 sys.getdefaultencoding() is always 'utf-8',
# so this branch is dead code — and if it ever ran, sys.setdefaultencoding
# was removed in Python 3 and would raise AttributeError.
default_encoding = 'utf-8'
if sys.getdefaultencoding() != default_encoding:
    importlib.reload(sys)
    sys.setdefaultencoding(default_encoding)
16
+
17
+
18
class HiveClient:
    """HiveServer2 client (pyhs2, PLAIN auth) bound to one database."""

    def __init__(self, db_host, user, password, database, port=10000, authMechanism="PLAIN"):
        """
        Create a connection to HiveServer2.

        :param db_host: HiveServer2 host
        :param user: login user
        :param password: login password
        :param database: default database for the session
        :param port: HiveServer2 thrift port (default 10000)
        :param authMechanism: SASL mechanism, default "PLAIN"
        """
        import pyhs2  # local import: pyhs2 is an optional, Py2-era dependency
        # (removed an unused duplicate `import xlwt` here — the module-level
        # import already provides it)
        config = {"mapreduce.job.queuename": "xx.queue.default"}
        self.conn = pyhs2.connect(host=db_host,
                                  port=port,
                                  authMechanism=authMechanism,
                                  user=user,
                                  password=password,
                                  database=database,
                                  configuration=config
                                  )

    def query(self, sql):
        """Run *sql* and return all fetched rows."""
        with self.conn.cursor() as cursor:
            cursor.execute(sql)
            return cursor.fetch()

    def execute(self, sql):
        """Run *sql* for its side effects, best-effort.

        NOTE(review): errors are only printed, never raised or logged —
        confirm callers depend on this silent-swallow behavior.
        """
        try:
            with self.conn.cursor() as cursor:
                cursor.execute(sql)
        except Exception as ex:
            print(ex)

    def close(self):
        """Close the HiveServer2 connection."""
        self.conn.close()
56
+
57
+
58
def writeXlwt(filename, result):
    """Write query rows to an .xls file with a fixed Chinese header row.

    :param filename: output path for the workbook
    :param result: iterable of row tuples (date, hour, floor, shop id,
        shop name, headcount)
    """
    header_titles = ('日期', '小时', '楼层', '店铺号', '店铺名称', '人数')
    book = xlwt.Workbook()               # open a workbook
    sheet1 = book.add_sheet('sheel1')    # add one sheet (name kept as-is)
    for col, title in enumerate(header_titles):
        sheet1.row(0).write(col, title)
    # data rows start at row 1, one cell per field
    for row_idx, record in enumerate(result, start=1):
        for col, value in enumerate(record):
            sheet1.row(row_idx).write(col, value)
    book.save(filename)
@@ -0,0 +1,113 @@
1
+ # -*- coding: utf-8 -*-
2
+ import argparse
3
+ import os
4
+ import subprocess
5
+ import sys
6
+ import time
7
+ from os.path import dirname
8
+ from log4python.Log4python import log
9
+ import traceback
10
+ from unipath import Path
11
+
12
+ from toolkits.system.shell_helper import exec_shell_with_pipe, file_is_used
13
+ import importlib
14
+
15
+ path_cur = os.path.dirname(os.path.realpath(__file__))
16
+ path_parent = "%s/../" % path_cur
17
+ sys.path.append(path_parent)
18
+ # from system.shellHelper import exec_shell_with_pipe, file_is_used
19
+
20
+ from threading import Timer
21
+ import _thread
22
+ importlib.reload(sys)
23
+ logger = log("HiveCmd")
24
+
25
+
26
class HiveCmd:
    """Run one Hive SQL statement through the `hive -f` CLI, capturing
    stdout/stderr to files derived from *file_name*."""

    def __init__(self, file_name, sql, db_name, yarn_queue, work_path=None):
        # ':' is stripped from the name — presumably because callers embed
        # timestamps and colons are awkward in paths; TODO confirm intent.
        self.file_name = str(file_name).replace(":", "")
        if not work_path:
            self.work_path = dirname(self.file_name)
        else:
            self.work_path = work_path
        self.sql = sql
        self.hive_hql_file = "%s.hql" % self.file_name   # generated HQL script
        self.hive_err_file = "%s.err" % self.file_name   # stderr capture file
        # session preamble: select database and YARN queue
        self.hive_info = "use %s;\nset mapred.job.queue.name=%s; \n" % (db_name, yarn_queue)
        logger.debug("HiveCmd:filename[%s]; sql[%s]" % (self.file_name, self.sql))

        # ensure the output directory exists before hive writes into it
        path_to_check = dirname(self.file_name)
        if not Path(path_to_check).exists():
            Path(path_to_check).mkdir(True)

    def query(self):
        # Materialize "<preamble><sql>" into the .hql file, then run it with
        # the hive CLI; stdout goes to file_name, stderr to the .err file.
        # Returns the hive process exit code (see exec_shell).
        hql_content = self.hive_info + "%s"
        hql_cmd = "hive -f %s"
        Path(self.hive_hql_file).write_file(hql_content % self.sql)
        cmd = hql_cmd % self.hive_hql_file
        return exec_shell(cmd, stdout=self.file_name, stderr=self.hive_err_file, work_path=self.work_path)

    def query_is_finished(self):
        # NOTE(review): returns the raw file_is_used() value, i.e. True while
        # some process still holds the output file open — the name suggests
        # the opposite polarity; confirm against callers.
        return file_is_used(self.file_name)
52
+
53
+
54
def run_with_timeout(timeout, default, f, *args, **kwargs):
    """Call f(*args, **kwargs), returning *default* if it exceeds *timeout*.

    A falsy timeout means "no limit". The limit is enforced by a Timer that
    fires _thread.interrupt_main, which surfaces as KeyboardInterrupt in the
    main thread.
    """
    if not timeout:
        return f(*args, **kwargs)
    watchdog = Timer(timeout, _thread.interrupt_main)
    try:
        watchdog.start()
        return f(*args, **kwargs)
    except KeyboardInterrupt:
        return default
    finally:
        watchdog.cancel()
66
+
67
+
68
def exec_shell(cmd, timeout=0, work_path="", stdout=None, stderr=None):
    """Run *cmd* through the shell and block until it exits.

    exec_shell("grep 'processor' /proc/cpuinfo | sort -u | wc -l")

    :param cmd: shell command line to execute
    :param timeout: kept for interface compatibility; NOTE(review) it is not
        enforced (the original ignored it too) — confirm whether callers
        expect a real timeout
    :param work_path: working directory for the child ("" = inherit cwd)
    :param stdout: file path to capture stdout (None = PIPE, discarded)
    :param stderr: file path to capture stderr (None = PIPE, discarded)
    :return: the child's integer exit code, or the string "No Cmd Input"
        when cmd is empty/None (legacy sentinel kept for compatibility)
    """
    if cmd == "" or cmd is None:
        return "No Cmd Input"

    fp_out = subprocess.PIPE
    fp_err = subprocess.PIPE
    try:
        if stdout:
            fp_out = open(stdout, "w+")
        if stderr:
            fp_err = open(stderr, "w+")

        scan_process = subprocess.Popen(cmd, shell=True, stdout=fp_out, stderr=fp_err,
                                        cwd=work_path if work_path else None)
        # Popen.wait() replaces the original poll()+sleep(1) busy loop.
        return scan_process.wait()
    finally:
        # BUG FIX: the original leaked the capture file descriptors.
        if fp_out is not subprocess.PIPE:
            fp_out.close()
        if fp_err is not subprocess.PIPE:
            fp_err.close()
99
+
100
+
101
if __name__ == '__main__':
    try:
        # CLI wrapper: run one ad-hoc Hive query and report the exit code.
        arg_parser = argparse.ArgumentParser()
        arg_parser.add_argument("fileName", type=str, help="specify the sql output file's path")
        arg_parser.add_argument("sql", type=str, help="specify the sql to query")
        cli_args = arg_parser.parse_args()

        hive_cmd = HiveCmd(cli_args.fileName, cli_args.sql, "db_name", "root.db.default")
        ret_code = hive_cmd.query()
        print(("RetCode:[%s]" % str(ret_code)))
    except Exception as ex:
        logger.debug("Error: %s" % ex)
        logger.debug(traceback.format_exc())
+ logger.debug(traceback.format_exc())