gomyck-tools 1.3.1__py3-none-any.whl → 1.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. ctools/__init__.py +0 -0
  2. ctools/aes_tools.py +35 -0
  3. ctools/api_result.py +55 -0
  4. ctools/application.py +386 -0
  5. ctools/b64.py +7 -0
  6. ctools/bashPath.py +13 -0
  7. ctools/bottle_web_base.py +169 -0
  8. ctools/bottle_webserver.py +143 -0
  9. ctools/bottle_websocket.py +75 -0
  10. ctools/browser_element_tools.py +314 -0
  11. ctools/call.py +71 -0
  12. ctools/cftp.py +74 -0
  13. ctools/cjson.py +54 -0
  14. ctools/ckafka.py +159 -0
  15. ctools/compile_tools.py +18 -0
  16. ctools/console.py +55 -0
  17. ctools/coord_trans.py +127 -0
  18. ctools/credis.py +111 -0
  19. ctools/cron_lite.py +252 -0
  20. ctools/ctoken.py +34 -0
  21. ctools/cword.py +30 -0
  22. ctools/czip.py +130 -0
  23. ctools/database.py +185 -0
  24. ctools/date_utils.py +43 -0
  25. ctools/dict_wrapper.py +20 -0
  26. ctools/douglas_rarefy.py +136 -0
  27. ctools/download_tools.py +57 -0
  28. ctools/enums.py +4 -0
  29. ctools/ex.py +31 -0
  30. ctools/excelOpt.py +36 -0
  31. ctools/html_soup.py +35 -0
  32. ctools/http_utils.py +24 -0
  33. ctools/images_tools.py +27 -0
  34. ctools/imgDialog.py +44 -0
  35. ctools/metrics.py +131 -0
  36. ctools/mqtt_utils.py +289 -0
  37. ctools/obj.py +20 -0
  38. ctools/pacth.py +74 -0
  39. ctools/plan_area_tools.py +97 -0
  40. ctools/process_pool.py +36 -0
  41. ctools/pty_tools.py +72 -0
  42. ctools/resource_bundle_tools.py +121 -0
  43. ctools/rsa.py +70 -0
  44. ctools/screenshot_tools.py +127 -0
  45. ctools/sign.py +20 -0
  46. ctools/sm_tools.py +49 -0
  47. ctools/snow_id.py +76 -0
  48. ctools/str_diff.py +20 -0
  49. ctools/string_tools.py +85 -0
  50. ctools/sys_info.py +157 -0
  51. ctools/sys_log.py +89 -0
  52. ctools/thread_pool.py +35 -0
  53. ctools/upload_tools.py +40 -0
  54. ctools/win_canvas.py +83 -0
  55. ctools/win_control.py +106 -0
  56. ctools/word_fill.py +562 -0
  57. ctools/word_fill_entity.py +46 -0
  58. ctools/work_path.py +69 -0
  59. {gomyck_tools-1.3.1.dist-info → gomyck_tools-1.3.2.dist-info}/METADATA +1 -1
  60. gomyck_tools-1.3.2.dist-info/RECORD +62 -0
  61. gomyck_tools-1.3.2.dist-info/top_level.txt +1 -0
  62. gomyck_tools-1.3.1.dist-info/RECORD +0 -4
  63. gomyck_tools-1.3.1.dist-info/top_level.txt +0 -1
  64. {gomyck_tools-1.3.1.dist-info → gomyck_tools-1.3.2.dist-info}/WHEEL +0 -0
ctools/cron_lite.py ADDED
@@ -0,0 +1,252 @@
1
+ #!/usr/bin/env python3.6
2
+ # coding: utf-8
3
+ import sched
4
+ import threading
5
+ import time
6
+ import traceback
7
+ from datetime import datetime
8
+ from functools import wraps
9
+ from typing import Optional, Dict
10
+
11
+ import pytz
12
+ from croniter import croniter
13
+
14
+ """
15
+ @cron_lite.cron_task('0/1 * * * * ? *')
16
+ def demo():
17
+ print('hello world')
18
+
19
+ @cron_lite.cron_task('0/1 * * * * ? *')
20
+ def demo1():
21
+ print('hello world111')
22
+
23
+ def demo2(xx, fff):
24
+ print('hello world222', xx, fff)
25
+
26
+ cron_lite.apply_cron_task('0/1 * * * * ? *', demo2, (123123123, 34534534))
27
+ print(123123)
28
+
29
+ cron_lite.start_all()
30
+ """
31
+
32
class SchedulerMeta:
    """Runtime bookkeeping for one registered cron task."""
    timer_task_name: str = None        # unique task name (key in scheduler_map)
    switch: bool = True                # desired state: False asks the task loop to stop
    status: bool = False               # actual state: True while the task loop is running
    event: sched.Event = None          # currently queued sched event for the next fire
    scheduler: sched.scheduler = None  # dedicated scheduler instance driving this task
38
+
39
+
40
# Registry of every cron task, keyed by its task name.
scheduler_map: Dict[str, SchedulerMeta] = {}  # {timer_task_name: SchedulerMeta}
# Global on/off switch for the whole scheduler loop (set by _start / start_all).
_switch = False
# Info / error output hooks; replaceable via start_all(info_handler=..., error_handler=...).
_info_handler = print
_error_handler = print
# Timezone used when computing the next fire time; None leaves croniter's default.
_time_zone: Optional[pytz.BaseTzInfo] = None
45
+
46
+
47
def set_time_zone(time_zone_name: str):
    """Set the pytz timezone (e.g. 'Asia/Shanghai') used to compute cron fire times."""
    global _time_zone
    _time_zone = pytz.timezone(time_zone_name)
50
+
51
# @annotation
def cron_task(cron_expr: str, task_name: str = None, till_time_stamp: int = None):
    """
    cron_task decorator to register a function as crontab task
    :param cron_expr: the croniter accepted cron_expression. NOTICE: the default timezone is UTC and can be changed by
    `set_time_zone`. The format is `min hour day month weekday [sec]`
    :param task_name: name to register the task under; defaults to the function's name
    :param till_time_stamp: run this job till when. None means forever
    :return: the real decorator
    """
    cron_expr = _convert_cron(cron_expr)
    assert len(cron_expr.split(" ")) in (5, 6), \
        "only supported <min hour day month weekday> and <min hour day month weekday sec>"

    def deco(func):
        @wraps(func)
        def inner():
            try:
                func()
            except Exception:
                try:
                    _error_handler(f"run {func.__name__} failed\n" + traceback.format_exc())
                except Exception:
                    # even formatting/reporting the traceback failed; report bare
                    _error_handler(f"run {func.__name__} failed\n")
            # re-arm: queue the next fire time after every run (success or failure)
            _register_next(inner.__name__ if task_name is None else task_name, inner, cron_expr, till_time_stamp)

        # initial registration; init=True rejects duplicate task names
        _register_next(inner.__name__ if task_name is None else task_name, inner, cron_expr, till_time_stamp, init=True)
        return inner

    return deco
80
+
81
+
82
def apply_cron_task(cron_expr, func, params, timer_task_name=None, till_time_stamp=None):
    """
    Programmatically register *func* as a crontab task and start it on a daemon thread.
    :param cron_expr: the croniter accepted cron expression. NOTICE: the default timezone is UTC and can be changed by
    `set_time_zone`. The format is `min hour day month weekday [sec]`
    :param func: task callback function
    :param params: positional parameters passed through to *func* on every run
    :param timer_task_name: task name; defaults to func.__name__
    :param till_time_stamp: run this job till when. None means forever
    :return: the daemon thread driving this task's scheduler
    """
    cron_expr = _convert_cron(cron_expr)
    assert len(cron_expr.split(" ")) in (5, 6), "Only supported <minute hour day month weekday> and <minute hour day month weekday second>"
    task_name = func.__name__ if timer_task_name is None else timer_task_name

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            func.__taskName__ = task_name
            func(*params, *args, **kwargs)
        except Exception as exc:
            _error_handler(f"Run {func.__name__} failed with error: {str(exc)}")
        finally:
            # re-arm: queue the next fire time after every run (success or failure)
            _register_next(task_name, wrapper, cron_expr, till_time_stamp)

    _register_next(task_name, wrapper, cron_expr, till_time_stamp, init=True)
    # Plain daemon thread instead of a pool submit: pool workers are not daemon threads.
    # BUGFIX: start the scheduler for *this* task's resolved name. The old code passed
    # timer_task_name, which is often None and made _start() run EVERY registered task.
    t = threading.Thread(target=_start, args=(task_name,))
    t.daemon = True  # Thread.setDaemon() is deprecated since Python 3.10
    t.start()
    return t
113
+
114
def start_all(spawn: bool = True, daemon: bool = True, info_handler=None, error_handler=None) -> Optional[threading.Thread]:
    """
    start_all starts all cron tasks registered before.
    :param spawn: whether to start a new thread for scheduler. If not, the action will block the current thread
    :param daemon: the new thread is daemon if True
    :param info_handler: handle info output (scheduler start / stop), default = print, can use logging.info
    :param error_handler: handle error output (task execute exception), default = print, can use logging.error
    :raise RuntimeError: if the tasks are already started and still running we cannot start again. The feature is not
    concurrent-safe
    :return: the new thread if spawn = True, otherwise None (after the loop finishes)
    """
    global _switch, _info_handler, _error_handler
    if _switch:
        raise RuntimeError("the crontab was already started...")
    if info_handler:
        _info_handler = info_handler
    if error_handler:
        _error_handler = error_handler
    if spawn:
        t = threading.Thread(target=_start)
        t.daemon = daemon  # FIX: Thread.setDaemon() is deprecated since Python 3.10
        t.start()
        return t
    else:
        _start()
139
+
140
+
141
def is_active(timer_task_name):
    """Return True when the named task is switched on or currently running."""
    meta = scheduler_map.get(timer_task_name)
    if meta is None:
        return False
    return meta.switch or meta.status
146
+
147
+
148
def active(timer_task_name):
    """Mark the named task as running (status = True), if it is registered."""
    meta = scheduler_map.get(timer_task_name)
    if meta is not None:
        meta.status = True
151
+
152
def get_switch(timer_task_name):
    """Return the named task's switch flag; unknown tasks report True (keep running)."""
    meta = scheduler_map.get(timer_task_name)
    return True if meta is None else meta.switch
157
+
158
+
159
def inactive(timer_task_name):
    # Mark the named task as not running; when its switch was already turned off,
    # also cancel its pending sched event so the scheduler queue drains.
    if timer_task_name in scheduler_map:
        scheduler_map.get(timer_task_name).status = False
        if not scheduler_map.get(timer_task_name).switch:
            # NOTE(review): sched.scheduler.cancel raises ValueError when the event
            # already ran or was cancelled — confirm callers tolerate that.
            scheduler_map.get(timer_task_name).scheduler.cancel(scheduler_map[timer_task_name].event)
164
+
165
+
166
def stop(timer_task_name):
    """Request the named task to stop, then pause briefly so its loop can notice."""
    meta = scheduler_map.get(timer_task_name)
    if meta is not None:
        meta.switch = False
        time.sleep(1)
170
+
171
+
172
def stop_all(wait_thread: Optional[threading.Thread] = None):
    """
    Turn off every task's switch so the scheduler winds down; running jobs finish first.
    :param wait_thread: join() the spawned scheduler thread (if you started it as spawn
        and you want) to ensure all jobs finish
    :return:
    """
    for meta in scheduler_map.values():
        meta.switch = False
    if wait_thread:
        wait_thread.join()
183
+
184
+
185
def _register_next(timer_task_name, base_func, cron_expr, till_time_stamp, init: bool = False):
    """Queue base_func's next fire time on the task's own scheduler.

    On first sight of a task name a SchedulerMeta (with a dedicated sched.scheduler)
    is created; re-registering an existing name with init=True is an error.
    """
    cron_obj = croniter(cron_expr)
    if _time_zone:
        # anchor "next" to the configured timezone instead of the system default
        cron_obj.set_current(datetime.now(tz=_time_zone))
    next_time = int(cron_obj.get_next())
    if scheduler_map.get(timer_task_name) is None:
        scheduler_meta = SchedulerMeta()
        scheduler_meta.timer_task_name = timer_task_name
        scheduler_meta.switch = True
        scheduler_meta.scheduler = sched.scheduler(time.time, time.sleep)
        scheduler_map[timer_task_name] = scheduler_meta
    elif init:
        raise ValueError(f"task name: {timer_task_name} already exists!!!!!")
    # only queue the next run while it falls before the task's end time (None = forever)
    if till_time_stamp is None or next_time <= till_time_stamp:
        scheduler_map[timer_task_name].event = scheduler_map[timer_task_name].scheduler.enterabs(next_time, 0, base_func)
200
+
201
+
202
def _run_sched(scheduler_meta: SchedulerMeta):
    """Drive one task's scheduler until its switch (or the global switch) turns off."""
    active(scheduler_meta.timer_task_name)
    scheduler = scheduler_meta.scheduler
    while True:
        if not _switch or not scheduler_meta.switch:
            _drain_queue(scheduler)
            inactive(scheduler_meta.timer_task_name)
            return
        # run(False) executes due events and returns the delay until the next one
        t = scheduler.run(False)
        if t is None:
            # queue empty: nothing re-registered itself, so the task is finished
            inactive(scheduler_meta.timer_task_name)
            return
        st = time.time()
        while time.time() - st < t:
            # poll the switches while waiting so stop requests take effect quickly
            if not _switch or not scheduler_meta.switch:
                _drain_queue(scheduler)
                inactive(scheduler_meta.timer_task_name)
                return
            time.sleep(0.5)


def _drain_queue(scheduler: sched.scheduler):
    """Cancel every queued event. BUGFIX: the old code called scheduler.empty(),
    which only *reports* whether the queue is empty and never clears it."""
    for ev in list(scheduler.queue):
        try:
            scheduler.cancel(ev)
        except ValueError:
            pass  # event already ran or was cancelled concurrently
221
+
222
+
223
def _start(taskName: str = None):
    """Run the scheduler loop: spawn one daemon thread per registered task
    (or only *taskName* when given) and block until they all finish."""
    global _switch
    _switch = True
    _info_handler("cron job begin start...")
    taskList = []
    for timer_task_name, scheduler_meta in scheduler_map.items():
        if taskName is not None and timer_task_name != taskName: continue
        # CONSISTENCY FIX: route through _info_handler like every other message
        # in this module instead of a bare print().
        _info_handler(f"register job: {timer_task_name}")
        thread = threading.Thread(target=_run_sched, args=(scheduler_meta,))
        thread.daemon = True  # Thread.setDaemon() is deprecated since Python 3.10
        thread.start()
        taskList.append(thread)
    for task in taskList: task.join()
    _info_handler("cron job execute finished...")
    _switch = False
    scheduler_map.clear()
239
+
240
+
241
+ def _convert_cron(cron_expr):
242
+ res_cron = ""
243
+ cron_list = cron_expr.split(" ")
244
+ if len(cron_list) > 6:
245
+ for cron in cron_list[1:]:
246
+ if cron != "?":
247
+ res_cron += "%s " % cron
248
+ res_cron += "%s" % cron_list[0]
249
+ else:
250
+ res_cron = cron_expr
251
+ return res_cron
252
+
ctools/ctoken.py ADDED
@@ -0,0 +1,34 @@
1
+ #!/usr/bin/env python
2
+ # -*- coding: UTF-8 -*-
3
+ __author__ = 'haoyang'
4
+ __date__ = '2025/1/21 16:01'
5
+
6
+ import time
7
+ import jwt
8
+ from bottle import request
9
+
10
+ from ctools.dict_wrapper import DictWrapper
11
+
12
# HTTP request header the JWT is read from by get_token() / is_valid().
token_header = 'Authorization'
13
+
14
def gen_token(payload: dict, secret_key, expired: int = 3600) -> str:
    """Create an HS256-signed JWT from *payload*, valid for *expired* seconds.

    :param payload: claims to embed (annotation fixed: was the dict *instance* `{}`)
    :param secret_key: HMAC secret used to sign the token
    :param expired: lifetime in seconds; added to the current time as the `exp` claim
    :return: the encoded JWT string
    """
    # BUGFIX: work on a copy — the old code update()d the caller's dict in place,
    # leaking an `exp` key back into the caller's payload.
    claims = dict(payload)
    claims['exp'] = time.time() + expired
    return jwt.encode(claims, secret_key, algorithm='HS256')
17
+
18
def get_payload(token, secret_key):
    """Decode an HS256 JWT and wrap its claims in a DictWrapper; return None when
    the token is missing, expired, or fails to decode for any reason."""
    try:
        claims = jwt.decode(token, secret_key, algorithms=['HS256'])
        return DictWrapper(claims)
    except Exception:
        return None
24
+
25
def get_token(key):
    """Decode the JWT carried in the current request's Authorization header."""
    raw_token = request.get_header(token_header)
    return get_payload(raw_token, key)
27
+
28
def is_valid(key):
    """True when the current request carries a JWT that decodes with *key*."""
    payload = get_payload(request.get_header(token_header), key)
    return payload is not None
30
+
31
+ # if __name__ == '__main__':
32
+ # token = gen_token({"xx": 123}, '123')
33
+ # xx = get_payload(token, '123')
34
+ # print(xx.xx)
ctools/cword.py ADDED
@@ -0,0 +1,30 @@
1
+ # from docxtpl import DocxTemplate
2
+ #
3
+ # # tpl = DocxTemplate('/Users/haoyang/Desktop/xxx.docx')
4
+ # tpl = DocxTemplate('/Users/haoyang/Desktop/123.doc')
5
+ #
6
+ # # 设置好各标签需要填写的内容
7
+ # context = {'xxxx': '计算机科学与技术', 'cccc': '2022050513'}
8
+ # # 将标签内容填入模板中
9
+ # tpl.render(context)
10
+ # # 保存
11
+ # tpl.save('/Users/haoyang/Desktop/new_test2.docx')
12
+
13
+ from docx import Document
14
+
15
def merge_word_files(input_files: list, output_file: str):
    """Concatenate the body elements of several .docx files into one document.

    :param input_files: paths of the source .docx files, merged in order
    :param output_file: path the merged document is saved to
    NOTE(review): body elements are re-parented into the target tree, not
    deep-copied — resources living in other package parts (images, numbering,
    styles) may not survive the move; verify the merged output.
    """
    merged_doc = Document()
    for file in input_files:
        doc = Document(file)
        for element in doc.element.body:
            merged_doc.element.body.append(element)
    merged_doc.save(output_file)
22
+
23
+
24
def read_word_file(input_file: str):
    """Return the plain text of a .docx file, one line per paragraph."""
    doc = Document(input_file)
    return "\n".join(paragraph.text for paragraph in doc.paragraphs)
30
+
ctools/czip.py ADDED
@@ -0,0 +1,130 @@
1
+ #!/usr/bin/env python
2
+ # -*- coding: UTF-8 -*-
3
+ __author__ = 'haoyang'
4
+ __date__ = '2025/1/24 08:48'
5
+
6
+ import io
7
+ import os
8
+ import time
9
+ import pyzipper
10
+
11
+ """
12
+ target_directory = '/Users/haoyang/Desktop/知识库文件'
13
+ zip_password = None
14
+ process_directory_to_single_zip(target_directory, zip_password, "knowledge_base")
15
+
16
+ files_to_compress = [
17
+ '/path/to/file1.txt',
18
+ '/path/to/file2.pdf',
19
+ '/path/to/file3.jpg'
20
+ ]
21
+ output_directory = '/Users/haoyang/Desktop'
22
+ compress_specific_files(files_to_compress, output_directory, zip_password, "my_files")
23
+ """
24
def create_zip_with_files(file_dict, password=None) -> io.BytesIO:
    """Compress multiple files into one (optionally AES-encrypted) ZIP in memory.

    Args:
        file_dict: mapping of {archive_path: file_content}; build archive paths with
            os.path.relpath(file_path, start=root_dir) to control layout inside the ZIP
        password: optional password; when given, the archive uses WZ_AES encryption
    Returns:
        BytesIO positioned at offset 0, containing the ZIP bytes
    Raises:
        Exception: whatever pyzipper raises; the buffer is closed before re-raising
    """
    buffer = io.BytesIO()
    try:
        if password:
            with pyzipper.AESZipFile(buffer, 'w', compression=pyzipper.ZIP_DEFLATED, encryption=pyzipper.WZ_AES) as archive:
                archive.setpassword(password.encode('utf-8'))
                for name, content in file_dict.items():
                    archive.writestr(name, content)
        else:
            with pyzipper.ZipFile(buffer, 'w', compression=pyzipper.ZIP_DEFLATED) as archive:
                for name, content in file_dict.items():
                    archive.writestr(name, content)
        buffer.seek(0)
        return buffer
    except Exception:
        buffer.close()
        raise
49
+
50
+
51
def process_directory_to_single_zip(root_dir, password=None, zip_name=None):
    """Walk *root_dir* and compress every file (relative paths preserved) into a
    single ZIP saved inside *root_dir* as <zip_name|archive>_<timestamp>.zip.

    Args:
        root_dir: root directory to scan for files
        password: optional password for the ZIP file
        zip_name: base name for the ZIP file (without extension)
    """
    file_dict = {}
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            file_path = os.path.join(dirpath, filename)
            try:
                with open(file_path, 'rb') as f:
                    # relative path keeps the directory layout inside the archive
                    rel_path = os.path.relpath(file_path, start=root_dir)
                    file_dict[rel_path] = f.read()
            except Exception as e:
                print(f"Error reading {file_path}: {str(e)}")
    if not file_dict:
        print("No files found to compress.")
        return

    zip_buffer = None
    try:
        zip_buffer = create_zip_with_files(file_dict, password)
        timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
        if zip_name:
            base_name = f"{zip_name}_{timestamp}.zip"
        else:
            base_name = f"archive_{timestamp}.zip"
        output_path = os.path.join(root_dir, base_name)
        with open(output_path, 'wb') as out_file:
            out_file.write(zip_buffer.read())
        print(f"Created single archive: {output_path}")
    except Exception as e:
        print(f"Error creating ZIP archive: {str(e)}")
    finally:
        # CLEANUP FIX: track the buffer explicitly instead of probing locals()
        if zip_buffer is not None:
            zip_buffer.close()
87
+
88
+
89
def compress_specific_files(file_paths: list, output_dir: str, password=None, zip_name=None):
    """Compress the given files (flattened to their basenames) into one ZIP in *output_dir*.

    Args:
        file_paths: absolute paths of the files to compress; non-files are skipped
        output_dir: directory the ZIP is written to (created if missing)
        password: optional password for the ZIP file
        zip_name: base name for the ZIP (without extension); defaults to the first file's stem
    """
    if not file_paths:
        print("No files specified to compress.")
        return
    file_dict = {}
    for file_path in file_paths:
        if not os.path.isfile(file_path):
            print(f"Warning: {file_path} is not a file or doesn't exist. Skipping.")
            continue
        try:
            with open(file_path, 'rb') as f:
                # basename only: entries land flat at the archive root
                file_dict[os.path.basename(file_path)] = f.read()
        except Exception as e:
            print(f"Error reading {file_path}: {str(e)}")
    if not file_dict:
        print("No valid files found to compress.")
        return
    zip_buffer = None
    try:
        zip_buffer = create_zip_with_files(file_dict, password)
        timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
        if zip_name:
            base_name = f"{zip_name}_{timestamp}.zip"
        else:
            first_file = os.path.basename(file_paths[0])
            base_name = f"{os.path.splitext(first_file)[0]}_{timestamp}.zip"
        output_path = os.path.join(output_dir, base_name)
        os.makedirs(output_dir, exist_ok=True)
        with open(output_path, 'wb') as out_file:
            out_file.write(zip_buffer.read())
        print(f"Created archive: {output_path}")
    except Exception as e:
        print(f"Error creating ZIP archive: {str(e)}")
    finally:
        # CLEANUP FIX: track the buffer explicitly instead of probing locals()
        if zip_buffer is not None:
            zip_buffer.close()
ctools/database.py ADDED
@@ -0,0 +1,185 @@
1
+ import contextlib
2
+ import datetime
3
+ import math
4
+
5
+ from sqlalchemy import create_engine, Integer, Column, event
6
+ from sqlalchemy.ext.declarative import declarative_base
7
+ from sqlalchemy.orm import sessionmaker, Session
8
+ from sqlalchemy.sql import text
9
+
10
+ from ctools import call, string_tools
11
+ from ctools.thread_pool import thread_local
12
+
13
+ """
14
+ class XXXX(BaseMixin):
15
+ __tablename__ = 't_xxx_info'
16
+ __table_args__ = {'comment': 'xxx信息表'}
17
+ server_content: Column = Column(String(50), nullable=True, default='', comment='123123')
18
+ server_ip: Column = Column(String(30), index=True)
19
+ user_id: Column = Column(BigInteger)
20
+
21
+ database.init_db('postgresql://postgres:123456@192.168.3.107:32566/abc', default_schema='public', db_key='source', pool_size=100)
22
+ with database.get_session('source') as s:
23
+ s.execute(text('insert into xxx (name) values (:name)'), {'name': string_tools.get_random_str(5)})
24
+ s.commit()
25
+ """
26
+
27
Base = None          # declarative base; created at import time by _init() via @call.init
inited_db = {}       # {db_key: True} guard against double initialization
engines = {}         # {db_key: SQLAlchemy Engine}
sessionMakers = {}   # {db_key: sessionmaker bound to that engine}
31
+
32
def getEngine(db_key: str='default'):
    """Return the Engine registered under *db_key* (KeyError if init_db never ran for it)."""
    return engines[db_key]
34
+
35
@call.init
def _init():
    # Runs once at import time (via the @call.init hook) to create the shared declarative Base.
    global Base
    Base = declarative_base()
39
+
40
+ """
41
+ The string form of the URL is
42
+ dialect[+driver]://user:password@host/dbname[?key=value..]
43
+ where ``dialect`` is a database name such as ``mysql``, ``oracle``, ``postgresql``, etc.
44
+ and ``driver`` the name of a DBAPI such as ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively
45
+ """
46
+
47
+ # 密码里的@ 要替换成 %40
48
+
49
+ # sqlite connect_args={"check_same_thread": False} db_url=sqlite:///{}.format(db_url)
50
+ # sqlite 数据库, 初始化之后, 优化一下配置
51
+ # $ sqlite3 app.db
52
+ # > PRAGMA journal_mode=WAL; 设置事务的模式, wal 允许读写并发, 但是会额外创建俩文件
53
+ # > PRAGMA synchronous=NORMAL; 设置写盘策略, 默认是 FULL, 日志,数据都落, 设置成 NORMAL, 日志写完就算事务完成
54
+
55
def init_db(db_url: str, db_key: str='default', connect_args: dict={}, default_schema: str=None, pool_size: int=5, max_overflow: int=25, echo: bool=False):
    """Create and register an engine + sessionmaker under *db_key*, then create all tables.

    :param db_url: SQLAlchemy URL, e.g. postgresql://user:pass@host/db (encode '@' in passwords as %40)
    :param db_key: registry key later used by get_session() / getEngine()
    :param connect_args: passed through to the DBAPI connect()
    :param default_schema: when set, every new connection runs SET search_path TO <schema>
        (NOTE(review): that statement is PostgreSQL syntax — confirm before other backends)
    :param pool_size: base connection pool size
    :param max_overflow: extra connections allowed beyond the pool
    :param echo: log emitted SQL when True
    :raises Exception: when *db_key* was already initialized
    """
    if db_url.startswith('mysql'):
        import pymysql
        pymysql.install_as_MySQLdb()  # let SQLAlchemy's MySQLdb dialect ride on pymysql
    if inited_db.get(db_key): raise Exception('db {} already init!!!'.format(db_key))
    global engines, sessionMakers
    engine, sessionMaker = _create_connection(db_url=db_url, connect_args=connect_args, pool_size=pool_size, max_overflow=max_overflow, echo=echo)
    engines[db_key] = engine
    sessionMakers[db_key] = sessionMaker
    inited_db[db_key] = True
    if default_schema: event.listen(engine, 'connect', lambda dbapi_connection, connection_record: _set_search_path(dbapi_connection, default_schema))
    Base.metadata.create_all(engine)
67
+
68
def _set_search_path(dbapi_connection, default_schema):
    # Invoked for every new DBAPI connection (see init_db's 'connect' listener).
    with dbapi_connection.cursor() as cursor:
        cursor.execute(f'SET search_path TO {default_schema}')
71
+
72
def _create_connection(db_url: str, pool_size: int=5, max_overflow: int=25, connect_args={}, echo: bool=False):
    """Build an Engine (pre-ping + hourly recycle) and a sessionmaker bound to it."""
    engine = create_engine(
        '{}'.format(db_url),
        echo=echo,
        future=True,
        pool_size=pool_size,
        max_overflow=max_overflow,
        pool_pre_ping=True,   # validate pooled connections before handing them out
        pool_recycle=3600,    # recycle hourly to dodge server-side idle timeouts
        connect_args=connect_args,
    )
    session_factory = sessionmaker(bind=engine)
    return engine, session_factory
83
+
84
def generate_custom_id():
    # Primary-key default factory: a snowflake id rendered as a string.
    return str(string_tools.get_snowflake_id())
86
+
87
class BaseMixin(Base):
    """Abstract declarative base: a generated primary key plus dict <-> entity helpers."""
    __abstract__ = True
    # NOTE(review): default factory returns a *str* while the column type is Integer —
    # confirm whether the column type should be String.
    obj_id = Column(Integer, primary_key=True, default=generate_custom_id)

    # ext1 = Column(String)
    # ext2 = Column(String)
    # ext3 = Column(String)
    # create_time = Column(DateTime, nullable=False, default=datetime.datetime.now)
    # update_time = Column(DateTime, nullable=False, default=datetime.datetime.now, onupdate=datetime.datetime.now, index=True)

    def to_dict(self):
        """Public alias of __getstate__: entity -> plain dict."""
        return self.__getstate__()

    def from_dict(self, v):
        """Bulk-assign attributes from a dict (no validation)."""
        self.__dict__.update(v)

    def __getstate__(self):
        # Serialize non-underscore attributes only (skips SQLAlchemy's _sa_* internals);
        # datetime values are rendered as 'YYYY-mm-dd HH:MM:SS' strings.
        ret_state = {}
        state = self.__dict__.copy()
        for key in state.keys():
            if not key.startswith("_"):
                if type(state[key]) == datetime.datetime:
                    ret_state[key] = state[key].strftime("%Y-%m-%d %H:%M:%S")
                else:
                    ret_state[key] = state[key]
        return ret_state
113
+
114
@contextlib.contextmanager
def get_session(db_key: str='default') -> Session:
    """Context manager yielding a Session for *db_key*; rolls back on error, always closes.

    Also records *db_key* on the thread-local so query_by_page4_crd_sql can look up
    the right dialect. Committing is the caller's responsibility.
    :raises ValueError: when *db_key* was never initialized via init_db
    """
    thread_local.db_key = db_key
    if sm:=sessionMakers.get(db_key):
        s = sm()
    else:
        raise ValueError("Invalid db_key: {}".format(db_key))
    try:
        yield s
    except Exception as e:
        s.rollback()
        raise e
    finally:
        s.close()
128
+
129
class PageInfoBuilder:
    """Page envelope: page size/index, total count, computed page count, and the rows."""

    def __init__(self, pageInfo, total_count, records):
        self.page_size = pageInfo.page_size
        self.page_index = pageInfo.page_index
        self.total_count = total_count
        # ceil so a trailing partial page still counts as a page
        self.total_page = math.ceil(total_count / int(pageInfo.page_size))
        self.records = records
137
+
138
def query_by_page(query, pageInfo):
    """Apply offset/limit paging to an ORM *query* and wrap the result.

    :param query: a SQLAlchemy Query, already filtered/ordered
    :param pageInfo: object with 1-based page_index and page_size
    :return: PageInfoBuilder carrying this page's rows and the total row count
    """
    records = query.offset((pageInfo.page_index - 1) * pageInfo.page_size).limit(pageInfo.page_size).all()
    # .all() already returns a list — the old element-by-element copy was dead weight
    return PageInfoBuilder(pageInfo, query.count(), records)
144
+
145
def query4_crd_sql(session, sql: str, params: dict) -> list:
    """Run a raw SQL query and return its rows as plain dicts.

    :param session: an active SQLAlchemy session
    :param sql: SQL text with :named bind parameters
    :param params: bind parameter values
    :return: list of {column_name: value} dicts
        (annotation fixed: `-> []` was a list instance, not a type)
    """
    records = session.execute(text(sql), params).fetchall()
    return [
        {key: record[index] for index, key in enumerate(record._mapping)}
        for record in records
    ]
154
+
155
# Paging suffixes appended to the wrapped query by query_by_page4_crd_sql:
# SQLite/PostgreSQL use LIMIT ... OFFSET ..., MySQL uses LIMIT offset, count.
sqlite_and_pg_page_sql = """
limit :limit offset :offset
"""
mysql_page_sql = """
limit :offset, :limit
"""
161
+
162
def query_by_page4_crd_sql(session, sql: str, params: dict, pageInfo) -> list:
    """Page a raw SQL query: wraps *sql* in a subselect, appends the dialect's
    LIMIT/OFFSET form, and issues a count(1) over the same subselect.

    :param session: active session; the dialect is looked up via thread_local.db_key
        (set by get_session), so call this inside a get_session block
    :param sql: the unpaged SQL text with :named binds
    :param params: bind values. NOTE(review): this dict is mutated — 'limit' and
        'offset' keys are added and remain visible to the caller afterwards.
    :param pageInfo: object with 1-based page_index and page_size
    :return: PageInfoBuilder whose records are {column: value} dicts
    :raises Exception: for dialects other than postgresql/sqlite/mysql
    """
    db_name = engines[thread_local.db_key].name
    if db_name == 'postgresql' or db_name == 'sqlite':
        page_sql = sqlite_and_pg_page_sql
    elif db_name == 'mysql':
        page_sql = mysql_page_sql
    else:
        raise Exception('not support db: {}'.format(db_name))
    wrapper_sql = """
    select * from ({}) as t {}
    """.format(sql, page_sql)
    count_sql = """
    select count(1) from ({}) as t
    """.format(sql)
    params["limit"] = pageInfo.page_size
    params["offset"] = (pageInfo.page_index - 1) * pageInfo.page_size
    records = session.execute(text(wrapper_sql), params).fetchall()
    rs = []
    for record in records:
        data = {}
        for index, key in enumerate(record._mapping):
            data[key] = record[index]
        rs.append(data)
    return PageInfoBuilder(pageInfo, session.execute(text(count_sql), params).first()[0], rs)
ctools/date_utils.py ADDED
@@ -0,0 +1,43 @@
1
+ import time
2
+ from datetime import datetime, timedelta
3
+
4
def get_date():
    """Current local date rendered as 'YYYY-MM-DD'."""
    return datetime.now().strftime('%Y-%m-%d')
6
+
7
def get_time():
    """Current local clock time rendered as 'HH-MM-SS'.

    NOTE(review): separators are dashes, not colons — kept as-is since callers
    may rely on this filename-safe format.
    """
    return datetime.now().strftime('%H-%M-%S')
9
+
10
def get_date_time(fmt="%Y-%m-%d %H:%M:%S"):
    """Current local date-time rendered with *fmt*."""
    return datetime.now().strftime(fmt)
12
+
13
def str_to_datetime(val: str, fmt="%Y-%m-%d %H:%M:%S"):
    # NOTE(review): despite the name this returns a time.struct_time
    # (time.strptime), not a datetime — kept as-is because callers may
    # index the result tuple-style; consider datetime.strptime if not.
    return time.strptime(val, fmt)
15
+
16
def str_to_timestamp(val: str, fmt="%Y-%m-%d %H:%M:%S"):
    """Parse *val* with *fmt* (local time) and return epoch seconds as a float."""
    parsed = time.strptime(val, fmt)
    return time.mktime(parsed)
18
+
19
def timestamp_to_str(timestamp=None, fmt="%Y-%m-%d %H:%M:%S"):
    """Render an epoch timestamp (seconds, local time) with *fmt*.

    :param timestamp: seconds since the epoch; None means "now".
        BUGFIX: the old default `=time.time()` was evaluated once at import
        time, so every no-arg call returned the module-load moment, not now.
    :param fmt: strftime format string
    """
    if timestamp is None:
        timestamp = time.time()
    return time.strftime(fmt, time.localtime(timestamp))
21
+
22
def get_today_start_end(now: datetime = None):
    """Return ('YYYY-mm-dd 00:00:00', 'YYYY-mm-dd 23:59:59') for *now*'s day.

    BUGFIX: the old signature `now: datetime.now()` abused an annotation as a
    default — the parameter actually had no default, so no-arg calls raised
    TypeError. *now* now genuinely defaults to the current moment.
    """
    if now is None:
        now = datetime.now()
    start = datetime(now.year, now.month, now.day, 0, 0, 0)
    end = datetime(now.year, now.month, now.day, 23, 59, 59)
    return start.strftime("%Y-%m-%d %H:%M:%S"), end.strftime("%Y-%m-%d %H:%M:%S")
26
+
27
def get_week_start_end(now: datetime = None):
    """Return ('YYYY-mm-dd 00:00:00' of Monday, 'YYYY-mm-dd 23:59:59' of Sunday)
    for the week containing *now*.

    BUGFIX: same annotation-as-default mistake as get_today_start_end — *now*
    now genuinely defaults to the current moment.
    """
    if now is None:
        now = datetime.now()
    start = now - timedelta(days=now.weekday())  # Monday of this week
    end = start + timedelta(days=6)              # Sunday of this week
    return start.strftime("%Y-%m-%d 00:00:00"), end.strftime("%Y-%m-%d 23:59:59")
31
+
32
def time_diff_in_seconds(sub_head: str = None, sub_end: str = None):
    """Return int(sub_head - sub_end) in seconds; both are 'YYYY-mm-dd HH:MM:SS' strings.

    BUGFIX: the old defaults called get_date_time() at import time, freezing both
    values at module load; they now default to the current time at call time.
    """
    if sub_head is None:
        sub_head = get_date_time()
    if sub_end is None:
        sub_end = get_date_time()
    start_ts = str_to_timestamp(sub_head)
    end_ts = str_to_timestamp(sub_end)
    return int(start_ts - end_ts)
36
+
37
def opt_time(base_time=None, days=0, hours=0, minutes=0, seconds=0, weeks=0, fmt="%Y-%m-%d %H:%M:%S"):
    """Shift *base_time* (a datetime, a string in *fmt*, or None for now) by the
    given offsets and return the result formatted with *fmt*."""
    anchor = base_time
    if anchor is None:
        anchor = datetime.now()
    elif isinstance(anchor, str):
        anchor = datetime.strptime(anchor, fmt)
    shifted = anchor + timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds, weeks=weeks)
    return shifted.strftime(fmt)
ctools/dict_wrapper.py ADDED
@@ -0,0 +1,20 @@
1
+ #!/usr/bin/env python
2
+ # -*- coding: UTF-8 -*-
3
+ __author__ = 'haoyang'
4
+ __date__ = '2024/10/25 09:42'
5
+
6
class DictWrapper(dict):
    """A dict exposing its keys as attributes; nested dicts are wrapped on access.

    Attribute reads raise AttributeError for missing keys (and for keys whose
    value is None — the two cases are indistinguishable through dict.get).
    """

    def __getattr__(self, key):
        value = self.get(key)
        if value is None:
            raise AttributeError(f" ==>> {key} <<== Not Found In This Entity!!!")
        return DictWrapper(value) if isinstance(value, dict) else value

    def __setattr__(self, key, value):
        self[key] = value

    def __delattr__(self, key):
        del self[key]