icemammoth-common 0.1.0.dev0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. icemammoth_common-0.1.0.dev0/PKG-INFO +18 -0
  2. icemammoth_common-0.1.0.dev0/README.md +0 -0
  3. icemammoth_common-0.1.0.dev0/icemammoth_common/__init__.py +0 -0
  4. icemammoth_common-0.1.0.dev0/icemammoth_common/base/__init__.py +1 -0
  5. icemammoth_common-0.1.0.dev0/icemammoth_common/base/base_object.py +17 -0
  6. icemammoth_common-0.1.0.dev0/icemammoth_common/base/singleton.py +20 -0
  7. icemammoth_common-0.1.0.dev0/icemammoth_common/database/__init__.py +0 -0
  8. icemammoth_common-0.1.0.dev0/icemammoth_common/database/base_dao.py +320 -0
  9. icemammoth_common-0.1.0.dev0/icemammoth_common/database/connect_pool.py +53 -0
  10. icemammoth_common-0.1.0.dev0/icemammoth_common/database/transaction.py +80 -0
  11. icemammoth_common-0.1.0.dev0/icemammoth_common/decorator/__init__.py +0 -0
  12. icemammoth_common-0.1.0.dev0/icemammoth_common/decorator/retry.py +170 -0
  13. icemammoth_common-0.1.0.dev0/icemammoth_common/error/__init__.py +1 -0
  14. icemammoth_common-0.1.0.dev0/icemammoth_common/error/error.py +38 -0
  15. icemammoth_common-0.1.0.dev0/icemammoth_common/factory/__init__.py +0 -0
  16. icemammoth_common-0.1.0.dev0/icemammoth_common/factory/factory.py +44 -0
  17. icemammoth_common-0.1.0.dev0/icemammoth_common/factory/register.py +63 -0
  18. icemammoth_common-0.1.0.dev0/icemammoth_common/util/__init__.py +0 -0
  19. icemammoth_common-0.1.0.dev0/icemammoth_common/util/collection_util.py +12 -0
  20. icemammoth_common-0.1.0.dev0/icemammoth_common/util/digest_util.py +6 -0
  21. icemammoth_common-0.1.0.dev0/icemammoth_common/util/file_util.py +204 -0
  22. icemammoth_common-0.1.0.dev0/icemammoth_common/util/func_util.py +5 -0
  23. icemammoth_common-0.1.0.dev0/icemammoth_common/util/html_util.py +31 -0
  24. icemammoth_common-0.1.0.dev0/icemammoth_common/util/http_util.py +88 -0
  25. icemammoth_common-0.1.0.dev0/icemammoth_common/util/image_util.py +59 -0
  26. icemammoth_common-0.1.0.dev0/icemammoth_common/util/log_util.py +92 -0
  27. icemammoth_common-0.1.0.dev0/icemammoth_common/util/module_util.py +28 -0
  28. icemammoth_common-0.1.0.dev0/icemammoth_common/util/number_util.py +56 -0
  29. icemammoth_common-0.1.0.dev0/icemammoth_common/util/object_util.py +44 -0
  30. icemammoth_common-0.1.0.dev0/icemammoth_common/util/path_util.py +31 -0
  31. icemammoth_common-0.1.0.dev0/icemammoth_common/util/prometheus_util.py +29 -0
  32. icemammoth_common-0.1.0.dev0/icemammoth_common/util/string_util.py +64 -0
  33. icemammoth_common-0.1.0.dev0/icemammoth_common/util/time_util.py +79 -0
  34. icemammoth_common-0.1.0.dev0/icemammoth_common/util/url_util.py +5 -0
  35. icemammoth_common-0.1.0.dev0/icemammoth_common/util/xml_util.py +57 -0
  36. icemammoth_common-0.1.0.dev0/icemammoth_common.egg-info/PKG-INFO +18 -0
  37. icemammoth_common-0.1.0.dev0/icemammoth_common.egg-info/SOURCES.txt +43 -0
  38. icemammoth_common-0.1.0.dev0/icemammoth_common.egg-info/dependency_links.txt +1 -0
  39. icemammoth_common-0.1.0.dev0/icemammoth_common.egg-info/requires.txt +5 -0
  40. icemammoth_common-0.1.0.dev0/icemammoth_common.egg-info/top_level.txt +2 -0
  41. icemammoth_common-0.1.0.dev0/setup.cfg +4 -0
  42. icemammoth_common-0.1.0.dev0/setup.py +37 -0
  43. icemammoth_common-0.1.0.dev0/test/__init__.py +0 -0
  44. icemammoth_common-0.1.0.dev0/test/util/__init__.py +0 -0
  45. icemammoth_common-0.1.0.dev0/test/util/test_file_util.py +7 -0
@@ -0,0 +1,18 @@
1
+ Metadata-Version: 2.1
2
+ Name: icemammoth_common
3
+ Version: 0.1.0.dev0
4
+ Summary: common tools collection
5
+ Home-page:
6
+ Author: Klein
7
+ Author-email: myicemammoth@gmail.com
8
+ Keywords: common utils
9
+ Classifier: Development Status :: 3 - Alpha
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Programming Language :: Python :: 3.12
13
+ Description-Content-Type: text/markdown
14
+ Requires-Dist: beautifulsoup4>=4.12.3
15
+ Requires-Dist: coloredlogs>=15.0.1
16
+ Requires-Dist: mysql_connector_repackaged>=0.3.1
17
+ Requires-Dist: Pillow>=10.3.0
18
+ Requires-Dist: Requests>=2.32.2
File without changes
@@ -0,0 +1 @@
1
+ from .base_object import *
@@ -0,0 +1,17 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
class BaseObject(object):
    """Base class whose str()/repr() render every public, non-callable
    attribute as a dict-like string, for easy debugging and logging."""

    def __repr__(self) -> str:
        return self.__str__()

    def __str__(self) -> str:
        # dir() returns names sorted alphabetically, so the rendered
        # dict is ordered by attribute name.
        snapshot = {}
        for name in dir(self):
            if name.startswith("__"):
                continue
            value = getattr(self, name)
            if callable(value):
                continue
            snapshot[name] = str(value)
        return f"{snapshot}"
@@ -0,0 +1,20 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
class SingletonMeta(type):
    """Metaclass implementing the singleton pattern.

    Of the usual implementation strategies (base class, decorator,
    metaclass), the metaclass fits best: the first construction caches the
    instance, every later call returns the cached one.

    Note: constructor arguments passed after the first instantiation are
    ignored — the original instance is returned unchanged.
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            obj = super().__call__(*args, **kwargs)
            cls._instances[cls] = obj
            return obj
@@ -0,0 +1,320 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ from typing import Any, Dict, List, Tuple, TypeVar, Generic
4
+ from base import BaseObject
5
+
6
+ from database.connect_pool import ConnectPool
7
+ from .transaction import TransactionManager
8
+ from icemammoth_common.util.log_util import logger
9
+ from icemammoth_common.util import string_util, object_util
10
+
11
class ModelAttrValueConvertor(object):
    """Hook interface used by BaseDAO to translate values between model
    attributes and database columns. Both directions are no-ops here
    (returning None); subclasses override what they need."""

    def attrValueToColumnValue(self, table, attrName, attrValue):
        """Convert *attrValue* of ``table``/*attrName* into a column value."""

    def columnValueToAttrValue(self, table, columnName, columnValue):
        """Convert *columnValue* of ``table``/*columnName* into an attribute value."""
19
+
20
class DAOInheritException(BaseException):
    """Raised when a BaseDAO subclass does not declare ``__ORM_CONFIG__``.

    NOTE(review): deriving from BaseException (not Exception) means generic
    ``except Exception`` handlers will not catch this — confirm intentional.
    """

    def __init__(self, msg):
        # BUG FIX: the original called ``super(msg)``, which raises
        # TypeError the moment the exception is constructed; forward the
        # message to the base-class initializer instead.
        super().__init__(msg)
24
+
25
+
26
class ORMConfig(BaseObject):
    """Declarative ORM settings consumed by BaseDAO subclasses."""

    def __init__(
        self,
        table: str = None,
        model=None,
        columnAttrMap: Dict[str, str] = None,
        attrValueConvertor: ModelAttrValueConvertor = None,
    ):
        # Target table name and the model class mapped onto it.
        self.table = table
        self.model = model
        # column name -> attribute name; an empty value means "derive the
        # attribute name from the snake_case column name".
        self.columnAttrMap = columnAttrMap
        # Optional value-translation hook applied on read and write.
        self.attrValueConvertor = attrValueConvertor
40
+
41
+
42
# Generics require declaring the type parameter with TypeVar first.
# ModelType is the model class a BaseDAO subclass maps (bound to BaseObject).
ModelType = TypeVar("ModelType", bound=BaseObject)
44
+
45
def defaultProcessFunc(cursor):
    """Fallback cursor processor used by BaseDAO.execute: fetch all rows."""
    return cursor.fetchall()
47
+
48
+
49
class BaseDAO(Generic[ModelType]):
    """Generic data-access base class mapping ``ModelType`` instances to rows
    of one table.

    Subclasses must set the class attribute ``__ORM_CONFIG__`` (an
    :class:`ORMConfig`) describing the table, the model class, the
    column->attribute map and an optional value convertor.
    """

    def __init__(self):
        # BUG FIX: use getattr so a subclass that forgot __ORM_CONFIG__ gets
        # the intended DAOInheritException instead of an AttributeError.
        ormConfig = getattr(self, "__ORM_CONFIG__", None)
        if not ormConfig:
            raise DAOInheritException(
                f"class {self.__class__.__name__} inherit from BaseDAO must set attribute __ORM_CONFIG__"
            )
        self.ormConfig = ormConfig
        self.table = self.ormConfig.table
        self.modelType = self.ormConfig.model
        # Attribute names discovered on a fresh model instance.
        self.modelAttrs = object_util.fetchObjectAttributes(self.ormConfig.model())
        self.attrColumnMap: Dict[str, str] = {}
        self.columnAttrMap: Dict[str, str] = {}
        self.mappedColumns: List[str] = []
        self.mappedAttrs: List[str] = []
        for column, attr in self.ormConfig.columnAttrMap.items():
            # An empty mapping value means "derive the attribute name from
            # the snake_case column name".
            if not attr:
                attr = string_util.underscore_to_camel(column)
            if attr not in self.modelAttrs:
                # NOTE(review): NotExistError is not imported in this module;
                # confirm it is provided (e.g. by icemammoth_common.error) or
                # this raise will itself fail with NameError.
                raise NotExistError(
                    f"dao {self.__class__.__name__} orm config error, class {self.ormConfig.model.__name__} doesn't exist attribute {attr} which is mapped with column {self.table}.{column}"
                )
            self.attrColumnMap[attr] = column
            self.columnAttrMap[column] = attr
            self.mappedColumns.append(column)
            self.mappedAttrs.append(attr)

        self.attrValueConvertor = self.ormConfig.attrValueConvertor
        self.connectionPool = ConnectPool()
        self.transactionManager = TransactionManager()

    def _toColumnValue(self, attr: str, value: Any) -> Any:
        """Run the optional ModelAttrValueConvertor on one attribute value."""
        if self.attrValueConvertor:
            return self.attrValueConvertor.attrValueToColumnValue(self.table, attr, value)
        return value

    def _fetchValuesByAttrs(self, model, attrs=None) -> List[Any]:
        """Collect the (converted) column values of *model* for *attrs*
        (defaults to every mapped attribute), in mapping order."""
        attrValueMap = object_util.getObjectAttributesAndValues(model)
        attrs = attrs if attrs else self.mappedAttrs
        return [self._toColumnValue(attr, attrValueMap.get(attr)) for attr in attrs]

    def fillRowDataToModel(self, columns: List[str], row: List[Any], model):
        """Copy one result row into *model*, converting values through the
        optional convertor. Returns *model*, or None when row/columns is None."""
        if row is None or columns is None:
            return None
        for idx, column in enumerate(columns):
            # _fetchAttrByColum raises TypeError for unknown columns, so the
            # falsy check below is a defensive second line (kept from the
            # original behavior).
            attr = self._fetchAttrByColum(column)
            if not attr:
                raise NotExistError(
                    f"dao {self.__class__.__name__} convert row to model failed, class {self.ormConfig.model.__name__} no attribute mapped with column {self.table}.{column}."
                )
            value = row[idx]
            if self.attrValueConvertor:
                value = self.attrValueConvertor.columnValueToAttrValue(self.table, column, value)
            setattr(model, attr, value)
        return model

    def _insertData(self, cursor):
        # Auto-increment id of the inserted row.
        return cursor.lastrowid

    def _insertDatas(self, cursor):
        # (first auto-increment id, number of affected rows).
        return (cursor.lastrowid, cursor.rowcount)

    def _readDatas(self, cursor):
        # Returns None (not []) for an empty result set, as callers expect.
        results = cursor.fetchall()
        if not results:
            return None
        return [
            self.fillRowDataToModel(cursor.column_names, result, self.ormConfig.model())
            for result in results
        ]

    def _readData(self, cursor):
        result = cursor.fetchone()
        return self.fillRowDataToModel(cursor.column_names, result, self.ormConfig.model())

    def _deleteDatas(self, cursor):
        return cursor.rowcount

    def _updateDatas(self, cursor):
        return cursor.rowcount

    def execute(self, sql: str, params: List[Any], process_func=None, executeMany=False):
        """Run *sql* with *params* on the transaction connection when one is
        active, otherwise on a pooled connection, and feed the cursor to
        *process_func* (default: fetch all rows)."""
        connection = None
        cursor = None
        try:
            if self.transactionManager.inTransaction:
                connection = self.transactionManager.connection
            else:
                connection = self.connectionPool.getConnection()
            cursor = connection.cursor()
            if executeMany:
                cursor.executemany(sql, params)
            else:
                cursor.execute(sql, params)

            if process_func is None:
                process_func = defaultProcessFunc
            return process_func(cursor)
        except Exception:
            logger.exception(
                f"execute sql failed!sql:{sql},params:{params},executeMany:{executeMany}"
            )
            raise
        finally:
            # BUG FIX: the original referenced ``cursor.close`` without
            # calling it, leaking every cursor; close it here so the error
            # path is covered too.
            if cursor:
                cursor.close()
            # Pooled connections are returned immediately; a transaction
            # connection stays open until commit/rollback.
            if not self.transactionManager.inTransaction and connection:
                connection.close()

    def insertData(self, model: ModelType, ignoreDuplicate=False, duplicateUpdateAttrs: List[str] = None) -> int:
        """Insert one model; returns the auto-increment id.

        BUG FIX: duplicateUpdateAttrs default changed from a shared mutable
        ``[]`` to None (treated identically downstream)."""
        sql: str = self._generateInsertSql(ignoreDuplicate=ignoreDuplicate, duplicateUpdateAttrs=duplicateUpdateAttrs)
        logger.debug(f"sql is {sql}")
        params: List[Any] = self._fetchValuesByAttrs(model)
        return self.execute(sql, params, self._insertData)

    def insertDatas(self, models: List[ModelType], ignoreDuplicate=False, duplicateUpdateAttrs: List[str] = None) -> Tuple[int, int]:
        """Bulk insert; returns (first auto-increment id, affected rows)."""
        sql: str = self._generateInsertSql(ignoreDuplicate=ignoreDuplicate, duplicateUpdateAttrs=duplicateUpdateAttrs)
        logger.debug(f"sql is {sql}")
        params: List[List[Any]] = [self._fetchValuesByAttrs(model) for model in models]
        return self.execute(sql, params, self._insertDatas, True)

    def _generateInsertSql(self, insertAttrs: List[str] = None, ignoreDuplicate=False, duplicateUpdateAttrs: List[str] = None):
        """Build an INSERT statement with optional IGNORE or
        ON DUPLICATE KEY UPDATE clause (the two are mutually exclusive;
        IGNORE wins)."""
        if insertAttrs:
            columns = [self._fetchColumnByAttr(attr) for attr in insertAttrs]
        else:
            columns = self.mappedColumns
        columnsSQL = ",".join(columns)
        ignoreDuplicateTag = 'IGNORE' if ignoreDuplicate else ''
        valuesSQL = ','.join(['%s'] * len(columns))

        duplicateUpdateSQL: str = ""
        if not ignoreDuplicate and duplicateUpdateAttrs:
            updateColumns = [self._fetchColumnByAttr(attr) for attr in duplicateUpdateAttrs]
            assignments = ",  ".join(f"{column} = VALUES({column})" for column in updateColumns)
            duplicateUpdateSQL = f"ON DUPLICATE KEY UPDATE {assignments}"

        return f"INSERT {ignoreDuplicateTag} INTO {self.table} ({columnsSQL}) VALUES ({valuesSQL}) {duplicateUpdateSQL}"

    def readData(self, readAttrs: List[str] = None, **conditions) -> ModelType:
        """Read at most one row matching *conditions* into a model."""
        sql = self._generateReadSql(readOne=True, readAttrs=readAttrs, **conditions)
        # BUG FIX: apply the attrValueConvertor to condition values — the
        # original passed raw values here while delete/update converted them.
        params = [self._toColumnValue(attr, value) for attr, value in conditions.items()]
        return self.execute(sql, params, self._readData)

    def readDatas(self, readAttrs: List[str] = None, **conditions) -> List[ModelType]:
        """Read every row matching *conditions*; None when nothing matches."""
        sql = self._generateReadSql(readOne=False, readAttrs=readAttrs, **conditions)
        # BUG FIX: condition values run through the convertor (see readData).
        params = [self._toColumnValue(attr, value) for attr, value in conditions.items()]
        return self.execute(sql, params, self._readDatas)

    def readDatasByConditionSQL(
        self, readAttrs: List[str] = None, conditionSql: str = None, params=None
    ) -> List[ModelType]:
        """Read rows using a caller-supplied WHERE fragment and params.

        NOTE(review): conditionSql is interpolated into the statement — it
        must never be built from untrusted input; only *params* are escaped.
        """
        if readAttrs:
            readColumns = [self._fetchColumnByAttr(attr) for attr in readAttrs]
        else:
            readColumns = self.mappedColumns
        columnsSQL: str = ",".join(readColumns)

        selectSQL = f"SELECT {columnsSQL} FROM {self.table} WHERE {conditionSql}"
        return self.execute(selectSQL, params, self._readDatas)

    def _generateReadSql(self, readOne: bool = False, readAttrs: List[str] = None, **conditions):
        """Build a SELECT with equality conditions (params bound separately
        by the caller, in the same ``conditions`` iteration order)."""
        if readAttrs:
            readColumns = [self._fetchColumnByAttr(attr) for attr in readAttrs]
        else:
            readColumns = self.mappedColumns
        columnsSQL: str = ",".join(readColumns)

        conditionSQL: str = " and ".join(
            f"{self._fetchColumnByAttr(attr)} = %s" for attr in conditions
        )

        limitSQL: str = "LIMIT 1" if readOne else ""

        return f"SELECT {columnsSQL} FROM {self.table} WHERE {conditionSQL} {limitSQL}"

    def _buildConditionSql(self, conditions: Dict[str, Any]) -> Tuple[str, List[Any]]:
        """Translate attribute equality conditions into a WHERE fragment plus
        its (converted) parameter list."""
        conditionColumns = []
        params = []
        for attr, value in conditions.items():
            conditionColumns.append(self._fetchColumnByAttr(attr))
            params.append(self._toColumnValue(attr, value))
        conditionSQL = " AND ".join(f"{column} = %s" for column in conditionColumns)
        return conditionSQL, params

    def deleteDatas(self, **conditions):
        """Delete rows matching *conditions*; returns affected row count."""
        conditionSQL, params = self._buildConditionSql(conditions)
        deleteSQL = f"DELETE FROM {self.table} WHERE {conditionSQL}"
        return self.execute(deleteSQL, params, self._deleteDatas)

    def updateDatas(self, updateAttrs: Dict[str, str], **conditions):
        """Set *updateAttrs* on rows matching *conditions*; returns affected
        row count."""
        updateColumns = []
        params = []
        for attr, value in updateAttrs.items():
            updateColumns.append(self._fetchColumnByAttr(attr))
            params.append(self._toColumnValue(attr, value))
        updateColumnSQL = ', '.join(f'{column} = %s' for column in updateColumns)

        conditionSQL, conditionParams = self._buildConditionSql(conditions)
        params.extend(conditionParams)

        updateSQL = f"UPDATE {self.table} SET {updateColumnSQL} WHERE {conditionSQL}"
        return self.execute(updateSQL, params, self._updateDatas)

    def _fetchColumnByAttr(self, attr: str):
        """Resolve a model attribute to its column; TypeError when unmapped."""
        column = self.attrColumnMap.get(attr)
        if not column:
            raise TypeError(f'table {self.table} not column bind with attr {self.modelType.__qualname__}.{attr}')
        return column

    def _fetchAttrByColum(self, column):
        """Resolve a column to its model attribute; TypeError when unmapped."""
        attr = self.columnAttrMap.get(column)
        if not attr:
            raise TypeError(f'{self.modelType.__qualname__} not attribute bind with table {self.table} column {column}')
        return attr
@@ -0,0 +1,53 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ from mysql.connector import pooling
4
+ from icemammoth_common.util import module_util
5
+ from icemammoth_common.base.singleton import SingletonMeta
6
+ from icemammoth_common.util import module_util
7
+ from icemammoth_common.util import log_util
8
+
9
CONFIG_MODULE_NAME = "config"


class ConnectPool(metaclass=SingletonMeta):
    """Process-wide (singleton) MySQL connection pool, configured entirely
    from a user-supplied ``config`` module."""

    def __init__(self):
        # Fail fast when the config module is missing — the pool cannot be
        # built without it.
        if not module_util.module_exist(CONFIG_MODULE_NAME):
            log_util.logger.exception(
                "config module is not exist, must set config module first!")
            raise Exception(
                "config module is not exist, must set config module first!")

        def _required(name):
            # Read a mandatory setting from the config module.
            return module_util.get_variable(
                CONFIG_MODULE_NAME, name, except_unexist=True)

        self.cnxpool = pooling.MySQLConnectionPool(
            host=_required("DATABASE_HOST"),
            port=_required("DATABASE_PORT"),
            user=_required("DATABASE_USER"),
            password=_required("DATABASE_PASSWORD"),
            database=_required("DATABASE_DB_NAME"),
            pool_name="connection-pool",
            pool_reset_session=True,
            pool_size=_required("DATABASE_CONNECT_POOL_SIZE"),
            autocommit=True,
        )

    def getConnection(self):
        """Borrow a connection from the pool."""
        return self.cnxpool.get_connection()
@@ -0,0 +1,80 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ import threading
4
+ from icemammoth_common.decorator import decorator
5
+ from icemammoth_common.util.log_util import logger
6
+ from database.connect_pool import ConnectPool
7
+
8
# NOTE: threading.local() returns a fresh object on every call — it is not
# unique per thread by itself. It must live at module level so all code in
# one thread shares the same slot; created inside a function it would be
# rebuilt on each call and could not share state. (Same idea as Java's
# ThreadLocal.)
local = threading.local()
14
+
15
class TransactionManager(object):
    """Tracks the connection and open/closed state of the currently active
    database transaction.

    NOTE(review): ``connection`` and ``inTransaction`` are class attributes,
    so they are shared by every TransactionManager instance; per-thread
    isolation appears to rely on the module-level thread-local in this file —
    confirm.
    """

    # Shared transaction state (class-level on purpose).
    connection = None
    inTransaction = False

    def __init__(self):
        self.connectionPool = ConnectPool()

    def startTx(self):
        """Borrow a pooled connection and begin a transaction on it."""
        TransactionManager.connection = self.connectionPool.getConnection()
        TransactionManager.connection.start_transaction()
        TransactionManager.inTransaction = True

    def _finish(self, action):
        """Run *action* then close the connection; always clear the shared
        state, even when the action or close fails."""
        try:
            action()
            TransactionManager.connection.close()
        finally:
            TransactionManager.connection = None
            TransactionManager.inTransaction = False

    def commit(self):
        """Commit the active transaction and release its connection."""
        self._finish(lambda: TransactionManager.connection.commit())

    def rollback(self):
        """Roll back the active transaction and release its connection."""
        self._finish(lambda: TransactionManager.connection.rollback())
43
+
44
def transaction():
    """Build a decorator that runs the wrapped callable inside a database
    transaction.

    Nested @transaction() calls within one thread join the outermost
    transaction: only the first (outermost) level starts, commits or rolls
    back; inner levels simply run their function on the shared connection.
    """
    @decorator
    def transaction_decorator(f, *fargs, **fkwargs):
        # True when this call opened the transaction (outermost level).
        firstLevel = False
        # Thread-local probe: the first entry in this thread raises
        # AttributeError, which initializes the slot.
        try:
            local.transactionManager
        except AttributeError as error:
            logger.debug('no transactionManager in thread local')
            logger.debug('enter transaction scope')
            local.transactionManager = None

        if not local.transactionManager:
            firstLevel = True
            local.transactionManager = TransactionManager()

        try:
            if firstLevel:
                logger.debug(msg='start transaction')
                local.transactionManager.startTx()
            result = f(*fargs,**fkwargs)
            if firstLevel:
                logger.debug('commit transaction')
                local.transactionManager.commit()
            return result
        except Exception as exception:
            # Only the outermost level rolls back, and only while the
            # transaction is still open (commit resets inTransaction, so a
            # failure after commit does not trigger a rollback).
            if firstLevel and local.transactionManager.inTransaction:
                logger.exception(f'transaction execute failed, rollback!func:{f.__module__}:{f.__qualname__}')
                local.transactionManager.rollback()
            raise exception
        finally:
            # The outermost level clears the thread-local marker on exit so
            # a later call in this thread starts a fresh transaction.
            if firstLevel:
                logger.debug('exit transaction scope')
                local.transactionManager = None

    return transaction_decorator
@@ -0,0 +1,170 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ import random
4
+ import time
5
+ import traceback
6
+ from icemammoth_common.util.log_util import logger as Logger
7
+ from functools import partial
8
+
9
+ from decorator import decorator
10
+
11
+
12
def __retry_internal(
    f,
    exceptions=Exception,
    tries=-1,
    delay=0,
    max_delay=10,
    backoff=1,
    jitter=0,
    logger=Logger,
    log_traceback=False,
    on_exception=None,
):
    """
    Executes a function and retries it if it failed.

    :param f: the function to execute (normally a functools.partial).
    :param exceptions: an exception or a tuple of exceptions to catch. default: Exception.
    :param tries: the maximum number of attempts. default: -1 (infinite).
    :param delay: initial delay between attempts. default: 0.
    :param max_delay: the maximum value of delay. default: 10. None means no limit.
    :param backoff: multiplier applied to delay between attempts. default: 1 (no backoff).
    :param jitter: extra seconds added to delay between attempts. default: 0.
                   fixed if a number, random if a range tuple (min, max)
    :param logger: logger.warning(...) will be called on failed attempts.
                   default: the package logger. if None, logging is disabled.
    :param log_traceback: also log the traceback of each failed attempt.
    :param on_exception: handler called when an exception occurs; receives the
                         captured exception. Retries stop (and the exception
                         propagates) when the handler returns True. default: None
    :returns: the result of the f function.
    """
    _tries, _delay = tries, delay
    while _tries:
        try:
            return f()
        except exceptions as e:
            if on_exception is not None and on_exception(e):
                raise e

            _tries -= 1
            # BUG FIX: the original tested ``_tries <= 0``, which made any
            # negative ``tries`` (documented as "retry forever") give up
            # after the very first failure. Only an exact zero means the
            # attempts are exhausted; negative counts never reach it.
            if _tries == 0:
                raise e

            # BUG FIX: honor logger=None ("logging is disabled") instead of
            # crashing on logger.warning.
            if logger is not None:
                try:
                    # f is normally a functools.partial; fall back to a plain
                    # description for other callables.
                    func_module = f.func.__module__
                    func_qualname = f.func.__qualname__
                except AttributeError:
                    func_module = getattr(f, "__module__", "<unknown>")
                    func_qualname = str(f)
                logger.warning(
                    f'method {func_module}.{func_qualname} execute failed, ' + \
                    f'will do {tries-_tries}th retry in {_delay} seconds. ' + \
                    f'error_type:{e.__class__.__qualname__}, error_message:{e}'
                )
                if log_traceback:
                    logger.warning(traceback.format_exc())

            time.sleep(_delay)
            _delay *= backoff

            _delay += random.uniform(*jitter) if isinstance(jitter, tuple) else jitter
            if max_delay is not None:
                _delay = min(_delay, max_delay)
+
74
+
75
def retry(
    exceptions=Exception,
    tries=-1,
    delay=0,
    max_delay=10,
    backoff=1,
    jitter=0,
    logger=Logger,
    log_traceback=False,
    on_exception=None,
):
    """Returns a retry decorator.

    :param exceptions: an exception or a tuple of exceptions to catch. default: Exception.
    :param tries: the maximum number of attempts. default: -1 (infinite).
    :param delay: initial delay between attempts. default: 0.
    :param max_delay: the maximum value of delay. default: 10. None means no limit.
    :param backoff: multiplier applied to delay between attempts. default: 1 (no backoff).
    :param jitter: extra seconds added to delay between attempts. default: 0.
                   fixed if a number, random if a range tuple (min, max)
    :param logger: logger.warning(...) will be called on failed attempts.
                   default: the package logger. if None, logging is disabled.
    :param log_traceback: also log the traceback of each failed attempt.
    :param on_exception: handler called when an exception occurs; receives the
                         captured exception. Retries stop when it returns True.
    :returns: a retry decorator.
    """

    @decorator
    def retry_decorator(f, *fargs, **fkwargs):
        # Bind the call arguments once, then delegate the retry loop.
        bound = partial(f, *(fargs or ()), **(fkwargs or {}))
        return __retry_internal(
            bound,
            exceptions=exceptions,
            tries=tries,
            delay=delay,
            max_delay=max_delay,
            backoff=backoff,
            jitter=jitter,
            logger=logger,
            log_traceback=log_traceback,
            on_exception=on_exception,
        )

    return retry_decorator
121
+
122
+
123
def retry_call(
    f,
    fargs=None,
    fkwargs=None,
    exceptions=Exception,
    tries=-1,
    delay=0,
    max_delay=10,
    backoff=1,
    jitter=0,
    logger=Logger,
    log_traceback=False,
    on_exception=None,
):
    """
    Calls a function and re-executes it if it failed.

    :param f: the function to execute.
    :param fargs: the positional arguments of the function to execute.
    :param fkwargs: the named arguments of the function to execute.
    :param exceptions: an exception or a tuple of exceptions to catch. default: Exception.
    :param tries: the maximum number of attempts. default: -1 (infinite).
    :param delay: initial delay between attempts. default: 0.
    :param max_delay: the maximum value of delay. default: 10. None means no limit.
    :param backoff: multiplier applied to delay between attempts. default: 1 (no backoff).
    :param jitter: extra seconds added to delay between attempts. default: 0.
                   fixed if a number, random if a range tuple (min, max)
    :param logger: logger.warning(...) will be called on failed attempts.
                   default: the package logger. if None, logging is disabled.
    :param log_traceback: also log the traceback of each failed attempt.
    :param on_exception: handler called when an exception occurs; receives the
                         captured exception. Retries stop when it returns True.
    :returns: the result of the f function.
    """
    # Bind the call arguments once, then delegate the retry loop.
    bound = partial(f, *(fargs or ()), **(fkwargs or {}))
    return __retry_internal(
        bound,
        exceptions=exceptions,
        tries=tries,
        delay=delay,
        max_delay=max_delay,
        backoff=backoff,
        jitter=jitter,
        logger=logger,
        log_traceback=log_traceback,
        on_exception=on_exception,
    )