python3-commons 0.0.0__py3-none-any.whl → 0.2.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of python3-commons might be problematic.
- python3_commons/api_client.py +44 -16
- python3_commons/audit.py +127 -138
- python3_commons/auth.py +53 -47
- python3_commons/cache.py +36 -38
- python3_commons/conf.py +37 -6
- python3_commons/db/__init__.py +15 -10
- python3_commons/db/helpers.py +5 -7
- python3_commons/db/models/__init__.py +8 -2
- python3_commons/db/models/auth.py +2 -2
- python3_commons/db/models/common.py +8 -6
- python3_commons/db/models/rbac.py +5 -5
- python3_commons/fs.py +2 -2
- python3_commons/helpers.py +44 -13
- python3_commons/object_storage.py +135 -73
- python3_commons/permissions.py +2 -4
- python3_commons/serializers/common.py +8 -0
- python3_commons/serializers/json.py +5 -7
- python3_commons/serializers/msgpack.py +19 -21
- python3_commons/serializers/msgspec.py +50 -27
- {python3_commons-0.0.0.dist-info → python3_commons-0.2.16.dist-info}/METADATA +13 -13
- python3_commons-0.2.16.dist-info/RECORD +30 -0
- {python3_commons-0.0.0.dist-info → python3_commons-0.2.16.dist-info}/WHEEL +1 -1
- python3_commons-0.0.0.dist-info/RECORD +0 -29
- /python3_commons/{logging → log}/__init__.py +0 -0
- /python3_commons/{logging → log}/filters.py +0 -0
- /python3_commons/{logging → log}/formatters.py +0 -0
- {python3_commons-0.0.0.dist-info → python3_commons-0.2.16.dist-info}/licenses/AUTHORS.rst +0 -0
- {python3_commons-0.0.0.dist-info → python3_commons-0.2.16.dist-info}/licenses/LICENSE +0 -0
- {python3_commons-0.0.0.dist-info → python3_commons-0.2.16.dist-info}/top_level.txt +0 -0
python3_commons/cache.py
CHANGED
@@ -1,7 +1,8 @@
 import logging
 import socket
+from collections.abc import Mapping, Sequence
 from platform import platform
-from typing import Any
+from typing import Any
 
 import valkey
 from pydantic import RedisDsn
@@ -34,10 +35,9 @@ class AsyncValkeyClient(metaclass=SingletonMeta):
 
     @staticmethod
     def _get_keepalive_options():
-        if platform
+        if platform in {'linux', 'darwin'}:
             return {socket.TCP_KEEPIDLE: 10, socket.TCP_KEEPINTVL: 5, socket.TCP_KEEPCNT: 5}
-
-        return {}
+        return {}
 
     def _initialize_sentinel(self, dsn: RedisDsn):
         sentinel = Sentinel(
@@ -88,7 +88,7 @@ async def delete(*names: str | bytes | memoryview):
     await get_valkey_client().delete(*names)
 
 
-async def store_bytes(name: str, data: bytes, ttl: int = None, if_not_set: bool = False):
+async def store_bytes(name: str, data: bytes, ttl: int | None = None, *, if_not_set: bool = False):
     r = get_valkey_client()
 
     return await r.set(name, data, ex=ttl, nx=if_not_set)
@@ -100,18 +100,18 @@ async def get_bytes(name: str) -> bytes | None:
     return await r.get(name)
 
 
-async def store(name: str, obj: Any, ttl: int = None, if_not_set: bool = False):
-    return await store_bytes(name, serialize_msgpack_native(obj), ttl, if_not_set)
+async def store(name: str, obj: Any, ttl: int | None = None, *, if_not_set: bool = False):
+    return await store_bytes(name, serialize_msgpack_native(obj), ttl, if_not_set=if_not_set)
 
 
-async def get(name: str, default=None, data_type: Any = None) -> Any:
+async def get(name: str, default: Any | None = None, data_type: Any = None) -> Any | None:
     if data := await get_bytes(name):
         return deserialize_msgpack_native(data, data_type)
 
     return default
 
 
-async def store_string(name: str, data: str, ttl: int = None):
+async def store_string(name: str, data: str, ttl: int | None = None):
     await store_bytes(name, data.encode(), ttl)
 
 
@@ -122,7 +122,7 @@ async def get_string(name: str) -> str | None:
     return None
 
 
-async def store_sequence(name: str, data: Sequence, ttl: int = None):
+async def store_sequence(name: str, data: Sequence, ttl: int | None = None):
     if data:
         try:
             r = get_valkey_client()
@@ -130,8 +130,8 @@ async def store_sequence(name: str, data: Sequence, ttl: int = None):
 
             if ttl:
                 await r.expire(name, ttl)
-        except valkey.exceptions.ConnectionError
-            logger.
+        except valkey.exceptions.ConnectionError:
+            logger.exception('Failed to store sequence in cache.')
 
 
 async def get_sequence(name: str, _type: type = list) -> Sequence:
@@ -141,7 +141,7 @@ async def get_sequence(name: str, _type: type = list) -> Sequence:
         return _type(map(deserialize_msgpack_native, lrange))
 
 
-async def store_dict(name: str, data: Mapping, ttl: int = None):
+async def store_dict(name: str, data: Mapping, ttl: int | None = None):
     if data:
         try:
             r = get_valkey_client()
@@ -150,22 +150,20 @@ async def store_dict(name: str, data: Mapping, ttl: int = None):
 
             if ttl:
                 await r.expire(name, ttl)
-        except valkey.exceptions.ConnectionError
-            logger.
+        except valkey.exceptions.ConnectionError:
+            logger.exception('Failed to store dict in cache.')
 
 
 async def get_dict(name: str, value_data_type=None) -> dict | None:
     r = get_valkey_client()
 
     if data := await r.hgetall(name):
-
-
-        return data
+        return {k.decode(): deserialize_msgpack(v, value_data_type) for k, v in data.items()}
 
     return None
 
 
-async def set_dict(name: str, mapping: dict, ttl: int = None):
+async def set_dict(name: str, mapping: dict, ttl: int | None = None):
     if mapping:
         try:
             r = get_valkey_client()
@@ -174,8 +172,8 @@ async def set_dict(name: str, mapping: dict, ttl: int = None):
 
             if ttl:
                 await r.expire(name, ttl)
-        except valkey.exceptions.ConnectionError
-            logger.
+        except valkey.exceptions.ConnectionError:
+            logger.exception('Failed to set dict in cache.')
 
 
 async def get_dict_item(name: str, key: str, data_type=None, default=None):
@@ -184,39 +182,39 @@ async def get_dict_item(name: str, key: str, data_type=None, default=None):
 
         if data := await r.hget(name, key):
             return deserialize_msgpack_native(data, data_type)
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to get dict item from cache.')
 
-            return
-    except valkey.exceptions.ConnectionError as e:
-        logger.error(f'Failed to get dict item from cache: {e}')
+        return None
 
-        return
+    return default
 
 
 async def set_dict_item(name: str, key: str, obj: Any):
     try:
         r = get_valkey_client()
         await r.hset(name, key, serialize_msgpack_native(obj))
-    except valkey.exceptions.ConnectionError
-        logger.
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to set dict item in cache.')
 
 
 async def delete_dict_item(name: str, *keys):
     try:
         r = get_valkey_client()
         await r.hdel(name, *keys)
-    except valkey.exceptions.ConnectionError
-        logger.
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to delete dict item from cache.')
 
 
-async def store_set(name: str, value: set, ttl: int = None):
+async def store_set(name: str, value: set, ttl: int | None = None):
     try:
         r = get_valkey_client()
         await r.sadd(name, *map(serialize_msgpack_native, value))
 
         if ttl:
             await r.expire(name, ttl)
-    except valkey.exceptions.ConnectionError
-        logger.
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to store set in cache.')
 
 
 async def has_set_item(name: str, value: str) -> bool:
@@ -224,8 +222,8 @@ async def has_set_item(name: str, value: str) -> bool:
         r = get_valkey_client()
 
         return await r.sismember(name, serialize_msgpack_native(value)) == 1
-    except valkey.exceptions.ConnectionError
-        logger.
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to check if set has item in cache.')
 
     return False
 
@@ -234,8 +232,8 @@ async def add_set_item(name: str, *values: str):
     try:
         r = get_valkey_client()
         await r.sadd(name, *map(serialize_msgpack_native, values))
-    except valkey.exceptions.ConnectionError
-        logger.
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to add set item into cache.')
 
 
 async def delete_set_item(name: str, value: str):
@@ -249,8 +247,8 @@ async def get_set_members(name: str) -> set[str] | None:
         smembers = await r.smembers(name)
 
         return set(map(deserialize_msgpack_native, smembers))
-    except valkey.exceptions.ConnectionError
-        logger.
+    except valkey.exceptions.ConnectionError:
+        logger.exception('Failed to get set members from cache.')
 
     return None
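Taken together, the cache changes make every ttl parameter explicitly optional (int | None) and turn if_not_set into a keyword-only flag. A minimal usage sketch against the new signatures; the key name and payload are invented for illustration:

# Hypothetical caller of the new cache API; key and value are examples only.
from python3_commons import cache

async def example() -> None:
    # ttl stays positional-or-keyword; if_not_set must now be passed by keyword.
    await cache.store('user:42', {'name': 'alice'}, ttl=300, if_not_set=True)
    profile = await cache.get('user:42', default={})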
python3_commons/conf.py
CHANGED
@@ -1,4 +1,6 @@
-from
+from typing import Literal
+
+from pydantic import Field, HttpUrl, PostgresDsn, RedisDsn, SecretStr, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 
@@ -24,21 +26,50 @@ class ValkeySettings(BaseSettings):
 
 
 class DBSettings(BaseSettings):
-    model_config = SettingsConfigDict(env_prefix='DB_')
+    model_config = SettingsConfigDict(env_prefix='DB_', validate_by_name=True, validate_by_alias=True)
+
+    dsn: PostgresDsn | None = None
+    scheme: str = 'postgresql+asyncpg'
+    host: str = 'localhost'
+    port: int = 5432
+    name: str | None = None
+    user: str | None = None
+    password: SecretStr | None = Field(default=None, alias='DB_PASS')
 
-    dsn: PostgresDsn | None = Field(default=None, serialization_alias='url')
     echo: bool = False
     pool_size: int = 20
     max_overflow: int = 0
     pool_timeout: int = 30
     pool_recycle: int = 1800  # 30 minutes
 
+    @model_validator(mode='after')
+    def build_dsn_if_missing(self) -> 'DBSettings':
+        if self.dsn is None and all(
+            (
+                self.user,
+                self.password,
+                self.name,
+            )
+        ):
+            self.dsn = PostgresDsn.build(
+                scheme=self.scheme,
+                username=self.user,
+                password=self.password.get_secret_value() if self.password else None,
+                host=self.host,
+                port=self.port,
+                path=self.name,
+            )
+
+        return self
+
 
 class S3Settings(BaseSettings):
+    aws_region: str | None = None
+    aws_access_key_id: SecretStr | None = None
+    aws_secret_access_key: SecretStr | None = None
+
     s3_endpoint_url: str | None = None
-
-    s3_access_key_id: SecretStr = ''
-    s3_secret_access_key: SecretStr = ''
+    s3_addressing_style: Literal['path', 'virtual'] = 'virtual'
     s3_secure: bool = True
     s3_bucket: str | None = None
     s3_bucket_root: str | None = None
python3_commons/db/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 import contextlib
 import logging
-from
+from collections.abc import AsyncGenerator, Callable, Mapping
 
 from sqlalchemy import MetaData
 from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_engine_from_config
@@ -24,17 +24,22 @@ class AsyncSessionManager:
         try:
             return self.db_settings[name]
         except KeyError:
-            logger.
+            logger.exception(f'Missing database settings: {name}')
 
             raise
 
     def async_engine_from_db_settings(self, name):
         db_settings = self.get_db_settings(name)
-        configuration =
-
-
+        configuration = {
+            'url': str(db_settings.dsn),
+            'echo': db_settings.echo,
+            'pool_size': db_settings.pool_size,
+            'max_overflow': db_settings.max_overflow,
+            'pool_timeout': db_settings.pool_timeout,
+            'pool_recycle': db_settings.pool_recycle,
+        }
 
-        return
+        return async_engine_from_config(configuration, prefix='')
 
     def get_engine(self, name: str) -> AsyncEngine:
         try:
@@ -57,8 +62,8 @@ class AsyncSessionManager:
 
         return session_maker
 
-    def get_async_session(self, name: str) -> Callable[[], AsyncGenerator[AsyncSession
-        async def get_session() -> AsyncGenerator[AsyncSession
+    def get_async_session(self, name: str) -> Callable[[], AsyncGenerator[AsyncSession]]:
+        async def get_session() -> AsyncGenerator[AsyncSession]:
             session_maker = self.get_session_maker(name)
 
             async with session_maker() as session:
@@ -77,7 +82,7 @@ async def is_healthy(engine: AsyncEngine) -> bool:
             result = await conn.execute('SELECT 1;')
 
             return result.scalar() == 1
-    except Exception
-        logger.
+    except Exception:
+        logger.exception('Database connection is not healthy.')
 
         return False
python3_commons/db/helpers.py
CHANGED
@@ -1,5 +1,5 @@
 import logging
-from
+from collections.abc import Mapping
 
 import sqlalchemy as sa
 from sqlalchemy import asc, desc, func
@@ -26,11 +26,12 @@ def get_query(
     for order_by_col in order_by.split(','):
         if order_by_col.startswith('-'):
             direction = desc
-
+            order_by_col_clean = order_by_col[1:]
         else:
             direction = asc
+            order_by_col_clean = order_by_col
 
-        order_by_cols[
+        order_by_cols[order_by_col_clean] = direction
 
     order_by_clauses = tuple(
         direction(columns[order_by_col][0]) for order_by_col, direction in order_by_cols.items()
@@ -54,9 +55,6 @@ def get_query(
     else:
         where_parts = None
 
-    if where_parts
-        where_clause = sa.and_(*where_parts)
-    else:
-        where_clause = None
+    where_clause = sa.and_(*where_parts) if where_parts else None
 
     return where_clause, order_by_clauses
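The order_by fix stops mutating the loop variable: the leading '-' is stripped only for the dict key, so a string like '-created_at,name' sorts descending on created_at and ascending on name. A standalone sketch of the parsing logic as reconstructed here (the helper name is invented):

from sqlalchemy import asc, desc

def parse_order_by(order_by: str) -> dict:
    # '-created_at,name' -> {'created_at': desc, 'name': asc}
    order_by_cols = {}

    for token in order_by.split(','):
        if token.startswith('-'):
            order_by_cols[token[1:]] = desc
        else:
            order_by_cols[token] = asc

    return order_by_cols

assert parse_order_by('-created_at,name') == {'created_at': desc, 'name': asc}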
python3_commons/db/models/__init__.py
CHANGED
@@ -1,2 +1,8 @@
-from python3_commons.db.models.auth import ApiKey
-from python3_commons.db.models.
+from python3_commons.db.models.auth import ApiKey as ApiKey
+from python3_commons.db.models.auth import User as User
+from python3_commons.db.models.auth import UserGroup as UserGroup
+from python3_commons.db.models.rbac import RBACApiKeyRole as RBACApiKeyRole
+from python3_commons.db.models.rbac import RBACPermission as RBACPermission
+from python3_commons.db.models.rbac import RBACRole as RBACRole
+from python3_commons.db.models.rbac import RBACRolePermission as RBACRolePermission
+from python3_commons.db.models.rbac import RBACUserRole as RBACUserRole
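The import-as-itself form (from ... import ApiKey as ApiKey) is the PEP 484 explicit re-export idiom: strict type checkers (for example mypy with implicit re-exports disabled) treat only such aliased imports as part of the package's public interface, so from python3_commons.db.models import ApiKey keeps type-checking cleanly downstream.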
python3_commons/db/models/auth.py
CHANGED
@@ -1,7 +1,7 @@
 import uuid
+from datetime import datetime
 
 from fastapi_users_db_sqlalchemy import GUID, SQLAlchemyBaseUserTableUUID
-from pydantic import AwareDatetime
 from sqlalchemy import BIGINT, DateTime, ForeignKey, String
 from sqlalchemy.orm import Mapped, mapped_column
 
@@ -32,4 +32,4 @@ class ApiKey(BaseDBUUIDModel, Base):
     )
     partner_name: Mapped[str] = mapped_column(String, unique=True)
     key: Mapped[str] = mapped_column(String, unique=True)
-    expires_at: Mapped[
+    expires_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
python3_commons/db/models/common.py
CHANGED
@@ -1,4 +1,6 @@
-
+import uuid
+from datetime import datetime
+
 from sqlalchemy import BIGINT, DateTime
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.ext.compiler import compiles
@@ -25,15 +27,15 @@ def use_identity(element, compiler, **kw):
 
 class BaseDBModel:
     id: Mapped[int] = mapped_column(BIGINT, primary_key=True, sort_order=-3)
-    created_at: Mapped[
+    created_at: Mapped[datetime] = mapped_column(
         DateTime(timezone=True), nullable=False, server_default=UTCNow(), sort_order=-2
     )
-    updated_at: Mapped[
+    updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), onupdate=UTCNow(), sort_order=-1)
 
 
 class BaseDBUUIDModel:
-    uid: Mapped[UUID] = mapped_column(UUID, primary_key=True, sort_order=-3)
-    created_at: Mapped[
+    uid: Mapped[uuid.UUID] = mapped_column(UUID, primary_key=True, sort_order=-3)
+    created_at: Mapped[datetime] = mapped_column(
         DateTime(timezone=True), nullable=False, server_default=UTCNow(), sort_order=-2
     )
-    updated_at: Mapped[
+    updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), onupdate=UTCNow(), sort_order=-1)
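With created_at and updated_at now annotated as plain datetime (timezone awareness handled at the column level by DateTime(timezone=True)) rather than pydantic's AwareDatetime, a concrete table just inherits the mixin. A hypothetical model built on these bases; the Note class and the import path for the declarative Base are assumptions, not part of the package:

from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column

from python3_commons.db import Base  # assumed location of the declarative base
from python3_commons.db.models.common import BaseDBModel

class Note(BaseDBModel, Base):  # hypothetical model for illustration
    __tablename__ = 'notes'

    text: Mapped[str] = mapped_column(String)
    # id, created_at and updated_at come from BaseDBModel; their negative
    # sort_order values place them before the model's own columns.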
python3_commons/db/models/rbac.py
CHANGED
@@ -1,7 +1,7 @@
 import uuid
+from datetime import datetime
 
 from fastapi_users_db_sqlalchemy import GUID
-from pydantic import AwareDatetime
 from sqlalchemy import CheckConstraint, DateTime, ForeignKey, PrimaryKeyConstraint, String
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import Mapped, mapped_column
@@ -55,8 +55,8 @@ class RBACUserRole(Base):
         ForeignKey('rbac_roles.uid', name='fk_rbac_user_roles_role', ondelete='CASCADE'),
         index=True,
     )
-    starts_at: Mapped[
-    expires_at: Mapped[
+    starts_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
+    expires_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
 
     __table_args__ = (PrimaryKeyConstraint('user_id', 'role_uid', name='pk_rbac_user_roles'),)
 
@@ -74,8 +74,8 @@ class RBACApiKeyRole(Base):
         ForeignKey('rbac_roles.uid', name='fk_rbac_api_key_roles_role', ondelete='CASCADE'),
         index=True,
     )
-    starts_at: Mapped[
-    expires_at: Mapped[
+    starts_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
+    expires_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
 
     __table_args__ = (PrimaryKeyConstraint('api_key_uid', 'role_uid', name='pk_rbac_api_key_roles'),)
 
python3_commons/fs.py
CHANGED
@@ -1,8 +1,8 @@
+from collections.abc import Generator
 from pathlib import Path
-from typing import Generator
 
 
-def iter_files(root: Path, recursive: bool = True) -> Generator[Path
+def iter_files(root: Path, *, recursive: bool = True) -> Generator[Path]:
     for item in root.iterdir():
         if item.is_file():
             yield item
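iter_files now imports Generator from collections.abc (whose send and return type parameters default to None on newer Python versions, allowing the single-argument form) and makes recursive keyword-only. A quick usage sketch:

from pathlib import Path

from python3_commons.fs import iter_files

# recursive must now be passed by keyword:
for path in iter_files(Path('.'), recursive=False):
    print(path)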
python3_commons/helpers.py
CHANGED
@@ -1,10 +1,17 @@
+import functools
+import inspect
 import logging
 import shlex
 import threading
+import time
+from abc import ABCMeta
+from collections import defaultdict
+from collections.abc import Mapping, MutableMapping, Sequence
 from datetime import date, datetime, timedelta
 from decimal import ROUND_HALF_UP, Decimal
+from http.cookies import BaseCookie
 from json import dumps
-from typing import
+from typing import ClassVar, Literal
 from urllib.parse import urlencode
 
 from python3_commons.serializers.json import CustomJSONEncoder
@@ -12,24 +19,24 @@ from python3_commons.serializers.json import CustomJSONEncoder
 logger = logging.getLogger(__name__)
 
 
-class SingletonMeta(
+class SingletonMeta(ABCMeta):
     """
     A metaclass that creates a Singleton base class when called.
     """
 
-
-
+    __instances: ClassVar[MutableMapping] = {}
+    __locks: ClassVar[defaultdict] = defaultdict(threading.Lock)
 
     def __call__(cls, *args, **kwargs):
         try:
-            return cls.
+            return cls.__instances[cls]
         except KeyError:
-            with cls.
+            with cls.__locks[cls]:
                 try:
-                    return cls.
+                    return cls.__instances[cls]
                 except KeyError:
-                    instance = super(
-                    cls.
+                    instance = super().__call__(*args, **kwargs)
+                    cls.__instances[cls] = instance
 
         return instance
 
@@ -56,13 +63,14 @@ def date_range(start_date, end_date):
 def tries(times):
     def func_wrapper(f):
         async def wrapper(*args, **kwargs):
-            for
+            for _time in range(times if times > 0 else 1):
                 # noinspection PyBroadException
                 try:
                     return await f(*args, **kwargs)
-                except Exception
-                    if
-                        raise
+                except Exception:
+                    if _time >= times:
+                        raise
+            return None
 
         return wrapper
 
@@ -81,6 +89,7 @@ def request_to_curl(
     query: Mapping | None = None,
     method: Literal['get', 'post', 'put', 'patch', 'options', 'head', 'delete'] = 'get',
     headers: Mapping | None = None,
+    cookies: BaseCookie[str] | None = None,
     json: Mapping | Sequence | str | None = None,
     data: bytes | None = None,
 ) -> str:
@@ -95,6 +104,10 @@ def request_to_curl(
             curl_cmd.append('-H')
             curl_cmd.append(shlex.quote(header_line))
 
+    if cookies:
+        cookie_str = '; '.join(f'{k}={v.value}' for k, v in cookies.items())
+        curl_cmd.extend(['-b', shlex.quote(cookie_str)])
+
     if json:
         curl_cmd.append('-H')
         curl_cmd.append(shlex.quote('Content-Type: application/json'))
@@ -106,3 +119,21 @@ def request_to_curl(
         curl_cmd.append(shlex.quote(data.decode('utf-8')))
 
     return ' '.join(curl_cmd)
+
+
+def log_execution_time(func):
+    _logger = logging.getLogger(func.__module__)
+
+    @functools.wraps(func)
+    async def wrapper(*args, **kwargs):
+        start_time = time.monotonic()
+
+        try:
+            return await func(*args, **kwargs)
+        finally:
+            elapsed = time.monotonic() - start_time
+            _logger.info(f'{func.__module__}.{func.__name__} executed in {elapsed:.4f} seconds')
+
+    wrapper.__signature__ = inspect.signature(func)
+
+    return wrapper
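The new log_execution_time decorator times an async callable with time.monotonic, logs the duration under the wrapped function's own module logger even when the call raises (the finally block), and copies the original signature so introspection-driven frameworks still see the real parameters. A usage sketch; the coroutine below is invented:

import asyncio
import logging

from python3_commons.helpers import log_execution_time

logging.basicConfig(level=logging.INFO)

@log_execution_time
async def fetch_report(report_id: int) -> str:  # example coroutine
    await asyncio.sleep(0.1)
    return f'report-{report_id}'

# Logs something like: __main__.fetch_report executed in 0.1001 seconds
asyncio.run(fetch_report(1))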