python3-commons 0.8.36__py3-none-any.whl → 0.8.38__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of python3-commons might be problematic.
- python3_commons/db/models/__init__.py +2 -8
- python3_commons/helpers.py +7 -9
- python3_commons/object_storage.py +70 -126
- {python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/METADATA +6 -6
- {python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/RECORD +9 -9
- {python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/WHEEL +0 -0
- {python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/licenses/AUTHORS.rst +0 -0
- {python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/licenses/LICENSE +0 -0
- {python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/top_level.txt +0 -0
python3_commons/db/models/__init__.py CHANGED
@@ -1,8 +1,2 @@
-from python3_commons.db.models.auth import ApiKey
-from python3_commons.db.models.auth import User as User
-from python3_commons.db.models.auth import UserGroup as UserGroup
-from python3_commons.db.models.rbac import RBACApiKeyRole as RBACApiKeyRole
-from python3_commons.db.models.rbac import RBACPermission as RBACPermission
-from python3_commons.db.models.rbac import RBACRole as RBACRole
-from python3_commons.db.models.rbac import RBACRolePermission as RBACRolePermission
-from python3_commons.db.models.rbac import RBACUserRole as RBACUserRole
+from python3_commons.db.models.auth import ApiKey, User, UserGroup
+from python3_commons.db.models.rbac import RBACApiKeyRole, RBACPermission, RBACRole, RBACRolePermission, RBACUserRole
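This change only consolidates the re-exports; the same model classes remain importable from the package root. A quick sketch (illustrative only, class names taken from the diff):

    # Both import styles resolve to the same classes after the change.
    from python3_commons.db.models import ApiKey, User, UserGroup, RBACRole
    from python3_commons.db.models.auth import User as AuthUser

    assert User is AuthUser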
python3_commons/helpers.py CHANGED
@@ -1,8 +1,6 @@
 import logging
 import shlex
 import threading
-from abc import ABCMeta
-from collections import defaultdict
 from datetime import date, datetime, timedelta
 from decimal import ROUND_HALF_UP, Decimal
 from json import dumps
@@ -14,24 +12,24 @@ from python3_commons.serializers.json import CustomJSONEncoder
 logger = logging.getLogger(__name__)
 
 
-class SingletonMeta(
+class SingletonMeta(type):
     """
     A metaclass that creates a Singleton base class when called.
     """
 
-
-
+    _instances = {}
+    _lock = threading.Lock()
 
     def __call__(cls, *args, **kwargs):
         try:
-            return cls.
+            return cls._instances[cls]
         except KeyError:
-            with cls.
+            with cls._lock:
                 try:
-                    return cls.
+                    return cls._instances[cls]
                 except KeyError:
                     instance = super(SingletonMeta, cls).__call__(*args, **kwargs)
-                    cls.
+                    cls._instances[cls] = instance
 
         return instance
 
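SingletonMeta is now a plain type subclass that caches one instance per class in _instances and guards creation with _lock (double-checked via the nested try/except KeyError). A minimal usage sketch; the Config class here is hypothetical and only demonstrates the behaviour:

    from python3_commons.helpers import SingletonMeta

    class Config(metaclass=SingletonMeta):  # hypothetical example class
        def __init__(self, name: str):
            self.name = name

    a = Config('first')
    b = Config('second')  # constructor is not re-run; the cached instance is returned
    assert a is b and b.name == 'first'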
python3_commons/object_storage.py CHANGED
@@ -1,16 +1,12 @@
-from __future__ import annotations
-
 import io
 import logging
-from contextlib import
+from contextlib import contextmanager
 from datetime import datetime
-from typing import
-
-import aiobotocore.session
-from aiobotocore.response import StreamingBody
+from typing import Generator, Iterable
 
-
-
+from minio import Minio
+from minio.datatypes import Object
+from minio.deleteobjects import DeleteError, DeleteObject
 
 from python3_commons.conf import S3Settings, s3_settings
 from python3_commons.helpers import SingletonMeta
@@ -20,23 +16,20 @@ logger = logging.getLogger(__name__)
 
 class ObjectStorage(metaclass=SingletonMeta):
     def __init__(self, settings: S3Settings):
-        if not
-            raise ValueError('s3_endpoint_url must be set')
+        if not s3_settings.s3_endpoint_url:
+            raise ValueError('s3_settings.s3_endpoint_url must be set')
 
-        self.
-
-
-
-
-
-
-
-        }
+        self._client = Minio(
+            settings.s3_endpoint_url,
+            region=settings.s3_region_name,
+            access_key=settings.s3_access_key_id.get_secret_value(),
+            secret_key=settings.s3_secret_access_key.get_secret_value(),
+            secure=settings.s3_secure,
+            cert_check=settings.s3_cert_verify,
+        )
 
-
-
-        async with self._session.create_client('s3', **self._config) as client:
-            yield client
+    def get_client(self) -> Minio:
+        return self._client
 
 
 def get_absolute_path(path: str) -> str:
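With the move from aiobotocore to minio, ObjectStorage now builds a synchronous Minio client once (the class is a singleton via SingletonMeta), and get_client() simply returns it instead of yielding an async client. A rough usage sketch; the bucket name and object path are placeholders, and s3_settings must carry a valid endpoint:

    import io

    from python3_commons.conf import s3_settings
    from python3_commons.object_storage import ObjectStorage

    client = ObjectStorage(s3_settings).get_client()  # minio.Minio instance, no async context manager

    if not client.bucket_exists('example-bucket'):    # placeholder bucket name
        client.make_bucket('example-bucket')

    client.put_object('example-bucket', 'greetings/hello.txt', io.BytesIO(b'hello'), length=5)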
@@ -49,135 +42,86 @@ def get_absolute_path(path: str) -> str:
     return path
 
 
-
-
-
-    async with storage.get_client() as s3_client:
-        try:
-            data.seek(0)
-
-            await s3_client.put_object(Bucket=bucket_name, Key=path, Body=data, ContentLength=length)
-
-            logger.debug(f'Stored object into object storage: {bucket_name}:{path}')
-
-            return f's3://{bucket_name}/{path}'
+def put_object(bucket_name: str, path: str, data: io.BytesIO, length: int, part_size: int = 0) -> str | None:
+    if s3_client := ObjectStorage(s3_settings).get_client():
+        result = s3_client.put_object(bucket_name, path, data, length, part_size=part_size)
 
-
-            logger.error(f'Failed to put object to object storage: {bucket_name}:{path}', exc_info=e)
+        logger.debug(f'Stored object into object storage: {bucket_name}:{path}')
 
-
+        return result.location
+    else:
+        logger.warning('No S3 client available, skipping object put')
 
 
-@
-
-
-
-    async with storage.get_client() as s3_client:
+@contextmanager
+def get_object_stream(bucket_name: str, path: str):
+    if s3_client := ObjectStorage(s3_settings).get_client():
         logger.debug(f'Getting object from object storage: {bucket_name}:{path}')
 
         try:
-            response =
-
-            async with response['Body'] as stream:
-                yield stream
+            response = s3_client.get_object(bucket_name, path)
         except Exception as e:
             logger.debug(f'Failed getting object from object storage: {bucket_name}:{path}', exc_info=e)
 
             raise
 
+        yield response
 
-
-
-
+        response.close()
+        response.release_conn()
+    else:
+        logger.warning('No S3 client available, skipping object put')
 
-        logger.debug(f'Loaded object from object storage: {bucket_name}:{path}')
-
-        return body
-
-
-async def list_objects(bucket_name: str, prefix: str, recursive: bool = True) -> AsyncGenerator[Mapping, None]:
-    storage = ObjectStorage(s3_settings)
 
-
-
+def get_object(bucket_name: str, path: str) -> bytes:
+    with get_object_stream(bucket_name, path) as stream:
+        body = stream.read()
 
-
+    logger.debug(f'Loaded object from object storage: {bucket_name}:{path}')
 
-
-            if 'Contents' in page:
-                for obj in page['Contents']:
-                    yield dict(obj)
+    return body
 
 
-
-
-) -> AsyncGenerator[tuple[str, datetime, StreamingBody], None]:
-    async for obj in list_objects(bucket_name, path, recursive):
-        object_name = obj['Key']
-        last_modified = obj['LastModified']
+def list_objects(bucket_name: str, prefix: str, recursive: bool = True) -> Generator[Object, None, None]:
+    s3_client = ObjectStorage(s3_settings).get_client()
 
-
-        yield object_name, last_modified, stream
+    yield from s3_client.list_objects(bucket_name, prefix=prefix, recursive=recursive)
 
 
-
+def get_objects(
     bucket_name: str, path: str, recursive: bool = True
-) ->
-
-
-
-        yield object_name, last_modified, data
+) -> Generator[tuple[str, datetime, bytes], None, None]:
+    for obj in list_objects(bucket_name, path, recursive):
+        object_name = obj.object_name
 
-
-
-    storage = ObjectStorage(s3_settings)
-
-    async with storage.get_client() as s3_client:
-        try:
-            await s3_client.delete_object(Bucket=bucket_name, Key=object_name)
-            logger.debug(f'Removed object from object storage: {bucket_name}:{object_name}')
-        except Exception as e:
-            logger.error(f'Failed to remove object from object storage: {bucket_name}:{object_name}', exc_info=e)
-
-            raise
-
-
-async def remove_objects(
-    bucket_name: str, prefix: str = None, object_names: Iterable[str] = None
-) -> Sequence[Mapping] | None:
-    storage = ObjectStorage(s3_settings)
-
-    async with storage.get_client() as s3_client:
-        objects_to_delete = []
-
-        if prefix:
-            async for obj in list_objects(bucket_name, prefix, recursive=True):
-                objects_to_delete.append({'Key': obj['Key']})
-        elif object_names:
-            objects_to_delete = [{'Key': name} for name in object_names]
+        if obj.size:
+            data = get_object(bucket_name, object_name)
         else:
-
-
-        if not objects_to_delete:
-            return None
-
-        try:
-            errors = []
-            # S3 delete_objects can handle up to 1000 objects at once
-            chunk_size = 1000
+            data = b''
 
-
-            chunk = objects_to_delete[i : i + chunk_size]
+        yield object_name, obj.last_modified, data
 
-            response = await s3_client.delete_objects(Bucket=bucket_name, Delete={'Objects': chunk})
 
-
-
+def remove_object(bucket_name: str, object_name: str):
+    s3_client = ObjectStorage(s3_settings).get_client()
+    s3_client.remove_object(bucket_name, object_name)
 
-    logger.debug(f'Removed {len(objects_to_delete)} objects from object storage: {bucket_name}')
 
-
-
-
-
-
+def remove_objects(
+    bucket_name: str, prefix: str = None, object_names: Iterable[str] = None
+) -> Iterable[DeleteError] | None:
+    s3_client = ObjectStorage(s3_settings).get_client()
+
+    if prefix:
+        delete_object_list = map(
+            lambda obj: DeleteObject(obj.object_name),
+            s3_client.list_objects(bucket_name, prefix=prefix, recursive=True),
+        )
+    elif object_names:
+        delete_object_list = map(DeleteObject, object_names)
+    else:
+        return None
+
+    errors = s3_client.remove_objects(bucket_name, delete_object_list)
+
+    return errors
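The module-level helpers are now synchronous wrappers around that shared client. A rough usage sketch of the new API; the bucket, paths, and payload below are placeholders:

    import io

    from python3_commons import object_storage

    # Store an object; returns the result location, or None when no client is configured.
    location = object_storage.put_object('example-bucket', 'reports/2024.csv', io.BytesIO(b'a,b\n1,2\n'), length=8)

    # Read it back as bytes.
    payload = object_storage.get_object('example-bucket', 'reports/2024.csv')

    # Iterate over (object_name, last_modified, data) tuples under a prefix.
    for name, last_modified, data in object_storage.get_objects('example-bucket', 'reports/'):
        print(name, last_modified, len(data))

    # Delete everything under the prefix; returns a lazy iterable of DeleteError, or None if nothing was selected.
    errors = object_storage.remove_objects('example-bucket', prefix='reports/')

    if errors is not None:
        for error in errors:  # iterating drives the deletion and surfaces any per-object errors
            print(error)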
{python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: python3-commons
-Version: 0.8.36
+Version: 0.8.38
 Summary: Re-usable Python3 code
 Author-email: Oleg Korsak <kamikaze.is.waiting.you@gmail.com>
 License-Expression: GPL-3.0
@@ -8,20 +8,20 @@ Project-URL: Homepage, https://github.com/kamikaze/python3-commons
 Project-URL: Documentation, https://github.com/kamikaze/python3-commons/wiki
 Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python
-Requires-Python: ==3.
+Requires-Python: ==3.14.*
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 License-File: AUTHORS.rst
-Requires-Dist:
-Requires-Dist: aiohttp[speedups]~=3.12.14
+Requires-Dist: aiohttp[speedups]~=3.13.0
 Requires-Dist: asyncpg~=0.30.0
 Requires-Dist: fastapi-users-db-sqlalchemy~=7.0.0
 Requires-Dist: fastapi-users[sqlalchemy]~=14.0.1
 Requires-Dist: lxml~=6.0.0
+Requires-Dist: minio~=7.2.15
 Requires-Dist: msgpack~=1.1.1
 Requires-Dist: msgspec~=0.19.0
-Requires-Dist: pydantic[email]~=2.
-Requires-Dist: pydantic-settings~=2.
+Requires-Dist: pydantic[email]~=2.12.0
+Requires-Dist: pydantic-settings~=2.11.0
 Requires-Dist: python-jose==3.5.0
 Requires-Dist: SQLAlchemy[asyncio]~=2.0.40
 Requires-Dist: valkey[libvalkey]~=6.1.0
{python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/RECORD CHANGED
@@ -5,12 +5,12 @@ python3_commons/auth.py,sha256=vVaiJ5MHUMSbiLF6TIxe4dqVPhBlLttf940jjODL3a4,2934
 python3_commons/cache.py,sha256=lf27LTD4Z9Iqi5GaK8jH8UC0cL9sHH8wicZ88YDp6Mg,7725
 python3_commons/conf.py,sha256=JenspXyTqTlYeEb8X9Njfje1AiLCee23nm0k8zhYLfs,2283
 python3_commons/fs.py,sha256=wfLjybXndwLqNlOxTpm_HRJnuTcC4wbrHEOaEeCo9Wc,337
-python3_commons/helpers.py,sha256=
-python3_commons/object_storage.py,sha256=
+python3_commons/helpers.py,sha256=ygnTv3KYoiibOFIi99-g8EXaETKHLt5i3jvykGrv6aE,3079
+python3_commons/object_storage.py,sha256=nQsXca0zzzeSY35qhnjE6pLfkLuxn7jDul0-hw0jizE,3985
 python3_commons/permissions.py,sha256=bhjTp-tq-oaTGFMHNnSBlcVX5XQCTL0nWcu6SdPEAB4,1555
 python3_commons/db/__init__.py,sha256=5nArsGm17e-pelpOwAeBKy2n_Py20XqklZsNgkcJ-DQ,2947
 python3_commons/db/helpers.py,sha256=PY0h08aLiGx-J54wmP3GHPCgGCcLd60rayAUnR3aWdI,1742
-python3_commons/db/models/__init__.py,sha256=
+python3_commons/db/models/__init__.py,sha256=Utr5AJf1FwcrxNtdesgjq92WMK4zpK4VL_8z1JEkJw0,185
 python3_commons/db/models/auth.py,sha256=dmyD3BX7LVBgKiepPN-bxlY6J3PhcmUfVdQwhNR45fU,1187
 python3_commons/db/models/common.py,sha256=IwrVfMQhAkPqrPmPRkG9CAB0KRa6YG_0Mogs1aafAoA,1537
 python3_commons/db/models/rbac.py,sha256=7NNTUbS8whuPUHpm4oba_UWDdNiJlHrm8HBO7oGtk64,3185
@@ -21,9 +21,9 @@ python3_commons/serializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 python3_commons/serializers/json.py,sha256=91UaXLGKGj0yPyrnuMeNrkG2GuPUgcgAsmIokUgEwpU,808
 python3_commons/serializers/msgpack.py,sha256=WrvaPE187shSK8zkH4UHHMimEZNMv9RaDSwsBE2HlCw,1269
 python3_commons/serializers/msgspec.py,sha256=0AliXlEl5sewi0UENjI8St5ZScXE5DNRERKzqWKy2Ps,2674
-python3_commons-0.8.
-python3_commons-0.8.
-python3_commons-0.8.
-python3_commons-0.8.
-python3_commons-0.8.
-python3_commons-0.8.
+python3_commons-0.8.38.dist-info/licenses/AUTHORS.rst,sha256=3R9JnfjfjH5RoPWOeqKFJgxVShSSfzQPIrEr1nxIo9Q,90
+python3_commons-0.8.38.dist-info/licenses/LICENSE,sha256=xxILuojHm4fKQOrMHPSslbyy6WuKAN2RiG74HbrYfzM,34575
+python3_commons-0.8.38.dist-info/METADATA,sha256=L6V4wFohHTZLYLVs3HL61LUN4mrI6Khx6Hb4DHseSzc,1127
+python3_commons-0.8.38.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+python3_commons-0.8.38.dist-info/top_level.txt,sha256=lJI6sCBf68eUHzupCnn2dzG10lH3jJKTWM_hrN1cQ7M,16
+python3_commons-0.8.38.dist-info/RECORD,,
{python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/WHEEL: File without changes
{python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/licenses/AUTHORS.rst: File without changes
{python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/licenses/LICENSE: File without changes
{python3_commons-0.8.36.dist-info → python3_commons-0.8.38.dist-info}/top_level.txt: File without changes