python3-commons: 0.9.12 → 0.9.13 (py3-none-any.whl)
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- python3_commons/api_client.py +7 -2
- python3_commons/audit.py +122 -126
- python3_commons/helpers.py +10 -0
- {python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/METADATA +2 -2
- {python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/RECORD +9 -9
- {python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/WHEEL +0 -0
- {python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/licenses/AUTHORS.rst +0 -0
- {python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/licenses/LICENSE +0 -0
- {python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/top_level.txt +0 -0
python3_commons/api_client.py
CHANGED

@@ -8,6 +8,7 @@ from typing import AsyncGenerator, Literal, Mapping, Sequence
 from uuid import uuid4
 
 from aiohttp import ClientResponse, ClientSession, ClientTimeout, client_exceptions
+from aiohttp.abc import URL
 
 from python3_commons import audit
 from python3_commons.conf import s3_settings
@@ -57,12 +58,15 @@ async def request(
 
     if audit_name:
         curl_request = None
+        cookies = client.cookie_jar.filter_cookies(URL(base_url))
 
         if method == 'get':
             if headers or query:
-                curl_request = request_to_curl(url, query, method, headers)
+                curl_request = request_to_curl(url=url, query=query, method=method, headers=headers, cookies=cookies)
         else:
-            curl_request = request_to_curl(
+            curl_request = request_to_curl(
+                url=url, query=query, method=method, headers=headers, cookies=cookies, json=json, data=data
+            )
 
         if curl_request:
             await audit.write_audit_data(
@@ -70,6 +74,7 @@ async def request(
                 f'{date_path}/{audit_name}/{uri_path}/{method}_{timestamp}_{request_id}_request.txt',
                 curl_request.encode('utf-8'),
             )
+
     client_method = getattr(client, method)
 
     logger.debug(f'Requesting {method} {url}')
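The practical effect in `request()` is that the audited curl line now reflects the session's cookie jar. A minimal sketch of the new `filter_cookies` step (the URL and `session_id` cookie below are invented for illustration):

```python
import asyncio

from aiohttp import ClientSession
from yarl import URL  # aiohttp.abc re-exports this same URL type


async def main() -> None:
    async with ClientSession() as client:
        # Seed the jar by hand; in real use, Set-Cookie response headers do this.
        client.cookie_jar.update_cookies({'session_id': 'abc123'}, URL('https://example.com'))

        # The call the patched request() now makes before building the curl line:
        cookies = client.cookie_jar.filter_cookies(URL('https://example.com'))

        for name, morsel in cookies.items():
            print(f'{name}={morsel.value}')  # session_id=abc123


asyncio.run(main())
```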
python3_commons/audit.py
CHANGED

@@ -1,11 +1,7 @@
 import asyncio
 import io
 import logging
-import tarfile
-from bz2 import BZ2Compressor
-from collections import deque
-from datetime import UTC, datetime, timedelta
-from typing import AsyncGenerator
+from datetime import UTC, datetime
 from uuid import uuid4
 
 from lxml import etree
@@ -18,104 +14,127 @@ from python3_commons.conf import S3Settings, s3_settings
 logger = logging.getLogger(__name__)
 
 
-class GeneratedStream(io.BytesIO):
-    def __init__(self, generator: AsyncGenerator[bytes], *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.generator = generator
-
-    def read(self, size: int = -1):
-        if size < 0:
-            while True:
-                try:
-                    chunk = anext(self.generator)
-                except StopIteration:
-                    break
-                else:
-                    self.write(chunk)
-        else:
-            total_written_size = 0
-
-            while total_written_size < size:
-                try:
-                    chunk = anext(self.generator)
-                except StopIteration:
-                    break
-                else:
-                    total_written_size += self.write(chunk)
-
-        self.seek(0)
-
-        if chunk := super().read(size):
-            pos = self.tell()
-
-            buf = self.getbuffer()
-            unread_data_size = len(buf) - pos
-
-            if unread_data_size > 0:
-                buf[:unread_data_size] = buf[pos : pos + unread_data_size]
-
-            del buf
-
-            self.seek(0)
-            self.truncate(unread_data_size)
-
-        return chunk
-
-    def readable(self):
-        return True
-
-
-async def generate_archive(
-    objects: AsyncGenerator[tuple[str, datetime, bytes]], chunk_size: int = 4096
-) -> AsyncGenerator[bytes]:
-    buffer = deque()
-
-    with tarfile.open(fileobj=buffer, mode='w') as archive:
-        async for name, last_modified, content in objects:
-            logger.info(f'Adding {name} to archive')
-            info = tarfile.TarInfo(name)
-            info.size = len(content)
-            info.mtime = int(last_modified.timestamp())
-            archive.addfile(info, io.BytesIO(content))
-
-            buffer_length = buffer.tell()
-
-            while buffer_length >= chunk_size:
-                buffer.seek(0)
-                chunk = buffer.read(chunk_size)
-                chunk_len = len(chunk)
-
-                if not chunk:
-                    break
-
-                yield chunk
-
-                buffer.seek(0)
-                buffer.truncate(chunk_len)
-                buffer.seek(0, io.SEEK_END)
-                buffer_length = buffer.tell()
-
-    while True:
-        chunk = buffer.read(chunk_size)
-
-        if not chunk:
-            break
-
-        yield chunk
-
-    buffer.seek(0)
-    buffer.truncate(0)
-
-
-async def generate_bzip2(chunks: AsyncGenerator[bytes]) -> AsyncGenerator[bytes]:
-    compressor = BZ2Compressor()
-
-    async for chunk in chunks:
-        if compressed_chunk := compressor.compress(chunk):
-            yield compressed_chunk
-
-    if compressed_chunk := compressor.flush():
-        yield compressed_chunk
+# class GeneratedStream(io.BytesIO):
+#     def __init__(self, generator: AsyncGenerator[bytes], *args, **kwargs):
+#         super().__init__(*args, **kwargs)
+#         self.generator = generator
+#
+#     def read(self, size: int = -1):
+#         if size < 0:
+#             while True:
+#                 try:
+#                     chunk = anext(self.generator)
+#                 except StopIteration:
+#                     break
+#                 else:
+#                     self.write(chunk)
+#         else:
+#             total_written_size = 0
+#
+#             while total_written_size < size:
+#                 try:
+#                     chunk = anext(self.generator)
+#                 except StopIteration:
+#                     break
+#                 else:
+#                     total_written_size += self.write(chunk)
+#
+#         self.seek(0)
+#
+#         if chunk := super().read(size):
+#             pos = self.tell()
+#
+#             buf = self.getbuffer()
+#             unread_data_size = len(buf) - pos
+#
+#             if unread_data_size > 0:
+#                 buf[:unread_data_size] = buf[pos : pos + unread_data_size]
+#
+#             del buf
+#
+#             self.seek(0)
+#             self.truncate(unread_data_size)
+#
+#         return chunk
+#
+#     def readable(self):
+#         return True
+#
+#
+# async def generate_archive(
+#     objects: AsyncGenerator[tuple[str, datetime, bytes]], chunk_size: int = 4096
+# ) -> AsyncGenerator[bytes]:
+#     buffer = deque()
+#
+#     with tarfile.open(fileobj=buffer, mode='w') as archive:
+#         async for name, last_modified, content in objects:
+#             logger.info(f'Adding {name} to archive')
+#             info = tarfile.TarInfo(name)
+#             info.size = len(content)
+#             info.mtime = int(last_modified.timestamp())
+#             archive.addfile(info, io.BytesIO(content))
+#
+#             buffer_length = buffer.tell()
+#
+#             while buffer_length >= chunk_size:
+#                 buffer.seek(0)
+#                 chunk = buffer.read(chunk_size)
+#                 chunk_len = len(chunk)
+#
+#                 if not chunk:
+#                     break
+#
+#                 yield chunk
+#
+#                 buffer.seek(0)
+#                 buffer.truncate(chunk_len)
+#                 buffer.seek(0, io.SEEK_END)
+#                 buffer_length = buffer.tell()
+#
+#     while True:
+#         chunk = buffer.read(chunk_size)
+#
+#         if not chunk:
+#             break
+#
+#         yield chunk
+#
+#     buffer.seek(0)
+#     buffer.truncate(0)
+#
+#
+# async def generate_bzip2(chunks: AsyncGenerator[bytes]) -> AsyncGenerator[bytes]:
+#     compressor = BZ2Compressor()
+#
+#     async for chunk in chunks:
+#         if compressed_chunk := compressor.compress(chunk):
+#             yield compressed_chunk
+#
+#     if compressed_chunk := compressor.flush():
+#         yield compressed_chunk
+#
+
+# async def archive_audit_data(root_path: str = 'audit'):
+#     now = datetime.now(tz=UTC) - timedelta(days=1)
+#     year = now.year
+#     month = now.month
+#     day = now.day
+#     bucket_name = s3_settings.s3_bucket
+#     date_path = object_storage.get_absolute_path(f'{root_path}/{year}/{month:02}/{day:02}')
+#
+#     if objects := object_storage.get_objects(bucket_name, date_path, recursive=True):
+#         logger.info(f'Compacting files in: {date_path}')
+#
+#         generator = generate_archive(objects, chunk_size=900_000)
+#         bzip2_generator = generate_bzip2(generator)
+#         archive_stream = GeneratedStream(bzip2_generator)
+#
+#         archive_path = object_storage.get_absolute_path(f'audit/.archive/{year}_{month:02}_{day:02}.tar.bz2')
+#         await object_storage.put_object(bucket_name, archive_path, archive_stream, -1, part_size=5 * 1024 * 1024)
+#
+#         if errors := await object_storage.remove_objects(bucket_name, date_path):
+#             for error in errors:
+#                 logger.error(f'Failed to delete object in {bucket_name=}: {error}')
 
 
 async def write_audit_data(settings: S3Settings, key: str, data: bytes):
@@ -132,29 +151,6 @@ async def write_audit_data(settings: S3Settings, key: str, data: bytes):
         logger.debug(f'S3 is not configured, not storing object in storage: {key}')
 
 
-async def archive_audit_data(root_path: str = 'audit'):
-    now = datetime.now(tz=UTC) - timedelta(days=1)
-    year = now.year
-    month = now.month
-    day = now.day
-    bucket_name = s3_settings.s3_bucket
-    date_path = object_storage.get_absolute_path(f'{root_path}/{year}/{month:02}/{day:02}')
-
-    if objects := object_storage.get_objects(bucket_name, date_path, recursive=True):
-        logger.info(f'Compacting files in: {date_path}')
-
-        generator = generate_archive(objects, chunk_size=900_000)
-        bzip2_generator = generate_bzip2(generator)
-        archive_stream = GeneratedStream(bzip2_generator)
-
-        archive_path = object_storage.get_absolute_path(f'audit/.archive/{year}_{month:02}_{day:02}.tar.bz2')
-        await object_storage.put_object(bucket_name, archive_path, archive_stream, -1, part_size=5 * 1024 * 1024)
-
-        if errors := await object_storage.remove_objects(bucket_name, date_path):
-            for error in errors:
-                logger.error(f'Failed to delete object in {bucket_name=}: {error}')
-
-
 class ZeepAuditPlugin(Plugin):
     def __init__(self, audit_name: str = 'zeep'):
         super().__init__()
python3_commons/helpers.py
CHANGED

@@ -8,6 +8,7 @@ from abc import ABCMeta
 from collections import defaultdict
 from datetime import date, datetime, timedelta
 from decimal import ROUND_HALF_UP, Decimal
+from http.cookies import BaseCookie, SimpleCookie
 from json import dumps
 from typing import Literal, Mapping, Sequence
 from urllib.parse import urlencode
@@ -86,6 +87,7 @@ def request_to_curl(
     query: Mapping | None = None,
     method: Literal['get', 'post', 'put', 'patch', 'options', 'head', 'delete'] = 'get',
     headers: Mapping | None = None,
+    cookies: BaseCookie[str] | None = None,
     json: Mapping | Sequence | str | None = None,
     data: bytes | None = None,
 ) -> str:
@@ -100,6 +102,14 @@ def request_to_curl(
             curl_cmd.append('-H')
             curl_cmd.append(shlex.quote(header_line))
 
+    if cookies:
+        if isinstance(cookies, SimpleCookie):
+            cookie_str = '; '.join(f'{morsel.key}={morsel.value}' for morsel in cookies.values())
+        else:
+            cookie_str = '; '.join(f'{k}={v}' for k, v in cookies.items())
+
+        curl_cmd.extend(['-b', shlex.quote(cookie_str)])
+
     if json:
         curl_cmd.append('-H')
         curl_cmd.append(shlex.quote('Content-Type: application/json'))
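Combined with the api_client.py change above, `request_to_curl` can now emit a replayable `-b` cookie flag. A usage sketch (URL, header, and cookie values are invented; the exact quoting of the output depends on `shlex.quote`):

```python
from http.cookies import SimpleCookie

from python3_commons.helpers import request_to_curl

cookies = SimpleCookie()
cookies['session_id'] = 'abc123'

curl_line = request_to_curl(
    url='https://api.example.com/v1/items',
    query={'page': 1},
    method='get',
    headers={'Accept': 'application/json'},
    cookies=cookies,
)

# Expect something along the lines of:
#   curl ... -H 'Accept: application/json' -b 'session_id=abc123' ...
print(curl_line)
```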
{python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: python3-commons
-Version: 0.9.12
+Version: 0.9.13
 Summary: Re-usable Python3 code
 Author-email: Oleg Korsak <kamikaze.is.waiting.you@gmail.com>
 License-Expression: GPL-3.0
@@ -12,7 +12,7 @@ Requires-Python: ==3.13.*
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 License-File: AUTHORS.rst
-Requires-Dist: aiobotocore~=2.24.
+Requires-Dist: aiobotocore~=2.24.2
 Requires-Dist: aiohttp[speedups]~=3.12.15
 Requires-Dist: asyncpg~=0.30.0
 Requires-Dist: fastapi-users-db-sqlalchemy~=7.0.0
{python3_commons-0.9.12.dist-info → python3_commons-0.9.13.dist-info}/RECORD
CHANGED

@@ -1,11 +1,11 @@
 python3_commons/__init__.py,sha256=0KgaYU46H_IMKn-BuasoRN3C4Hi45KlkHHoPbU9cwiA,189
-python3_commons/api_client.py,sha256=
-python3_commons/audit.py,sha256
+python3_commons/api_client.py,sha256=yqwwtxakpLWjJtu4LVmj0BtZUuX0CyGk0lKFWu_uECU,4948
+python3_commons/audit.py,sha256=p4KRKt0ogkHhJSulg6j5GU-JKBBE903H2c0nuW16GtM,6083
 python3_commons/auth.py,sha256=fINE7zeq-oaEk2lwkdP1KOhfCpcIBaC8P9UzXQI37J0,2922
 python3_commons/cache.py,sha256=lf27LTD4Z9Iqi5GaK8jH8UC0cL9sHH8wicZ88YDp6Mg,7725
 python3_commons/conf.py,sha256=DYFA2_n7W40MBbpaNWv4iTWh7-GPsGU6Ilygz32tHhs,2397
 python3_commons/fs.py,sha256=wfLjybXndwLqNlOxTpm_HRJnuTcC4wbrHEOaEeCo9Wc,337
-python3_commons/helpers.py,sha256=
+python3_commons/helpers.py,sha256=PoaTYzygtIqDhrukyvqN_Fz_c2Fm5EAA6t8CPx5rwfk,4101
 python3_commons/object_storage.py,sha256=4-nWfcYF9Uj6bx3aVM6a4P5Ac2fFCTEaDfejDzn63Qk,6441
 python3_commons/permissions.py,sha256=bhjTp-tq-oaTGFMHNnSBlcVX5XQCTL0nWcu6SdPEAB4,1555
 python3_commons/db/__init__.py,sha256=5nArsGm17e-pelpOwAeBKy2n_Py20XqklZsNgkcJ-DQ,2947
@@ -21,9 +21,9 @@ python3_commons/serializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 python3_commons/serializers/json.py,sha256=91UaXLGKGj0yPyrnuMeNrkG2GuPUgcgAsmIokUgEwpU,808
 python3_commons/serializers/msgpack.py,sha256=WrvaPE187shSK8zkH4UHHMimEZNMv9RaDSwsBE2HlCw,1269
 python3_commons/serializers/msgspec.py,sha256=0AliXlEl5sewi0UENjI8St5ZScXE5DNRERKzqWKy2Ps,2674
-python3_commons-0.9.
-python3_commons-0.9.
-python3_commons-0.9.
-python3_commons-0.9.
-python3_commons-0.9.
-python3_commons-0.9.
+python3_commons-0.9.13.dist-info/licenses/AUTHORS.rst,sha256=3R9JnfjfjH5RoPWOeqKFJgxVShSSfzQPIrEr1nxIo9Q,90
+python3_commons-0.9.13.dist-info/licenses/LICENSE,sha256=xxILuojHm4fKQOrMHPSslbyy6WuKAN2RiG74HbrYfzM,34575
+python3_commons-0.9.13.dist-info/METADATA,sha256=noOYsttBcdoGBTps8zqTjZomvxGq28CgmgF4ZYevllE,1134
+python3_commons-0.9.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+python3_commons-0.9.13.dist-info/top_level.txt,sha256=lJI6sCBf68eUHzupCnn2dzG10lH3jJKTWM_hrN1cQ7M,16
+python3_commons-0.9.13.dist-info/RECORD,,
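The RECORD hashes change for exactly the three edited modules. Each RECORD line has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64 SHA-256 of the file with `=` padding stripped (per the wheel spec); a sketch of how one line is derived:

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Build a wheel RECORD line for a file on disk."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=').decode()
    return f'{path},sha256={digest},{len(data)}'


print(record_entry('python3_commons/helpers.py'))
```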