python3-commons 0.8.35__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package files exactly as they appear in the public registry.
python3_commons/audit.py CHANGED
@@ -5,23 +5,21 @@ import tarfile
 from bz2 import BZ2Compressor
 from collections import deque
 from datetime import UTC, datetime, timedelta
-from typing import Generator, Iterable
+from typing import AsyncGenerator
 from uuid import uuid4
 
 from lxml import etree
-from minio import S3Error
 from zeep.plugins import Plugin
 from zeep.wsdl.definitions import AbstractOperation
 
 from python3_commons import object_storage
 from python3_commons.conf import S3Settings, s3_settings
-from python3_commons.object_storage import ObjectStorage
 
 logger = logging.getLogger(__name__)
 
 
 class GeneratedStream(io.BytesIO):
-    def __init__(self, generator: Generator[bytes, None, None], *args, **kwargs):
+    def __init__(self, generator: AsyncGenerator[bytes], *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.generator = generator
 
@@ -29,7 +27,7 @@ class GeneratedStream(io.BytesIO):
         if size < 0:
             while True:
                 try:
-                    chunk = next(self.generator)
+                    chunk = anext(self.generator)
                 except StopIteration:
                     break
                 else:
@@ -39,7 +37,7 @@ class GeneratedStream(io.BytesIO):
 
         while total_written_size < size:
             try:
-                chunk = next(self.generator)
+                chunk = anext(self.generator)
             except StopIteration:
                 break
             else:
@@ -67,17 +65,17 @@ class GeneratedStream(io.BytesIO):
         return True
 
 
-def generate_archive(
-    objects: Iterable[tuple[str, datetime, bytes]], chunk_size: int = 4096
-) -> Generator[bytes, None, None]:
+async def generate_archive(
+    objects: AsyncGenerator[tuple[str, datetime, bytes]], chunk_size: int = 4096
+) -> AsyncGenerator[bytes]:
     buffer = deque()
 
     with tarfile.open(fileobj=buffer, mode='w') as archive:
-        for name, last_modified, content in objects:
+        async for name, last_modified, content in objects:
             logger.info(f'Adding {name} to archive')
             info = tarfile.TarInfo(name)
             info.size = len(content)
-            info.mtime = last_modified.timestamp()
+            info.mtime = int(last_modified.timestamp())
             archive.addfile(info, io.BytesIO(content))
 
             buffer_length = buffer.tell()
@@ -109,10 +107,10 @@ def generate_archive(
             buffer.truncate(0)
 
 
-def generate_bzip2(chunks: Generator[bytes, None, None]) -> Generator[bytes, None, None]:
+async def generate_bzip2(chunks: AsyncGenerator[bytes]) -> AsyncGenerator[bytes]:
    compressor = BZ2Compressor()
 
-    for chunk in chunks:
+    async for chunk in chunks:
         if compressed_chunk := compressor.compress(chunk):
             yield compressed_chunk
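Both stages of this pipeline are now async generators: tar chunks from generate_archive() feed straight into generate_bzip2(), and the result is consumed with `async for`. Note that `anext()` returns an awaitable rather than a bytes chunk, and an exhausted async generator raises StopAsyncIteration, not StopIteration. A minimal sketch of chaining the two stages, assuming a hypothetical in-memory source of objects:

    import asyncio
    from datetime import UTC, datetime

    from python3_commons.audit import generate_archive, generate_bzip2

    async def objects():
        # Hypothetical source of (name, last_modified, content) tuples.
        yield 'requests/example.xml', datetime.now(tz=UTC), b'<audit/>'

    async def main() -> bytes:
        compressed = bytearray()

        # generate_archive() yields tar chunks; generate_bzip2() compresses them.
        async for chunk in generate_bzip2(generate_archive(objects())):
            compressed.extend(chunk)

        return bytes(compressed)

    asyncio.run(main())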
 
@@ -120,14 +118,13 @@ def generate_bzip2(chunks: Generator[bytes, None, Non
         yield compressed_chunk
 
 
-def write_audit_data_sync(settings: S3Settings, key: str, data: bytes):
+async def write_audit_data(settings: S3Settings, key: str, data: bytes):
     if settings.s3_secret_access_key:
         try:
-            client = ObjectStorage(settings).get_client()
             absolute_path = object_storage.get_absolute_path(f'audit/{key}')
 
-            client.put_object(settings.s3_bucket, absolute_path, io.BytesIO(data), len(data))
-        except S3Error as e:
+            await object_storage.put_object(settings.s3_bucket, absolute_path, io.BytesIO(data), len(data))
+        except Exception as e:
             logger.error(f'Failed storing object in storage: {e}')
         else:
             logger.debug(f'Stored object in storage: {key}')
@@ -135,10 +132,6 @@ def write_audit_data_sync(settings: S3Settings, key: str, data: bytes):
         logger.debug(f'S3 is not configured, not storing object in storage: {key}')
 
 
-async def write_audit_data(settings: S3Settings, key: str, data: bytes):
-    await asyncio.to_thread(write_audit_data_sync, settings, key, data)
-
-
 async def archive_audit_data(root_path: str = 'audit'):
     now = datetime.now(tz=UTC) - timedelta(days=1)
     year = now.year
@@ -155,9 +148,9 @@ async def archive_audit_data(root_path: str = 'audit'):
         archive_stream = GeneratedStream(bzip2_generator)
 
         archive_path = object_storage.get_absolute_path(f'audit/.archive/{year}_{month:02}_{day:02}.tar.bz2')
-        object_storage.put_object(bucket_name, archive_path, archive_stream, -1, part_size=5 * 1024 * 1024)
+        await object_storage.put_object(bucket_name, archive_path, archive_stream, -1, part_size=5 * 1024 * 1024)
 
-        if errors := object_storage.remove_objects(bucket_name, date_path):
+        if errors := await object_storage.remove_objects(bucket_name, date_path):
             for error in errors:
                 logger.error(f'Failed to delete object in {bucket_name=}: {error}')
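write_audit_data() now awaits the object storage call directly instead of delegating to a worker thread via asyncio.to_thread(), and storage failures are caught broadly and logged rather than re-raised. A usage sketch, with a hypothetical audit key:

    import asyncio

    from python3_commons.audit import write_audit_data
    from python3_commons.conf import s3_settings

    async def main():
        # The key is hypothetical; it is stored under the 'audit/' prefix
        # resolved by object_storage.get_absolute_path().
        await write_audit_data(s3_settings, '2025/01/01/request.xml', b'<request/>')

    asyncio.run(main())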
 
python3_commons/conf.py CHANGED
@@ -64,8 +64,8 @@ class DBSettings(BaseSettings):
 class S3Settings(BaseSettings):
     s3_endpoint_url: str | None = None
     s3_region_name: str | None = None
-    s3_access_key_id: SecretStr = ''
-    s3_secret_access_key: SecretStr = ''
+    s3_access_key_id: SecretStr = SecretStr('')
+    s3_secret_access_key: SecretStr = SecretStr('')
     s3_secure: bool = True
     s3_bucket: str | None = None
     s3_bucket_root: str | None = None
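The wrapped defaults matter because pydantic v2 does not validate or coerce field defaults unless validate_default is enabled: with `s3_access_key_id: SecretStr = ''`, the attribute would hold a plain str at runtime and `.get_secret_value()` would fail. A minimal sketch of the behaviour the typed default guarantees (the Example class is illustrative):

    from pydantic import SecretStr
    from pydantic_settings import BaseSettings

    class Example(BaseSettings):
        # pydantic v2 leaves defaults unvalidated, so the default must
        # already be a SecretStr rather than a bare ''.
        token: SecretStr = SecretStr('')

    assert Example().token.get_secret_value() == ''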
python3_commons/db/models/__init__.py CHANGED
@@ -1,2 +1,8 @@
-from python3_commons.db.models.auth import ApiKey, User, UserGroup
-from python3_commons.db.models.rbac import RBACApiKeyRole, RBACPermission, RBACRole, RBACRolePermission, RBACUserRole
+from python3_commons.db.models.auth import ApiKey as ApiKey
+from python3_commons.db.models.auth import User as User
+from python3_commons.db.models.auth import UserGroup as UserGroup
+from python3_commons.db.models.rbac import RBACApiKeyRole as RBACApiKeyRole
+from python3_commons.db.models.rbac import RBACPermission as RBACPermission
+from python3_commons.db.models.rbac import RBACRole as RBACRole
+from python3_commons.db.models.rbac import RBACRolePermission as RBACRolePermission
+from python3_commons.db.models.rbac import RBACUserRole as RBACUserRole
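The `import X as X` spelling is the explicit re-export convention from PEP 484: under mypy's --no-implicit-reexport (enabled by --strict), names imported into a package __init__.py are only treated as part of its public API when re-exported this way or listed in __all__. Downstream imports such as the following then type-check cleanly:

    # Resolves against the explicit re-exports in python3_commons.db.models,
    # even under mypy --strict / --no-implicit-reexport.
    from python3_commons.db.models import RBACRole, User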
python3_commons/helpers.py CHANGED
@@ -1,6 +1,8 @@
 import logging
 import shlex
 import threading
+from abc import ABCMeta
+from collections import defaultdict
 from datetime import date, datetime, timedelta
 from decimal import ROUND_HALF_UP, Decimal
 from json import dumps
@@ -12,24 +14,24 @@ from python3_commons.serializers.json import CustomJSONEncoder
 logger = logging.getLogger(__name__)
 
 
-class SingletonMeta(type):
+class SingletonMeta(ABCMeta):
     """
     A metaclass that creates a Singleton base class when called.
     """
 
-    _instances = {}
-    _lock = threading.Lock()
+    __instances = {}
+    __locks = defaultdict(threading.Lock)
 
     def __call__(cls, *args, **kwargs):
         try:
-            return cls._instances[cls]
+            return cls.__instances[cls]
         except KeyError:
-            with cls._lock:
+            with cls.__locks[cls]:
                 try:
-                    return cls._instances[cls]
+                    return cls.__instances[cls]
                 except KeyError:
                     instance = super(SingletonMeta, cls).__call__(*args, **kwargs)
-                    cls._instances[cls] = instance
+                    cls.__instances[cls] = instance
 
                     return instance
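Basing SingletonMeta on ABCMeta lets a singleton class also inherit from abc.ABC, which raised a metaclass conflict while the base was plain `type`, and the defaultdict of per-class locks means unrelated singletons no longer contend on one global lock. A small sketch of the combination (BaseClient/Client are illustrative):

    from abc import ABC, abstractmethod

    from python3_commons.helpers import SingletonMeta

    # With SingletonMeta(type) this class definition raised
    # "metaclass conflict"; with ABCMeta as the base it works.
    class BaseClient(ABC, metaclass=SingletonMeta):
        @abstractmethod
        def ping(self) -> str: ...

    class Client(BaseClient):
        def ping(self) -> str:
            return 'pong'

    assert Client() is Client()  # one cached instance per class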
 
python3_commons/object_storage.py CHANGED
@@ -1,12 +1,16 @@
+from __future__ import annotations
+
 import io
 import logging
-from contextlib import contextmanager
+from contextlib import asynccontextmanager
 from datetime import datetime
-from typing import Generator, Iterable
+from typing import TYPE_CHECKING, AsyncGenerator, Iterable, Mapping, Sequence
+
+import aiobotocore.session
+from aiobotocore.response import StreamingBody
 
-from minio import Minio
-from minio.datatypes import Object
-from minio.deleteobjects import DeleteError, DeleteObject
+if TYPE_CHECKING:
+    from types_aiobotocore_s3.client import S3Client
 
 from python3_commons.conf import S3Settings, s3_settings
 from python3_commons.helpers import SingletonMeta
@@ -16,20 +20,23 @@ logger = logging.getLogger(__name__)
 
 class ObjectStorage(metaclass=SingletonMeta):
     def __init__(self, settings: S3Settings):
-        if not s3_settings.s3_endpoint_url:
-            raise ValueError('s3_settings.s3_endpoint_url must be set')
+        if not settings.s3_endpoint_url:
+            raise ValueError('s3_endpoint_url must be set')
 
-        self._client = Minio(
-            settings.s3_endpoint_url,
-            region=settings.s3_region_name,
-            access_key=settings.s3_access_key_id.get_secret_value(),
-            secret_key=settings.s3_secret_access_key.get_secret_value(),
-            secure=settings.s3_secure,
-            cert_check=settings.s3_cert_verify,
-        )
+        self._session = aiobotocore.session.get_session()
+        self._config = {
+            'endpoint_url': settings.s3_endpoint_url,
+            'region_name': settings.s3_region_name,
+            'aws_access_key_id': settings.s3_access_key_id.get_secret_value(),
+            'aws_secret_access_key': settings.s3_secret_access_key.get_secret_value(),
+            'use_ssl': settings.s3_secure,
+            'verify': settings.s3_cert_verify,
+        }
 
-    def get_client(self) -> Minio:
-        return self._client
+    @asynccontextmanager
+    async def get_client(self) -> AsyncGenerator[S3Client, None]:
+        async with self._session.create_client('s3', **self._config) as client:
+            yield client
 
 
 def get_absolute_path(path: str) -> str:
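get_client() changes from returning a long-lived Minio instance to an async context manager that opens a fresh aiobotocore S3 client per use and closes it on exit. A usage sketch (the bucket name is a placeholder, and s3_settings is assumed to be configured via the environment):

    import asyncio

    from python3_commons.conf import s3_settings
    from python3_commons.object_storage import ObjectStorage

    async def main():
        storage = ObjectStorage(s3_settings)  # singleton via SingletonMeta

        # Each `async with` creates and tears down its own client; only
        # the session and config are cached on the singleton.
        async with storage.get_client() as s3:
            await s3.head_bucket(Bucket='my-bucket')

    asyncio.run(main())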
@@ -42,86 +49,135 @@ def get_absolute_path(path: str) -> str:
     return path
 
 
-def put_object(bucket_name: str, path: str, data: io.BytesIO, length: int, part_size: int = 0) -> str | None:
-    if s3_client := ObjectStorage(s3_settings).get_client():
-        result = s3_client.put_object(bucket_name, path, data, length, part_size=part_size)
+async def put_object(bucket_name: str, path: str, data: io.BytesIO, length: int, part_size: int = 0) -> str | None:
+    storage = ObjectStorage(s3_settings)
+
+    async with storage.get_client() as s3_client:
+        try:
+            data.seek(0)
+
+            await s3_client.put_object(Bucket=bucket_name, Key=path, Body=data, ContentLength=length)
+
+            logger.debug(f'Stored object into object storage: {bucket_name}:{path}')
+
+            return f's3://{bucket_name}/{path}'
 
-        logger.debug(f'Stored object into object storage: {bucket_name}:{path}')
+        except Exception as e:
+            logger.error(f'Failed to put object to object storage: {bucket_name}:{path}', exc_info=e)
 
-        return result.location
-    else:
-        logger.warning('No S3 client available, skipping object put')
+    return None
 
 
-@contextmanager
-def get_object_stream(bucket_name: str, path: str):
-    if s3_client := ObjectStorage(s3_settings).get_client():
+@asynccontextmanager
+async def get_object_stream(bucket_name: str, path: str) -> AsyncGenerator[StreamingBody]:
+    storage = ObjectStorage(s3_settings)
+
+    async with storage.get_client() as s3_client:
         logger.debug(f'Getting object from object storage: {bucket_name}:{path}')
 
         try:
-            response = s3_client.get_object(bucket_name, path)
+            response = await s3_client.get_object(Bucket=bucket_name, Key=path)
+
+            async with response['Body'] as stream:
+                yield stream
         except Exception as e:
             logger.debug(f'Failed getting object from object storage: {bucket_name}:{path}', exc_info=e)
 
             raise
 
-        yield response
 
-        response.close()
-        response.release_conn()
-    else:
-        logger.warning('No S3 client available, skipping object put')
-
-
-def get_object(bucket_name: str, path: str) -> bytes:
-    with get_object_stream(bucket_name, path) as stream:
-        body = stream.read()
+async def get_object(bucket_name: str, path: str) -> bytes:
+    async with get_object_stream(bucket_name, path) as stream:
+        body = await stream.read()
 
     logger.debug(f'Loaded object from object storage: {bucket_name}:{path}')
 
     return body
 
 
-def list_objects(bucket_name: str, prefix: str, recursive: bool = True) -> Generator[Object, None, None]:
-    s3_client = ObjectStorage(s3_settings).get_client()
+async def list_objects(bucket_name: str, prefix: str, recursive: bool = True) -> AsyncGenerator[Mapping, None]:
+    storage = ObjectStorage(s3_settings)
+
+    async with storage.get_client() as s3_client:
+        paginator = s3_client.get_paginator('list_objects_v2')
 
-    yield from s3_client.list_objects(bucket_name, prefix=prefix, recursive=recursive)
+        page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix, Delimiter='' if recursive else '/')
 
+        async for page in page_iterator:
+            if 'Contents' in page:
+                for obj in page['Contents']:
+                    yield dict(obj)
 
-def get_objects(
+
+async def get_object_streams(
     bucket_name: str, path: str, recursive: bool = True
-) -> Generator[tuple[str, datetime, bytes], None, None]:
-    for obj in list_objects(bucket_name, path, recursive):
-        object_name = obj.object_name
+) -> AsyncGenerator[tuple[str, datetime, StreamingBody], None]:
+    async for obj in list_objects(bucket_name, path, recursive):
+        object_name = obj['Key']
+        last_modified = obj['LastModified']
 
-        if obj.size:
-            data = get_object(bucket_name, object_name)
-        else:
-            data = b''
+        async with get_object_stream(bucket_name, path) as stream:
+            yield object_name, last_modified, stream
 
-        yield object_name, obj.last_modified, data
 
+async def get_objects(
+    bucket_name: str, path: str, recursive: bool = True
+) -> AsyncGenerator[tuple[str, datetime, bytes], None]:
+    async for object_name, last_modified, stream in get_object_streams(bucket_name, path, recursive):
+        data = await stream.read()
+
+        yield object_name, last_modified, data
 
-def remove_object(bucket_name: str, object_name: str):
-    s3_client = ObjectStorage(s3_settings).get_client()
-    s3_client.remove_object(bucket_name, object_name)
 
+async def remove_object(bucket_name: str, object_name: str):
+    storage = ObjectStorage(s3_settings)
 
-def remove_objects(
+    async with storage.get_client() as s3_client:
+        try:
+            await s3_client.delete_object(Bucket=bucket_name, Key=object_name)
+            logger.debug(f'Removed object from object storage: {bucket_name}:{object_name}')
+        except Exception as e:
+            logger.error(f'Failed to remove object from object storage: {bucket_name}:{object_name}', exc_info=e)
+
+            raise
+
+
+async def remove_objects(
     bucket_name: str, prefix: str = None, object_names: Iterable[str] = None
-) -> Iterable[DeleteError] | None:
-    s3_client = ObjectStorage(s3_settings).get_client()
-
-    if prefix:
-        delete_object_list = map(
-            lambda obj: DeleteObject(obj.object_name),
-            s3_client.list_objects(bucket_name, prefix=prefix, recursive=True),
-        )
-    elif object_names:
-        delete_object_list = map(DeleteObject, object_names)
-    else:
-        return None
-
-    errors = s3_client.remove_objects(bucket_name, delete_object_list)
-
-    return errors
+) -> Sequence[Mapping] | None:
+    storage = ObjectStorage(s3_settings)
+
+    async with storage.get_client() as s3_client:
+        objects_to_delete = []
+
+        if prefix:
+            async for obj in list_objects(bucket_name, prefix, recursive=True):
+                objects_to_delete.append({'Key': obj['Key']})
+        elif object_names:
+            objects_to_delete = [{'Key': name} for name in object_names]
+        else:
+            return None
+
+        if not objects_to_delete:
+            return None
+
+        try:
+            errors = []
+            # S3 delete_objects can handle up to 1000 objects at once
+            chunk_size = 1000
+
+            for i in range(0, len(objects_to_delete), chunk_size):
+                chunk = objects_to_delete[i : i + chunk_size]
+
+                response = await s3_client.delete_objects(Bucket=bucket_name, Delete={'Objects': chunk})
+
+                if 'Errors' in response:
+                    errors.extend(response['Errors'])
+
+            logger.debug(f'Removed {len(objects_to_delete)} objects from object storage: {bucket_name}')
+
+            return errors if errors else None
+        except Exception as e:
+            logger.error(f'Failed to remove objects from object storage: {bucket_name}', exc_info=e)
+
+            raise
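With these changes the module-level helpers are coroutines and async generators throughout. A round-trip sketch of the new API (bucket and keys are hypothetical; per the code above, put_object() returns an s3:// URL on success and None on failure, and remove_objects() returns the per-object errors from delete_objects, if any):

    import asyncio
    import io

    from python3_commons import object_storage

    async def main():
        payload = b'hello'

        url = await object_storage.put_object('my-bucket', 'examples/hello.bin', io.BytesIO(payload), len(payload))
        print(url)  # s3://my-bucket/examples/hello.bin

        data = await object_storage.get_object('my-bucket', 'examples/hello.bin')
        assert data == payload

        # list_objects() yields plain dicts from list_objects_v2 pages.
        async for obj in object_storage.list_objects('my-bucket', 'examples/'):
            print(obj['Key'], obj['Size'])

        errors = await object_storage.remove_objects('my-bucket', prefix='examples/')
        assert errors is None

    asyncio.run(main())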
python3_commons-0.8.35.dist-info/METADATA → python3_commons-0.9.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: python3-commons
-Version: 0.8.35
+Version: 0.9.0
 Summary: Re-usable Python3 code
 Author-email: Oleg Korsak <kamikaze.is.waiting.you@gmail.com>
 License-Expression: GPL-3.0
@@ -12,12 +12,12 @@ Requires-Python: ==3.13.*
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 License-File: AUTHORS.rst
-Requires-Dist: aiohttp[speedups]~=3.12.13
+Requires-Dist: aiobotocore~=2.23.0
+Requires-Dist: aiohttp[speedups]~=3.12.14
 Requires-Dist: asyncpg~=0.30.0
 Requires-Dist: fastapi-users-db-sqlalchemy~=7.0.0
 Requires-Dist: fastapi-users[sqlalchemy]~=14.0.1
 Requires-Dist: lxml~=6.0.0
-Requires-Dist: minio~=7.2.15
 Requires-Dist: msgpack~=1.1.1
 Requires-Dist: msgspec~=0.19.0
 Requires-Dist: pydantic[email]~=2.11.7
python3_commons-0.8.35.dist-info/RECORD → python3_commons-0.9.0.dist-info/RECORD CHANGED
@@ -1,16 +1,16 @@
 python3_commons/__init__.py,sha256=0KgaYU46H_IMKn-BuasoRN3C4Hi45KlkHHoPbU9cwiA,189
 python3_commons/api_client.py,sha256=LT7_YmnYVHK2ucKxIhUJCZrmxgfy-lfOxx08-R0WvW0,4505
-python3_commons/audit.py,sha256=osx2ywZXf-V0zOkrhlNgSyzCBvojXQwSYBQ4-ze1xiM,6249
+python3_commons/audit.py,sha256=-jYGjkQ2r8rg3gj-C-5uTQ1lXhK3dRXkktonZxOs1PM,5994
 python3_commons/auth.py,sha256=vVaiJ5MHUMSbiLF6TIxe4dqVPhBlLttf940jjODL3a4,2934
 python3_commons/cache.py,sha256=lf27LTD4Z9Iqi5GaK8jH8UC0cL9sHH8wicZ88YDp6Mg,7725
-python3_commons/conf.py,sha256=JenspXyTqTlYeEb8X9Njfje1AiLCee23nm0k8zhYLfs,2283
+python3_commons/conf.py,sha256=K3GuXDp7iopHms_E-rNR4OzTBIoWSEZmMwRjxKYltTk,2305
 python3_commons/fs.py,sha256=wfLjybXndwLqNlOxTpm_HRJnuTcC4wbrHEOaEeCo9Wc,337
-python3_commons/helpers.py,sha256=ygnTv3KYoiibOFIi99-g8EXaETKHLt5i3jvykGrv6aE,3079
-python3_commons/object_storage.py,sha256=nQsXca0zzzeSY35qhnjE6pLfkLuxn7jDul0-hw0jizE,3985
+python3_commons/helpers.py,sha256=qSBQ1A02BEmIY8UPJkCoO28aZq0BPsMg6AGW9k-CThM,3166
+python3_commons/object_storage.py,sha256=eF0EOB29yFn3oVr8y0SegEVJLo1W5gJT3IYcINEtLOU,6301
 python3_commons/permissions.py,sha256=bhjTp-tq-oaTGFMHNnSBlcVX5XQCTL0nWcu6SdPEAB4,1555
 python3_commons/db/__init__.py,sha256=5nArsGm17e-pelpOwAeBKy2n_Py20XqklZsNgkcJ-DQ,2947
 python3_commons/db/helpers.py,sha256=PY0h08aLiGx-J54wmP3GHPCgGCcLd60rayAUnR3aWdI,1742
-python3_commons/db/models/__init__.py,sha256=Utr5AJf1FwcrxNtdesgjq92WMK4zpK4VL_8z1JEkJw0,185
+python3_commons/db/models/__init__.py,sha256=zjZCf0DNDkqmPZ49quJ6KZohtKH87viI_ijDG3E0PVE,554
 python3_commons/db/models/auth.py,sha256=dmyD3BX7LVBgKiepPN-bxlY6J3PhcmUfVdQwhNR45fU,1187
 python3_commons/db/models/common.py,sha256=IwrVfMQhAkPqrPmPRkG9CAB0KRa6YG_0Mogs1aafAoA,1537
 python3_commons/db/models/rbac.py,sha256=7NNTUbS8whuPUHpm4oba_UWDdNiJlHrm8HBO7oGtk64,3185
@@ -21,9 +21,9 @@ python3_commons/serializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 python3_commons/serializers/json.py,sha256=91UaXLGKGj0yPyrnuMeNrkG2GuPUgcgAsmIokUgEwpU,808
 python3_commons/serializers/msgpack.py,sha256=WrvaPE187shSK8zkH4UHHMimEZNMv9RaDSwsBE2HlCw,1269
 python3_commons/serializers/msgspec.py,sha256=0AliXlEl5sewi0UENjI8St5ZScXE5DNRERKzqWKy2Ps,2674
-python3_commons-0.8.35.dist-info/licenses/AUTHORS.rst,sha256=3R9JnfjfjH5RoPWOeqKFJgxVShSSfzQPIrEr1nxIo9Q,90
-python3_commons-0.8.35.dist-info/licenses/LICENSE,sha256=xxILuojHm4fKQOrMHPSslbyy6WuKAN2RiG74HbrYfzM,34575
-python3_commons-0.8.35.dist-info/METADATA,sha256=1bEvDnldw6teuH2wW6t9d4q05Dhupjw5g30yMJ8u3Ys,1128
-python3_commons-0.8.35.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-python3_commons-0.8.35.dist-info/top_level.txt,sha256=lJI6sCBf68eUHzupCnn2dzG10lH3jJKTWM_hrN1cQ7M,16
-python3_commons-0.8.35.dist-info/RECORD,,
+python3_commons-0.9.0.dist-info/licenses/AUTHORS.rst,sha256=3R9JnfjfjH5RoPWOeqKFJgxVShSSfzQPIrEr1nxIo9Q,90
+python3_commons-0.9.0.dist-info/licenses/LICENSE,sha256=xxILuojHm4fKQOrMHPSslbyy6WuKAN2RiG74HbrYfzM,34575
+python3_commons-0.9.0.dist-info/METADATA,sha256=ldVEpTyONYD-QMBV0ZSgYFg12IgYwa0sXJIjvtXaOI8,1133
+python3_commons-0.9.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+python3_commons-0.9.0.dist-info/top_level.txt,sha256=lJI6sCBf68eUHzupCnn2dzG10lH3jJKTWM_hrN1cQ7M,16
+python3_commons-0.9.0.dist-info/RECORD,,