nucliadb-utils 6.3.1.post3524__py3-none-any.whl → 6.3.1.post3531__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of nucliadb-utils might be problematic.

@@ -57,7 +57,7 @@ from nucliadb_utils.nuclia_usage.utils.kb_usage_report import KbUsageReportUtili


 class RequestContext:
-    def __init__(self):
+    def __init__(self: "RequestContext"):
         self.audit_request: AuditRequest = AuditRequest()
         self.start_time: float = time.monotonic()
         self.path: str = ""
nucliadb_utils/nats.py CHANGED
@@ -47,8 +47,9 @@ def get_traced_jetstream(

     if tracer_provider is not None and jetstream is not None:  # pragma: no cover
         logger.info(f"Configuring {service_name} jetstream with telemetry")
-        jetstream = JetStreamContextTelemetry(jetstream, service_name, tracer_provider)
-    return jetstream
+        return JetStreamContextTelemetry(jetstream, service_name, tracer_provider)
+    else:
+        return jetstream


 class MessageProgressUpdater:
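The get_traced_jetstream change above is purely a control-flow cleanup: the telemetry wrapper is returned directly instead of being assigned back to the local variable. A minimal sketch of the same wrap-or-pass-through pattern, using hypothetical names (TracedWrapper, wrap_if_traced) rather than the real nucliadb_utils classes:

from typing import Any, Optional


class TracedWrapper:
    # Stand-in for JetStreamContextTelemetry: records what it wraps.
    def __init__(self, inner: Any, service_name: str, tracer_provider: Any):
        self.inner = inner
        self.service_name = service_name
        self.tracer_provider = tracer_provider


def wrap_if_traced(jetstream: Any, service_name: str, tracer_provider: Optional[Any]) -> Any:
    # Mirrors the new structure: wrap and return when telemetry is configured,
    # otherwise return the original jetstream context unchanged.
    if tracer_provider is not None and jetstream is not None:
        return TracedWrapper(jetstream, service_name, tracer_provider)
    else:
        return jetstream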
nucliadb_utils/signals.py CHANGED
@@ -56,9 +56,9 @@ class Signal:

     async def dispatch(self, payload: Any):
         """Send signal to all registered callbacks by they priority order."""
-        assert isinstance(
-            payload, self.payload_model_type
-        ), "Can't dispatch a signal with an invalid model"
+        assert isinstance(payload, self.payload_model_type), (
+            "Can't dispatch a signal with an invalid model"
+        )

         awaitables = [
             cb(payload=payload)
@@ -211,8 +211,10 @@ class AzureStorage(Storage):
         bucket_name = self.get_bucket_name(kbid)
         return await self.object_store.bucket_delete(bucket_name)

-    async def iterate_objects(self, bucket: str, prefix: str) -> AsyncGenerator[ObjectInfo, None]:
-        async for obj in self.object_store.iterate(bucket, prefix):
+    async def iterate_objects(
+        self, bucket: str, prefix: str, start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
+        async for obj in self.object_store.iterate(bucket, prefix, start):
             yield obj

     async def insert_object(self, bucket_name: str, key: str, data: bytes) -> None:
@@ -373,9 +375,13 @@ class AzureObjectStore(ObjectStore):
         async for chunk in downloader.chunks():
             yield chunk

-    async def iterate(self, bucket: str, prefix: str) -> AsyncGenerator[ObjectInfo, None]:
+    async def iterate(
+        self, bucket: str, prefix: str, start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
         container_client = self.service_client.get_container_client(bucket)
         async for blob in container_client.list_blobs(name_starts_with=prefix):
+            if start and blob.name <= start:
+                continue
             yield ObjectInfo(name=blob.name)

     async def get_metadata(self, bucket: str, key: str) -> ObjectMetadata:
@@ -39,7 +39,7 @@ class InvalidOffset(Exception):
 class ResumableUploadGone(Exception):
     def __init__(self, text: str):
         self.text = text
-        super().__init__("Resumable upload is no longer available " "Google: \n " f"{text}")
+        super().__init__(f"Resumable upload is no longer available Google: \n {text}")


 class CouldNotCopyNotFound(Exception):
@@ -371,7 +371,7 @@ class GCSStorageField(StorageField):
                 await self.storage.delete_upload(self.field.old_uri, self.field.bucket_name)
             except GoogleCloudException as e:
                 logger.warning(
-                    f"Could not delete existing google cloud file " f"with uri: {self.field.uri}: {e}"
+                    f"Could not delete existing google cloud file with uri: {self.field.uri}: {e}"
                 )
         if self.field.upload_uri != self.key:
             await self.move(self.field.upload_uri, self.key, self.field.bucket_name, self.bucket)
@@ -551,7 +551,7 @@ class GCSStorage(Storage):
                 data = {"text": text}
                 if resp.status == 404:
                     logger.warning(
-                        f"Attempt to delete not found gcloud: {data}, " f"status: {resp.status}",
+                        f"Attempt to delete not found gcloud: {data}, status: {resp.status}",
                         exc_info=True,
                     )
                 elif resp.status not in (200, 204):
@@ -658,7 +658,7 @@ class GCSStorage(Storage):
                     logger.error("Not implemented")
                 elif resp.status == 404:
                     logger.error(
-                        f"Attempt to delete not found gcloud: {data}, " f"status: {resp.status}",
+                        f"Attempt to delete not found gcloud: {data}, status: {resp.status}",
                         exc_info=True,
                     )
                 else:
@@ -695,20 +695,29 @@ class GCSStorage(Storage):
             errors.capture_message(msg, "error", scope)
         return deleted, conflict

-    async def iterate_objects(self, bucket: str, prefix: str) -> AsyncGenerator[ObjectInfo, None]:
+    async def iterate_objects(
+        self, bucket: str, prefix: str, start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
         if self.session is None:
             raise AttributeError()
         url = "{}/{}/o".format(self.object_base_url, bucket)
         headers = await self.get_access_headers()
+        params = {"prefix": prefix}
+        if start:
+            params["startOffset"] = start
         async with self.session.get(
             url,
             headers=headers,
-            params={"prefix": prefix},
+            params=params,
         ) as resp:
             assert resp.status == 200
             data = await resp.json()
             if "items" in data:
                 for item in data["items"]:
+                    if start is not None and item["name"] == start:
+                        # Skip the start item to be compatible with all
+                        # storage implementations
+                        continue
                     yield ObjectInfo(name=item["name"])

             page_token = data.get("nextPageToken")
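Across the backends, iterate_objects/iterate now take an optional start marker so a listing can be resumed after the last key already processed; GCS maps it onto the startOffset query parameter, which is inclusive, hence the explicit skip of an item whose name equals start. A hedged usage sketch, assuming an already-initialized Storage instance and made-up bucket and prefix names:

from typing import Optional


async def list_from(storage, last_seen: Optional[str] = None) -> list[str]:
    # Resume listing after the last object seen in a previous pass.
    # `storage` is any Storage implementation; bucket and prefix are illustrative.
    names: list[str] = []
    async for obj in storage.iterate_objects("my-bucket", "kb/", start=last_seen):
        names.append(obj.name)  # ObjectInfo exposes the object key as `name`
    return names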
@@ -241,14 +241,17 @@ class LocalStorage(Storage):

     async def create_kb(self, kbid: str):
         bucket = self.get_bucket_name(kbid)
-        path = self.get_bucket_path(bucket)
         try:
-            os.makedirs(path, exist_ok=True)
+            await self.create_bucket(bucket)
             created = True
         except FileExistsError:
             created = False
         return created

+    async def create_bucket(self, bucket_name: str):
+        path = self.get_bucket_path(bucket_name)
+        os.makedirs(path, exist_ok=True)
+
     async def delete_kb(self, kbid: str) -> tuple[bool, bool]:
         bucket = self.get_bucket_name(kbid)
         path = self.get_bucket_path(bucket)
@@ -277,20 +280,27 @@ class LocalStorage(Storage):
         deleted = False
         return deleted

-    async def iterate_objects(self, bucket: str, prefix: str) -> AsyncGenerator[ObjectInfo, None]:
-        pathname = f"{self.get_file_path(bucket, prefix)}*"
-        for key in glob.glob(pathname):
+    async def iterate_objects(
+        self, bucket: str, prefix: str, start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
+        bucket_path = self.get_bucket_path(bucket)
+        pathname = f"{self.get_file_path(bucket, prefix)}**/*"
+        for key in sorted(glob.glob(pathname, recursive=True)):
+            if not os.path.isfile(key):
+                continue
             if key.endswith(".metadata"):
                 # Skip metadata files -- they are internal to the local-storage implementation.
                 continue
-            name = key.split("/")[-1]
+            name = key.split(bucket_path)[-1].lstrip("/")
+            if start is not None and name <= start:
+                continue
             yield ObjectInfo(name=name)

-    async def download(self, bucket_name: str, key: str, range: Optional[Range] = None):
-        key_path = self.get_file_path(bucket_name, key)
+    async def download(self, bucket: str, key: str, range: Optional[Range] = None):
+        key_path = self.get_file_path(bucket, key)
         if not os.path.exists(key_path):
             return
-        async for chunk in super().download(bucket_name, key, range=range):
+        async for chunk in super().download(bucket, key, range=range):
             yield chunk

     async def insert_object(self, bucket: str, key: str, data: bytes) -> None:
@@ -120,7 +120,9 @@ class ObjectStore(abc.ABC, metaclass=abc.ABCMeta):
         yield b""

     @abc.abstractmethod
-    async def iterate(self, bucket: str, prefix: str) -> AsyncGenerator[ObjectInfo, None]:
+    async def iterate(
+        self, bucket: str, prefix: str, start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
         raise NotImplementedError()
         yield ObjectInfo(name="")

@@ -392,7 +392,7 @@ class S3Storage(Storage):
         self._session = get_session()
         return self._session

-    async def initialize(self):
+    async def initialize(self: "S3Storage") -> None:
         session = AioSession()
         self._s3aioclient: AioBaseClient = await self._exit_stack.enter_async_context(
             session.create_client("s3", **self.opts)
@@ -422,10 +422,14 @@ class S3Storage(Storage):
         else:
             raise AttributeError("No valid uri")

-    async def iterate_objects(self, bucket: str, prefix: str = "/") -> AsyncGenerator[ObjectInfo, None]:
+    async def iterate_objects(
+        self, bucket: str, prefix: str = "/", start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
         paginator = self._s3aioclient.get_paginator("list_objects")
-        async for result in paginator.paginate(Bucket=bucket, Prefix=prefix):
-            for item in result.get("Contents", []):
+        async for result in paginator.paginate(
+            Bucket=bucket, Prefix=prefix, PaginationConfig={"StartingToken": start}
+        ):
+            for item in result.get("Contents") or []:
                 yield ObjectInfo(name=item["Key"])

     async def create_kb(self, kbid: str):
@@ -502,7 +502,9 @@ class Storage(abc.ABC, metaclass=abc.ABCMeta):
     async def finalize(self) -> None: ...

     @abc.abstractmethod
-    async def iterate_objects(self, bucket: str, prefix: str) -> AsyncGenerator[ObjectInfo, None]:
+    async def iterate_objects(
+        self, bucket: str, prefix: str, start: Optional[str] = None
+    ) -> AsyncGenerator[ObjectInfo, None]:
         raise NotImplementedError()
         yield ObjectInfo(name="")

@@ -553,6 +555,13 @@ class Storage(abc.ABC, metaclass=abc.ABCMeta):
         else:
             await self.insert_object(bucket, key, data)

+    @abc.abstractmethod
+    async def create_bucket(self, bucket_name: str) -> None:
+        """
+        Create a new bucket in the storage.
+        """
+        ...
+


 async def iter_and_add_size(
     stream: AsyncGenerator[bytes, None], cf: CloudFile
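Storage now declares create_bucket as an abstract coroutine, so every concrete backend has to provide it; the local backend above simply creates a directory. A standalone sketch of that idea, with a hypothetical base path instead of the real LocalStorage settings:

import os


def create_bucket(base_path: str, bucket_name: str) -> str:
    # A "bucket" in the local backend is just a directory under a base path,
    # created idempotently so repeated calls are harmless.
    path = os.path.join(base_path, bucket_name)
    os.makedirs(path, exist_ok=True)
    return path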
@@ -19,7 +19,7 @@
 #
 from contextlib import ExitStack
 from dataclasses import dataclass
-from typing import Any, Generator
+from typing import Any, Generator, Iterator
 from unittest.mock import patch

 import pytest
@@ -112,7 +112,7 @@ def azurite() -> Generator[AzuriteFixture, None, None]:


 @pytest.fixture(scope="function")
-def azure_storage_settings(azurite: AzuriteFixture) -> dict[str, Any]:
+def azure_storage_settings(azurite: AzuriteFixture) -> Iterator[dict[str, Any]]:
     settings = {
         "file_backend": FileBackendConfig.AZURE,
         "azure_account_url": azurite.account_url,
@@ -18,7 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 import os
-from typing import Any, Type
+from typing import Any, Iterator, Type
 from unittest.mock import Mock

 import pytest
@@ -75,7 +75,7 @@ async def storage(request):


 @pytest.fixture(scope="function")
-def storage_settings(request, storage) -> dict[str, Any]:
+def storage_settings(request, storage) -> Iterator[dict[str, Any]]:
     """Useful fixture that returns the settings used in the generic `storage`
     fixture.

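The fixture annotation changes in the test helpers all follow the same rule: a yield-based pytest fixture is a generator, so its accurate return annotation is Iterator[...] (or AsyncIterator[...] for async fixtures) rather than the type of the yielded value. A generic sketch, not taken from the package:

from typing import Any, Iterator

import pytest


@pytest.fixture(scope="function")
def example_settings() -> Iterator[dict[str, Any]]:
    # The fixture yields its value (and could clean up afterwards), so the
    # function itself returns an iterator, not the dict.
    settings = {"file_backend": "local"}
    yield settings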
@@ -20,7 +20,7 @@
 import re
 from concurrent.futures.thread import ThreadPoolExecutor
 from contextlib import ExitStack
-from typing import Any, Optional
+from typing import Any, Iterator, Optional
 from unittest.mock import patch

 import docker  # type: ignore # type: ignore
@@ -91,7 +91,7 @@ def running_in_mac_os() -> bool:


 @pytest.fixture(scope="function")
-def gcs_storage_settings(gcs) -> dict[str, Any]:
+def gcs_storage_settings(gcs) -> Iterator[dict[str, Any]]:
     settings = {
         "file_backend": FileBackendConfig.GCS,
         "gcs_endpoint_url": gcs,
@@ -19,7 +19,7 @@
 #
 from contextlib import ExitStack
 from pathlib import Path
-from typing import Any
+from typing import Any, Iterator
 from unittest.mock import patch

 import pytest
@@ -29,7 +29,7 @@ from nucliadb_utils.storages.local import LocalStorage


 @pytest.fixture(scope="function")
-def local_storage_settings(tmp_path: Path) -> dict[str, Any]:
+def local_storage_settings(tmp_path: Path) -> Iterator[dict[str, Any]]:
     settings = {
         "file_backend": FileBackendConfig.LOCAL,
         "local_files": str((tmp_path / "blob").absolute()),
@@ -18,7 +18,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 from contextlib import ExitStack
-from typing import Any
+from typing import Any, AsyncIterator
 from unittest.mock import patch

 import pytest
@@ -62,7 +62,7 @@ def s3():


 @pytest.fixture(scope="function")
-async def s3_storage_settings(s3) -> dict[str, Any]:
+async def s3_storage_settings(s3) -> AsyncIterator[dict[str, Any]]:
     settings = {
         "file_backend": FileBackendConfig.S3,
         "s3_endpoint": s3,
@@ -51,6 +51,10 @@ class WaitFor:
         self.seq = seq


+class TransactionError(Exception):
+    pass
+
+
 class TransactionCommitTimeoutError(Exception):
     pass

@@ -80,7 +84,7 @@ class LocalTransactionUtility:

         resp = await ingest.ProcessMessage(iterator(writer))  # type: ignore
         if resp.status != OpStatusWriter.Status.OK:
-            logger.error(f"Local transaction failed processing {writer}")
+            raise TransactionError(f"Local transaction failed processing {writer}")
         return 0

     async def finalize(self):
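With this change a failed local transaction now raises the new TransactionError instead of only logging it, so callers can react to the failure. A hedged sketch of caller-side handling; the transaction object and commit arguments are illustrative, and only the exception class comes from the diff (assumed importable from nucliadb_utils.transaction):

import logging

from nucliadb_utils.transaction import TransactionError  # added in this release

logger = logging.getLogger(__name__)


async def commit_or_report(transaction, writer_message, partition: int) -> bool:
    # Hypothetical call site: commit a writer message and report failures.
    try:
        await transaction.commit(writer_message, partition)
        return True
    except TransactionError:
        logger.exception("Local transaction failed, not retrying")
        return False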
@@ -181,7 +181,7 @@ async def _create_storage(gcs_scopes: Optional[List[str]] = None) -> Storage:
     raise ConfigurationError("Invalid storage settings, please configure FILE_BACKEND")


-async def teardown_storage():
+async def teardown_storage() -> None:
     storage: Optional[Storage] = get_utility(Utility.STORAGE)
     if storage is None:
         return
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: nucliadb_utils
-Version: 6.3.1.post3524
+Version: 6.3.1.post3531
 Summary: NucliaDB util library
 Author-email: Nuclia <nucliadb@nuclia.com>
 License: AGPL
@@ -27,8 +27,8 @@ Requires-Dist: nats-py[nkeys]>=2.6.0
 Requires-Dist: PyNaCl
 Requires-Dist: pyjwt>=2.4.0
 Requires-Dist: mrflagly>=0.2.9
-Requires-Dist: nucliadb-protos>=6.3.1.post3524
-Requires-Dist: nucliadb-telemetry>=6.3.1.post3524
+Requires-Dist: nucliadb-protos>=6.3.1.post3531
+Requires-Dist: nucliadb-telemetry>=6.3.1.post3531
 Provides-Extra: cache
 Requires-Dist: redis>=4.3.4; extra == "cache"
 Requires-Dist: orjson>=3.6.7; extra == "cache"
@@ -7,15 +7,15 @@ nucliadb_utils/exceptions.py,sha256=y_3wk77WLVUtdo-5FtbBsdSkCtK_DsJkdWb5BoPn3qo,
 nucliadb_utils/featureflagging.py,sha256=YxDiXzWuiDlHtqgeTVeyakmbAdzBePzJpgJv53ELbmI,2259
 nucliadb_utils/grpc.py,sha256=apu0uePnkGHCAT7GRQ9YZfRYyFj26kJ440i8jitbM3U,3314
 nucliadb_utils/helpers.py,sha256=nPw8yod3hP-pxq80VF8QC36s7ygSg0dBUdfI-LatvCs,1600
-nucliadb_utils/nats.py,sha256=99YJkK1QMfWDxKJK8u0acK0lLYb8yThalzulGETiT80,15266
+nucliadb_utils/nats.py,sha256=-ehKk0u5ZhiJEyg67yV26YysBGEGp4TDSQKq3aydJn0,15275
 nucliadb_utils/partition.py,sha256=jBgy4Hu5Iwn4gjbPPcthSykwf-qNx-GcLAIwbzPd1d0,1157
 nucliadb_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nucliadb_utils/run.py,sha256=Es0_Bu5Yc-LWczvwL6gzWqSwC85RjDCk-0oFQAJi9g4,1827
 nucliadb_utils/settings.py,sha256=RnGhEUvwv6faNqALiqDCivvzNOyyXVBflYh_37uNkow,8193
-nucliadb_utils/signals.py,sha256=JRNv2y9zLtBjOANBf7krGfDGfOc9qcoXZ6N1nKWS2FE,2674
+nucliadb_utils/signals.py,sha256=lo_Mk12NIX5Au--3H3WObvDOXq_OMurql2qiC2TnAao,2676
 nucliadb_utils/store.py,sha256=kQ35HemE0v4_Qg6xVqNIJi8vSFAYQtwI3rDtMsNy62Y,890
-nucliadb_utils/transaction.py,sha256=YYnTpxCDs56lo0tS6ErABjk9WjDuieUc4f7r63Q_OP8,7864
-nucliadb_utils/utilities.py,sha256=usyqgkfinZeDoUSFpIugmKKJZpoi8wppFL_lAhhlkBI,15371
+nucliadb_utils/transaction.py,sha256=z_VeiTIta48rosS2SXMqx86XaavprSMRWf6s6zWIeEs,7920
+nucliadb_utils/utilities.py,sha256=Vc4zLpDf-FQh9bs0ZyAfGAjzlbbMTMWf3VWt2Ao5V3k,15379
 nucliadb_utils/aiopynecone/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn4bIXMa20ks,835
 nucliadb_utils/aiopynecone/client.py,sha256=MPyHnDXwhukJr7U3CJh7BpsekfSuOkyM4g5b9LLtzc8,22941
 nucliadb_utils/aiopynecone/exceptions.py,sha256=fUErx3ceKQK1MUbOnYcZhIzpNe8UVAptZE9JIRDLXDE,4000
@@ -23,7 +23,7 @@ nucliadb_utils/aiopynecone/models.py,sha256=XkNIZx4bxdbVo9zYVn8IRp70q4DWUMWN79yb
 nucliadb_utils/audit/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn4bIXMa20ks,835
 nucliadb_utils/audit/audit.py,sha256=VDUqcSToEoz5m7iG-4omjTlsBSEJGcp0_xl51idnU3Q,3384
 nucliadb_utils/audit/basic.py,sha256=pVf1oKp7T_oTDCyt8QbSsfC_uWjF66ooF6rqY-SWSrc,4032
-nucliadb_utils/audit/stream.py,sha256=AT4OuLTtQ3mID8Geasn6d7qRK8D7MIMUi7GemzU_7mk,16017
+nucliadb_utils/audit/stream.py,sha256=SovPj5iTzXka2cg4NnwAZwnOYljhsT6ynhDC2OsRVPc,16035
 nucliadb_utils/cache/__init__.py,sha256=itSI7dtTwFP55YMX4iK7JzdMHS5CQVUiB1XzQu4UBh8,833
 nucliadb_utils/cache/exceptions.py,sha256=Zu-O_-0-yctOEgoDGI92gPzWfBMRrpiAyESA62ld6MA,975
 nucliadb_utils/cache/nats.py,sha256=-AjCfkFgKVdJUlGR0hT9JDSNkPVFg4S6w9eW-ZIcXPM,7037
@@ -39,25 +39,25 @@ nucliadb_utils/nuclia_usage/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn
 nucliadb_utils/nuclia_usage/utils/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn4bIXMa20ks,835
 nucliadb_utils/nuclia_usage/utils/kb_usage_report.py,sha256=6lLuxCCPQVn3dOuZNL5ThPjl2yws-1TJ_7duhQSWkPU,3934
 nucliadb_utils/storages/__init__.py,sha256=5Qc8AUWiJv9_JbGCBpAn88AIJhwDlm0OPQpg2ZdRL4U,872
-nucliadb_utils/storages/azure.py,sha256=FK4c_v9AUAwagScm_F1uDmJeQQq7P4jZswiD2trwb4A,16394
-nucliadb_utils/storages/exceptions.py,sha256=_xUDbfr3XkmUc0xXYZu3DpG1C8TtfmEweRwj9mbRvMU,2507
-nucliadb_utils/storages/gcs.py,sha256=R35XQmf_M8Ba_eIuQumjAWSgU_lSouoTp-TTua-VgOM,28911
-nucliadb_utils/storages/local.py,sha256=JxlWNtu49JJ04dq6o7bBAqbpbeYpVyvvBM5jq1sGJ-4,11003
+nucliadb_utils/storages/azure.py,sha256=b5WzTjwsyrFxkuD5GzOUSBw6Szm0OkNt8t2NGoMiTJI,16557
+nucliadb_utils/storages/exceptions.py,sha256=GOPKH-F3dPTfHEkwGNfVkSfF70eWJJXjI83yccw9WpA,2501
+nucliadb_utils/storages/gcs.py,sha256=KR267zRvBBjT0ksxcfCCz0CmqPD8W-Kj3_lWZXWFg0Q,29249
+nucliadb_utils/storages/local.py,sha256=USEYXavJfTK5bU71iLozVM9XjDdtPNte3uWJBuiWIOo,11371
 nucliadb_utils/storages/nuclia.py,sha256=vEv94xAT7QM2g80S25QyrOw2pzvP2BAX-ADgZLtuCVc,2097
-nucliadb_utils/storages/object_store.py,sha256=HtKjIKhErSBvuqx1SuCOnL0SkiHqgfyekNMP8o2piZU,4492
-nucliadb_utils/storages/s3.py,sha256=pgakBqBLfDAFbdjwwX6u19QTv7gQYZ82Pjw67Yi6ob8,20895
+nucliadb_utils/storages/object_store.py,sha256=2PueRP5Q3XOuWgKhj6B9Kp2fyBql5np0T400YRUbqn4,4535
+nucliadb_utils/storages/s3.py,sha256=4-cs_uFC4ZoLHLTZqvGi8v3nQcbrPBFJZuIeWltkFHI,21026
 nucliadb_utils/storages/settings.py,sha256=ugCPy1zxBOmA2KosT-4tsjpvP002kg5iQyi42yCGCJA,1285
-nucliadb_utils/storages/storage.py,sha256=1LSaZKQ4xSoBP85OVATh7zBZdBQ369Wl1uYBymZPfPw,21135
+nucliadb_utils/storages/storage.py,sha256=MGmwWmGuZFff5lOvTY6ZgKrYJUvJojISwAM3K4qzFKM,21344
 nucliadb_utils/storages/utils.py,sha256=8g2rIwJeYIumQLOB47Yw1rx3twlhRB_cJxer65QfZmk,1479
 nucliadb_utils/tests/__init__.py,sha256=Oo9CAE7B0eW5VHn8sHd6o30SQzOWUhktLPRXdlDOleA,1456
 nucliadb_utils/tests/asyncbenchmark.py,sha256=vrX_x9ifCXi18PfNShc23w9x_VUiB_Ph-2nuolh9z3Q,10707
-nucliadb_utils/tests/azure.py,sha256=Dg-Eb4KVScG-O6P9y-bVQZTAKTNUMQ0i-CKEd9IdrWw,4474
-nucliadb_utils/tests/fixtures.py,sha256=i0sqPqe5a5JlKGFdaIvOlHYkZ3pHZ2hTIgTsaIB3vSM,3472
-nucliadb_utils/tests/gcs.py,sha256=KW_DLet1WRlssSW55eI-IQ-0d94Jo2Oh7Di4xGv4JCc,4685
-nucliadb_utils/tests/local.py,sha256=fXIBasrvdaFJM-sw2wk1_oiFzBcm9O10iCyC-OiXwY8,1914
+nucliadb_utils/tests/azure.py,sha256=NvMrPG6gfbpDE0m_aZgaa7eorbmA1r9rhAsAANhMlJk,4494
+nucliadb_utils/tests/fixtures.py,sha256=uNiZ8qaOz2bSmSn-2ZMXxhQcHVh3tCyr5A-e0XaoRr8,3492
+nucliadb_utils/tests/gcs.py,sha256=MBMzn_UHU5SU6iILuCsB5zU4umhNcaCw_MKrxZhwvOc,4705
+nucliadb_utils/tests/local.py,sha256=cxIfPrKuqs5Ef0nbrVYQQAH2mwc4E0iD9bC2sWegS-c,1934
 nucliadb_utils/tests/nats.py,sha256=RWHjwqq5esuO7OFbP24yYX1cXnpPLcWJwDUdmwCpH28,1897
-nucliadb_utils/tests/s3.py,sha256=pl-RJFjA4MH6iXkqhsh5g8gDuEhrYu1nPZ-laxlrMlE,3704
-nucliadb_utils-6.3.1.post3524.dist-info/METADATA,sha256=tMP8Wx4jEKAM8zESbKBv1QQKqJe7jvbxoW0md9DHIDc,2209
-nucliadb_utils-6.3.1.post3524.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
-nucliadb_utils-6.3.1.post3524.dist-info/top_level.txt,sha256=fE3vJtALTfgh7bcAWcNhcfXkNPp_eVVpbKK-2IYua3E,15
-nucliadb_utils-6.3.1.post3524.dist-info/RECORD,,
+nucliadb_utils/tests/s3.py,sha256=DACUh3HvgH3BchKFZ9R7RFUzsrg3v9A-cxTcXx4nmvA,3734
+nucliadb_utils-6.3.1.post3531.dist-info/METADATA,sha256=42hW8sy26Suv1ZQdpxtvzqRARgqMWm-pfADzOY9-Bvk,2209
+nucliadb_utils-6.3.1.post3531.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+nucliadb_utils-6.3.1.post3531.dist-info/top_level.txt,sha256=fE3vJtALTfgh7bcAWcNhcfXkNPp_eVVpbKK-2IYua3E,15
+nucliadb_utils-6.3.1.post3531.dist-info/RECORD,,