nucliadb-utils 6.8.1.post4957__py3-none-any.whl → 6.9.5.post5447__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.

nucliadb_utils/const.py CHANGED
@@ -41,4 +41,4 @@ class Features:
     SKIP_EXTERNAL_INDEX = "nucliadb_skip_external_index"
     LOG_REQUEST_PAYLOADS = "nucliadb_log_request_payloads"
     IGNORE_EXTRACTED_IN_SEARCH = "nucliadb_ignore_extracted_in_search"
-    DEBUG_MISSING_VECTORS = "nucliadb_debug_missing_vectors"
+    REBALANCE_ENABLED = "nucliadb_rebalance"

nucliadb_utils/featureflagging.py CHANGED
@@ -45,10 +45,7 @@ DEFAULT_FLAG_DATA: dict[str, Any] = {
         "rollout": 0,
         "variants": {"environment": ["local"]},
     },
-    const.Features.DEBUG_MISSING_VECTORS: {
-        "rollout": 0,
-        "variants": {"environment": ["local"]},
-    },
+    const.Features.REBALANCE_ENABLED: {"rollout": 0, "variants": {"environment": ["local"]}},
 }
 
 

nucliadb_utils/settings.py CHANGED
@@ -125,6 +125,12 @@ class StorageSettings(BaseSettings):
         examples=["https://<storageaccountname>.blob.core.windows.net"],
     )
 
+    azure_kb_account_url: Optional[str] = Field(
+        default=None,
+        description="Azure Account URL for KB containers. If unspecified, uses `azure_account_url`",  # noqa
+        examples=["https://<storageaccountname>.blob.core.windows.net"],
+    )
+
     # For testing purposes: Azurite docker image requires a connection string as it
     # doesn't support Azure's default credential authentication method
     azure_connection_string: Optional[str] = None
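
The new setting is optional: when unset, callers fall back to `azure_account_url`, as the wiring in utilities.py further down does. A minimal sketch of that resolution:

    # Hedged sketch of the fallback applied when constructing AzureStorage.
    from nucliadb_utils.settings import storage_settings

    kb_account_url = storage_settings.azure_kb_account_url or storage_settings.azure_account_url
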

nucliadb_utils/storages/azure.py CHANGED
@@ -20,6 +20,7 @@
 
 from __future__ import annotations
 
+import base64
 import logging
 from datetime import datetime
 from typing import AsyncGenerator, AsyncIterator, Optional, Union
@@ -31,7 +32,6 @@ from azure.storage.blob.aio import BlobServiceClient
 
 from nucliadb_protos.resources_pb2 import CloudFile
 from nucliadb_telemetry import metrics
-from nucliadb_utils.storages.exceptions import ObjectNotFoundError
 from nucliadb_utils.storages.object_store import ObjectStore
 from nucliadb_utils.storages.storage import Storage, StorageField
 from nucliadb_utils.storages.utils import ObjectInfo, ObjectMetadata, Range
@@ -146,7 +146,7 @@ class AzureStorageField(StorageField):
             return None
         try:
             return await self.storage.object_store.get_metadata(bucket, key)
-        except ObjectNotFoundError:
+        except KeyError:
             return None
 
     async def upload(self, iterator: AsyncIterator, origin: CloudFile) -> CloudFile:
@@ -163,22 +163,32 @@ class AzureStorageField(StorageField):
 
 class AzureStorage(Storage):
     field_klass = AzureStorageField
-    object_store: ObjectStore
+    object_store: AzureObjectStore
+    kb_object_store: AzureObjectStore
     source = CloudFile.AZURE
 
     def __init__(
         self,
         account_url: str,
-        deadletter_bucket: str = "deadletter",
-        indexing_bucket: str = "indexing",
+        kb_account_url: str,
+        deadletter_bucket: Optional[str] = "deadletter",
+        indexing_bucket: Optional[str] = "indexing",
         connection_string: Optional[str] = None,
     ):
         self.object_store = AzureObjectStore(account_url, connection_string=connection_string)
+        self.kb_object_store = AzureObjectStore(kb_account_url, connection_string=connection_string)
         self.deadletter_bucket = deadletter_bucket
         self.indexing_bucket = indexing_bucket
 
+    def object_store_for_bucket(self, bucket_name: str) -> AzureObjectStore:
+        if bucket_name in [self.indexing_bucket, self.deadletter_bucket]:
+            return self.object_store
+        else:
+            return self.kb_object_store
+
     async def initialize(self, service_name: Optional[str] = None):
         await self.object_store.initialize()
+        await self.kb_object_store.initialize()
         for bucket in [
             self.deadletter_bucket,
             self.indexing_bucket,
@@ -195,39 +205,39 @@ class AzureStorage(Storage):
 
     async def delete_upload(self, uri: str, bucket_name: str):
         try:
-            await self.object_store.delete(bucket_name, uri)
-        except ObjectNotFoundError:
+            await self.object_store_for_bucket(bucket_name).delete(bucket_name, uri)
+        except KeyError:
             pass
 
     async def create_bucket(self, bucket_name: str, kbid: Optional[str] = None):
-        if await self.object_store.bucket_exists(bucket_name):
+        if await self.object_store_for_bucket(bucket_name).bucket_exists(bucket_name):
             return
-        await self.object_store.bucket_create(bucket_name)
+        await self.object_store_for_bucket(bucket_name).bucket_create(bucket_name)
 
     def get_bucket_name(self, kbid: str):
         return f"nucliadb-{kbid}"
 
     async def create_kb(self, kbid: str) -> bool:
         bucket_name = self.get_bucket_name(kbid)
-        return await self.object_store.bucket_create(bucket_name)
+        return await self.kb_object_store.bucket_create(bucket_name)
 
     async def schedule_delete_kb(self, kbid: str) -> bool:
         bucket_name = self.get_bucket_name(kbid)
-        deleted, _ = await self.object_store.bucket_delete(bucket_name)
+        deleted, _ = await self.kb_object_store.bucket_delete(bucket_name)
         return deleted
 
     async def delete_kb(self, kbid: str) -> tuple[bool, bool]:
         bucket_name = self.get_bucket_name(kbid)
-        return await self.object_store.bucket_delete(bucket_name)
+        return await self.kb_object_store.bucket_delete(bucket_name)
 
     async def iterate_objects(
         self, bucket: str, prefix: str, start: Optional[str] = None
     ) -> AsyncGenerator[ObjectInfo, None]:
-        async for obj in self.object_store.iterate(bucket, prefix, start):
+        async for obj in self.object_store_for_bucket(bucket).iterate(bucket, prefix, start):
             yield obj
 
-    async def insert_object(self, bucket_name: str, key: str, data: bytes) -> None:
-        await self.object_store.insert(bucket_name, key, data)
+    async def insert_object(self, bucket: str, key: str, data: bytes) -> None:
+        await self.object_store_for_bucket(bucket).insert(bucket, key, data)
 
 
 class AzureObjectStore(ObjectStore):
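
The routing rule added above is the heart of this change: the service buckets (indexing, deadletter) stay on the main account, while everything else, notably the per-KB `nucliadb-{kbid}` containers, is served from the new KB account. A sketch of the resulting behavior, assuming hypothetical account URLs and that construction performs no I/O (clients are set up in `initialize()`):

    from nucliadb_utils.storages.azure import AzureStorage

    # Hypothetical account URLs; default bucket names come from the signature above.
    storage = AzureStorage(
        account_url="https://serviceaccount.blob.core.windows.net",
        kb_account_url="https://kbaccount.blob.core.windows.net",
    )
    # Service buckets resolve to the main object store...
    assert storage.object_store_for_bucket("indexing") is storage.object_store
    assert storage.object_store_for_bucket("deadletter") is storage.object_store
    # ...while any other bucket (e.g. a KB container) resolves to the KB store.
    assert storage.object_store_for_bucket("nucliadb-somekbid") is storage.kb_object_store
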
@@ -328,7 +338,7 @@ class AzureObjectStore(ObjectStore):
         try:
             await container_client.delete_blob(key, delete_snapshots="include")
         except ResourceNotFoundError:
-            raise ObjectNotFoundError()
+            raise KeyError(f"Not found: {bucket}/{key}")
 
     @ops_observer.wrap({"type": "upload"})
     async def upload(
@@ -350,6 +360,7 @@ class AzureObjectStore(ObjectStore):
             name=key,
             data=data,
             length=length,
+            overwrite=True,
             blob_type=BlobType.BLOCKBLOB,
             metadata=custom_metadata,
             content_settings=ContentSettings(
@@ -361,7 +372,7 @@ class AzureObjectStore(ObjectStore):
     @ops_observer.wrap({"type": "insert"})
     async def insert(self, bucket: str, key: str, data: bytes) -> None:
         container_client = self.service_client.get_container_client(bucket)
-        await container_client.upload_blob(name=key, data=data, length=len(data))
+        await container_client.upload_blob(name=key, data=data, length=len(data), overwrite=True)
 
     @ops_observer.wrap({"type": "download"})
     async def download(self, bucket: str, key: str) -> bytes:
@@ -370,7 +381,7 @@
         try:
             downloader = await blob_client.download_blob()
         except ResourceNotFoundError:
-            raise ObjectNotFoundError()
+            raise KeyError(f"Not found: {bucket}/{key}")
         return await downloader.readall()
 
     async def download_stream(
@@ -383,14 +394,14 @@
         length = None
         if range.any():
             offset = range.start or 0
-            length = range.end - offset + 1 if range.end else None
+            length = range.end - offset + 1 if range.end is not None else None
         try:
             downloader = await blob_client.download_blob(
                 offset=offset,  # type: ignore
                 length=length,  # type: ignore
             )
         except ResourceNotFoundError:
-            raise ObjectNotFoundError()
+            raise KeyError(f"Not found: {bucket}/{key}")
         async for chunk in downloader.chunks():
             yield chunk
 
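
Why `is not None` matters here: a `Range(start=0, end=0)` denotes the first byte of the blob, but `end == 0` is falsy, so the old guard computed no length and streamed the whole blob. A minimal illustration:

    # end == 0 is a valid inclusive byte offset (the first byte) but falsy in Python.
    offset, end = 0, 0
    length = end - offset + 1 if end is not None else None
    assert length == 1  # the old `if range.end:` guard yielded None (full blob) here
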
@@ -411,18 +422,25 @@ class AzureObjectStore(ObjectStore):
             properties: BlobProperties = await blob_client.get_blob_properties()
             return parse_object_metadata(properties, key)
         except ResourceNotFoundError:
-            raise ObjectNotFoundError()
+            raise KeyError(f"Not found: {bucket}/{key}")
 
     @ops_observer.wrap({"type": "multipart_start"})
     async def upload_multipart_start(self, bucket: str, key: str, metadata: ObjectMetadata) -> None:
         container_client = self.service_client.get_container_client(bucket)
-        custom_metadata = {key: str(value) for key, value in metadata.model_dump().items()}
+        custom_metadata = {
+            "base64_filename": base64.b64encode(metadata.filename.encode()).decode(),
+            "content_type": metadata.content_type,
+            "size": str(metadata.size),
+        }
         blob_client = container_client.get_blob_client(key)
+        safe_filename = (
+            metadata.filename.encode("ascii", "replace").decode().replace('"', "").replace("\n", "")
+        )
         await blob_client.create_append_blob(
             metadata=custom_metadata,
             content_settings=ContentSettings(
                 content_type=metadata.content_type,
-                content_disposition=f"attachment; filename={metadata.filename}",
+                content_disposition=f'attachment; filename="{safe_filename}"',
             ),
         )
 
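
The Content-Disposition header is hardened at the same time: the filename is forced to ASCII, stripped of double quotes and newlines, and wrapped in quotes before being embedded. A quick illustration of the sanitization:

    # Non-ASCII characters become "?", quotes and newlines are dropped.
    name = 'año "report"\n.pdf'
    safe = name.encode("ascii", "replace").decode().replace('"', "").replace("\n", "")
    assert safe == "a?o report.pdf"
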
@@ -450,7 +468,12 @@ def parse_object_metadata(properties: BlobProperties, key: str) -> ObjectMetadata:
         size = int(custom_metadata_size)
     else:
         size = properties.size
-    filename = custom_metadata.get("filename") or key.split("/")[-1]
+
+    b64_filename = custom_metadata.get("base64_filename")
+    if b64_filename:
+        filename = base64.b64decode(b64_filename.encode()).decode()
+    else:
+        filename = key.split("/")[-1]
     content_type = custom_metadata.get("content_type") or properties.content_settings.content_type or ""
     return ObjectMetadata(
         filename=filename,
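
Both the Azure backend and the S3 backend (see the s3.py hunk below) now store the filename base64-encoded, because object metadata values must be ASCII and a raw non-ASCII filename could fail the metadata write. The decoding above is the inverse of the encoding in `upload_multipart_start`:

    import base64

    # A non-ASCII filename that a plain ASCII metadata write could reject.
    original = "informe-año-2024.pdf"
    stored = base64.b64encode(original.encode()).decode()  # ASCII-safe value
    assert base64.b64decode(stored.encode()).decode() == original
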

nucliadb_utils/storages/exceptions.py CHANGED
@@ -71,9 +71,3 @@ class UnparsableResponse(Exception):
     Raised when trying to parse a response from a storage API and it's not
     possible
     """
-
-
-class ObjectNotFoundError(Exception):
-    """
-    Raised when the object is not found in storage
-    """

nucliadb_utils/storages/s3.py CHANGED
@@ -179,7 +179,7 @@ class S3StorageField(StorageField):
             Bucket=bucket_name,
             Key=upload_id,
             Metadata={
-                "FILENAME": cf.filename,
+                "base64_filename": base64.b64encode(cf.filename.encode()).decode(),
                 "SIZE": str(cf.size),
                 "CONTENT_TYPE": cf.content_type,
             },

nucliadb_utils/storages/settings.py CHANGED
@@ -33,6 +33,9 @@ class Settings(BaseSettings):
     s3_deadletter_bucket: Optional[str] = None
     s3_indexing_bucket: Optional[str] = None
 
+    azure_deadletter_bucket: Optional[str] = None
+    azure_indexing_bucket: Optional[str] = None
+
     local_testing_files: str = os.path.dirname(__file__)
 
 

nucliadb_utils/storages/storage.py CHANGED
@@ -394,6 +394,7 @@ class Storage(abc.ABC, metaclass=abc.ABCMeta):
         key: str,
         payload: bytes,
         filename: str = "payload",
+        # REVIEW: this default may be settings file without content type
         content_type: str = "",
     ):
         """

nucliadb_utils/tests/azure.py CHANGED
@@ -28,6 +28,7 @@ from pytest_docker_fixtures.containers._base import BaseImage  # type: ignore #
 
 from nucliadb_utils.settings import FileBackendConfig, storage_settings
 from nucliadb_utils.storages.azure import AzureStorage
+from nucliadb_utils.storages.settings import settings as extended_storage_settings
 
 images.settings["azurite"] = {
     "image": "mcr.microsoft.com/azure-storage/azurite",
@@ -105,7 +106,7 @@ def azurite() -> Generator[AzuriteFixture, None, None]:
             port=port,
             container=container.container_obj,
             connection_string=get_connection_string(host, port),
-            account_url=f"http://{host}:{port}/devstoreaccount1",
+            account_url=f"https://devstoreaccount1.blob.core.windows.net",
         )
     finally:
         container.stop()
@@ -118,12 +119,18 @@ def azure_storage_settings(azurite: AzuriteFixture) -> Iterator[dict[str, Any]]:
         "azure_account_url": azurite.account_url,
         "azure_connection_string": azurite.connection_string,
     }
+    extended_settings = {
+        "azure_deadletter_bucket": "deadletter",
+        "azure_indexing_bucket": "indexing",
+    }
     with ExitStack() as stack:
         for key, value in settings.items():
             context = patch.object(storage_settings, key, value)
             stack.enter_context(context)
-
-        yield settings
+        for key, value in extended_settings.items():
+            context = patch.object(extended_storage_settings, key, value)
+            stack.enter_context(context)
+        yield settings | extended_settings
 
 
 @pytest.fixture(scope="function")
@@ -132,8 +139,10 @@ async def azure_storage(azurite, azure_storage_settings: dict[str, Any]):
 
     storage = AzureStorage(
         account_url=storage_settings.azure_account_url,
+        kb_account_url=storage_settings.azure_kb_account_url or storage_settings.azure_account_url,
         connection_string=storage_settings.azure_connection_string,
     )
     await storage.initialize()
+    await storage.create_bucket("nidx")
    yield storage
    await storage.finalize()

nucliadb_utils/tests/fixtures.py CHANGED
@@ -23,12 +23,14 @@ from typing import Any, Iterator, Type
 from unittest.mock import Mock
 
 import pytest
+from pytest import FixtureRequest
 from pytest_lazy_fixtures import lazy_fixture
 
 from nucliadb_utils.storages.azure import AzureStorage
 from nucliadb_utils.storages.gcs import GCSStorage
 from nucliadb_utils.storages.local import LocalStorage
 from nucliadb_utils.storages.s3 import S3Storage
+from nucliadb_utils.storages.storage import Storage
 from nucliadb_utils.utilities import Utility, clean_utility, set_utility
 
 
@@ -52,7 +54,7 @@ def hosted_nucliadb():
     nuclia_settings.onprem = original
 
 
-def get_testing_storage_backend():
+def get_testing_storage_backend() -> str:
     """
     Default to gcs for linux users and s3 for macOS users. This is because some
     tests fail on macOS with the gcs backend with a weird nidx error (to be looked into).
@@ -72,7 +74,7 @@ def lazy_storage_fixture():
 
 
 @pytest.fixture(scope="function", params=lazy_storage_fixture())
-async def storage(request):
+def storage(request: FixtureRequest) -> Iterator[Storage]:
     """
     Generic storage fixture that allows us to run the same tests for different storage backends.
     """

nucliadb_utils/tests/nats.py CHANGED
@@ -18,12 +18,16 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import socket
+from typing import AsyncIterator, Iterator
 
 import nats
 import pytest
 from pytest_docker_fixtures import images  # type: ignore
 from pytest_docker_fixtures.containers._base import BaseImage  # type: ignore
 
+from nucliadb_utils.nats import NatsConnectionManager
+from nucliadb_utils.utilities import start_nats_manager, stop_nats_manager
+
 images.settings["nats"] = {
     "image": "nats",
     "version": "2.10.21",
@@ -48,7 +52,7 @@ nats_image = NatsImage()
 
 
 @pytest.fixture(scope="session")
-def natsd():  # pragma: no cover
+def natsd() -> Iterator[str]:
     nats_host, nats_port = nats_image.run()
     print("Started natsd docker")
     yield f"nats://{nats_host}:{nats_port}"
@@ -56,10 +60,17 @@ def natsd():  # pragma: no cover
 
 
 @pytest.fixture(scope="function")
-async def nats_server(natsd: str):
+async def nats_server(natsd: str) -> AsyncIterator[str]:
     yield natsd
 
     # cleanup nats
     nc = await nats.connect(servers=[natsd])
     await nc.drain()
     await nc.close()
+
+
+@pytest.fixture(scope="function")
+async def nats_manager(nats_server: str) -> AsyncIterator[NatsConnectionManager]:
+    ncm = await start_nats_manager("nucliadb_tests", [nats_server], None)
+    yield ncm
+    await stop_nats_manager()
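
A hedged usage sketch of the new fixture; the test body is hypothetical. Pytest injects it by name, starting a connected NatsConnectionManager against the dockerized server before the test and stopping it on teardown:

    async def test_uses_nats(nats_manager: NatsConnectionManager):
        ...  # publish/subscribe through the manager here
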

nucliadb_utils/utilities.py CHANGED
@@ -29,7 +29,6 @@ from typing import TYPE_CHECKING, Any, List, Optional, Union, cast
 from nucliadb_protos.writer_pb2_grpc import WriterStub
 from nucliadb_telemetry.metrics import Counter
 from nucliadb_utils import featureflagging
-from nucliadb_utils.aiopynecone.client import PineconeSession
 from nucliadb_utils.audit.audit import AuditStorage
 from nucliadb_utils.audit.basic import BasicAuditStorage
 from nucliadb_utils.audit.stream import StreamAuditStorage
@@ -85,7 +84,6 @@ class Utility(str, Enum):
     MAINDB_DRIVER = "driver"
     USAGE = "usage"
     ENDECRYPTOR = "endecryptor"
-    PINECONE_SESSION = "pinecone_session"
     NIDX = "nidx"
 
 
@@ -124,7 +122,10 @@ async def _create_storage(gcs_scopes: Optional[List[str]] = None) -> Storage:
 
     azureutil = AzureStorage(
         account_url=storage_settings.azure_account_url,
+        kb_account_url=storage_settings.azure_kb_account_url or storage_settings.azure_account_url,
         connection_string=storage_settings.azure_connection_string,
+        deadletter_bucket=extended_storage_settings.azure_deadletter_bucket,
+        indexing_bucket=extended_storage_settings.azure_indexing_bucket,
     )
 
     logger.info("Configuring Azure Storage")
@@ -364,6 +365,11 @@ async def stop_audit_utility():
 async def start_nats_manager(
     service_name: str, nats_servers: list[str], nats_creds: Optional[str] = None
 ) -> NatsConnectionManager:
+    util = get_utility(Utility.NATS_MANAGER)
+    if util is not None:
+        logger.warning("Warning, nats manager utility was already set, ignoring")
+        return util
+
     nats_manager = NatsConnectionManager(
         service_name=service_name,
         nats_servers=nats_servers,
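
`start_nats_manager` is now idempotent: a repeated call logs a warning and returns the already-registered utility instead of opening a second connection. A sketch, assuming a reachable (hypothetical) server URL:

    from nucliadb_utils.utilities import start_nats_manager, stop_nats_manager

    async def demo() -> None:
        m1 = await start_nats_manager("my-service", ["nats://localhost:4222"])
        m2 = await start_nats_manager("my-service", ["nats://localhost:4222"])
        assert m2 is m1  # the second call returned the existing manager
        await stop_nats_manager()
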
@@ -436,16 +442,3 @@ def get_endecryptor() -> EndecryptorUtility:
         ) from ex
     set_utility(Utility.ENDECRYPTOR, util)
     return util
-
-
-def get_pinecone() -> PineconeSession:
-    util = get_utility(Utility.PINECONE_SESSION)
-    if util is not None:
-        return util
-    util = PineconeSession()
-    set_utility(Utility.PINECONE_SESSION, util)
-    return util
-
-
-def clean_pinecone():
-    clean_utility(Utility.PINECONE_SESSION)

nucliadb_utils-6.8.1.post4957.dist-info/METADATA → nucliadb_utils-6.9.5.post5447.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nucliadb_utils
-Version: 6.8.1.post4957
+Version: 6.9.5.post5447
 Summary: NucliaDB util library
 Author-email: Nuclia <nucliadb@nuclia.com>
 License-Expression: AGPL-3.0-or-later
@@ -8,13 +8,12 @@ Project-URL: Homepage, https://nuclia.com
 Project-URL: Repository, https://github.com/nuclia/nucliadb
 Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Python: <4,>=3.9
+Requires-Python: <4,>=3.10
 Description-Content-Type: text/markdown
 Requires-Dist: pydantic>=2.6
 Requires-Dist: pydantic-settings>=2.2
@@ -27,8 +26,9 @@ Requires-Dist: nats-py[nkeys]>=2.6.0
 Requires-Dist: PyNaCl
 Requires-Dist: pyjwt>=2.4.0
 Requires-Dist: mrflagly>=0.2.9
-Requires-Dist: nucliadb-protos>=6.8.1.post4957
-Requires-Dist: nucliadb-telemetry>=6.8.1.post4957
+Requires-Dist: nidx-protos>=6.9.5.post5447
+Requires-Dist: nucliadb-protos>=6.9.5.post5447
+Requires-Dist: nucliadb-telemetry>=6.9.5.post5447
 Provides-Extra: cache
 Requires-Dist: redis>=4.3.4; extra == "cache"
 Requires-Dist: orjson>=3.6.7; extra == "cache"

nucliadb_utils-6.8.1.post4957.dist-info/RECORD → nucliadb_utils-6.9.5.post5447.dist-info/RECORD RENAMED
@@ -1,25 +1,21 @@
 nucliadb_utils/__init__.py,sha256=EvBCH1iTODe-AgXm48aj4kVUt_Std3PeL8QnwimR5wI,895
 nucliadb_utils/asyncio_utils.py,sha256=h8Y-xpcFFRgNzaiIW0eidz7griAQa7ggbNk34-tAt2c,2888
 nucliadb_utils/authentication.py,sha256=5_b323v2ylJaJvM_0coeSQEtnD-p9IGD-6CPA6IXhik,6471
-nucliadb_utils/const.py,sha256=kgTPXlvVxJB_HPz9GkEigE94UNEkrnMECgrqhPW2Qi4,1537
+nucliadb_utils/const.py,sha256=YtWadXGm044MbwLcfEImNC6skh3e4LKZDu6hjlO0qMU,1521
 nucliadb_utils/debug.py,sha256=Q56Nx9Dp7V2ae3CU2H0ztaZcHTJXdlflPLKLeOPZ170,2436
 nucliadb_utils/exceptions.py,sha256=y_3wk77WLVUtdo-5FtbBsdSkCtK_DsJkdWb5BoPn3qo,1094
-nucliadb_utils/featureflagging.py,sha256=kLIJT-AJe8AqTuL0sY9nn9r5Soax8EvqPangM6UygfM,2380
+nucliadb_utils/featureflagging.py,sha256=ctd9Nqm_nhoedMIV2GC819-cSP5GlkLYXCRE0DbwxYU,2353
 nucliadb_utils/grpc.py,sha256=apu0uePnkGHCAT7GRQ9YZfRYyFj26kJ440i8jitbM3U,3314
 nucliadb_utils/helpers.py,sha256=eed7_E1MKh9eW3CpqOXka3OvLw5C9eJGC_R-1MPYdfY,3336
 nucliadb_utils/nats.py,sha256=U21Cfg36_IHd3ZLXEC4eZ7nZ1Soh_ZNFFwjryNyd2-8,15248
 nucliadb_utils/partition.py,sha256=jBgy4Hu5Iwn4gjbPPcthSykwf-qNx-GcLAIwbzPd1d0,1157
 nucliadb_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nucliadb_utils/run.py,sha256=Es0_Bu5Yc-LWczvwL6gzWqSwC85RjDCk-0oFQAJi9g4,1827
-nucliadb_utils/settings.py,sha256=lZUCliwNKYfk_Tt0KiYeHsT4jRBG0gLAompuHWu9fBI,8233
+nucliadb_utils/settings.py,sha256=H9yKrHPR5emTxai-D4owg4CjE4_-E0qR0HyuHERQNH4,8493
 nucliadb_utils/signals.py,sha256=lo_Mk12NIX5Au--3H3WObvDOXq_OMurql2qiC2TnAao,2676
 nucliadb_utils/store.py,sha256=kQ35HemE0v4_Qg6xVqNIJi8vSFAYQtwI3rDtMsNy62Y,890
 nucliadb_utils/transaction.py,sha256=l3ZvrITYMnAs_fv1OOC-1nDZxWPG5qmbBhzvuC3DUzQ,8039
-nucliadb_utils/utilities.py,sha256=D07dg5Ycm1sYkO65OqxHG9TqDfvBoy91143iln8pOhA,15782
-nucliadb_utils/aiopynecone/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn4bIXMa20ks,835
-nucliadb_utils/aiopynecone/client.py,sha256=MPyHnDXwhukJr7U3CJh7BpsekfSuOkyM4g5b9LLtzc8,22941
-nucliadb_utils/aiopynecone/exceptions.py,sha256=fUErx3ceKQK1MUbOnYcZhIzpNe8UVAptZE9JIRDLXDE,4000
-nucliadb_utils/aiopynecone/models.py,sha256=XkNIZx4bxdbVo9zYVn8IRp70q4DWUMWN79ybGloFj2Q,3492
+nucliadb_utils/utilities.py,sha256=SjPnCwCUH_lWUKSOZQp9vIcTYmLP0yL_UC8nPwlnds4,15817
 nucliadb_utils/audit/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn4bIXMa20ks,835
 nucliadb_utils/audit/audit.py,sha256=xmJJiAGG8rPGADwD9gXN9-QJ80GeGvqmY-kCwEf6PiQ,3598
 nucliadb_utils/audit/basic.py,sha256=fcCYvoFSGVbbB8cSCnm95bN2rf1AAeuWhGfh5no0S-Y,4246
@@ -39,25 +35,25 @@ nucliadb_utils/nuclia_usage/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn
 nucliadb_utils/nuclia_usage/utils/__init__.py,sha256=cp15ZcFnHvpcu_5-aK2A4uUyvuZVV_MJn4bIXMa20ks,835
 nucliadb_utils/nuclia_usage/utils/kb_usage_report.py,sha256=6lLuxCCPQVn3dOuZNL5ThPjl2yws-1TJ_7duhQSWkPU,3934
 nucliadb_utils/storages/__init__.py,sha256=5Qc8AUWiJv9_JbGCBpAn88AIJhwDlm0OPQpg2ZdRL4U,872
-nucliadb_utils/storages/azure.py,sha256=EEUyi-2c69FQz8iPhKixkZDp8xVMgMFGEPaZDVuillc,17429
-nucliadb_utils/storages/exceptions.py,sha256=07Isip18qxkEynGz28AkO2BBC34b_zjL5dEUzeSC2OU,2451
+nucliadb_utils/storages/azure.py,sha256=t0ZL_698NqsBz-Ihwkc79tusfzGcm0BVTB5pqceNlvA,18456
+nucliadb_utils/storages/exceptions.py,sha256=6YhFLf8k0ABy5AVfxIJUo7w6AK0SJjktiyQTwF3gCdg,2344
 nucliadb_utils/storages/gcs.py,sha256=VyT72My34N4pEMmrQc5wdAMNLiuqpYl8OW3d50cJfSA,28222
 nucliadb_utils/storages/local.py,sha256=2aCHpZymORG_dUc1FDq0VFcgQulu0w2pZiUaj9dphFs,11686
 nucliadb_utils/storages/nuclia.py,sha256=vEv94xAT7QM2g80S25QyrOw2pzvP2BAX-ADgZLtuCVc,2097
 nucliadb_utils/storages/object_store.py,sha256=2PueRP5Q3XOuWgKhj6B9Kp2fyBql5np0T400YRUbqn4,4535
-nucliadb_utils/storages/s3.py,sha256=eFFVRgNTIxTz1Hpmd6ofRz9KQhPJAmiyetW4EmWN8EM,21835
-nucliadb_utils/storages/settings.py,sha256=ugCPy1zxBOmA2KosT-4tsjpvP002kg5iQyi42yCGCJA,1285
-nucliadb_utils/storages/storage.py,sha256=2EIgnaCN5XzKpienounOjQ2AX3ANtQA2Xgl6hnMpHr4,21951
+nucliadb_utils/storages/s3.py,sha256=EUqlNoJW32AI6jpETbDla3teYbxlz8RFTfxSdHgWZdo,21878
+nucliadb_utils/storages/settings.py,sha256=mepN3wbLGL0Pv5yI6D-sNjSAFinEWT7aRi6N3eClNDg,1384
+nucliadb_utils/storages/storage.py,sha256=aOJnx6-WX8U3AAqPL_sWPCghIzlr8e3GKGi8z3-mtqw,22024
 nucliadb_utils/storages/utils.py,sha256=F4Iboa_0_bhDQr-JOKD9sGPld_-hKwJW5ptyZdn9Oag,1505
 nucliadb_utils/tests/__init__.py,sha256=Oo9CAE7B0eW5VHn8sHd6o30SQzOWUhktLPRXdlDOleA,1456
 nucliadb_utils/tests/asyncbenchmark.py,sha256=vrX_x9ifCXi18PfNShc23w9x_VUiB_Ph-2nuolh9z3Q,10707
-nucliadb_utils/tests/azure.py,sha256=NvMrPG6gfbpDE0m_aZgaa7eorbmA1r9rhAsAANhMlJk,4494
-nucliadb_utils/tests/fixtures.py,sha256=4lzz-khYvbGzdbT18IG6KKg40f7CVex2q3ho88I-jL8,3799
+nucliadb_utils/tests/azure.py,sha256=rt1KRSYZW1EYhKy4Q0i7IEL9vdoOU6BYw2__S51YfGg,5039
+nucliadb_utils/tests/fixtures.py,sha256=-OeR4NhtXveBqN6sZa7KVaNwyUdsvJUxS_yFmIgF148,3923
 nucliadb_utils/tests/gcs.py,sha256=JNqp5ymeNNU9Ci8rNYTh7-VqP4fjybElhyB3ap7EV1c,4721
 nucliadb_utils/tests/local.py,sha256=z9E11_ol1mu7N8Y6PkjKl-WMPPMl7JqQbDj3uhVa1A0,1933
-nucliadb_utils/tests/nats.py,sha256=RWHjwqq5esuO7OFbP24yYX1cXnpPLcWJwDUdmwCpH28,1897
+nucliadb_utils/tests/nats.py,sha256=rbTaC6kv-u6SdZ7N-XBEGS40XCRiUmFUsKHIYWJfxTs,2325
 nucliadb_utils/tests/s3.py,sha256=kz9ULxrAYLVslZ59I8dtweZ9DJz5R8Ioy2XYrveZzHw,3829
-nucliadb_utils-6.8.1.post4957.dist-info/METADATA,sha256=NAdKMGS6yS8IeT3XEJkW2wuUk8gE1oqR_-_Gwm3Xv1Y,2180
-nucliadb_utils-6.8.1.post4957.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-nucliadb_utils-6.8.1.post4957.dist-info/top_level.txt,sha256=fE3vJtALTfgh7bcAWcNhcfXkNPp_eVVpbKK-2IYua3E,15
-nucliadb_utils-6.8.1.post4957.dist-info/RECORD,,
+nucliadb_utils-6.9.5.post5447.dist-info/METADATA,sha256=6Xl5Km6XkK8mFKdFWW6ebDu_r22nBt-W6YPUGq7WoyI,2174
+nucliadb_utils-6.9.5.post5447.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nucliadb_utils-6.9.5.post5447.dist-info/top_level.txt,sha256=fE3vJtALTfgh7bcAWcNhcfXkNPp_eVVpbKK-2IYua3E,15
+nucliadb_utils-6.9.5.post5447.dist-info/RECORD,,

nucliadb_utils/aiopynecone/__init__.py DELETED (filename inferred from the RECORD entries above; the file contained only the license header)
@@ -1,19 +0,0 @@
-# Copyright (C) 2021 Bosutech XXI S.L.
-#
-# nucliadb is offered under the AGPL v3.0 and as commercial software.
-# For commercial licensing, contact us at info@nuclia.com.
-#
-# AGPL:
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#