scruby 0.10.3__py3-none-any.whl → 0.24.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of scruby might be problematic.

scruby/db.py CHANGED
@@ -1,443 +1,194 @@
- """Creation and management of the database."""
-
- from __future__ import annotations
-
- __all__ = ("Scruby",)
-
- import concurrent.futures
- import contextlib
- import logging
- import zlib
- from collections.abc import Callable
- from pathlib import Path as SyncPath
- from shutil import rmtree
- from typing import Any, Literal, Never, TypeVar, assert_never
-
- import orjson
- from anyio import Path, to_thread
- from pydantic import BaseModel
-
- from scruby import constants
-
- logger = logging.getLogger(__name__)
-
- T = TypeVar("T")
-
-
- class _Meta(BaseModel):
-     """Metadata of Collection."""
-
-     db_root: str
-     model_name: str
-     length_reduction_hash: int
-     counter_documents: int
-
-
- class Scruby[T]:
-     """Creation and management of database.
-
-     Args:
-         class_model: Class of Model (Pydantic).
-     """
-
-     def __init__( # noqa: D107
-         self,
-         class_model: T,
-     ) -> None:
-         self.__meta = _Meta
-         self.__class_model = class_model
-         self.__db_root = constants.DB_ROOT
-         self.__length_reduction_hash = constants.LENGTH_REDUCTION_HASH
-         # The maximum number of keys.
-         match self.__length_reduction_hash:
-             case 0:
-                 self.__max_num_keys = 4294967296
-             case 2:
-                 self.__max_num_keys = 16777216
-             case 4:
-                 self.__max_num_keys = 65536
-             case 6:
-                 self.__max_num_keys = 256
-             case _ as unreachable:
-                 msg: str = f"{unreachable} - Unacceptable value for LENGTH_REDUCTION_HASH."
-                 logger.critical(msg)
-                 assert_never(Never(unreachable))
-         # 1.Create metadata if absent.
-         # 2.Check metadata.
-         self._create_metadata()
-
-     def _create_metadata(self) -> None:
-         """Create metadata for collection if absent.
-
-         This method is for internal use.
-         """
-         key: int = 0
-         key_as_hash: str = f"{key:08x}"[self.__length_reduction_hash :]
-         separated_hash: str = "/".join(list(key_as_hash))
-         branch_path = SyncPath(
-             *(
-                 self.__db_root,
-                 self.__class_model.__name__,
-                 separated_hash,
-             ),
-         )
-         if not branch_path.exists():
-             branch_path.mkdir(parents=True)
-             meta = _Meta(
-                 db_root=self.__db_root,
-                 model_name=self.__class_model.__name__,
-                 length_reduction_hash=self.__length_reduction_hash,
-                 counter_documents=0,
-             )
-             meta_json = meta.model_dump_json()
-             meta_path = SyncPath(*(branch_path, "meta.json"))
-             meta_path.write_text(meta_json, "utf-8")
-
-     async def _get_meta_path(self) -> Path:
-         """Asynchronous method for getting path to metadata of collection.
-
-         This method is for internal use.
-         """
-         key: int = 0
-         key_as_hash: str = f"{key:08x}"[self.__length_reduction_hash :]
-         separated_hash: str = "/".join(list(key_as_hash))
-         return Path(
-             *(
-                 self.__db_root,
-                 self.__class_model.__name__,
-                 separated_hash,
-                 "meta.json",
-             ),
-         )
-
-     async def _get_meta(self) -> _Meta:
-         """Asynchronous method for getting metadata of collection.
-
-         This method is for internal use.
-         """
-         meta_path = await self._get_meta_path()
-         meta_json = await meta_path.read_text()
-         meta: _Meta = self.__meta.model_validate_json(meta_json)
-         return meta
-
-     async def _set_meta(self, meta: _Meta) -> None:
-         """Asynchronous method for updating metadata of collection.
-
-         This method is for internal use.
-         """
-         meta_path = await self._get_meta_path()
-         meta_json = meta.model_dump_json()
-         await meta_path.write_text(meta_json, "utf-8")
-
-     async def _counter_documents(self, step: Literal[1, -1]) -> None:
-         """Management of documents in metadata of collection.
-
-         This method is for internal use.
-         """
-         meta = await self._get_meta()
-         meta.counter_documents += step
-         if meta.counter_documents < 0:
-             meta.counter_documents = 0
-         await self._set_meta(meta)
-
-     async def _get_leaf_path(self, key: str) -> Path:
-         """Asynchronous method for getting path to collection cell by key.
-
-         This method is for internal use.
-
-         Args:
-             key: Key name.
-         """
-         if not isinstance(key, str):
-             logger.error("The key is not a type of `str`.")
-             raise KeyError("The key is not a type of `str`.")
-         if len(key) == 0:
-             logger.error("The key should not be empty.")
-             raise KeyError("The key should not be empty.")
-         # Key to crc32 sum.
-         key_as_hash: str = f"{zlib.crc32(key.encode('utf-8')):08x}"[self.__length_reduction_hash :]
-         # Convert crc32 sum in the segment of path.
-         separated_hash: str = "/".join(list(key_as_hash))
-         # The path of the branch to the database.
-         branch_path: Path = Path(
-             *(
-                 self.__db_root,
-                 self.__class_model.__name__,
-                 separated_hash,
-             ),
-         )
-         # If the branch does not exist, need to create it.
-         if not await branch_path.exists():
-             await branch_path.mkdir(parents=True)
-         # The path to the database cell.
-         leaf_path: Path = Path(*(branch_path, "leaf.json"))
-         return leaf_path
-
-     async def set_key(
-         self,
-         key: str,
-         value: T,
-     ) -> None:
-         """Asynchronous method for adding and updating keys to collection.
-
-         Args:
-             key: Key name.
-             value: Value of key.
-         """
-         # The path to the database cell.
-         leaf_path: Path = await self._get_leaf_path(key)
-         value_json: str = value.model_dump_json()
-         # Write key-value to the database.
-         if await leaf_path.exists():
-             # Add new key or update existing.
-             data_json: bytes = await leaf_path.read_bytes()
-             data: dict = orjson.loads(data_json) or {}
-             if data.get(key) is None:
-                 await self._counter_documents(1)
-             data[key] = value_json
-             await leaf_path.write_bytes(orjson.dumps(data))
-         else:
-             # Add new key to a blank leaf.
-             await leaf_path.write_bytes(orjson.dumps({key: value_json}))
-             await self._counter_documents(1)
-
-     async def get_key(self, key: str) -> T:
-         """Asynchronous method for getting value of key from collection.
-
-         Args:
-             key: Key name.
-         """
-         # The path to the database cell.
-         leaf_path: Path = await self._get_leaf_path(key)
-         # Get value of key.
-         if await leaf_path.exists():
-             data_json: bytes = await leaf_path.read_bytes()
-             data: dict = orjson.loads(data_json) or {}
-             obj: T = self.__class_model.model_validate_json(data[key])
-             return obj
-         msg: str = "`get_key` - The unacceptable key value."
-         logger.error(msg)
-         raise KeyError()
-
-     async def has_key(self, key: str) -> bool:
-         """Asynchronous method for checking presence of key in collection.
-
-         Args:
-             key: Key name.
-         """
-         # The path to the database cell.
-         leaf_path: Path = await self._get_leaf_path(key)
-         # Checking whether there is a key.
-         if await leaf_path.exists():
-             data_json: bytes = await leaf_path.read_bytes()
-             data: dict = orjson.loads(data_json) or {}
-             try:
-                 data[key]
-                 return True
-             except KeyError:
-                 return False
-         return False
-
-     async def delete_key(self, key: str) -> None:
-         """Asynchronous method for deleting key from collection.
-
-         Args:
-             key: Key name.
-         """
-         # The path to the database cell.
-         leaf_path: Path = await self._get_leaf_path(key)
-         # Deleting key.
-         if await leaf_path.exists():
-             data_json: bytes = await leaf_path.read_bytes()
-             data: dict = orjson.loads(data_json) or {}
-             del data[key]
-             await leaf_path.write_bytes(orjson.dumps(data))
-             await self._counter_documents(-1)
-             return
-         msg: str = "`delete_key` - The unacceptable key value."
-         logger.error(msg)
-         raise KeyError()
-
-     @staticmethod
-     async def napalm() -> None:
-         """Asynchronous method for full database deletion.
-
-         The main purpose is tests.
-
-         Warning:
-             - `Be careful, this will remove all keys.`
-         """
-         with contextlib.suppress(FileNotFoundError):
-             await to_thread.run_sync(rmtree, constants.DB_ROOT)
-         return
-
-     @staticmethod
-     def _task_find(
-         key: int,
-         filter_fn: Callable,
-         length_reduction_hash: str,
-         db_root: str,
-         class_model: T,
-     ) -> dict[str, Any] | None:
-         """Task for searching for documents.
-
-         This method is for internal use.
-         """
-         key_as_hash: str = f"{key:08x}"[length_reduction_hash:]
-         separated_hash: str = "/".join(list(key_as_hash))
-         leaf_path: SyncPath = SyncPath(
-             *(
-                 db_root,
-                 class_model.__name__,
-                 separated_hash,
-                 "leaf.json",
-             ),
-         )
-         if leaf_path.exists():
-             data_json: bytes = leaf_path.read_bytes()
-             data: dict[str, str] = orjson.loads(data_json) or {}
-             for _, val in data.items():
-                 doc = class_model.model_validate_json(val)
-                 if filter_fn(doc):
-                     return doc
-         return None
-
-     def find_one(
-         self,
-         filter_fn: Callable,
-         max_workers: int | None = None,
-         timeout: float | None = None,
-     ) -> T | None:
-         """Find a single document matching the filter.
-
-         The search is based on the effect of a quantum loop.
-         The search effectiveness depends on the number of processor threads.
-         Ideally, hundreds and even thousands of threads are required.
-
-         Args:
-             filter_fn: A function that execute the conditions of filtering.
-             max_workers: The maximum number of processes that can be used to
-                 execute the given calls. If None or not given then as many
-                 worker processes will be created as the machine has processors.
-             timeout: The number of seconds to wait for the result if the future isn't done.
-                 If None, then there is no limit on the wait time.
-         """
-         keys: range = range(1, self.__max_num_keys)
-         search_task_fn: Callable = self._task_find
-         length_reduction_hash: int = self.__length_reduction_hash
-         db_root: str = self.__db_root
-         class_model: T = self.__class_model
-         with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
-             for key in keys:
-                 future = executor.submit(
-                     search_task_fn,
-                     key,
-                     filter_fn,
-                     length_reduction_hash,
-                     db_root,
-                     class_model,
-                 )
-                 doc = future.result(timeout)
-                 if doc is not None:
-                     return doc
-         return None
-
-     def find(
-         self,
-         filter_fn: Callable,
-         db_query_docs_limit: int = 1000,
-         max_workers: int | None = None,
-         timeout: float | None = None,
-     ) -> list[T] | None:
-         """Find one or more documents matching the filter.
-
-         The search is based on the effect of a quantum loop.
-         The search effectiveness depends on the number of processor threads.
-         Ideally, hundreds and even thousands of threads are required.
-
-         Args:
-             filter_fn: A function that execute the conditions of filtering.
-             db_query_docs_limit: Limiting the number of request results. By default = 1000.
-             max_workers: The maximum number of processes that can be used to
-                 execute the given calls. If None or not given then as many
-                 worker processes will be created as the machine has processors.
-             timeout: The number of seconds to wait for the result if the future isn't done.
-                 If None, then there is no limit on the wait time.
-         """
-         keys: range = range(1, self.__max_num_keys)
-         search_task_fn: Callable = self._task_find
-         length_reduction_hash: int = self.__length_reduction_hash
-         db_root: str = self.__db_root
-         class_model: T = self.__class_model
-         counter: int = 0
-         with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
-             results = []
-             for key in keys:
-                 if counter == db_query_docs_limit:
-                     break
-                 future = executor.submit(
-                     search_task_fn,
-                     key,
-                     filter_fn,
-                     length_reduction_hash,
-                     db_root,
-                     class_model,
-                 )
-                 doc = future.result(timeout)
-                 if doc is not None:
-                     results.append(doc)
-                     counter += 1
-             return results or None
-
-     def collection_name(self) -> str:
-         """Get collection name."""
-         return self.__class_model.__name__
-
-     def collection_full_name(self) -> str:
-         """Get full name of collection."""
-         return f"{self.__db_root}/{self.__class_model.__name__}"
-
-     async def estimated_document_count(self) -> int:
-         """Get an estimate of the number of documents in this collection using collection metadata."""
-         meta = await self._get_meta()
-         return meta.counter_documents
-
-     def count_documents(
-         self,
-         filter_fn: Callable,
-         max_workers: int | None = None,
-         timeout: float | None = None,
-     ) -> int:
-         """Count the number of documents a matching the filter in this collection.
-
-         The search is based on the effect of a quantum loop.
-         The search effectiveness depends on the number of processor threads.
-         Ideally, hundreds and even thousands of threads are required.
-
-         Args:
-             filter_fn: A function that execute the conditions of filtering.
-             max_workers: The maximum number of processes that can be used to
-                 execute the given calls. If None or not given then as many
-                 worker processes will be created as the machine has processors.
-             timeout: The number of seconds to wait for the result if the future isn't done.
-                 If None, then there is no limit on the wait time.
-         """
-         keys: range = range(1, self.__max_num_keys)
-         search_task_fn: Callable = self._task_find
-         length_reduction_hash: int = self.__length_reduction_hash
-         db_root: str = self.__db_root
-         class_model: T = self.__class_model
-         counter: int = 0
-         with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
-             for key in keys:
-                 future = executor.submit(
-                     search_task_fn,
-                     key,
-                     filter_fn,
-                     length_reduction_hash,
-                     db_root,
-                     class_model,
-                 )
-                 if future.result(timeout) is not None:
-                     counter += 1
-         return counter
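
Note on the storage scheme, which is unchanged between these versions apart from renamed identifiers: `_get_leaf_path` maps a key to a directory branch by taking the crc32 checksum of the key, rendering it as 8 hex digits, dropping the leftmost `LENGTH_REDUCTION_HASH` digits (`HASH_REDUCE_LEFT` in 0.24.4), and turning each remaining digit into one directory level. Reductions of 0, 2, 4, and 6 digits therefore give 16**8 = 4294967296, 16**6 = 16777216, 16**4 = 65536, and 16**2 = 256 branches, exactly the values in the `match` arms above. A minimal standalone sketch of that mapping (illustrative only; `leaf_path_segments` is not part of the package API):

import zlib

def leaf_path_segments(key: str, hash_reduce_left: int = 0) -> str:
    # crc32 of the UTF-8 key, rendered as 8 lowercase hex digits,
    # with the leftmost `hash_reduce_left` digits dropped.
    key_as_hash = f"{zlib.crc32(key.encode('utf-8')):08x}"[hash_reduce_left:]
    # Each remaining hex digit becomes one directory level.
    return "/".join(key_as_hash)

print(leaf_path_segments("hello"))     # "3/6/1/0/a/6/8/6" (16**8 possible branches)
print(leaf_path_segments("hello", 4))  # "a/6/8/6" (16**4 = 65536 branches)
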
+ """Creation and management of the database."""
+
+ from __future__ import annotations
+
+ __all__ = ("Scruby",)
+
+ import contextlib
+ import logging
+ import zlib
+ from shutil import rmtree
+ from typing import Any, Literal, Never, TypeVar, assert_never
+
+ from anyio import Path
+ from pydantic import BaseModel
+
+ from scruby import constants, mixins
+
+ logger = logging.getLogger(__name__)
+
+ T = TypeVar("T")
+
+
+ class _Meta(BaseModel):
+     """Metadata of Collection."""
+
+     db_root: str
+     collection_name: str
+     hash_reduce_left: int
+     max_branch_number: int
+     counter_documents: int
+
+
+ class Scruby[T](
+     mixins.Keys,
+     mixins.Find,
+     mixins.CustomTask,
+     mixins.Collection,
+     mixins.Count,
+     mixins.Delete,
+     mixins.Update,
+ ):
+     """Creation and management of database."""
+
+     def __init__( # noqa: D107
+         self,
+     ) -> None:
+         super().__init__()
+         self._meta = _Meta
+         self._db_root = constants.DB_ROOT
+         self._hash_reduce_left = constants.HASH_REDUCE_LEFT
+         # The maximum number of branches.
+         match self._hash_reduce_left:
+             case 0:
+                 self._max_branch_number = 4294967296
+             case 2:
+                 self._max_branch_number = 16777216
+             case 4:
+                 self._max_branch_number = 65536
+             case 6:
+                 self._max_branch_number = 256
+             case _ as unreachable:
+                 msg: str = f"{unreachable} - Unacceptable value for HASH_REDUCE_LEFT."
+                 logger.critical(msg)
+                 assert_never(Never(unreachable))
+
+     @classmethod
+     async def create(cls, class_model: T) -> Any:
+         """Get an object to access a collection.
+
+         Args:
+             class_model: Class of Model (Pydantic).
+
+         Returns:
+             Instance of Scruby for access a collection.
+         """
+         assert BaseModel in class_model.__bases__, "`class_model` does not contain the base class `pydantic.BaseModel`!"
+         instance = cls()
+         instance.__dict__["_class_model"] = class_model
+         # Caching a pati for metadata.
+         # The zero branch is reserved for metadata.
+         branch_number: int = 0
+         branch_number_as_hash: str = f"{branch_number:08x}"[constants.HASH_REDUCE_LEFT :]
+         separated_hash: str = "/".join(list(branch_number_as_hash))
+         meta_dir_path_tuple = (
+             constants.DB_ROOT,
+             class_model.__name__,
+             separated_hash,
+         )
+         instance.__dict__["_meta_path"] = Path(
+             *meta_dir_path_tuple,
+             "meta.json",
+         )
+         # Create metadata for collection, if missing.
+         branch_path = Path(*meta_dir_path_tuple)
+         if not await branch_path.exists():
+             await branch_path.mkdir(parents=True)
+             meta = _Meta(
+                 db_root=constants.DB_ROOT,
+                 collection_name=class_model.__name__,
+                 hash_reduce_left=constants.HASH_REDUCE_LEFT,
+                 max_branch_number=instance.__dict__["_max_branch_number"],
+                 counter_documents=0,
+             )
+             meta_json = meta.model_dump_json()
+             meta_path = Path(*(branch_path, "meta.json"))
+             await meta_path.write_text(meta_json, "utf-8")
+         return instance
+
+     async def get_meta(self) -> _Meta:
+         """Asynchronous method for getting metadata of collection.
+
+         This method is for internal use.
+
+         Returns:
+             Metadata object.
+         """
+         meta_json = await self._meta_path.read_text()
+         meta: _Meta = self._meta.model_validate_json(meta_json)
+         return meta
+
+     async def _set_meta(self, meta: _Meta) -> None:
+         """Asynchronous method for updating metadata of collection.
+
+         This method is for internal use.
+
+         Returns:
+             None.
+         """
+         meta_json = meta.model_dump_json()
+         await self._meta_path.write_text(meta_json, "utf-8")
+
+     async def _counter_documents(self, step: Literal[1, -1]) -> None:
+         """Asynchronous method for management of documents in metadata of collection.
+
+         This method is for internal use.
+
+         Returns:
+             None.
+         """
+         meta_path = self._meta_path
+         meta_json = await meta_path.read_text("utf-8")
+         meta: _Meta = self._meta.model_validate_json(meta_json)
+         meta.counter_documents += step
+         meta_json = meta.model_dump_json()
+         await meta_path.write_text(meta_json, "utf-8")
+
+     async def _get_leaf_path(self, key: str) -> Path:
+         """Asynchronous method for getting path to collection cell by key.
+
+         This method is for internal use.
+
+         Args:
+             key: Key name.
+
+         Returns:
+             Path to cell of collection.
+         """
+         if len(key) == 0:
+             logger.error("The key should not be empty.")
+             raise KeyError("The key should not be empty.")
+         # Key to crc32 sum.
+         key_as_hash: str = f"{zlib.crc32(key.encode('utf-8')):08x}"[self._hash_reduce_left :]
+         # Convert crc32 sum in the segment of path.
+         separated_hash: str = "/".join(list(key_as_hash))
+         # The path of the branch to the database.
+         branch_path: Path = Path(
+             *(
+                 self._db_root,
+                 self._class_model.__name__,
+                 separated_hash,
+             ),
+         )
+         # If the branch does not exist, need to create it.
+         if not await branch_path.exists():
+             await branch_path.mkdir(parents=True)
+         # The path to the database cell.
+         leaf_path: Path = Path(*(branch_path, "leaf.json"))
+         return leaf_path
+
+     @staticmethod
+     def napalm() -> None:
+         """Method for full database deletion.
+
+         The main purpose is tests.
+
+         Warning:
+             - `Be careful, this will remove all keys.`
+
+         Returns:
+             None.
+         """
+         with contextlib.suppress(FileNotFoundError):
+             rmtree(constants.DB_ROOT)
+         return
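
Taken together, the user-visible change in this file is that collection handles are no longer built synchronously: 0.10.3 created metadata inside `Scruby(class_model).__init__`, while 0.24.4 moves construction behind the async factory `Scruby.create(class_model)`, which also asserts that the model directly subclasses `pydantic.BaseModel`, and delegates key, find, count, delete, and update operations to mixins not shown in this diff. A hedged before-and-after sketch, assuming a pydantic `User` model and that the key methods from 0.10.3 keep their signatures in `mixins.Keys`:

import asyncio

from pydantic import BaseModel
from scruby import Scruby

class User(BaseModel):
    name: str

async def main() -> None:
    # 0.10.3: db = Scruby(User)  (synchronous constructor; metadata created in __init__)
    # 0.24.4: construction is an awaitable factory classmethod.
    db = await Scruby.create(User)
    await db.set_key("john", User(name="John"))  # assumed to live in mixins.Keys
    user = await db.get_key("john")
    print(user.name)

asyncio.run(main())
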