scruby 0.26.0__py3-none-any.whl → 0.28.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

scruby/__init__.py CHANGED
@@ -1,16 +1,11 @@
  #
- # .|'''|                        '||
- # ||                             ||
- # `|'''|, .|'', '||''| '||  ||`  ||''|, '||  ||`
- #  .   || ||     ||     ||  ||   ||  ||  `|..||
- #  |...|' `|..' .||.    `|..'|. .||..|'      ||
- #                                         ,  |'
- #                                          ''
- #
+ # .dP"Y8  dP""b8 88""Yb 88   88 88""Yb Yb  dP
+ # `Ybo." dP   `" 88__dP 88   88 88__dP  YbdP
+ # o.`Y8b Yb      88"Yb  Y8   8P 88""Yb   8P
+ # 8bodP'  YboodP 88  Yb `YbodP' 88oodP  dP
  #
  # Copyright (c) 2025 Gennady Kostyunin
- # Scruby is free software under terms of the MIT License.
- # Repository https://github.com/kebasyaty/scruby
+ # SPDX-License-Identifier: MIT
  #
  """Asynchronous library for building and managing a hybrid database, by scheme of key-value.

@@ -30,14 +25,10 @@ requires a large number of processor threads.

  from __future__ import annotations

- __all__ = ("Scruby",)
-
- import logging
+ __all__ = (
+ "settings",
+ "Scruby",
+ )

+ from scruby import settings
  from scruby.db import Scruby
-
- logging.basicConfig(
- level=logging.INFO,
- datefmt="%Y-%m-%d %H:%M:%S",
- format="[%(asctime)s.%(msecs)03d] %(module)10s:%(lineno)-3d %(levelname)-7s - %(message)s",
- )
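Upgrade note: 0.28.x no longer calls `logging.basicConfig()` at import time, so Scruby's log messages now follow the host application's logging configuration. A minimal sketch for keeping the 0.26.0 output (the format string is copied from the removed lines above; running this in your own entry point is an assumption, not something the package does for you):

```python
import logging

# Reproduces the logging setup that scruby/__init__.py performed in 0.26.0.
logging.basicConfig(
    level=logging.INFO,
    datefmt="%Y-%m-%d %H:%M:%S",
    format="[%(asctime)s.%(msecs)03d] %(module)10s:%(lineno)-3d %(levelname)-7s - %(message)s",
)
```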
scruby/aggregation.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Aggregation classes."""

  from __future__ import annotations
scruby/db.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Creation and management of the database."""

  from __future__ import annotations
@@ -14,9 +18,7 @@ from typing import Any, Literal, Never, assert_never
  from anyio import Path
  from pydantic import BaseModel

- from scruby import constants, mixins
-
- logger = logging.getLogger(__name__)
+ from scruby import mixins, settings


  class _Meta(BaseModel):
@@ -30,7 +32,7 @@ class _Meta(BaseModel):


  class Scruby(
- mixins.Docs,
+ mixins.Keys,
  mixins.Find,
  mixins.CustomTask,
  mixins.Collection,
@@ -45,8 +47,9 @@ class Scruby(
  ) -> None:
  super().__init__()
  self._meta = _Meta
- self._db_root = constants.DB_ROOT
- self._hash_reduce_left = constants.HASH_REDUCE_LEFT
+ self._db_root = settings.DB_ROOT
+ self._hash_reduce_left = settings.HASH_REDUCE_LEFT
+ self._max_workers = settings.MAX_WORKERS
  # The maximum number of branches.
  match self._hash_reduce_left:
  case 0:
@@ -59,7 +62,7 @@ class Scruby(
  self._max_branch_number = 256
  case _ as unreachable:
  msg: str = f"{unreachable} - Unacceptable value for HASH_REDUCE_LEFT."
- logger.critical(msg)
+ logging.critical(msg)
  assert_never(Never(unreachable)) # pyrefly: ignore[not-callable]

  @classmethod
@@ -79,10 +82,10 @@ class Scruby(
  # Caching a pati for metadata.
  # The zero branch is reserved for metadata.
  branch_number: int = 0
- branch_number_as_hash: str = f"{branch_number:08x}"[constants.HASH_REDUCE_LEFT :]
+ branch_number_as_hash: str = f"{branch_number:08x}"[settings.HASH_REDUCE_LEFT :]
  separated_hash: str = "/".join(list(branch_number_as_hash))
  meta_dir_path_tuple = (
- constants.DB_ROOT,
+ settings.DB_ROOT,
  class_model.__name__,
  separated_hash,
  )
@@ -95,9 +98,9 @@ class Scruby(
  if not await branch_path.exists():
  await branch_path.mkdir(parents=True)
  meta = _Meta(
- db_root=constants.DB_ROOT,
+ db_root=settings.DB_ROOT,
  collection_name=class_model.__name__,
- hash_reduce_left=constants.HASH_REDUCE_LEFT,
+ hash_reduce_left=settings.HASH_REDUCE_LEFT,
  max_branch_number=instance.__dict__["_max_branch_number"],
  counter_documents=0,
  )
@@ -123,6 +126,9 @@ class Scruby(

  This method is for internal use.

+ Args:
+ meta (_Meta): Metadata of Collection.
+
  Returns:
  None.
  """
@@ -134,6 +140,9 @@ class Scruby(

  This method is for internal use.

+ Args:
+ step (Literal[1, -1]): Number of documents added or removed.
+
  Returns:
  None.
  """
@@ -157,7 +166,7 @@ class Scruby(
  """
  if not isinstance(key, str):
  msg = "The key is not a string."
- logger.error(msg)
+ logging.error(msg)
  raise KeyError(msg)
  # Prepare key.
  # Removes spaces at the beginning and end of a string.
@@ -166,7 +175,7 @@ class Scruby(
  # Check the key for an empty string.
  if len(prepared_key) == 0:
  msg = "The key should not be empty."
- logger.error(msg)
+ logging.error(msg)
  raise KeyError(msg)
  # Key to crc32 sum.
  key_as_hash: str = f"{zlib.crc32(prepared_key.encode('utf-8')):08x}"[self._hash_reduce_left :]
@@ -200,5 +209,5 @@ class Scruby(
  None.
  """
  with contextlib.suppress(FileNotFoundError):
- rmtree(constants.DB_ROOT)
+ rmtree(settings.DB_ROOT)
  return
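The `_get_leaf_path` hunks above also show the storage layout these renames feed into: a key is stripped, hashed with CRC32, shortened from the left by `HASH_REDUCE_LEFT` hex digits, and each remaining digit becomes a directory level under the collection. A rough sketch of that mapping, reconstructed from the hunks (the collection name and key are illustrative):

```python
import zlib

DB_ROOT = "ScrubyDB"  # settings.DB_ROOT default
HASH_REDUCE_LEFT = 6  # keep 8 - 6 = 2 hex digits => 256 branches

key = "  john@example.net  ".strip()  # keys are stripped before hashing
key_as_hash = f"{zlib.crc32(key.encode('utf-8')):08x}"[HASH_REDUCE_LEFT:]
separated_hash = "/".join(key_as_hash)  # one directory per hex digit
# Leaf file holding the document: ScrubyDB/User/<d1>/<d2>/leaf.json
print(f"{DB_ROOT}/User/{separated_hash}/leaf.json")
```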
scruby/errors.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Scruby Exceptions."""

  from __future__ import annotations
scruby/mixins/__init__.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Mixins."""

  from __future__ import annotations
@@ -8,7 +12,7 @@ __all__ = (
  "CustomTask",
  "Delete",
  "Find",
- "Docs",
+ "Keys",
  "Update",
  )

@@ -16,6 +20,6 @@ from scruby.mixins.collection import Collection
  from scruby.mixins.count import Count
  from scruby.mixins.custom_task import CustomTask
  from scruby.mixins.delete import Delete
- from scruby.mixins.docs import Docs
  from scruby.mixins.find import Find
+ from scruby.mixins.keys import Keys
  from scruby.mixins.update import Update
scruby/mixins/collection.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Methods for working with collections."""

  from __future__ import annotations
@@ -5,16 +9,13 @@ from __future__ import annotations
  __all__ = ("Collection",)

  from shutil import rmtree
- from typing import TypeVar

  from anyio import Path, to_thread

- from scruby import constants
+ from scruby import settings

- T = TypeVar("T")

-
- class Collection[T]:
+ class Collection:
  """Methods for working with collections."""

  def collection_name(self) -> str:
@@ -28,7 +29,7 @@ class Collection[T]:
  @staticmethod
  async def collection_list() -> list[str]:
  """Get collection list."""
- target_directory = Path(constants.DB_ROOT)
+ target_directory = Path(settings.DB_ROOT)
  # Get all entries in the directory
  all_entries = Path.iterdir(target_directory)
  directory_names: list[str] = [entry.name async for entry in all_entries]
@@ -44,6 +45,6 @@ class Collection[T]:
  Returns:
  None.
  """
- target_directory = f"{constants.DB_ROOT}/{name}"
+ target_directory = f"{settings.DB_ROOT}/{name}"
  await to_thread.run_sync(rmtree, target_directory) # pyrefly: ignore[bad-argument-type]
  return
scruby/mixins/count.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Methods for counting the number of documents."""

  from __future__ import annotations
@@ -6,12 +10,10 @@ __all__ = ("Count",)

  import concurrent.futures
  from collections.abc import Callable
- from typing import TypeVar
+ from typing import Any

- T = TypeVar("T")

-
- class Count[T]:
+ class Count:
  """Methods for counting the number of documents."""

  async def estimated_document_count(self) -> int:
@@ -26,30 +28,27 @@ class Count[T]:
  async def count_documents(
  self,
  filter_fn: Callable,
- max_workers: int | None = None,
  ) -> int:
  """Count the number of documents a matching the filter in this collection.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.

  Args:
  filter_fn: A function that execute the conditions of filtering.
- max_workers: The maximum number of processes that can be used to
- execute the given calls. If None or not given then as many
- worker processes will be created as the machine has processors.

  Returns:
  The number of documents.
  """
- branch_numbers: range = range(1, self._max_branch_number)
+ # Variable initialization
  search_task_fn: Callable = self._task_find
+ branch_numbers: range = range(1, self._max_branch_number)
  hash_reduce_left: int = self._hash_reduce_left
  db_root: str = self._db_root
- class_model: T = self._class_model
+ class_model: Any = self._class_model
  counter: int = 0
- with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
+ # Run quantum loop
+ with concurrent.futures.ThreadPoolExecutor(self._max_workers) as executor:
  for branch_number in branch_numbers:
  future = executor.submit(
  search_task_fn,
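A breaking change repeated in this and the following mixins: the per-call `max_workers` parameter is gone from `count_documents` (and, below, from `delete_many`, `find_one`, `find_many`, and the bulk update method); the executor size now comes from `settings.MAX_WORKERS`, captured as `self._max_workers` at construction. A hedged migration sketch (the `User` model and filter are illustrative, not from the package):

```python
import anyio
from pydantic import BaseModel
from scruby import Scruby, settings

settings.MAX_WORKERS = 8  # 0.28.x: one global setting; None lets the pool decide


class User(BaseModel):
    key: str
    name: str


async def main() -> None:
    user_coll = await Scruby.collection(User)
    # 0.26.0: await user_coll.count_documents(filter_fn, max_workers=8)
    # 0.28.x: the pool size is read from settings.MAX_WORKERS instead.
    print(await user_coll.count_documents(lambda doc: True))


anyio.run(main)
```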
scruby/mixins/custom_task.py CHANGED
@@ -1,22 +1,21 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Quantum methods for running custom tasks."""

  from __future__ import annotations

  __all__ = ("CustomTask",)

- import logging
  from collections.abc import Callable
- from typing import Any, TypeVar
+ from typing import Any

  import orjson
  from anyio import Path

- logger = logging.getLogger(__name__)

- T = TypeVar("T")
-
-
- class CustomTask[T]:
+ class CustomTask:
  """Quantum methods for running custom tasks."""

  @staticmethod
@@ -24,7 +23,7 @@ class CustomTask[T]:
  branch_number: int,
  hash_reduce_left: int,
  db_root: str,
- class_model: T,
+ class_model: Any,
  ) -> list[Any]:
  """Get documents for custom task.

@@ -56,7 +55,6 @@ class CustomTask[T]:

  This method running a task created on the basis of a quantum loop.
  Effectiveness running task depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.

  Args:
  custom_task_fn: A function that execute the custom task.
scruby/mixins/delete.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Methods for deleting documents."""

  from __future__ import annotations
@@ -5,19 +9,14 @@ from __future__ import annotations
  __all__ = ("Delete",)

  import concurrent.futures
- import logging
  from collections.abc import Callable
- from typing import TypeVar
+ from typing import Any

  import orjson
  from anyio import Path

- logger = logging.getLogger(__name__)

- T = TypeVar("T")
-
-
- class Delete[T]:
+ class Delete:
  """Methods for deleting documents."""

  @staticmethod
@@ -26,7 +25,7 @@ class Delete[T]:
  filter_fn: Callable,
  hash_reduce_left: int,
  db_root: str,
- class_model: T,
+ class_model: Any,
  ) -> int:
  """Task for find and delete documents.

@@ -62,30 +61,27 @@ class Delete[T]:
  async def delete_many(
  self,
  filter_fn: Callable,
- max_workers: int | None = None,
  ) -> int:
  """Delete one or more documents matching the filter.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.

  Args:
  filter_fn: A function that execute the conditions of filtering.
- max_workers: The maximum number of processes that can be used to
- execute the given calls. If None or not given then as many
- worker processes will be created as the machine has processors.

  Returns:
  The number of deleted documents.
  """
- branch_numbers: range = range(1, self._max_branch_number)
+ # Variable initialization
  search_task_fn: Callable = self._task_delete
+ branch_numbers: range = range(1, self._max_branch_number)
  hash_reduce_left: int = self._hash_reduce_left
  db_root: str = self._db_root
- class_model: T = self._class_model
+ class_model: Any = self._class_model
  counter: int = 0
- with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
+ # Run quantum loop
+ with concurrent.futures.ThreadPoolExecutor(self._max_workers) as executor:
  for branch_number in branch_numbers:
  future = executor.submit(
  search_task_fn,
scruby/mixins/find.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Quantum methods for searching documents."""

  from __future__ import annotations
@@ -5,19 +9,14 @@ from __future__ import annotations
  __all__ = ("Find",)

  import concurrent.futures
- import logging
  from collections.abc import Callable
- from typing import TypeVar
+ from typing import Any

  import orjson
  from anyio import Path

- logger = logging.getLogger(__name__)

- T = TypeVar("T")
-
-
- class Find[T]:
+ class Find:
  """Quantum methods for searching documents."""

  @staticmethod
@@ -26,8 +25,8 @@ class Find[T]:
  filter_fn: Callable,
  hash_reduce_left: str,
  db_root: str,
- class_model: T,
- ) -> list[T] | None:
+ class_model: Any,
+ ) -> list[Any] | None:
  """Task for find documents.

  This method is for internal use.
@@ -35,6 +34,7 @@ class Find[T]:
  Returns:
  List of documents or None.
  """
+ # Variable initialization
  branch_number_as_hash: str = f"{branch_number:08x}"[hash_reduce_left:]
  separated_hash: str = "/".join(list(branch_number_as_hash))
  leaf_path: Path = Path(
@@ -45,7 +45,7 @@ class Find[T]:
  "leaf.json",
  ),
  )
- docs: list[T] = []
+ docs: list[Any] = []
  if await leaf_path.exists():
  data_json: bytes = await leaf_path.read_bytes()
  data: dict[str, str] = orjson.loads(data_json) or {}
@@ -58,29 +58,26 @@ class Find[T]:
  async def find_one(
  self,
  filter_fn: Callable,
- max_workers: int | None = None,
- ) -> T | None:
- """Finds a single document matching the filter.
+ ) -> Any | None:
+ """Find one document matching the filter.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.

  Args:
- filter_fn: A function that execute the conditions of filtering.
- max_workers: The maximum number of processes that can be used to
- execute the given calls. If None or not given then as many
- worker processes will be created as the machine has processors.
+ filter_fn (Callable): A function that execute the conditions of filtering.

  Returns:
  Document or None.
  """
- branch_numbers: range = range(1, self._max_branch_number)
+ # Variable initialization
  search_task_fn: Callable = self._task_find
+ branch_numbers: range = range(1, self._max_branch_number)
  hash_reduce_left: int = self._hash_reduce_left
  db_root: str = self._db_root
- class_model: T = self._class_model
- with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
+ class_model: Any = self._class_model
+ # Run quantum loop
+ with concurrent.futures.ThreadPoolExecutor(self._max_workers) as executor:
  for branch_number in branch_numbers:
  future = executor.submit(
  search_task_fn,
@@ -97,36 +94,40 @@ class Find[T]:

  async def find_many(
  self,
- filter_fn: Callable,
+ filter_fn: Callable = lambda _: True,
  limit_docs: int = 1000,
- max_workers: int | None = None,
- ) -> list[T] | None:
- """Finds one or more documents matching the filter.
+ page_number: int = 1,
+ ) -> list[Any] | None:
+ """Find many documents matching the filter.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.

  Args:
- filter_fn: A function that execute the conditions of filtering.
- limit_docs: Limiting the number of documents. By default = 1000.
- max_workers: The maximum number of processes that can be used to
- execute the given calls. If None or not given then as many
- worker processes will be created as the machine has processors.
+ filter_fn (Callable): A function that execute the conditions of filtering.
+ By default it searches for all documents.
+ limit_docs (int): Limiting the number of documents. By default = 1000.
+ page_number (int): For pagination output. By default = 1.
+ Number of documents per page = limit_docs.

  Returns:
  List of documents or None.
  """
- branch_numbers: range = range(1, self._max_branch_number)
+ # The `page_number` parameter must not be less than one
+ assert page_number > 0, "`find_many` => The `page_number` parameter must not be less than one."
+ # Variable initialization
  search_task_fn: Callable = self._task_find
+ branch_numbers: range = range(1, self._max_branch_number)
  hash_reduce_left: int = self._hash_reduce_left
  db_root: str = self._db_root
- class_model: T = self._class_model
+ class_model: Any = self._class_model
  counter: int = 0
- result: list[T] = []
- with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
+ number_docs_skippe: int = limit_docs * (page_number - 1) if page_number > 1 else 0
+ result: list[Any] = []
+ # Run quantum loop
+ with concurrent.futures.ThreadPoolExecutor(self._max_workers) as executor:
  for branch_number in branch_numbers:
- if counter >= limit_docs:
+ if number_docs_skippe == 0 and counter >= limit_docs:
  return result[:limit_docs]
  future = executor.submit(
  search_task_fn,
@@ -139,8 +140,11 @@ class Find[T]:
  docs = await future.result()
  if docs is not None:
  for doc in docs:
- if counter >= limit_docs:
- return result[:limit_docs]
- result.append(doc)
- counter += 1
+ if number_docs_skippe == 0:
+ if counter >= limit_docs:
+ return result[:limit_docs]
+ result.append(doc)
+ counter += 1
+ else:
+ number_docs_skippe -= 1
  return result or None
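The pagination added above is offset-based: `find_many` first discards `limit_docs * (page_number - 1)` matching documents (the `number_docs_skippe` countdown), then collects up to `limit_docs` more, so each page re-scans the branches from the start. A small sketch of just that arithmetic (a standalone illustration, not package code):

```python
# Mirrors find_many's skip arithmetic in 0.28.x.
def page_window(limit_docs: int, page_number: int) -> tuple[int, int]:
    assert page_number > 0, "The `page_number` parameter must not be less than one."
    skipped = limit_docs * (page_number - 1) if page_number > 1 else 0
    return skipped, skipped + limit_docs  # matches [skipped, skipped + limit_docs)


print(page_window(5, 1))  # (0, 5)   page 1 -> matches 1-5
print(page_window(5, 2))  # (5, 10)  page 2 -> matches 6-10
```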
scruby/mixins/docs.py → scruby/mixins/keys.py RENAMED
@@ -1,8 +1,12 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Methods for working with keys."""

  from __future__ import annotations

- __all__ = ("Docs",)
+ __all__ = ("Keys",)

  import logging
  from typing import Any
@@ -14,11 +18,9 @@ from scruby.errors import (
  KeyNotExistsError,
  )

- logger = logging.getLogger(__name__)

-
- class Docs:
- """Methods for working with document."""
+ class Keys:
+ """Methods for working with keys."""

  async def add_doc(self, doc: Any) -> None:
  """Asynchronous method for adding document to collection.
@@ -36,7 +38,7 @@ class Docs:
  msg = (
  f"(add_doc) Parameter `doc` => Model `{doc_class_name}` does not match collection `{collection_name}`!"
  )
- logger.error(msg)
+ logging.error(msg)
  raise TypeError(msg)
  # The path to cell of collection.
  leaf_path, prepared_key = await self._get_leaf_path(doc.key)
@@ -53,7 +55,7 @@ class Docs:
  await leaf_path.write_bytes(orjson.dumps(data))
  else:
  err = KeyAlreadyExistsError()
- logger.error(err.message)
+ logging.error(err.message)
  raise err
  else:
  # Add new document to a blank leaf.
@@ -77,7 +79,7 @@ class Docs:
  f"(update_doc) Parameter `doc` => Model `{doc_class_name}` "
  f"does not match collection `{collection_name}`!"
  )
- logger.error(msg)
+ logging.error(msg)
  raise TypeError(msg)
  # The path to cell of collection.
  leaf_path, prepared_key = await self._get_leaf_path(doc.key)
@@ -93,10 +95,10 @@ class Docs:
  await leaf_path.write_bytes(orjson.dumps(data))
  except KeyError:
  err = KeyNotExistsError()
- logger.error(err.message)
+ logging.error(err.message)
  raise err from None
  else:
- logger.error("The key not exists.")
+ logging.error("The key not exists.")
  raise KeyError()

  async def get_key(self, key: str) -> Any:
@@ -117,7 +119,7 @@ class Docs:
  obj: Any = self._class_model.model_validate_json(data[prepared_key])
  return obj
  msg: str = "`get_key` - The unacceptable key value."
- logger.error(msg)
+ logging.error(msg)
  raise KeyError()

  async def has_key(self, key: str) -> bool:
@@ -162,5 +164,5 @@ class Docs:
  await self._counter_documents(-1)
  return
  msg: str = "`delete_key` - The unacceptable key value."
- logger.error(msg)
+ logging.error(msg)
  raise KeyError()
scruby/mixins/update.py CHANGED
@@ -1,3 +1,7 @@
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
  """Methods for updating documents."""

  from __future__ import annotations
@@ -5,19 +9,14 @@ from __future__ import annotations
  __all__ = ("Update",)

  import concurrent.futures
- import logging
  from collections.abc import Callable
- from typing import Any, TypeVar
+ from typing import Any

  import orjson
  from anyio import Path

- logger = logging.getLogger(__name__)

- T = TypeVar("T")
-
-
- class Update[T]:
+ class Update:
  """Methods for updating documents."""

  @staticmethod
@@ -26,7 +25,7 @@ class Update[T]:
  filter_fn: Callable,
  hash_reduce_left: str,
  db_root: str,
- class_model: T,
+ class_model: Any,
  new_data: dict[str, Any],
  ) -> int:
  """Task for find documents.
@@ -65,31 +64,28 @@ class Update[T]:
  self,
  filter_fn: Callable,
  new_data: dict[str, Any],
- max_workers: int | None = None,
  ) -> int:
  """Updates one or more documents matching the filter.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.

  Args:
  filter_fn: A function that execute the conditions of filtering.
  new_data: New data for the fields that need to be updated.
- max_workers: The maximum number of processes that can be used to
- execute the given calls. If None or not given then as many
- worker processes will be created as the machine has processors.

  Returns:
  The number of updated documents.
  """
- branch_numbers: range = range(1, self._max_branch_number)
+ # Variable initialization
  update_task_fn: Callable = self._task_update
+ branch_numbers: range = range(1, self._max_branch_number)
  hash_reduce_left: int = self._hash_reduce_left
  db_root: str = self._db_root
- class_model: T = self._class_model
+ class_model: Any = self._class_model
  counter: int = 0
- with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
+ # Run quantum loop
+ with concurrent.futures.ThreadPoolExecutor(self._max_workers) as executor:
  for branch_number in branch_numbers:
  future = executor.submit(
  update_task_fn,
scruby/constants.py → scruby/settings.py RENAMED
@@ -1,6 +1,10 @@
- """Constant variables.
+ # Scruby - Asynchronous library for building and managing a hybrid database, by scheme of key-value.
+ # Copyright (c) 2025 Gennady Kostyunin
+ # SPDX-License-Identifier: MIT
+ #
+ """Database settings.

- The module contains the following variables:
+ The module contains the following parameters:

  - `DB_ROOT` - Path to root directory of database. `By default = "ScrubyDB" (in root of project)`.
  - `HASH_REDUCE_LEFT` - The length of the hash reduction on the left side.
@@ -8,6 +12,7 @@ The module contains the following variables:
  - `2` - 16777216 branches in collection.
  - `4` - 65536 branches in collection.
  - `6` - 256 branches in collection (by default).
+ - `MAX_WORKERS` - The maximum number of processes that can be used `By default = None`.
  """

  from __future__ import annotations
@@ -15,6 +20,7 @@ from __future__ import annotations
  __all__ = (
  "DB_ROOT",
  "HASH_REDUCE_LEFT",
+ "MAX_WORKERS",
  )

  from typing import Literal
@@ -31,3 +37,8 @@ DB_ROOT: str = "ScrubyDB"
  # Number of branches is number of requests to the hard disk during quantum operations.
  # Quantum operations: find_one, find_many, count_documents, delete_many, run_custom_task.
  HASH_REDUCE_LEFT: Literal[0, 2, 4, 6] = 6
+
+ # The maximum number of processes that can be used to execute the given calls.
+ # If None, then as many worker processes will be
+ # created as the machine has processors.
+ MAX_WORKERS: int | None = None
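Because these are plain module attributes, overrides should be assigned before the first `Scruby.collection()` call; as the `__init__` hunk earlier shows, `DB_ROOT`, `HASH_REDUCE_LEFT`, and `MAX_WORKERS` are copied onto the instance at construction. A short sketch of the intended order (the values are illustrative):

```python
from scruby import Scruby, settings

# Assign before creating any collection; Scruby.__init__ captures
# these values on the instance.
settings.DB_ROOT = "MyAppDB"
settings.HASH_REDUCE_LEFT = 4  # 65536 branches per collection
settings.MAX_WORKERS = 16      # thread pool size for quantum operations
```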
scruby-0.28.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: scruby
- Version: 0.26.0
+ Version: 0.28.3
  Summary: Asynchronous library for building and managing a hybrid database, by scheme of key-value.
  Project-URL: Homepage, https://kebasyaty.github.io/scruby/
  Project-URL: Repository, https://github.com/kebasyaty/scruby
@@ -17,6 +17,7 @@ Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: MacOS :: MacOS X
  Classifier: Operating System :: Microsoft :: Windows
  Classifier: Operating System :: POSIX
+ Classifier: Operating System :: POSIX :: Linux
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Programming Language :: Python :: 3.12
@@ -110,10 +111,11 @@ import datetime
  from typing import Annotated
  from pydantic import BaseModel, EmailStr, Field
  from pydantic_extra_types.phone_numbers import PhoneNumber, PhoneNumberValidator
- from scruby import Scruby, constants
+ from scruby import Scruby, settings

- constants.DB_ROOT = "ScrubyDB" # By default = "ScrubyDB"
- constants.HASH_REDUCE_LEFT = 6 # By default = 6
+ settings.DB_ROOT = "ScrubyDB" # By default = "ScrubyDB"
+ settings.HASH_REDUCE_LEFT = 6 # By default = 6
+ settings.MAX_WORKERS = None # By default = None

  class User(BaseModel):
  """User model."""
@@ -132,7 +134,7 @@ class User(BaseModel):

  async def main() -> None:
  """Example."""
- # Get collection of `User`.
+ # Get collection `User`.
  user_coll = await Scruby.collection(User)

  user = User(
@@ -167,22 +169,22 @@ if __name__ == "__main__":
  ```

  ```python
- """Find a single document matching the filter.
+ """Find one document matching the filter.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.
  """

  import anyio
  import datetime
  from typing import Annotated
  from pydantic import BaseModel, Field
- from scruby import Scruby, constants
+ from scruby import Scruby, settings
  from pprint import pprint as pp

- constants.DB_ROOT = "ScrubyDB" # By default = "ScrubyDB"
- constants.HASH_REDUCE_LEFT = 6 # By default = 6
+ settings.DB_ROOT = "ScrubyDB" # By default = "ScrubyDB"
+ settings.HASH_REDUCE_LEFT = 6 # By default = 6
+ settings.MAX_WORKERS = None # By default = None


  class Phone(BaseModel):
@@ -201,7 +203,7 @@ class Phone(BaseModel):

  async def main() -> None:
  """Example."""
- # Get collection of `Phone`.
+ # Get collection `Phone`.
  phone_coll = await Scruby.collection(Phone)

  # Create phone.
@@ -243,22 +245,22 @@ if __name__ == "__main__":
  ```

  ```python
- """Find one or more documents matching the filter.
+ """Find many documents matching the filter.

  The search is based on the effect of a quantum loop.
  The search effectiveness depends on the number of processor threads.
- Ideally, hundreds and even thousands of threads are required.
  """

  import anyio
  import datetime
  from typing import Annotated
  from pydantic import BaseModel, Field
- from scruby import Scruby, constants
+ from scruby import Scruby, settings
  from pprint import pprint as pp

- constants.DB_ROOT = "ScrubyDB" # By default = "ScrubyDB"
- constants.HASH_REDUCE_LEFT = 6 # By default = 6
+ settings.DB_ROOT = "ScrubyDB" # By default = "ScrubyDB"
+ settings.HASH_REDUCE_LEFT = 6 # By default = 6
+ settings.MAX_WORKERS = None # By default = None


  class Car(BaseModel):
@@ -277,11 +279,11 @@ class Car(BaseModel):

  async def main() -> None:
  """Example."""
- # Get collection of `Car`.
+ # Get collection `Car`.
  car_coll = await Scruby.collection(Car)

  # Create cars.
- for name in range(1, 10):
+ for num in range(1, 10):
  car = Car(
  brand="Mazda",
  model=f"EZ-6 {num}",
@@ -299,9 +301,23 @@ async def main() -> None:
  else:
  print("No cars!")

- # Get collection list.
- collection_list = await Scruby.collection_list()
- print(collection_list) # ["Car"]
+ # Find all cars.
+ car_list: list[Car] | None = await car_coll.find_many()
+ if car_list is not None:
+ pp(car_list)
+ else:
+ print("No cars!")
+
+ # For pagination output.
+ car_list: list[Car] | None = await car_coll.find_many(
+ filter_fn=lambda doc: doc.brand == "Mazda",
+ limit_docs=5,
+ page_number=2,
+ )
+ if car_list is not None:
+ pp(car_list)
+ else:
+ print("No cars!")

  # Full database deletion.
  # Hint: The main purpose is tests.
scruby-0.28.3.dist-info/RECORD ADDED
@@ -0,0 +1,18 @@
+ scruby/__init__.py,sha256=bw2Le5ULYlf2nFQT_617rmEumu66Ll-QCLCxqDFERWw,1014
+ scruby/aggregation.py,sha256=bd70J1Xye6faNHD8LS3lVQoHWKtPdPV_cqT_i7oui38,3491
+ scruby/db.py,sha256=djo4JkfuKCcV3jRbd2L3mIwENS3ptqJBt7SlAuiRhGY,6794
+ scruby/errors.py,sha256=D0jisudUsZk9iXp4nRSymaSMwyqHPVshsSlxx4HDVVk,1297
+ scruby/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ scruby/settings.py,sha256=_uVdZIGWoi6q9zcu0c2PS51OBEBNASRRrxfzaF7Nwy0,1580
+ scruby/mixins/__init__.py,sha256=XPMjJvOZN7dLpTE1FfGMBGQ_0421HXug-0rWKMU5fRQ,627
+ scruby/mixins/collection.py,sha256=coF-IOhicV_EihDwnYf6SW5Mfi3nOFR0gAhCc619NmI,1382
+ scruby/mixins/count.py,sha256=PGzRtgLvseQnHg6wt-A81s30pnsdY1d8cr-EQRnbfyU,2050
+ scruby/mixins/custom_task.py,sha256=ZhvCDiYnJ8BTIWlnRu6cTH-9G9o7dHSixjMIsxAtDpw,2316
+ scruby/mixins/delete.py,sha256=B2loiowj8ToO0euumDRxpHUVrLQx0iTcRys0jszn-rA,3046
+ scruby/mixins/find.py,sha256=gnHjnm0MZbzMHmWOBUJbMm8LZFBqdJ6yWA6Pxfap51Q,5340
+ scruby/mixins/keys.py,sha256=OUByWbHfNVWJVkUrhCsJZdVqf0zez_an6Gti2n5iKnM,5671
+ scruby/mixins/update.py,sha256=YkUiz1gcVtNXdgf7Mmda-0g3vJm3jL09v-msGy2tAWg,3229
+ scruby-0.28.3.dist-info/METADATA,sha256=DoDzTEj_wrZu3EfAhBcKBtCfbr9pb74FS3spyDpFfzY,10849
+ scruby-0.28.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ scruby-0.28.3.dist-info/licenses/LICENSE,sha256=mS0Wz0yGNB63gEcWEnuIb_lldDYV0sjRaO-o_GL6CWE,1074
+ scruby-0.28.3.dist-info/RECORD,,
scruby-0.26.0.dist-info/RECORD DELETED
@@ -1,18 +0,0 @@
- scruby/__init__.py,sha256=iYjvi002DeRh-U_ND2cCOHlbX2xwxN8IIhsposeotNw,1504
- scruby/aggregation.py,sha256=SYGcnMy2eq9vJb-pW3xR9LLAQIQ55TK-LGW_yKQ-7sU,3318
- scruby/constants.py,sha256=KInSZ_4dsQNXilrs7DvtQXevKEYibnNzl69a7XiWG4k,1099
- scruby/db.py,sha256=06GjnhN9lKvZo585nxKFd4z8Ox858Ep08c7eCbMA99k,6462
- scruby/errors.py,sha256=aj1zQlfxGwZC-bZZ07DRX2vHx31SpyWPqXHMpQ9kRVY,1124
- scruby/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- scruby/mixins/__init__.py,sha256=w1Be13FHAGSkdRfXcmoZ-eDn5Q8cFsPRAV7k1tXkIwY,454
- scruby/mixins/collection.py,sha256=kqUgzJbgG9pTZhlP7OD5DsOaArRzu0fl6fVibLAdNtk,1260
- scruby/mixins/count.py,sha256=Wcn6CeWrYSgsTTmYQ4J-CEiM4630rUSwRP9iKwbCl6c,2193
- scruby/mixins/custom_task.py,sha256=DL-pQZninz7CJUyRYlVV7SNPC60qMD3ZQyMLnC3zVTM,2294
- scruby/mixins/delete.py,sha256=BmfQH68iX7kzC20w16xzFcLO3uLxYKdNyqZqIbXb1M0,3240
- scruby/mixins/docs.py,sha256=UHawXUjIkDBtik6MIQwbPF3DZKSOG8WI4Da9_i_-9R4,5533
- scruby/mixins/find.py,sha256=va1hTm6Poua7_TMcZW2iqI-xmL1HcCUOx8pkKvTvu6U,5063
- scruby/mixins/update.py,sha256=A9V4PjA3INnqLTGoBxIvC8y8Wo-nLxlFejkPUhsebzQ,3428
- scruby-0.26.0.dist-info/METADATA,sha256=CluDLzRgB952ZDbbxdsXuHGE598BzIiF5eYXpGaNdj4,10483
- scruby-0.26.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- scruby-0.26.0.dist-info/licenses/LICENSE,sha256=mS0Wz0yGNB63gEcWEnuIb_lldDYV0sjRaO-o_GL6CWE,1074
- scruby-0.26.0.dist-info/RECORD,,