collections-cache 0.3.7.20250420__tar.gz → 0.3.9.20250420__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/PKG-INFO +1 -1
- {collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/collections_cache/collections_cache.py +11 -13
- {collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/pyproject.toml +1 -1
- {collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/LICENSE +0 -0
- {collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/README.md +0 -0
- {collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/collections_cache/__init__.py +0 -0
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.3
|
2
2
|
Name: collections-cache
|
3
|
-
Version: 0.3.7.20250420
|
3
|
+
Version: 0.3.9.20250420
|
4
4
|
Summary: collections-cache is a Python package for managing data collections across multiple SQLite databases. It allows efficient storage, retrieval, and updating of key-value pairs, supporting various data types serialized with pickle. The package uses parallel processing for fast access and manipulation of large collections.
|
5
5
|
License: MIT
|
6
6
|
Author: Luiz-Trindade
|
@@ -6,7 +6,6 @@ from itertools import chain
|
|
6
6
|
from os import cpu_count, path, makedirs, scandir
|
7
7
|
from concurrent.futures import ProcessPoolExecutor as Pool
|
8
8
|
from concurrent.futures import ThreadPoolExecutor as Thread
|
9
|
-
from threading import Thread as native_thread
|
10
9
|
|
11
10
|
class Collection_Cache:
|
12
11
|
def __init__(self, collection_name: str, constant_per_core: int = 100):
|
@@ -14,8 +13,7 @@ class Collection_Cache:
|
|
14
13
|
self.collection_name = collection_name
|
15
14
|
self.constant_per_core = constant_per_core
|
16
15
|
self.cpu_cores = cpu_count()
|
17
|
-
|
18
|
-
self.size_limit = 1
|
16
|
+
self.size_limit = self.constant_per_core * self.cpu_cores
|
19
17
|
self.collection_dir = path.join("./Collections", self.collection_name)
|
20
18
|
self.databases_list = []
|
21
19
|
self.keys_databases = {}
|
@@ -79,21 +77,15 @@ class Collection_Cache:
|
|
79
77
|
if type_of_operation == "set_key" and len(self.temp_keys_values) >= self.size_limit:
|
80
78
|
self.set_multi_keys(self.temp_keys_values)
|
81
79
|
self.temp_keys_values = {}
|
82
|
-
elif type_of_operation == "get_key":
|
83
|
-
self.set_multi_keys(self.temp_keys_values)
|
84
|
-
self.temp_keys_values = {}
|
85
|
-
elif type_of_operation == "set_key_force":
|
80
|
+
elif type_of_operation == "get_key" or type_of_operation == "set_key_force":
|
86
81
|
self.set_multi_keys(self.temp_keys_values)
|
87
82
|
self.temp_keys_values = {}
|
88
83
|
|
89
84
|
# Experimental
|
90
85
|
def set_key(self, key: str, value: any):
|
91
86
|
"""Used to store values and associate a value with a key."""
|
92
|
-
self.temp_keys_values[key]
|
93
|
-
|
94
|
-
|
95
|
-
t = native_thread(target=self.verify_size_of_temp_queue, args=("set_key",))
|
96
|
-
t.start()
|
87
|
+
self.temp_keys_values[key] = value
|
88
|
+
self.verify_size_of_temp_queue("set_key")
|
97
89
|
|
98
90
|
def set_key_exec(self, key: str, value: any):
|
99
91
|
"""Used to store values and associate a value with a key."""
|
@@ -121,6 +113,12 @@ class Collection_Cache:
|
|
121
113
|
with Thread(self.cpu_cores) as thread:
|
122
114
|
thread.map(lambda kv: self.set_key_exec(kv[0], kv[1]), keys_and_values.items())
|
123
115
|
|
116
|
+
# New feature
|
117
|
+
def set_key_force(self, key: str, value: any):
|
118
|
+
"""Used to force a unique key to be stored"""
|
119
|
+
self.set_key(key, value)
|
120
|
+
self.verify_size_of_temp_queue("set_key_force")
|
121
|
+
|
124
122
|
def add_to_keys_database(self, key, database):
|
125
123
|
self.keys_databases[key] = database
|
126
124
|
|
@@ -180,7 +178,7 @@ class Collection_Cache:
|
|
180
178
|
def export_to_json(self):
|
181
179
|
"""Test"""
|
182
180
|
pass
|
183
|
-
|
181
|
+
|
184
182
|
def shutdown(self):
|
185
183
|
"""Save all keys to the collection before close or shutdown"""
|
186
184
|
self.verify_size_of_temp_queue("set_key_force")
|
@@ -1,6 +1,6 @@
|
|
1
1
|
[tool.poetry]
|
2
2
|
name = "collections-cache"
|
3
|
-
version = "0.3.7.20250420"
|
3
|
+
version = "0.3.9.20250420"
|
4
4
|
description = "collections-cache is a Python package for managing data collections across multiple SQLite databases. It allows efficient storage, retrieval, and updating of key-value pairs, supporting various data types serialized with pickle. The package uses parallel processing for fast access and manipulation of large collections."
|
5
5
|
authors = ["Luiz-Trindade <luiz.gabriel.m.trindade@gmail.com>"]
|
6
6
|
license = "MIT"
|
File without changes
|
File without changes
|
{collections_cache-0.3.7.20250420 → collections_cache-0.3.9.20250420}/collections_cache/__init__.py
RENAMED
File without changes
|