collections-cache 0.1.9__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,28 +1,25 @@
+ import pickle
  import sqlite3
- #from multiprocessing import Pool
- from os import cpu_count, path, makedirs, scandir
- from itertools import chain
  from random import choice
- #from threading import Thread as task
- import pickle
- from concurrent.futures import ProcessPoolExecutor as Pool
+ from itertools import chain
+ from os import cpu_count, path, makedirs, scandir
+ #from concurrent.futures import ProcessPoolExecutor as Pool
+ from concurrent.futures import ThreadPoolExecutor as Pool
 
  class Collection_Cache:
      def __init__(self, collection_name):
+         # Variables
          self.collection_name = collection_name
          self.cpu_cores = cpu_count()
          self.collection_dir = path.join("./Collections", self.collection_name)
          self.databases_list = []
          self.keys_databases = {}
-
-         #print(f"Collection '{self.collection_name}' created!")
-         #print(f"Number of cpu cores: {self.cpu_cores}")
+         # Init methods
          self.create_collection()
          self.get_all_databases()
 
      def create_collection(self):
          makedirs(self.collection_dir, exist_ok=True)
-
          for core in range(self.cpu_cores):
              db_path = path.join(self.collection_dir, f"database_{core}.db")
              self.initialize_databases(db_path)
@@ -30,7 +27,6 @@ class Collection_Cache:
      def initialize_databases(self, db_path):
          conn = sqlite3.connect(db_path)
          self.configure_connection(conn)
-         #conn.execute("PRAGMA journal_mode=WAL;")
          conn.execute("""
              CREATE TABLE IF NOT EXISTS data(
                  key TEXT,
@@ -40,96 +36,81 @@ class Collection_Cache:
          conn.close()
 
      def get_all_databases(self):
-         #print("Obtaining all keys...")
          with scandir(self.collection_dir) as contents:
              self.databases_list = [path.join(self.collection_dir, content.name) for content in contents]
-
          with Pool(self.cpu_cores) as pool:
              self.keys_databases = dict(chain.from_iterable(pool.map(self.get_all_keys, self.databases_list)))
-         #print(self.keys_databases)
 
      def get_all_keys(self, database):
          conn = sqlite3.connect(database)
          self.configure_connection(conn)
          cursor = conn.cursor()
-         #cursor.execute("PRAGMA journal_mode=WAL;")
          cursor.execute("SELECT key FROM data;")
          result = cursor.fetchall()
          keys = [(line[0], database) for line in result]
          conn.close()
          return keys
 
-     def set_key(self, key, value):
+     def set_key(self, key: str, value: any):
          """Used to store values and associate a value with a key."""
          if key not in self.keys_databases:
              database_to_insert = choice(self.databases_list)
-             #print(f"Inserting in {database_to_insert}")
              conn = sqlite3.connect(database_to_insert)
              self.configure_connection(conn)
              cursor = conn.cursor()
-             #cursor.execute("PRAGMA journal_mode=WAL;")
              cursor.execute("INSERT INTO data(key, value) VALUES (?, ?);", (key, pickle.dumps(value)))
              conn.commit()
              conn.close()
              self.add_to_keys_database(key, database_to_insert)
-
          else:
-             #print(f"Updating key '{key}' in {self.keys_databases[key]}...")
              database_to_update = self.keys_databases[key]
              conn = sqlite3.connect(database_to_update)
              self.configure_connection(conn)
              cursor = conn.cursor()
-             #cursor.execute("PRAGMA journal_mode=WAL;")
              cursor.execute("UPDATE data SET value = ? WHERE key = ?;", (pickle.dumps(value), key))
              conn.commit()
              conn.close()
-             #print(f"Key '{key}' updated successfully in {database_to_update}")
+
+     def set_multi_keys(self, keys_and_values: dict[str, any]):
+         """Experimental. Set multiple keys and values at the same time."""
+         with Pool(self.cpu_cores) as pool:
+             pool.map(lambda kv: self.set_key(kv[0], kv[1]), keys_and_values.items())
 
      def add_to_keys_database(self, key, database):
          self.keys_databases[key] = database
-         #print(self.keys_databases)
 
      def delete_to_keys_database(self, key):
          """Removes the key from the dictionary of stored keys"""
          if key in self.keys_databases:
              del self.keys_databases[key]
 
-     def get_key(self, key):
+     def get_key(self, key: str):
          """Used to obtain the value stored by the key"""
          try:
              database_to_search = self.keys_databases[key]
-             #print(database_to_search)
-
              conn = sqlite3.connect(database_to_search)
              self.configure_connection(conn)
              cursor = conn.cursor()
-             #cursor.execute("PRAGMA journal_mode=WAL;")
              cursor.execute("SELECT value FROM data WHERE key = ?", (key,))
              result = cursor.fetchall()
              conn.close()
              return pickle.loads(result[0][0])
-
          except Exception as error:
              return error
 
-     def delete_key(self, key):
+     def delete_key(self, key: str):
          """Used to delete the value stored by the key"""
          try:
              database_to_delete = self.keys_databases[key]
-             #print(database_to_search)
-
              conn = sqlite3.connect(database_to_delete)
              self.configure_connection(conn)
              cursor = conn.cursor()
-             #cursor.execute("PRAGMA journal_mode=WAL;")
              cursor.execute("DELETE FROM data WHERE key = ?", (key,))
              conn.commit()
              conn.close()
              self.delete_to_keys_database(key)
-
          except KeyError:
              return f"Key '{key}' not found."
-
          except Exception as error:
              return error
 
@@ -137,3 +118,11 @@ class Collection_Cache:
          conn.execute("PRAGMA journal_mode = WAL;")
          conn.execute("PRAGMA synchronous = NORMAL;")
          conn.execute("PRAGMA wal_autocheckpoint = 1000;")
+
+     def keys(self):
+         """Returns all stored keys"""
+         return list(self.keys_databases.keys())
+
+     def export_to_json(self):
+         """Test"""
+         pass
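
The most consequential change in this file is the executor swap: `ProcessPoolExecutor` is commented out in favor of `ThreadPoolExecutor`, and the new `set_multi_keys` dispatches `set_key` through `pool.map` with a lambda. That combination only works with threads, because a process pool must pickle the callable it ships to worker processes, and lambdas are not picklable. A minimal sketch, not package code, illustrating the difference (`demo_set_key` is a hypothetical stand-in for `Collection_Cache.set_key`):

```python
# Minimal sketch (not from collections-cache): why set_multi_keys relies on
# the 0.2.1 switch from ProcessPoolExecutor to ThreadPoolExecutor.
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor

def demo_set_key(key, value):
    # Hypothetical stand-in for Collection_Cache.set_key.
    print(f"storing {key!r} -> {value!r}")

def dispatch(pool_cls):
    items = {"fruit": "apple", "count": 3}.items()
    with pool_cls(2) as pool:
        # list() drains the lazy iterator returned by Executor.map, so any
        # exception raised while scheduling or running a task surfaces here.
        list(pool.map(lambda kv: demo_set_key(kv[0], kv[1]), items))

if __name__ == "__main__":
    dispatch(ThreadPoolExecutor)   # works: threads call the lambda in-process
    try:
        dispatch(ProcessPoolExecutor)
    except Exception as error:
        # A process pool pickles the callable it sends to workers; lambdas
        # are not picklable, so this fails (typically a PicklingError).
        print(f"process pool failed: {error!r}")
```

Note the trade-off: threads sidestep the pickling constraint but give up the multi-core parallelism the 0.1.9 process pool had, which matters less here since SQLite I/O releases the GIL.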
@@ -1,7 +1,7 @@
  Metadata-Version: 2.1
  Name: collections-cache
- Version: 0.1.9
- Summary: Collection Cache is a Python package for managing data collections across multiple SQLite databases. It allows efficient storage, retrieval, and updating of key-value pairs, supporting various data types serialized with pickle. The package uses parallel processing for fast access and manipulation of large collections.
+ Version: 0.2.1
+ Summary: collections-cache is a Python package for managing data collections across multiple SQLite databases. It allows efficient storage, retrieval, and updating of key-value pairs, supporting various data types serialized with pickle. The package uses parallel processing for fast access and manipulation of large collections.
  License: MIT
  Author: Luiz-Trindade
  Author-email: luiz.gabriel.m.trindade@gmail.com
@@ -63,9 +63,11 @@ print(products) # Output: ['apple', 'orange', 'onion']
 
  ### Methods
 
- - **`set_key(key, value)`**: Stores a key-value pair in the cache. If the key already exists, its value is updated.
+ - **`set_key(key, value)`**: Stores a key-value pair in the cache. If the key already exists, its value is updated.
+ - **`set_multi_keys(keys_and_values)`**: Stores multiple key-value pairs in the cache at once. Any key that already exists has its value updated.
  - **`get_key(key)`**: Retrieves the value associated with a key.
  - **`delete_key(key)`**: Removes an existing key from the cache.
+ - **`keys()`**: Returns all stored keys.
 
  ## Development
 
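The method list above covers the new 0.2.1 surface. A hedged usage sketch follows; the `collections_cache` import path, collection name, and sample keys are assumptions inferred from the code diff, not copied from the README:

```python
# Hedged sketch: assumes Collection_Cache is importable as shown in the diff.
from collections_cache import Collection_Cache

cache = Collection_Cache("products")         # one SQLite file per CPU core
cache.set_multi_keys({"a": 1, "b": [2, 3]})  # experimental batch insert
print(cache.keys())                          # all stored keys, e.g. ['a', 'b']
print(cache.get_key("b"))                    # [2, 3], unpickled from SQLite
cache.delete_key("a")
```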
@@ -0,0 +1,6 @@
+ collections_cache/__init__.py,sha256=uUp8lhp-HnZRumnU_8MT6qVq95t0pOzn7oLW7ARbnvc,48
+ collections_cache/collections_cache.py,sha256=E610JrTKVnu2SjA6IGRJDMrAG9_BwrGq9gTuRj0d-yQ,4931
+ collections_cache-0.2.1.dist-info/LICENSE,sha256=RAIL-FmXSiNRgyiVlfhm2SvVI4XDVsN0jDt9207SJ8o,1168
+ collections_cache-0.2.1.dist-info/METADATA,sha256=iU4UgRbo17FdBbcHftNrnYY6IUT2rNmkNePmRIHdYzI,3408
+ collections_cache-0.2.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ collections_cache-0.2.1.dist-info/RECORD,,
@@ -1,6 +0,0 @@
- collections_cache/__init__.py,sha256=uUp8lhp-HnZRumnU_8MT6qVq95t0pOzn7oLW7ARbnvc,48
- collections_cache/collections_cache.py,sha256=qwK6heBRGqMy6lVfAalpXLUmuooeVCIePgvjsOOWSfg,5348
- collections_cache-0.1.9.dist-info/LICENSE,sha256=RAIL-FmXSiNRgyiVlfhm2SvVI4XDVsN0jDt9207SJ8o,1168
- collections_cache-0.1.9.dist-info/METADATA,sha256=9zEnDjTiUCrhpe2K_PhMY53fXNRMkFsIYqQbfk-AM4M,3235
- collections_cache-0.1.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- collections_cache-0.1.9.dist-info/RECORD,,