esuls 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- esuls/db_cli.py +36 -14
- esuls/tests/test_db_concurrent.py +161 -0
- {esuls-0.1.12.dist-info → esuls-0.1.14.dist-info}/METADATA +1 -1
- esuls-0.1.14.dist-info/RECORD +11 -0
- esuls-0.1.12.dist-info/RECORD +0 -10
- {esuls-0.1.12.dist-info → esuls-0.1.14.dist-info}/WHEEL +0 -0
- {esuls-0.1.12.dist-info → esuls-0.1.14.dist-info}/licenses/LICENSE +0 -0
- {esuls-0.1.12.dist-info → esuls-0.1.14.dist-info}/top_level.txt +0 -0
esuls/db_cli.py
CHANGED
|
@@ -31,6 +31,8 @@ class AsyncDB(Generic[SchemaType]):
|
|
|
31
31
|
|
|
32
32
|
# Shared write locks per database file (class-level)
_db_locks: dict[str, asyncio.Lock] = {}
# Lock for schema initialization (class-level).  Created lazily on first
# connection because an asyncio.Lock should be constructed while an event
# loop is running; it is None until then, hence the optional annotation
# (string form so it is not evaluated at class-definition time).
_schema_init_lock: "asyncio.Lock | None" = None
|
|
34
36
|
|
|
35
37
|
def __init__(self, db_path: Union[str, Path], table_name: str, schema_class: Type[SchemaType]):
|
|
36
38
|
"""Initialize AsyncDB with a path and schema dataclass."""
|
|
@@ -57,20 +59,40 @@ class AsyncDB(Generic[SchemaType]):
|
|
|
57
59
|
if not hasattr(AsyncDB, '_initialized_schemas'):
|
|
58
60
|
AsyncDB._initialized_schemas = set()
|
|
59
61
|
|
|
60
|
-
async def _get_connection(self) -> aiosqlite.Connection:
|
|
61
|
-
"""Create a new optimized connection."""
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
62
|
+
async def _get_connection(self, max_retries: int = 5) -> aiosqlite.Connection:
    """Create a new optimized connection with retry logic for concurrent access.

    Opens an aiosqlite connection to ``self.db_path``, switches it to WAL
    journaling with relaxed sync for speed, and initializes the table schema
    the first time a given database/table pair is seen (guarded by a
    class-level lock to avoid a concurrent double-init).  Transient failures
    (e.g. "database is locked") are retried with exponential backoff.

    Args:
        max_retries: Number of connection attempts before giving up.

    Returns:
        An open, configured ``aiosqlite.Connection``.

    Raises:
        Exception: the error from the final attempt if all attempts fail.
    """
    # Lazily create the class-level schema-init lock: an asyncio.Lock is
    # best created while an event loop is available.
    if AsyncDB._schema_init_lock is None:
        AsyncDB._schema_init_lock = asyncio.Lock()

    last_error: Exception | None = None
    for attempt in range(max_retries):
        db = None
        try:
            db = await aiosqlite.connect(self.db_path, timeout=30.0)
            # Fast WAL mode with minimal sync
            await db.execute("PRAGMA journal_mode=WAL")
            await db.execute("PRAGMA synchronous=NORMAL")
            await db.execute("PRAGMA cache_size=10000")
            await db.execute("PRAGMA busy_timeout=30000")  # 30s busy timeout

            # Initialize schema if needed (with lock to prevent race condition)
            if self._db_key not in AsyncDB._initialized_schemas:
                async with AsyncDB._schema_init_lock:
                    # Double-check after acquiring lock
                    if self._db_key not in AsyncDB._initialized_schemas:
                        await self._init_schema(db)
                        AsyncDB._initialized_schemas.add(self._db_key)

            return db
        except Exception as e:
            last_error = e
            # Close the partially-configured connection so retries do not
            # leak file handles (connect may have succeeded even though a
            # PRAGMA or the schema init failed).
            if db is not None:
                try:
                    await db.close()
                except Exception:
                    pass  # best-effort cleanup only
            if attempt < max_retries - 1:
                # Exponential backoff: 0.1s, 0.2s, 0.4s, 0.8s, 1.6s
                wait_time = 0.1 * (2 ** attempt)
                await asyncio.sleep(wait_time)
                continue
            raise
    # Unreachable in practice (the final attempt re-raises above); kept as
    # a safety net for static analysis.
    raise last_error
|
|
74
96
|
|
|
75
97
|
async def _init_schema(self, db: aiosqlite.Connection) -> None:
|
|
76
98
|
"""Generate schema from dataclass structure with support for field additions."""
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Test concurrent database access to verify retry logic works.
|
|
3
|
+
"""
|
|
4
|
+
import asyncio
|
|
5
|
+
import tempfile
|
|
6
|
+
import os
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from esuls.db_cli import AsyncDB, BaseModel
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
class TestItem(BaseModel):
    """Minimal record schema used by the concurrency tests."""

    # Keep pytest from trying to collect this Test*-named dataclass as a
    # test class (its generated __init__ otherwise triggers a collection
    # warning).  No annotation, so dataclass does not treat it as a field.
    __test__ = False

    name: str = ""
    value: int = 0
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def test_concurrent_reads(temp_db):
    """Verify that many simultaneous read operations all succeed."""
    store = AsyncDB(temp_db, "items", TestItem)

    # Seed the table with a known number of rows.
    for idx in range(10):
        await store.save(TestItem(name=f"item_{idx}", value=idx))

    async def fetch_everything():
        return await store.find()

    # Fire 100 reads at once and wait for all of them.
    outcomes = await asyncio.gather(*(fetch_everything() for _ in range(100)))

    # Every read must have seen the full seeded dataset.
    for rows in outcomes:
        assert len(rows) == 10
    print("✓ 100 concurrent reads completed successfully")
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
async def test_concurrent_writes(temp_db):
    """Verify that many simultaneous save operations all succeed."""
    store = AsyncDB(temp_db, "items", TestItem)

    async def persist(idx: int):
        return await store.save(TestItem(name=f"concurrent_{idx}", value=idx))

    # Launch 50 writes in parallel.
    outcomes = await asyncio.gather(*(persist(n) for n in range(50)))

    # Each save call must report success...
    for flag in outcomes:
        assert flag is True

    # ...and every row must actually have landed in the table.
    stored = await store.find()
    assert len(stored) == 50
    print("✓ 50 concurrent writes completed successfully")
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
async def test_concurrent_mixed_operations(temp_db):
    """Run reads, writes and counts simultaneously and check the final state."""
    store = AsyncDB(temp_db, "items", TestItem)

    # Pre-populate the table so the reads have something to return.
    for idx in range(5):
        await store.save(TestItem(name=f"seed_{idx}", value=idx))

    async def do_read():
        return await store.find()

    async def do_write(idx: int):
        return await store.save(TestItem(name=f"mixed_{idx}", value=idx))

    async def do_count():
        return await store.count()

    # 100 reads + 50 writes + 50 counts, all in flight at the same time.
    pending = [do_read() for _ in range(100)]
    pending += [do_write(idx) for idx in range(50)]
    pending += [do_count() for _ in range(50)]

    outcomes = await asyncio.gather(*pending, return_exceptions=True)

    # Any exception means the retry logic failed under contention.
    failures = [item for item in outcomes if isinstance(item, Exception)]
    if failures:
        print(f"✗ {len(failures)} exceptions occurred:")
        for err in failures[:5]:
            print(f" - {type(err).__name__}: {err}")
        raise AssertionError(f"{len(failures)} operations failed")

    # Final row count: 5 seed rows + 50 concurrent writes.
    rows = await store.find()
    assert len(rows) == 55
    print("✓ 200 concurrent mixed operations completed successfully")
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
async def test_stress_concurrent_access(temp_db):
    """Stress the database with a very large number of parallel operations."""
    store = AsyncDB(temp_db, "items", TestItem)

    # Rotate through save / find / count based on the task index.
    async def one_operation(idx: int):
        kind = idx % 3
        if kind == 0:
            return await store.save(TestItem(name=f"stress_{idx}", value=idx))
        if kind == 1:
            return await store.find()
        return await store.count()

    outcomes = await asyncio.gather(
        *(one_operation(idx) for idx in range(500)), return_exceptions=True
    )

    # Tally outcomes.
    failures = [item for item in outcomes if isinstance(item, Exception)]
    ok_count = len(outcomes) - len(failures)

    print(f"Results: {ok_count} successes, {len(failures)} failures")

    if failures:
        print("Sample exceptions:")
        for err in failures[:3]:
            print(f" - {type(err).__name__}: {err}")

    # With retry logic in place, nothing should have failed.
    assert len(failures) == 0, f"{len(failures)} operations failed"
    print("✓ 500 concurrent stress operations completed successfully")
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
if __name__ == "__main__":
    import sys

    async def run_all_tests():
        """Execute every concurrency test against its own fresh database."""
        with tempfile.TemporaryDirectory() as tmpdir:
            base = Path(tmpdir)
            banner = "=" * 60

            print("\n" + banner)
            print("CONCURRENT DATABASE ACCESS TESTS")
            print(banner)

            # Each test gets its own database file so schema caching and
            # leftover rows from one test cannot affect the next.
            print("\n[Test 1] Concurrent reads...")
            await test_concurrent_reads(base / "test_concurrent.db")

            print("\n[Test 2] Concurrent writes...")
            await test_concurrent_writes(base / "test_concurrent2.db")

            print("\n[Test 3] Mixed operations...")
            await test_concurrent_mixed_operations(base / "test_concurrent3.db")

            print("\n[Test 4] Stress test (500 ops)...")
            await test_stress_concurrent_access(base / "test_concurrent4.db")

            print("\n" + banner)
            print("ALL TESTS PASSED!")
            print(banner)

    asyncio.run(run_all_tests())
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
esuls/__init__.py,sha256=dtZtmjZZ8jNspOd17BWsE9D9ofeg3vZF0vIpSgKaZqk,529
|
|
2
|
+
esuls/db_cli.py,sha256=s6uIUiezqmkEjw1gXPv90HmaCh4YUl2iTi1xOLWo8ro,19399
|
|
3
|
+
esuls/download_icon.py,sha256=w-bWbyPSbWvonzq43aDDtdxIvdKSa7OSyZ7LaN0uudg,3623
|
|
4
|
+
esuls/request_cli.py,sha256=pILF8J9IILpTmWacm1vCtvDaef-kOXjkWAbcE2S9_cA,17962
|
|
5
|
+
esuls/utils.py,sha256=AAh9y8dSB1vGO8e7A10dpsYMPI5-e9gw-GPInYBoOvg,577
|
|
6
|
+
esuls/tests/test_db_concurrent.py,sha256=kjaF8cOYcFo2sJJytrn04tcU65RZASjoopme0dcZj4c,4961
|
|
7
|
+
esuls-0.1.14.dist-info/licenses/LICENSE,sha256=AY0N01ARt0kbKB7CkByYLqqNQU-yalb-rpv-eXITEWA,1066
|
|
8
|
+
esuls-0.1.14.dist-info/METADATA,sha256=M4ZiuHzJVJsfACMB9k6bYl1qKSHpYXFbb0nWlbhxZZQ,7002
|
|
9
|
+
esuls-0.1.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
10
|
+
esuls-0.1.14.dist-info/top_level.txt,sha256=WWBDHRhQ0DQLBZKD7Un8uFN93GvVQnP4WvJKkvbACVA,6
|
|
11
|
+
esuls-0.1.14.dist-info/RECORD,,
|
esuls-0.1.12.dist-info/RECORD
DELETED
|
@@ -1,10 +0,0 @@
|
|
|
1
|
-
esuls/__init__.py,sha256=dtZtmjZZ8jNspOd17BWsE9D9ofeg3vZF0vIpSgKaZqk,529
|
|
2
|
-
esuls/db_cli.py,sha256=fGCKJDvPL1VeqLj8My-It1U0WIS365M6rakNMdzXbuk,18234
|
|
3
|
-
esuls/download_icon.py,sha256=w-bWbyPSbWvonzq43aDDtdxIvdKSa7OSyZ7LaN0uudg,3623
|
|
4
|
-
esuls/request_cli.py,sha256=pILF8J9IILpTmWacm1vCtvDaef-kOXjkWAbcE2S9_cA,17962
|
|
5
|
-
esuls/utils.py,sha256=AAh9y8dSB1vGO8e7A10dpsYMPI5-e9gw-GPInYBoOvg,577
|
|
6
|
-
esuls-0.1.12.dist-info/licenses/LICENSE,sha256=AY0N01ARt0kbKB7CkByYLqqNQU-yalb-rpv-eXITEWA,1066
|
|
7
|
-
esuls-0.1.12.dist-info/METADATA,sha256=wAUYuyCsfT5mFby1IYmbrWmT97YxnlBzrm5zshi4GXg,7002
|
|
8
|
-
esuls-0.1.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
9
|
-
esuls-0.1.12.dist-info/top_level.txt,sha256=WWBDHRhQ0DQLBZKD7Un8uFN93GvVQnP4WvJKkvbACVA,6
|
|
10
|
-
esuls-0.1.12.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|