sqliter-py 0.9.0__py3-none-any.whl → 0.16.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqliter/constants.py +4 -3
- sqliter/exceptions.py +43 -0
- sqliter/model/__init__.py +38 -3
- sqliter/model/foreign_key.py +153 -0
- sqliter/model/model.py +42 -3
- sqliter/model/unique.py +20 -11
- sqliter/orm/__init__.py +16 -0
- sqliter/orm/fields.py +412 -0
- sqliter/orm/foreign_key.py +8 -0
- sqliter/orm/model.py +243 -0
- sqliter/orm/query.py +221 -0
- sqliter/orm/registry.py +169 -0
- sqliter/query/query.py +720 -69
- sqliter/sqliter.py +533 -76
- sqliter/tui/__init__.py +62 -0
- sqliter/tui/__main__.py +6 -0
- sqliter/tui/app.py +179 -0
- sqliter/tui/demos/__init__.py +96 -0
- sqliter/tui/demos/base.py +114 -0
- sqliter/tui/demos/caching.py +283 -0
- sqliter/tui/demos/connection.py +150 -0
- sqliter/tui/demos/constraints.py +211 -0
- sqliter/tui/demos/crud.py +154 -0
- sqliter/tui/demos/errors.py +231 -0
- sqliter/tui/demos/field_selection.py +150 -0
- sqliter/tui/demos/filters.py +389 -0
- sqliter/tui/demos/models.py +248 -0
- sqliter/tui/demos/ordering.py +156 -0
- sqliter/tui/demos/orm.py +460 -0
- sqliter/tui/demos/results.py +241 -0
- sqliter/tui/demos/string_filters.py +210 -0
- sqliter/tui/demos/timestamps.py +126 -0
- sqliter/tui/demos/transactions.py +177 -0
- sqliter/tui/runner.py +116 -0
- sqliter/tui/styles/app.tcss +130 -0
- sqliter/tui/widgets/__init__.py +7 -0
- sqliter/tui/widgets/code_display.py +81 -0
- sqliter/tui/widgets/demo_list.py +65 -0
- sqliter/tui/widgets/output_display.py +92 -0
- {sqliter_py-0.9.0.dist-info → sqliter_py-0.16.0.dist-info}/METADATA +27 -11
- sqliter_py-0.16.0.dist-info/RECORD +47 -0
- {sqliter_py-0.9.0.dist-info → sqliter_py-0.16.0.dist-info}/WHEEL +2 -2
- sqliter_py-0.16.0.dist-info/entry_points.txt +3 -0
- sqliter_py-0.9.0.dist-info/RECORD +0 -14
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
"""Caching demos."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import io
|
|
6
|
+
import tempfile
|
|
7
|
+
import time
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from sqliter import SqliterDB
|
|
11
|
+
from sqliter.model import BaseDBModel
|
|
12
|
+
from sqliter.tui.demos.base import Demo, DemoCategory, extract_demo_code
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _run_enable_cache() -> str:
    """Demonstrate enabling query result caching for performance.

    Caching stores query results in memory, speeding up repeated queries
    by avoiding disk I/O. Benefits are most apparent with complex queries
    and large datasets.
    """
    parts: list[str] = []

    class User(BaseDBModel):
        name: str
        email: str
        age: int

    # A file-backed database makes the cached-vs-uncached timing gap visible.
    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp:
        db_path = tmp.name

    db = None
    try:
        db = SqliterDB(db_path, cache_enabled=True)
        db.create_table(User)

        # Seed enough rows that the filtered query does real work.
        for idx in range(50):
            db.insert(
                User(
                    name=f"User {idx}",
                    email=f"user{idx}@example.com",
                    age=20 + idx,
                )
            )

        parts.append("Inserted 50 users\n")
        parts.append("Caching stores query results to avoid repeated I/O\n\n")

        # Time the same filtered query twice: the first run populates the
        # cache (miss), the repeat is served from it (hit).
        t0 = time.perf_counter()
        users = db.select(User).filter(age__gte=40).fetch_all()
        miss_time = (time.perf_counter() - t0) * 1000
        parts.append(f"First query (cache miss): {miss_time:.3f}ms\n")
        parts.append(f"Found {len(users)} users age 40+\n")

        t0 = time.perf_counter()
        users = db.select(User).filter(age__gte=40).fetch_all()
        hit_time = (time.perf_counter() - t0) * 1000
        parts.append(f"Second query (cache hit): {hit_time:.3f}ms\n")
        parts.append(f"Found {len(users)} users age 40+\n")

        # Guard against division by zero when the hit is below timer
        # resolution.
        if hit_time > 0:
            speedup = miss_time / hit_time
            parts.append(f"\nCache hit is {speedup:.1f}x faster!\n")
            parts.append("(Benefits increase with query complexity and data size)")
    finally:
        if db is not None:
            db.close()
        # Remove the temporary database file even if the demo failed.
        Path(db_path).unlink(missing_ok=True)

    return "".join(parts)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _run_cache_stats() -> str:
    """Show how to view cache hit/miss statistics.

    Use get_cache_stats() to monitor cache performance and see how
    effective your caching strategy is.
    """

    class Product(BaseDBModel):
        name: str
        price: float

    db = SqliterDB(memory=True, cache_enabled=True)
    db.create_table(Product)

    product = db.insert(Product(name="Widget", price=19.99))

    # Repeat the same pk lookup so the stats show hits after the first miss.
    for _ in range(5):
        db.get(Product, product.pk)

    stats = db.get_cache_stats()
    report = (
        "Cache statistics:\n"
        f" - Total queries: {stats['total']}\n"
        f" - Cache hits: {stats['hits']}\n"
        f" - Cache misses: {stats['misses']}\n"
        f" - Hit rate: {stats['hit_rate']}%\n"
    )

    db.close()
    return report
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _run_get_cache_controls() -> str:
    """Show get() caching, bypass, and TTL overrides."""
    parts: list[str] = []

    class Product(BaseDBModel):
        name: str
        price: float

    db = SqliterDB(memory=True, cache_enabled=True, cache_ttl=60)
    db.create_table(Product)

    product = db.insert(Product(name="Widget", price=19.99))

    def snapshot(header: str) -> None:
        # Append the current hit/miss counters under the given heading.
        stats = db.get_cache_stats()
        parts.append(header)
        parts.append(f" - Hits: {stats['hits']}\n")
        parts.append(f" - Misses: {stats['misses']}\n")

    db.get(Product, product.pk)
    snapshot("After first get (miss):\n")

    db.get(Product, product.pk)
    snapshot("After second get (hit):\n")

    # bypass_cache skips the cache entirely, so the counters stay put.
    db.get(Product, product.pk, bypass_cache=True)
    snapshot("After bypass_cache=True (stats unchanged):\n")

    # A per-call TTL overrides the database-wide cache_ttl for one lookup.
    db.get(Product, product.pk, cache_ttl=5)
    parts.append("Per-call TTL override set to 5s for this lookup\n")

    db.close()
    return "".join(parts)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def _run_cache_bypass() -> str:
    """Bypass the cache to fetch fresh data from the database.

    Use bypass_cache() when you need to ensure you're getting the most
    up-to-date data, ignoring any cached results.
    """

    class Item(BaseDBModel):
        name: str

    db = SqliterDB(memory=True, cache_enabled=True)
    db.create_table(Item)

    db.insert(Item(name="Item 1"))

    # Normal query path: the result goes through the cache.
    db.select(Item).filter(name__eq="Item 1").fetch_one()
    # bypass_cache() forces the query to read the database directly.
    db.select(Item).filter(name__eq="Item 1").bypass_cache().fetch_one()

    db.close()
    return "First query: cached\nSecond query: bypassed cache for fresh data\n"
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def _run_cache_ttl() -> str:
    """Set a time-to-live (TTL) for cached entries.

    Cache entries automatically expire after the specified number of seconds,
    ensuring stale data isn't served indefinitely.
    """

    class Article(BaseDBModel):
        title: str

    # cache_ttl=60 expires every cached result after one minute.
    db = SqliterDB(memory=True, cache_enabled=True, cache_ttl=60)
    db.create_table(Article)

    article = db.insert(Article(title="News Article"))
    lines = [
        f"Created: {article.title}\n",
        "Cache TTL set to 60 seconds\n",
        "Cached entries expire after TTL\n",
    ]

    db.close()
    return "".join(lines)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def _run_cache_clear() -> str:
    """Manually clear the cache to free memory or force refresh.

    Use clear_cache() when you need to invalidate all cached results
    and start fresh.
    """

    class Document(BaseDBModel):
        title: str

    db = SqliterDB(memory=True, cache_enabled=True)
    db.create_table(Document)

    doc = db.insert(Document(title="Doc 1"))
    db.get(Document, doc.pk)  # result is now cached
    db.clear_cache()  # drop every cached entry

    db.close()
    return "Query executed and cached\nCache cleared\n"
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def get_category() -> DemoCategory:
    """Get the Caching demo category."""
    # (id, title, description, runner) for each demo, in display order.
    entries = [
        ("cache_enable", "Enable Cache", "Enable query result caching", _run_enable_cache),
        ("cache_stats", "Cache Statistics", "View cache hit/miss statistics", _run_cache_stats),
        (
            "cache_get_controls",
            "Get Cache Controls",
            "Cache, bypass, and TTL for get()",
            _run_get_cache_controls,
        ),
        ("cache_bypass", "Cache Bypass", "Bypass cache for fresh data", _run_cache_bypass),
        ("cache_ttl", "Cache TTL", "Set cache expiration time", _run_cache_ttl),
        ("cache_clear", "Clear Cache", "Manually clear the cache", _run_cache_clear),
    ]
    return DemoCategory(
        id="caching",
        title="Caching",
        icon="",
        demos=[
            Demo(
                id=demo_id,
                title=title,
                description=description,
                category="caching",
                code=extract_demo_code(runner),
                execute=runner,
            )
            for demo_id, title, description, runner in entries
        ],
    )
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Connection & Setup demos."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import io
|
|
6
|
+
import tempfile
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
from sqliter import SqliterDB
|
|
10
|
+
from sqliter.model import BaseDBModel
|
|
11
|
+
from sqliter.tui.demos.base import Demo, DemoCategory, extract_demo_code
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _run_memory_db() -> str:
    """Create an in-memory SQLite database.

    Use memory=True for fast, temporary databases that don't persist.
    """
    db = SqliterDB(memory=True)
    parts = [
        f"Created database: {db}\n",
        f"Is memory: {db.is_memory}\n",
        f"Filename: {db.filename}\n",
    ]

    db.connect()
    parts.append(f"Connected: {db.is_connected}\n")

    db.close()
    parts.append(f"After close: {db.is_connected}\n")

    return "".join(parts)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _run_file_db() -> str:
    """Create a file-based SQLite database for persistent storage.

    Provide a file path to store data that persists across sessions.
    """
    parts: list[str] = []

    # Reserve a real file path; delete=False keeps the file after the
    # handle closes so SQLiter can open the same path.
    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as handle:
        db_path = handle.name

    try:
        db = SqliterDB(db_path)
        parts.append("Created file database\n")
        parts.append(f"Filename: {db.filename}\n")
        parts.append(f"Is memory: {db.is_memory}\n")

        db.connect()
        parts.append(f"Connected to: {db_path}\n")
        db.close()
    finally:
        # Always remove the temporary file, even if the demo raised.
        Path(db_path).unlink(missing_ok=True)
        parts.append("Cleaned up database file\n")

    return "".join(parts)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _run_debug_mode() -> str:
    """Enable debug mode to see SQL queries being executed.

    Set debug=True to log all SQL queries to the console for debugging.
    """
    parts = [
        "Debug mode enables SQL query logging.\n",
        "When debug=True, all SQL queries are logged.\n\n",
    ]

    class User(BaseDBModel):
        name: str

    # The TUI cannot show the console, so the expected log line is echoed
    # into the demo output instead.
    db = SqliterDB(memory=True, debug=True)
    db.create_table(User)

    parts.append("SQL queries would be logged to console:\n")
    parts.append(' CREATE TABLE IF NOT EXISTS "users" (...)\n')

    db.close()
    return "".join(parts)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _run_context_manager() -> str:
    """Use context manager for automatic connection management.

    The `with db:` block handles connection, transactions, and cleanup.
    """
    parts: list[str] = []

    class Task(BaseDBModel):
        title: str
        done: bool = False

    parts.append("Using context manager for transactions:\n\n")

    db = SqliterDB(memory=True)

    # Work done inside the block is committed when the block exits.
    with db:
        db.create_table(Task)
        task = db.insert(Task(title="Learn SQLiter", done=False))
        parts.append(f"Inserted: {task.title} (pk={task.pk})\n")
        parts.append("Transaction auto-commits on exit\n")

    parts.append(f"\nAfter context: connected={db.is_connected}\n")
    return "".join(parts)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def get_category() -> DemoCategory:
    """Get the Connection & Setup demo category."""
    # (id, title, description, runner) for each demo, in display order.
    entries = [
        ("conn_memory", "In-memory Database", "Create a temporary in-memory database", _run_memory_db),
        ("conn_file", "File-based Database", "Create a persistent file database", _run_file_db),
        ("conn_debug", "Debug Mode", "Enable SQL query logging", _run_debug_mode),
        (
            "conn_context",
            "Context Manager",
            "Auto commit/rollback with 'with' statement",
            _run_context_manager,
        ),
    ]
    return DemoCategory(
        id="connection",
        title="Connection & Setup",
        icon="",
        demos=[
            Demo(
                id=demo_id,
                title=title,
                description=description,
                category="connection",
                code=extract_demo_code(runner),
                execute=runner,
            )
            for demo_id, title, description, runner in entries
        ],
        expanded=True,  # First category starts expanded
    )
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
"""Unique & Foreign Key constraint demos."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import io
|
|
6
|
+
from typing import Annotated, Optional
|
|
7
|
+
|
|
8
|
+
from sqliter import SqliterDB
|
|
9
|
+
from sqliter.model import BaseDBModel
|
|
10
|
+
from sqliter.model.unique import unique
|
|
11
|
+
from sqliter.orm.foreign_key import ForeignKey
|
|
12
|
+
from sqliter.tui.demos.base import Demo, DemoCategory, extract_demo_code
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _run_unique_field() -> str:
    """Enforce uniqueness on a field to prevent duplicate values.

    Use unique() to ensure no two records have the same value for
    a specific field (like email).
    """

    class User(BaseDBModel):
        # Annotated + unique() marks the column as UNIQUE.
        email: Annotated[str, unique()]
        name: str

    db = SqliterDB(memory=True)
    db.create_table(User)

    first = db.insert(User(email="alice@example.com", name="Alice"))
    second = db.insert(User(email="bob@example.com", name="Bob"))
    parts = [
        f"Created: {first.name} ({first.email})\n",
        f"Created: {second.name} ({second.email})\n",
    ]

    db.close()
    return "".join(parts)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _run_multi_field_unique() -> str:
    """Enforce uniqueness on multiple fields.

    Each unique() field is constrained independently (not a composite
    unique constraint).
    """

    class Enrollment(BaseDBModel):
        # Two separate single-column UNIQUE constraints, not one pair.
        student_id: Annotated[int, unique()]
        course_id: Annotated[int, unique()]

    db = SqliterDB(memory=True)
    db.create_table(Enrollment)

    parts = ["Table created with unique fields (each column independent)\n"]
    record = db.insert(Enrollment(student_id=1, course_id=101))
    parts.append(
        f"Enrolled student {record.student_id} in course "
        f"{record.course_id}\n"
    )

    db.close()
    return "".join(parts)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _run_foreign_key_cascade() -> str:
    """Automatically delete related records when parent is deleted.

    CASCADE on_delete means deleting a record also deletes all
    records that reference it via foreign key.
    """

    class Author(BaseDBModel):
        name: str

    class Book(BaseDBModel):
        title: str
        # Deletes/updates of an Author propagate to its Books.
        author_id: ForeignKey[Author] = ForeignKey(
            Author,
            on_delete="CASCADE",
            on_update="CASCADE",
            null=True,
        )

    db = SqliterDB(memory=True)
    db.create_table(Author)
    db.create_table(Book)

    author = db.insert(Author(name="Jane Austen"))
    book = db.insert(Book(title="Pride and Prejudice", author_id=author.pk))
    report = (
        f"Book '{book.title}' linked to author {author.pk}\n"
        "Foreign key: CASCADE on delete/update\n"
    )

    db.close()
    return report
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _run_foreign_key_restrict() -> str:
    """Prevent deletion of records that are referenced by others.

    RESTRICT on_delete prevents deleting a record if other records
    reference it via foreign key.
    """

    class Category(BaseDBModel):
        name: str

    class Product(BaseDBModel):
        name: str
        # A Category cannot be deleted while any Product references it.
        category_id: ForeignKey[Category] = ForeignKey(
            Category, on_delete="RESTRICT"
        )

    db = SqliterDB(memory=True)
    db.create_table(Category)
    db.create_table(Product)

    category = db.insert(Category(name="Electronics"))
    product = db.insert(Product(name="Laptop", category_id=category.pk))
    report = (
        f"Product '{product.name}' in category '{category.name}'\n"
        "Foreign key: RESTRICT prevents deletion of referenced records\n"
    )

    db.close()
    return report
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _run_foreign_key_set_null() -> str:
    """Set foreign key to NULL when referenced record is deleted.

    SET NULL on_delete sets the foreign key field to None when the
    referenced record is deleted (requires nullable FK).
    """

    class Department(BaseDBModel):
        name: str

    class Employee(BaseDBModel):
        name: str
        # The FK must be nullable so it can be blanked on parent delete.
        department_id: Optional[ForeignKey[Department]] = ForeignKey(
            Department,
            on_delete="SET NULL",
            null=True,
        )

    db = SqliterDB(memory=True)
    db.create_table(Department)
    db.create_table(Employee)

    dept = db.insert(Department(name="Engineering"))
    emp = db.insert(Employee(name="Alice", department_id=dept.pk))
    report = (
        f"Employee '{emp.name}' in department {emp.department_id}\n"
        "Foreign key: SET NULL on delete of referenced record\n"
    )

    db.close()
    return report
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def get_category() -> DemoCategory:
    """Get the Constraints demo category."""
    # (id, title, description, runner) for each demo, in display order.
    entries = [
        (
            "constraint_unique_field",
            "Unique Field",
            "Enforce uniqueness on a field",
            _run_unique_field,
        ),
        (
            "constraint_multi_unique",
            "Multiple Unique Fields",
            "Multiple unique fields in one table",
            _run_multi_field_unique,
        ),
        (
            "constraint_fk_cascade",
            "Foreign Key CASCADE",
            "Cascade deletes to related records",
            _run_foreign_key_cascade,
        ),
        (
            "constraint_fk_restrict",
            "Foreign Key RESTRICT",
            "Prevent deletion of referenced records",
            _run_foreign_key_restrict,
        ),
        (
            "constraint_fk_set_null",
            "Foreign Key SET NULL",
            "Set field to NULL on reference deletion",
            _run_foreign_key_set_null,
        ),
    ]
    return DemoCategory(
        id="constraints",
        title="Constraints",
        icon="",
        demos=[
            Demo(
                id=demo_id,
                title=title,
                description=description,
                category="constraints",
                code=extract_demo_code(runner),
                execute=runner,
            )
            for demo_id, title, description, runner in entries
        ],
    )
|