sqliter-py 0.6.0__tar.gz → 0.10.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqliter-py might be problematic.
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/PKG-INFO +26 -21
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/README.md +13 -12
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/pyproject.toml +16 -15
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/constants.py +8 -0
- sqliter_py-0.10.0/sqliter/helpers.py +100 -0
- sqliter_py-0.10.0/sqliter/model/__init__.py +37 -0
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/model/model.py +89 -6
- sqliter_py-0.10.0/sqliter/model/unique.py +28 -0
- sqliter_py-0.10.0/sqliter/py.typed +0 -0
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/query/query.py +58 -8
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/sqliter.py +128 -22
- sqliter_py-0.6.0/.gitignore +0 -222
- sqliter_py-0.6.0/LICENSE.txt +0 -20
- sqliter_py-0.6.0/sqliter/helpers.py +0 -35
- sqliter_py-0.6.0/sqliter/model/__init__.py +0 -11
- sqliter_py-0.6.0/sqliter/model/unique.py +0 -19
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/__init__.py +0 -0
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/exceptions.py +0 -0
- {sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/query/__init__.py +0 -0
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/PKG-INFO

@@ -1,14 +1,10 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: sqliter-py
-Version: 0.
+Version: 0.10.0
 Summary: Interact with SQLite databases using Python and Pydantic
-
-Project-URL: Bug Tracker, https://github.com/seapagan/sqliter-py/issues
-Project-URL: Changelog, https://github.com/seapagan/sqliter-py/blob/main/CHANGELOG.md
-Project-URL: Repository, https://github.com/seapagan/sqliter-py
+Author: Grant Ramsay
 Author-email: Grant Ramsay <grant@gnramsay.com>
 License-Expression: MIT
-License-File: LICENSE.txt
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
@@ -18,12 +14,20 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Database :: Front-Ends
 Classifier: Topic :: Software Development
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: pydantic>=2.12.5
+Requires-Dist: inflect==7.0.0 ; extra == 'extras'
 Requires-Python: >=3.9
-
+Project-URL: Bug Tracker, https://github.com/seapagan/sqliter-py/issues
+Project-URL: Changelog, https://github.com/seapagan/sqliter-py/blob/main/CHANGELOG.md
+Project-URL: Homepage, http://sqliter.grantramsay.dev
+Project-URL: Pull Requests, https://github.com/seapagan/sqliter-py/pulls
+Project-URL: Repository, https://github.com/seapagan/sqliter-py
 Provides-Extra: extras
-Requires-Dist: inflect==7.0.0; extra == 'extras'
 Description-Content-Type: text/markdown
 
 # SQLiter <!-- omit in toc -->
@@ -47,22 +51,16 @@ time).
 The ideal use case is more for Python CLI tools that need to store data in a
 database-like format without needing to learn SQL or use a full ORM.
 
-Full documentation is available on the [
-Website](https://sqliter.grantramsay.dev)
+Full documentation is available on the [Website](https://sqliter.grantramsay.dev)
 
 > [!CAUTION]
+>
 > This project is still in the early stages of development and is lacking some
 > planned functionality. Please use with caution - Classes and methods may
 > change until a stable release is made. I'll try to keep this to an absolute
 > minimum and the releases and documentation will be very clear about any
 > breaking changes.
 >
-> Also, structures like `list`, `dict`, `set` etc are not supported **at this
-> time** as field types, since SQLite does not have a native column type for
-> these. This is the **next planned enhancement**. These will need to be
-> `pickled` first then stored as a BLOB in the database . Also support `date`
-> which can be stored as a Unix timestamp in an integer field.
->
 > See the [TODO](TODO.md) for planned features and improvements.
 
 - [Features](#features)
@@ -75,6 +73,9 @@ Website](https://sqliter.grantramsay.dev)
 ## Features
 
 - Table creation based on Pydantic models
+- Supports `date` and `datetime` fields
+- Support for complex data types (`list`, `dict`, `set`, `tuple`) stored as
+  BLOBs
 - Automatic primary key generation
 - User defined indexes on any field
 - Set any field as UNIQUE
@@ -159,12 +160,16 @@ for user in results:
 new_user.age = 31
 db.update(new_user)
 
-# Delete a record
+# Delete a record by primary key
 db.delete(User, new_user.pk)
+
+# Delete all records returned from a query:
+delete_count = db.select(User).filter(age__gt=30).delete()
 ```
 
-See the [
-for more detailed information on how to use SQLiter, and advanced
+See the [Guide](https://sqliter.grantramsay.dev/guide/guide/) section of the
+documentation for more detailed information on how to use SQLiter, and advanced
+features.
 
 ## Contributing
 
@@ -180,7 +185,7 @@ which you can read in the [CODE_OF_CONDUCT](CODE_OF_CONDUCT.md) file.
 This project is licensed under the MIT License.
 
 ```pre
-Copyright (c) 2024 Grant Ramsay
+Copyright (c) 2024-2025 Grant Ramsay
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/README.md

@@ -19,22 +19,16 @@ time).
 The ideal use case is more for Python CLI tools that need to store data in a
 database-like format without needing to learn SQL or use a full ORM.
 
-Full documentation is available on the [
-Website](https://sqliter.grantramsay.dev)
+Full documentation is available on the [Website](https://sqliter.grantramsay.dev)
 
 > [!CAUTION]
+>
 > This project is still in the early stages of development and is lacking some
 > planned functionality. Please use with caution - Classes and methods may
 > change until a stable release is made. I'll try to keep this to an absolute
 > minimum and the releases and documentation will be very clear about any
 > breaking changes.
 >
-> Also, structures like `list`, `dict`, `set` etc are not supported **at this
-> time** as field types, since SQLite does not have a native column type for
-> these. This is the **next planned enhancement**. These will need to be
-> `pickled` first then stored as a BLOB in the database . Also support `date`
-> which can be stored as a Unix timestamp in an integer field.
->
 > See the [TODO](TODO.md) for planned features and improvements.
 
 - [Features](#features)
@@ -47,6 +41,9 @@ Website](https://sqliter.grantramsay.dev)
 ## Features
 
 - Table creation based on Pydantic models
+- Supports `date` and `datetime` fields
+- Support for complex data types (`list`, `dict`, `set`, `tuple`) stored as
+  BLOBs
 - Automatic primary key generation
 - User defined indexes on any field
 - Set any field as UNIQUE
@@ -131,12 +128,16 @@ for user in results:
 new_user.age = 31
 db.update(new_user)
 
-# Delete a record
+# Delete a record by primary key
 db.delete(User, new_user.pk)
+
+# Delete all records returned from a query:
+delete_count = db.select(User).filter(age__gt=30).delete()
 ```
 
-See the [
-for more detailed information on how to use SQLiter, and advanced
+See the [Guide](https://sqliter.grantramsay.dev/guide/guide/) section of the
+documentation for more detailed information on how to use SQLiter, and advanced
+features.
 
 ## Contributing
 
@@ -152,7 +153,7 @@ which you can read in the [CODE_OF_CONDUCT](CODE_OF_CONDUCT.md) file.
 This project is licensed under the MIT License.
 
 ```pre
-Copyright (c) 2024 Grant Ramsay
+Copyright (c) 2024-2025 Grant Ramsay
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
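The features list above now advertises `date`/`datetime` and complex-type support in addition to the query-level delete shown in the quick-start. Below is a minimal sketch of the complex-type side in practice; it assumes the quick-start API shown in the README (`SqliterDB`, `create_table`, `insert`, `get`), and the `Config` model, its fields and the printed values are purely illustrative.

```python
from sqliter import SqliterDB
from sqliter.model import BaseDBModel


class Config(BaseDBModel):
    """Illustrative model mixing plain and complex field types."""

    name: str
    tags: list  # stored pickled as a BLOB
    options: dict  # stored pickled as a BLOB


db = SqliterDB(memory=True)
db.create_table(Config)

saved = db.insert(Config(name="demo", tags=["a", "b"], options={"retries": 3}))

# Values come back as their original Python types after un-pickling.
fetched = db.get(Config, saved.pk)
print(fetched.tags, fetched.options)  # ['a', 'b'] {'retries': 3}
```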
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/pyproject.toml

@@ -3,13 +3,13 @@
 
 [project]
 name = "sqliter-py"
-version = "0.
+version = "0.10.0"
 description = "Interact with SQLite databases using Python and Pydantic"
 readme = "README.md"
 requires-python = ">=3.9"
 license = "MIT"
 authors = [{ name = "Grant Ramsay", email = "grant@gnramsay.com" }]
-dependencies = ["pydantic>=2.
+dependencies = ["pydantic>=2.12.5"]
 
 classifiers = [
     "Development Status :: 4 - Beta",
@@ -21,6 +21,9 @@ classifiers = [
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
+    "Topic :: Database :: Front-Ends",
     "Topic :: Software Development",
     "Topic :: Software Development :: Libraries :: Python Modules",
 ]
@@ -29,27 +32,25 @@ classifiers = [
 extras = ["inflect==7.0.0"]
 
 [project.urls]
-
+"Homepage" = "http://sqliter.grantramsay.dev"
 "Pull Requests" = "https://github.com/seapagan/sqliter-py/pulls"
 "Bug Tracker" = "https://github.com/seapagan/sqliter-py/issues"
 "Changelog" = "https://github.com/seapagan/sqliter-py/blob/main/CHANGELOG.md"
 "Repository" = "https://github.com/seapagan/sqliter-py"
 
 [build-system]
-requires = ["
-build-backend = "
+requires = ["uv_build>=0.9.9,<0.10.0"]
+build-backend = "uv_build"
 
-[tool.
-
+[tool.uv.build-backend]
+module-name = "sqliter"
+module-root = ""
 
-[
-
-
-[tool.uv]
-dev-dependencies = [
+[dependency-groups]
+dev = [
     "mock>=5.1.0",
     "mypy>=1.11.2",
-    "pytest>=8.3.2",
+    "pytest>=8.3.2,<9.0",
     "pytest-mock>=3.14.0",
     "ruff>=0.6.4",
     "pytest-sugar>=1.0.0",
@@ -100,12 +101,11 @@ changelog.help = "Generate a changelog"
 line-length = 80
 lint.select = ["ALL"] # we are being very strict!
 lint.ignore = [
-    "ANN101",
-    "ANN102",
     "PGH003",
     "FBT002",
     "FBT003",
     "B006",
+    "S301", # in this library we use 'pickle' for saving and loading list etc
 ] # These rules are too strict even for us 😝
 lint.extend-ignore = [
     "COM812",
@@ -148,6 +148,7 @@ plugins = ["pydantic.mypy"]
 
 python_version = "3.9"
 exclude = ["docs"]
+
 [[tool.mypy.overrides]]
 disable_error_code = ["method-assign", "no-untyped-def", "attr-defined"]
 module = "tests.*"
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/constants.py

@@ -6,6 +6,8 @@ operators and data types, which are crucial for translating between
 Pydantic models and SQLite database operations.
 """
 
+import datetime
+
 # A dictionary mapping SQLiter filter operators to their corresponding SQL
 # operators.
 OPERATOR_MAPPING = {
@@ -34,4 +36,10 @@ SQLITE_TYPE_MAPPING = {
     str: "TEXT",
     bool: "INTEGER",  # SQLite stores booleans as integers (0 or 1)
     bytes: "BLOB",
+    datetime.datetime: "INTEGER",  # Store as Unix timestamp
+    datetime.date: "INTEGER",  # Store as Unix timestamp
+    list: "BLOB",
+    dict: "BLOB",
+    set: "BLOB",
+    tuple: "BLOB",
 }
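The expanded `SQLITE_TYPE_MAPPING` is what drives column types during table creation. A small sketch that just prints the mapping for a few annotations; unmapped types fall back to TEXT, mirroring the default in `infer_sqlite_type`.

```python
import datetime

from sqliter.constants import SQLITE_TYPE_MAPPING

# Each Python annotation on a model field resolves to one of these SQLite
# column types when the table is created; unmapped types fall back to TEXT.
for py_type in (str, bool, bytes, datetime.datetime, datetime.date, list, dict):
    print(f"{py_type.__name__:>8} -> {SQLITE_TYPE_MAPPING.get(py_type, 'TEXT')}")
```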
sqliter_py-0.10.0/sqliter/helpers.py

@@ -0,0 +1,100 @@
+"""Utility functions for SQLiter internal operations.
+
+This module provides helper functions used across the SQLiter library,
+primarily for type inference and mapping between Python and SQLite
+data types. These utilities support the core functionality of model
+to database schema translation.
+"""
+
+from __future__ import annotations
+
+import datetime
+from typing import Union
+
+from sqliter.constants import SQLITE_TYPE_MAPPING
+
+
+def infer_sqlite_type(field_type: Union[type, None]) -> str:
+    """Infer the SQLite column type based on the Python type.
+
+    This function maps Python types to their corresponding SQLite column
+    types. It's used when creating database tables to ensure that the
+    correct SQLite types are used for each field.
+
+    Args:
+        field_type: The Python type of the field, or None.
+
+    Returns:
+        A string representing the corresponding SQLite column type.
+
+    Note:
+        If the input type is None or not recognized, it defaults to 'TEXT'.
+    """
+    # If field_type is None, default to TEXT
+    if field_type is None:
+        return "TEXT"
+
+    # Map the simplified type to an SQLite type
+    return SQLITE_TYPE_MAPPING.get(field_type, "TEXT")
+
+
+def to_unix_timestamp(value: datetime.date | datetime.datetime) -> int:
+    """Convert datetime or date to a Unix timestamp in UTC.
+
+    Args:
+        value: The datetime or date object to convert.
+
+    Returns:
+        An integer Unix timestamp.
+
+    Raises:
+        TypeError: If the value is not a datetime or date object.
+    """
+    if isinstance(value, datetime.datetime):
+        # If no timezone is provided, assume local time and convert to UTC
+        if value.tzinfo is None:
+            value = value.astimezone()  # Convert to user's local timezone
+        # Convert to UTC before storing
+        value = value.astimezone(datetime.timezone.utc)
+        return int(value.timestamp())
+    if isinstance(value, datetime.date):
+        # Convert date to datetime at midnight in UTC
+        dt = datetime.datetime.combine(
+            value, datetime.time(0, 0), tzinfo=datetime.timezone.utc
+        )
+        return int(dt.timestamp())
+
+    err_msg = "Expected datetime or date object."
+    raise TypeError(err_msg)
+
+
+def from_unix_timestamp(
+    value: int, to_type: type, *, localize: bool = True
+) -> datetime.date | datetime.datetime:
+    """Convert a Unix timestamp to datetime or date, optionally to local time.
+
+    Args:
+        value: The Unix timestamp as an integer.
+        to_type: The expected output type, either datetime or date.
+        localize: If True, convert the datetime to the user's local timezone.
+
+    Returns:
+        The corresponding datetime or date object.
+
+    Raises:
+        TypeError: If to_type is not datetime or date.
+    """
+    if to_type is datetime.datetime:
+        # Convert the Unix timestamp to UTC datetime
+        dt = datetime.datetime.fromtimestamp(value, tz=datetime.timezone.utc)
+        # Convert to local time if requested
+        return dt.astimezone() if localize else dt
+    if to_type is datetime.date:
+        # Convert to UTC datetime first
+        dt = datetime.datetime.fromtimestamp(value, tz=datetime.timezone.utc)
+        # Convert to local time if requested, then return the date part
+        dt_local = dt.astimezone() if localize else dt
+        return dt_local.date()  # Extract the date part
+
+    err_msg = "Expected datetime or date type."
+    raise TypeError(err_msg)
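The two new timestamp helpers are symmetric: `to_unix_timestamp` normalises to UTC before storing, and `from_unix_timestamp` optionally localises on the way back. A minimal round-trip sketch; the example dates are arbitrary.

```python
import datetime

from sqliter.helpers import from_unix_timestamp, to_unix_timestamp

# Naive datetimes are treated as local time and normalised to UTC on the
# way in, so the stored value is always an integer Unix timestamp.
stamp = to_unix_timestamp(datetime.datetime(2024, 10, 1, 12, 30))

# Reading back as a datetime localises by default...
print(from_unix_timestamp(stamp, datetime.datetime))

# ...or stays in UTC when localize=False.
print(from_unix_timestamp(stamp, datetime.datetime, localize=False))

# Dates round-trip via midnight UTC and come back as date objects.
day = to_unix_timestamp(datetime.date(2024, 10, 1))
print(from_unix_timestamp(day, datetime.date, localize=False))
```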
sqliter_py-0.10.0/sqliter/model/__init__.py

@@ -0,0 +1,37 @@
+"""This module provides the base model class for SQLiter database models.
+
+It exports the BaseDBModel class, which is used to define database
+models in SQLiter applications, and the unique function, which is used to
+define unique constraints on model fields.
+"""
+
+import warnings
+from typing import Any
+
+from typing_extensions import deprecated
+
+from .model import BaseDBModel, SerializableField
+from .unique import unique
+
+
+@deprecated("Use 'unique' instead. Will be removed in a future version.")
+def Unique(default: Any = ..., **kwargs: Any) -> Any:  # noqa: ANN401, N802
+    """Deprecated: Use 'unique' instead. Will be removed in a future version.
+
+    Args:
+        default: The default value for the field.
+        **kwargs: Additional keyword arguments to pass to Field.
+
+    Returns:
+        A Field with unique metadata attached.
+    """
+    warnings.warn(
+        "Unique is deprecated and will be removed in a future version. "
+        "Use 'unique' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return unique(default=default, **kwargs)
+
+
+__all__ = ["BaseDBModel", "SerializableField", "Unique", "unique"]
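The package keeps the old capitalised `Unique` name as a thin deprecation shim around the new `unique` factory. A short sketch of the migration path, assuming this module's exports as shown above; the `User`/`LegacyUser` models and their fields are illustrative only.

```python
import warnings

from sqliter.model import BaseDBModel, Unique, unique


class User(BaseDBModel):
    # New style: lowercase factory, required unless a default is given.
    email: str = unique()


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")

    class LegacyUser(BaseDBModel):
        # Old style still works, but now emits a DeprecationWarning.
        handle: str = Unique()

    print(caught[0].category)  # <class 'DeprecationWarning'>
```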
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/model/model.py

@@ -9,12 +9,14 @@ in SQLiter applications.
 
 from __future__ import annotations
 
+import datetime
+import pickle
 import re
 from typing import (
     Any,
     ClassVar,
     Optional,
-
+    Protocol,
     Union,
     cast,
     get_args,
@@ -22,8 +24,13 @@ from typing import (
 )
 
 from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Self
 
-
+from sqliter.helpers import from_unix_timestamp, to_unix_timestamp
+
+
+class SerializableField(Protocol):
+    """Protocol for fields that can be serialized or deserialized."""
 
 
 class BaseDBModel(BaseModel):
@@ -38,11 +45,19 @@ class BaseDBModel(BaseModel):
     """
 
     pk: int = Field(0, description="The mandatory primary key of the table.")
+    created_at: int = Field(
+        default=0,
+        description="Unix timestamp when the record was created.",
+    )
+    updated_at: int = Field(
+        default=0,
+        description="Unix timestamp when the record was last updated.",
+    )
 
     model_config = ConfigDict(
         extra="ignore",
         populate_by_name=True,
-        validate_assignment=
+        validate_assignment=True,
         from_attributes=True,
     )
 
@@ -70,7 +85,7 @@ class BaseDBModel(BaseModel):
     unique_indexes: ClassVar[list[Union[str, tuple[str]]]] = []
 
     @classmethod
-    def model_validate_partial(cls
+    def model_validate_partial(cls, obj: dict[str, Any]) -> Self:
         """Validate and create a model instance from partial data.
 
         This method allows for the creation of a model instance even when
@@ -106,7 +121,7 @@ class BaseDBModel(BaseModel):
             else:
                 converted_obj[field_name] = field_type(value)
 
-        return cast(
+        return cast("Self", cls.model_construct(**converted_obj))
 
     @classmethod
     def get_table_name(cls) -> str:
@@ -130,7 +145,7 @@ class BaseDBModel(BaseModel):
 
         # Pluralize the table name
         try:
-            import inflect
+            import inflect  # noqa: PLC0415
 
             p = inflect.engine()
             return p.plural(snake_case_name)
@@ -151,3 +166,71 @@
     def should_create_pk(cls) -> bool:
         """Returns True since the primary key is always created."""
         return True
+
+    @classmethod
+    def serialize_field(cls, value: SerializableField) -> SerializableField:
+        """Serialize datetime or date fields to Unix timestamp.
+
+        Args:
+            field_name: The name of the field.
+            value: The value of the field.
+
+        Returns:
+            An integer Unix timestamp if the field is a datetime or date.
+        """
+        if isinstance(value, (datetime.datetime, datetime.date)):
+            return to_unix_timestamp(value)
+        if isinstance(value, (list, dict, set, tuple)):
+            return pickle.dumps(value)
+        return value  # Return value as-is for other fields
+
+    # Deserialization after fetching from the database
+
+    @classmethod
+    def deserialize_field(
+        cls,
+        field_name: str,
+        value: SerializableField,
+        *,
+        return_local_time: bool,
+    ) -> object:
+        """Deserialize fields from Unix timestamp to datetime or date.
+
+        Args:
+            field_name: The name of the field being deserialized.
+            value: The Unix timestamp value fetched from the database.
+            return_local_time: Flag to control whether the datetime is localized
+                to the user's timezone.
+
+        Returns:
+            A datetime or date object if the field type is datetime or date,
+            otherwise returns the value as-is.
+        """
+        if value is None:
+            return None
+
+        # Get field type if it exists in model_fields
+        field_info = cls.model_fields.get(field_name)
+        if field_info is None:
+            # If field doesn't exist in model, return value as-is
+            return value
+
+        field_type = field_info.annotation
+
+        if (
+            isinstance(field_type, type)
+            and issubclass(field_type, (datetime.datetime, datetime.date))
+            and isinstance(value, int)
+        ):
+            return from_unix_timestamp(
+                value, field_type, localize=return_local_time
+            )
+
+        origin_type = get_origin(field_type) or field_type
+        if origin_type in (list, dict, set, tuple) and isinstance(value, bytes):
+            try:
+                return pickle.loads(value)
+            except pickle.UnpicklingError:
+                return value
+
+        return value
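The new `serialize_field`/`deserialize_field` classmethods are the heart of the datetime and BLOB support added above. A hedged sketch of how they behave in isolation; the `Event` model and its values are made up for illustration.

```python
import datetime

from sqliter.model import BaseDBModel


class Event(BaseDBModel):
    title: str
    starts_at: datetime.datetime
    attendees: list


# serialize_field is applied to each value before it is written: datetimes
# and dates become integer Unix timestamps, containers become pickled bytes.
raw_when = Event.serialize_field(datetime.datetime(2025, 1, 1, 9, 0))
raw_people = Event.serialize_field(["ann", "bob"])
print(type(raw_when), type(raw_people))  # <class 'int'> <class 'bytes'>

# deserialize_field reverses the process, driven by the field's annotation.
when = Event.deserialize_field("starts_at", raw_when, return_local_time=False)
people = Event.deserialize_field("attendees", raw_people, return_local_time=False)
print(when, people)
```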
sqliter_py-0.10.0/sqliter/model/unique.py

@@ -0,0 +1,28 @@
+"""Define a custom field type for unique constraints in SQLiter."""
+
+from typing import Any
+
+from pydantic import Field
+
+
+def unique(default: Any = ..., **kwargs: Any) -> Any:  # noqa: ANN401
+    """A custom field type for unique constraints in SQLiter.
+
+    Args:
+        default: The default value for the field.
+        **kwargs: Additional keyword arguments to pass to Field.
+
+    Returns:
+        A Field with unique metadata attached.
+    """
+    # Extract any existing json_schema_extra from kwargs
+    existing_extra = kwargs.pop("json_schema_extra", {})
+
+    # Ensure it's a dict
+    if not isinstance(existing_extra, dict):
+        existing_extra = {}
+
+    # Add our unique marker to json_schema_extra
+    existing_extra["unique"] = True
+
+    return Field(default=default, json_schema_extra=existing_extra, **kwargs)
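`unique()` does nothing more than tag the field's `json_schema_extra`; the UNIQUE constraint itself is applied later during table creation (see the `sqliter.py` diff further down). A tiny sketch showing the marker on a plain Pydantic model; the `Account` class is illustrative.

```python
from pydantic import BaseModel

from sqliter.model.unique import unique


class Account(BaseModel):
    handle: str = unique()


# The factory returns an ordinary pydantic Field; the only difference is
# the marker in json_schema_extra that table creation later reads.
print(Account.model_fields["handle"].json_schema_extra)  # {'unique': True}
```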
sqliter_py-0.10.0/sqliter/py.typed: File without changes
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/query/query.py

@@ -28,6 +28,7 @@ from sqliter.exceptions import (
     InvalidFilterError,
     InvalidOffsetError,
     InvalidOrderError,
+    RecordDeletionError,
     RecordFetchError,
 )
 
@@ -35,7 +36,7 @@ if TYPE_CHECKING:  # pragma: no cover
     from pydantic.fields import FieldInfo
 
     from sqliter import SqliterDB
-    from sqliter.model import BaseDBModel
+    from sqliter.model import BaseDBModel, SerializableField
 
 # Define a type alias for the possible value types
 FilterValue = Union[
@@ -609,14 +610,32 @@
             An instance of the model class populated with the row data.
         """
         if self._fields:
-
-
-
-            return self.model_class(
-                **{
-                    field: row[idx]
-                    for idx, field in enumerate(self.model_class.model_fields)
+            data = {
+                field: self._deserialize(field, row[idx])
+                for idx, field in enumerate(self._fields)
             }
+            return self.model_class.model_validate_partial(data)
+
+        data = {
+            field: self._deserialize(field, row[idx])
+            for idx, field in enumerate(self.model_class.model_fields)
+        }
+        return self.model_class(**data)
+
+    def _deserialize(
+        self, field_name: str, value: SerializableField
+    ) -> SerializableField:
+        """Deserialize a field value if needed.
+
+        Args:
+            field_name: Name of the field being deserialized.
+            value: Value from the database.
+
+        Returns:
+            The deserialized value.
+        """
+        return self.model_class.deserialize_field(
+            field_name, value, return_local_time=self.db.return_local_time
         )
 
     @overload
@@ -710,3 +729,34 @@
             True if at least one result exists, False otherwise.
         """
         return self.count() > 0
+
+    def delete(self) -> int:
+        """Delete records that match the current query conditions.
+
+        Returns:
+            The number of records deleted.
+
+        Raises:
+            RecordDeletionError: If there's an error deleting the records.
+        """
+        sql = f'DELETE FROM "{self.table_name}"'  # noqa: S608 # nosec
+
+        # Build the WHERE clause with special handling for None (NULL in SQL)
+        values, where_clause = self._parse_filter()
+
+        if self.filters:
+            sql += f" WHERE {where_clause}"
+
+        # Print the raw SQL and values if debug is enabled
+        if self.db.debug:
+            self.db._log_sql(sql, values)  # noqa: SLF001
+
+        try:
+            with self.db.connect() as conn:
+                cursor = conn.cursor()
+                cursor.execute(sql, values)
+                deleted_count = cursor.rowcount
+                self.db._maybe_commit()  # noqa: SLF001
+                return deleted_count
+        except sqlite3.Error as exc:
+            raise RecordDeletionError(self.table_name) from exc
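The new `QueryBuilder.delete()` reuses whatever filters are already on the query, so it removes exactly the rows the equivalent `fetch_all()` would return. A hedged usage sketch; the `User` model, the sample rows and the `create_table`/`select`/`filter` calls follow the README quick-start and are illustrative.

```python
from sqliter import SqliterDB
from sqliter.model import BaseDBModel


class User(BaseDBModel):
    name: str
    age: int


db = SqliterDB(memory=True)
db.create_table(User)
for name, age in [("ann", 25), ("bob", 41), ("cat", 52)]:
    db.insert(User(name=name, age=age))

# delete() honours the filters already attached to the query, so only the
# rows the matching fetch_all() would return are removed.
removed = db.select(User).filter(age__gt=30).delete()
print(removed)  # 2

# Without a filter it clears the whole table and returns the row count.
print(db.select(User).delete())  # 1
```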
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/sqliter.py

@@ -10,6 +10,7 @@ from __future__ import annotations
 
 import logging
 import sqlite3
+import time
 from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union
 
 from typing_extensions import Self
@@ -27,7 +28,6 @@ from sqliter.exceptions import (
     TableDeletionError,
 )
 from sqliter.helpers import infer_sqlite_type
-from sqliter.model.unique import Unique
 from sqliter.query.query import QueryBuilder
 
 if TYPE_CHECKING:  # pragma: no cover
@@ -51,7 +51,9 @@ class SqliterDB:
         logger (Optional[logging.Logger]): Custom logger for debug output.
     """
 
-
+    MEMORY_DB = ":memory:"
+
+    def __init__(  # noqa: PLR0913
         self,
         db_filename: Optional[str] = None,
         *,
@@ -60,6 +62,7 @@ class SqliterDB:
         debug: bool = False,
         logger: Optional[logging.Logger] = None,
         reset: bool = False,
+        return_local_time: bool = True,
     ) -> None:
         """Initialize a new SqliterDB instance.
 
@@ -71,12 +74,13 @@ class SqliterDB:
             logger: Custom logger for debug output.
             reset: Whether to reset the database on initialization. This will
                 basically drop all existing tables.
+            return_local_time: Whether to return local time for datetime fields.
 
         Raises:
             ValueError: If no filename is provided for a non-memory database.
         """
         if memory:
-            self.db_filename =
+            self.db_filename = self.MEMORY_DB
         elif db_filename:
             self.db_filename = db_filename
         else:
@@ -90,6 +94,7 @@ class SqliterDB:
         self.logger = logger
         self.conn: Optional[sqlite3.Connection] = None
         self.reset = reset
+        self.return_local_time = return_local_time
 
         self._in_transaction = False
 
@@ -99,6 +104,54 @@ class SqliterDB:
         if self.reset:
             self._reset_database()
 
+    @property
+    def filename(self) -> Optional[str]:
+        """Returns the filename of the current database or None if in-memory."""
+        return None if self.db_filename == self.MEMORY_DB else self.db_filename
+
+    @property
+    def is_memory(self) -> bool:
+        """Returns True if the database is in-memory."""
+        return self.db_filename == self.MEMORY_DB
+
+    @property
+    def is_autocommit(self) -> bool:
+        """Returns True if auto-commit is enabled."""
+        return self.auto_commit
+
+    @property
+    def is_connected(self) -> bool:
+        """Returns True if the database is connected, False otherwise."""
+        return self.conn is not None
+
+    @property
+    def table_names(self) -> list[str]:
+        """Returns a list of all table names in the database.
+
+        Temporarily connects to the database if not connected and restores
+        the connection state afterward.
+        """
+        was_connected = self.is_connected
+        if not was_connected:
+            self.connect()
+
+        if self.conn is None:
+            err_msg = "Failed to establish a database connection."
+            raise DatabaseConnectionError(err_msg)
+
+        cursor = self.conn.cursor()
+        cursor.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' "
+            "AND name NOT LIKE 'sqlite_%';"
+        )
+        tables = [row[0] for row in cursor.fetchall()]
+
+        # Restore the connection state
+        if not was_connected:
+            self.close()
+
+        return tables
+
     def _reset_database(self) -> None:
         """Drop all user-created tables in the database."""
         with self.connect() as conn:
@@ -240,9 +293,16 @@ class SqliterDB:
         for field_name, field_info in model_class.model_fields.items():
             if field_name != primary_key:
                 sqlite_type = infer_sqlite_type(field_info.annotation)
-                unique_constraint =
-
-
+                unique_constraint = ""
+                if (
+                    (
+                        hasattr(field_info, "json_schema_extra")
+                        and field_info.json_schema_extra
+                    )
+                    and isinstance(field_info.json_schema_extra, dict)
+                    and field_info.json_schema_extra.get("unique", False)
+                ):
+                    unique_constraint = "UNIQUE"
                 fields.append(
                     f"{field_name} {sqlite_type} {unique_constraint}".strip()
                 )
@@ -379,11 +439,18 @@ class SqliterDB:
         if not self._in_transaction and self.auto_commit and self.conn:
             self.conn.commit()
 
-    def insert(
+    def insert(
+        self, model_instance: T, *, timestamp_override: bool = False
+    ) -> T:
         """Insert a new record into the database.
 
         Args:
             model_instance: The instance of the model class to insert.
+            timestamp_override: If True, override the created_at and updated_at
+                timestamps with provided values. Default is False. If the values
+                are not provided, they will be set to the current time as
+                normal. Without this flag, the timestamps will always be set to
+                the current time, even if provided.
 
         Returns:
             The updated model instance with the primary key (pk) set.
@@ -394,8 +461,28 @@ class SqliterDB:
         model_class = type(model_instance)
         table_name = model_class.get_table_name()
 
+        # Always set created_at and updated_at timestamps
+        current_timestamp = int(time.time())
+
+        # Handle the case where timestamp_override is False
+        if not timestamp_override:
+            # Always override both timestamps with the current time
+            model_instance.created_at = current_timestamp
+            model_instance.updated_at = current_timestamp
+        else:
+            # Respect provided values, but set to current time if they are 0
+            if model_instance.created_at == 0:
+                model_instance.created_at = current_timestamp
+            if model_instance.updated_at == 0:
+                model_instance.updated_at = current_timestamp
+
         # Get the data from the model
         data = model_instance.model_dump()
+
+        # Serialize the data
+        for field_name, value in list(data.items()):
+            data[field_name] = model_instance.serialize_field(value)
+
         # remove the primary key field if it exists, otherwise we'll get
         # TypeErrors as multiple primary keys will exist
         if data.get("pk", None) == 0:
@@ -422,7 +509,13 @@ class SqliterDB:
                 raise RecordInsertionError(table_name) from exc
             else:
                 data.pop("pk", None)
-
+                # Deserialize each field before creating the model instance
+                deserialized_data = {}
+                for field_name, value in data.items():
+                    deserialized_data[field_name] = model_class.deserialize_field(
+                        field_name, value, return_local_time=self.return_local_time
+                    )
+                return model_class(pk=cursor.lastrowid, **deserialized_data)
 
     def get(
         self, model_class: type[BaseDBModel], primary_key_value: int
@@ -459,7 +552,17 @@ class SqliterDB:
                     field: result[idx]
                     for idx, field in enumerate(model_class.model_fields)
                 }
-
+                # Deserialize each field before creating the model instance
+                deserialized_data = {}
+                for field_name, value in result_dict.items():
+                    deserialized_data[field_name] = (
+                        model_class.deserialize_field(
+                            field_name,
+                            value,
+                            return_local_time=self.return_local_time,
+                        )
+                    )
+                return model_class(**deserialized_data)
         except sqlite3.Error as exc:
             raise RecordFetchError(table_name) from exc
         else:
@@ -473,24 +576,27 @@ class SqliterDB:
 
         Raises:
             RecordUpdateError: If there's an error updating the record or if it
-
+                is not found.
         """
         model_class = type(model_instance)
         table_name = model_class.get_table_name()
-
         primary_key = model_class.get_primary_key()
 
-
-
-
-
-
-
-
-
-
-
-        primary_key_value =
+        # Set updated_at timestamp
+        current_timestamp = int(time.time())
+        model_instance.updated_at = current_timestamp
+
+        # Get the data and serialize any datetime/date fields
+        data = model_instance.model_dump()
+        for field_name, value in list(data.items()):
+            data[field_name] = model_instance.serialize_field(value)
+
+        # Remove the primary key from the update data
+        primary_key_value = data.pop(primary_key)
+
+        # Create the SQL using the processed data
+        fields = ", ".join(f"{field} = ?" for field in data)
+        values = tuple(data.values())
 
         update_sql = f"""
             UPDATE {table_name}
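Taken together, the `sqliter.py` changes add automatic `created_at`/`updated_at` handling, a `timestamp_override` escape hatch on `insert()`, a `return_local_time` switch, and several introspection properties. A consolidated, hedged sketch; the `Note` model, the file name and the literal timestamps are illustrative.

```python
from sqliter import SqliterDB
from sqliter.model import BaseDBModel


class Note(BaseDBModel):
    text: str


db = SqliterDB("notes.db", return_local_time=False)
db.create_table(Note)

# insert() stamps created_at/updated_at with the current Unix time...
note = db.insert(Note(text="hello"))
print(note.created_at, note.updated_at)

# ...unless timestamp_override=True, which keeps non-zero provided values.
imported = db.insert(
    Note(text="imported", created_at=1_700_000_000, updated_at=1_700_000_000),
    timestamp_override=True,
)

# update() always refreshes updated_at before writing.
imported.text = "imported (edited)"
db.update(imported)

# New introspection properties added in this release.
print(db.filename, db.is_memory, db.table_names)
```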
sqliter_py-0.6.0/.gitignore DELETED
@@ -1,222 +0,0 @@
-# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
-# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,python
-# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,linux,python
-
-### Linux ###
-*~
-
-# temporary files which can be created if a process still has a handle open of a deleted file
-.fuse_hidden*
-
-# KDE directory preferences
-.directory
-
-# Linux trash folder which might appear on any partition or disk
-.Trash-*
-
-# .nfs files are created when an open file is removed but is still being accessed
-.nfs*
-
-### Python ###
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-coverage.lcov
-*.cover
-*.py,cover
-.hypothesis/
-.pytest_cache/
-cover/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-.pybuilder/
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# pyenv
-# For a library or package, you might want to ignore these files since the code is
-# intended to run in multiple environments; otherwise, check them in:
-# .python-version
-
-# pipenv
-# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
-# However, in case of collaboration, if having platform-specific dependencies or dependencies
-# having no cross-platform support, pipenv may install dependencies that don't work, or not
-# install all needed dependencies.
-#Pipfile.lock
-
-# poetry
-# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
-# This is especially recommended for binary packages to ensure reproducibility, and is more
-# commonly ignored for libraries.
-# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
-#poetry.lock
-
-# pdm
-# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
-#pdm.lock
-# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
-# in version control.
-# https://pdm.fming.dev/#use-with-ide
-.pdm.toml
-
-# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
-__pypackages__/
-
-# Celery stuff
-celerybeat-schedule
-celerybeat.pid
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
-.pyre/
-
-# pytype static type analyzer
-.pytype/
-
-# Cython debug symbols
-cython_debug/
-
-# PyCharm
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
-
-### Python Patch ###
-# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
-poetry.toml
-
-# ruff
-.ruff_cache/
-
-# LSP config files
-pyrightconfig.json
-
-### VisualStudioCode ###
-.vscode/*
-!.vscode/settings.json
-!.vscode/tasks.json
-!.vscode/launch.json
-!.vscode/extensions.json
-!.vscode/*.code-snippets
-
-# Local History for Visual Studio Code
-.history/
-
-# Built Visual Studio Code Extensions
-*.vsix
-
-### VisualStudioCode Patch ###
-# Ignore all local history of files
-.history
-.ionide
-
-# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,python
-
-# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)
-
-.python-version
-*.db
-.vscode
-.changelog_generator.toml
-repopack-output.xml
-.envrc
-demo-db
sqliter_py-0.6.0/LICENSE.txt DELETED
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-Copyright (c) 2024 Grant Ramsay
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
-OR OTHER DEALINGS IN THE SOFTWARE.
sqliter_py-0.6.0/sqliter/helpers.py DELETED

@@ -1,35 +0,0 @@
-"""Utility functions for SQLiter internal operations.
-
-This module provides helper functions used across the SQLiter library,
-primarily for type inference and mapping between Python and SQLite
-data types. These utilities support the core functionality of model
-to database schema translation.
-"""
-
-from typing import Union
-
-from sqliter.constants import SQLITE_TYPE_MAPPING
-
-
-def infer_sqlite_type(field_type: Union[type, None]) -> str:
-    """Infer the SQLite column type based on the Python type.
-
-    This function maps Python types to their corresponding SQLite column
-    types. It's used when creating database tables to ensure that the
-    correct SQLite types are used for each field.
-
-    Args:
-        field_type: The Python type of the field, or None.
-
-    Returns:
-        A string representing the corresponding SQLite column type.
-
-    Note:
-        If the input type is None or not recognized, it defaults to 'TEXT'.
-    """
-    # If field_type is None, default to TEXT
-    if field_type is None:
-        return "TEXT"
-
-    # Map the simplified type to an SQLite type
-    return SQLITE_TYPE_MAPPING.get(field_type, "TEXT")
sqliter_py-0.6.0/sqliter/model/__init__.py DELETED

@@ -1,11 +0,0 @@
-"""This module provides the base model class for SQLiter database models.
-
-It exports the BaseDBModel class, which is used to define database
-models in SQLiter applications, and the Unique class, which is used to
-define unique constraints on model fields.
-"""
-
-from .model import BaseDBModel
-from .unique import Unique
-
-__all__ = ["BaseDBModel", "Unique"]
sqliter_py-0.6.0/sqliter/model/unique.py DELETED

@@ -1,19 +0,0 @@
-"""Define a custom field type for unique constraints in SQLiter."""
-
-from typing import Any
-
-from pydantic.fields import FieldInfo
-
-
-class Unique(FieldInfo):
-    """A custom field type for unique constraints in SQLiter."""
-
-    def __init__(self, default: Any = ..., **kwargs: Any) -> None:  # noqa: ANN401
-        """Initialize a Unique field.
-
-        Args:
-            default: The default value for the field.
-            **kwargs: Additional keyword arguments to pass to FieldInfo.
-        """
-        super().__init__(default=default, **kwargs)
-        self.unique = True
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/__init__.py: File without changes
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/exceptions.py: File without changes
{sqliter_py-0.6.0 → sqliter_py-0.10.0}/sqliter/query/__init__.py: File without changes