sqliter-py 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqliter-py might be problematic.
- sqliter_py-0.1.0/.gitignore +217 -0
- sqliter_py-0.1.0/PKG-INFO +30 -0
- sqliter_py-0.1.0/README.md +6 -0
- sqliter_py-0.1.0/pyproject.toml +99 -0
- sqliter_py-0.1.0/sqliter/__init__.py +5 -0
- sqliter_py-0.1.0/sqliter/model/__init__.py +8 -0
- sqliter_py-0.1.0/sqliter/model/model.py +38 -0
- sqliter_py-0.1.0/sqliter/query/__init__.py +5 -0
- sqliter_py-0.1.0/sqliter/query/query.py +139 -0
- sqliter_py-0.1.0/sqliter/sqliter.py +179 -0
sqliter_py-0.1.0/.gitignore
@@ -0,0 +1,217 @@
# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,python
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,linux,python

### Linux ###
*~

# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*

# KDE directory preferences
.directory

# Linux trash folder which might appear on any partition or disk
.Trash-*

# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml

# ruff
.ruff_cache/

# LSP config files
pyrightconfig.json

### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide

# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,linux,python

# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)

.python-version
*.db
.vscode

sqliter_py-0.1.0/PKG-INFO
@@ -0,0 +1,30 @@
Metadata-Version: 2.3
Name: sqliter-py
Version: 0.1.0
Summary: Interact with SQLite databases using Python and Pydantic
Project-URL: Pull Requests, https://github.com/seapagan/sqliter-py/pulls
Project-URL: Bug Tracker, https://github.com/seapagan/sqliter-py/issues
Project-URL: Repository, https://github.com/seapagan/sqliter-py
Author-email: Grant Ramsay <grant@gnramsay.com>
License-Expression: MIT
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.9
Requires-Dist: pydantic>=2.9.0
Description-Content-Type: text/markdown

# SQLiter

An SQLite wrapper in Python using Pydantic and written primarily using ChatGPT,
as an experiment in how viable it is to write working code using a LLM.

The code was then cleaned up, typed and linted by hand.

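Based on the modules included in this release (sqliter/model/model.py and sqliter/sqliter.py, shown later in this diff), a minimal quick-start could look like the sketch below. The User model, its fields, and the database filename are invented purely for illustration, and the imports use the concrete module paths visible in this diff rather than any package-level re-exports (the __init__.py files are not part of this section).

```python
from typing import Optional

from sqliter.model.model import BaseDBModel
from sqliter.sqliter import SqliterDB


class User(BaseDBModel):
    """Hypothetical model: two TEXT columns, keyed on 'name'."""

    name: str
    email: Optional[str] = None

    class Meta:
        table_name = "users"  # defaults to the lowercased class name if omitted
        primary_key = "name"  # used by get(), update() and delete()
        create_id = False     # skip the auto-increment id column


db = SqliterDB("example.db", auto_commit=True)
db.create_table(User)     # CREATE TABLE IF NOT EXISTS users (name TEXT, email TEXT, ...)
db.insert(User(name="Alice", email="alice@example.com"))
alice = db.get(User, "Alice")  # returns a User instance, or None if not found
```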
sqliter_py-0.1.0/pyproject.toml
@@ -0,0 +1,99 @@
[project]
name = "sqliter-py"
version = "0.1.0"
description = "Interact with SQLite databases using Python and Pydantic"
readme = "README.md"
requires-python = ">=3.9"
license = "MIT"
authors = [{ name = "Grant Ramsay", email = "grant@gnramsay.com" }]
dependencies = ["pydantic>=2.9.0"]

classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

[project.urls]
# "HomeHage" = "https://xxxxxx"
"Pull Requests" = "https://github.com/seapagan/sqliter-py/pulls"
"Bug Tracker" = "https://github.com/seapagan/sqliter-py/issues"
# "Changelog" = "https://github.com/seapagan/sqliter-py/blob/main/CHANGELOG.md"
"Repository" = "https://github.com/seapagan/sqliter-py"

# [project.scripts]
# demo = "demo:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.sdist]
packages = ["sqliter"]

[tool.hatch.build.targets.wheel]
packages = ["sqliter"]

[tool.uv]
dev-dependencies = ["mypy>=1.11.2", "ruff>=0.6.4"]

[tool.ruff]
line-length = 80
lint.select = ["ALL"] # we are being very strict!
lint.ignore = [
    "ANN101",
    "ANN102",
    "PGH003",
    "FBT002",
    "FBT003",
    "B006",
] # These rules are too strict even for us 😝
lint.extend-ignore = [
    "COM812",
    "ISC001",
] # these are ignored for ruff formatting

src = ["lice2"]
target-version = "py39" # minimum python version supported

[tool.ruff.format]
indent-style = "space"
quote-style = "double"

[tool.ruff.lint.pep8-naming]
classmethod-decorators = ["pydantic.validator", "pydantic.root_validator"]

[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.ruff.lint.extend-per-file-ignores]
"lice2/tests/**/*.py" = [
    "S101", # we can (and MUST!) use 'assert' in test files.
    "ANN001", # annotations for fixtures are sometimes a pain for test files
    "ARG00", # test fixtures often are not directly used
]

[tool.ruff.lint.isort]
known-first-party = ["lice2"]

[tool.ruff.lint.pyupgrade]
keep-runtime-typing = true

[tool.mypy]
python_version = "3.9"

[[tool.mypy.overrides]]
module = "pyperclip"
ignore_missing_imports = true

[[tool.mypy.overrides]]
disable_error_code = ["method-assign", "no-untyped-def", "attr-defined"]
module = "tests.*"

sqliter_py-0.1.0/sqliter/model/model.py
@@ -0,0 +1,38 @@
"""Define the Base model class."""

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel


class BaseDBModel(BaseModel):
    """Custom base model for database models."""

    class Meta:
        """Configure the base model with default options."""

        create_id: bool = True  # Whether to create an auto-increment ID
        primary_key: str = "id"  # Default primary key field
        table_name: Optional[str] = (
            None  # Table name, defaults to class name if not set
        )

    @classmethod
    def get_table_name(cls) -> str:
        """Get the table name from the Meta, or default to the classname."""
        table_name: str | None = getattr(cls.Meta, "table_name", None)
        if table_name is not None:
            return table_name
        return cls.__name__.lower()  # Default to class name in lowercase

    @classmethod
    def get_primary_key(cls) -> str:
        """Get the primary key from the Meta class or default to 'id'."""
        return getattr(cls.Meta, "primary_key", "id")

    @classmethod
    def should_create_id(cls) -> bool:
        """Check whether the model should create an auto-increment ID."""
        return getattr(cls.Meta, "create_id", True)

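As an illustration of how these Meta hooks resolve for subclasses (not part of the package), here is a short sketch with two hypothetical models, one overriding every option and one relying on the defaults:

```python
from sqliter.model.model import BaseDBModel


class Book(BaseDBModel):
    """Hypothetical model that overrides every Meta option."""

    title: str
    author: str

    class Meta:
        table_name = "books"   # read by get_table_name()
        primary_key = "title"  # read by get_primary_key()
        create_id = False      # read by should_create_id()


class Note(BaseDBModel):
    """Hypothetical model with no Meta overrides."""

    text: str


print(Book.get_table_name())    # "books"
print(Book.get_primary_key())   # "title"
print(Book.should_create_id())  # False

print(Note.get_table_name())    # "note" - falls back to the lowercased class name
print(Note.get_primary_key())   # "id"
print(Note.should_create_id())  # True
```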
sqliter_py-0.1.0/sqliter/query/query.py
@@ -0,0 +1,139 @@
"""Define the 'QueryBuilder' class for building SQL queries."""

from __future__ import annotations

from typing import TYPE_CHECKING, Any, Optional

from typing_extensions import Self

if TYPE_CHECKING:
    from sqliter import SqliterDB
    from sqliter.model import BaseDBModel


class QueryBuilder:
    """Functions to build and execute queries for a given model."""

    def __init__(self, db: SqliterDB, model_class: type[BaseDBModel]) -> None:
        """Initialize the query builder with the database, model class, etc."""
        self.db = db
        self.model_class = model_class
        self.table_name = model_class.get_table_name()  # Use model_class method
        self.filters: list[tuple[str, Any]] = []

    def filter(self, **conditions: str | float | None) -> Self:
        """Add filter conditions to the query."""
        for field, value in conditions.items():
            self.filters.append((field, value))
        return self

    def _execute_query(
        self,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        order_by: Optional[str] = None,
        *,
        fetch_one: bool = False,
    ) -> list[tuple[Any, ...]] | Optional[tuple[Any, ...]]:
        """Helper function to execute the query with filters."""
        fields = ", ".join(self.model_class.model_fields)
        where_clause = " AND ".join(
            [f"{field} = ?" for field, _ in self.filters]
        )
        sql = f"SELECT {fields} FROM {self.table_name}"  # noqa: S608

        if self.filters:
            sql += f" WHERE {where_clause}"

        if order_by:
            sql += f" ORDER BY {order_by}"

        if limit is not None:
            sql += f" LIMIT {limit}"

        if offset is not None:
            sql += f" OFFSET {offset}"

        values = [value for _, value in self.filters]

        with self.db.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, values)
            return cursor.fetchall() if not fetch_one else cursor.fetchone()

    def fetch_all(self) -> list[BaseDBModel]:
        """Fetch all results matching the filters."""
        results = self._execute_query()

        if results is None:
            return []

        return [
            self.model_class(
                **{
                    field: row[idx]
                    for idx, field in enumerate(self.model_class.model_fields)
                }
            )
            for row in results
        ]

    def fetch_one(self) -> BaseDBModel | None:
        """Fetch exactly one result."""
        result = self._execute_query(fetch_one=True)
        if not result:
            return None
        return self.model_class(
            **{
                field: result[idx]
                for idx, field in enumerate(self.model_class.model_fields)
            }
        )

    def fetch_first(self) -> BaseDBModel | None:
        """Fetch the first result of the query."""
        result = self._execute_query(limit=1)
        if not result:
            return None
        return self.model_class(
            **{
                field: result[0][idx]
                for idx, field in enumerate(self.model_class.model_fields)
            }
        )

    def fetch_last(self) -> BaseDBModel | None:
        """Fetch the last result of the query (based on the primary key)."""
        primary_key = self.model_class.get_primary_key()
        result = self._execute_query(limit=1, order_by=f"{primary_key} DESC")
        if not result:
            return None
        return self.model_class(
            **{
                field: result[0][idx]
                for idx, field in enumerate(self.model_class.model_fields)
            }
        )

    def count(self) -> int:
        """Return the count of records matching the filters."""
        where_clause = " AND ".join(
            [f"{field} = ?" for field, _ in self.filters]
        )
        sql = f"SELECT COUNT(*) FROM {self.table_name}"  # noqa: S608

        if self.filters:
            sql += f" WHERE {where_clause}"

        values = [value for _, value in self.filters]

        with self.db.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, values)
            result = cursor.fetchone()

        return int(result[0]) if result else 0

    def exists(self) -> bool:
        """Return True if any record matches the filters."""
        return self.count() > 0

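QueryBuilder is not normally constructed directly; it is returned by SqliterDB.select() (defined in sqliter/sqliter.py below). Reusing the hypothetical db and User objects from the earlier quick-start sketch, the chaining looks like this:

```python
# Each call to db.select(User) builds a fresh QueryBuilder for the users table.
alices = db.select(User).filter(name="Alice").fetch_all()  # list of User models
first = db.select(User).fetch_first()                      # first row, or None
last = db.select(User).fetch_last()                        # last row by primary key
total = db.select(User).count()                            # SELECT COUNT(*) ...
present = db.select(User).filter(name="Alice").exists()    # True if count() > 0
```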
sqliter_py-0.1.0/sqliter/sqliter.py
@@ -0,0 +1,179 @@
"""This is the main module for the sqliter package."""

from __future__ import annotations

import sqlite3
from typing import TYPE_CHECKING, Optional

from typing_extensions import Self

from sqliter.query.query import QueryBuilder

if TYPE_CHECKING:
    from types import TracebackType

    from sqliter.model.model import BaseDBModel


class SqliterDB:
    """Class to manage SQLite database interactions."""

    def __init__(self, db_filename: str, *, auto_commit: bool = False) -> None:
        """Initialize the class and options."""
        self.db_filename = db_filename
        self.auto_commit = auto_commit
        self.conn: Optional[sqlite3.Connection] = None

    def connect(self) -> sqlite3.Connection:
        """Create or return a connection to the SQLite database."""
        if not self.conn:
            self.conn = sqlite3.connect(self.db_filename)
        return self.conn

    def create_table(self, model_class: type[BaseDBModel]) -> None:
        """Create a table based on the Pydantic model."""
        table_name = model_class.get_table_name()
        primary_key = model_class.get_primary_key()
        create_id = model_class.should_create_id()

        fields = ", ".join(
            f"{field_name} TEXT" for field_name in model_class.model_fields
        )

        if create_id:
            create_table_sql = f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    {fields}
                )
            """
        else:
            create_table_sql = f"""
                CREATE TABLE IF NOT EXISTS {table_name} (
                    {fields},
                    PRIMARY KEY ({primary_key})
                )
            """

        with self.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(create_table_sql)
            conn.commit()

    def _maybe_commit(self, conn: sqlite3.Connection) -> None:
        """Commit changes if auto_commit is True."""
        if self.auto_commit:
            conn.commit()

    def insert(self, model_instance: BaseDBModel) -> None:
        """Insert a new record into the table defined by the Pydantic model."""
        model_class = type(model_instance)
        table_name = model_class.get_table_name()

        fields = ", ".join(model_class.model_fields)
        placeholders = ", ".join(["?"] * len(model_class.model_fields))
        values = tuple(
            getattr(model_instance, field) for field in model_class.model_fields
        )

        insert_sql = f"""
            INSERT OR REPLACE INTO {table_name} ({fields})
            VALUES ({placeholders})
        """  # noqa: S608

        with self.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(insert_sql, values)
            self._maybe_commit(conn)

    def get(
        self, model_class: type[BaseDBModel], primary_key_value: str
    ) -> BaseDBModel | None:
        """Retrieve a record by its PK and return a Pydantic instance."""
        table_name = model_class.get_table_name()
        primary_key = model_class.get_primary_key()

        fields = ", ".join(model_class.model_fields)

        select_sql = f"""
            SELECT {fields} FROM {table_name} WHERE {primary_key} = ?
        """  # noqa: S608

        with self.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(select_sql, (primary_key_value,))
            result = cursor.fetchone()

            if result:
                result_dict = {
                    field: result[idx]
                    for idx, field in enumerate(model_class.model_fields)
                }
                return model_class(**result_dict)
            return None

    def update(self, model_instance: BaseDBModel) -> None:
        """Update an existing record using the Pydantic model."""
        model_class = type(model_instance)
        table_name = model_class.get_table_name()
        primary_key = model_class.get_primary_key()

        fields = ", ".join(
            f"{field} = ?"
            for field in model_class.model_fields
            if field != primary_key
        )
        values = tuple(
            getattr(model_instance, field)
            for field in model_class.model_fields
            if field != primary_key
        )
        primary_key_value = getattr(model_instance, primary_key)

        update_sql = f"""
            UPDATE {table_name} SET {fields} WHERE {primary_key} = ?
        """  # noqa: S608

        with self.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(update_sql, (*values, primary_key_value))
            self._maybe_commit(conn)

    def delete(
        self, model_class: type[BaseDBModel], primary_key_value: str
    ) -> None:
        """Delete a record by its primary key."""
        table_name = model_class.get_table_name()
        primary_key = model_class.get_primary_key()

        delete_sql = f"""
            DELETE FROM {table_name} WHERE {primary_key} = ?
        """  # noqa: S608

        with self.connect() as conn:
            cursor = conn.cursor()
            cursor.execute(delete_sql, (primary_key_value,))
            self._maybe_commit(conn)

    def select(self, model_class: type[BaseDBModel]) -> QueryBuilder:
        """Start a query for the given model."""
        return QueryBuilder(self, model_class)

    # --- Context manager methods ---
    def __enter__(self) -> Self:
        """Enter the runtime context for the 'with' statement."""
        self.connect()
        return self

    def __exit__(
        self,
        exc_type: Optional[type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        """Exit the runtime context and close the connection."""
        if self.conn:
            if not self.auto_commit:
                self.conn.commit()
            self.conn.close()
            self.conn = None
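To round off, a sketch of the context-manager flow using the same hypothetical User model and filename from the earlier examples: __exit__ commits any pending changes when auto_commit is False and then closes the connection.

```python
# Illustrative only; reuses the hypothetical User model from the quick-start sketch.
with SqliterDB("example.db") as db:  # auto_commit defaults to False
    db.create_table(User)
    db.insert(User(name="Bob", email="bob@example.com"))

    bob = db.select(User).filter(name="Bob").fetch_one()
    if bob is not None:
        bob.email = "bob@example.org"
        db.update(bob)        # UPDATE users SET email = ? WHERE name = ?

    db.delete(User, "Bob")    # DELETE FROM users WHERE name = ?
# On exit the pending transaction is committed and the connection is closed.
```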