autoendpoint 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autoendpoint-0.1.0/.gitignore +149 -0
- autoendpoint-0.1.0/.gitlab-ci.yml +68 -0
- autoendpoint-0.1.0/PKG-INFO +12 -0
- autoendpoint-0.1.0/README.md +115 -0
- autoendpoint-0.1.0/pyproject.toml +52 -0
- autoendpoint-0.1.0/src/autoendpoint/__init__.py +5 -0
- autoendpoint-0.1.0/src/autoendpoint/core.py +216 -0
- autoendpoint-0.1.0/src/autoendpoint/graphql_utils.py +42 -0
- autoendpoint-0.1.0/src/autoendpoint/models.py +39 -0
- autoendpoint-0.1.0/src/autoendpoint/utils.py +31 -0
- autoendpoint-0.1.0/uv.lock +864 -0
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
/.idea/*
|
|
2
|
+
# Byte-compiled / optimized / DLL files
|
|
3
|
+
__pycache__/
|
|
4
|
+
*.py[cod]
|
|
5
|
+
*$py.class
|
|
6
|
+
|
|
7
|
+
# C extensions
|
|
8
|
+
*.so
|
|
9
|
+
|
|
10
|
+
# Distribution / packaging
|
|
11
|
+
.Python
|
|
12
|
+
build/
|
|
13
|
+
develop-eggs/
|
|
14
|
+
dist/
|
|
15
|
+
downloads/
|
|
16
|
+
eggs/
|
|
17
|
+
.eggs/
|
|
18
|
+
lib/
|
|
19
|
+
lib64/
|
|
20
|
+
parts/
|
|
21
|
+
sdist/
|
|
22
|
+
var/
|
|
23
|
+
wheels/
|
|
24
|
+
share/python-wheels/
|
|
25
|
+
*.egg-info/
|
|
26
|
+
.installed.cfg
|
|
27
|
+
*.egg
|
|
28
|
+
MANIFEST
|
|
29
|
+
|
|
30
|
+
# PyInstaller
|
|
31
|
+
# Usually these files are written by a Python script, before an executable
|
|
32
|
+
# is created.
|
|
33
|
+
*.manifest
|
|
34
|
+
*.spec
|
|
35
|
+
|
|
36
|
+
# Installer logs
|
|
37
|
+
pip-log.txt
|
|
38
|
+
pip-delete-this-directory.txt
|
|
39
|
+
|
|
40
|
+
# Unit test / coverage reports
|
|
41
|
+
htmlcov/
|
|
42
|
+
.tox/
|
|
43
|
+
.nox/
|
|
44
|
+
.coverage
|
|
45
|
+
.coverage.*
|
|
46
|
+
.cache
|
|
47
|
+
nosetests.xml
|
|
48
|
+
coverage.xml
|
|
49
|
+
*.cover
|
|
50
|
+
*.py,cover
|
|
51
|
+
.hypothesis/
|
|
52
|
+
.pytest_cache/
|
|
53
|
+
cover/
|
|
54
|
+
|
|
55
|
+
# Translations
|
|
56
|
+
*.mo
|
|
57
|
+
*.pot
|
|
58
|
+
|
|
59
|
+
# Django stuff:
|
|
60
|
+
*.log
|
|
61
|
+
local_settings.py
|
|
62
|
+
db.sqlite3
|
|
63
|
+
db.sqlite3-journal
|
|
64
|
+
|
|
65
|
+
# Flask stuff:
|
|
66
|
+
instance/
|
|
67
|
+
.webassets-cache
|
|
68
|
+
|
|
69
|
+
# Scrapy stuff:
|
|
70
|
+
.scrapy
|
|
71
|
+
|
|
72
|
+
# Sphinx documentation
|
|
73
|
+
docs/_build/
|
|
74
|
+
|
|
75
|
+
# PyBuilder
|
|
76
|
+
.pybuilder/
|
|
77
|
+
target/
|
|
78
|
+
|
|
79
|
+
# Jupyter Notebook
|
|
80
|
+
.ipynb_checkpoints
|
|
81
|
+
|
|
82
|
+
# IPython
|
|
83
|
+
profile_default/
|
|
84
|
+
ipython_config.py
|
|
85
|
+
|
|
86
|
+
# pyenv
|
|
87
|
+
# For a library or binary, you shouldn't check in .python-version
|
|
88
|
+
# .python-version
|
|
89
|
+
|
|
90
|
+
# pipenv
|
|
91
|
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
|
92
|
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
|
93
|
+
# having no cross-platform support, pipenv may install dependencies that don't work, or even
|
|
94
|
+
# fail to install.
|
|
95
|
+
#Pipfile.lock
|
|
96
|
+
|
|
97
|
+
# poetry
|
|
98
|
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
|
99
|
+
#poetry.lock
|
|
100
|
+
|
|
101
|
+
# pdm
|
|
102
|
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
|
103
|
+
#pdm.lock
|
|
104
|
+
|
|
105
|
+
# PEP 582; used by e.g. github.com/pdm-project/pdm
|
|
106
|
+
__pypackages__/
|
|
107
|
+
|
|
108
|
+
# Celery stuff
|
|
109
|
+
celerybeat-schedule
|
|
110
|
+
celerybeat.pid
|
|
111
|
+
|
|
112
|
+
# SageMath parsed files
|
|
113
|
+
*.sage.py
|
|
114
|
+
|
|
115
|
+
# Environments
|
|
116
|
+
.env
|
|
117
|
+
.venv
|
|
118
|
+
env/
|
|
119
|
+
venv/
|
|
120
|
+
ENV/
|
|
121
|
+
env.bak/
|
|
122
|
+
venv.bak/
|
|
123
|
+
|
|
124
|
+
# Spyder project settings
|
|
125
|
+
.spyderproject
|
|
126
|
+
.spyproject
|
|
127
|
+
|
|
128
|
+
# Rope project settings
|
|
129
|
+
.ropeproject
|
|
130
|
+
|
|
131
|
+
# mkdocs documentation
|
|
132
|
+
/site
|
|
133
|
+
|
|
134
|
+
# mypy
|
|
135
|
+
.mypy_cache/
|
|
136
|
+
.dmypy.json
|
|
137
|
+
dmypy.json
|
|
138
|
+
|
|
139
|
+
# Pyre type checker
|
|
140
|
+
.pyre/
|
|
141
|
+
|
|
142
|
+
# pytype static type analyzer
|
|
143
|
+
.pytype/
|
|
144
|
+
|
|
145
|
+
# Cython debug symbols
|
|
146
|
+
cython_debug/
|
|
147
|
+
|
|
148
|
+
# uv
|
|
149
|
+
.uv-cache/
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
default:
|
|
2
|
+
image: python:3.14-slim
|
|
3
|
+
before_script:
|
|
4
|
+
- apt-get update && apt-get install -y curl ca-certificates
|
|
5
|
+
- curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
6
|
+
# Change the source line to match the new uv location
|
|
7
|
+
- source $HOME/.local/bin/env
|
|
8
|
+
|
|
9
|
+
stages:
|
|
10
|
+
- test
|
|
11
|
+
- build
|
|
12
|
+
- deploy
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
variables:
|
|
16
|
+
UV_CACHE_DIR: ../.uv-cache
|
|
17
|
+
|
|
18
|
+
cache:
|
|
19
|
+
paths:
|
|
20
|
+
- ../.uv-cache
|
|
21
|
+
|
|
22
|
+
test:
|
|
23
|
+
stage: test
|
|
24
|
+
script:
|
|
25
|
+
- uv sync --group test
|
|
26
|
+
# Generate both XML (for GitLab's visualization) and HTML (for the report)
|
|
27
|
+
- uv run pytest --cov=autoendpoint --cov-report=xml --cov-report=term --cov-report=html:htmlcov
|
|
28
|
+
coverage: '/(?i)total.*? (100(?:\.0+)?%|[1-9]?\d(?:\.\d+)?%)/'
|
|
29
|
+
artifacts:
|
|
30
|
+
reports:
|
|
31
|
+
coverage_report:
|
|
32
|
+
coverage_format: cobertura
|
|
33
|
+
path: coverage.xml
|
|
34
|
+
paths:
|
|
35
|
+
- htmlcov/
|
|
36
|
+
expire_in: 1 week
|
|
37
|
+
|
|
38
|
+
pages:
|
|
39
|
+
stage: build
|
|
40
|
+
script:
|
|
41
|
+
- uv sync --group docs
|
|
42
|
+
# Create the structure: public/docs and public/coverage
|
|
43
|
+
- mkdir -p public/docs
|
|
44
|
+
- mkdir -p public/coverage
|
|
45
|
+
# Build Sphinx documentation into public/docs
|
|
46
|
+
- cd docs
|
|
47
|
+
- uv run sphinx-build -b html . ../public/docs
|
|
48
|
+
- cd ..
|
|
49
|
+
# Move the HTML coverage report from 'test' job into public/coverage
|
|
50
|
+
- mv htmlcov/* public/coverage/
|
|
51
|
+
# Optional: Create a simple index.html for navigation in public/
|
|
52
|
+
- echo "<html><head><title>Project Overview</title></head><body><h1>Project Overview</h1><ul><li><a href='docs/'>Documentation</a></li><li><a href='coverage/'>Test Coverage</a></li></ul></body></html>" > public/index.html
|
|
53
|
+
artifacts:
|
|
54
|
+
paths:
|
|
55
|
+
- public
|
|
56
|
+
rules:
|
|
57
|
+
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
|
58
|
+
|
|
59
|
+
publish:
|
|
60
|
+
stage: deploy
|
|
61
|
+
script:
|
|
62
|
+
- uv build
|
|
63
|
+
- uv tool run twine upload dist/*
|
|
64
|
+
variables:
|
|
65
|
+
TWINE_USERNAME: __token__
|
|
66
|
+
TWINE_PASSWORD: ${PYPI_TOKEN}
|
|
67
|
+
rules:
|
|
68
|
+
- if: $CI_COMMIT_TAG # This job ONLY runs when you push a tag (e.g., v0.1.0)
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: autoendpoint
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Library to create basic endpoints from SQLModel models
|
|
5
|
+
Requires-Python: >=3.14
|
|
6
|
+
Requires-Dist: fastapi>=0.129.0
|
|
7
|
+
Requires-Dist: sqlmodel>=0.0.33
|
|
8
|
+
Requires-Dist: strawberry-graphql[fastapi]>=0.312.0
|
|
9
|
+
Provides-Extra: postgresql
|
|
10
|
+
Requires-Dist: asyncpg>=0.31.0; extra == 'postgresql'
|
|
11
|
+
Requires-Dist: greenlet>=3.3.2; extra == 'postgresql'
|
|
12
|
+
Requires-Dist: psycopg2-binary>=2.9.11; extra == 'postgresql'
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
# Auto Endpoint
|
|
2
|
+
|
|
3
|
+
`autoendpoint` is a lightweight library for FastAPI that automatically generates asynchronous RESTful API endpoints for your SQLModel classes. It simplifies the creation of CRUD operations by dynamically building Pydantic models for data validation and handling database interactions using SQLAlchemy's `AsyncSession`.
|
|
4
|
+
|
|
5
|
+
## Key Features
|
|
6
|
+
|
|
7
|
+
- **Full CRUD Endpoints**: Automatically generates `GET` (all), `POST` (create), `GET` (by ID), `PUT` (update), `PATCH` (partial update), `DELETE`, `GET` (by unique field), and `GET` (by any field filter) endpoints for any SQLModel.
|
|
8
|
+
- **Dynamic GraphQL Endpoint**: Automatically generates a GraphQL schema and adds a `/graphql` endpoint (powered by Strawberry) to query your models.
|
|
9
|
+
- **String Representation in GraphQL**: Automatically includes a `string_representation` field in GraphQL queries, which uses your model's `__str__` method for flexible object display.
|
|
10
|
+
- **Enhanced OpenAPI Documentation**: All generated endpoint parameters (Body, Path, Query) include clear, dynamic descriptions for better readability in tools like FastMCP and Swagger UI.
|
|
11
|
+
- **Asynchronous Support**: Built for high-performance async workflows using `AsyncSession`.
|
|
12
|
+
- **Smart Model Generation**: Dynamically creates schemas for creation, full updates, and partial updates (PATCH), excluding primary keys from request bodies.
|
|
13
|
+
- **Flexible Retrieval**:
|
|
14
|
+
- **Unique Retrieval**: Endpoint to fetch a single record by any unique field (e.g., email), with error handling for duplicates.
|
|
15
|
+
- **Generic Filtering**: New endpoint to retrieve multiple records by any field and value.
|
|
16
|
+
- **FastAPI Integration**: Seamlessly integrates with existing FastAPI applications using an `APIRouter`.
|
|
17
|
+
- **Type Safety**: Leverages Python type hints and SQLModel/Pydantic for robust validation.
|
|
18
|
+
- **Google Style Docstrings**: Fully documented for Sphinx compatibility.
|
|
19
|
+
|
|
20
|
+
## Installation
|
|
21
|
+
|
|
22
|
+
You can install `autoendpoint` using `uv`. To include asynchronous database support (e.g., PostgreSQL), use the `postgresql` extra:
|
|
23
|
+
|
|
24
|
+
```bash
|
|
25
|
+
# Basic installation
|
|
26
|
+
uv add autoendpoint
|
|
27
|
+
|
|
28
|
+
# Installation with PostgreSQL support (asyncpg, greenlet, psycopg2-binary)
|
|
29
|
+
uv add "autoendpoint[postgresql]"
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
*Note: The library is designed to be driver-agnostic but requires `greenlet` and a compatible async driver (like `asyncpg` or `aiosqlite`) for SQLAlchemy's asynchronous operations.*
|
|
33
|
+
|
|
34
|
+
## Quick Start
|
|
35
|
+
|
|
36
|
+
Here's how to use `AutoEndpoint` in your FastAPI project:
|
|
37
|
+
|
|
38
|
+
### 1. Define your SQLModel
|
|
39
|
+
|
|
40
|
+
```python
|
|
41
|
+
import uuid
|
|
42
|
+
from sqlmodel import Field, SQLModel
|
|
43
|
+
from typing import Optional
|
|
44
|
+
|
|
45
|
+
class Hero(SQLModel, table=True):
|
|
46
|
+
id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
|
|
47
|
+
name: str
|
|
48
|
+
secret_name: str
|
|
49
|
+
age: Optional[int] = None
|
|
50
|
+
email: str = Field(unique=True)
|
|
51
|
+
|
|
52
|
+
def __str__(self):
|
|
53
|
+
return f"{self.name} ({self.age or '?'})"
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
### 2. Set up the Async Engine and Session
|
|
57
|
+
|
|
58
|
+
```python
|
|
59
|
+
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
|
60
|
+
from sqlalchemy.orm import sessionmaker
|
|
61
|
+
|
|
62
|
+
DATABASE_URL = "postgresql+asyncpg://user:password@localhost/dbname"
|
|
63
|
+
engine = create_async_engine(DATABASE_URL)
|
|
64
|
+
async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
### 3. Initialize and Register Endpoints
|
|
68
|
+
|
|
69
|
+
```python
|
|
70
|
+
from fastapi import FastAPI
|
|
71
|
+
from autoendpoint.core import AutoEndpoint
|
|
72
|
+
|
|
73
|
+
app = FastAPI()
|
|
74
|
+
|
|
75
|
+
@app.on_event("startup")
|
|
76
|
+
async def startup():
|
|
77
|
+
async with async_session_maker() as session:
|
|
78
|
+
# Create endpoints for the Hero model
|
|
79
|
+
# Note: In a production app, you might want to manage the session differently
|
|
80
|
+
# enable_graphql defaults to True
|
|
81
|
+
my_endpoints = AutoEndpoint(db_session=session, models=[Hero], enable_graphql=True)
|
|
82
|
+
my_endpoints.init_app(app=app)
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Generated Endpoints
|
|
86
|
+
|
|
87
|
+
For a model named `Hero`, the following endpoints are automatically generated:
|
|
88
|
+
|
|
89
|
+
- **GET `/hero`**: List all records.
|
|
90
|
+
- **POST `/hero`**: Create a new record. Primary key is excluded from the request body and expected to be server-side or factory generated.
|
|
91
|
+
- **GET `/hero/{id}`**: Retrieve a single record by its primary key.
|
|
92
|
+
- **PUT `/hero/{id}`**: Update all fields of a record (excluding primary key).
|
|
93
|
+
- **PATCH `/hero/{id}`**: Partially update fields of a record (excluding primary key).
|
|
94
|
+
- **DELETE `/hero/{id}`**: Delete a record by its primary key.
|
|
95
|
+
- **GET `/hero/unique/{field_name}?value=...`**: Retrieve a single record by any unique field (e.g., `/hero/unique/email?value=test@example.com`). Raises a 400 error if multiple records are found.
|
|
96
|
+
- **GET `/hero/filter/{field_name}?value=...`**: Retrieve all records matching a specific field and value (e.g., `/hero/filter/age?value=30`).
|
|
97
|
+
- **POST `/graphql`**: The GraphQL endpoint (if enabled) for querying your models. Includes a `string_representation` field for each model that reflects its `__str__` output.
|
|
98
|
+
|
|
99
|
+
## FastMCP Compatibility
|
|
100
|
+
|
|
101
|
+
The generated endpoints are enhanced with `description` fields for all parameters, making them highly readable and easy to use with [FastMCP](https://github.com/jlowin/fastmcp) and other LLM-friendly tools. Each parameter explicitly describes its role (e.g., "The Hero data to create" or "The Hero record to retrieve by id").
|
|
102
|
+
|
|
103
|
+
## Documentation
|
|
104
|
+
|
|
105
|
+
The project includes Sphinx documentation and test coverage reports.
|
|
106
|
+
|
|
107
|
+
In GitLab CI, these are automatically generated and hosted via GitLab Pages:
|
|
108
|
+
- **Documentation**: `https://autoendpoint-beaddd.gitlab.io/docs/`
|
|
109
|
+
- **Coverage Report**: `https://autoendpoint-beaddd.gitlab.io/coverage/`
|
|
110
|
+
|
|
111
|
+
Methods are documented using the Google format.
|
|
112
|
+
|
|
113
|
+
## License
|
|
114
|
+
|
|
115
|
+
Under construction.
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "autoendpoint"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "Library to create basic endpoints from SQLModel models"
|
|
5
|
+
requires-python = ">=3.14"
|
|
6
|
+
dependencies = [
|
|
7
|
+
"fastapi>=0.129.0",
|
|
8
|
+
"sqlmodel>=0.0.33",
|
|
9
|
+
"strawberry-graphql[fastapi]>=0.312.0",
|
|
10
|
+
]
|
|
11
|
+
|
|
12
|
+
[project.optional-dependencies]
|
|
13
|
+
postgresql = [
|
|
14
|
+
"asyncpg>=0.31.0",
|
|
15
|
+
"greenlet>=3.3.2",
|
|
16
|
+
"psycopg2-binary>=2.9.11",
|
|
17
|
+
]
|
|
18
|
+
|
|
19
|
+
[build-system]
|
|
20
|
+
requires = ["hatchling"]
|
|
21
|
+
build-backend = "hatchling.build"
|
|
22
|
+
|
|
23
|
+
[dependency-groups]
|
|
24
|
+
# For running your sandbox and manual testing
|
|
25
|
+
dev = [
|
|
26
|
+
"uvicorn>=0.40.0",
|
|
27
|
+
"httpx>=0.24.0", # Useful for manual scripts
|
|
28
|
+
"ruff>=0.15.1",
|
|
29
|
+
"autoendpoint[postgresql]",
|
|
30
|
+
]
|
|
31
|
+
# For your GitLab CI and local testing
|
|
32
|
+
test = [
|
|
33
|
+
"pytest>=7.0.0",
|
|
34
|
+
"pytest-cov>=4.0.0",
|
|
35
|
+
]
|
|
36
|
+
# For Sphinx and GitLab Pages
|
|
37
|
+
docs = [
|
|
38
|
+
"sphinx>=7.0.0",
|
|
39
|
+
"sphinx-rtd-theme>=1.2.0",
|
|
40
|
+
]
|
|
41
|
+
|
|
42
|
+
[tool.hatch.build.targets.sdist]
|
|
43
|
+
exclude = [
|
|
44
|
+
"/.uv-cache",
|
|
45
|
+
"/.venv",
|
|
46
|
+
"/sandbox",
|
|
47
|
+
"/tests",
|
|
48
|
+
"/docs",
|
|
49
|
+
]
|
|
50
|
+
|
|
51
|
+
[tool.hatch.build.targets.wheel]
|
|
52
|
+
packages = ["src/autoendpoint"]
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
from typing import Any, List, Type
|
|
2
|
+
|
|
3
|
+
from fastapi import APIRouter, Body, HTTPException, Path, Query
|
|
4
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
5
|
+
from sqlmodel import SQLModel, select
|
|
6
|
+
from strawberry.fastapi import GraphQLRouter
|
|
7
|
+
|
|
8
|
+
from autoendpoint.graphql_utils import generate_graphql_schema
|
|
9
|
+
from autoendpoint.models import generate_crud_models
|
|
10
|
+
from autoendpoint.utils import is_primary_key
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class AutoEndpoint:
    """Automatic FastAPI endpoint generator for SQLModel models.

    This class takes a database session and a list of SQLModel models and automatically
    generates a set of CRUD (Create, Read, Update, Delete) endpoints for each model.
    The generated endpoints are registered to a FastAPI APIRouter.

    Attributes:
        _db_session (AsyncSession): The asynchronous database session.
        models (List[Type[SQLModel]]): A list of SQLModel classes to generate endpoints for.
        router (APIRouter): The FastAPI router containing the generated endpoints.
    """

    def __init__(self, db_session: AsyncSession, models: List[Type[SQLModel]], enable_graphql: bool = True):
        """Initializes the AutoEndpoint with a database session and models.

        Args:
            db_session (AsyncSession): The asynchronous SQLAlchemy/SQLModel session.
            models (List[Type[SQLModel]]): A list of SQLModel classes to be exposed via API.
            enable_graphql (bool): Whether to enable the GraphQL endpoint. Defaults to True.

        NOTE(review): the session passed here is captured once and reused by every
        generated endpoint for the lifetime of the application. Confirm this is
        intended — a single long-lived AsyncSession is shared across all requests.
        """
        self._db_session = db_session
        self.models = models
        self.enable_graphql = enable_graphql
        self.router = APIRouter()
        # Routes are built eagerly so the router is fully populated as soon as
        # the instance exists; init_app() then only has to include it.
        self._build_routes()

    def _build_routes(self):
        """Iterates through all models and registers their endpoints."""
        for model in self.models:
            self._register_model_endpoints(model=model)

        # GraphQL is added once, after all REST routes, covering every model.
        if self.enable_graphql:
            self._add_graphql_route()

    def _add_graphql_route(self):
        """Adds a dynamic GraphQL endpoint using Strawberry."""
        schema = generate_graphql_schema(models=self.models, db_session=self._db_session)
        graphql_app = GraphQLRouter(schema)
        self.router.include_router(graphql_app, prefix="/graphql", tags=["GraphQL"])

    def _register_model_endpoints(self, model: Type[SQLModel]):
        """Generates and registers CRUD endpoints for a specific SQLModel.

        The URL prefix is the lowercased class name (e.g. ``Hero`` -> ``/hero``).
        Models with no detectable primary key only receive the list, create,
        unique-field and filter routes — the by-id routes are skipped.
        """
        name = model.__name__.lower()
        model_fields = model.model_fields
        # Derived request schemas: create (no PK), full update, partial update.
        CreateModel, UpdateModel, PatchModel = generate_crud_models(model=model)
        # First field flagged as primary key, or None if the model has none.
        pk_name = next((n for n, f in model_fields.items() if is_primary_key(field=f)), None)

        self._add_read_all_route(model=model, name=name)
        self._add_create_one_route(model=model, name=name, CreateModel=CreateModel)

        if pk_name:
            pk_type = model_fields[pk_name].annotation
            self._add_read_one_by_id_route(model=model, name=name, pk_name=pk_name, pk_type=pk_type)
            self._add_update_one_route(model=model, name=name, pk_name=pk_name, pk_type=pk_type, UpdateModel=UpdateModel)
            self._add_patch_one_route(model=model, name=name, pk_name=pk_name, pk_type=pk_type, PatchModel=PatchModel)
            self._add_delete_one_route(model=model, name=name, pk_name=pk_name, pk_type=pk_type)

        self._add_read_one_by_unique_field_route(model=model, name=name, model_fields=model_fields)
        self._add_read_by_field_route(model=model, name=name, model_fields=model_fields)

    def _add_read_all_route(self, model: Type[SQLModel], name: str):
        """Registers GET /{name}: return every record of the model."""
        @self.router.get(f"/{name}",
                         response_model=List[model],
                         tags=[model.__name__],
                         description=f"Retrieve all {model.__name__} records",
                         summary=f"Retrieve all {model.__name__}")
        async def read_all():
            results = await self._db_session.execute(select(model))
            return results.scalars().all()

    def _add_create_one_route(self, model: Type[SQLModel], name: str, CreateModel: Type[SQLModel]):
        """Registers POST /{name}: create one record from a CreateModel body.

        The primary key is absent from CreateModel, so it must be produced
        server-side (e.g. a ``default_factory`` on the model field).
        """
        @self.router.post(f"/{name}",
                          response_model=model,
                          tags=[model.__name__],
                          description=f"Create a new {model.__name__} record",
                          summary=f"Create {model.__name__}")
        async def create_one(item: CreateModel = Body(..., description=f"The {model.__name__} data to create")):
            db_item = model(**item.model_dump())
            self._db_session.add(db_item)
            await self._db_session.commit()
            # Refresh to return server-generated values (e.g. the new PK).
            await self._db_session.refresh(db_item)
            return db_item

    def _add_read_one_by_id_route(self, model: Type[SQLModel], name: str, pk_name: str, pk_type: Type):
        """Registers GET /{name}/{pk}: fetch a single record by primary key.

        Raises:
            HTTPException: 404 if no record matches, 400 if more than one does.
        """
        @self.router.get(f"/{name}/{{{pk_name}}}",
                         response_model=model,
                         tags=[model.__name__],
                         description=f"Retrieve a {model.__name__} record by {pk_name}",
                         summary=f"Retrieve {model.__name__} by {pk_name}")
        async def read_one_by_id(pk_value: pk_type = Path(..., alias=pk_name, description=f"The {model.__name__} record to retrieve by {pk_name}")):
            results = await self._db_session.execute(select(model).where(getattr(model, pk_name) == pk_value))
            records = results.scalars().all()
            if not records:
                raise HTTPException(status_code=404, detail=f"{model.__name__} not found")
            # Defensive: a PK should be unique, but surface duplicates loudly.
            if len(records) > 1:
                raise HTTPException(status_code=400, detail=f"Multiple {model.__name__} records found for {pk_name}={pk_value}")
            return records[0]

    def _add_update_one_route(self, model: Type[SQLModel], name: str, pk_name: str, pk_type: Type, UpdateModel: Type[SQLModel]):
        """Registers PUT /{name}/{pk}: replace every non-PK field of a record.

        Raises:
            HTTPException: 404 if the record does not exist.
        """
        @self.router.put(f"/{name}/{{{pk_name}}}",
                         response_model=model,
                         tags=[model.__name__],
                         description=f"Update a {model.__name__} record by {pk_name}",
                         summary=f"Update {model.__name__} by {pk_name}")
        async def update_one(item: UpdateModel = Body(..., description=f"The {model.__name__} data to update"),
                             pk_value: pk_type = Path(..., alias=pk_name, description=f"The {model.__name__} record to update by {pk_name}")):
            results = await self._db_session.execute(select(model).where(getattr(model, pk_name) == pk_value))
            db_item = results.scalars().first()
            if not db_item:
                raise HTTPException(status_code=404, detail=f"{model.__name__} not found")

            # PUT semantics: apply every field, including ones left at default.
            update_data = item.model_dump(exclude_unset=False)
            for key, value in update_data.items():
                setattr(db_item, key, value)

            self._db_session.add(db_item)
            await self._db_session.commit()
            await self._db_session.refresh(db_item)
            return db_item

    def _add_patch_one_route(self, model: Type[SQLModel], name: str, pk_name: str, pk_type: Type, PatchModel: Type[SQLModel]):
        """Registers PATCH /{name}/{pk}: update only the fields sent by the client.

        Raises:
            HTTPException: 404 if the record does not exist.
        """
        @self.router.patch(f"/{name}/{{{pk_name}}}",
                           response_model=model,
                           tags=[model.__name__],
                           description=f"Partially update a {model.__name__} record by {pk_name}",
                           summary=f"Patch {model.__name__} by {pk_name}")
        async def patch_one(item: PatchModel = Body(..., description=f"The {model.__name__} data to patch"),
                            pk_value: pk_type = Path(..., alias=pk_name, description=f"The {model.__name__} record to patch by {pk_name}")):
            results = await self._db_session.execute(select(model).where(getattr(model, pk_name) == pk_value))
            db_item = results.scalars().first()
            if not db_item:
                raise HTTPException(status_code=404, detail=f"{model.__name__} not found")

            # PATCH semantics: only fields explicitly present in the request.
            update_data = item.model_dump(exclude_unset=True)
            for key, value in update_data.items():
                setattr(db_item, key, value)

            self._db_session.add(db_item)
            await self._db_session.commit()
            await self._db_session.refresh(db_item)
            return db_item

    def _add_delete_one_route(self, model: Type[SQLModel], name: str, pk_name: str, pk_type: Type):
        """Registers DELETE /{name}/{pk}: remove a record by primary key.

        Returns a ``{"detail": ...}`` confirmation payload (no response_model).

        Raises:
            HTTPException: 404 if the record does not exist.
        """
        @self.router.delete(f"/{name}/{{{pk_name}}}",
                            tags=[model.__name__],
                            description=f"Delete a {model.__name__} record by {pk_name}",
                            summary=f"Delete {model.__name__} by {pk_name}")
        async def delete_one(pk_value: pk_type = Path(..., alias=pk_name, description=f"The {model.__name__} record to delete by {pk_name}")):
            results = await self._db_session.execute(select(model).where(getattr(model, pk_name) == pk_value))
            db_item = results.scalars().first()
            if not db_item:
                raise HTTPException(status_code=404, detail=f"{model.__name__} not found")

            await self._db_session.delete(db_item)
            await self._db_session.commit()
            return {"detail": f"{model.__name__} deleted"}

    def _add_read_one_by_unique_field_route(self, model: Type[SQLModel], name: str, model_fields: dict):
        """Registers GET /{name}/unique/{field_name}?value=...: one record by a unique field.

        NOTE(review): uniqueness is not checked against the model definition —
        any field name present on the model is accepted, and duplicates are
        only detected at query time (400).

        Raises:
            HTTPException: 400 for an unknown field or multiple matches,
                404 when nothing matches.
        """
        @self.router.get(f"/{name}/unique/{{field_name}}",
                         response_model=model,
                         tags=[model.__name__],
                         description=f"Retrieve a {model.__name__} record by a unique field and value",
                         summary=f"Retrieve {model.__name__} by unique field")
        async def read_one_by_unique_field(
            field_name: str = Path(..., description="The name of the unique field to search by"),
            value: Any = Query(..., description="The value of the unique field to search for")
        ):
            if field_name not in model_fields:
                raise HTTPException(status_code=400, detail=f"Field '{field_name}' not found in {model.__name__}")

            results = await self._db_session.execute(select(model).where(getattr(model, field_name) == value))
            records = results.scalars().all()

            if not records:
                raise HTTPException(status_code=404, detail=f"{model.__name__} not found")
            if len(records) > 1:
                raise HTTPException(status_code=400, detail=f"Multiple {model.__name__} records found for {field_name}={value}")
            return records[0]

    def _add_read_by_field_route(self, model: Type[SQLModel], name: str, model_fields: dict):
        """Registers GET /{name}/filter/{field_name}?value=...: all records matching a field.

        An empty list is a valid response (no 404 for zero matches).

        Raises:
            HTTPException: 400 for a field name the model does not define.
        """
        @self.router.get(f"/{name}/filter/{{field_name}}",
                         response_model=List[model],
                         tags=[model.__name__],
                         description=f"Retrieve {model.__name__} records by a field and value",
                         summary=f"Retrieve {model.__name__} by field")
        async def read_by_field(
            field_name: str = Path(..., description="The name of the field to filter by"),
            value: Any = Query(..., description="The value of the field to filter for")
        ):
            if field_name not in model_fields:
                raise HTTPException(status_code=400, detail=f"Field '{field_name}' not found in {model.__name__}")

            results = await self._db_session.execute(select(model).where(getattr(model, field_name) == value))
            return results.scalars().all()


    def init_app(self, app):
        """Includes the generated router into a FastAPI application.

        Args:
            app: The FastAPI application instance.
        """
        app.include_router(self.router)
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from typing import List, Type
|
|
2
|
+
|
|
3
|
+
import strawberry
|
|
4
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
5
|
+
from sqlmodel import SQLModel, select
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def generate_graphql_schema(models: List[Type[SQLModel]], db_session: AsyncSession) -> strawberry.Schema:
    """Dynamically generates a Strawberry GraphQL schema for the given SQLModels.

    Each model contributes one GraphQL type (named ``<Model>Type``) plus a
    ``list_<model>`` query field that returns all rows of that model. Every
    type also exposes a ``string_representation`` field backed by the model's
    ``__str__``.

    Bug fix: previously every generated type reused the literal class name
    ``ModelType``; Strawberry requires unique type names per schema, so
    passing more than one model made ``strawberry.Schema`` fail with a
    duplicated-type-name error. Each type now gets a unique per-model name.

    Args:
        models: A list of SQLModel classes to include in the schema.
        db_session: The asynchronous database session to use for queries.

    Returns:
        A Strawberry Schema object.
    """

    def _build_type(m: Type[SQLModel]):
        """Builds a Strawberry type for one model, with a unique schema name."""
        @strawberry.experimental.pydantic.type(model=m, all_fields=True, name=f"{m.__name__}Type")
        class ModelType:
            @strawberry.field
            def string_representation(self, root: m) -> str:
                return str(root)

        return ModelType

    def _build_resolver(m: Type[SQLModel], model_type):
        """Builds an async resolver bound to one model (avoids late binding)."""
        async def resolve(self) -> List[model_type]:
            results = await db_session.execute(select(m))
            # NOTE(review): rows are returned as raw SQLModel instances;
            # Strawberry's default attribute resolver reads fields off them
            # directly — confirm no from_pydantic conversion is required for
            # non-scalar fields.
            return results.scalars().all()

        return resolve

    query_fields = {}
    for model in models:
        model_type = _build_type(model)
        # Strawberry converts snake_case field names to camelCase by default.
        query_fields[f"list_{model.__name__.lower()}"] = strawberry.field(resolver=_build_resolver(model, model_type))

    DynamicQuery = strawberry.type(type("Query", (object,), query_fields))

    return strawberry.Schema(query=DynamicQuery)
|