sqlspec 0.1.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec might be problematic. Click here for more details.
- sqlspec/__metadata__.py +1 -1
- sqlspec/_serialization.py +1 -1
- sqlspec/_typing.py +138 -0
- sqlspec/adapters/adbc/config.py +52 -0
- sqlspec/adapters/aiosqlite/__init__.py +3 -0
- sqlspec/adapters/aiosqlite/config.py +95 -0
- sqlspec/adapters/asyncmy/__init__.py +3 -0
- sqlspec/adapters/asyncmy/config.py +176 -0
- sqlspec/adapters/asyncpg/__init__.py +0 -0
- sqlspec/adapters/asyncpg/config.py +145 -0
- sqlspec/adapters/duckdb/__init__.py +0 -0
- sqlspec/adapters/duckdb/config.py +201 -0
- sqlspec/adapters/oracledb/__init__.py +13 -0
- sqlspec/adapters/oracledb/config/__init__.py +9 -0
- sqlspec/adapters/oracledb/config/_asyncio.py +95 -0
- sqlspec/adapters/oracledb/config/_common.py +151 -0
- sqlspec/adapters/oracledb/config/_sync.py +95 -0
- sqlspec/adapters/psycopg/__init__.py +0 -0
- sqlspec/adapters/psycopg/config/__init__.py +9 -0
- sqlspec/adapters/psycopg/config/_async.py +75 -0
- sqlspec/adapters/psycopg/config/_common.py +73 -0
- sqlspec/adapters/psycopg/config/_sync.py +75 -0
- sqlspec/adapters/sqlite/__init__.py +0 -0
- sqlspec/adapters/sqlite/config.py +92 -0
- sqlspec/config.py +16 -0
- sqlspec/exceptions.py +29 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/litestar/__init__.py +0 -0
- sqlspec/extensions/litestar/plugin.py +34 -0
- sqlspec/filters.py +35 -28
- sqlspec/typing.py +415 -0
- sqlspec-0.4.0.dist-info/METADATA +84 -0
- sqlspec-0.4.0.dist-info/RECORD +39 -0
- {sqlspec-0.1.1.dist-info → sqlspec-0.4.0.dist-info}/WHEEL +1 -1
- sqlspec-0.4.0.dist-info/licenses/NOTICE +29 -0
- sqlspec/types/empty.py +0 -18
- sqlspec/types/protocols.py +0 -117
- sqlspec/utils/dataclass.py +0 -130
- sqlspec-0.1.1.dist-info/METADATA +0 -25
- sqlspec-0.1.1.dist-info/RECORD +0 -14
- /sqlspec/{types → adapters}/__init__.py +0 -0
- /sqlspec/{utils → adapters/adbc}/__init__.py +0 -0
sqlspec/typing.py
ADDED
|
@@ -0,0 +1,415 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Sequence
|
|
4
|
+
from dataclasses import Field, fields
|
|
5
|
+
from functools import lru_cache
|
|
6
|
+
from typing import (
|
|
7
|
+
TYPE_CHECKING,
|
|
8
|
+
Annotated,
|
|
9
|
+
Any,
|
|
10
|
+
TypeVar,
|
|
11
|
+
Union,
|
|
12
|
+
cast,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from typing_extensions import TypeAlias, TypeGuard
|
|
16
|
+
|
|
17
|
+
from sqlspec._typing import (
|
|
18
|
+
MSGSPEC_INSTALLED,
|
|
19
|
+
PYDANTIC_INSTALLED,
|
|
20
|
+
UNSET,
|
|
21
|
+
BaseModel,
|
|
22
|
+
DataclassProtocol,
|
|
23
|
+
Empty,
|
|
24
|
+
EmptyType,
|
|
25
|
+
FailFast,
|
|
26
|
+
Struct,
|
|
27
|
+
TypeAdapter,
|
|
28
|
+
UnsetType,
|
|
29
|
+
convert,
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
if TYPE_CHECKING:
|
|
33
|
+
from collections.abc import Iterable
|
|
34
|
+
from collections.abc import Set as AbstractSet
|
|
35
|
+
|
|
36
|
+
from sqlspec.filters import StatementFilter
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
PYDANTIC_USE_FAILFAST = False # leave permanently disabled for now
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
T = TypeVar("T")
|
|
43
|
+
|
|
44
|
+
ModelT = TypeVar("ModelT", bound="Struct | BaseModel | DataclassProtocol")
|
|
45
|
+
|
|
46
|
+
FilterTypeT = TypeVar("FilterTypeT", bound="StatementFilter")
|
|
47
|
+
"""Type variable for filter types.
|
|
48
|
+
|
|
49
|
+
:class:`~sqlspec.filters.StatementFilter`
|
|
50
|
+
"""
|
|
51
|
+
ModelDictT: TypeAlias = Union[dict[str, Any], ModelT, DataclassProtocol, Struct, BaseModel]
|
|
52
|
+
"""Type alias for model dictionaries.
|
|
53
|
+
|
|
54
|
+
Represents:
|
|
55
|
+
- :type:`dict[str, Any]` | :class:`~advanced_alchemy.base.ModelProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel` | :class:`litestar.dto.data_structures.DTOData` | :class:`~advanced_alchemy.base.ModelProtocol`
|
|
56
|
+
"""
|
|
57
|
+
ModelDictListT: TypeAlias = Sequence[Union[dict[str, Any], ModelT, DataclassProtocol, Struct, BaseModel]]
|
|
58
|
+
"""Type alias for model dictionary lists.
|
|
59
|
+
|
|
60
|
+
A list or sequence of any of the following:
|
|
61
|
+
- :type:`Sequence`[:type:`dict[str, Any]` | :class:`~advanced_alchemy.base.ModelProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`]
|
|
62
|
+
|
|
63
|
+
"""
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def is_dataclass_instance(obj: Any) -> TypeGuard[DataclassProtocol]:
    """Determine whether ``obj`` is an instance of a dataclass.

    The check looks for the ``__dataclass_fields__`` attribute on the
    object's class, so dataclass *types* themselves are not matched.

    Args:
        obj: An object to check.

    Returns:
        True if the object is a dataclass instance.
    """
    obj_type = type(obj)
    return hasattr(obj_type, "__dataclass_fields__")
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@lru_cache(typed=True)
def get_type_adapter(f: type[T]) -> TypeAdapter[T]:
    """Build and memoize a pydantic type adapter for ``f``.

    Results are cached per distinct type via :func:`functools.lru_cache`.

    Args:
        f: Type to create a type adapter for.

    Returns:
        :class:`pydantic.TypeAdapter`[:class:`typing.TypeVar`[T]]
    """
    if not PYDANTIC_USE_FAILFAST:
        return TypeAdapter(f)
    # FailFast makes pydantic stop validating on the first error.
    return TypeAdapter(Annotated[f, FailFast()])
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def is_pydantic_model(v: Any) -> TypeGuard[BaseModel]:
    """Check whether a value is a pydantic model instance.

    Args:
        v: Value to check.

    Returns:
        bool
    """
    if not PYDANTIC_INSTALLED:
        # Without pydantic installed, BaseModel is only a placeholder type.
        return False
    return isinstance(v, BaseModel)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def is_msgspec_model(v: Any) -> TypeGuard[Struct]:
    """Check whether a value is a msgspec ``Struct`` instance.

    Args:
        v: Value to check.

    Returns:
        bool
    """
    if not MSGSPEC_INSTALLED:
        # Without msgspec installed, Struct is only a placeholder type.
        return False
    return isinstance(v, Struct)
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def is_dict(v: Any) -> TypeGuard[dict[str, Any]]:
    """Check whether a value is a plain dictionary.

    Args:
        v: Value to check.

    Returns:
        bool
    """
    return isinstance(v, dict)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def is_dict_with_field(v: Any, field_name: str) -> TypeGuard[dict[str, Any]]:
    """Check whether ``v`` is a dictionary containing the key ``field_name``.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    return isinstance(v, dict) and field_name in v
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def is_dict_without_field(v: Any, field_name: str) -> TypeGuard[dict[str, Any]]:
    """Check whether ``v`` is a dictionary that lacks the key ``field_name``.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    return isinstance(v, dict) and field_name not in v
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def is_dataclass(v: Any) -> TypeGuard[DataclassProtocol]:
    """Check whether a value is a dataclass instance.

    Args:
        v: Value to check.

    Returns:
        bool
    """
    # Same test as is_dataclass_instance: the class carries __dataclass_fields__.
    return hasattr(type(v), "__dataclass_fields__")
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def is_dataclass_with_field(v: Any, field_name: str) -> TypeGuard[DataclassProtocol]:
    """Check whether ``v`` is a dataclass instance defining ``field_name``.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    if not hasattr(type(v), "__dataclass_fields__"):
        return False
    return field_name in v.__dataclass_fields__
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def is_dataclass_without_field(v: Any, field_name: str) -> TypeGuard[DataclassProtocol]:
    """Check whether ``v`` is a dataclass instance lacking ``field_name``.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    if not hasattr(type(v), "__dataclass_fields__"):
        return False
    return field_name not in v.__dataclass_fields__
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def is_pydantic_model_with_field(v: Any, field_name: str) -> TypeGuard[BaseModel]:
    """Check whether ``v`` is a pydantic model declaring ``field_name``.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    if not is_pydantic_model(v):
        return False
    return field_name in v.model_fields
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def is_pydantic_model_without_field(v: Any, field_name: str) -> TypeGuard[BaseModel]:
    """Check if a pydantic model does not have a specific field.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    # A TypeGuard[BaseModel] result may only be True when v actually is a
    # pydantic model. Negating is_pydantic_model_with_field() would also
    # return True for non-model values (e.g. dicts), wrongly narrowing them
    # to BaseModel. Mirrors is_dataclass_without_field / is_dict_without_field.
    return is_pydantic_model(v) and field_name not in v.model_fields
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def is_msgspec_model_with_field(v: Any, field_name: str) -> TypeGuard[Struct]:
    """Check whether ``v`` is a msgspec ``Struct`` declaring ``field_name``.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    if not is_msgspec_model(v):
        return False
    return field_name in v.__struct_fields__
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def is_msgspec_model_without_field(v: Any, field_name: str) -> TypeGuard[Struct]:
    """Check if a msgspec model does not have a specific field.

    Args:
        v: Value to check.
        field_name: Field name to check for.

    Returns:
        bool
    """
    # A TypeGuard[Struct] result may only be True when v actually is a
    # msgspec Struct. Negating is_msgspec_model_with_field() would also
    # return True for non-Struct values, wrongly narrowing them to Struct.
    # Mirrors is_dataclass_without_field / is_dict_without_field.
    return is_msgspec_model(v) and field_name not in v.__struct_fields__
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def extract_dataclass_fields(
    dt: DataclassProtocol,
    exclude_none: bool = False,
    exclude_empty: bool = False,
    include: AbstractSet[str] | None = None,
    exclude: AbstractSet[str] | None = None,
) -> tuple[Field[Any], ...]:
    """Extract the fields of a dataclass instance, optionally filtered.

    Args:
        dt: A dataclass instance.
        exclude_none: Drop fields whose current value is ``None``.
        exclude_empty: Drop fields whose current value is ``Empty``.
        include: Restrict the result to these field names.
        exclude: Remove these field names from the result.

    Raises:
        ValueError: If a field name appears in both ``include`` and ``exclude``.

    Returns:
        A tuple of :class:`dataclasses.Field` objects that passed every filter.
    """
    include = include or set()
    exclude = exclude or set()

    common = include & exclude
    if common:
        msg = f"Fields {common} are both included and excluded."
        raise ValueError(msg)

    selected: list[Field[Any]] = []
    for field in fields(dt):
        value = getattr(dt, field.name)
        if exclude_none and value is None:
            continue
        if exclude_empty and value is Empty:
            continue
        if include and field.name not in include:
            continue
        if exclude and field.name in exclude:
            continue
        selected.append(field)
    return tuple(selected)
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def extract_dataclass_items(
    dt: DataclassProtocol,
    exclude_none: bool = False,
    exclude_empty: bool = False,
    include: AbstractSet[str] | None = None,
    exclude: AbstractSet[str] | None = None,
) -> tuple[tuple[str, Any], ...]:
    """Extract dataclass ``(name, value)`` pairs.

    Unlike the stdlib ``asdict`` helper, this function does not pickle values.

    Args:
        dt: A dataclass instance.
        exclude_none: Whether to exclude None values.
        exclude_empty: Whether to exclude Empty values.
        include: An iterable of fields to include.
        exclude: An iterable of fields to exclude.

    Returns:
        A tuple of key/value pairs.
    """
    return tuple(
        (field.name, getattr(dt, field.name))
        for field in extract_dataclass_fields(dt, exclude_none, exclude_empty, include, exclude)
    )
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
def dataclass_to_dict(
    obj: DataclassProtocol,
    exclude_none: bool = False,
    exclude_empty: bool = False,
    convert_nested: bool = True,
    exclude: set[str] | None = None,
) -> dict[str, Any]:
    """Convert a dataclass to a dictionary.

    This method has important differences to the standard library version:
    - it does not deepcopy values
    - it does not recurse into collections

    Args:
        obj: A dataclass instance.
        exclude_none: Whether to exclude None values.
        exclude_empty: Whether to exclude Empty values.
        convert_nested: Whether to recursively convert nested dataclasses.
        exclude: Field names to exclude (applied at the top level only).

    Returns:
        A dictionary of key/value pairs.
    """
    ret: dict[str, Any] = {}
    for field in extract_dataclass_fields(obj, exclude_none, exclude_empty, exclude=exclude):
        value = getattr(obj, field.name)
        if convert_nested and is_dataclass_instance(value):
            # Forward convert_nested explicitly so the caller's choice applies
            # uniformly at every recursion depth.
            ret[field.name] = dataclass_to_dict(value, exclude_none, exclude_empty, convert_nested)
        else:
            # Reuse the value read above instead of a redundant second getattr().
            ret[field.name] = value
    return ret
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
def schema_dump(
    data: dict[str, Any] | Struct | BaseModel | DataclassProtocol,
    exclude_unset: bool = True,
) -> dict[str, Any]:
    """Dump a data object to a dictionary.

    Args:
        data: dict[str, Any] | ModelT | Struct | BaseModel | DataclassProtocol
        exclude_unset: :type:`bool` Whether to exclude unset values.

    Returns:
        :type: dict[str, Any]
    """
    if is_dataclass(data):
        return dataclass_to_dict(data, exclude_empty=exclude_unset)
    if is_pydantic_model(data):
        return data.model_dump(exclude_unset=exclude_unset)
    if is_msgspec_model(data):
        if exclude_unset:
            # UNSET is a sentinel: compare by identity, not equality, so a
            # field value with a permissive __eq__ cannot be dropped by mistake.
            return {f: val for f in data.__struct_fields__ if (val := getattr(data, f, None)) is not UNSET}
        return {f: getattr(data, f, None) for f in data.__struct_fields__}
    # Fall through: assume the caller handed us a plain dict already.
    return cast("dict[str,Any]", data)
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
__all__ = (
|
|
370
|
+
"MSGSPEC_INSTALLED",
|
|
371
|
+
"PYDANTIC_INSTALLED",
|
|
372
|
+
"PYDANTIC_USE_FAILFAST",
|
|
373
|
+
"UNSET",
|
|
374
|
+
"BaseModel",
|
|
375
|
+
"DataclassProtocol",
|
|
376
|
+
"Empty",
|
|
377
|
+
"EmptyType",
|
|
378
|
+
"FailFast",
|
|
379
|
+
"FilterTypeT",
|
|
380
|
+
"ModelDictListT",
|
|
381
|
+
"ModelDictT",
|
|
382
|
+
"Struct",
|
|
383
|
+
"TypeAdapter",
|
|
384
|
+
"UnsetType",
|
|
385
|
+
"convert",
|
|
386
|
+
"dataclass_to_dict",
|
|
387
|
+
"extract_dataclass_fields",
|
|
388
|
+
"extract_dataclass_items",
|
|
389
|
+
"get_type_adapter",
|
|
390
|
+
"is_dataclass",
|
|
391
|
+
"is_dataclass_instance",
|
|
392
|
+
"is_dataclass_with_field",
|
|
393
|
+
"is_dataclass_without_field",
|
|
394
|
+
"is_dict",
|
|
395
|
+
"is_dict_with_field",
|
|
396
|
+
"is_dict_without_field",
|
|
397
|
+
"is_msgspec_model",
|
|
398
|
+
"is_msgspec_model_with_field",
|
|
399
|
+
"is_msgspec_model_without_field",
|
|
400
|
+
"is_pydantic_model",
|
|
401
|
+
"is_pydantic_model_with_field",
|
|
402
|
+
"is_pydantic_model_without_field",
|
|
403
|
+
"schema_dump",
|
|
404
|
+
)
|
|
405
|
+
|
|
406
|
+
if TYPE_CHECKING:
|
|
407
|
+
if not PYDANTIC_INSTALLED:
|
|
408
|
+
from ._typing import BaseModel, FailFast, TypeAdapter
|
|
409
|
+
else:
|
|
410
|
+
from pydantic import BaseModel, FailFast, TypeAdapter # noqa: TC004
|
|
411
|
+
|
|
412
|
+
if not MSGSPEC_INSTALLED:
|
|
413
|
+
from ._typing import UNSET, Struct, UnsetType, convert
|
|
414
|
+
else:
|
|
415
|
+
from msgspec import UNSET, Struct, UnsetType, convert # noqa: TC004
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: sqlspec
|
|
3
|
+
Version: 0.4.0
|
|
4
|
+
Summary: SQL Experiments in Python
|
|
5
|
+
Author-email: Cody Fincher <cody@litestar.dev>
|
|
6
|
+
Maintainer-email: Litestar Developers <hello@litestar.dev>
|
|
7
|
+
License-File: NOTICE
|
|
8
|
+
Requires-Python: <4.0,>=3.9
|
|
9
|
+
Requires-Dist: eval-type-backport; python_version <= '3.9'
|
|
10
|
+
Requires-Dist: sqlglot
|
|
11
|
+
Requires-Dist: typing-extensions>=4.0.0
|
|
12
|
+
Provides-Extra: adbc
|
|
13
|
+
Requires-Dist: adbc-driver-manager; extra == 'adbc'
|
|
14
|
+
Requires-Dist: pyarrow; extra == 'adbc'
|
|
15
|
+
Provides-Extra: aioodbc
|
|
16
|
+
Requires-Dist: aioodbc; extra == 'aioodbc'
|
|
17
|
+
Provides-Extra: aiosqlite
|
|
18
|
+
Requires-Dist: aiosqlite; extra == 'aiosqlite'
|
|
19
|
+
Provides-Extra: asyncmy
|
|
20
|
+
Requires-Dist: asyncmy; extra == 'asyncmy'
|
|
21
|
+
Provides-Extra: asyncpg
|
|
22
|
+
Requires-Dist: asyncpg; extra == 'asyncpg'
|
|
23
|
+
Provides-Extra: bigquery
|
|
24
|
+
Requires-Dist: google-cloud-bigquery; extra == 'bigquery'
|
|
25
|
+
Provides-Extra: duckdb
|
|
26
|
+
Requires-Dist: duckdb; extra == 'duckdb'
|
|
27
|
+
Provides-Extra: fastapi
|
|
28
|
+
Requires-Dist: fastapi; extra == 'fastapi'
|
|
29
|
+
Provides-Extra: flask
|
|
30
|
+
Requires-Dist: flask; extra == 'flask'
|
|
31
|
+
Provides-Extra: litestar
|
|
32
|
+
Requires-Dist: litestar; extra == 'litestar'
|
|
33
|
+
Provides-Extra: msgspec
|
|
34
|
+
Requires-Dist: msgspec; extra == 'msgspec'
|
|
35
|
+
Provides-Extra: oracledb
|
|
36
|
+
Requires-Dist: oracledb; extra == 'oracledb'
|
|
37
|
+
Provides-Extra: performance
|
|
38
|
+
Requires-Dist: google-re2; (sys_platform == 'linux') and extra == 'performance'
|
|
39
|
+
Requires-Dist: sqlglot[rs]; extra == 'performance'
|
|
40
|
+
Provides-Extra: psycopg
|
|
41
|
+
Requires-Dist: psycopg[binary,pool]; extra == 'psycopg'
|
|
42
|
+
Provides-Extra: pydantic
|
|
43
|
+
Requires-Dist: pydantic; extra == 'pydantic'
|
|
44
|
+
Provides-Extra: pymssql
|
|
45
|
+
Requires-Dist: pymssql; extra == 'pymssql'
|
|
46
|
+
Provides-Extra: pymysql
|
|
47
|
+
Requires-Dist: pymysql; extra == 'pymysql'
|
|
48
|
+
Provides-Extra: spanner
|
|
49
|
+
Requires-Dist: google-cloud-spanner; extra == 'spanner'
|
|
50
|
+
Description-Content-Type: text/markdown
|
|
51
|
+
|
|
52
|
+
<!-- markdownlint-disable -->
|
|
53
|
+
<p align="center">
|
|
54
|
+
<!-- github-banner-start -->
|
|
55
|
+
<img src="https://raw.githubusercontent.com/litestar-org/branding/main/assets/Branding%20-%20SVG%20-%20Transparent/Logo%20-%20Banner%20-%20Inline%20-%20Light.svg#gh-light-mode-only" alt="Litestar Logo - Light" width="100%" height="auto" />
|
|
56
|
+
<img src="https://raw.githubusercontent.com/litestar-org/branding/main/assets/Branding%20-%20SVG%20-%20Transparent/Logo%20-%20Banner%20-%20Inline%20-%20Dark.svg#gh-dark-mode-only" alt="Litestar Logo - Dark" width="100%" height="auto" />
|
|
57
|
+
<!-- github-banner-end -->
|
|
58
|
+
|
|
59
|
+
</p>
|
|
60
|
+
<div align="center">
|
|
61
|
+
<!-- markdownlint-restore -->
|
|
62
|
+
|
|
63
|
+
# SQLSpec
|
|
64
|
+
|
|
65
|
+
SQL Experiments in Python
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
## Minimal SQL Abstractions for Python.
|
|
69
|
+
|
|
70
|
+
- Modern: Typed and Extensible
|
|
71
|
+
- Multi-database: SQLite, Postgres, DuckDB, MySQL, Oracle, SQL Server, Spanner, Big Query, and more...
|
|
72
|
+
- Easy ability to manipulate and add filters to queries
|
|
73
|
+
- Validate and Convert between dialects with `sqlglot`
|
|
74
|
+
- and more...
|
|
75
|
+
|
|
76
|
+
## Can it do `X`?
|
|
77
|
+
|
|
78
|
+
- Probably not currently; but, if it makes sense we can add enhancements.
|
|
79
|
+
|
|
80
|
+
## Inspiration
|
|
81
|
+
|
|
82
|
+
`aiosql` is the primary influence for this library. However, I wanted to be able to use the query interface from `aiosql` a bit more flexibly.
|
|
83
|
+
|
|
84
|
+
Why not add it to `aiosql`? Where it makes sense, many of these changes will likely get submitted to aiosql as a PR (`spanner` and `bigquery` drivers are likely the starting point.)
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
sqlspec/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
|
|
2
|
+
sqlspec/__metadata__.py,sha256=Vw99abV_UQNVH2jB0IBa9-8emyZQcXm1J9eMtLxFX2Y,496
|
|
3
|
+
sqlspec/_serialization.py,sha256=OL4x0Rz5UjF7RgAKqhYChi5qSS_ImtaVrIlA4DhIKUE,824
|
|
4
|
+
sqlspec/_typing.py,sha256=Za41GlNdEKAGTy66GTofOiGYROE7ccFMvxZdh_vxIFk,3278
|
|
5
|
+
sqlspec/config.py,sha256=BOX_V_q2MOP33tK0ISpYaiQJt3zrvK4D_JIBD9FOixY,272
|
|
6
|
+
sqlspec/exceptions.py,sha256=fhCOILBj0J7HJP67BNSC0d9YUbW8QpZPXM55xJJzE8A,3039
|
|
7
|
+
sqlspec/filters.py,sha256=H5UAn1HKm598QqDujQHwvytKHjw4QoQ2zPpgXMcYpSU,3552
|
|
8
|
+
sqlspec/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
9
|
+
sqlspec/typing.py,sha256=t5fpzxUCWkKJIpFDKmUeq77RkdmNNFqMj4azgJpRS7k,11316
|
|
10
|
+
sqlspec/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
sqlspec/adapters/adbc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
12
|
+
sqlspec/adapters/adbc/config.py,sha256=BeiZdqoBQXuht0GXWleqVKL_LOFI3Ygna1XWDmb-dBw,1704
|
|
13
|
+
sqlspec/adapters/aiosqlite/__init__.py,sha256=PLqWg24l3TooJvqA0Xf1WErrxtqwo8DEoL_Zp2iSCzs,68
|
|
14
|
+
sqlspec/adapters/aiosqlite/config.py,sha256=5DOKaYT_lBFTimrITJwIqTHoweZ2aZlQ07kti1VJtxk,3821
|
|
15
|
+
sqlspec/adapters/asyncmy/__init__.py,sha256=o0R_Azae3FHiSZ1TQ5ZjyCneDOuvnEeMjmSkhuiKoWo,103
|
|
16
|
+
sqlspec/adapters/asyncmy/config.py,sha256=7VsNEokhBtBgXDj2gakdwplOYPaV3ADIwWX-o22vwNk,5461
|
|
17
|
+
sqlspec/adapters/asyncpg/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
|
+
sqlspec/adapters/asyncpg/config.py,sha256=NyVvsT2x3IIZBZF6dTaSv1w6lCPEvmUprYXyTSJ9Ixk,6014
|
|
19
|
+
sqlspec/adapters/duckdb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
20
|
+
sqlspec/adapters/duckdb/config.py,sha256=BgGGkk0Y3CQboQeI1aB81_ABVGmgOL1cbk_cZKzeggQ,7927
|
|
21
|
+
sqlspec/adapters/oracledb/__init__.py,sha256=fFQ2xOxFcgpr-ug4AVv430irnJgBRUINvt4sL3qzyBw,275
|
|
22
|
+
sqlspec/adapters/oracledb/config/__init__.py,sha256=XoHgInT4IbXjDg5ax3ncuUoVvnYB5qQjI-Ib7gwSycU,338
|
|
23
|
+
sqlspec/adapters/oracledb/config/_asyncio.py,sha256=Z87V7t-5blS6NkMhvFjoBgUrJtKgOrTyKSl8xy2uD_M,3211
|
|
24
|
+
sqlspec/adapters/oracledb/config/_common.py,sha256=k8ou3aWR1En1jl5uo4fORMG3CoF-XQ96qdwULtteHLo,6173
|
|
25
|
+
sqlspec/adapters/oracledb/config/_sync.py,sha256=guCrotTdNJajcbG5xwVCLoza7wLPQxZkQaw99s4ibNE,3071
|
|
26
|
+
sqlspec/adapters/psycopg/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
27
|
+
sqlspec/adapters/psycopg/config/__init__.py,sha256=pXI9Pa2VYESTchPgM3tt5kFF8tsmgq-ksZRGR6pgiUQ,280
|
|
28
|
+
sqlspec/adapters/psycopg/config/_async.py,sha256=sNQU3dzencGgOToHZuMfxoqhR9EvRyrxdWqtPDC2gUY,2712
|
|
29
|
+
sqlspec/adapters/psycopg/config/_common.py,sha256=gC-QQDy9DNjp0DGZAT3wu7MKW8ejHe5fuHU-318Vgr0,2757
|
|
30
|
+
sqlspec/adapters/psycopg/config/_sync.py,sha256=lwzaeO6I-nYuC7vif-bjn2jLugPwbPjTW226_hcweqo,2590
|
|
31
|
+
sqlspec/adapters/sqlite/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
32
|
+
sqlspec/adapters/sqlite/config.py,sha256=rCv6XWWWi1dzpmAVnwHxg-ahmAif78yfFhA548fNaT8,3697
|
|
33
|
+
sqlspec/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
34
|
+
sqlspec/extensions/litestar/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
35
|
+
sqlspec/extensions/litestar/plugin.py,sha256=oiBFfRffNvy_vnGptREd6JYZGB6Yd98KbtVct_VcW0A,837
|
|
36
|
+
sqlspec-0.4.0.dist-info/METADATA,sha256=OnTkY_OG3IeqaJQBkzHjsKuQwkfmhQO4WnhXKbEoDLc,3222
|
|
37
|
+
sqlspec-0.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
38
|
+
sqlspec-0.4.0.dist-info/licenses/NOTICE,sha256=Lyir8ozXWov7CyYS4huVaOCNrtgL17P-bNV-5daLntQ,1634
|
|
39
|
+
sqlspec-0.4.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
# Early versions of this utility adapt code from `aiosql`.
|
|
2
|
+
# BSD 2-Clause License
|
|
3
|
+
Copyright (c) 2014-2017, Honza Pokorny
|
|
4
|
+
Copyright (c) 2018, William Vaughn
|
|
5
|
+
All rights reserved.
|
|
6
|
+
|
|
7
|
+
Redistribution and use in source and binary forms, with or without
|
|
8
|
+
modification, are permitted provided that the following conditions are met:
|
|
9
|
+
|
|
10
|
+
1. Redistributions of source code must retain the above copyright notice, this
|
|
11
|
+
list of conditions and the following disclaimer.
|
|
12
|
+
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
13
|
+
this list of conditions and the following disclaimer in the documentation
|
|
14
|
+
and/or other materials provided with the distribution.
|
|
15
|
+
|
|
16
|
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
17
|
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
18
|
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
19
|
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
|
20
|
+
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
21
|
+
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
22
|
+
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
|
23
|
+
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
24
|
+
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
25
|
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
26
|
+
|
|
27
|
+
The views and conclusions contained in the software and documentation are those
|
|
28
|
+
of the authors and should not be interpreted as representing official policies,
|
|
29
|
+
either expressed or implied, of the aiosql Project.
|
sqlspec/types/empty.py
DELETED
|
@@ -1,18 +0,0 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
from enum import Enum
|
|
4
|
-
from typing import Final, Literal, Union
|
|
5
|
-
|
|
6
|
-
from msgspec import UnsetType # pyright: ignore[reportMissingImports]
|
|
7
|
-
|
|
8
|
-
__all__ = ("Empty", "EmptyType")
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
class _EmptyEnum(Enum):
|
|
12
|
-
"""A sentinel enum used as placeholder."""
|
|
13
|
-
|
|
14
|
-
EMPTY = 0
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
EmptyType = Union[Literal[_EmptyEnum.EMPTY], UnsetType]
|
|
18
|
-
Empty: Final = _EmptyEnum.EMPTY
|