sqlspec 0.12.2__py3-none-any.whl → 0.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/_sql.py +21 -180
- sqlspec/adapters/adbc/config.py +10 -12
- sqlspec/adapters/adbc/driver.py +120 -118
- sqlspec/adapters/aiosqlite/config.py +3 -3
- sqlspec/adapters/aiosqlite/driver.py +100 -130
- sqlspec/adapters/asyncmy/config.py +3 -4
- sqlspec/adapters/asyncmy/driver.py +123 -135
- sqlspec/adapters/asyncpg/config.py +3 -7
- sqlspec/adapters/asyncpg/driver.py +98 -140
- sqlspec/adapters/bigquery/config.py +4 -5
- sqlspec/adapters/bigquery/driver.py +125 -167
- sqlspec/adapters/duckdb/config.py +3 -6
- sqlspec/adapters/duckdb/driver.py +114 -111
- sqlspec/adapters/oracledb/config.py +6 -5
- sqlspec/adapters/oracledb/driver.py +242 -259
- sqlspec/adapters/psqlpy/config.py +3 -7
- sqlspec/adapters/psqlpy/driver.py +118 -93
- sqlspec/adapters/psycopg/config.py +18 -31
- sqlspec/adapters/psycopg/driver.py +283 -236
- sqlspec/adapters/sqlite/config.py +3 -3
- sqlspec/adapters/sqlite/driver.py +103 -97
- sqlspec/config.py +0 -4
- sqlspec/driver/_async.py +89 -98
- sqlspec/driver/_common.py +52 -17
- sqlspec/driver/_sync.py +81 -105
- sqlspec/driver/connection.py +207 -0
- sqlspec/driver/mixins/_csv_writer.py +91 -0
- sqlspec/driver/mixins/_pipeline.py +38 -49
- sqlspec/driver/mixins/_result_utils.py +27 -9
- sqlspec/driver/mixins/_storage.py +67 -181
- sqlspec/driver/mixins/_type_coercion.py +3 -4
- sqlspec/driver/parameters.py +138 -0
- sqlspec/exceptions.py +10 -2
- sqlspec/extensions/aiosql/adapter.py +0 -10
- sqlspec/extensions/litestar/handlers.py +0 -1
- sqlspec/extensions/litestar/plugin.py +0 -3
- sqlspec/extensions/litestar/providers.py +0 -14
- sqlspec/loader.py +25 -90
- sqlspec/protocols.py +542 -0
- sqlspec/service/__init__.py +3 -2
- sqlspec/service/_util.py +147 -0
- sqlspec/service/base.py +1116 -9
- sqlspec/statement/builder/__init__.py +42 -32
- sqlspec/statement/builder/_ddl_utils.py +0 -10
- sqlspec/statement/builder/_parsing_utils.py +10 -4
- sqlspec/statement/builder/base.py +67 -22
- sqlspec/statement/builder/column.py +283 -0
- sqlspec/statement/builder/ddl.py +91 -67
- sqlspec/statement/builder/delete.py +23 -7
- sqlspec/statement/builder/insert.py +29 -15
- sqlspec/statement/builder/merge.py +4 -4
- sqlspec/statement/builder/mixins/_aggregate_functions.py +113 -14
- sqlspec/statement/builder/mixins/_common_table_expr.py +0 -1
- sqlspec/statement/builder/mixins/_delete_from.py +1 -1
- sqlspec/statement/builder/mixins/_from.py +10 -8
- sqlspec/statement/builder/mixins/_group_by.py +0 -1
- sqlspec/statement/builder/mixins/_insert_from_select.py +0 -1
- sqlspec/statement/builder/mixins/_insert_values.py +0 -2
- sqlspec/statement/builder/mixins/_join.py +20 -13
- sqlspec/statement/builder/mixins/_limit_offset.py +3 -3
- sqlspec/statement/builder/mixins/_merge_clauses.py +3 -4
- sqlspec/statement/builder/mixins/_order_by.py +2 -2
- sqlspec/statement/builder/mixins/_pivot.py +4 -7
- sqlspec/statement/builder/mixins/_select_columns.py +6 -5
- sqlspec/statement/builder/mixins/_unpivot.py +6 -9
- sqlspec/statement/builder/mixins/_update_from.py +2 -1
- sqlspec/statement/builder/mixins/_update_set.py +11 -8
- sqlspec/statement/builder/mixins/_where.py +61 -34
- sqlspec/statement/builder/select.py +32 -17
- sqlspec/statement/builder/update.py +25 -11
- sqlspec/statement/filters.py +39 -14
- sqlspec/statement/parameter_manager.py +220 -0
- sqlspec/statement/parameters.py +210 -79
- sqlspec/statement/pipelines/__init__.py +166 -23
- sqlspec/statement/pipelines/analyzers/_analyzer.py +21 -20
- sqlspec/statement/pipelines/context.py +35 -39
- sqlspec/statement/pipelines/transformers/__init__.py +2 -3
- sqlspec/statement/pipelines/transformers/_expression_simplifier.py +19 -187
- sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +628 -58
- sqlspec/statement/pipelines/transformers/_remove_comments_and_hints.py +76 -0
- sqlspec/statement/pipelines/validators/_dml_safety.py +33 -18
- sqlspec/statement/pipelines/validators/_parameter_style.py +87 -14
- sqlspec/statement/pipelines/validators/_performance.py +38 -23
- sqlspec/statement/pipelines/validators/_security.py +39 -62
- sqlspec/statement/result.py +37 -129
- sqlspec/statement/splitter.py +0 -12
- sqlspec/statement/sql.py +863 -391
- sqlspec/statement/sql_compiler.py +140 -0
- sqlspec/storage/__init__.py +10 -2
- sqlspec/storage/backends/fsspec.py +53 -8
- sqlspec/storage/backends/obstore.py +15 -19
- sqlspec/storage/capabilities.py +101 -0
- sqlspec/storage/registry.py +56 -83
- sqlspec/typing.py +6 -434
- sqlspec/utils/cached_property.py +25 -0
- sqlspec/utils/correlation.py +0 -2
- sqlspec/utils/logging.py +0 -6
- sqlspec/utils/sync_tools.py +0 -4
- sqlspec/utils/text.py +0 -5
- sqlspec/utils/type_guards.py +892 -0
- {sqlspec-0.12.2.dist-info → sqlspec-0.13.0.dist-info}/METADATA +1 -1
- sqlspec-0.13.0.dist-info/RECORD +150 -0
- sqlspec/statement/builder/protocols.py +0 -20
- sqlspec/statement/pipelines/base.py +0 -315
- sqlspec/statement/pipelines/result_types.py +0 -41
- sqlspec/statement/pipelines/transformers/_remove_comments.py +0 -66
- sqlspec/statement/pipelines/transformers/_remove_hints.py +0 -81
- sqlspec/statement/pipelines/validators/base.py +0 -67
- sqlspec/storage/protocol.py +0 -173
- sqlspec-0.12.2.dist-info/RECORD +0 -145
- {sqlspec-0.12.2.dist-info → sqlspec-0.13.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.12.2.dist-info → sqlspec-0.13.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.12.2.dist-info → sqlspec-0.13.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/typing.py
CHANGED
@@ -1,11 +1,9 @@
-
-from collections.abc import
-from dataclasses import Field, fields
+# pyright: ignore[reportAttributeAccessIssue]
+from collections.abc import Mapping
 from functools import lru_cache
-from typing import TYPE_CHECKING, Annotated, Any,
+from typing import TYPE_CHECKING, Annotated, Any, Union
 
-from
-from typing_extensions import TypeAlias, TypeGuard, TypeVar
+from typing_extensions import TypeAlias, TypeVar
 
 from sqlspec._typing import (
     AIOSQL_INSTALLED,
@@ -47,7 +45,7 @@ from sqlspec._typing import (
 )
 
 if TYPE_CHECKING:
-    from collections.abc import
+    from collections.abc import Sequence
 
 
 PYDANTIC_USE_FAILFAST = False  # leave permanently disabled for now
@@ -132,20 +130,6 @@ Represents:
 """
 
 
-def is_dataclass_instance(obj: Any) -> TypeGuard[DataclassProtocol]:
-    """Check if an object is a dataclass instance.
-
-    Args:
-        obj: An object to check.
-
-    Returns:
-        True if the object is a dataclass instance.
-    """
-    # Ensure obj is an instance and not the class itself,
-    # and that its type is a dataclass.
-    return not isinstance(obj, type) and hasattr(type(obj), "__dataclass_fields__")
-
-
 @lru_cache(typed=True)
 def get_type_adapter(f: "type[T]") -> "TypeAdapter[T]":
     """Caches and returns a pydantic type adapter.
@@ -161,394 +145,6 @@ def get_type_adapter(f: "type[T]") -> "TypeAdapter[T]":
     return TypeAdapter(f)
 
 
-def is_pydantic_model(obj: Any) -> "TypeGuard[BaseModel]":
-    """Check if a value is a pydantic model.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    return PYDANTIC_INSTALLED and isinstance(obj, BaseModel)
-
-
-def is_pydantic_model_with_field(obj: "Any", field_name: str) -> "TypeGuard[BaseModel]":
-    """Check if a pydantic model has a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_pydantic_model(obj) and hasattr(obj, field_name)
-
-
-def is_pydantic_model_without_field(obj: "Any", field_name: str) -> "TypeGuard[BaseModel]":
-    """Check if a pydantic model does not have a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_pydantic_model(obj) and not hasattr(obj, field_name)
-
-
-def is_msgspec_struct(obj: "Any") -> "TypeGuard[Struct]":
-    """Check if a value is a msgspec struct.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    return MSGSPEC_INSTALLED and isinstance(obj, Struct)
-
-
-def is_msgspec_struct_with_field(obj: "Any", field_name: str) -> "TypeGuard[Struct]":
-    """Check if a msgspec struct has a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_msgspec_struct(obj) and hasattr(obj, field_name)
-
-
-def is_msgspec_struct_without_field(obj: "Any", field_name: str) -> "TypeGuard[Struct]":
-    """Check if a msgspec struct does not have a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_msgspec_struct(obj) and not hasattr(obj, field_name)
-
-
-def is_dict(obj: "Any") -> "TypeGuard[dict[str, Any]]":
-    """Check if a value is a dictionary.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    return isinstance(obj, dict)
-
-
-def is_dict_with_field(obj: "Any", field_name: str) -> "TypeGuard[dict[str, Any]]":
-    """Check if a dictionary has a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_dict(obj) and field_name in obj
-
-
-def is_dict_without_field(obj: "Any", field_name: str) -> "TypeGuard[dict[str, Any]]":
-    """Check if a dictionary does not have a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_dict(obj) and field_name not in obj
-
-
-def is_schema(obj: "Any") -> "TypeGuard[SupportedSchemaModel]":
-    """Check if a value is a msgspec Struct or Pydantic model.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    return is_msgspec_struct(obj) or is_pydantic_model(obj)
-
-
-def is_schema_or_dict(obj: "Any") -> "TypeGuard[Union[SupportedSchemaModel, dict[str, Any]]]":
-    """Check if a value is a msgspec Struct, Pydantic model, or dict.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    return is_schema(obj) or is_dict(obj)
-
-
-def is_schema_with_field(obj: "Any", field_name: str) -> "TypeGuard[SupportedSchemaModel]":
-    """Check if a value is a msgspec Struct or Pydantic model with a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_msgspec_struct_with_field(obj, field_name) or is_pydantic_model_with_field(obj, field_name)
-
-
-def is_schema_without_field(obj: "Any", field_name: str) -> "TypeGuard[SupportedSchemaModel]":
-    """Check if a value is a msgspec Struct or Pydantic model without a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return not is_schema_with_field(obj, field_name)
-
-
-def is_schema_or_dict_with_field(
-    obj: "Any", field_name: str
-) -> "TypeGuard[Union[SupportedSchemaModel, dict[str, Any]]]":
-    """Check if a value is a msgspec Struct, Pydantic model, or dict with a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_schema_with_field(obj, field_name) or is_dict_with_field(obj, field_name)
-
-
-def is_schema_or_dict_without_field(
-    obj: "Any", field_name: str
-) -> "TypeGuard[Union[SupportedSchemaModel, dict[str, Any]]]":
-    """Check if a value is a msgspec Struct, Pydantic model, or dict without a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return not is_schema_or_dict_with_field(obj, field_name)
-
-
-def is_dataclass(obj: "Any") -> "TypeGuard[DataclassProtocol]":
-    """Check if an object is a dataclass.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    if isinstance(obj, type) and hasattr(obj, "__dataclass_fields__"):
-        return True
-    return is_dataclass_instance(obj)
-
-
-def is_dataclass_with_field(
-    obj: "Any", field_name: str
-) -> "TypeGuard[object]":  # Can't specify dataclass type directly
-    """Check if an object is a dataclass and has a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_dataclass(obj) and hasattr(obj, field_name)
-
-
-def is_dataclass_without_field(obj: "Any", field_name: str) -> "TypeGuard[object]":
-    """Check if an object is a dataclass and does not have a specific field.
-
-    Args:
-        obj: Value to check.
-        field_name: Field name to check for.
-
-    Returns:
-        bool
-    """
-    return is_dataclass(obj) and not hasattr(obj, field_name)
-
-
-def extract_dataclass_fields(
-    obj: "DataclassProtocol",
-    exclude_none: bool = False,
-    exclude_empty: bool = False,
-    include: "Optional[AbstractSet[str]]" = None,
-    exclude: "Optional[AbstractSet[str]]" = None,
-) -> "tuple[Field[Any], ...]":
-    """Extract dataclass fields.
-
-    Args:
-        obj: A dataclass instance.
-        exclude_none: Whether to exclude None values.
-        exclude_empty: Whether to exclude Empty values.
-        include: An iterable of fields to include.
-        exclude: An iterable of fields to exclude.
-
-    Raises:
-        ValueError: If there are fields that are both included and excluded.
-
-    Returns:
-        A tuple of dataclass fields.
-    """
-    include = include or set()
-    exclude = exclude or set()
-
-    if common := (include & exclude):
-        msg = f"Fields {common} are both included and excluded."
-        raise ValueError(msg)
-
-    dataclass_fields: Iterable[Field[Any]] = fields(obj)
-    if exclude_none:
-        dataclass_fields = (field for field in dataclass_fields if getattr(obj, field.name) is not None)
-    if exclude_empty:
-        dataclass_fields = (field for field in dataclass_fields if getattr(obj, field.name) is not Empty)
-    if include:
-        dataclass_fields = (field for field in dataclass_fields if field.name in include)
-    if exclude:
-        dataclass_fields = (field for field in dataclass_fields if field.name not in exclude)
-
-    return tuple(dataclass_fields)
-
-
-def extract_dataclass_items(
-    obj: "DataclassProtocol",
-    exclude_none: bool = False,
-    exclude_empty: bool = False,
-    include: "Optional[AbstractSet[str]]" = None,
-    exclude: "Optional[AbstractSet[str]]" = None,
-) -> "tuple[tuple[str, Any], ...]":
-    """Extract dataclass name, value pairs.
-
-    Unlike the 'asdict' method exports by the stdlib, this function does not pickle values.
-
-    Args:
-        obj: A dataclass instance.
-        exclude_none: Whether to exclude None values.
-        exclude_empty: Whether to exclude Empty values.
-        include: An iterable of fields to include.
-        exclude: An iterable of fields to exclude.
-
-    Returns:
-        A tuple of key/value pairs.
-    """
-    dataclass_fields = extract_dataclass_fields(obj, exclude_none, exclude_empty, include, exclude)
-    return tuple((field.name, getattr(obj, field.name)) for field in dataclass_fields)
-
-
-def dataclass_to_dict(
-    obj: "DataclassProtocol",
-    exclude_none: bool = False,
-    exclude_empty: bool = False,
-    convert_nested: bool = True,
-    exclude: "Optional[AbstractSet[str]]" = None,
-) -> "dict[str, Any]":
-    """Convert a dataclass to a dictionary.
-
-    This method has important differences to the standard library version:
-    - it does not deepcopy values
-    - it does not recurse into collections
-
-    Args:
-        obj: A dataclass instance.
-        exclude_none: Whether to exclude None values.
-        exclude_empty: Whether to exclude Empty values.
-        convert_nested: Whether to recursively convert nested dataclasses.
-        exclude: An iterable of fields to exclude.
-
-    Returns:
-        A dictionary of key/value pairs.
-    """
-    ret = {}
-    for field in extract_dataclass_fields(obj, exclude_none, exclude_empty, exclude=exclude):
-        value = getattr(obj, field.name)
-        if is_dataclass_instance(value) and convert_nested:
-            ret[field.name] = dataclass_to_dict(value, exclude_none, exclude_empty)
-        else:
-            ret[field.name] = getattr(obj, field.name)
-    return cast("dict[str, Any]", ret)
-
-
-def schema_dump(
-    data: "Union[dict[str, Any], DataclassProtocol, Struct, BaseModel]", exclude_unset: bool = True
-) -> "dict[str, Any]":
-    """Dump a data object to a dictionary.
-
-    Args:
-        data: :type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`
-        exclude_unset: :type:`bool` Whether to exclude unset values.
-
-    Returns:
-        :type:`dict[str, Any]`
-    """
-    if is_dict(data):
-        return data
-    if is_dataclass(data):
-        return dataclass_to_dict(data, exclude_empty=exclude_unset)
-    if is_pydantic_model(data):
-        return data.model_dump(exclude_unset=exclude_unset)
-    if is_msgspec_struct(data):
-        if exclude_unset:
-            return {f: val for f in data.__struct_fields__ if (val := getattr(data, f, None)) != UNSET}
-        return {f: getattr(data, f, None) for f in data.__struct_fields__}
-
-    if hasattr(data, "__dict__"):
-        return data.__dict__
-    return cast("dict[str, Any]", data)
-
-
-def is_dto_data(v: Any) -> TypeGuard[DTOData[Any]]:
-    """Check if a value is a Litestar DTOData object.
-
-    Args:
-        v: Value to check.
-
-    Returns:
-        bool
-    """
-    return LITESTAR_INSTALLED and isinstance(v, DTOData)
-
-
-def is_expression(obj: "Any") -> "TypeGuard[exp.Expression]":
-    """Check if a value is a sqlglot Expression.
-
-    Args:
-        obj: Value to check.
-
-    Returns:
-        bool
-    """
-    return isinstance(obj, exp.Expression)
-
-
 def MixinOf(base: type[T]) -> type[T]:  # noqa: N802
     """Useful function to make mixins with baseclass type hint
 
@@ -585,6 +181,7 @@ __all__ = (
     "BulkModelDict",
     "ConnectionT",
     "Counter",
+    "DTOData",
     "DataclassProtocol",
     "DictRow",
     "Empty",
@@ -617,32 +214,7 @@ __all__ = (
     "UnsetType",
     "aiosql",
     "convert",
-    "dataclass_to_dict",
-    "extract_dataclass_fields",
-    "extract_dataclass_items",
     "get_type_adapter",
-    "is_dataclass",
-    "is_dataclass_instance",
-    "is_dataclass_with_field",
-    "is_dataclass_without_field",
-    "is_dict",
-    "is_dict_with_field",
-    "is_dict_without_field",
-    "is_dto_data",
-    "is_expression",
-    "is_msgspec_struct",
-    "is_msgspec_struct_with_field",
-    "is_msgspec_struct_without_field",
-    "is_pydantic_model",
-    "is_pydantic_model_with_field",
-    "is_pydantic_model_without_field",
-    "is_schema",
-    "is_schema_or_dict",
-    "is_schema_or_dict_with_field",
-    "is_schema_or_dict_without_field",
-    "is_schema_with_field",
-    "is_schema_without_field",
-    "schema_dump",
     "trace",
 )
 
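A note on the removals above: the type-guard and dataclass helpers deleted from sqlspec/typing.py have a likely new home, since the file list shows a new sqlspec/utils/type_guards.py (+892 lines). A minimal migration sketch, assuming (this diff does not confirm it) that the same names are exported from that new module:

    # Hypothetical import shim; the type_guards target is inferred from the
    # file list above, not from anything visible in this diff.
    try:
        from sqlspec.utils.type_guards import is_dict, is_pydantic_model, schema_dump  # 0.13.0+
    except ImportError:
        from sqlspec.typing import is_dict, is_pydantic_model, schema_dump  # <= 0.12.2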
sqlspec/utils/cached_property.py
ADDED
@@ -0,0 +1,25 @@
+"""A simple cached property decorator."""
+
+from typing import Any, Callable, Generic, TypeVar
+
+__all__ = ("CachedProperty",)
+
+
+T = TypeVar("T")
+Cls = TypeVar("Cls")
+
+
+class CachedProperty(Generic[T]):
+    """A property that is only computed once per instance and then replaces
+    itself with an ordinary attribute. Deleting the attribute resets the
+    property.
+    """
+
+    def __init__(self, func: Callable[[Any], T]) -> None:
+        self.func = func
+
+    def __get__(self, obj: Any, cls: type[Any]) -> Any:
+        if obj is None:
+            return self
+        value = obj.__dict__[self.func.__name__] = self.func(obj)
+        return value
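Because the whole module fits in this hunk, its behavior is easy to illustrate. CachedProperty is a non-data descriptor: the first access stores the computed value in the instance __dict__ under the function's name, so subsequent lookups bypass __get__ entirely. A short usage sketch (the Report class is hypothetical):

    from sqlspec.utils.cached_property import CachedProperty


    class Report:
        @CachedProperty
        def totals(self) -> int:
            print("computing...")  # runs only on the first access
            return sum(range(1_000))


    r = Report()
    assert r.totals == 499500  # first access: prints "computing..." and caches
    assert r.totals == 499500  # now served straight from r.__dict__
    del r.totals               # deleting the cached attribute resets the property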
sqlspec/utils/correlation.py
CHANGED
@@ -72,7 +72,6 @@ class CorrelationContext:
         previous_id = cls.get()
 
         try:
-            # Set the new correlation ID
             cls.set(correlation_id)
             yield correlation_id
         finally:
@@ -145,7 +144,6 @@ def get_correlation_adapter(logger: Any) -> LoggerAdapter:
         """
         extra = kwargs.get("extra", {})
 
-        # Add correlation ID if available
         if correlation_id := CorrelationContext.get():
             extra["correlation_id"] = correlation_id
 
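The first hunk above shows the body of a context manager on CorrelationContext that installs a correlation ID and restores the previous one on exit (the finally block falls outside the hunk). A standalone sketch of that save/restore pattern using contextvars; the names here are illustrative, not the actual sqlspec API:

    from contextlib import contextmanager
    from contextvars import ContextVar
    from typing import Iterator, Optional

    _correlation_id: ContextVar[Optional[str]] = ContextVar("correlation_id", default=None)


    @contextmanager
    def correlation_context(correlation_id: str) -> Iterator[str]:
        previous_id = _correlation_id.get()   # mirrors previous_id = cls.get()
        try:
            _correlation_id.set(correlation_id)
            yield correlation_id
        finally:
            _correlation_id.set(previous_id)  # restore whatever was active before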
sqlspec/utils/logging.py
CHANGED
@@ -63,15 +63,12 @@ class StructuredFormatter(logging.Formatter):
             "line": record.lineno,
         }
 
-        # Add correlation ID if available
         if correlation_id := get_correlation_id():
             log_entry["correlation_id"] = correlation_id
 
-        # Add any extra fields from the record
         if hasattr(record, "extra_fields"):
             log_entry.update(record.extra_fields)  # pyright: ignore
 
-        # Add exception info if present
         if record.exc_info:
             log_entry["exception"] = self.formatException(record.exc_info)
 
@@ -107,13 +104,11 @@ def get_logger(name: str | None = None) -> logging.Logger:
     if name is None:
         return logging.getLogger("sqlspec")
 
-    # Ensure all loggers are under the sqlspec namespace
     if not name.startswith("sqlspec"):
         name = f"sqlspec.{name}"
 
     logger = logging.getLogger(name)
 
-    # Add correlation ID filter if not already present
     if not any(isinstance(f, CorrelationIDFilter) for f in logger.filters):
         logger.addFilter(CorrelationIDFilter())
 
@@ -129,7 +124,6 @@ def log_with_context(logger: logging.Logger, level: int, message: str, **extra_f
         message: Log message
         **extra_fields: Additional fields to include in structured logs
     """
-    # Create a LogRecord with extra fields
     record = logger.makeRecord(logger.name, level, "(unknown file)", 0, message, (), None)
     record.extra_fields = extra_fields
     logger.handle(record)
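The log_with_context signature is visible in the hunk header, and StructuredFormatter picks up the extra_fields attribute set on the synthesized record. A usage sketch; the handler wiring is illustrative, not taken from this diff:

    import logging

    from sqlspec.utils.logging import StructuredFormatter, get_logger, log_with_context

    logger = get_logger("storage")  # namespaced to "sqlspec.storage" per the hunk above
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    handler.setFormatter(StructuredFormatter())
    logger.addHandler(handler)

    # extra_fields ride along on the record and land in the structured entry
    log_with_context(logger, logging.INFO, "query executed", rows=42, duration_ms=3.1)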
sqlspec/utils/sync_tools.py
CHANGED
@@ -79,7 +79,6 @@ def run_(async_function: "Callable[ParamSpecT, Coroutine[Any, Any, ReturnT]]") -
         if loop is not None:
             # Running in an existing event loop
             return asyncio.run(partial_f())
-        # Create a new event loop and run the function
         if uvloop and sys.platform != "win32":
             uvloop.install()  # pyright: ignore[reportUnknownMemberType]
         return asyncio.run(partial_f())
@@ -107,7 +106,6 @@ def await_(
         try:
             loop = asyncio.get_running_loop()
         except RuntimeError:
-            # No running event loop
             if raise_sync_error:
                 msg = "Cannot run async function"
                 raise RuntimeError(msg) from None
@@ -116,7 +114,6 @@ def await_(
         # Running in an existing event loop.
         if loop.is_running():
             try:
-                # Check if the current context is within a task managed by this loop
                 current_task = asyncio.current_task(loop=loop)
             except RuntimeError:
                 # Not running inside a task managed by this loop
@@ -138,7 +135,6 @@ def await_(
             if raise_sync_error:
                 msg = "Cannot run async function"
                 raise RuntimeError(msg)
-            # Fallback to running in a new loop
             return asyncio.run(partial_f())
 
     return wrapper
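Per the hunk headers, run_ turns an async callable into a synchronous one (its wrapper ultimately calls asyncio.run), while await_ goes the other direction from sync code that may already sit inside a running loop. A sketch of run_ usage, assuming the wrapper forwards the wrapped function's arguments, which the ParamSpecT annotation in the header implies:

    import asyncio

    from sqlspec.utils.sync_tools import run_


    async def fetch_value(x: int) -> int:
        await asyncio.sleep(0)
        return x * 2


    fetch_value_sync = run_(fetch_value)  # async callable -> plain sync callable
    assert fetch_value_sync(21) == 42     # no event-loop management at the call site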
sqlspec/utils/text.py
CHANGED
@@ -14,7 +14,6 @@ _SLUGIFY_HYPHEN_COLLAPSE = re.compile(r"-+")
 _SNAKE_CASE_LOWER_OR_DIGIT_TO_UPPER = re.compile(r"(?<=[a-z0-9])(?=[A-Z])", re.UNICODE)
 # Insert underscore between uppercase letter and uppercase followed by lowercase
 _SNAKE_CASE_UPPER_TO_UPPER_LOWER = re.compile(r"(?<=[A-Z])(?=[A-Z][a-z])", re.UNICODE)
-# Replace hyphens, spaces, dots, and @ symbols with underscores for snake_case
 _SNAKE_CASE_HYPHEN_SPACE = re.compile(r"[.\s@-]+", re.UNICODE)
 # Collapse multiple underscores
 _SNAKE_CASE_MULTIPLE_UNDERSCORES = re.compile(r"__+", re.UNICODE)
@@ -66,17 +65,13 @@ def slugify(value: str, allow_unicode: bool = False, separator: Optional[str] =
     value = value.lower().strip()
     sep = separator if separator is not None else "-"
     if not sep:
-        # Remove all non-alphanumeric characters and return
         return _SLUGIFY_REMOVE_NON_ALPHANUMERIC.sub("", value)
-    # Replace all runs of non-alphanumeric chars with the separator
     value = _SLUGIFY_REMOVE_NON_ALPHANUMERIC.sub(sep, value)
-    # Remove leading/trailing separators and collapse multiple separators
     # For dynamic separators, we need to use re.sub with escaped separator
     if sep == "-":
         # Use pre-compiled regex for common case
         value = value.strip("-")
         return _SLUGIFY_HYPHEN_COLLAPSE.sub("-", value)
-    # For other separators, use dynamic regex
     value = re.sub(rf"^{re.escape(sep)}+|{re.escape(sep)}+$", "", value)
     return re.sub(rf"{re.escape(sep)}+", sep, value)
 
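The slugify behavior implied by this hunk: a falsy separator strips non-alphanumerics outright, "-" takes the pre-compiled fast path, and any other separator goes through the dynamically escaped regexes. Expected results, inferred from the code above rather than executed against the release:

    from sqlspec.utils.text import slugify

    assert slugify("Hello, World!") == "hello-world"                 # default "-" fast path
    assert slugify("Hello, World!", separator="_") == "hello_world"  # dynamic escaped-regex path
    assert slugify("Hello, World!", separator="") == "helloworld"    # falsy sep: strip only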