sqlspec 0.11.0__py3-none-any.whl → 0.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec might be problematic.
- sqlspec/__init__.py +16 -3
- sqlspec/_serialization.py +3 -10
- sqlspec/_sql.py +1147 -0
- sqlspec/_typing.py +343 -41
- sqlspec/adapters/adbc/__init__.py +2 -6
- sqlspec/adapters/adbc/config.py +474 -149
- sqlspec/adapters/adbc/driver.py +330 -644
- sqlspec/adapters/aiosqlite/__init__.py +2 -6
- sqlspec/adapters/aiosqlite/config.py +143 -57
- sqlspec/adapters/aiosqlite/driver.py +269 -462
- sqlspec/adapters/asyncmy/__init__.py +3 -8
- sqlspec/adapters/asyncmy/config.py +247 -202
- sqlspec/adapters/asyncmy/driver.py +217 -451
- sqlspec/adapters/asyncpg/__init__.py +4 -7
- sqlspec/adapters/asyncpg/config.py +329 -176
- sqlspec/adapters/asyncpg/driver.py +418 -498
- sqlspec/adapters/bigquery/__init__.py +2 -2
- sqlspec/adapters/bigquery/config.py +407 -0
- sqlspec/adapters/bigquery/driver.py +592 -634
- sqlspec/adapters/duckdb/__init__.py +4 -1
- sqlspec/adapters/duckdb/config.py +432 -321
- sqlspec/adapters/duckdb/driver.py +393 -436
- sqlspec/adapters/oracledb/__init__.py +3 -8
- sqlspec/adapters/oracledb/config.py +625 -0
- sqlspec/adapters/oracledb/driver.py +549 -942
- sqlspec/adapters/psqlpy/__init__.py +4 -7
- sqlspec/adapters/psqlpy/config.py +372 -203
- sqlspec/adapters/psqlpy/driver.py +197 -550
- sqlspec/adapters/psycopg/__init__.py +3 -8
- sqlspec/adapters/psycopg/config.py +741 -0
- sqlspec/adapters/psycopg/driver.py +732 -733
- sqlspec/adapters/sqlite/__init__.py +2 -6
- sqlspec/adapters/sqlite/config.py +146 -81
- sqlspec/adapters/sqlite/driver.py +243 -426
- sqlspec/base.py +220 -825
- sqlspec/config.py +354 -0
- sqlspec/driver/__init__.py +22 -0
- sqlspec/driver/_async.py +252 -0
- sqlspec/driver/_common.py +338 -0
- sqlspec/driver/_sync.py +261 -0
- sqlspec/driver/mixins/__init__.py +17 -0
- sqlspec/driver/mixins/_pipeline.py +523 -0
- sqlspec/driver/mixins/_result_utils.py +122 -0
- sqlspec/driver/mixins/_sql_translator.py +35 -0
- sqlspec/driver/mixins/_storage.py +993 -0
- sqlspec/driver/mixins/_type_coercion.py +131 -0
- sqlspec/exceptions.py +299 -7
- sqlspec/extensions/aiosql/__init__.py +10 -0
- sqlspec/extensions/aiosql/adapter.py +474 -0
- sqlspec/extensions/litestar/__init__.py +1 -6
- sqlspec/extensions/litestar/_utils.py +1 -5
- sqlspec/extensions/litestar/config.py +5 -6
- sqlspec/extensions/litestar/handlers.py +13 -12
- sqlspec/extensions/litestar/plugin.py +22 -24
- sqlspec/extensions/litestar/providers.py +37 -55
- sqlspec/loader.py +528 -0
- sqlspec/service/__init__.py +3 -0
- sqlspec/service/base.py +24 -0
- sqlspec/service/pagination.py +26 -0
- sqlspec/statement/__init__.py +21 -0
- sqlspec/statement/builder/__init__.py +54 -0
- sqlspec/statement/builder/_ddl_utils.py +119 -0
- sqlspec/statement/builder/_parsing_utils.py +135 -0
- sqlspec/statement/builder/base.py +328 -0
- sqlspec/statement/builder/ddl.py +1379 -0
- sqlspec/statement/builder/delete.py +80 -0
- sqlspec/statement/builder/insert.py +274 -0
- sqlspec/statement/builder/merge.py +95 -0
- sqlspec/statement/builder/mixins/__init__.py +65 -0
- sqlspec/statement/builder/mixins/_aggregate_functions.py +151 -0
- sqlspec/statement/builder/mixins/_case_builder.py +91 -0
- sqlspec/statement/builder/mixins/_common_table_expr.py +91 -0
- sqlspec/statement/builder/mixins/_delete_from.py +34 -0
- sqlspec/statement/builder/mixins/_from.py +61 -0
- sqlspec/statement/builder/mixins/_group_by.py +119 -0
- sqlspec/statement/builder/mixins/_having.py +35 -0
- sqlspec/statement/builder/mixins/_insert_from_select.py +48 -0
- sqlspec/statement/builder/mixins/_insert_into.py +36 -0
- sqlspec/statement/builder/mixins/_insert_values.py +69 -0
- sqlspec/statement/builder/mixins/_join.py +110 -0
- sqlspec/statement/builder/mixins/_limit_offset.py +53 -0
- sqlspec/statement/builder/mixins/_merge_clauses.py +405 -0
- sqlspec/statement/builder/mixins/_order_by.py +46 -0
- sqlspec/statement/builder/mixins/_pivot.py +82 -0
- sqlspec/statement/builder/mixins/_returning.py +37 -0
- sqlspec/statement/builder/mixins/_select_columns.py +60 -0
- sqlspec/statement/builder/mixins/_set_ops.py +122 -0
- sqlspec/statement/builder/mixins/_unpivot.py +80 -0
- sqlspec/statement/builder/mixins/_update_from.py +54 -0
- sqlspec/statement/builder/mixins/_update_set.py +91 -0
- sqlspec/statement/builder/mixins/_update_table.py +29 -0
- sqlspec/statement/builder/mixins/_where.py +374 -0
- sqlspec/statement/builder/mixins/_window_functions.py +86 -0
- sqlspec/statement/builder/protocols.py +20 -0
- sqlspec/statement/builder/select.py +206 -0
- sqlspec/statement/builder/update.py +178 -0
- sqlspec/statement/filters.py +571 -0
- sqlspec/statement/parameters.py +736 -0
- sqlspec/statement/pipelines/__init__.py +67 -0
- sqlspec/statement/pipelines/analyzers/__init__.py +9 -0
- sqlspec/statement/pipelines/analyzers/_analyzer.py +649 -0
- sqlspec/statement/pipelines/base.py +315 -0
- sqlspec/statement/pipelines/context.py +119 -0
- sqlspec/statement/pipelines/result_types.py +41 -0
- sqlspec/statement/pipelines/transformers/__init__.py +8 -0
- sqlspec/statement/pipelines/transformers/_expression_simplifier.py +256 -0
- sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +623 -0
- sqlspec/statement/pipelines/transformers/_remove_comments.py +66 -0
- sqlspec/statement/pipelines/transformers/_remove_hints.py +81 -0
- sqlspec/statement/pipelines/validators/__init__.py +23 -0
- sqlspec/statement/pipelines/validators/_dml_safety.py +275 -0
- sqlspec/statement/pipelines/validators/_parameter_style.py +297 -0
- sqlspec/statement/pipelines/validators/_performance.py +703 -0
- sqlspec/statement/pipelines/validators/_security.py +990 -0
- sqlspec/statement/pipelines/validators/base.py +67 -0
- sqlspec/statement/result.py +527 -0
- sqlspec/statement/splitter.py +701 -0
- sqlspec/statement/sql.py +1198 -0
- sqlspec/storage/__init__.py +15 -0
- sqlspec/storage/backends/__init__.py +0 -0
- sqlspec/storage/backends/base.py +166 -0
- sqlspec/storage/backends/fsspec.py +315 -0
- sqlspec/storage/backends/obstore.py +464 -0
- sqlspec/storage/protocol.py +170 -0
- sqlspec/storage/registry.py +315 -0
- sqlspec/typing.py +157 -36
- sqlspec/utils/correlation.py +155 -0
- sqlspec/utils/deprecation.py +3 -6
- sqlspec/utils/fixtures.py +6 -11
- sqlspec/utils/logging.py +135 -0
- sqlspec/utils/module_loader.py +45 -43
- sqlspec/utils/serializers.py +4 -0
- sqlspec/utils/singleton.py +6 -8
- sqlspec/utils/sync_tools.py +15 -27
- sqlspec/utils/text.py +58 -26
- {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/METADATA +100 -26
- sqlspec-0.12.0.dist-info/RECORD +145 -0
- sqlspec/adapters/bigquery/config/__init__.py +0 -3
- sqlspec/adapters/bigquery/config/_common.py +0 -40
- sqlspec/adapters/bigquery/config/_sync.py +0 -87
- sqlspec/adapters/oracledb/config/__init__.py +0 -9
- sqlspec/adapters/oracledb/config/_asyncio.py +0 -186
- sqlspec/adapters/oracledb/config/_common.py +0 -131
- sqlspec/adapters/oracledb/config/_sync.py +0 -186
- sqlspec/adapters/psycopg/config/__init__.py +0 -19
- sqlspec/adapters/psycopg/config/_async.py +0 -169
- sqlspec/adapters/psycopg/config/_common.py +0 -56
- sqlspec/adapters/psycopg/config/_sync.py +0 -168
- sqlspec/filters.py +0 -330
- sqlspec/mixins.py +0 -306
- sqlspec/statement.py +0 -378
- sqlspec-0.11.0.dist-info/RECORD +0 -69
- {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/utils/correlation.py
ADDED
@@ -0,0 +1,155 @@
+"""Correlation ID tracking for distributed tracing.
+
+This module provides utilities for tracking correlation IDs across
+database operations, enabling distributed tracing and debugging.
+"""
+
+from __future__ import annotations
+
+import uuid
+from contextlib import contextmanager
+from contextvars import ContextVar
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from collections.abc import Generator, MutableMapping
+    from logging import LoggerAdapter
+
+__all__ = ("CorrelationContext", "correlation_context", "get_correlation_adapter")
+
+
+class CorrelationContext:
+    """Context manager for correlation ID tracking.
+
+    This class provides a context-aware way to track correlation IDs
+    across async and sync operations.
+    """
+
+    _correlation_id: ContextVar[str | None] = ContextVar("sqlspec_correlation_id", default=None)
+
+    @classmethod
+    def get(cls) -> str | None:
+        """Get the current correlation ID.
+
+        Returns:
+            The current correlation ID or None if not set
+        """
+        return cls._correlation_id.get()
+
+    @classmethod
+    def set(cls, correlation_id: str | None) -> None:
+        """Set the correlation ID.
+
+        Args:
+            correlation_id: The correlation ID to set
+        """
+        cls._correlation_id.set(correlation_id)
+
+    @classmethod
+    def generate(cls) -> str:
+        """Generate a new correlation ID.
+
+        Returns:
+            A new UUID-based correlation ID
+        """
+        return str(uuid.uuid4())
+
+    @classmethod
+    @contextmanager
+    def context(cls, correlation_id: str | None = None) -> Generator[str, None, None]:
+        """Context manager for correlation ID scope.
+
+        Args:
+            correlation_id: The correlation ID to use. If None, generates a new one.
+
+        Yields:
+            The correlation ID being used
+        """
+        if correlation_id is None:
+            correlation_id = cls.generate()
+
+        # Save the current correlation ID
+        previous_id = cls.get()
+
+        try:
+            # Set the new correlation ID
+            cls.set(correlation_id)
+            yield correlation_id
+        finally:
+            # Restore the previous correlation ID
+            cls.set(previous_id)
+
+    @classmethod
+    def clear(cls) -> None:
+        """Clear the current correlation ID."""
+        cls.set(None)
+
+    @classmethod
+    def to_dict(cls) -> dict[str, Any]:
+        """Get correlation context as a dictionary.
+
+        Returns:
+            Dictionary with correlation_id key if set
+        """
+        correlation_id = cls.get()
+        return {"correlation_id": correlation_id} if correlation_id else {}
+
+
+@contextmanager
+def correlation_context(correlation_id: str | None = None) -> Generator[str, None, None]:
+    """Convenience context manager for correlation ID tracking.
+
+    Args:
+        correlation_id: Optional correlation ID. If None, generates a new one.
+
+    Yields:
+        The active correlation ID
+
+    Example:
+        ```python
+        with correlation_context() as correlation_id:
+            logger.info(
+                "Processing request",
+                extra={"correlation_id": correlation_id},
+            )
+            # All operations within this context will have the same correlation ID
+        ```
+    """
+    with CorrelationContext.context(correlation_id) as cid:
+        yield cid
+
+
+def get_correlation_adapter(logger: Any) -> LoggerAdapter:
+    """Get a logger adapter that automatically includes correlation ID.
+
+    Args:
+        logger: The base logger to wrap
+
+    Returns:
+        LoggerAdapter that includes correlation ID in all logs
+    """
+    from logging import LoggerAdapter
+
+    class CorrelationAdapter(LoggerAdapter):
+        """Logger adapter that adds correlation ID to all logs."""
+
+        def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, dict[str, Any]]:
+            """Add correlation ID to the log record.
+
+            Args:
+                msg: The log message
+                kwargs: Keyword arguments for the log record
+
+            Returns:
+                The message and updated kwargs
+            """
+            extra = kwargs.get("extra", {})
+
+            # Add correlation ID if available
+            if correlation_id := CorrelationContext.get():
+                extra["correlation_id"] = correlation_id
+
+            kwargs["extra"] = extra
+            return msg, dict(kwargs)
+
+    return CorrelationAdapter(logger, {})
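Taken together, the new module wires context-scoped IDs into logging. Below is a minimal usage sketch built only from the API shown above; the logger name, message, and assertions are illustrative:

```python
import logging

from sqlspec.utils.correlation import CorrelationContext, correlation_context, get_correlation_adapter

logging.basicConfig(level=logging.INFO)
log = get_correlation_adapter(logging.getLogger("app"))  # wraps the logger with the adapter shown above

with correlation_context() as cid:
    # Every record emitted inside this block carries the same generated correlation ID in its extras
    log.info("starting batch import")
    assert CorrelationContext.get() == cid

# Leaving the block restores the previous (unset) correlation ID
assert CorrelationContext.get() is None
```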
sqlspec/utils/deprecation.py
CHANGED
@@ -44,15 +44,12 @@ def warn_deprecation(
         access_type = "Use of"

     if pending:
-        parts.append(f"{access_type} {kind} awaiting deprecation {deprecated_name
+        parts.append(f"{access_type} {kind} awaiting deprecation '{deprecated_name}'")  # pyright: ignore[reportUnknownMemberType]
     else:
-        parts.append(f"{access_type} deprecated {kind} {deprecated_name
+        parts.append(f"{access_type} deprecated {kind} '{deprecated_name}'")  # pyright: ignore[reportUnknownMemberType]

     parts.extend(  # pyright: ignore[reportUnknownMemberType]
-        (
-            f"Deprecated in SQLSpec {version}",
-            f"This {kind} will be removed in {removal_in or 'the next major version'}",
-        ),
+        (f"Deprecated in SQLSpec {version}", f"This {kind} will be removed in {removal_in or 'the next major version'}")
     )
     if alternative:
         parts.append(f"Use {alternative!r} instead")  # pyright: ignore[reportUnknownMemberType]
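For illustration, the corrected f-strings above render messages like the following; the input values and the final `". ".join(...)` are hypothetical stand-ins, not code taken from the diff:

```python
# Hypothetical inputs, purely to show what the fixed message fragments look like.
access_type, kind, deprecated_name = "Use of", "function", "old_helper"
version, removal_in, alternative = "0.12.0", None, "new_helper"

parts = [
    f"{access_type} deprecated {kind} '{deprecated_name}'",
    f"Deprecated in SQLSpec {version}",
    f"This {kind} will be removed in {removal_in or 'the next major version'}",
    f"Use {alternative!r} instead",
]
print(". ".join(parts))
# Use of deprecated function 'old_helper'. Deprecated in SQLSpec 0.12.0.
# This function will be removed in the next major version. Use 'new_helper' instead
```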
sqlspec/utils/fixtures.py
CHANGED
@@ -1,21 +1,17 @@
-from
+from pathlib import Path
+from typing import Any

 from sqlspec._serialization import decode_json
 from sqlspec.exceptions import MissingDependencyError

-if TYPE_CHECKING:
-    from pathlib import Path
-
-    from anyio import Path as AsyncPath
-
 __all__ = ("open_fixture", "open_fixture_async")


-def open_fixture(fixtures_path:
+def open_fixture(fixtures_path: Any, fixture_name: str) -> Any:
     """Loads JSON file with the specified fixture name

     Args:
-        fixtures_path:
+        fixtures_path: The path to look for fixtures (pathlib.Path or anyio.Path)
         fixture_name (str): The fixture name to load.

     Raises:
@@ -24,7 +20,6 @@ def open_fixture(fixtures_path: "Union[Path, AsyncPath]", fixture_name: str) ->
     Returns:
         Any: The parsed JSON data
     """
-    from pathlib import Path

     fixture = Path(fixtures_path / f"{fixture_name}.json")
     if fixture.exists():
@@ -35,11 +30,11 @@ def open_fixture(fixtures_path: "Union[Path, AsyncPath]", fixture_name: str) ->
     raise FileNotFoundError(msg)


-async def open_fixture_async(fixtures_path:
+async def open_fixture_async(fixtures_path: Any, fixture_name: str) -> Any:
     """Loads JSON file with the specified fixture name

     Args:
-        fixtures_path:
+        fixtures_path: The path to look for fixtures (pathlib.Path or anyio.Path)
         fixture_name (str): The fixture name to load.

     Raises:
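A short usage sketch of the simplified signature above; the fixtures directory and fixture name are hypothetical:

```python
from pathlib import Path

from sqlspec.utils.fixtures import open_fixture

# Loads and JSON-decodes tests/fixtures/users.json (path and name are made up for the example);
# raises FileNotFoundError if the file does not exist.
users = open_fixture(Path("tests/fixtures"), "users")
print(users)
```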
sqlspec/utils/logging.py
ADDED
@@ -0,0 +1,135 @@
+"""Logging utilities for SQLSpec.
+
+This module provides utilities for structured logging with correlation IDs.
+Users should configure their own logging handlers and levels as needed.
+SQLSpec provides StructuredFormatter for JSON-formatted logs if desired.
+"""
+
+from __future__ import annotations
+
+import logging
+from contextvars import ContextVar
+from typing import TYPE_CHECKING, Any
+
+from sqlspec._serialization import encode_json
+
+if TYPE_CHECKING:
+    from logging import LogRecord
+
+__all__ = ("StructuredFormatter", "correlation_id_var", "get_correlation_id", "get_logger", "set_correlation_id")
+
+# Context variable for correlation ID tracking
+correlation_id_var: ContextVar[str | None] = ContextVar("correlation_id", default=None)
+
+
+def set_correlation_id(correlation_id: str | None) -> None:
+    """Set the correlation ID for the current context.
+
+    Args:
+        correlation_id: The correlation ID to set, or None to clear
+    """
+    correlation_id_var.set(correlation_id)
+
+
+def get_correlation_id() -> str | None:
+    """Get the current correlation ID.
+
+    Returns:
+        The current correlation ID or None if not set
+    """
+    return correlation_id_var.get()
+
+
+class StructuredFormatter(logging.Formatter):
+    """Structured JSON formatter with correlation ID support."""
+
+    def format(self, record: LogRecord) -> str:
+        """Format log record as structured JSON.
+
+        Args:
+            record: The log record to format
+
+        Returns:
+            JSON formatted log entry
+        """
+        # Base log entry
+        log_entry = {
+            "timestamp": self.formatTime(record, self.datefmt),
+            "level": record.levelname,
+            "logger": record.name,
+            "message": record.getMessage(),
+            "module": record.module,
+            "function": record.funcName,
+            "line": record.lineno,
+        }
+
+        # Add correlation ID if available
+        if correlation_id := get_correlation_id():
+            log_entry["correlation_id"] = correlation_id
+
+        # Add any extra fields from the record
+        if hasattr(record, "extra_fields"):
+            log_entry.update(record.extra_fields)  # pyright: ignore
+
+        # Add exception info if present
+        if record.exc_info:
+            log_entry["exception"] = self.formatException(record.exc_info)
+
+        return encode_json(log_entry)
+
+
+class CorrelationIDFilter(logging.Filter):
+    """Filter that adds correlation ID to log records."""
+
+    def filter(self, record: LogRecord) -> bool:
+        """Add correlation ID to record if available.
+
+        Args:
+            record: The log record to filter
+
+        Returns:
+            Always True to pass the record through
+        """
+        if correlation_id := get_correlation_id():
+            record.correlation_id = correlation_id
+        return True
+
+
+def get_logger(name: str | None = None) -> logging.Logger:
+    """Get a logger instance with standardized configuration.
+
+    Args:
+        name: Logger name. If not provided, returns the root sqlspec logger.
+
+    Returns:
+        Configured logger instance
+    """
+    if name is None:
+        return logging.getLogger("sqlspec")
+
+    # Ensure all loggers are under the sqlspec namespace
+    if not name.startswith("sqlspec"):
+        name = f"sqlspec.{name}"
+
+    logger = logging.getLogger(name)
+
+    # Add correlation ID filter if not already present
+    if not any(isinstance(f, CorrelationIDFilter) for f in logger.filters):
+        logger.addFilter(CorrelationIDFilter())
+
+    return logger
+
+
+def log_with_context(logger: logging.Logger, level: int, message: str, **extra_fields: Any) -> None:
+    """Log a message with structured extra fields.
+
+    Args:
+        logger: The logger to use
+        level: Log level
+        message: Log message
+        **extra_fields: Additional fields to include in structured logs
+    """
+    # Create a LogRecord with extra fields
+    record = logger.makeRecord(logger.name, level, "(unknown file)", 0, message, (), None)
+    record.extra_fields = extra_fields
+    logger.handle(record)
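A minimal wiring sketch using only the helpers added above; the handler choice and the example correlation ID value are application-side assumptions, not SQLSpec defaults:

```python
import logging

from sqlspec.utils.logging import StructuredFormatter, get_logger, set_correlation_id

# Attach the JSON formatter to a handler of your choosing (handler setup is left to the application)
handler = logging.StreamHandler()
handler.setFormatter(StructuredFormatter())

logger = get_logger("example")      # resolves to the "sqlspec.example" logger with the correlation filter
logger.addHandler(handler)
logger.setLevel(logging.INFO)

set_correlation_id("req-1234")      # illustrative ID; normally generated per request
logger.info("connection acquired")  # emitted as JSON including "correlation_id": "req-1234"
```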
sqlspec/utils/module_loader.py
CHANGED
@@ -1,18 +1,11 @@
 """General utility functions."""

-import
-from importlib import import_module
+import importlib
 from importlib.util import find_spec
 from pathlib import Path
-from typing import
+from typing import Any, Optional

-
-    from types import ModuleType
-
-__all__ = (
-    "import_string",
-    "module_to_os_path",
-)
+__all__ = ("import_string", "module_to_os_path")


 def module_to_os_path(dotted_path: str = "app") -> "Path":
@@ -51,42 +44,51 @@ def import_string(dotted_path: str) -> "Any":
     Args:
         dotted_path: The path of the module to import.

-    Raises:
-        ImportError: Could not import the module.
-
     Returns:
         object: The imported object.
     """

-    def
-
-
-
-
-    def _cached_import(module_path: str, class_name: str) -> Any:
-        """Import and cache a class from a module.
-
-        Args:
-            module_path: dotted path to module.
-            class_name: Class or function name.
-
-        Returns:
-            object: The imported class or function
-        """
-        # Check whether module is loaded and fully initialized.
-        module = sys.modules.get(module_path)
-        if not _is_loaded(module):
-            module = import_module(module_path)
-        return getattr(module, class_name)
-
-    try:
-        module_path, class_name = dotted_path.rsplit(".", 1)
-    except ValueError as e:
-        msg = "%s doesn't look like a module path"
-        raise ImportError(msg, dotted_path) from e
+    def _raise_import_error(msg: str, exc: "Optional[Exception]" = None) -> None:
+        if exc is not None:
+            raise ImportError(msg) from exc
+        raise ImportError(msg)

+    obj: Any = None
     try:
-
-
-
-
+        parts = dotted_path.split(".")
+        module = None
+        i = len(parts)  # Initialize to full length
+
+        for i in range(len(parts), 0, -1):
+            module_path = ".".join(parts[:i])
+            try:
+                module = importlib.import_module(module_path)
+                break
+            except ModuleNotFoundError:
+                continue
+        else:
+            _raise_import_error(f"{dotted_path} doesn't look like a module path")
+
+        if module is None:
+            _raise_import_error(f"Failed to import any module from {dotted_path}")
+
+        obj = module
+        attrs = parts[i:]
+        if not attrs and i == len(parts) and len(parts) > 1:
+            parent_module_path = ".".join(parts[:-1])
+            attr = parts[-1]
+            try:
+                parent_module = importlib.import_module(parent_module_path)
+            except Exception:
+                return obj
+            if not hasattr(parent_module, attr):
+                _raise_import_error(f"Module '{parent_module_path}' has no attribute '{attr}' in '{dotted_path}'")
+        for attr in attrs:
+            if not hasattr(obj, attr):
+                _raise_import_error(
+                    f"Module '{module.__name__ if module is not None else 'unknown'}' has no attribute '{attr}' in '{dotted_path}'"
+                )
+            obj = getattr(obj, attr)
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        _raise_import_error(f"Could not import '{dotted_path}': {e}", e)
+    return obj
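A usage sketch of the rewritten `import_string` resolution (longest importable module prefix first, then attribute traversal); the standard-library targets are chosen only for illustration:

```python
from sqlspec.utils.module_loader import import_string

# "json.dumps" is not importable as a module, so the resolver imports "json"
# and then walks the remaining "dumps" attribute.
json_dumps = import_string("json.dumps")
Mapping = import_string("collections.abc.Mapping")

print(json_dumps({"ok": True}))  # {"ok": true}
print(Mapping)                   # <class 'collections.abc.Mapping'>
```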
sqlspec/utils/singleton.py
CHANGED
@@ -1,3 +1,4 @@
+import threading
 from typing import Any, TypeVar

 __all__ = ("SingletonMeta",)
@@ -11,6 +12,7 @@ class SingletonMeta(type):

     # We store instances keyed by the class type
     _instances: dict[type, object] = {}
+    _lock = threading.Lock()

     def __call__(cls: type[_T], *args: Any, **kwargs: Any) -> _T:
         """Call method for the singleton metaclass.
@@ -23,13 +25,9 @@ class SingletonMeta(type):
         Returns:
            The singleton instance of the class.
         """
-        # Use SingletonMeta._instances to access the class attribute
         if cls not in SingletonMeta._instances:  # pyright: ignore[reportUnnecessaryContains]
-
-
-
-
-            # Return the cached instance. We cast here because the dictionary stores `object`,
-            # but we know it's of type _T for the given cls key.
-            # Mypy might need an ignore here depending on configuration, but pyright should handle it.
+            with SingletonMeta._lock:
+                if cls not in SingletonMeta._instances:
+                    instance = super().__call__(*args, **kwargs)  # type: ignore[misc]
+                    SingletonMeta._instances[cls] = instance
         return SingletonMeta._instances[cls]  # type: ignore[return-value]
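A minimal sketch of the metaclass in use; `ConnectionRegistry` is a made-up consumer class, not part of sqlspec:

```python
from sqlspec.utils.singleton import SingletonMeta


class ConnectionRegistry(metaclass=SingletonMeta):
    """Hypothetical consumer: every instantiation returns the same object."""

    def __init__(self) -> None:
        self.connections: dict[str, object] = {}


a = ConnectionRegistry()
b = ConnectionRegistry()
assert a is b  # the lock added in 0.12.0 makes the first instantiation safe under concurrency
```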
sqlspec/utils/sync_tools.py
CHANGED
@@ -3,15 +3,7 @@ import functools
 import inspect
 import sys
 from contextlib import AbstractAsyncContextManager, AbstractContextManager
-from typing import
-    TYPE_CHECKING,
-    Any,
-    Generic,
-    Optional,
-    TypeVar,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, Union, cast

 from typing_extensions import ParamSpec

@@ -44,7 +36,7 @@ class CapacityLimiter:

     @property
     def total_tokens(self) -> int:
-        return self._semaphore._value
+        return self._semaphore._value

     @total_tokens.setter
     def total_tokens(self, value: int) -> None:
@@ -55,9 +47,9 @@ class CapacityLimiter:

     async def __aexit__(
         self,
-        exc_type: "Optional[type[BaseException]]",
-        exc_val: "Optional[BaseException]",
-        exc_tb: "Optional[TracebackType]",
+        exc_type: "Optional[type[BaseException]]",
+        exc_val: "Optional[BaseException]",
+        exc_tb: "Optional[TracebackType]",
     ) -> None:
         self.release()
@@ -96,8 +88,7 @@ def run_(async_function: "Callable[ParamSpecT, Coroutine[Any, Any, ReturnT]]") -


 def await_(
-    async_function: "Callable[ParamSpecT, Coroutine[Any, Any, ReturnT]]",
-    raise_sync_error: bool = True,
+    async_function: "Callable[ParamSpecT, Coroutine[Any, Any, ReturnT]]", raise_sync_error: bool = True
 ) -> "Callable[ParamSpecT, ReturnT]":
     """Convert an async function to a blocking one, running in the main async loop.
@@ -118,7 +109,7 @@ def await_(
         except RuntimeError:
             # No running event loop
             if raise_sync_error:
-                msg = "
+                msg = "Cannot run async function"
                 raise RuntimeError(msg) from None
             return asyncio.run(partial_f())
         else:
@@ -145,7 +136,7 @@ def await_(
             # but the loop isn't running, but handle defensively.
             # loop is not running
             if raise_sync_error:
-                msg = "
+                msg = "Cannot run async function"
                 raise RuntimeError(msg)
             # Fallback to running in a new loop
             return asyncio.run(partial_f())
@@ -154,9 +145,7 @@ def await_(


 def async_(
-    function: "Callable[ParamSpecT, ReturnT]",
-    *,
-    limiter: "Optional[CapacityLimiter]" = None,
+    function: "Callable[ParamSpecT, ReturnT]", *, limiter: "Optional[CapacityLimiter]" = None
 ) -> "Callable[ParamSpecT, Awaitable[ReturnT]]":
     """Convert a blocking function to an async one using asyncio.to_thread().
@@ -169,10 +158,8 @@ def async_(
         Callable: An async function that runs the original function in a thread.
     """

-
-
-        **kwargs: "ParamSpecT.kwargs",
-    ) -> "ReturnT":
+    @functools.wraps(function)
+    async def wrapper(*args: "ParamSpecT.args", **kwargs: "ParamSpecT.kwargs") -> "ReturnT":
         partial_f = functools.partial(function, *args, **kwargs)
         used_limiter = limiter or _default_limiter
         async with used_limiter:
@@ -195,6 +182,7 @@ def ensure_async_(
     if inspect.iscoroutinefunction(function):
         return function

+    @functools.wraps(function)
     async def wrapper(*args: "ParamSpecT.args", **kwargs: "ParamSpecT.kwargs") -> "ReturnT":
         result = function(*args, **kwargs)
         if inspect.isawaitable(result):
@@ -213,9 +201,9 @@ class _ContextManagerWrapper(Generic[T]):

     async def __aexit__(
         self,
-        exc_type: "Optional[type[BaseException]]",
-        exc_val: "Optional[BaseException]",
-        exc_tb: "Optional[TracebackType]",
+        exc_type: "Optional[type[BaseException]]",
+        exc_val: "Optional[BaseException]",
+        exc_tb: "Optional[TracebackType]",
     ) -> "Optional[bool]":
         return self._cm.__exit__(exc_type, exc_val, exc_tb)
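A small usage sketch of `async_` and `await_` as defined above; the two wrapped functions are illustrative stand-ins for real driver calls:

```python
import asyncio

from sqlspec.utils.sync_tools import async_, await_


def count_rows() -> int:
    """Stand-in for a blocking driver call."""
    return 42


async def fetch_version() -> str:
    """Stand-in for an async driver call."""
    return "0.12.0"


async def main() -> None:
    # async_() pushes the blocking call onto a worker thread, bounded by a CapacityLimiter
    rows = await async_(count_rows)()
    print(rows)


asyncio.run(main())

# await_() goes the other way: with raise_sync_error=False and no running loop,
# it falls back to asyncio.run() so sync code can call the coroutine function.
print(await_(fetch_version, raise_sync_error=False)())
```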