sqlspec 0.27.0__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec might be problematic. Click here for more details.
- sqlspec/_typing.py +93 -0
- sqlspec/adapters/adbc/adk/store.py +21 -11
- sqlspec/adapters/adbc/data_dictionary.py +27 -5
- sqlspec/adapters/adbc/driver.py +83 -14
- sqlspec/adapters/aiosqlite/adk/store.py +27 -18
- sqlspec/adapters/asyncmy/adk/store.py +26 -16
- sqlspec/adapters/asyncpg/adk/store.py +26 -16
- sqlspec/adapters/asyncpg/data_dictionary.py +24 -17
- sqlspec/adapters/bigquery/adk/store.py +30 -21
- sqlspec/adapters/bigquery/config.py +11 -0
- sqlspec/adapters/bigquery/driver.py +138 -1
- sqlspec/adapters/duckdb/adk/store.py +21 -11
- sqlspec/adapters/duckdb/driver.py +87 -1
- sqlspec/adapters/oracledb/adk/store.py +89 -206
- sqlspec/adapters/oracledb/driver.py +183 -2
- sqlspec/adapters/oracledb/litestar/store.py +22 -24
- sqlspec/adapters/psqlpy/adk/store.py +28 -27
- sqlspec/adapters/psqlpy/data_dictionary.py +24 -17
- sqlspec/adapters/psqlpy/driver.py +7 -10
- sqlspec/adapters/psycopg/adk/store.py +51 -33
- sqlspec/adapters/psycopg/data_dictionary.py +48 -34
- sqlspec/adapters/sqlite/adk/store.py +29 -19
- sqlspec/config.py +100 -2
- sqlspec/core/filters.py +18 -10
- sqlspec/core/result.py +133 -2
- sqlspec/driver/_async.py +89 -0
- sqlspec/driver/_common.py +64 -29
- sqlspec/driver/_sync.py +95 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +2 -2
- sqlspec/extensions/adk/service.py +3 -3
- sqlspec/extensions/adk/store.py +8 -8
- sqlspec/extensions/aiosql/adapter.py +3 -15
- sqlspec/extensions/fastapi/__init__.py +21 -0
- sqlspec/extensions/fastapi/extension.py +331 -0
- sqlspec/extensions/fastapi/providers.py +543 -0
- sqlspec/extensions/flask/__init__.py +36 -0
- sqlspec/extensions/flask/_state.py +71 -0
- sqlspec/extensions/flask/_utils.py +40 -0
- sqlspec/extensions/flask/extension.py +389 -0
- sqlspec/extensions/litestar/config.py +3 -6
- sqlspec/extensions/litestar/plugin.py +26 -2
- sqlspec/extensions/starlette/__init__.py +10 -0
- sqlspec/extensions/starlette/_state.py +25 -0
- sqlspec/extensions/starlette/_utils.py +52 -0
- sqlspec/extensions/starlette/extension.py +254 -0
- sqlspec/extensions/starlette/middleware.py +154 -0
- sqlspec/protocols.py +40 -0
- sqlspec/storage/_utils.py +1 -14
- sqlspec/storage/backends/fsspec.py +3 -5
- sqlspec/storage/backends/local.py +1 -1
- sqlspec/storage/backends/obstore.py +10 -18
- sqlspec/typing.py +16 -0
- sqlspec/utils/__init__.py +25 -4
- sqlspec/utils/arrow_helpers.py +81 -0
- sqlspec/utils/module_loader.py +203 -3
- sqlspec/utils/portal.py +311 -0
- sqlspec/utils/serializers.py +110 -1
- sqlspec/utils/sync_tools.py +15 -5
- sqlspec/utils/type_guards.py +25 -0
- {sqlspec-0.27.0.dist-info → sqlspec-0.28.0.dist-info}/METADATA +2 -2
- {sqlspec-0.27.0.dist-info → sqlspec-0.28.0.dist-info}/RECORD +64 -50
- {sqlspec-0.27.0.dist-info → sqlspec-0.28.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.27.0.dist-info → sqlspec-0.28.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.27.0.dist-info → sqlspec-0.28.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
"""Arrow conversion helpers for dict-to-Arrow transformations.
|
|
2
|
+
|
|
3
|
+
This module provides utilities for converting Python dictionaries to Apache Arrow
|
|
4
|
+
format, handling empty results, NULL values, and type inference.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import TYPE_CHECKING, Any, Literal
|
|
8
|
+
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from sqlspec.typing import ArrowRecordBatch, ArrowTable
|
|
11
|
+
|
|
12
|
+
__all__ = ("convert_dict_to_arrow",)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def convert_dict_to_arrow(
    data: "list[dict[str, Any]]",
    return_format: Literal["table", "reader", "batches"] = "table",
    batch_size: int | None = None,
) -> "ArrowTable | ArrowRecordBatch":
    """Convert list of dictionaries to Arrow Table or RecordBatch.

    Handles empty results, NULL values, and automatic type inference.
    Used by adapters that don't have native Arrow support to convert
    dict-based results to Arrow format.

    Args:
        data: List of dictionaries (one per row). Column names are taken from
            the first row: keys that appear only in later rows are ignored, and
            keys missing from a row become Arrow NULLs.
        return_format: Output format - "table" for Table, "batches" for RecordBatch.
            "reader" is converted to "table" (streaming handled at driver level).
        batch_size: Accepted for API compatibility. Since this function returns a
            single RecordBatch (never a sequence of chunks), the value is
            currently not applied.

    Returns:
        ArrowTable or ArrowRecordBatch depending on return_format.

    Examples:
        >>> data = [
        ...     {"id": 1, "name": "Alice"},
        ...     {"id": 2, "name": "Bob"},
        ... ]
        >>> table = convert_dict_to_arrow(data, return_format="table")
        >>> print(table.num_rows)
        2

        >>> batch = convert_dict_to_arrow(data, return_format="batches")
        >>> print(batch.num_rows)
        2
    """
    from sqlspec.utils.module_loader import ensure_pyarrow

    ensure_pyarrow()

    import pyarrow as pa

    # Empty result set: return a zero-column table (or an empty RecordBatch).
    if not data:
        empty_schema = pa.schema([])
        empty_table = pa.Table.from_pydict({}, schema=empty_schema)

        if return_format == "batches":
            # A zero-row table may yield no batches at all; fall back explicitly.
            batches = empty_table.to_batches()
            return batches[0] if batches else pa.RecordBatch.from_pydict({})

        return empty_table

    # Pivot rows into columns in one pass; row.get() maps missing keys to None,
    # which Arrow stores as NULL. More efficient than row-by-row conversion.
    # NOTE: the schema is derived from data[0] only (see docstring).
    columns: dict[str, list[Any]] = {key: [row.get(key) for row in data] for key in data[0]}

    # Let PyArrow infer column types from the Python values.
    arrow_table = pa.Table.from_pydict(columns)

    if return_format == "batches":
        # from_pydict produces single-chunk columns, so this yields one batch
        # containing all rows.
        batches = arrow_table.to_batches()
        return batches[0] if batches else pa.RecordBatch.from_pydict({})

    # return_format == "table" or "reader" (reader handled at driver level)
    return arrow_table
|
sqlspec/utils/module_loader.py
CHANGED
|
@@ -1,7 +1,8 @@
|
|
|
1
1
|
"""Module loading utilities for SQLSpec.
|
|
2
2
|
|
|
3
|
-
Provides functions for dynamic module imports
|
|
4
|
-
Used for loading modules from dotted paths
|
|
3
|
+
Provides functions for dynamic module imports, path resolution, and dependency
|
|
4
|
+
availability checking. Used for loading modules from dotted paths, converting
|
|
5
|
+
module paths to filesystem paths, and ensuring optional dependencies are installed.
|
|
5
6
|
"""
|
|
6
7
|
|
|
7
8
|
import importlib
|
|
@@ -9,7 +10,46 @@ from importlib.util import find_spec
|
|
|
9
10
|
from pathlib import Path
|
|
10
11
|
from typing import Any
|
|
11
12
|
|
|
12
|
-
|
|
13
|
+
from sqlspec.exceptions import MissingDependencyError
|
|
14
|
+
from sqlspec.typing import (
|
|
15
|
+
AIOSQL_INSTALLED,
|
|
16
|
+
ATTRS_INSTALLED,
|
|
17
|
+
CATTRS_INSTALLED,
|
|
18
|
+
FSSPEC_INSTALLED,
|
|
19
|
+
LITESTAR_INSTALLED,
|
|
20
|
+
MSGSPEC_INSTALLED,
|
|
21
|
+
NUMPY_INSTALLED,
|
|
22
|
+
OBSTORE_INSTALLED,
|
|
23
|
+
OPENTELEMETRY_INSTALLED,
|
|
24
|
+
ORJSON_INSTALLED,
|
|
25
|
+
PANDAS_INSTALLED,
|
|
26
|
+
PGVECTOR_INSTALLED,
|
|
27
|
+
POLARS_INSTALLED,
|
|
28
|
+
PROMETHEUS_INSTALLED,
|
|
29
|
+
PYARROW_INSTALLED,
|
|
30
|
+
PYDANTIC_INSTALLED,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
__all__ = (
|
|
34
|
+
"ensure_aiosql",
|
|
35
|
+
"ensure_attrs",
|
|
36
|
+
"ensure_cattrs",
|
|
37
|
+
"ensure_fsspec",
|
|
38
|
+
"ensure_litestar",
|
|
39
|
+
"ensure_msgspec",
|
|
40
|
+
"ensure_numpy",
|
|
41
|
+
"ensure_obstore",
|
|
42
|
+
"ensure_opentelemetry",
|
|
43
|
+
"ensure_orjson",
|
|
44
|
+
"ensure_pandas",
|
|
45
|
+
"ensure_pgvector",
|
|
46
|
+
"ensure_polars",
|
|
47
|
+
"ensure_prometheus",
|
|
48
|
+
"ensure_pyarrow",
|
|
49
|
+
"ensure_pydantic",
|
|
50
|
+
"import_string",
|
|
51
|
+
"module_to_os_path",
|
|
52
|
+
)
|
|
13
53
|
|
|
14
54
|
|
|
15
55
|
def module_to_os_path(dotted_path: str = "app") -> "Path":
|
|
@@ -91,3 +131,163 @@ def import_string(dotted_path: str) -> "Any":
|
|
|
91
131
|
except Exception as e: # pylint: disable=broad-exception-caught
|
|
92
132
|
_raise_import_error(f"Could not import '{dotted_path}': {e}", e)
|
|
93
133
|
return obj
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _require_dependency(installed: bool, package: str, install_package: str) -> None:
    """Shared guard used by the ensure_* helpers below.

    Args:
        installed: Importability flag from sqlspec.typing (e.g. PYARROW_INSTALLED).
        package: Distribution name reported in the error message.
        install_package: Extra/package name suggested for installation.

    Raises:
        MissingDependencyError: If the dependency is not installed.
    """
    if not installed:
        raise MissingDependencyError(package=package, install_package=install_package)


def ensure_aiosql() -> None:
    """Ensure aiosql is available.

    Raises:
        MissingDependencyError: If aiosql is not installed.
    """
    _require_dependency(AIOSQL_INSTALLED, "aiosql", "aiosql")


def ensure_attrs() -> None:
    """Ensure attrs is available.

    Raises:
        MissingDependencyError: If attrs is not installed.
    """
    _require_dependency(ATTRS_INSTALLED, "attrs", "attrs")


def ensure_cattrs() -> None:
    """Ensure cattrs is available.

    Raises:
        MissingDependencyError: If cattrs is not installed.
    """
    _require_dependency(CATTRS_INSTALLED, "cattrs", "cattrs")


def ensure_fsspec() -> None:
    """Ensure fsspec is available for filesystem operations.

    Raises:
        MissingDependencyError: If fsspec is not installed.
    """
    _require_dependency(FSSPEC_INSTALLED, "fsspec", "fsspec")


def ensure_litestar() -> None:
    """Ensure Litestar is available.

    Raises:
        MissingDependencyError: If litestar is not installed.
    """
    _require_dependency(LITESTAR_INSTALLED, "litestar", "litestar")


def ensure_msgspec() -> None:
    """Ensure msgspec is available for serialization.

    Raises:
        MissingDependencyError: If msgspec is not installed.
    """
    _require_dependency(MSGSPEC_INSTALLED, "msgspec", "msgspec")


def ensure_numpy() -> None:
    """Ensure NumPy is available for array operations.

    Raises:
        MissingDependencyError: If numpy is not installed.
    """
    _require_dependency(NUMPY_INSTALLED, "numpy", "numpy")


def ensure_obstore() -> None:
    """Ensure obstore is available for object storage operations.

    Raises:
        MissingDependencyError: If obstore is not installed.
    """
    _require_dependency(OBSTORE_INSTALLED, "obstore", "obstore")


def ensure_opentelemetry() -> None:
    """Ensure OpenTelemetry is available for tracing.

    Raises:
        MissingDependencyError: If opentelemetry-api is not installed.
    """
    _require_dependency(OPENTELEMETRY_INSTALLED, "opentelemetry-api", "opentelemetry")


def ensure_orjson() -> None:
    """Ensure orjson is available for fast JSON operations.

    Raises:
        MissingDependencyError: If orjson is not installed.
    """
    _require_dependency(ORJSON_INSTALLED, "orjson", "orjson")


def ensure_pandas() -> None:
    """Ensure pandas is available for DataFrame operations.

    Raises:
        MissingDependencyError: If pandas is not installed.
    """
    _require_dependency(PANDAS_INSTALLED, "pandas", "pandas")


def ensure_pgvector() -> None:
    """Ensure pgvector is available for vector operations.

    Raises:
        MissingDependencyError: If pgvector is not installed.
    """
    _require_dependency(PGVECTOR_INSTALLED, "pgvector", "pgvector")


def ensure_polars() -> None:
    """Ensure Polars is available for DataFrame operations.

    Raises:
        MissingDependencyError: If polars is not installed.
    """
    _require_dependency(POLARS_INSTALLED, "polars", "polars")


def ensure_prometheus() -> None:
    """Ensure Prometheus client is available for metrics.

    Raises:
        MissingDependencyError: If prometheus-client is not installed.
    """
    _require_dependency(PROMETHEUS_INSTALLED, "prometheus-client", "prometheus")


def ensure_pyarrow() -> None:
    """Ensure PyArrow is available for Arrow operations.

    Raises:
        MissingDependencyError: If pyarrow is not installed.
    """
    _require_dependency(PYARROW_INSTALLED, "pyarrow", "pyarrow")


def ensure_pydantic() -> None:
    """Ensure Pydantic is available for data validation.

    Raises:
        MissingDependencyError: If pydantic is not installed.
    """
    _require_dependency(PYDANTIC_INSTALLED, "pydantic", "pydantic")
|
sqlspec/utils/portal.py
ADDED
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
"""Portal provider for calling async functions from synchronous contexts.
|
|
2
|
+
|
|
3
|
+
Provides a background thread with an event loop to execute async database operations
|
|
4
|
+
from sync frameworks like Flask. Based on the portal pattern from Advanced Alchemy.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import functools
|
|
9
|
+
import queue
|
|
10
|
+
import threading
|
|
11
|
+
from typing import TYPE_CHECKING, Any, TypeVar
|
|
12
|
+
|
|
13
|
+
from sqlspec.exceptions import ImproperConfigurationError
|
|
14
|
+
from sqlspec.utils.logging import get_logger
|
|
15
|
+
from sqlspec.utils.singleton import SingletonMeta
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from collections.abc import Callable, Coroutine
|
|
19
|
+
|
|
20
|
+
__all__ = ("Portal", "PortalManager", "PortalProvider", "get_global_portal")
|
|
21
|
+
|
|
22
|
+
logger = get_logger("utils.portal")
|
|
23
|
+
|
|
24
|
+
_R = TypeVar("_R")
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class PortalProvider:
    """Manages a background thread with event loop for async operations.

    Creates a daemon thread running an event loop to execute async functions
    from synchronous contexts (Flask routes, etc.).
    """

    def __init__(self) -> None:
        """Initialize the PortalProvider."""
        # Each queued request is (async func, args, kwargs, per-call result queue).
        # The per-call queue is how the loop thread hands (result, exception)
        # back to the blocked caller thread.
        # NOTE(review): Callable/Coroutine are imported only under TYPE_CHECKING;
        # this attribute annotation is evaluated at runtime on most Python
        # versions — confirm `from __future__ import annotations` is in effect
        # for this module.
        self._request_queue: queue.Queue[
            tuple[
                Callable[..., Coroutine[Any, Any, Any]],
                tuple[Any, ...],
                dict[str, Any],
                queue.Queue[tuple[Any | None, Exception | None]],
            ]
        ] = queue.Queue()
        self._loop: asyncio.AbstractEventLoop | None = None
        self._thread: threading.Thread | None = None
        # Set by the background thread once its event loop exists; start() waits on it.
        self._ready_event: threading.Event = threading.Event()

    @property
    def portal(self) -> "Portal":
        """The portal instance for calling async functions.

        Returns:
            Portal instance.
        """
        # A fresh Portal is constructed on every access; Portal is a stateless
        # wrapper around this provider, so that is harmless.
        return Portal(self)

    @property
    def is_running(self) -> bool:
        """Check if portal provider is running.

        Returns:
            True if thread is alive, False otherwise.
        """
        return self._thread is not None and self._thread.is_alive()

    @property
    def is_ready(self) -> bool:
        """Check if portal provider is ready.

        Returns:
            True if ready event is set, False otherwise.
        """
        return self._ready_event.is_set()

    @property
    def loop(self) -> "asyncio.AbstractEventLoop":
        """Get the event loop.

        Returns:
            The event loop.

        Raises:
            ImproperConfigurationError: If portal provider not started.
        """
        if self._loop is None:
            msg = "Portal provider not started. Call start() first."
            raise ImproperConfigurationError(msg)
        return self._loop

    def start(self) -> None:
        """Start the background thread and event loop.

        Creates a daemon thread running an event loop for async operations.
        """
        # Idempotent: a second start() is a no-op while a thread exists.
        if self._thread is not None:
            logger.debug("Portal provider already started")
            return

        # Daemon thread so a forgotten portal never blocks interpreter exit.
        self._thread = threading.Thread(target=self._run_event_loop, daemon=True)
        self._thread.start()
        # Block until the loop exists and is about to run (see _run_event_loop).
        self._ready_event.wait()
        logger.debug("Portal provider started")

    def stop(self) -> None:
        """Stop the background thread and event loop.

        Gracefully shuts down the event loop and waits for thread to finish.
        """
        if self._loop is None or self._thread is None:
            logger.debug("Portal provider not running")
            return

        # loop.stop() must be scheduled from inside the loop thread.
        self._loop.call_soon_threadsafe(self._loop.stop)
        self._thread.join(timeout=5)

        if self._thread.is_alive():
            logger.warning("Portal thread did not stop within 5 seconds")

        self._loop.close()
        self._loop = None
        self._thread = None
        # Clearing the ready event allows a subsequent start() to wait correctly.
        self._ready_event.clear()
        logger.debug("Portal provider stopped")

    def _run_event_loop(self) -> None:
        """Main function of the background thread.

        Creates event loop and runs forever until stopped.
        """
        if self._loop is None:
            self._loop = asyncio.new_event_loop()

        asyncio.set_event_loop(self._loop)
        # Signal start() that the loop is installed; run_forever() follows
        # immediately, so callers may schedule work as soon as this is set.
        self._ready_event.set()
        self._loop.run_forever()

    @staticmethod
    async def _async_caller(
        func: "Callable[..., Coroutine[Any, Any, _R]]", args: "tuple[Any, ...]", kwargs: "dict[str, Any]"
    ) -> _R:
        """Wrapper to run async function.

        Args:
            func: The async function to call.
            args: Positional arguments.
            kwargs: Keyword arguments.

        Returns:
            Result of the async function.
        """
        result: _R = await func(*args, **kwargs)
        return result

    def call(self, func: "Callable[..., Coroutine[Any, Any, _R]]", *args: Any, **kwargs: Any) -> _R:
        """Call an async function from synchronous context.

        Executes the async function in the background event loop and blocks
        until the result is available.

        Args:
            func: The async function to call.
            *args: Positional arguments to the function.
            **kwargs: Keyword arguments to the function.

        Returns:
            Result of the async function.

        Raises:
            ImproperConfigurationError: If portal provider not started.
        """
        if self._loop is None:
            msg = "Portal provider not started. Call start() first."
            raise ImproperConfigurationError(msg)

        # Dedicated result queue for this call; get() below blocks on it.
        local_result_queue: queue.Queue[tuple[_R | None, Exception | None]] = queue.Queue()

        self._request_queue.put((func, args, kwargs, local_result_queue))

        # Wake the loop thread to drain one request (see _process_request).
        self._loop.call_soon_threadsafe(self._process_request)

        # Blocks the calling (sync) thread until the loop thread publishes
        # either a result or the exception raised by the coroutine.
        result, exception = local_result_queue.get()

        if exception:
            raise exception
        return result  # type: ignore[return-value]

    def _process_request(self) -> None:
        """Process a request from the request queue in the event loop."""
        if self._loop is None:
            return

        # Runs in the loop thread. Each call() schedules exactly one invocation
        # of this method, so draining at most one request keeps them paired.
        if not self._request_queue.empty():
            func, args, kwargs, local_result_queue = self._request_queue.get()
            future = asyncio.run_coroutine_threadsafe(self._async_caller(func, args, kwargs), self._loop)

            # Deliver the outcome to the waiting caller once the coroutine finishes.
            future.add_done_callback(
                functools.partial(self._handle_future_result, local_result_queue=local_result_queue)  # pyright: ignore[reportArgumentType]
            )

    @staticmethod
    def _handle_future_result(
        future: "asyncio.Future[Any]", local_result_queue: "queue.Queue[tuple[Any | None, Exception | None]]"
    ) -> None:
        """Handle result or exception from completed future.

        Args:
            future: The completed future.
            local_result_queue: Queue to put result in.
        """
        try:
            result = future.result()
            local_result_queue.put((result, None))
        except Exception as exc:
            # Ship the exception back so call() can re-raise it in the caller's thread.
            local_result_queue.put((None, exc))
|
|
217
|
+
class Portal:
    """Thin, stateless handle over a :class:`PortalProvider`.

    Exposes only the ``call`` operation so consumers can bridge into the
    provider's background event loop without touching its lifecycle methods.
    """

    def __init__(self, provider: "PortalProvider") -> None:
        """Bind this portal to a provider.

        Args:
            provider: The portal provider instance.
        """
        self._provider = provider

    def call(self, func: "Callable[..., Coroutine[Any, Any, _R]]", *args: Any, **kwargs: Any) -> _R:
        """Run an async function on the provider's loop and return its result.

        Args:
            func: The async function to call.
            *args: Positional arguments to the function.
            **kwargs: Keyword arguments to the function.

        Returns:
            Result of the async function.
        """
        provider = self._provider
        return provider.call(func, *args, **kwargs)
|
|
241
|
+
|
|
242
|
+
class PortalManager(metaclass=SingletonMeta):
    """Singleton manager for global portal instance.

    Provides a global portal for use by sync_tools and other utilities
    that need to call async functions from synchronous contexts without
    an existing event loop.

    Example:
        manager = PortalManager()
        portal = manager.get_or_create_portal()
        result = portal.call(some_async_function, arg1, arg2)
    """

    def __init__(self) -> None:
        """Initialize the PortalManager singleton."""
        self._provider: PortalProvider | None = None
        self._portal: Portal | None = None
        self._lock = threading.Lock()

    def get_or_create_portal(self) -> Portal:
        """Get or create the global portal instance.

        Lazily creates and starts the portal provider on first access.
        Thread-safe via locking.

        Returns:
            Global portal instance.
        """
        # Fast path: already initialized, no lock needed.
        if self._portal is not None:
            return self._portal

        # Double-checked locking: re-test under the lock before creating.
        with self._lock:
            if self._portal is None:
                provider = PortalProvider()
                provider.start()
                self._provider = provider
                self._portal = Portal(provider)
                logger.debug("Global portal provider created and started")
        return self._portal

    @property
    def is_running(self) -> bool:
        """Check if global portal is running.

        Returns:
            True if portal provider exists and is running, False otherwise.
        """
        provider = self._provider
        return provider is not None and provider.is_running

    def stop(self) -> None:
        """Stop the global portal provider.

        Should typically only be called during application shutdown.
        """
        provider = self._provider
        if provider is None:
            return
        provider.stop()
        self._provider = None
        self._portal = None
        logger.debug("Global portal provider stopped")
|
|
301
|
+
def get_global_portal() -> Portal:
    """Get the global portal instance for async-to-sync bridging.

    Convenience function that creates and returns the singleton portal.
    Used by sync_tools and other utilities.

    Returns:
        Global portal instance.
    """
    # PortalManager is a singleton, so this always yields the same portal.
    return PortalManager().get_or_create_portal()
|