sqlspec 0.17.1__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of sqlspec might be problematic.

Files changed (77)
  1. sqlspec/__init__.py +1 -1
  2. sqlspec/_sql.py +54 -159
  3. sqlspec/adapters/adbc/config.py +24 -30
  4. sqlspec/adapters/adbc/driver.py +42 -61
  5. sqlspec/adapters/aiosqlite/config.py +5 -10
  6. sqlspec/adapters/aiosqlite/driver.py +9 -25
  7. sqlspec/adapters/aiosqlite/pool.py +43 -35
  8. sqlspec/adapters/asyncmy/config.py +10 -7
  9. sqlspec/adapters/asyncmy/driver.py +18 -39
  10. sqlspec/adapters/asyncpg/config.py +4 -0
  11. sqlspec/adapters/asyncpg/driver.py +32 -79
  12. sqlspec/adapters/bigquery/config.py +12 -65
  13. sqlspec/adapters/bigquery/driver.py +39 -133
  14. sqlspec/adapters/duckdb/config.py +11 -15
  15. sqlspec/adapters/duckdb/driver.py +61 -85
  16. sqlspec/adapters/duckdb/pool.py +2 -5
  17. sqlspec/adapters/oracledb/_types.py +8 -1
  18. sqlspec/adapters/oracledb/config.py +55 -38
  19. sqlspec/adapters/oracledb/driver.py +35 -92
  20. sqlspec/adapters/oracledb/migrations.py +257 -0
  21. sqlspec/adapters/psqlpy/config.py +13 -9
  22. sqlspec/adapters/psqlpy/driver.py +28 -103
  23. sqlspec/adapters/psycopg/config.py +9 -5
  24. sqlspec/adapters/psycopg/driver.py +107 -175
  25. sqlspec/adapters/sqlite/config.py +7 -5
  26. sqlspec/adapters/sqlite/driver.py +37 -73
  27. sqlspec/adapters/sqlite/pool.py +3 -12
  28. sqlspec/base.py +19 -22
  29. sqlspec/builder/__init__.py +1 -1
  30. sqlspec/builder/_base.py +34 -20
  31. sqlspec/builder/_ddl.py +407 -183
  32. sqlspec/builder/_insert.py +1 -1
  33. sqlspec/builder/mixins/_insert_operations.py +26 -6
  34. sqlspec/builder/mixins/_merge_operations.py +1 -1
  35. sqlspec/builder/mixins/_select_operations.py +1 -5
  36. sqlspec/cli.py +281 -33
  37. sqlspec/config.py +183 -14
  38. sqlspec/core/__init__.py +89 -14
  39. sqlspec/core/cache.py +57 -104
  40. sqlspec/core/compiler.py +57 -112
  41. sqlspec/core/filters.py +1 -21
  42. sqlspec/core/hashing.py +13 -47
  43. sqlspec/core/parameters.py +272 -261
  44. sqlspec/core/result.py +12 -27
  45. sqlspec/core/splitter.py +17 -21
  46. sqlspec/core/statement.py +150 -159
  47. sqlspec/driver/_async.py +2 -15
  48. sqlspec/driver/_common.py +16 -95
  49. sqlspec/driver/_sync.py +2 -15
  50. sqlspec/driver/mixins/_result_tools.py +8 -29
  51. sqlspec/driver/mixins/_sql_translator.py +6 -8
  52. sqlspec/exceptions.py +1 -2
  53. sqlspec/extensions/litestar/plugin.py +15 -8
  54. sqlspec/loader.py +43 -115
  55. sqlspec/migrations/__init__.py +1 -1
  56. sqlspec/migrations/base.py +34 -45
  57. sqlspec/migrations/commands.py +34 -15
  58. sqlspec/migrations/loaders.py +1 -1
  59. sqlspec/migrations/runner.py +104 -19
  60. sqlspec/migrations/tracker.py +49 -2
  61. sqlspec/protocols.py +3 -6
  62. sqlspec/storage/__init__.py +4 -4
  63. sqlspec/storage/backends/fsspec.py +5 -6
  64. sqlspec/storage/backends/obstore.py +7 -8
  65. sqlspec/storage/registry.py +3 -3
  66. sqlspec/utils/__init__.py +2 -2
  67. sqlspec/utils/logging.py +6 -10
  68. sqlspec/utils/sync_tools.py +27 -4
  69. sqlspec/utils/text.py +6 -1
  70. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/METADATA +1 -1
  71. sqlspec-0.19.0.dist-info/RECORD +138 -0
  72. sqlspec/builder/_ddl_utils.py +0 -103
  73. sqlspec-0.17.1.dist-info/RECORD +0 -138
  74. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/WHEEL +0 -0
  75. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/entry_points.txt +0 -0
  76. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/licenses/LICENSE +0 -0
  77. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/migrations/runner.py CHANGED
@@ -5,13 +5,13 @@ This module handles migration file loading and execution using SQLFileLoader.
 
 import time
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any, Optional, cast
 
 from sqlspec.core.statement import SQL
 from sqlspec.migrations.base import BaseMigrationRunner
 from sqlspec.migrations.loaders import get_migration_loader
 from sqlspec.utils.logging import get_logger
-from sqlspec.utils.sync_tools import run_
+from sqlspec.utils.sync_tools import await_
 
 if TYPE_CHECKING:
     from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase
@@ -22,7 +22,7 @@ logger = get_logger("migrations.runner")
 
 
 class SyncMigrationRunner(BaseMigrationRunner["SyncDriverAdapterBase"]):
-    """Executes migrations using SQLFileLoader."""
+    """Synchronous migration executor."""
 
     def get_migration_files(self) -> "list[tuple[str, Path]]":
         """Get all migration files sorted by version.
@@ -55,12 +55,15 @@ class SyncMigrationRunner(BaseMigrationRunner["SyncDriverAdapterBase"]):
         Returns:
             Tuple of (sql_content, execution_time_ms).
         """
-        upgrade_sql = self._get_migration_sql(migration, "up")
-        if upgrade_sql is None:
+        upgrade_sql_list = self._get_migration_sql(migration, "up")
+        if upgrade_sql_list is None:
             return None, 0
 
         start_time = time.time()
-        driver.execute(upgrade_sql)
+
+        for sql_statement in upgrade_sql_list:
+            if sql_statement.strip():
+                driver.execute_script(sql_statement)
         execution_time = int((time.time() - start_time) * 1000)
         return None, execution_time
 
@@ -76,12 +79,15 @@ class SyncMigrationRunner(BaseMigrationRunner["SyncDriverAdapterBase"]):
         Returns:
             Tuple of (sql_content, execution_time_ms).
         """
-        downgrade_sql = self._get_migration_sql(migration, "down")
-        if downgrade_sql is None:
+        downgrade_sql_list = self._get_migration_sql(migration, "down")
+        if downgrade_sql_list is None:
             return None, 0
 
         start_time = time.time()
-        driver.execute(downgrade_sql)
+
+        for sql_statement in downgrade_sql_list:
+            if sql_statement.strip():
+                driver.execute_script(sql_statement)
         execution_time = int((time.time() - start_time) * 1000)
         return None, execution_time
 
@@ -103,8 +109,8 @@ class SyncMigrationRunner(BaseMigrationRunner["SyncDriverAdapterBase"]):
         loader = get_migration_loader(file_path, self.migrations_path, self.project_root)
 
         try:
-            up_sql = run_(loader.get_up_sql)(file_path)
-            down_sql = run_(loader.get_down_sql)(file_path)
+            up_sql = await_(loader.get_up_sql, raise_sync_error=False)(file_path)
+            down_sql = await_(loader.get_down_sql, raise_sync_error=False)(file_path)
 
             if up_sql:
                 all_queries[f"migrate-{version}-up"] = SQL(up_sql[0])
@@ -118,7 +124,7 @@ class SyncMigrationRunner(BaseMigrationRunner["SyncDriverAdapterBase"]):
 
 
 class AsyncMigrationRunner(BaseMigrationRunner["AsyncDriverAdapterBase"]):
-    """Executes migrations using SQLFileLoader."""
+    """Asynchronous migration executor."""
 
     async def get_migration_files(self) -> "list[tuple[str, Path]]":
         """Get all migration files sorted by version.
@@ -137,7 +143,80 @@ class AsyncMigrationRunner(BaseMigrationRunner["AsyncDriverAdapterBase"]):
         Returns:
             Dictionary containing migration metadata.
         """
-        return self._load_migration_metadata(file_path)
+        return await self._load_migration_metadata_async(file_path)
+
+    async def _load_migration_metadata_async(self, file_path: Path) -> "dict[str, Any]":
+        """Load migration metadata from file (async version).
+
+        Args:
+            file_path: Path to the migration file.
+
+        Returns:
+            Migration metadata dictionary.
+        """
+        loader = get_migration_loader(file_path, self.migrations_path, self.project_root)
+        loader.validate_migration_file(file_path)
+        content = file_path.read_text(encoding="utf-8")
+        checksum = self._calculate_checksum(content)
+        version = self._extract_version(file_path.name)
+        description = file_path.stem.split("_", 1)[1] if "_" in file_path.stem else ""
+
+        has_upgrade, has_downgrade = True, False
+
+        if file_path.suffix == ".sql":
+            up_query, down_query = f"migrate-{version}-up", f"migrate-{version}-down"
+            self.loader.clear_cache()
+            self.loader.load_sql(file_path)
+            has_upgrade, has_downgrade = self.loader.has_query(up_query), self.loader.has_query(down_query)
+        else:
+            try:
+                has_downgrade = bool(await loader.get_down_sql(file_path))
+            except Exception:
+                has_downgrade = False
+
+        return {
+            "version": version,
+            "description": description,
+            "file_path": file_path,
+            "checksum": checksum,
+            "has_upgrade": has_upgrade,
+            "has_downgrade": has_downgrade,
+            "loader": loader,
+        }
+
+    async def _get_migration_sql_async(self, migration: "dict[str, Any]", direction: str) -> "Optional[list[str]]":
+        """Get migration SQL for given direction (async version).
+
+        Args:
+            migration: Migration metadata.
+            direction: Either 'up' or 'down'.
+
+        Returns:
+            SQL statements for the migration.
+        """
+        if not migration.get(f"has_{direction}grade"):
+            if direction == "down":
+                logger.warning("Migration %s has no downgrade query", migration["version"])
+                return None
+            msg = f"Migration {migration['version']} has no upgrade query"
+            raise ValueError(msg)
+
+        file_path, loader = migration["file_path"], migration["loader"]
+
+        try:
+            method = loader.get_up_sql if direction == "up" else loader.get_down_sql
+            sql_statements = await method(file_path)
+
+        except Exception as e:
+            if direction == "down":
+                logger.warning("Failed to load downgrade for migration %s: %s", migration["version"], e)
+                return None
+            msg = f"Failed to load upgrade for migration {migration['version']}: {e}"
+            raise ValueError(msg) from e
+        else:
+            if sql_statements:
+                return cast("list[str]", sql_statements)
+            return None
 
     async def execute_upgrade(
         self, driver: "AsyncDriverAdapterBase", migration: "dict[str, Any]"
@@ -151,12 +230,15 @@ class AsyncMigrationRunner(BaseMigrationRunner["AsyncDriverAdapterBase"]):
         Returns:
             Tuple of (sql_content, execution_time_ms).
         """
-        upgrade_sql = self._get_migration_sql(migration, "up")
-        if upgrade_sql is None:
+        upgrade_sql_list = await self._get_migration_sql_async(migration, "up")
+        if upgrade_sql_list is None:
             return None, 0
 
         start_time = time.time()
-        await driver.execute(upgrade_sql)
+
+        for sql_statement in upgrade_sql_list:
+            if sql_statement.strip():
+                await driver.execute_script(sql_statement)
         execution_time = int((time.time() - start_time) * 1000)
         return None, execution_time
 
@@ -172,12 +254,15 @@ class AsyncMigrationRunner(BaseMigrationRunner["AsyncDriverAdapterBase"]):
         Returns:
             Tuple of (sql_content, execution_time_ms).
         """
-        downgrade_sql = self._get_migration_sql(migration, "down")
-        if downgrade_sql is None:
+        downgrade_sql_list = await self._get_migration_sql_async(migration, "down")
+        if downgrade_sql_list is None:
            return None, 0
 
        start_time = time.time()
-        await driver.execute(downgrade_sql)
+
+        for sql_statement in downgrade_sql_list:
+            if sql_statement.strip():
+                await driver.execute_script(sql_statement)
        execution_time = int((time.time() - start_time) * 1000)
        return None, execution_time
 
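Note on the runner change above: migration SQL is now handled as a list of statements, and each non-empty entry is passed to the driver's execute_script method instead of a single driver.execute call. Below is a minimal sketch of that per-statement loop using a stand-in driver object rather than a real sqlspec adapter (the names here are illustrative only):

    import time
    from typing import Optional

    class RecordingDriver:
        """Stand-in for a sync driver adapter; it just records each script it runs."""

        def __init__(self) -> None:
            self.executed: list[str] = []

        def execute_script(self, sql: str) -> None:
            self.executed.append(sql)

    def run_statements(driver: RecordingDriver, statements: Optional[list[str]]) -> tuple[None, int]:
        # Mirrors the upgraded execute_upgrade/execute_downgrade flow:
        # skip empty entries, run each statement as its own script, time the batch.
        if statements is None:
            return None, 0
        start = time.time()
        for stmt in statements:
            if stmt.strip():
                driver.execute_script(stmt)
        return None, int((time.time() - start) * 1000)

    driver = RecordingDriver()
    _, elapsed_ms = run_statements(driver, ["CREATE TABLE t (id INTEGER)", "", "CREATE INDEX i ON t (id)"])
    print(driver.executed, elapsed_ms)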
sqlspec/migrations/tracker.py CHANGED
@@ -7,15 +7,18 @@ import os
 from typing import TYPE_CHECKING, Any, Optional
 
 from sqlspec.migrations.base import BaseMigrationTracker
+from sqlspec.utils.logging import get_logger
 
 if TYPE_CHECKING:
     from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase
 
 __all__ = ("AsyncMigrationTracker", "SyncMigrationTracker")
 
+logger = get_logger("migrations.tracker")
+
 
 class SyncMigrationTracker(BaseMigrationTracker["SyncDriverAdapterBase"]):
-    """Tracks applied migrations in the database."""
+    """Synchronous migration version tracker."""
 
     def ensure_tracking_table(self, driver: "SyncDriverAdapterBase") -> None:
         """Create the migration tracking table if it doesn't exist.
@@ -24,6 +27,7 @@ class SyncMigrationTracker(BaseMigrationTracker["SyncDriverAdapterBase"]):
             driver: The database driver to use.
         """
         driver.execute(self._get_create_table_sql())
+        self._safe_commit(driver)
 
     def get_current_version(self, driver: "SyncDriverAdapterBase") -> Optional[str]:
         """Get the latest applied migration version.
@@ -66,6 +70,7 @@ class SyncMigrationTracker(BaseMigrationTracker["SyncDriverAdapterBase"]):
                 version, description, execution_time_ms, checksum, os.environ.get("USER", "unknown")
             )
         )
+        self._safe_commit(driver)
 
     def remove_migration(self, driver: "SyncDriverAdapterBase", version: str) -> None:
         """Remove a migration record (used during downgrade).
@@ -75,10 +80,30 @@ class SyncMigrationTracker(BaseMigrationTracker["SyncDriverAdapterBase"]):
             version: Version number to remove.
         """
         driver.execute(self._get_remove_migration_sql(version))
+        self._safe_commit(driver)
+
+    def _safe_commit(self, driver: "SyncDriverAdapterBase") -> None:
+        """Safely commit a transaction only if autocommit is disabled.
+
+        Args:
+            driver: The database driver to use.
+        """
+        try:
+            connection = getattr(driver, "connection", None)
+            if connection and hasattr(connection, "autocommit") and getattr(connection, "autocommit", False):
+                return
+
+            driver_features = getattr(driver, "driver_features", {})
+            if driver_features and driver_features.get("autocommit", False):
+                return
+
+            driver.commit()
+        except Exception:
+            logger.debug("Failed to commit transaction, likely due to autocommit being enabled")
 
 
 class AsyncMigrationTracker(BaseMigrationTracker["AsyncDriverAdapterBase"]):
-    """Tracks applied migrations in the database."""
+    """Asynchronous migration version tracker."""
 
     async def ensure_tracking_table(self, driver: "AsyncDriverAdapterBase") -> None:
         """Create the migration tracking table if it doesn't exist.
@@ -87,6 +112,7 @@ class AsyncMigrationTracker(BaseMigrationTracker["AsyncDriverAdapterBase"]):
             driver: The database driver to use.
         """
         await driver.execute(self._get_create_table_sql())
+        await self._safe_commit_async(driver)
 
     async def get_current_version(self, driver: "AsyncDriverAdapterBase") -> Optional[str]:
         """Get the latest applied migration version.
@@ -129,6 +155,7 @@ class AsyncMigrationTracker(BaseMigrationTracker["AsyncDriverAdapterBase"]):
                 version, description, execution_time_ms, checksum, os.environ.get("USER", "unknown")
             )
         )
+        await self._safe_commit_async(driver)
 
     async def remove_migration(self, driver: "AsyncDriverAdapterBase", version: str) -> None:
         """Remove a migration record (used during downgrade).
@@ -138,3 +165,23 @@ class AsyncMigrationTracker(BaseMigrationTracker["AsyncDriverAdapterBase"]):
             version: Version number to remove.
         """
         await driver.execute(self._get_remove_migration_sql(version))
+        await self._safe_commit_async(driver)
+
+    async def _safe_commit_async(self, driver: "AsyncDriverAdapterBase") -> None:
+        """Safely commit a transaction only if autocommit is disabled.
+
+        Args:
+            driver: The database driver to use.
+        """
+        try:
+            connection = getattr(driver, "connection", None)
+            if connection and hasattr(connection, "autocommit") and getattr(connection, "autocommit", False):
+                return
+
+            driver_features = getattr(driver, "driver_features", {})
+            if driver_features and driver_features.get("autocommit", False):
+                return
+
+            await driver.commit()
+        except Exception:
+            logger.debug("Failed to commit transaction, likely due to autocommit being enabled")
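The tracker hunks above add an explicit commit after each tracking-table write, skipped when autocommit is detected either on the connection or in the driver's feature flags. A rough illustration of that decision order with stub objects (these are not sqlspec types, and the real helpers additionally swallow commit errors):

    from typing import Any

    class StubConnection:
        def __init__(self, autocommit: bool) -> None:
            self.autocommit = autocommit

    class StubDriver:
        """Stand-in exposing the attributes the tracker probes."""

        def __init__(self, autocommit: bool) -> None:
            self.connection = StubConnection(autocommit)
            self.driver_features: dict[str, Any] = {}
            self.commits = 0

        def commit(self) -> None:
            self.commits += 1

    def safe_commit(driver: StubDriver) -> None:
        # Same order as the new _safe_commit helper: connection-level autocommit
        # wins, then driver_features["autocommit"], otherwise issue a commit.
        if getattr(driver.connection, "autocommit", False):
            return
        if driver.driver_features.get("autocommit", False):
            return
        driver.commit()

    safe_commit(StubDriver(autocommit=True))   # no commit issued
    safe_commit(StubDriver(autocommit=False))  # one commit issued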
sqlspec/protocols.py CHANGED
@@ -1,7 +1,7 @@
-"""Runtime-checkable protocols for SQLSpec to replace duck typing.
+"""Runtime-checkable protocols for type safety and runtime checks.
 
 This module provides protocols that can be used for static type checking
-and runtime isinstance() checks, replacing defensive hasattr() patterns.
+and runtime isinstance() checks.
 """
 
 from typing import TYPE_CHECKING, Any, ClassVar, Optional, Protocol, Union, runtime_checkable
@@ -192,10 +192,7 @@ class ObjectStoreItemProtocol(Protocol):
 
 @runtime_checkable
 class ObjectStoreProtocol(Protocol):
-    """Protocol for object storage operations.
-
-    Defines the interface for storage backends with both sync and async operations.
-    """
+    """Protocol for object storage operations."""
 
     capabilities: ClassVar["StorageCapabilities"]
 
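The protocols module keeps its core point: the same Protocol classes serve static type checking and runtime isinstance() checks. A generic illustration of the runtime_checkable mechanism (the protocol below is invented for the example; sqlspec's real protocols such as ObjectStoreProtocol define storage members):

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class SupportsCommit(Protocol):
        """Illustrative protocol, not part of sqlspec."""

        def commit(self) -> None: ...

    class InMemoryDriver:
        def commit(self) -> None:
            print("committed")

    # runtime_checkable enables structural isinstance() checks against the protocol.
    print(isinstance(InMemoryDriver(), SupportsCommit))  # True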
sqlspec/storage/__init__.py CHANGED
@@ -1,10 +1,10 @@
 """Storage abstraction layer for SQLSpec.
 
-Provides a flexible storage system with:
+Provides a storage system with:
 - Multiple backend support (local, fsspec, obstore)
-- Lazy loading and configuration-based registration
-- URI scheme-based automatic backend resolution
-- Key-based named storage configurations
+- Configuration-based registration
+- URI scheme-based backend resolution
+- Named storage configurations
 - Capability-based backend selection
 """
 
sqlspec/storage/backends/fsspec.py CHANGED
@@ -32,7 +32,7 @@ class _ArrowStreamer:
         return self
 
     async def _initialize(self) -> None:
-        """Initialize the paths iterator."""
+        """Initialize paths iterator."""
         if self.paths_iterator is None:
             paths = await async_(self.backend.glob)(self.pattern, **self.kwargs)
             self.paths_iterator = iter(paths)
@@ -59,9 +59,8 @@ class _ArrowStreamer:
 class FSSpecBackend(ObjectStoreBase):
     """Storage backend using fsspec.
 
-    Implements the ObjectStoreProtocol using fsspec,
-    providing support for various protocols including HTTP, HTTPS, FTP,
-    and cloud storage services.
+    Implements the ObjectStoreProtocol using fsspec for various protocols
+    including HTTP, HTTPS, FTP, and cloud storage services.
     """
 
     _default_capabilities: ClassVar[StorageCapabilities] = StorageCapabilities(
@@ -116,7 +115,7 @@ class FSSpecBackend(ObjectStoreBase):
         return path_str
 
     def _detect_capabilities(self) -> StorageCapabilities:
-        """Detect capabilities based on underlying filesystem protocol."""
+        """Detect capabilities based on filesystem protocol."""
         protocol = self.protocol.lower()
 
         if protocol in {"s3", "s3a", "s3n"}:
@@ -138,7 +137,7 @@
 
     @property
     def capabilities(self) -> StorageCapabilities:
-        """Return instance-specific capabilities based on detected protocol."""
+        """Return capabilities based on detected protocol."""
         return getattr(self, "_instance_capabilities", self.__class__._default_capabilities)
 
     @classmethod
sqlspec/storage/backends/obstore.py CHANGED
@@ -1,8 +1,7 @@
 """Object storage backend using obstore.
 
-Implements the ObjectStoreProtocol using obstore,
-providing native support for S3, GCS, Azure, and local file storage
-with Arrow support.
+Implements the ObjectStoreProtocol using obstore for S3, GCS, Azure,
+and local file storage with Arrow support.
 """
 
 from __future__ import annotations
@@ -56,11 +55,11 @@ DEFAULT_OPTIONS: Final[dict[str, Any]] = {"connect_timeout": "30s", "request_tim
 class ObStoreBackend(ObjectStoreBase, HasStorageCapabilities):
     """Object storage backend using obstore.
 
-    Uses obstore's Rust-based implementation for storage operations,
-    providing native support for AWS S3, Google Cloud Storage, Azure Blob Storage,
+    Uses obstore's Rust-based implementation for storage operations.
+    Supports AWS S3, Google Cloud Storage, Azure Blob Storage,
     local filesystem, and HTTP endpoints.
 
-    Includes native Arrow support.
+    Includes Arrow support.
     """
 
     capabilities: ClassVar[StorageCapabilities] = StorageCapabilities(
@@ -203,7 +202,7 @@ class ObStoreBackend(ObjectStoreBase, HasStorageCapabilities):
         raise StorageOperationFailedError(msg) from exc
 
     def glob(self, pattern: str, **kwargs: Any) -> list[str]:
-        """Find objects matching pattern using obstore.
+        """Find objects matching pattern.
 
         Lists all objects and filters them client-side using the pattern.
         """
@@ -330,7 +329,7 @@ class ObStoreBackend(ObjectStoreBase, HasStorageCapabilities):
         raise StorageOperationFailedError(msg) from exc
 
     def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch]:
-        """Stream Arrow record batches using obstore.
+        """Stream Arrow record batches.
 
         Yields:
             Iterator of Arrow record batches from matching objects.
sqlspec/storage/registry.py CHANGED
@@ -1,8 +1,8 @@
-"""Unified Storage Registry for ObjectStore backends.
+"""Storage Registry for ObjectStore backends.
 
-Provides a flexible, lazy-loading storage registry that supports URI-first access
+Provides a storage registry that supports URI-first access
 pattern with automatic backend detection, ObStore preferred with FSSpec fallback,
-intelligent scheme-based routing, and named aliases for common configurations.
+scheme-based routing, and named aliases for common configurations.
 """
 
 import logging
sqlspec/utils/__init__.py CHANGED
@@ -1,3 +1,3 @@
-from sqlspec.utils import deprecation, fixtures, module_loader, singleton, sync_tools, text, type_guards
+from sqlspec.utils import deprecation, fixtures, logging, module_loader, singleton, sync_tools, text, type_guards
 
-__all__ = ("deprecation", "fixtures", "module_loader", "singleton", "sync_tools", "text", "type_guards")
+__all__ = ("deprecation", "fixtures", "logging", "module_loader", "singleton", "sync_tools", "text", "type_guards")
sqlspec/utils/logging.py CHANGED
@@ -5,23 +5,19 @@ Users should configure their own logging handlers and levels as needed.
 SQLSpec provides StructuredFormatter for JSON-formatted logs if desired.
 """
 
-from __future__ import annotations
-
 import logging
 from contextvars import ContextVar
-from typing import TYPE_CHECKING, Any
+from logging import LogRecord
+from typing import Any, Optional
 
 from sqlspec._serialization import encode_json
 
-if TYPE_CHECKING:
-    from logging import LogRecord
-
 __all__ = ("StructuredFormatter", "correlation_id_var", "get_correlation_id", "get_logger", "set_correlation_id")
 
-correlation_id_var: ContextVar[str | None] = ContextVar("correlation_id", default=None)
+correlation_id_var: "ContextVar[Optional[str]]" = ContextVar("correlation_id", default=None)
 
 
-def set_correlation_id(correlation_id: str | None) -> None:
+def set_correlation_id(correlation_id: "Optional[str]") -> None:
     """Set the correlation ID for the current context.
 
     Args:
@@ -30,7 +26,7 @@ def set_correlation_id(correlation_id: str | None) -> None:
     correlation_id_var.set(correlation_id)
 
 
-def get_correlation_id() -> str | None:
+def get_correlation_id() -> "Optional[str]":
     """Get the current correlation ID.
 
     Returns:
@@ -90,7 +86,7 @@
         return True
 
 
-def get_logger(name: str | None = None) -> logging.Logger:
+def get_logger(name: "Optional[str]" = None) -> logging.Logger:
     """Get a logger instance with standardized configuration.
 
     Args:
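The logging hunks above drop the postponed-annotations import and move to quoted Optional[...] annotations, but the public surface stays the same: get_logger, set_correlation_id, get_correlation_id, and correlation_id_var. A small usage sketch based on those names (handler setup, any logger-name prefixing, and filter wiring are left to the application, as the module docstring notes):

    from sqlspec.utils.logging import get_correlation_id, get_logger, set_correlation_id

    logger = get_logger("myapp.requests")

    set_correlation_id("req-12345")          # tag the current context
    assert get_correlation_id() == "req-12345"
    logger.info("handling request")          # CorrelationIDFilter can attach the ID to records

    set_correlation_id(None)                 # clear it when the unit of work ends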
sqlspec/utils/sync_tools.py CHANGED
@@ -1,3 +1,10 @@
+"""Utilities for async/sync interoperability in SQLSpec.
+
+This module provides utilities for converting between async and sync functions,
+managing concurrency limits, and handling context managers. Used primarily
+for adapter implementations that need to support both sync and async patterns.
+"""
+
 import asyncio
 import functools
 import inspect
@@ -26,7 +33,15 @@ class CapacityLimiter:
     """Limits the number of concurrent operations using a semaphore."""
 
     def __init__(self, total_tokens: int) -> None:
-        self._semaphore = asyncio.Semaphore(total_tokens)
+        self._total_tokens = total_tokens
+        self._semaphore_instance: Optional[asyncio.Semaphore] = None
+
+    @property
+    def _semaphore(self) -> asyncio.Semaphore:
+        """Lazy initialization of asyncio.Semaphore for Python 3.9 compatibility."""
+        if self._semaphore_instance is None:
+            self._semaphore_instance = asyncio.Semaphore(self._total_tokens)
+        return self._semaphore_instance
 
     async def acquire(self) -> None:
         await self._semaphore.acquire()
@@ -36,11 +51,12 @@
 
     @property
     def total_tokens(self) -> int:
-        return self._semaphore._value
+        return self._total_tokens
 
     @total_tokens.setter
    def total_tokens(self, value: int) -> None:
-        self._semaphore = asyncio.Semaphore(value)
+        self._total_tokens = value
+        self._semaphore_instance = None
 
     async def __aenter__(self) -> None:
         await self.acquire()
@@ -76,7 +92,14 @@ def run_(async_function: "Callable[ParamSpecT, Coroutine[Any, Any, ReturnT]]") -
            loop = None
 
        if loop is not None:
-            return asyncio.run(partial_f())
+            if loop.is_running():
+                import concurrent.futures
+
+                with concurrent.futures.ThreadPoolExecutor() as executor:
+                    future = executor.submit(asyncio.run, partial_f())
+                    return future.result()
+            else:
+                return asyncio.run(partial_f())
        if uvloop and sys.platform != "win32":
            uvloop.install()  # pyright: ignore[reportUnknownMemberType]
        return asyncio.run(partial_f())
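The last hunk changes how run_ behaves when it is invoked while an event loop is already running in the current thread: instead of calling asyncio.run directly (which would raise), the coroutine is handed to a worker thread that runs its own loop. A standalone sketch of the same pattern (run_sync and fetch_value are illustrative names, not sqlspec API):

    import asyncio
    import concurrent.futures

    async def fetch_value() -> int:
        await asyncio.sleep(0)
        return 42

    def run_sync(coro_factory) -> int:
        # If a loop is already running in this thread, run the coroutine on a
        # worker thread with its own event loop; otherwise asyncio.run is safe.
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = None

        if loop is not None and loop.is_running():
            with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
                return executor.submit(asyncio.run, coro_factory()).result()
        return asyncio.run(coro_factory())

    print(run_sync(fetch_value))  # 42, with or without an active event loop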
sqlspec/utils/text.py CHANGED
@@ -1,4 +1,9 @@
-"""General utility functions."""
+"""Text processing utilities for SQLSpec.
+
+Provides functions for string manipulation including case conversion,
+slugification, and email validation. Used primarily for identifier
+generation and data validation.
+"""
 
 import re
 import unicodedata
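The new text module docstring describes case conversion, slugification, and email validation helpers. As a rough idea of what slugifying an identifier involves (a generic sketch, not sqlspec's implementation):

    import re
    import unicodedata

    def make_slug(value: str, separator: str = "-") -> str:
        # Normalize accents away, lowercase, and collapse non-alphanumeric runs.
        value = unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii")
        value = re.sub(r"[^a-z0-9]+", separator, value.lower())
        return value.strip(separator)

    print(make_slug("Déjà Vu: Report #2"))  # deja-vu-report-2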
{sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlspec
-Version: 0.17.1
+Version: 0.19.0
 Summary: SQL Experiments in Python
 Project-URL: Discord, https://discord.gg/litestar
 Project-URL: Issue, https://github.com/litestar-org/sqlspec/issues/