sqlspec 0.12.0__tar.gz → 0.12.2__tar.gz

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.

Potentially problematic release.

This version of sqlspec might be problematic.

Files changed (353)
  1. {sqlspec-0.12.0 → sqlspec-0.12.2}/.pre-commit-config.yaml +1 -1
  2. {sqlspec-0.12.0 → sqlspec-0.12.2}/PKG-INFO +1 -1
  3. {sqlspec-0.12.0 → sqlspec-0.12.2}/pyproject.toml +2 -2
  4. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/aiosqlite/driver.py +16 -11
  5. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/bigquery/driver.py +113 -21
  6. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/duckdb/driver.py +18 -13
  7. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/psycopg/config.py +55 -54
  8. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/psycopg/driver.py +82 -1
  9. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/sqlite/driver.py +50 -10
  10. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/mixins/_storage.py +83 -36
  11. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/loader.py +8 -30
  12. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/base.py +3 -1
  13. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/ddl.py +14 -1
  14. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/analyzers/_analyzer.py +1 -5
  15. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +56 -2
  16. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/sql.py +40 -6
  17. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/backends/fsspec.py +29 -27
  18. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/backends/obstore.py +55 -34
  19. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/protocol.py +28 -25
  20. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_data_types.py +5 -4
  21. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/test_execute_many.py +0 -1
  22. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_bigquery/test_arrow_functionality.py +0 -1
  23. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_oracledb/test_driver_sync.py +65 -2
  24. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psqlpy/test_arrow_functionality.py +9 -6
  25. sqlspec-0.12.2/tests/integration/test_adapters/test_psycopg/conftest.py +84 -0
  26. sqlspec-0.12.2/tests/integration/test_adapters/test_psycopg/test_async_copy.py +106 -0
  27. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psycopg/test_driver.py +3 -10
  28. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_dialect_propagation.py +36 -5
  29. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_storage/test_driver_storage_integration.py +61 -17
  30. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_storage/test_storage_mixins.py +150 -18
  31. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_asyncmy/test_driver.py +0 -28
  32. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_psqlpy/test_driver.py +0 -38
  33. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_sqlite/test_driver.py +17 -9
  34. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_base.py +20 -4
  35. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_loader.py +90 -28
  36. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_base.py +7 -1
  37. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_builder_mixins.py +2 -2
  38. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_delete.py +3 -17
  39. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_update.py +3 -11
  40. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_parameter_preservation.py +2 -3
  41. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_analyzer_subquery_detection.py +8 -11
  42. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_transformers_literal_parameterizer.py +44 -5
  43. sqlspec-0.12.2/tests/unit/test_statement/test_pipelines/test_transformers_literal_parameterizer_cte.py +196 -0
  44. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_validators_security.py +12 -2
  45. sqlspec-0.12.2/tests/unit/test_statement/test_sql_as_many.py +102 -0
  46. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_storage/test_backends/test_fsspec_backend.py +139 -0
  47. sqlspec-0.12.2/tests/unit/test_storage/test_backends/test_obstore_backend.py +1164 -0
  48. {sqlspec-0.12.0 → sqlspec-0.12.2}/tools/sphinx_ext/missing_references.py +2 -2
  49. {sqlspec-0.12.0 → sqlspec-0.12.2}/uv.lock +439 -442
  50. sqlspec-0.12.0/tests/unit/test_storage/test_backends/test_obstore_backend.py +0 -671
  51. {sqlspec-0.12.0 → sqlspec-0.12.2}/.gitignore +0 -0
  52. {sqlspec-0.12.0 → sqlspec-0.12.2}/CONTRIBUTING.rst +0 -0
  53. {sqlspec-0.12.0 → sqlspec-0.12.2}/LICENSE +0 -0
  54. {sqlspec-0.12.0 → sqlspec-0.12.2}/Makefile +0 -0
  55. {sqlspec-0.12.0 → sqlspec-0.12.2}/NOTICE +0 -0
  56. {sqlspec-0.12.0 → sqlspec-0.12.2}/README.md +0 -0
  57. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/__init__.py +0 -0
  58. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/__metadata__.py +0 -0
  59. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/_serialization.py +0 -0
  60. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/_sql.py +0 -0
  61. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/_typing.py +0 -0
  62. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/__init__.py +0 -0
  63. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/adbc/__init__.py +0 -0
  64. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/adbc/config.py +0 -0
  65. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/adbc/driver.py +0 -0
  66. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/aiosqlite/__init__.py +0 -0
  67. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/aiosqlite/config.py +0 -0
  68. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/asyncmy/__init__.py +0 -0
  69. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/asyncmy/config.py +0 -0
  70. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/asyncmy/driver.py +0 -0
  71. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/asyncpg/__init__.py +0 -0
  72. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/asyncpg/config.py +0 -0
  73. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/asyncpg/driver.py +0 -0
  74. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/bigquery/__init__.py +0 -0
  75. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/bigquery/config.py +0 -0
  76. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/duckdb/__init__.py +0 -0
  77. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/duckdb/config.py +0 -0
  78. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/oracledb/__init__.py +0 -0
  79. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/oracledb/config.py +0 -0
  80. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/oracledb/driver.py +0 -0
  81. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/psqlpy/__init__.py +0 -0
  82. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/psqlpy/config.py +0 -0
  83. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/psqlpy/driver.py +0 -0
  84. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/psycopg/__init__.py +0 -0
  85. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/sqlite/__init__.py +0 -0
  86. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/adapters/sqlite/config.py +0 -0
  87. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/base.py +0 -0
  88. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/config.py +0 -0
  89. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/__init__.py +0 -0
  90. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/_async.py +0 -0
  91. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/_common.py +0 -0
  92. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/_sync.py +0 -0
  93. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/mixins/__init__.py +0 -0
  94. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/mixins/_pipeline.py +0 -0
  95. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/mixins/_result_utils.py +0 -0
  96. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/mixins/_sql_translator.py +0 -0
  97. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/driver/mixins/_type_coercion.py +0 -0
  98. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/exceptions.py +0 -0
  99. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/__init__.py +0 -0
  100. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/aiosql/__init__.py +0 -0
  101. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/aiosql/adapter.py +0 -0
  102. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/litestar/__init__.py +0 -0
  103. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/litestar/_utils.py +0 -0
  104. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/litestar/config.py +0 -0
  105. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/litestar/handlers.py +0 -0
  106. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/litestar/plugin.py +0 -0
  107. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/extensions/litestar/providers.py +0 -0
  108. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/py.typed +0 -0
  109. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/service/__init__.py +0 -0
  110. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/service/base.py +0 -0
  111. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/service/pagination.py +0 -0
  112. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/__init__.py +0 -0
  113. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/__init__.py +0 -0
  114. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/_ddl_utils.py +0 -0
  115. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/_parsing_utils.py +0 -0
  116. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/delete.py +0 -0
  117. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/insert.py +0 -0
  118. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/merge.py +0 -0
  119. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/__init__.py +0 -0
  120. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_aggregate_functions.py +0 -0
  121. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_case_builder.py +0 -0
  122. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_common_table_expr.py +0 -0
  123. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_delete_from.py +0 -0
  124. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_from.py +0 -0
  125. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_group_by.py +0 -0
  126. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_having.py +0 -0
  127. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_insert_from_select.py +0 -0
  128. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_insert_into.py +0 -0
  129. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_insert_values.py +0 -0
  130. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_join.py +0 -0
  131. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_limit_offset.py +0 -0
  132. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_merge_clauses.py +0 -0
  133. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_order_by.py +0 -0
  134. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_pivot.py +0 -0
  135. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_returning.py +0 -0
  136. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_select_columns.py +0 -0
  137. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_set_ops.py +0 -0
  138. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_unpivot.py +0 -0
  139. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_update_from.py +0 -0
  140. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_update_set.py +0 -0
  141. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_update_table.py +0 -0
  142. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_where.py +0 -0
  143. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/mixins/_window_functions.py +0 -0
  144. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/protocols.py +0 -0
  145. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/select.py +0 -0
  146. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/builder/update.py +0 -0
  147. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/filters.py +0 -0
  148. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/parameters.py +0 -0
  149. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/__init__.py +0 -0
  150. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/analyzers/__init__.py +0 -0
  151. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/base.py +0 -0
  152. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/context.py +0 -0
  153. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/result_types.py +0 -0
  154. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/transformers/__init__.py +0 -0
  155. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/transformers/_expression_simplifier.py +0 -0
  156. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/transformers/_remove_comments.py +0 -0
  157. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/transformers/_remove_hints.py +0 -0
  158. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/validators/__init__.py +0 -0
  159. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/validators/_dml_safety.py +0 -0
  160. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/validators/_parameter_style.py +0 -0
  161. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/validators/_performance.py +0 -0
  162. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/validators/_security.py +0 -0
  163. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/pipelines/validators/base.py +0 -0
  164. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/result.py +0 -0
  165. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/statement/splitter.py +0 -0
  166. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/__init__.py +0 -0
  167. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/backends/__init__.py +0 -0
  168. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/backends/base.py +0 -0
  169. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/storage/registry.py +0 -0
  170. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/typing.py +0 -0
  171. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/__init__.py +0 -0
  172. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/correlation.py +0 -0
  173. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/deprecation.py +0 -0
  174. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/fixtures.py +0 -0
  175. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/logging.py +0 -0
  176. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/module_loader.py +0 -0
  177. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/serializers.py +0 -0
  178. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/singleton.py +0 -0
  179. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/sync_tools.py +0 -0
  180. {sqlspec-0.12.0 → sqlspec-0.12.2}/sqlspec/utils/text.py +0 -0
  181. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/__init__.py +0 -0
  182. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/conftest.py +0 -0
  183. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/__init__.py +0 -0
  184. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/ddls-mysql-collection.sql +0 -0
  185. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/ddls-postgres-collection.sql +0 -0
  186. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/example_usage.py +0 -0
  187. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/init.sql +0 -0
  188. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-config.sql +0 -0
  189. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-data_types.sql +0 -0
  190. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-database_details.sql +0 -0
  191. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-engines.sql +0 -0
  192. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-hostname.sql +0 -0
  193. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-plugins.sql +0 -0
  194. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-process_list.sql +0 -0
  195. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-resource-groups.sql +0 -0
  196. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-schema_objects.sql +0 -0
  197. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-table_details.sql +0 -0
  198. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/collection-users.sql +0 -0
  199. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/mysql/init.sql +0 -0
  200. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/oracle.ddl.sql +0 -0
  201. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-applications.sql +0 -0
  202. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-aws_extension_dependency.sql +0 -0
  203. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-aws_oracle_exists.sql +0 -0
  204. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-bg_writer_stats.sql +0 -0
  205. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-calculated_metrics.sql +0 -0
  206. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-data_types.sql +0 -0
  207. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-database_details.sql +0 -0
  208. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-extensions.sql +0 -0
  209. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-index_details.sql +0 -0
  210. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-pglogical-details.sql +0 -0
  211. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-privileges.sql +0 -0
  212. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-replication_slots.sql +0 -0
  213. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-replication_stats.sql +0 -0
  214. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-schema_details.sql +0 -0
  215. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-schema_objects.sql +0 -0
  216. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-settings.sql +0 -0
  217. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-source_details.sql +0 -0
  218. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/collection-table_details.sql +0 -0
  219. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/extended-collection-all-databases.sql +0 -0
  220. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/postgres/init.sql +0 -0
  221. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/readiness-check.sql +0 -0
  222. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/fixtures/sql_utils.py +0 -0
  223. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/__init__.py +0 -0
  224. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/__init__.py +0 -0
  225. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/__init__.py +0 -0
  226. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/conftest.py +0 -0
  227. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_arrow_functionality.py +0 -0
  228. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_bigquery_driver.py +0 -0
  229. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_connection.py +0 -0
  230. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_driver.py +0 -0
  231. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_duckdb_driver.py +0 -0
  232. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_execute_many.py +0 -0
  233. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_execute_script.py +0 -0
  234. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_parameter_styles.py +0 -0
  235. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_postgres_driver.py +0 -0
  236. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_returning.py +0 -0
  237. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_adbc/test_sqlite_driver.py +0 -0
  238. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_aiosqlite/__init__.py +0 -0
  239. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_aiosqlite/test_driver.py +0 -0
  240. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncmy/__init__.py +0 -0
  241. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncmy/test_config.py +0 -0
  242. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/__init__.py +0 -0
  243. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/conftest.py +0 -0
  244. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/test_arrow_functionality.py +0 -0
  245. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/test_connection.py +0 -0
  246. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/test_driver.py +0 -0
  247. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_asyncpg/test_parameter_styles.py +0 -0
  248. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_bigquery/__init__.py +0 -0
  249. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_bigquery/conftest.py +0 -0
  250. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_bigquery/test_connection.py +0 -0
  251. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_bigquery/test_driver.py +0 -0
  252. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_duckdb/__init__.py +0 -0
  253. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_duckdb/test_arrow_functionality.py +0 -0
  254. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_duckdb/test_connection.py +0 -0
  255. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_duckdb/test_driver.py +0 -0
  256. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_duckdb/test_execute_many.py +0 -0
  257. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_duckdb/test_parameter_styles.py +0 -0
  258. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_oracledb/__init__.py +0 -0
  259. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_oracledb/test_connection.py +0 -0
  260. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_oracledb/test_driver_async.py +0 -0
  261. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psqlpy/__init__.py +0 -0
  262. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psqlpy/test_connection.py +0 -0
  263. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psqlpy/test_driver.py +0 -0
  264. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psycopg/__init__.py +0 -0
  265. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psycopg/test_connection.py +0 -0
  266. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psycopg/test_execute_many.py +0 -0
  267. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_psycopg/test_parameter_styles.py +0 -0
  268. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_sqlite/__init__.py +0 -0
  269. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_adapters/test_sqlite/test_driver.py +0 -0
  270. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_extensions/__init__.py +0 -0
  271. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_extensions/test_aiosql/__init__.py +0 -0
  272. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_extensions/test_litestar/__init__.py +0 -0
  273. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_sql_file_loader.py +0 -0
  274. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_storage/__init__.py +0 -0
  275. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/integration/test_storage/test_end_to_end_workflows.py +0 -0
  276. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/__init__.py +0 -0
  277. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/__init__.py +0 -0
  278. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_adbc/__init__.py +0 -0
  279. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_adbc/test_config.py +0 -0
  280. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_adbc/test_driver.py +0 -0
  281. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_aiosqlite/__init__.py +0 -0
  282. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_aiosqlite/test_config.py +0 -0
  283. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_aiosqlite/test_driver.py +0 -0
  284. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_asyncmy/__init__.py +0 -0
  285. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_asyncpg/__init__.py +0 -0
  286. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_asyncpg/test_config.py +0 -0
  287. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_asyncpg/test_driver.py +0 -0
  288. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_bigquery/__init__.py +0 -0
  289. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_bigquery/test_config.py +0 -0
  290. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_bigquery/test_driver.py +0 -0
  291. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_duckdb/__init__.py +0 -0
  292. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_duckdb/test_config.py +0 -0
  293. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_duckdb/test_driver.py +0 -0
  294. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_oracledb/__init__.py +0 -0
  295. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_oracledb/test_config.py +0 -0
  296. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_oracledb/test_driver.py +0 -0
  297. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_psqlpy/__init__.py +0 -0
  298. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_psqlpy/test_config.py +0 -0
  299. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_psycopg/__init__.py +0 -0
  300. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_psycopg/test_config.py +0 -0
  301. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_psycopg/test_driver.py +0 -0
  302. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_sqlite/__init__.py +0 -0
  303. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_adapters/test_sqlite/test_config.py +0 -0
  304. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_config.py +0 -0
  305. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_config_dialect.py +0 -0
  306. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_driver.py +0 -0
  307. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_exceptions.py +0 -0
  308. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_extensions/__init__.py +0 -0
  309. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_extensions/test_aiosql/test_adapter.py +0 -0
  310. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/__init__.py +0 -0
  311. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_base.py +0 -0
  312. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/__init__.py +0 -0
  313. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_insert.py +0 -0
  314. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_merge.py +0 -0
  315. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_builder/test_select.py +0 -0
  316. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_config.py +0 -0
  317. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_filters.py +0 -0
  318. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_mixins.py +0 -0
  319. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_oracle_numeric_parameters.py +0 -0
  320. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_parameter_normalization.py +0 -0
  321. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_parameters.py +0 -0
  322. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/__init__.py +0 -0
  323. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_analyzer.py +0 -0
  324. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_base.py +0 -0
  325. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_expression_simplifier.py +0 -0
  326. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_transformers_remove_comments.py +0 -0
  327. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_validators_dml_safety.py +0 -0
  328. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_validators_parameter_style.py +0 -0
  329. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_pipelines/test_validators_performance.py +0 -0
  330. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_result.py +0 -0
  331. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_splitter.py +0 -0
  332. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_sql.py +0 -0
  333. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_sql_translator_mixin.py +0 -0
  334. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_sqlfactory.py +0 -0
  335. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_storage.py +0 -0
  336. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_statement/test_transformers/test_expression_simplifier_parameter_tracking.py +0 -0
  337. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_storage/__init__.py +0 -0
  338. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_storage/test_backends/__init__.py +0 -0
  339. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_storage/test_base.py +0 -0
  340. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_storage/test_registry.py +0 -0
  341. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_typing.py +0 -0
  342. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/__init__.py +0 -0
  343. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/test_deprecation.py +0 -0
  344. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/test_fixtures.py +0 -0
  345. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/test_module_loader.py +0 -0
  346. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/test_singleton.py +0 -0
  347. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/test_sync_tools.py +0 -0
  348. {sqlspec-0.12.0 → sqlspec-0.12.2}/tests/unit/test_utils/test_text.py +0 -0
  349. {sqlspec-0.12.0 → sqlspec-0.12.2}/tools/__init__.py +0 -0
  350. {sqlspec-0.12.0 → sqlspec-0.12.2}/tools/build_docs.py +0 -0
  351. {sqlspec-0.12.0 → sqlspec-0.12.2}/tools/pypi_readme.py +0 -0
  352. {sqlspec-0.12.0 → sqlspec-0.12.2}/tools/sphinx_ext/__init__.py +0 -0
  353. {sqlspec-0.12.0 → sqlspec-0.12.2}/tools/sphinx_ext/changelog.py +0 -0
@@ -17,7 +17,7 @@ repos:
       - id: mixed-line-ending
       - id: trailing-whitespace
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: "v0.12.0"
+    rev: "v0.12.1"
     hooks:
       - id: ruff
         args: ["--fix"]
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlspec
-Version: 0.12.0
+Version: 0.12.2
 Summary: SQL Experiments in Python
 Project-URL: Discord, https://discord.gg/litestar
 Project-URL: Issue, https://github.com/litestar-org/sqlspec/issues/
@@ -7,7 +7,7 @@ maintainers = [{ name = "Litestar Developers", email = "hello@litestar.dev" }]
 name = "sqlspec"
 readme = "README.md"
 requires-python = ">=3.9, <4.0"
-version = "0.12.0"
+version = "0.12.2"
 
 [project.urls]
 Discord = "https://discord.gg/litestar"
@@ -126,7 +126,7 @@ packages = ["sqlspec"]
 allow_dirty = true
 commit = false
 commit_args = "--no-verify"
-current_version = "0.12.0"
+current_version = "0.12.2"
 ignore_missing_files = false
 ignore_missing_version = false
 message = "chore(release): bump to v{new_version}"
@@ -203,8 +203,7 @@ class AiosqliteDriver(
         return result
 
     async def _bulk_load_file(self, file_path: Path, table_name: str, format: str, mode: str, **options: Any) -> int:
-        """Database-specific bulk load implementation."""
-        # TODO: convert this to use the storage backend. it has async support
+        """Database-specific bulk load implementation using storage backend."""
         if format != "csv":
             msg = f"aiosqlite driver only supports CSV for bulk loading, not {format}."
             raise NotImplementedError(msg)
@@ -215,15 +214,21 @@ class AiosqliteDriver(
             if mode == "replace":
                 await cursor.execute(f"DELETE FROM {table_name}")
 
-            # Using sync file IO here as it's a fallback path and aiofiles is not a dependency
-            with Path(file_path).open(encoding="utf-8") as f:  # noqa: ASYNC230
-                reader = csv.reader(f, **options)
-                header = next(reader)  # Skip header
-                placeholders = ", ".join("?" for _ in header)
-                sql = f"INSERT INTO {table_name} VALUES ({placeholders})"
-                data_iter = list(reader)
-                await cursor.executemany(sql, data_iter)
-                rowcount = cursor.rowcount
+            # Use async storage backend to read the file
+            file_path_str = str(file_path)
+            backend = self._get_storage_backend(file_path_str)
+            content = await backend.read_text_async(file_path_str, encoding="utf-8")
+            # Parse CSV content
+            import io
+
+            csv_file = io.StringIO(content)
+            reader = csv.reader(csv_file, **options)
+            header = next(reader)  # Skip header
+            placeholders = ", ".join("?" for _ in header)
+            sql = f"INSERT INTO {table_name} VALUES ({placeholders})"
+            data_iter = list(reader)
+            await cursor.executemany(sql, data_iter)
+            rowcount = cursor.rowcount
             await conn.commit()
             return rowcount
         finally:
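
As an illustrative aside (not part of the released diff), the pattern the new aiosqlite bulk-load path uses — parse the CSV text in memory, then batch the rows through executemany — can be reproduced with the standard library alone. The table name and sample data below are made up:

import csv
import io
import sqlite3

def bulk_load_csv(conn: sqlite3.Connection, table_name: str, csv_text: str) -> int:
    # Parse the CSV from an in-memory string, mirroring the io.StringIO approach above
    reader = csv.reader(io.StringIO(csv_text))
    header = next(reader)  # Skip the header row
    placeholders = ", ".join("?" for _ in header)
    cursor = conn.cursor()
    cursor.executemany(f"INSERT INTO {table_name} VALUES ({placeholders})", list(reader))
    conn.commit()
    return cursor.rowcount

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (id INTEGER, name TEXT)")
print(bulk_load_csv(conn, "users", "id,name\n1,Ada\n2,Grace\n"))  # -> 2
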
@@ -1,6 +1,8 @@
+import contextlib
 import datetime
 import io
 import logging
+import uuid
 from collections.abc import Iterator
 from decimal import Decimal
 from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union, cast
@@ -8,10 +10,12 @@ from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union, cast
 from google.cloud.bigquery import (
     ArrayQueryParameter,
     Client,
+    ExtractJobConfig,
     LoadJobConfig,
     QueryJob,
     QueryJobConfig,
     ScalarQueryParameter,
+    SourceFormat,
     WriteDisposition,
 )
 from google.cloud.bigquery.table import Row as BigQueryRow
@@ -32,6 +36,8 @@ from sqlspec.typing import DictRow, ModelDTOT, RowT
 from sqlspec.utils.serializers import to_json
 
 if TYPE_CHECKING:
+    from pathlib import Path
+
     from sqlglot.dialects.dialect import DialectType
 
 
@@ -258,23 +264,17 @@ class BigQueryDriver(
             param_value,
             type(param_value),
         )
-        # Let BigQuery generate the job ID to avoid collisions
-        # This is the recommended approach for production code and works better with emulators
-        logger.warning("About to send to BigQuery - SQL: %r", sql_str)
-        logger.warning("Query parameters in job config: %r", final_job_config.query_parameters)
         query_job = conn.query(sql_str, job_config=final_job_config)
 
         # Get the auto-generated job ID for callbacks
         if self.on_job_start and query_job.job_id:
-            try:
+            with contextlib.suppress(Exception):
+                # Callback errors should not interfere with job execution
                 self.on_job_start(query_job.job_id)
-            except Exception as e:
-                logger.warning("Job start callback failed: %s", str(e), extra={"adapter": "bigquery"})
         if self.on_job_complete and query_job.job_id:
-            try:
+            with contextlib.suppress(Exception):
+                # Callback errors should not interfere with job execution
                 self.on_job_complete(query_job.job_id, query_job)
-            except Exception as e:
-                logger.warning("Job complete callback failed: %s", str(e), extra={"adapter": "bigquery"})
 
         return query_job
 
@@ -529,28 +529,120 @@ class BigQueryDriver(
     # BigQuery Native Export Support
     # ============================================================================
 
-    def _export_native(self, query: str, destination_uri: str, format: str, **options: Any) -> int:
-        """BigQuery native export implementation.
+    def _export_native(self, query: str, destination_uri: "Union[str, Path]", format: str, **options: Any) -> int:
+        """BigQuery native export implementation with automatic GCS staging.
 
-        For local files, BigQuery doesn't support direct export, so we raise NotImplementedError
-        to trigger the fallback mechanism that uses fetch + write.
+        For GCS URIs, uses direct export. For other locations, automatically stages
+        through a temporary GCS location and transfers to the final destination.
 
         Args:
             query: SQL query to execute
-            destination_uri: Destination URI (local file path or gs:// URI)
+            destination_uri: Destination URI (local file path, gs:// URI, or Path object)
             format: Export format (parquet, csv, json, avro)
-            **options: Additional export options
+            **options: Additional export options including 'gcs_staging_bucket'
 
         Returns:
            Number of rows exported
 
        Raises:
-            NotImplementedError: Always, to trigger fallback to fetch + write
+            NotImplementedError: If no staging bucket is configured for non-GCS destinations
        """
-        # BigQuery only supports native export to GCS, not local files
-        # By raising NotImplementedError, the mixin will fall back to fetch + write
-        msg = "BigQuery native export only supports GCS URIs, using fallback for local files"
-        raise NotImplementedError(msg)
+        destination_str = str(destination_uri)
+
+        # If it's already a GCS URI, use direct export
+        if destination_str.startswith("gs://"):
+            return self._export_to_gcs_native(query, destination_str, format, **options)
+
+        # For non-GCS destinations, check if staging is configured
+        staging_bucket = options.get("gcs_staging_bucket") or getattr(self.config, "gcs_staging_bucket", None)
+        if not staging_bucket:
+            # Fall back to fetch + write for non-GCS destinations without staging
+            msg = "BigQuery native export requires GCS staging bucket for non-GCS destinations"
+            raise NotImplementedError(msg)
+
+        # Generate temporary GCS path
+        from datetime import timezone
+
+        timestamp = datetime.datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
+        temp_filename = f"bigquery_export_{timestamp}_{uuid.uuid4().hex[:8]}.{format}"
+        temp_gcs_uri = f"gs://{staging_bucket}/temp_exports/{temp_filename}"
+
+        try:
+            # Export to temporary GCS location
+            rows_exported = self._export_to_gcs_native(query, temp_gcs_uri, format, **options)
+
+            # Transfer from GCS to final destination using storage backend
+            backend, path = self._resolve_backend_and_path(destination_str)
+            gcs_backend = self._get_storage_backend(temp_gcs_uri)
+
+            # Download from GCS and upload to final destination
+            data = gcs_backend.read_bytes(temp_gcs_uri)
+            backend.write_bytes(path, data)
+
+            return rows_exported
+        finally:
+            # Clean up temporary file
+            try:
+                gcs_backend = self._get_storage_backend(temp_gcs_uri)
+                gcs_backend.delete(temp_gcs_uri)
+            except Exception as e:
+                logger.warning("Failed to clean up temporary GCS file %s: %s", temp_gcs_uri, e)
+
+    def _export_to_gcs_native(self, query: str, gcs_uri: str, format: str, **options: Any) -> int:
+        """Direct BigQuery export to GCS.
+
+        Args:
+            query: SQL query to execute
+            gcs_uri: GCS destination URI (must start with gs://)
+            format: Export format (parquet, csv, json, avro)
+            **options: Additional export options
+
+        Returns:
+            Number of rows exported
+        """
+        # First, run the query and store results in a temporary table
+
+        temp_table_id = f"temp_export_{uuid.uuid4().hex[:8]}"
+        dataset_id = getattr(self.connection, "default_dataset", None) or options.get("dataset", "temp")
+
+        # Create a temporary table with query results
+        query_with_table = f"CREATE OR REPLACE TABLE `{dataset_id}.{temp_table_id}` AS {query}"
+        create_job = self._run_query_job(query_with_table, [])
+        create_job.result()
+
+        # Get row count
+        count_query = f"SELECT COUNT(*) as cnt FROM `{dataset_id}.{temp_table_id}`"
+        count_job = self._run_query_job(count_query, [])
+        count_result = list(count_job.result())
+        row_count = count_result[0]["cnt"] if count_result else 0
+
+        try:
+            # Configure extract job
+            extract_config = ExtractJobConfig(**options)  # type: ignore[no-untyped-call]
+
+            # Set format
+            format_mapping = {
+                "parquet": SourceFormat.PARQUET,
+                "csv": SourceFormat.CSV,
+                "json": SourceFormat.NEWLINE_DELIMITED_JSON,
+                "avro": SourceFormat.AVRO,
+            }
+            extract_config.destination_format = format_mapping.get(format, SourceFormat.PARQUET)
+
+            # Extract table to GCS
+            table_ref = self.connection.dataset(dataset_id).table(temp_table_id)
+            extract_job = self.connection.extract_table(table_ref, gcs_uri, job_config=extract_config)
+            extract_job.result()
+
+            return row_count
+        finally:
+            # Clean up temporary table
+            try:
+                delete_query = f"DROP TABLE IF EXISTS `{dataset_id}.{temp_table_id}`"
+                delete_job = self._run_query_job(delete_query, [])
+                delete_job.result()
+            except Exception as e:
+                logger.warning("Failed to clean up temporary table %s: %s", temp_table_id, e)
 
     # ============================================================================
     # BigQuery Native Arrow Support
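
For orientation only (not part of the released diff), the staged export above ultimately rests on BigQuery's extract-job API. A simplified sketch of that underlying google-cloud-bigquery call; the project, dataset, table, and bucket names are placeholders, not values taken from sqlspec:

from google.cloud import bigquery

client = bigquery.Client()  # assumes application default credentials
table_ref = client.dataset("analytics").table("temp_export_abc123")

# Ask BigQuery to write the table contents to GCS as Parquet
extract_config = bigquery.ExtractJobConfig()
extract_config.destination_format = bigquery.DestinationFormat.PARQUET
extract_job = client.extract_table(
    table_ref, "gs://my-staging-bucket/exports/data.parquet", job_config=extract_config
)
extract_job.result()  # Block until the extract job finishes
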
@@ -2,6 +2,7 @@ import contextlib
 import uuid
 from collections.abc import Generator
 from contextlib import contextmanager
+from pathlib import Path
 from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast
 
 from duckdb import DuckDBPyConnection
@@ -251,7 +252,7 @@ class DuckDBDriver(
             return True
         return False
 
-    def _export_native(self, query: str, destination_uri: str, format: str, **options: Any) -> int:
+    def _export_native(self, query: str, destination_uri: Union[str, Path], format: str, **options: Any) -> int:
         conn = self._connection(None)
         copy_options: list[str] = []
 
@@ -283,19 +284,21 @@
             raise ValueError(msg)
 
         options_str = f"({', '.join(copy_options)})" if copy_options else ""
-        copy_sql = f"COPY ({query}) TO '{destination_uri}' {options_str}"
+        copy_sql = f"COPY ({query}) TO '{destination_uri!s}' {options_str}"
         result_rel = conn.execute(copy_sql)
         result = result_rel.fetchone() if result_rel else None
         return result[0] if result else 0
 
-    def _import_native(self, source_uri: str, table_name: str, format: str, mode: str, **options: Any) -> int:
+    def _import_native(
+        self, source_uri: Union[str, Path], table_name: str, format: str, mode: str, **options: Any
+    ) -> int:
         conn = self._connection(None)
         if format == "parquet":
-            read_func = f"read_parquet('{source_uri}')"
+            read_func = f"read_parquet('{source_uri!s}')"
         elif format == "csv":
-            read_func = f"read_csv_auto('{source_uri}')"
+            read_func = f"read_csv_auto('{source_uri!s}')"
         elif format == "json":
-            read_func = f"read_json_auto('{source_uri}')"
+            read_func = f"read_json_auto('{source_uri!s}')"
         else:
             msg = f"Unsupported format for DuckDB native import: {format}"
             raise ValueError(msg)
@@ -320,16 +323,16 @@
         return int(count_result[0]) if count_result else 0
 
     def _read_parquet_native(
-        self, source_uri: str, columns: Optional[list[str]] = None, **options: Any
+        self, source_uri: Union[str, Path], columns: Optional[list[str]] = None, **options: Any
     ) -> "SQLResult[dict[str, Any]]":
         conn = self._connection(None)
         if isinstance(source_uri, list):
             file_list = "[" + ", ".join(f"'{f}'" for f in source_uri) + "]"
             read_func = f"read_parquet({file_list})"
-        elif "*" in source_uri or "?" in source_uri:
-            read_func = f"read_parquet('{source_uri}')"
+        elif "*" in str(source_uri) or "?" in str(source_uri):
+            read_func = f"read_parquet('{source_uri!s}')"
         else:
-            read_func = f"read_parquet('{source_uri}')"
+            read_func = f"read_parquet('{source_uri!s}')"
 
         column_list = ", ".join(columns) if columns else "*"
         query = f"SELECT {column_list} FROM {read_func}"
@@ -353,7 +356,9 @@
             statement=SQL(query), data=rows, column_names=column_names, rows_affected=num_rows, operation_type="SELECT"
         )
 
-    def _write_parquet_native(self, data: Union[str, "ArrowTable"], destination_uri: str, **options: Any) -> None:
+    def _write_parquet_native(
+        self, data: Union[str, "ArrowTable"], destination_uri: Union[str, Path], **options: Any
+    ) -> None:
         conn = self._connection(None)
         copy_options: list[str] = ["FORMAT PARQUET"]
         if "compression" in options:
@@ -364,13 +369,13 @@
         options_str = f"({', '.join(copy_options)})"
 
         if isinstance(data, str):
-            copy_sql = f"COPY ({data}) TO '{destination_uri}' {options_str}"
+            copy_sql = f"COPY ({data}) TO '{destination_uri!s}' {options_str}"
             conn.execute(copy_sql)
         else:
             temp_name = f"_arrow_data_{uuid.uuid4().hex[:8]}"
             conn.register(temp_name, data)
             try:
-                copy_sql = f"COPY {temp_name} TO '{destination_uri}' {options_str}"
+                copy_sql = f"COPY {temp_name} TO '{destination_uri!s}' {options_str}"
                 conn.execute(copy_sql)
             finally:
                 with contextlib.suppress(Exception):
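
For reference (not part of the released diff), the DuckDB changes above only adjust how the destination path is interpolated; the mechanism itself is DuckDB's COPY ... TO statement and read_parquet function. A small self-contained sketch, with an arbitrary output file name:

import duckdb

conn = duckdb.connect()
conn.execute("CREATE TABLE t AS SELECT * FROM range(5) AS r(i)")

# Export a query result to Parquet, the same COPY form the driver builds
conn.execute("COPY (SELECT i FROM t) TO 'out.parquet' (FORMAT PARQUET)")

# Read it back with read_parquet, as _read_parquet_native does
print(conn.execute("SELECT COUNT(*) FROM read_parquet('out.parquet')").fetchone())  # (5,)
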
@@ -304,7 +304,7 @@ class PsycopgSyncConfig(SyncDatabaseConfig[PsycopgSyncConnection, ConnectionPool
         if conninfo:
             # If conninfo is provided, use it directly
             # Don't pass kwargs when using conninfo string
-            pool = ConnectionPool(conninfo, **pool_params)
+            pool = ConnectionPool(conninfo, open=True, **pool_params)
         else:
             # Otherwise, pass connection parameters via kwargs
             # Remove any non-connection parameters
@@ -312,7 +312,7 @@ class PsycopgSyncConfig(SyncDatabaseConfig[PsycopgSyncConnection, ConnectionPool
             all_config.pop("row_factory", None)
             # Remove pool-specific settings that may have been left
             all_config.pop("kwargs", None)
-            pool = ConnectionPool("", kwargs=all_config, **pool_params)
+            pool = ConnectionPool("", kwargs=all_config, open=True, **pool_params)
 
         logger.info("Psycopg connection pool created successfully", extra={"adapter": "psycopg"})
     except Exception as e:
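
As an illustrative aside (not part of the released diff), the explicit open=True / open=False arguments added here avoid relying on the pool opening implicitly in the constructor, a behaviour newer psycopg_pool releases warn about. A minimal sketch of the two patterns with psycopg_pool; the connection string is a placeholder:

from psycopg_pool import AsyncConnectionPool, ConnectionPool

# Sync: open the pool immediately at construction time
pool = ConnectionPool("postgresql://app:secret@localhost/appdb", min_size=4, open=True)
with pool.connection() as conn:
    conn.execute("SELECT 1")
pool.close()

# Async: construct closed, then open explicitly inside the running event loop
async def main() -> None:
    apool = AsyncConnectionPool("postgresql://app:secret@localhost/appdb", open=False)
    await apool.open()
    async with apool.connection() as conn:
        await conn.execute("SELECT 1")
    await apool.close()
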
@@ -328,11 +328,19 @@ class PsycopgSyncConfig(SyncDatabaseConfig[PsycopgSyncConnection, ConnectionPool
         logger.info("Closing Psycopg connection pool", extra={"adapter": "psycopg"})
 
         try:
+            # Set a flag to prevent __del__ from running cleanup
+            # This avoids the "cannot join current thread" error during garbage collection
+            if hasattr(self.pool_instance, "_closed"):
+                self.pool_instance._closed = True
+
             self.pool_instance.close()
             logger.info("Psycopg connection pool closed successfully", extra={"adapter": "psycopg"})
         except Exception as e:
             logger.exception("Failed to close Psycopg connection pool", extra={"adapter": "psycopg", "error": str(e)})
             raise
+        finally:
+            # Clear the reference to help garbage collection
+            self.pool_instance = None
 
     def create_connection(self) -> "PsycopgSyncConnection":
         """Create a single connection (not from pool).
@@ -606,57 +614,50 @@ class PsycopgAsyncConfig(AsyncDatabaseConfig[PsycopgAsyncConnection, AsyncConnec
 
     async def _create_pool(self) -> "AsyncConnectionPool":
         """Create the actual async connection pool."""
-        logger.info("Creating async Psycopg connection pool", extra={"adapter": "psycopg"})
 
-        try:
-            # Get all config (creates a new dict)
-            all_config = self.pool_config_dict.copy()
+        # Get all config (creates a new dict)
+        all_config = self.pool_config_dict.copy()
+
+        # Separate pool-specific parameters that AsyncConnectionPool accepts directly
+        pool_params = {
+            "min_size": all_config.pop("min_size", 4),
+            "max_size": all_config.pop("max_size", None),
+            "name": all_config.pop("name", None),
+            "timeout": all_config.pop("timeout", 30.0),
+            "max_waiting": all_config.pop("max_waiting", 0),
+            "max_lifetime": all_config.pop("max_lifetime", 3600.0),
+            "max_idle": all_config.pop("max_idle", 600.0),
+            "reconnect_timeout": all_config.pop("reconnect_timeout", 300.0),
+            "num_workers": all_config.pop("num_workers", 3),
+        }
 
-            # Separate pool-specific parameters that AsyncConnectionPool accepts directly
-            pool_params = {
-                "min_size": all_config.pop("min_size", 4),
-                "max_size": all_config.pop("max_size", None),
-                "name": all_config.pop("name", None),
-                "timeout": all_config.pop("timeout", 30.0),
-                "max_waiting": all_config.pop("max_waiting", 0),
-                "max_lifetime": all_config.pop("max_lifetime", 3600.0),
-                "max_idle": all_config.pop("max_idle", 600.0),
-                "reconnect_timeout": all_config.pop("reconnect_timeout", 300.0),
-                "num_workers": all_config.pop("num_workers", 3),
-            }
+        # Create a configure callback to set row_factory
+        async def configure_connection(conn: "PsycopgAsyncConnection") -> None:
+            # Set DictRow as the row factory
+            conn.row_factory = dict_row
 
-            # Create a configure callback to set row_factory
-            async def configure_connection(conn: "PsycopgAsyncConnection") -> None:
-                # Set DictRow as the row factory
-                conn.row_factory = dict_row
+        pool_params["configure"] = all_config.pop("configure", configure_connection)
 
-            pool_params["configure"] = all_config.pop("configure", configure_connection)
+        # Remove None values from pool_params
+        pool_params = {k: v for k, v in pool_params.items() if v is not None}
 
-            # Remove None values from pool_params
-            pool_params = {k: v for k, v in pool_params.items() if v is not None}
+        # Handle conninfo vs individual connection parameters
+        conninfo = all_config.pop("conninfo", None)
+        if conninfo:
+            # If conninfo is provided, use it directly
+            # Don't pass kwargs when using conninfo string
+            pool = AsyncConnectionPool(conninfo, open=False, **pool_params)
+        else:
+            # Otherwise, pass connection parameters via kwargs
+            # Remove any non-connection parameters
+            # row_factory is already popped out earlier
+            all_config.pop("row_factory", None)
+            # Remove pool-specific settings that may have been left
+            all_config.pop("kwargs", None)
+            pool = AsyncConnectionPool("", kwargs=all_config, open=False, **pool_params)
 
-            # Handle conninfo vs individual connection parameters
-            conninfo = all_config.pop("conninfo", None)
-            if conninfo:
-                # If conninfo is provided, use it directly
-                # Don't pass kwargs when using conninfo string
-                pool = AsyncConnectionPool(conninfo, **pool_params)
-            else:
-                # Otherwise, pass connection parameters via kwargs
-                # Remove any non-connection parameters
-                # row_factory is already popped out earlier
-                all_config.pop("row_factory", None)
-                # Remove pool-specific settings that may have been left
-                all_config.pop("kwargs", None)
-                pool = AsyncConnectionPool("", kwargs=all_config, **pool_params)
+        await pool.open()
 
-            await pool.open()
-            logger.info("Async Psycopg connection pool created successfully", extra={"adapter": "psycopg"})
-        except Exception as e:
-            logger.exception(
-                "Failed to create async Psycopg connection pool", extra={"adapter": "psycopg", "error": str(e)}
-            )
-            raise
         return pool
 
     async def _close_pool(self) -> None:
@@ -664,16 +665,16 @@ class PsycopgAsyncConfig(AsyncDatabaseConfig[PsycopgAsyncConnection, AsyncConnec
         if not self.pool_instance:
             return
 
-        logger.info("Closing async Psycopg connection pool", extra={"adapter": "psycopg"})
-
         try:
+            # Set a flag to prevent __del__ from running cleanup
+            # This avoids the "cannot join current thread" error during garbage collection
+            if hasattr(self.pool_instance, "_closed"):
+                self.pool_instance._closed = True
+
             await self.pool_instance.close()
-            logger.info("Async Psycopg connection pool closed successfully", extra={"adapter": "psycopg"})
-        except Exception as e:
-            logger.exception(
-                "Failed to close async Psycopg connection pool", extra={"adapter": "psycopg", "error": str(e)}
-            )
-            raise
+        finally:
+            # Clear the reference to help garbage collection
+            self.pool_instance = None
 
     async def create_connection(self) -> "PsycopgAsyncConnection":  # pyright: ignore
         """Create a single async connection (not from pool).
@@ -20,6 +20,7 @@ from sqlspec.driver.mixins import (
     ToSchemaMixin,
     TypeCoercionMixin,
 )
+from sqlspec.exceptions import PipelineExecutionError
 from sqlspec.statement.parameters import ParameterStyle
 from sqlspec.statement.result import ArrowResult, DMLResultDict, ScriptResultDict, SelectResultDict, SQLResult
 from sqlspec.statement.splitter import split_sql_script
@@ -113,6 +114,12 @@ class PsycopgSyncDriver(
         **kwargs: Any,
     ) -> Union[SelectResultDict, DMLResultDict]:
         conn = self._connection(connection)
+
+        # Check if this is a COPY command
+        sql_upper = sql.strip().upper()
+        if sql_upper.startswith("COPY") and ("FROM STDIN" in sql_upper or "TO STDOUT" in sql_upper):
+            return self._handle_copy_command(sql, parameters, conn)
+
         with conn.cursor() as cursor:
             cursor.execute(cast("Query", sql), parameters)
             # Check if the statement returns rows by checking cursor.description
@@ -123,6 +130,38 @@ class PsycopgSyncDriver(
                 return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
             return {"rows_affected": cursor.rowcount, "status_message": cursor.statusmessage or "OK"}
 
+    def _handle_copy_command(
+        self, sql: str, data: Any, connection: PsycopgSyncConnection
+    ) -> Union[SelectResultDict, DMLResultDict]:
+        """Handle PostgreSQL COPY commands using cursor.copy() method."""
+        sql_upper = sql.strip().upper()
+
+        with connection.cursor() as cursor:
+            if "TO STDOUT" in sql_upper:
+                # COPY TO STDOUT - read data from the database
+                output_data: list[Any] = []
+                with cursor.copy(cast("Query", sql)) as copy:
+                    output_data.extend(row for row in copy)
+
+                # Return as SelectResultDict with the raw COPY data
+                return {"data": output_data, "column_names": ["copy_data"], "rows_affected": len(output_data)}
+            # COPY FROM STDIN - write data to the database
+            with cursor.copy(cast("Query", sql)) as copy:
+                if data:
+                    # If data is provided, write it to the copy stream
+                    if isinstance(data, (str, bytes)):
+                        copy.write(data)
+                    elif isinstance(data, (list, tuple)):
+                        # If data is a list/tuple of rows, write each row
+                        for row in data:
+                            copy.write_row(row)
+                    else:
+                        # Single row
+                        copy.write_row(data)
+
+            # For COPY operations, cursor.rowcount contains the number of rows affected
+            return {"rows_affected": cursor.rowcount or -1, "status_message": cursor.statusmessage or "COPY COMPLETE"}
+
     def _execute_many(
         self, sql: str, param_list: Any, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
     ) -> DMLResultDict:
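
For context (not part of the released diff), the new _handle_copy_command helpers are built on psycopg 3's cursor.copy() context manager. A brief standalone sketch of that API; the table name and connection string are placeholders:

import psycopg

with psycopg.connect("postgresql://app:secret@localhost/appdb") as conn:
    with conn.cursor() as cur:
        # COPY FROM STDIN: stream rows into the table
        with cur.copy("COPY users (id, name) FROM STDIN") as copy:
            copy.write_row((1, "Ada"))
            copy.write_row((2, "Grace"))

        # COPY TO STDOUT: stream raw COPY data back out
        with cur.copy("COPY users TO STDOUT") as copy:
            for chunk in copy:
                print(bytes(chunk))
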
@@ -242,7 +281,6 @@ class PsycopgSyncDriver(
         Returns:
             List of SQLResult objects from all operations
         """
-        from sqlspec.exceptions import PipelineExecutionError
 
         results = []
         connection = self._connection()
@@ -489,6 +527,12 @@ class PsycopgAsyncDriver(
         **kwargs: Any,
     ) -> Union[SelectResultDict, DMLResultDict]:
         conn = self._connection(connection)
+
+        # Check if this is a COPY command
+        sql_upper = sql.strip().upper()
+        if sql_upper.startswith("COPY") and ("FROM STDIN" in sql_upper or "TO STDOUT" in sql_upper):
+            return await self._handle_copy_command(sql, parameters, conn)
+
         async with conn.cursor() as cursor:
             await cursor.execute(cast("Query", sql), parameters)
 
@@ -510,6 +554,38 @@ class PsycopgAsyncDriver(
             }
             return dml_result
 
+    async def _handle_copy_command(
+        self, sql: str, data: Any, connection: PsycopgAsyncConnection
+    ) -> Union[SelectResultDict, DMLResultDict]:
+        """Handle PostgreSQL COPY commands using cursor.copy() method."""
+        sql_upper = sql.strip().upper()
+
+        async with connection.cursor() as cursor:
+            if "TO STDOUT" in sql_upper:
+                # COPY TO STDOUT - read data from the database
+                output_data = []
+                async with cursor.copy(cast("Query", sql)) as copy:
+                    output_data.extend([row async for row in copy])
+
+                # Return as SelectResultDict with the raw COPY data
+                return {"data": output_data, "column_names": ["copy_data"], "rows_affected": len(output_data)}
+            # COPY FROM STDIN - write data to the database
+            async with cursor.copy(cast("Query", sql)) as copy:
+                if data:
+                    # If data is provided, write it to the copy stream
+                    if isinstance(data, (str, bytes)):
+                        await copy.write(data)
+                    elif isinstance(data, (list, tuple)):
+                        # If data is a list/tuple of rows, write each row
+                        for row in data:
+                            await copy.write_row(row)
+                    else:
+                        # Single row
+                        await copy.write_row(data)
+
+            # For COPY operations, cursor.rowcount contains the number of rows affected
+            return {"rows_affected": cursor.rowcount or -1, "status_message": cursor.statusmessage or "COPY COMPLETE"}
+
     async def _execute_many(
         self, sql: str, param_list: Any, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
     ) -> DMLResultDict:
@@ -595,6 +671,11 @@ class PsycopgAsyncDriver(
         if statement.expression:
             operation_type = str(statement.expression.key).upper()
 
+        # Handle case where we got a SelectResultDict but it was routed here due to parsing being disabled
+        if is_dict_with_field(result, "data") and is_dict_with_field(result, "column_names"):
+            # This is actually a SELECT result, wrap it properly
+            return await self._wrap_select_result(statement, cast("SelectResultDict", result), **kwargs)
+
         if is_dict_with_field(result, "statements_executed"):
             return SQLResult[RowT](
                 statement=statement,