sqlspec 0.36.0__cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (531) hide show
  1. ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
  2. sqlspec/__init__.py +140 -0
  3. sqlspec/__main__.py +12 -0
  4. sqlspec/__metadata__.py +14 -0
  5. sqlspec/_serialization.py +315 -0
  6. sqlspec/_typing.py +700 -0
  7. sqlspec/adapters/__init__.py +0 -0
  8. sqlspec/adapters/adbc/__init__.py +5 -0
  9. sqlspec/adapters/adbc/_typing.py +82 -0
  10. sqlspec/adapters/adbc/adk/__init__.py +5 -0
  11. sqlspec/adapters/adbc/adk/store.py +1273 -0
  12. sqlspec/adapters/adbc/config.py +295 -0
  13. sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
  14. sqlspec/adapters/adbc/core.py +735 -0
  15. sqlspec/adapters/adbc/data_dictionary.py +334 -0
  16. sqlspec/adapters/adbc/driver.py +529 -0
  17. sqlspec/adapters/adbc/events/__init__.py +5 -0
  18. sqlspec/adapters/adbc/events/store.py +285 -0
  19. sqlspec/adapters/adbc/litestar/__init__.py +5 -0
  20. sqlspec/adapters/adbc/litestar/store.py +502 -0
  21. sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  22. sqlspec/adapters/adbc/type_converter.py +140 -0
  23. sqlspec/adapters/aiosqlite/__init__.py +25 -0
  24. sqlspec/adapters/aiosqlite/_typing.py +82 -0
  25. sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
  26. sqlspec/adapters/aiosqlite/adk/store.py +818 -0
  27. sqlspec/adapters/aiosqlite/config.py +334 -0
  28. sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
  29. sqlspec/adapters/aiosqlite/core.py +315 -0
  30. sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
  31. sqlspec/adapters/aiosqlite/driver.py +313 -0
  32. sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
  33. sqlspec/adapters/aiosqlite/events/store.py +20 -0
  34. sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
  35. sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
  36. sqlspec/adapters/aiosqlite/pool.py +533 -0
  37. sqlspec/adapters/asyncmy/__init__.py +21 -0
  38. sqlspec/adapters/asyncmy/_typing.py +87 -0
  39. sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
  40. sqlspec/adapters/asyncmy/adk/store.py +703 -0
  41. sqlspec/adapters/asyncmy/config.py +302 -0
  42. sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
  43. sqlspec/adapters/asyncmy/core.py +360 -0
  44. sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
  45. sqlspec/adapters/asyncmy/driver.py +383 -0
  46. sqlspec/adapters/asyncmy/events/__init__.py +5 -0
  47. sqlspec/adapters/asyncmy/events/store.py +104 -0
  48. sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
  49. sqlspec/adapters/asyncmy/litestar/store.py +296 -0
  50. sqlspec/adapters/asyncpg/__init__.py +19 -0
  51. sqlspec/adapters/asyncpg/_typing.py +88 -0
  52. sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
  53. sqlspec/adapters/asyncpg/adk/store.py +748 -0
  54. sqlspec/adapters/asyncpg/config.py +569 -0
  55. sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
  56. sqlspec/adapters/asyncpg/core.py +367 -0
  57. sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
  58. sqlspec/adapters/asyncpg/driver.py +487 -0
  59. sqlspec/adapters/asyncpg/events/__init__.py +6 -0
  60. sqlspec/adapters/asyncpg/events/backend.py +286 -0
  61. sqlspec/adapters/asyncpg/events/store.py +40 -0
  62. sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
  63. sqlspec/adapters/asyncpg/litestar/store.py +251 -0
  64. sqlspec/adapters/bigquery/__init__.py +14 -0
  65. sqlspec/adapters/bigquery/_typing.py +86 -0
  66. sqlspec/adapters/bigquery/adk/__init__.py +5 -0
  67. sqlspec/adapters/bigquery/adk/store.py +827 -0
  68. sqlspec/adapters/bigquery/config.py +353 -0
  69. sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
  70. sqlspec/adapters/bigquery/core.py +715 -0
  71. sqlspec/adapters/bigquery/data_dictionary.py +128 -0
  72. sqlspec/adapters/bigquery/driver.py +548 -0
  73. sqlspec/adapters/bigquery/events/__init__.py +5 -0
  74. sqlspec/adapters/bigquery/events/store.py +139 -0
  75. sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
  76. sqlspec/adapters/bigquery/litestar/store.py +325 -0
  77. sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  78. sqlspec/adapters/bigquery/type_converter.py +107 -0
  79. sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
  80. sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
  81. sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
  82. sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
  83. sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
  84. sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
  85. sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
  86. sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
  87. sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
  88. sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
  89. sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
  90. sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
  91. sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
  92. sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
  93. sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
  94. sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
  95. sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
  96. sqlspec/adapters/cockroach_psycopg/config.py +484 -0
  97. sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
  98. sqlspec/adapters/cockroach_psycopg/core.py +63 -0
  99. sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
  100. sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
  101. sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
  102. sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
  103. sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
  104. sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
  105. sqlspec/adapters/duckdb/__init__.py +25 -0
  106. sqlspec/adapters/duckdb/_typing.py +81 -0
  107. sqlspec/adapters/duckdb/adk/__init__.py +14 -0
  108. sqlspec/adapters/duckdb/adk/store.py +850 -0
  109. sqlspec/adapters/duckdb/config.py +463 -0
  110. sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
  111. sqlspec/adapters/duckdb/core.py +257 -0
  112. sqlspec/adapters/duckdb/data_dictionary.py +140 -0
  113. sqlspec/adapters/duckdb/driver.py +430 -0
  114. sqlspec/adapters/duckdb/events/__init__.py +5 -0
  115. sqlspec/adapters/duckdb/events/store.py +57 -0
  116. sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
  117. sqlspec/adapters/duckdb/litestar/store.py +330 -0
  118. sqlspec/adapters/duckdb/pool.py +293 -0
  119. sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  120. sqlspec/adapters/duckdb/type_converter.py +118 -0
  121. sqlspec/adapters/mock/__init__.py +72 -0
  122. sqlspec/adapters/mock/_typing.py +147 -0
  123. sqlspec/adapters/mock/config.py +483 -0
  124. sqlspec/adapters/mock/core.py +319 -0
  125. sqlspec/adapters/mock/data_dictionary.py +366 -0
  126. sqlspec/adapters/mock/driver.py +721 -0
  127. sqlspec/adapters/mysqlconnector/__init__.py +36 -0
  128. sqlspec/adapters/mysqlconnector/_typing.py +141 -0
  129. sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
  130. sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
  131. sqlspec/adapters/mysqlconnector/config.py +394 -0
  132. sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
  133. sqlspec/adapters/mysqlconnector/core.py +303 -0
  134. sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
  135. sqlspec/adapters/mysqlconnector/driver.py +483 -0
  136. sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
  137. sqlspec/adapters/mysqlconnector/events/store.py +98 -0
  138. sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
  139. sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
  140. sqlspec/adapters/oracledb/__init__.py +60 -0
  141. sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
  142. sqlspec/adapters/oracledb/_typing.py +182 -0
  143. sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
  144. sqlspec/adapters/oracledb/adk/__init__.py +10 -0
  145. sqlspec/adapters/oracledb/adk/store.py +2369 -0
  146. sqlspec/adapters/oracledb/config.py +550 -0
  147. sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
  148. sqlspec/adapters/oracledb/core.py +543 -0
  149. sqlspec/adapters/oracledb/data_dictionary.py +536 -0
  150. sqlspec/adapters/oracledb/driver.py +1229 -0
  151. sqlspec/adapters/oracledb/events/__init__.py +16 -0
  152. sqlspec/adapters/oracledb/events/backend.py +347 -0
  153. sqlspec/adapters/oracledb/events/store.py +420 -0
  154. sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
  155. sqlspec/adapters/oracledb/litestar/store.py +781 -0
  156. sqlspec/adapters/oracledb/migrations.py +535 -0
  157. sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  158. sqlspec/adapters/oracledb/type_converter.py +211 -0
  159. sqlspec/adapters/psqlpy/__init__.py +17 -0
  160. sqlspec/adapters/psqlpy/_typing.py +79 -0
  161. sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
  162. sqlspec/adapters/psqlpy/adk/store.py +766 -0
  163. sqlspec/adapters/psqlpy/config.py +304 -0
  164. sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
  165. sqlspec/adapters/psqlpy/core.py +480 -0
  166. sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
  167. sqlspec/adapters/psqlpy/driver.py +438 -0
  168. sqlspec/adapters/psqlpy/events/__init__.py +6 -0
  169. sqlspec/adapters/psqlpy/events/backend.py +310 -0
  170. sqlspec/adapters/psqlpy/events/store.py +20 -0
  171. sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
  172. sqlspec/adapters/psqlpy/litestar/store.py +270 -0
  173. sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  174. sqlspec/adapters/psqlpy/type_converter.py +113 -0
  175. sqlspec/adapters/psycopg/__init__.py +32 -0
  176. sqlspec/adapters/psycopg/_typing.py +164 -0
  177. sqlspec/adapters/psycopg/adk/__init__.py +10 -0
  178. sqlspec/adapters/psycopg/adk/store.py +1387 -0
  179. sqlspec/adapters/psycopg/config.py +576 -0
  180. sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
  181. sqlspec/adapters/psycopg/core.py +450 -0
  182. sqlspec/adapters/psycopg/data_dictionary.py +289 -0
  183. sqlspec/adapters/psycopg/driver.py +975 -0
  184. sqlspec/adapters/psycopg/events/__init__.py +20 -0
  185. sqlspec/adapters/psycopg/events/backend.py +458 -0
  186. sqlspec/adapters/psycopg/events/store.py +42 -0
  187. sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
  188. sqlspec/adapters/psycopg/litestar/store.py +552 -0
  189. sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  190. sqlspec/adapters/psycopg/type_converter.py +93 -0
  191. sqlspec/adapters/pymysql/__init__.py +21 -0
  192. sqlspec/adapters/pymysql/_typing.py +71 -0
  193. sqlspec/adapters/pymysql/adk/__init__.py +5 -0
  194. sqlspec/adapters/pymysql/adk/store.py +540 -0
  195. sqlspec/adapters/pymysql/config.py +195 -0
  196. sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
  197. sqlspec/adapters/pymysql/core.py +299 -0
  198. sqlspec/adapters/pymysql/data_dictionary.py +122 -0
  199. sqlspec/adapters/pymysql/driver.py +259 -0
  200. sqlspec/adapters/pymysql/events/__init__.py +5 -0
  201. sqlspec/adapters/pymysql/events/store.py +50 -0
  202. sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
  203. sqlspec/adapters/pymysql/litestar/store.py +232 -0
  204. sqlspec/adapters/pymysql/pool.py +137 -0
  205. sqlspec/adapters/spanner/__init__.py +40 -0
  206. sqlspec/adapters/spanner/_typing.py +86 -0
  207. sqlspec/adapters/spanner/adk/__init__.py +5 -0
  208. sqlspec/adapters/spanner/adk/store.py +732 -0
  209. sqlspec/adapters/spanner/config.py +352 -0
  210. sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
  211. sqlspec/adapters/spanner/core.py +188 -0
  212. sqlspec/adapters/spanner/data_dictionary.py +120 -0
  213. sqlspec/adapters/spanner/dialect/__init__.py +6 -0
  214. sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
  215. sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
  216. sqlspec/adapters/spanner/driver.py +373 -0
  217. sqlspec/adapters/spanner/events/__init__.py +5 -0
  218. sqlspec/adapters/spanner/events/store.py +187 -0
  219. sqlspec/adapters/spanner/litestar/__init__.py +5 -0
  220. sqlspec/adapters/spanner/litestar/store.py +291 -0
  221. sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  222. sqlspec/adapters/spanner/type_converter.py +331 -0
  223. sqlspec/adapters/sqlite/__init__.py +19 -0
  224. sqlspec/adapters/sqlite/_typing.py +80 -0
  225. sqlspec/adapters/sqlite/adk/__init__.py +5 -0
  226. sqlspec/adapters/sqlite/adk/store.py +958 -0
  227. sqlspec/adapters/sqlite/config.py +280 -0
  228. sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
  229. sqlspec/adapters/sqlite/core.py +312 -0
  230. sqlspec/adapters/sqlite/data_dictionary.py +202 -0
  231. sqlspec/adapters/sqlite/driver.py +359 -0
  232. sqlspec/adapters/sqlite/events/__init__.py +5 -0
  233. sqlspec/adapters/sqlite/events/store.py +20 -0
  234. sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
  235. sqlspec/adapters/sqlite/litestar/store.py +316 -0
  236. sqlspec/adapters/sqlite/pool.py +198 -0
  237. sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  238. sqlspec/adapters/sqlite/type_converter.py +114 -0
  239. sqlspec/base.py +747 -0
  240. sqlspec/builder/__init__.py +179 -0
  241. sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
  242. sqlspec/builder/_base.py +1022 -0
  243. sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
  244. sqlspec/builder/_column.py +521 -0
  245. sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
  246. sqlspec/builder/_ddl.py +1642 -0
  247. sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
  248. sqlspec/builder/_delete.py +95 -0
  249. sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
  250. sqlspec/builder/_dml.py +365 -0
  251. sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
  252. sqlspec/builder/_explain.py +579 -0
  253. sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
  254. sqlspec/builder/_expression_wrappers.py +46 -0
  255. sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
  256. sqlspec/builder/_factory.py +1697 -0
  257. sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
  258. sqlspec/builder/_insert.py +328 -0
  259. sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
  260. sqlspec/builder/_join.py +499 -0
  261. sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
  262. sqlspec/builder/_merge.py +821 -0
  263. sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
  264. sqlspec/builder/_parsing_utils.py +297 -0
  265. sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
  266. sqlspec/builder/_select.py +1660 -0
  267. sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
  268. sqlspec/builder/_temporal.py +139 -0
  269. sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
  270. sqlspec/builder/_update.py +173 -0
  271. sqlspec/builder/_vector_expressions.py +267 -0
  272. sqlspec/cli.py +911 -0
  273. sqlspec/config.py +1755 -0
  274. sqlspec/core/__init__.py +374 -0
  275. sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
  276. sqlspec/core/_correlation.py +176 -0
  277. sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
  278. sqlspec/core/cache.py +1069 -0
  279. sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
  280. sqlspec/core/compiler.py +954 -0
  281. sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
  282. sqlspec/core/explain.py +275 -0
  283. sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
  284. sqlspec/core/filters.py +952 -0
  285. sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
  286. sqlspec/core/hashing.py +262 -0
  287. sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
  288. sqlspec/core/metrics.py +83 -0
  289. sqlspec/core/parameters/__init__.py +71 -0
  290. sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
  291. sqlspec/core/parameters/_alignment.py +270 -0
  292. sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  293. sqlspec/core/parameters/_converter.py +543 -0
  294. sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
  295. sqlspec/core/parameters/_processor.py +505 -0
  296. sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
  297. sqlspec/core/parameters/_registry.py +206 -0
  298. sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
  299. sqlspec/core/parameters/_transformers.py +292 -0
  300. sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
  301. sqlspec/core/parameters/_types.py +499 -0
  302. sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
  303. sqlspec/core/parameters/_validator.py +180 -0
  304. sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
  305. sqlspec/core/pipeline.py +319 -0
  306. sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
  307. sqlspec/core/query_modifiers.py +437 -0
  308. sqlspec/core/result/__init__.py +23 -0
  309. sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
  310. sqlspec/core/result/_base.py +1121 -0
  311. sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
  312. sqlspec/core/result/_io.py +28 -0
  313. sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
  314. sqlspec/core/splitter.py +966 -0
  315. sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
  316. sqlspec/core/stack.py +163 -0
  317. sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
  318. sqlspec/core/statement.py +1503 -0
  319. sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
  320. sqlspec/core/type_converter.py +339 -0
  321. sqlspec/data_dictionary/__init__.py +22 -0
  322. sqlspec/data_dictionary/_loader.py +123 -0
  323. sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
  324. sqlspec/data_dictionary/_registry.py +74 -0
  325. sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
  326. sqlspec/data_dictionary/_types.py +121 -0
  327. sqlspec/data_dictionary/dialects/__init__.py +21 -0
  328. sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
  329. sqlspec/data_dictionary/dialects/bigquery.py +49 -0
  330. sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
  331. sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
  332. sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
  333. sqlspec/data_dictionary/dialects/duckdb.py +47 -0
  334. sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
  335. sqlspec/data_dictionary/dialects/mysql.py +42 -0
  336. sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
  337. sqlspec/data_dictionary/dialects/oracle.py +34 -0
  338. sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
  339. sqlspec/data_dictionary/dialects/postgres.py +46 -0
  340. sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
  341. sqlspec/data_dictionary/dialects/spanner.py +37 -0
  342. sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
  343. sqlspec/data_dictionary/dialects/sqlite.py +42 -0
  344. sqlspec/data_dictionary/sql/.gitkeep +0 -0
  345. sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
  346. sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
  347. sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
  348. sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
  349. sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
  350. sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
  351. sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
  352. sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
  353. sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
  354. sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
  355. sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
  356. sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
  357. sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
  358. sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
  359. sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
  360. sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
  361. sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
  362. sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
  363. sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
  364. sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
  365. sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
  366. sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
  367. sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
  368. sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
  369. sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
  370. sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
  371. sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
  372. sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
  373. sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
  374. sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
  375. sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
  376. sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
  377. sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
  378. sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
  379. sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
  380. sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
  381. sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
  382. sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
  383. sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
  384. sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
  385. sqlspec/driver/__init__.py +32 -0
  386. sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
  387. sqlspec/driver/_async.py +1737 -0
  388. sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
  389. sqlspec/driver/_common.py +1478 -0
  390. sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
  391. sqlspec/driver/_sql_helpers.py +148 -0
  392. sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
  393. sqlspec/driver/_storage_helpers.py +144 -0
  394. sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
  395. sqlspec/driver/_sync.py +1710 -0
  396. sqlspec/exceptions.py +338 -0
  397. sqlspec/extensions/__init__.py +0 -0
  398. sqlspec/extensions/adk/__init__.py +70 -0
  399. sqlspec/extensions/adk/_types.py +51 -0
  400. sqlspec/extensions/adk/converters.py +172 -0
  401. sqlspec/extensions/adk/memory/__init__.py +69 -0
  402. sqlspec/extensions/adk/memory/_types.py +30 -0
  403. sqlspec/extensions/adk/memory/converters.py +149 -0
  404. sqlspec/extensions/adk/memory/service.py +217 -0
  405. sqlspec/extensions/adk/memory/store.py +569 -0
  406. sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
  407. sqlspec/extensions/adk/migrations/__init__.py +0 -0
  408. sqlspec/extensions/adk/service.py +225 -0
  409. sqlspec/extensions/adk/store.py +567 -0
  410. sqlspec/extensions/events/__init__.py +51 -0
  411. sqlspec/extensions/events/_channel.py +703 -0
  412. sqlspec/extensions/events/_hints.py +45 -0
  413. sqlspec/extensions/events/_models.py +23 -0
  414. sqlspec/extensions/events/_payload.py +69 -0
  415. sqlspec/extensions/events/_protocols.py +134 -0
  416. sqlspec/extensions/events/_queue.py +461 -0
  417. sqlspec/extensions/events/_store.py +209 -0
  418. sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
  419. sqlspec/extensions/events/migrations/__init__.py +3 -0
  420. sqlspec/extensions/fastapi/__init__.py +19 -0
  421. sqlspec/extensions/fastapi/extension.py +351 -0
  422. sqlspec/extensions/fastapi/providers.py +607 -0
  423. sqlspec/extensions/flask/__init__.py +37 -0
  424. sqlspec/extensions/flask/_state.py +76 -0
  425. sqlspec/extensions/flask/_utils.py +71 -0
  426. sqlspec/extensions/flask/extension.py +519 -0
  427. sqlspec/extensions/litestar/__init__.py +28 -0
  428. sqlspec/extensions/litestar/_utils.py +52 -0
  429. sqlspec/extensions/litestar/channels.py +165 -0
  430. sqlspec/extensions/litestar/cli.py +102 -0
  431. sqlspec/extensions/litestar/config.py +90 -0
  432. sqlspec/extensions/litestar/handlers.py +316 -0
  433. sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
  434. sqlspec/extensions/litestar/migrations/__init__.py +3 -0
  435. sqlspec/extensions/litestar/plugin.py +671 -0
  436. sqlspec/extensions/litestar/providers.py +526 -0
  437. sqlspec/extensions/litestar/store.py +296 -0
  438. sqlspec/extensions/otel/__init__.py +58 -0
  439. sqlspec/extensions/prometheus/__init__.py +113 -0
  440. sqlspec/extensions/starlette/__init__.py +19 -0
  441. sqlspec/extensions/starlette/_state.py +30 -0
  442. sqlspec/extensions/starlette/_utils.py +96 -0
  443. sqlspec/extensions/starlette/extension.py +346 -0
  444. sqlspec/extensions/starlette/middleware.py +235 -0
  445. sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
  446. sqlspec/loader.py +702 -0
  447. sqlspec/migrations/__init__.py +36 -0
  448. sqlspec/migrations/base.py +731 -0
  449. sqlspec/migrations/commands.py +1232 -0
  450. sqlspec/migrations/context.py +157 -0
  451. sqlspec/migrations/fix.py +204 -0
  452. sqlspec/migrations/loaders.py +443 -0
  453. sqlspec/migrations/runner.py +1172 -0
  454. sqlspec/migrations/templates.py +234 -0
  455. sqlspec/migrations/tracker.py +611 -0
  456. sqlspec/migrations/utils.py +256 -0
  457. sqlspec/migrations/validation.py +207 -0
  458. sqlspec/migrations/version.py +446 -0
  459. sqlspec/observability/__init__.py +55 -0
  460. sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
  461. sqlspec/observability/_common.py +77 -0
  462. sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
  463. sqlspec/observability/_config.py +348 -0
  464. sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
  465. sqlspec/observability/_diagnostics.py +74 -0
  466. sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
  467. sqlspec/observability/_dispatcher.py +152 -0
  468. sqlspec/observability/_formatters/__init__.py +13 -0
  469. sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
  470. sqlspec/observability/_formatters/_aws.py +102 -0
  471. sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
  472. sqlspec/observability/_formatters/_azure.py +96 -0
  473. sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
  474. sqlspec/observability/_formatters/_base.py +57 -0
  475. sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
  476. sqlspec/observability/_formatters/_gcp.py +131 -0
  477. sqlspec/observability/_formatting.py +58 -0
  478. sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
  479. sqlspec/observability/_observer.py +357 -0
  480. sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
  481. sqlspec/observability/_runtime.py +420 -0
  482. sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
  483. sqlspec/observability/_sampling.py +188 -0
  484. sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
  485. sqlspec/observability/_spans.py +161 -0
  486. sqlspec/protocols.py +916 -0
  487. sqlspec/py.typed +0 -0
  488. sqlspec/storage/__init__.py +48 -0
  489. sqlspec/storage/_utils.py +104 -0
  490. sqlspec/storage/backends/__init__.py +1 -0
  491. sqlspec/storage/backends/base.py +253 -0
  492. sqlspec/storage/backends/fsspec.py +529 -0
  493. sqlspec/storage/backends/local.py +441 -0
  494. sqlspec/storage/backends/obstore.py +916 -0
  495. sqlspec/storage/errors.py +104 -0
  496. sqlspec/storage/pipeline.py +582 -0
  497. sqlspec/storage/registry.py +301 -0
  498. sqlspec/typing.py +395 -0
  499. sqlspec/utils/__init__.py +7 -0
  500. sqlspec/utils/arrow_helpers.py +318 -0
  501. sqlspec/utils/config_tools.py +332 -0
  502. sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
  503. sqlspec/utils/correlation.py +134 -0
  504. sqlspec/utils/deprecation.py +190 -0
  505. sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
  506. sqlspec/utils/fixtures.py +258 -0
  507. sqlspec/utils/logging.py +222 -0
  508. sqlspec/utils/module_loader.py +306 -0
  509. sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
  510. sqlspec/utils/portal.py +375 -0
  511. sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
  512. sqlspec/utils/schema.py +485 -0
  513. sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
  514. sqlspec/utils/serializers.py +408 -0
  515. sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
  516. sqlspec/utils/singleton.py +41 -0
  517. sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
  518. sqlspec/utils/sync_tools.py +311 -0
  519. sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
  520. sqlspec/utils/text.py +108 -0
  521. sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
  522. sqlspec/utils/type_converters.py +128 -0
  523. sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
  524. sqlspec/utils/type_guards.py +1360 -0
  525. sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
  526. sqlspec/utils/uuids.py +225 -0
  527. sqlspec-0.36.0.dist-info/METADATA +205 -0
  528. sqlspec-0.36.0.dist-info/RECORD +531 -0
  529. sqlspec-0.36.0.dist-info/WHEEL +7 -0
  530. sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
  531. sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,1710 @@
1
+ """Synchronous driver protocol implementation."""
2
+
3
+ import graphlib
4
+ import logging
5
+ import re
6
+ from abc import abstractmethod
7
+ from contextlib import suppress
8
+ from time import perf_counter
9
+ from typing import TYPE_CHECKING, Any, ClassVar, Final, cast, final, overload
10
+
11
+ from mypy_extensions import mypyc_attr
12
+
13
+ from sqlspec.core import SQL, ProcessedState, StackResult, create_arrow_result
14
+ from sqlspec.core.stack import StackOperation, StatementStack
15
+ from sqlspec.data_dictionary._loader import get_data_dictionary_loader
16
+ from sqlspec.data_dictionary._registry import get_dialect_config
17
+ from sqlspec.driver._common import (
18
+ VERSION_GROUPS_MIN_FOR_MINOR,
19
+ VERSION_GROUPS_MIN_FOR_PATCH,
20
+ CommonDriverAttributesMixin,
21
+ ExecutionResult,
22
+ StackExecutionObserver,
23
+ SyncExceptionHandler,
24
+ describe_stack_statement,
25
+ handle_single_row_error,
26
+ resolve_db_system,
27
+ )
28
+ from sqlspec.driver._sql_helpers import DEFAULT_PRETTY
29
+ from sqlspec.driver._sql_helpers import convert_to_dialect as _convert_to_dialect_impl
30
+ from sqlspec.driver._storage_helpers import (
31
+ arrow_table_to_rows,
32
+ attach_partition_telemetry,
33
+ build_ingest_telemetry,
34
+ coerce_arrow_table,
35
+ create_storage_job,
36
+ stringify_storage_target,
37
+ )
38
+ from sqlspec.exceptions import ImproperConfigurationError, SQLFileNotFoundError, StackExecutionError
39
+ from sqlspec.storage import StorageBridgeJob, StorageDestination, StorageFormat, StorageTelemetry, SyncStoragePipeline
40
+ from sqlspec.typing import VersionInfo
41
+ from sqlspec.utils.arrow_helpers import convert_dict_to_arrow_with_schema
42
+ from sqlspec.utils.logging import get_logger, log_with_context
43
+
44
+ if TYPE_CHECKING:
45
+ from collections.abc import Sequence
46
+
47
+ from sqlglot.dialects.dialect import DialectType
48
+
49
+ from sqlspec.builder import QueryBuilder
50
+ from sqlspec.core import ArrowResult, SQLResult, Statement, StatementConfig, StatementFilter
51
+ from sqlspec.data_dictionary._types import DialectConfig
52
+ from sqlspec.protocols import HasDataProtocol, HasExecuteProtocol
53
+ from sqlspec.typing import (
54
+ ArrowReturnFormat,
55
+ ArrowTable,
56
+ ColumnMetadata,
57
+ ForeignKeyMetadata,
58
+ IndexMetadata,
59
+ SchemaT,
60
+ StatementParameters,
61
+ TableMetadata,
62
+ )
63
+
64
+ _LOGGER_NAME: Final[str] = "sqlspec"
65
+ logger = get_logger(_LOGGER_NAME)
66
+
67
+ __all__ = ("SyncDataDictionaryBase", "SyncDriverAdapterBase")
68
+
69
+
70
+ EMPTY_FILTERS: Final["list[StatementFilter]"] = []
71
+
72
+
73
+ @mypyc_attr(allow_interpreted_subclasses=True)
74
+ class SyncDriverAdapterBase(CommonDriverAttributesMixin):
75
+ """Base class for synchronous database drivers.
76
+
77
+ This class includes flattened storage and SQL translation methods that were
78
+ previously in StorageDriverMixin and SQLTranslatorMixin. The flattening
79
+ eliminates cross-trait attribute access that caused mypyc segmentation faults.
80
+
81
+ Method Organization:
82
+ 1. Core dispatch methods (the execution engine)
83
+ 2. Transaction management (abstract methods)
84
+ 3. Public API - execution methods
85
+ 4. Public API - query methods (select/fetch variants)
86
+ 5. Arrow API methods
87
+ 6. Stack execution
88
+ 7. Storage API methods
89
+ 8. Utility methods
90
+ 9. Private/internal methods
91
+
92
+ """
93
+
94
+ __slots__ = ()
95
+
96
+ dialect: "DialectType | None" = None
97
+
98
    @property
    @abstractmethod
    def data_dictionary(self) -> "SyncDataDictionaryBase":
        """Return the data dictionary for this driver.

        Concrete adapters supply a dialect-specific implementation used for
        schema/metadata introspection.

        Returns:
            Data dictionary instance for metadata queries.
        """
107
+
108
+ # ─────────────────────────────────────────────────────────────────────────────
109
+ # CORE DISPATCH METHODS - The Execution Engine
110
+ # ─────────────────────────────────────────────────────────────────────────────
111
+
112
    @final
    def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> "SQLResult":
        """Central execution dispatcher using the Template Method Pattern.

        Compiles the statement, emits observability events/spans around the
        execution, routes to the appropriate dispatch hook (special handling,
        script, many, or single execution), and surfaces any exception mapped
        by the driver's exception handler.

        Args:
            statement: The SQL statement to execute
            connection: The database connection to use

        Returns:
            The result of the SQL execution

        """
        runtime = self.observability
        compiled_sql, execution_parameters = statement.compile()
        # Force processing/validation of the statement before execution; the
        # processed state itself is not needed here.
        _ = cast("ProcessedState", statement.get_processed_state())
        operation = statement.operation_type
        # Shared context dict reused for start/complete/error telemetry events.
        query_context = {
            "sql": compiled_sql,
            "parameters": execution_parameters,
            "driver": type(self).__name__,
            "operation": operation,
            "is_many": statement.is_many,
            "is_script": statement.is_script,
        }
        runtime.emit_query_start(**query_context)
        span = runtime.start_query_span(compiled_sql, operation, type(self).__name__)
        started = perf_counter()

        result: SQLResult | None = None
        # Deferred-exception handler: mapped exceptions are stored on
        # pending_exception rather than raised from __exit__ (mypyc ABI
        # compatibility) and re-raised explicitly below.
        exc_handler = self.handle_database_exceptions()
        try:
            with exc_handler, self.with_cursor(connection) as cursor:
                # Give drivers first shot at non-standard operations
                # (e.g. PostgreSQL COPY); None means proceed normally.
                special_result = self.dispatch_special_handling(cursor, statement)
                if special_result is not None:
                    result = special_result
                elif statement.is_script:
                    execution_result = self.dispatch_execute_script(cursor, statement)
                    result = self.build_statement_result(statement, execution_result)
                elif statement.is_many:
                    execution_result = self.dispatch_execute_many(cursor, statement)
                    result = self.build_statement_result(statement, execution_result)
                else:
                    execution_result = self.dispatch_execute(cursor, statement)
                    result = self.build_statement_result(statement, execution_result)
        except Exception as exc:  # pragma: no cover - instrumentation path
            # Prefer the driver-mapped exception when the handler captured one.
            if exc_handler.pending_exception is not None:
                mapped_exc = exc_handler.pending_exception
                runtime.span_manager.end_span(span, error=mapped_exc)
                runtime.emit_error(mapped_exc, **query_context)
                raise mapped_exc from exc
            runtime.span_manager.end_span(span, error=exc)
            runtime.emit_error(exc, **query_context)
            raise

        # The handler may have swallowed the original exception entirely; in
        # that case raise the mapped exception even though the `with` body
        # completed without propagating.
        if exc_handler.pending_exception is not None:
            mapped_exc = exc_handler.pending_exception
            runtime.span_manager.end_span(span, error=mapped_exc)
            runtime.emit_error(mapped_exc, **query_context)
            raise mapped_exc from None

        assert result is not None  # Guaranteed: no exception means result was assigned

        runtime.span_manager.end_span(span)
        duration = perf_counter() - started
        runtime.emit_query_complete(**{**query_context, "rows_affected": result.rows_affected})
        runtime.emit_statement_event(
            sql=compiled_sql,
            parameters=execution_parameters,
            driver=type(self).__name__,
            operation=operation,
            execution_mode=self.statement_config.execution_mode,
            is_many=statement.is_many,
            is_script=statement.is_script,
            rows_affected=result.rows_affected,
            duration_s=duration,
            storage_backend=(result.metadata or {}).get("storage_backend"),
            started_at=started,
        )
        return result
191
+
192
    @abstractmethod
    def dispatch_execute(self, cursor: Any, statement: "SQL") -> ExecutionResult:
        """Execute a single SQL statement.

        Must be implemented by each driver for database-specific execution logic.

        Args:
            cursor: Database cursor/connection object
            statement: SQL statement object with all necessary data and configuration

        Returns:
            ExecutionResult with execution data

        """
206
+
207
    @abstractmethod
    def dispatch_execute_many(self, cursor: Any, statement: "SQL") -> ExecutionResult:
        """Execute SQL with multiple parameter sets (executemany).

        Must be implemented by each driver for database-specific executemany logic.

        Args:
            cursor: Database cursor/connection object
            statement: SQL statement object with all necessary data and configuration

        Returns:
            ExecutionResult with execution data for the many operation

        """
221
+
222
+ def dispatch_execute_script(self, cursor: Any, statement: "SQL") -> ExecutionResult:
223
+ """Execute a SQL script containing multiple statements.
224
+
225
+ Default implementation splits the script and executes statements individually.
226
+ Drivers can override for database-specific script execution methods.
227
+
228
+ Args:
229
+ cursor: Database cursor/connection object
230
+ statement: SQL statement object with all necessary data and configuration
231
+
232
+ Returns:
233
+ ExecutionResult with script execution data including statement counts
234
+
235
+ """
236
+ sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
237
+ statements = self.split_script_statements(sql, self.statement_config, strip_trailing_semicolon=True)
238
+
239
+ statement_count: int = len(statements)
240
+ successful_count: int = 0
241
+
242
+ for stmt in statements:
243
+ single_stmt = statement.copy(statement=stmt, parameters=prepared_parameters)
244
+ self.dispatch_execute(cursor, single_stmt)
245
+ successful_count += 1
246
+
247
+ return self.create_execution_result(
248
+ cursor, statement_count=statement_count, successful_statements=successful_count, is_script_result=True
249
+ )
250
+
251
+ def dispatch_special_handling(self, cursor: Any, statement: "SQL") -> "SQLResult | None":
252
+ """Hook for database-specific special operations (e.g., PostgreSQL COPY, bulk operations).
253
+
254
+ This method is called first in dispatch_statement_execution() to allow drivers to handle
255
+ special operations that don't follow the standard SQL execution pattern.
256
+
257
+ Args:
258
+ cursor: Database cursor/connection object
259
+ statement: SQL statement to analyze
260
+
261
+ Returns:
262
+ SQLResult if the special operation was handled and completed,
263
+ None if standard execution should proceed
264
+
265
+ """
266
+ _ = (cursor, statement)
267
+ return None
268
+
269
+ # ─────────────────────────────────────────────────────────────────────────────
270
+ # TRANSACTION MANAGEMENT - Required Abstract Methods
271
+ # ─────────────────────────────────────────────────────────────────────────────
272
+
273
    @abstractmethod
    def begin(self) -> None:
        """Begin a database transaction on the current connection."""
276
+
277
    @abstractmethod
    def commit(self) -> None:
        """Commit the current transaction on the current connection."""
280
+
281
    @abstractmethod
    def rollback(self) -> None:
        """Rollback the current transaction on the current connection."""
284
+
285
    @abstractmethod
    def with_cursor(self, connection: Any) -> Any:
        """Create and return a context manager for cursor acquisition and cleanup.

        Returns a context manager that yields a cursor for database operations.
        Concrete implementations handle database-specific cursor creation and cleanup.
        """
292
+
293
    @abstractmethod
    def handle_database_exceptions(self) -> "SyncExceptionHandler":
        """Handle database-specific exceptions and wrap them appropriately.

        Returns:
            Exception handler with deferred exception pattern for mypyc compatibility.
            The handler stores mapped exceptions in pending_exception rather than
            raising from __exit__ to avoid ABI boundary violations.

        """
303
+
304
+ # ─────────────────────────────────────────────────────────────────────────────
305
+ # PUBLIC API - Core Execution Methods
306
+ # ─────────────────────────────────────────────────────────────────────────────
307
+
308
+ def execute(
309
+ self,
310
+ statement: "SQL | Statement | QueryBuilder",
311
+ /,
312
+ *parameters: "StatementParameters | StatementFilter",
313
+ statement_config: "StatementConfig | None" = None,
314
+ **kwargs: Any,
315
+ ) -> "SQLResult":
316
+ """Execute a statement with parameter handling."""
317
+ sql_statement = self.prepare_statement(
318
+ statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs
319
+ )
320
+ return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection)
321
+
322
+ def execute_many(
323
+ self,
324
+ statement: "SQL | Statement | QueryBuilder",
325
+ /,
326
+ parameters: "Sequence[StatementParameters]",
327
+ *filters: "StatementParameters | StatementFilter",
328
+ statement_config: "StatementConfig | None" = None,
329
+ **kwargs: Any,
330
+ ) -> "SQLResult":
331
+ """Execute statement multiple times with different parameters.
332
+
333
+ Parameters passed will be used as the batch execution sequence.
334
+ """
335
+ config = statement_config or self.statement_config
336
+
337
+ if isinstance(statement, SQL):
338
+ statement_seed = statement.raw_expression or statement.raw_sql
339
+ sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs)
340
+ else:
341
+ base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs)
342
+ statement_seed = base_statement.raw_expression or base_statement.raw_sql
343
+ sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs)
344
+
345
+ return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection)
346
+
347
+ def execute_script(
348
+ self,
349
+ statement: "str | SQL",
350
+ /,
351
+ *parameters: "StatementParameters | StatementFilter",
352
+ statement_config: "StatementConfig | None" = None,
353
+ **kwargs: Any,
354
+ ) -> "SQLResult":
355
+ """Execute a multi-statement script.
356
+
357
+ By default, validates each statement and logs warnings for dangerous
358
+ operations. Use suppress_warnings=True for migrations and admin scripts.
359
+ """
360
+ config = statement_config or self.statement_config
361
+ sql_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs)
362
+
363
+ return self.dispatch_statement_execution(statement=sql_statement.as_script(), connection=self.connection)
364
+
365
+ # ─────────────────────────────────────────────────────────────────────────────
366
+ # PUBLIC API - Query Methods (select/fetch variants)
367
+ # ─────────────────────────────────────────────────────────────────────────────
368
+
369
+ @overload
370
+ def select(
371
+ self,
372
+ statement: "Statement | QueryBuilder",
373
+ /,
374
+ *parameters: "StatementParameters | StatementFilter",
375
+ schema_type: "type[SchemaT]",
376
+ statement_config: "StatementConfig | None" = None,
377
+ **kwargs: Any,
378
+ ) -> "list[SchemaT]": ...
379
+
380
+ @overload
381
+ def select(
382
+ self,
383
+ statement: "Statement | QueryBuilder",
384
+ /,
385
+ *parameters: "StatementParameters | StatementFilter",
386
+ schema_type: None = None,
387
+ statement_config: "StatementConfig | None" = None,
388
+ **kwargs: Any,
389
+ ) -> "list[dict[str, Any]]": ...
390
+
391
+ def select(
392
+ self,
393
+ statement: "Statement | QueryBuilder",
394
+ /,
395
+ *parameters: "StatementParameters | StatementFilter",
396
+ schema_type: "type[SchemaT] | None" = None,
397
+ statement_config: "StatementConfig | None" = None,
398
+ **kwargs: Any,
399
+ ) -> "list[SchemaT] | list[dict[str, Any]]":
400
+ """Execute a select statement and return all rows."""
401
+ result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
402
+ return result.get_data(schema_type=schema_type)
403
+
404
+ @overload
405
+ def fetch(
406
+ self,
407
+ statement: "Statement | QueryBuilder",
408
+ /,
409
+ *parameters: "StatementParameters | StatementFilter",
410
+ schema_type: "type[SchemaT]",
411
+ statement_config: "StatementConfig | None" = None,
412
+ **kwargs: Any,
413
+ ) -> "list[SchemaT]": ...
414
+
415
+ @overload
416
+ def fetch(
417
+ self,
418
+ statement: "Statement | QueryBuilder",
419
+ /,
420
+ *parameters: "StatementParameters | StatementFilter",
421
+ schema_type: None = None,
422
+ statement_config: "StatementConfig | None" = None,
423
+ **kwargs: Any,
424
+ ) -> "list[dict[str, Any]]": ...
425
+
426
+ def fetch(
427
+ self,
428
+ statement: "Statement | QueryBuilder",
429
+ /,
430
+ *parameters: "StatementParameters | StatementFilter",
431
+ schema_type: "type[SchemaT] | None" = None,
432
+ statement_config: "StatementConfig | None" = None,
433
+ **kwargs: Any,
434
+ ) -> "list[SchemaT] | list[dict[str, Any]]":
435
+ """Execute a select statement and return all rows.
436
+
437
+ This is an alias for :meth:`select` provided for users familiar
438
+ with asyncpg's fetch() naming convention.
439
+
440
+ See Also:
441
+ select(): Primary method with identical behavior
442
+
443
+ """
444
+ return self.select(statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs)
445
+
446
+ @overload
447
+ def select_one(
448
+ self,
449
+ statement: "Statement | QueryBuilder",
450
+ /,
451
+ *parameters: "StatementParameters | StatementFilter",
452
+ schema_type: "type[SchemaT]",
453
+ statement_config: "StatementConfig | None" = None,
454
+ **kwargs: Any,
455
+ ) -> "SchemaT": ...
456
+
457
+ @overload
458
+ def select_one(
459
+ self,
460
+ statement: "Statement | QueryBuilder",
461
+ /,
462
+ *parameters: "StatementParameters | StatementFilter",
463
+ schema_type: None = None,
464
+ statement_config: "StatementConfig | None" = None,
465
+ **kwargs: Any,
466
+ ) -> "dict[str, Any]": ...
467
+
468
+ def select_one(
469
+ self,
470
+ statement: "Statement | QueryBuilder",
471
+ /,
472
+ *parameters: "StatementParameters | StatementFilter",
473
+ schema_type: "type[SchemaT] | None" = None,
474
+ statement_config: "StatementConfig | None" = None,
475
+ **kwargs: Any,
476
+ ) -> "SchemaT | dict[str, Any]":
477
+ """Execute a select statement and return exactly one row.
478
+
479
+ Raises an exception if no rows or more than one row is returned.
480
+ """
481
+ result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
482
+ try:
483
+ return result.one(schema_type=schema_type)
484
+ except ValueError as error:
485
+ handle_single_row_error(error)
486
+
487
+ @overload
488
+ def fetch_one(
489
+ self,
490
+ statement: "Statement | QueryBuilder",
491
+ /,
492
+ *parameters: "StatementParameters | StatementFilter",
493
+ schema_type: "type[SchemaT]",
494
+ statement_config: "StatementConfig | None" = None,
495
+ **kwargs: Any,
496
+ ) -> "SchemaT": ...
497
+
498
+ @overload
499
+ def fetch_one(
500
+ self,
501
+ statement: "Statement | QueryBuilder",
502
+ /,
503
+ *parameters: "StatementParameters | StatementFilter",
504
+ schema_type: None = None,
505
+ statement_config: "StatementConfig | None" = None,
506
+ **kwargs: Any,
507
+ ) -> "dict[str, Any]": ...
508
+
509
+ def fetch_one(
510
+ self,
511
+ statement: "Statement | QueryBuilder",
512
+ /,
513
+ *parameters: "StatementParameters | StatementFilter",
514
+ schema_type: "type[SchemaT] | None" = None,
515
+ statement_config: "StatementConfig | None" = None,
516
+ **kwargs: Any,
517
+ ) -> "SchemaT | dict[str, Any]":
518
+ """Execute a select statement and return exactly one row.
519
+
520
+ This is an alias for :meth:`select_one` provided for users familiar
521
+ with asyncpg's fetch_one() naming convention.
522
+
523
+ Raises an exception if no rows or more than one row is returned.
524
+
525
+ See Also:
526
+ select_one(): Primary method with identical behavior
527
+
528
+ """
529
+ return self.select_one(
530
+ statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
531
+ )
532
+
533
+ @overload
534
+ def select_one_or_none(
535
+ self,
536
+ statement: "Statement | QueryBuilder",
537
+ /,
538
+ *parameters: "StatementParameters | StatementFilter",
539
+ schema_type: "type[SchemaT]",
540
+ statement_config: "StatementConfig | None" = None,
541
+ **kwargs: Any,
542
+ ) -> "SchemaT | None": ...
543
+
544
+ @overload
545
+ def select_one_or_none(
546
+ self,
547
+ statement: "Statement | QueryBuilder",
548
+ /,
549
+ *parameters: "StatementParameters | StatementFilter",
550
+ schema_type: None = None,
551
+ statement_config: "StatementConfig | None" = None,
552
+ **kwargs: Any,
553
+ ) -> "dict[str, Any] | None": ...
554
+
555
+ def select_one_or_none(
556
+ self,
557
+ statement: "Statement | QueryBuilder",
558
+ /,
559
+ *parameters: "StatementParameters | StatementFilter",
560
+ schema_type: "type[SchemaT] | None" = None,
561
+ statement_config: "StatementConfig | None" = None,
562
+ **kwargs: Any,
563
+ ) -> "SchemaT | dict[str, Any] | None":
564
+ """Execute a select statement and return at most one row.
565
+
566
+ Returns None if no rows are found. Raises ``ValueError`` if more than one
567
+ row is returned. Any database or SQL execution errors raised by the driver
568
+ are propagated unchanged.
569
+ """
570
+ result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
571
+ return result.one_or_none(schema_type=schema_type)
572
+
573
+ @overload
574
+ def fetch_one_or_none(
575
+ self,
576
+ statement: "Statement | QueryBuilder",
577
+ /,
578
+ *parameters: "StatementParameters | StatementFilter",
579
+ schema_type: "type[SchemaT]",
580
+ statement_config: "StatementConfig | None" = None,
581
+ **kwargs: Any,
582
+ ) -> "SchemaT | None": ...
583
+
584
+ @overload
585
+ def fetch_one_or_none(
586
+ self,
587
+ statement: "Statement | QueryBuilder",
588
+ /,
589
+ *parameters: "StatementParameters | StatementFilter",
590
+ schema_type: None = None,
591
+ statement_config: "StatementConfig | None" = None,
592
+ **kwargs: Any,
593
+ ) -> "dict[str, Any] | None": ...
594
+
595
+ def fetch_one_or_none(
596
+ self,
597
+ statement: "Statement | QueryBuilder",
598
+ /,
599
+ *parameters: "StatementParameters | StatementFilter",
600
+ schema_type: "type[SchemaT] | None" = None,
601
+ statement_config: "StatementConfig | None" = None,
602
+ **kwargs: Any,
603
+ ) -> "SchemaT | dict[str, Any] | None":
604
+ """Execute a select statement and return at most one row.
605
+
606
+ This is an alias for :meth:`select_one_or_none` provided for users familiar
607
+ with asyncpg's fetch_one_or_none() naming convention.
608
+
609
+ Returns None if no rows are found.
610
+ Raises an exception if more than one row is returned.
611
+
612
+ See Also:
613
+ select_one_or_none(): Primary method with identical behavior
614
+
615
+ """
616
+ return self.select_one_or_none(
617
+ statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
618
+ )
619
+
620
+ def select_value(
621
+ self,
622
+ statement: "Statement | QueryBuilder",
623
+ /,
624
+ *parameters: "StatementParameters | StatementFilter",
625
+ statement_config: "StatementConfig | None" = None,
626
+ **kwargs: Any,
627
+ ) -> Any:
628
+ """Execute a select statement and return a single scalar value.
629
+
630
+ Expects exactly one row with one column.
631
+ Raises an exception if no rows or more than one row/column is returned.
632
+ """
633
+ result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
634
+ try:
635
+ return result.scalar()
636
+ except ValueError as error:
637
+ handle_single_row_error(error)
638
+
639
+ def fetch_value(
640
+ self,
641
+ statement: "Statement | QueryBuilder",
642
+ /,
643
+ *parameters: "StatementParameters | StatementFilter",
644
+ statement_config: "StatementConfig | None" = None,
645
+ **kwargs: Any,
646
+ ) -> Any:
647
+ """Execute a select statement and return a single scalar value.
648
+
649
+ This is an alias for :meth:`select_value` provided for users familiar
650
+ with asyncpg's fetch_value() naming convention.
651
+
652
+ Expects exactly one row with one column.
653
+ Raises an exception if no rows or more than one row/column is returned.
654
+
655
+ See Also:
656
+ select_value(): Primary method with identical behavior
657
+
658
+ """
659
+ return self.select_value(statement, *parameters, statement_config=statement_config, **kwargs)
660
+
661
+ def select_value_or_none(
662
+ self,
663
+ statement: "Statement | QueryBuilder",
664
+ /,
665
+ *parameters: "StatementParameters | StatementFilter",
666
+ statement_config: "StatementConfig | None" = None,
667
+ **kwargs: Any,
668
+ ) -> Any:
669
+ """Execute a select statement and return a single scalar value or None.
670
+
671
+ Returns None if no rows are found.
672
+ Expects at most one row with one column.
673
+ Raises an exception if more than one row is returned.
674
+ """
675
+ result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
676
+ return result.scalar_or_none()
677
+
678
+ def fetch_value_or_none(
679
+ self,
680
+ statement: "Statement | QueryBuilder",
681
+ /,
682
+ *parameters: "StatementParameters | StatementFilter",
683
+ statement_config: "StatementConfig | None" = None,
684
+ **kwargs: Any,
685
+ ) -> Any:
686
+ """Execute a select statement and return a single scalar value or None.
687
+
688
+ This is an alias for :meth:`select_value_or_none` provided for users familiar
689
+ with asyncpg's fetch_value_or_none() naming convention.
690
+
691
+ Returns None if no rows are found.
692
+ Expects at most one row with one column.
693
+ Raises an exception if more than one row is returned.
694
+
695
+ See Also:
696
+ select_value_or_none(): Primary method with identical behavior
697
+
698
+ """
699
+ return self.select_value_or_none(statement, *parameters, statement_config=statement_config, **kwargs)
700
+
701
+ @overload
702
+ def select_with_total(
703
+ self,
704
+ statement: "Statement | QueryBuilder",
705
+ /,
706
+ *parameters: "StatementParameters | StatementFilter",
707
+ schema_type: "type[SchemaT]",
708
+ statement_config: "StatementConfig | None" = None,
709
+ **kwargs: Any,
710
+ ) -> "tuple[list[SchemaT], int]": ...
711
+
712
+ @overload
713
+ def select_with_total(
714
+ self,
715
+ statement: "Statement | QueryBuilder",
716
+ /,
717
+ *parameters: "StatementParameters | StatementFilter",
718
+ schema_type: None = None,
719
+ statement_config: "StatementConfig | None" = None,
720
+ **kwargs: Any,
721
+ ) -> "tuple[list[dict[str, Any]], int]": ...
722
+
723
+ def select_with_total(
724
+ self,
725
+ statement: "Statement | QueryBuilder",
726
+ /,
727
+ *parameters: "StatementParameters | StatementFilter",
728
+ schema_type: "type[SchemaT] | None" = None,
729
+ statement_config: "StatementConfig | None" = None,
730
+ **kwargs: Any,
731
+ ) -> "tuple[list[SchemaT] | list[dict[str, Any]], int]":
732
+ """Execute a select statement and return both the data and total count.
733
+
734
+ This method is designed for pagination scenarios where you need both
735
+ the current page of data and the total number of rows that match the query.
736
+
737
+ Args:
738
+ statement: The SQL statement, QueryBuilder, or raw SQL string
739
+ *parameters: Parameters for the SQL statement
740
+ schema_type: Optional schema type for data transformation
741
+ statement_config: Optional SQL configuration
742
+ **kwargs: Additional keyword arguments
743
+
744
+ Returns:
745
+ A tuple containing:
746
+ - List of data rows (transformed by schema_type if provided)
747
+ - Total count of rows matching the query (ignoring LIMIT/OFFSET)
748
+
749
+ """
750
+ sql_statement = self.prepare_statement(
751
+ statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs
752
+ )
753
+ count_result = self.dispatch_statement_execution(self._create_count_query(sql_statement), self.connection)
754
+ select_result = self.execute(sql_statement)
755
+
756
+ return (select_result.get_data(schema_type=schema_type), count_result.scalar())
757
+
758
+ @overload
759
+ def fetch_with_total(
760
+ self,
761
+ statement: "Statement | QueryBuilder",
762
+ /,
763
+ *parameters: "StatementParameters | StatementFilter",
764
+ schema_type: "type[SchemaT]",
765
+ statement_config: "StatementConfig | None" = None,
766
+ **kwargs: Any,
767
+ ) -> "tuple[list[SchemaT], int]": ...
768
+
769
+ @overload
770
+ def fetch_with_total(
771
+ self,
772
+ statement: "Statement | QueryBuilder",
773
+ /,
774
+ *parameters: "StatementParameters | StatementFilter",
775
+ schema_type: None = None,
776
+ statement_config: "StatementConfig | None" = None,
777
+ **kwargs: Any,
778
+ ) -> "tuple[list[dict[str, Any]], int]": ...
779
+
780
+ def fetch_with_total(
781
+ self,
782
+ statement: "Statement | QueryBuilder",
783
+ /,
784
+ *parameters: "StatementParameters | StatementFilter",
785
+ schema_type: "type[SchemaT] | None" = None,
786
+ statement_config: "StatementConfig | None" = None,
787
+ **kwargs: Any,
788
+ ) -> "tuple[list[SchemaT] | list[dict[str, Any]], int]":
789
+ """Execute a select statement and return both the data and total count.
790
+
791
+ This is an alias for :meth:`select_with_total` provided for users familiar
792
+ with asyncpg's fetch() naming convention.
793
+
794
+ This method is designed for pagination scenarios where you need both
795
+ the current page of data and the total number of rows that match the query.
796
+
797
+ See Also:
798
+ select_with_total(): Primary method with identical behavior and full documentation
799
+
800
+ """
801
+ return self.select_with_total(
802
+ statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
803
+ )
804
+
805
+ # ─────────────────────────────────────────────────────────────────────────────
806
+ # ARROW API METHODS
807
+ # ─────────────────────────────────────────────────────────────────────────────
808
+
809
+ def select_to_arrow(
810
+ self,
811
+ statement: "Statement | QueryBuilder",
812
+ /,
813
+ *parameters: "StatementParameters | StatementFilter",
814
+ statement_config: "StatementConfig | None" = None,
815
+ return_format: "ArrowReturnFormat" = "table",
816
+ native_only: bool = False,
817
+ batch_size: int | None = None,
818
+ arrow_schema: Any = None,
819
+ **kwargs: Any,
820
+ ) -> "ArrowResult":
821
+ """Execute query and return results as Apache Arrow format.
822
+
823
+ This base implementation uses the conversion path: execute() → dict → Arrow.
824
+ Adapters with native Arrow support (ADBC, DuckDB, BigQuery) override this
825
+ method to use zero-copy native paths for 5-10x performance improvement.
826
+
827
+ Args:
828
+ statement: SQL query string, Statement, or QueryBuilder
829
+ *parameters: Query parameters (same format as execute()/select())
830
+ statement_config: Optional statement configuration override
831
+ return_format: "table" for pyarrow.Table (default), "batch" for single RecordBatch,
832
+ "batches" for iterator of RecordBatches, "reader" for RecordBatchReader
833
+ native_only: If True, raise error if native Arrow unavailable (default: False)
834
+ batch_size: Rows per batch for "batch"/"batches" format (default: None = all rows)
835
+ arrow_schema: Optional pyarrow.Schema for type casting
836
+ **kwargs: Additional keyword arguments
837
+
838
+ Returns:
839
+ ArrowResult containing pyarrow.Table, RecordBatchReader, or RecordBatches
840
+
841
+ Raises:
842
+ ImproperConfigurationError: If native_only=True and adapter doesn't support native Arrow
843
+
844
+ Examples:
845
+ >>> result = driver.select_to_arrow(
846
+ ... "SELECT * FROM users WHERE age > ?", 18
847
+ ... )
848
+ >>> df = result.to_pandas()
849
+ >>> print(df.head())
850
+
851
+ >>> # Force native Arrow path (raises error if unavailable)
852
+ >>> result = driver.select_to_arrow(
853
+ ... "SELECT * FROM users", native_only=True
854
+ ... )
855
+
856
+ """
857
+ if native_only:
858
+ msg = (
859
+ f"Adapter '{self.__class__.__name__}' does not support native Arrow results. "
860
+ f"Use native_only=False to allow conversion path, or switch to an adapter "
861
+ f"with native Arrow support (ADBC, DuckDB, BigQuery)."
862
+ )
863
+ raise ImproperConfigurationError(msg)
864
+
865
+ result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
866
+
867
+ arrow_data = convert_dict_to_arrow_with_schema(
868
+ result.data, return_format=return_format, batch_size=batch_size, arrow_schema=arrow_schema
869
+ )
870
+
871
+ return create_arrow_result(
872
+ statement=result.statement,
873
+ data=arrow_data,
874
+ rows_affected=result.rows_affected,
875
+ last_inserted_id=result.last_inserted_id,
876
+ execution_time=result.execution_time,
877
+ metadata=result.metadata,
878
+ )
879
+
880
+ def fetch_to_arrow(
881
+ self,
882
+ statement: "Statement | QueryBuilder",
883
+ /,
884
+ *parameters: "StatementParameters | StatementFilter",
885
+ statement_config: "StatementConfig | None" = None,
886
+ return_format: "ArrowReturnFormat" = "table",
887
+ native_only: bool = False,
888
+ batch_size: int | None = None,
889
+ arrow_schema: Any = None,
890
+ **kwargs: Any,
891
+ ) -> "ArrowResult":
892
+ """Execute query and return results as Apache Arrow format.
893
+
894
+ This is an alias for :meth:`select_to_arrow` provided for users familiar
895
+ with asyncpg's fetch() naming convention.
896
+
897
+ See Also:
898
+ select_to_arrow(): Primary method with identical behavior and full documentation
899
+
900
+ """
901
+ return self.select_to_arrow(
902
+ statement,
903
+ *parameters,
904
+ statement_config=statement_config,
905
+ return_format=return_format,
906
+ native_only=native_only,
907
+ batch_size=batch_size,
908
+ arrow_schema=arrow_schema,
909
+ **kwargs,
910
+ )
911
+
912
+ # ─────────────────────────────────────────────────────────────────────────────
913
+ # STACK EXECUTION
914
+ # ─────────────────────────────────────────────────────────────────────────────
915
+
916
    def execute_stack(self, stack: "StatementStack", *, continue_on_error: bool = False) -> "tuple[StackResult, ...]":
        """Execute a StatementStack sequentially using the adapter's primitives.

        Args:
            stack: Non-empty StatementStack whose operations run in order.
            continue_on_error: When True, each successful operation is
                committed individually and failures are recorded as error
                results instead of aborting. When False (default), all
                operations share one transaction that is rolled back on the
                first failure.

        Returns:
            Tuple of StackResult objects, one per operation.

        Raises:
            TypeError: If ``stack`` is not a StatementStack.
            ValueError: If ``stack`` is empty.
            StackExecutionError: On first failure when continue_on_error is False.
        """
        if not isinstance(stack, StatementStack):
            msg = "execute_stack expects a StatementStack instance"
            raise TypeError(msg)
        if not stack:
            msg = "Cannot execute an empty StatementStack"
            raise ValueError(msg)

        results: list[StackResult] = []
        # Fail-fast mode batches the whole stack into a single transaction.
        single_transaction = not continue_on_error

        with StackExecutionObserver(self, stack, continue_on_error, native_pipeline=False) as observer:
            # Track whether *we* opened the transaction; never commit/rollback
            # a transaction the caller already had in flight.
            started_transaction = False

            try:
                if single_transaction and not self._connection_in_transaction():
                    self.begin()
                    started_transaction = True

                for index, operation in enumerate(stack.operations):
                    try:
                        result = self._execute_stack_operation(operation)
                    except Exception as exc:  # pragma: no cover - exercised via tests
                        stack_error = StackExecutionError(
                            index,
                            describe_stack_statement(operation.statement),
                            exc,
                            adapter=type(self).__name__,
                            mode="continue-on-error" if continue_on_error else "fail-fast",
                        )

                        # Fail-fast: unwind our own transaction before surfacing
                        # the error; a rollback failure is logged, not raised.
                        if started_transaction and not continue_on_error:
                            try:
                                self.rollback()
                            except Exception as rollback_error:  # pragma: no cover - diagnostics only
                                logger.debug("Rollback after stack failure failed: %s", rollback_error)
                            started_transaction = False

                        if continue_on_error:
                            # Clear any broken connection state, record the
                            # failure, and move on to the next operation.
                            self._rollback_after_stack_error()
                            observer.record_operation_error(stack_error)
                            results.append(StackResult.from_error(stack_error))
                            continue

                        raise stack_error from exc

                    results.append(StackResult(result=result))

                    if continue_on_error:
                        # Per-operation commit keeps earlier successes durable
                        # even if a later operation fails.
                        self._commit_after_stack_operation()

                if started_transaction:
                    self.commit()
            except Exception:
                # Catches failures outside the per-operation handler
                # (begin/commit errors, observer errors, re-raised stack errors).
                if started_transaction:
                    try:
                        self.rollback()
                    except Exception as rollback_error:  # pragma: no cover - diagnostics only
                        logger.debug("Rollback after stack failure failed: %s", rollback_error)
                raise

        return tuple(results)
979
+
980
+ # ─────────────────────────────────────────────────────────────────────────────
981
+ # STORAGE API METHODS
982
+ # ─────────────────────────────────────────────────────────────────────────────
983
+
984
+ def select_to_storage(
985
+ self,
986
+ statement: "SQL | str",
987
+ destination: "StorageDestination",
988
+ /,
989
+ *parameters: "StatementParameters | StatementFilter",
990
+ statement_config: "StatementConfig | None" = None,
991
+ partitioner: "dict[str, object] | None" = None,
992
+ format_hint: "StorageFormat | None" = None,
993
+ telemetry: "StorageTelemetry | None" = None,
994
+ ) -> "StorageBridgeJob":
995
+ """Stream a SELECT statement directly into storage.
996
+
997
+ Args:
998
+ statement: SQL statement to execute.
999
+ destination: Storage destination path.
1000
+ parameters: Query parameters.
1001
+ statement_config: Optional statement configuration.
1002
+ partitioner: Optional partitioner configuration.
1003
+ format_hint: Optional format hint for storage.
1004
+ telemetry: Optional telemetry dict to merge.
1005
+
1006
+ Returns:
1007
+ StorageBridgeJob with execution telemetry.
1008
+
1009
+ Raises:
1010
+ StorageCapabilityError: If not implemented.
1011
+
1012
+ """
1013
+ self._raise_storage_not_implemented("select_to_storage")
1014
+ raise NotImplementedError
1015
+
1016
+ def load_from_arrow(
1017
+ self,
1018
+ table: str,
1019
+ source: "ArrowResult | Any",
1020
+ *,
1021
+ partitioner: "dict[str, object] | None" = None,
1022
+ overwrite: bool = False,
1023
+ ) -> "StorageBridgeJob":
1024
+ """Load Arrow data into the target table.
1025
+
1026
+ Args:
1027
+ table: Target table name.
1028
+ source: Arrow data source.
1029
+ partitioner: Optional partitioner configuration.
1030
+ overwrite: Whether to overwrite existing data.
1031
+
1032
+ Returns:
1033
+ StorageBridgeJob with execution telemetry.
1034
+
1035
+ Raises:
1036
+ StorageCapabilityError: If not implemented.
1037
+
1038
+ """
1039
+ self._raise_storage_not_implemented("load_from_arrow")
1040
+ raise NotImplementedError
1041
+
1042
+ def load_from_storage(
1043
+ self,
1044
+ table: str,
1045
+ source: "StorageDestination",
1046
+ *,
1047
+ file_format: "StorageFormat",
1048
+ partitioner: "dict[str, object] | None" = None,
1049
+ overwrite: bool = False,
1050
+ ) -> "StorageBridgeJob":
1051
+ """Load artifacts from storage into the target table.
1052
+
1053
+ Args:
1054
+ table: Target table name.
1055
+ source: Storage source path.
1056
+ file_format: File format of source.
1057
+ partitioner: Optional partitioner configuration.
1058
+ overwrite: Whether to overwrite existing data.
1059
+
1060
+ Returns:
1061
+ StorageBridgeJob with execution telemetry.
1062
+
1063
+ Raises:
1064
+ StorageCapabilityError: If not implemented.
1065
+
1066
+ """
1067
+ self._raise_storage_not_implemented("load_from_storage")
1068
+ raise NotImplementedError
1069
+
1070
+ def stage_artifact(self, request: "dict[str, Any]") -> "dict[str, Any]":
1071
+ """Provision staging metadata for adapters that require remote URIs.
1072
+
1073
+ Args:
1074
+ request: Staging request configuration.
1075
+
1076
+ Returns:
1077
+ Staging metadata dict.
1078
+
1079
+ Raises:
1080
+ StorageCapabilityError: If not implemented.
1081
+
1082
+ """
1083
+ self._raise_storage_not_implemented("stage_artifact")
1084
+ raise NotImplementedError
1085
+
1086
+ def flush_staging_artifacts(self, artifacts: "list[dict[str, Any]]", *, error: Exception | None = None) -> None:
1087
+ """Clean up staged artifacts after a job completes.
1088
+
1089
+ Args:
1090
+ artifacts: List of staging artifacts to clean up.
1091
+ error: Optional error that triggered cleanup.
1092
+
1093
+ """
1094
+ if artifacts:
1095
+ self._raise_storage_not_implemented("flush_staging_artifacts")
1096
+
1097
+ def get_storage_job(self, job_id: str) -> "StorageBridgeJob | None":
1098
+ """Fetch a previously created job handle.
1099
+
1100
+ Args:
1101
+ job_id: Job identifier.
1102
+
1103
+ Returns:
1104
+ StorageBridgeJob if found, None otherwise.
1105
+
1106
+ """
1107
+ return None
1108
+
1109
+ # ─────────────────────────────────────────────────────────────────────────────
1110
+ # UTILITY METHODS
1111
+ # ─────────────────────────────────────────────────────────────────────────────
1112
+
1113
+ def convert_to_dialect(
1114
+ self, statement: "Statement", to_dialect: "DialectType | None" = None, pretty: bool = DEFAULT_PRETTY
1115
+ ) -> str:
1116
+ """Convert a statement to a target SQL dialect.
1117
+
1118
+ Args:
1119
+ statement: SQL statement to convert.
1120
+ to_dialect: Target dialect (defaults to current dialect).
1121
+ pretty: Whether to format the output SQL.
1122
+
1123
+ Returns:
1124
+ SQL string in target dialect.
1125
+
1126
+ """
1127
+ return _convert_to_dialect_impl(statement, self.dialect, to_dialect, pretty)
1128
+
1129
+ # ─────────────────────────────────────────────────────────────────────────────
1130
+ # PRIVATE/INTERNAL METHODS
1131
+ # ─────────────────────────────────────────────────────────────────────────────
1132
+
1133
+ def _connection_in_transaction(self) -> bool:
1134
+ """Check if the connection is inside a transaction.
1135
+
1136
+ Each adapter MUST override this method with direct attribute access
1137
+ for optimal mypyc performance. Do not use getattr chains.
1138
+
1139
+ Raises:
1140
+ NotImplementedError: Always - subclasses must override.
1141
+
1142
+ """
1143
+ msg = "Adapters must override _connection_in_transaction()"
1144
+ raise NotImplementedError(msg)
1145
+
1146
+ def _execute_stack_operation(self, operation: "StackOperation") -> "SQLResult | ArrowResult | None":
1147
+ kwargs = dict(operation.keyword_arguments) if operation.keyword_arguments else {}
1148
+
1149
+ if operation.method == "execute":
1150
+ return self.execute(operation.statement, *operation.arguments, **kwargs)
1151
+
1152
+ if operation.method == "execute_many":
1153
+ if not operation.arguments:
1154
+ msg = "execute_many stack operation requires parameter sets"
1155
+ raise ValueError(msg)
1156
+ parameter_sets = operation.arguments[0]
1157
+ filters = operation.arguments[1:]
1158
+ return self.execute_many(operation.statement, parameter_sets, *filters, **kwargs)
1159
+
1160
+ if operation.method == "execute_script":
1161
+ return self.execute_script(operation.statement, *operation.arguments, **kwargs)
1162
+
1163
+ if operation.method == "execute_arrow":
1164
+ return self.select_to_arrow(operation.statement, *operation.arguments, **kwargs)
1165
+
1166
+ msg = f"Unsupported stack operation method: {operation.method}"
1167
+ raise ValueError(msg)
1168
+
1169
+ def _rollback_after_stack_error(self) -> None:
1170
+ """Attempt to rollback after a stack operation error to clear connection state."""
1171
+ try:
1172
+ self.rollback()
1173
+ except Exception as rollback_error: # pragma: no cover - driver-specific cleanup
1174
+ logger.debug("Rollback after stack error failed: %s", rollback_error)
1175
+
1176
+ def _commit_after_stack_operation(self) -> None:
1177
+ """Attempt to commit after a successful stack operation when not batching."""
1178
+ try:
1179
+ self.commit()
1180
+ except Exception as commit_error: # pragma: no cover - driver-specific cleanup
1181
+ logger.debug("Commit after stack operation failed: %s", commit_error)
1182
+
1183
+ def _storage_pipeline(self) -> "SyncStoragePipeline":
1184
+ """Get or create a sync storage pipeline.
1185
+
1186
+ Returns:
1187
+ SyncStoragePipeline instance.
1188
+
1189
+ """
1190
+ factory = self.storage_pipeline_factory
1191
+ if factory is None:
1192
+ return SyncStoragePipeline()
1193
+ return cast("SyncStoragePipeline", factory())
1194
+
1195
    def _write_result_to_storage_sync(
        self,
        result: "ArrowResult",
        destination: "StorageDestination",
        *,
        format_hint: "StorageFormat | None" = None,
        storage_options: "dict[str, Any] | None" = None,
        pipeline: "SyncStoragePipeline | None" = None,
    ) -> "StorageTelemetry":
        """Write Arrow result to storage with telemetry.

        Wraps the write in an observability span: the span is closed with the
        error on failure, or with annotated telemetry on success.

        Args:
            result: Arrow result to write.
            destination: Storage destination.
            format_hint: Optional format hint.
            storage_options: Optional storage options.
            pipeline: Optional storage pipeline.

        Returns:
            StorageTelemetry with write metrics.

        """
        runtime = self.observability
        # Open the span before the write so failures are attributed to it.
        span = runtime.start_storage_span(
            "write", destination=stringify_storage_target(destination), format_label=format_hint
        )
        try:
            telemetry = result.write_to_storage_sync(
                destination, format_hint=format_hint, storage_options=storage_options, pipeline=pipeline
            )
        except Exception as exc:
            # Close the span with the error before propagating.
            runtime.end_storage_span(span, error=exc)
            raise
        # Annotate first so the span records the enriched telemetry.
        telemetry = runtime.annotate_storage_telemetry(telemetry)
        runtime.end_storage_span(span, telemetry=telemetry)
        return telemetry
1231
+
1232
    def _read_arrow_from_storage_sync(
        self,
        source: "StorageDestination",
        *,
        file_format: "StorageFormat",
        storage_options: "dict[str, Any] | None" = None,
    ) -> "tuple[ArrowTable, StorageTelemetry]":
        """Read Arrow table from storage with telemetry.

        Wraps the read in an observability span: the span is closed with the
        error on failure, or with annotated telemetry on success.

        Args:
            source: Storage source path.
            file_format: File format to read.
            storage_options: Optional storage options.

        Returns:
            Tuple of (ArrowTable, StorageTelemetry).

        """
        runtime = self.observability
        # Open the span before the read so failures are attributed to it.
        span = runtime.start_storage_span(
            "read", destination=stringify_storage_target(source), format_label=file_format
        )
        pipeline = self._storage_pipeline()
        try:
            table, telemetry = pipeline.read_arrow(source, file_format=file_format, storage_options=storage_options)
        except Exception as exc:
            # Close the span with the error before propagating.
            runtime.end_storage_span(span, error=exc)
            raise
        # Annotate first so the span records the enriched telemetry.
        telemetry = runtime.annotate_storage_telemetry(telemetry)
        runtime.end_storage_span(span, telemetry=telemetry)
        return table, telemetry
1263
+
1264
+ def _coerce_arrow_table(self, source: "ArrowResult | Any") -> "ArrowTable":
1265
+ """Coerce various sources to a PyArrow Table.
1266
+
1267
+ Args:
1268
+ source: ArrowResult, PyArrow Table, RecordBatch, or iterable of dicts.
1269
+
1270
+ Returns:
1271
+ PyArrow Table.
1272
+
1273
+ """
1274
+ return coerce_arrow_table(source)
1275
+
1276
+ @staticmethod
1277
+ def _arrow_table_to_rows(
1278
+ table: "ArrowTable", columns: "list[str] | None" = None
1279
+ ) -> "tuple[list[str], list[tuple[Any, ...]]]":
1280
+ """Convert Arrow table to column names and row tuples.
1281
+
1282
+ Args:
1283
+ table: Arrow table to convert.
1284
+ columns: Optional list of columns to extract.
1285
+
1286
+ Returns:
1287
+ Tuple of (column_names, list of row tuples).
1288
+
1289
+ """
1290
+ return arrow_table_to_rows(table, columns)
1291
+
1292
+ @staticmethod
1293
+ def _build_ingest_telemetry(table: "ArrowTable", *, format_label: str = "arrow") -> "StorageTelemetry":
1294
+ """Build telemetry dict from Arrow table statistics.
1295
+
1296
+ Args:
1297
+ table: Arrow table to extract statistics from.
1298
+ format_label: Format label for telemetry.
1299
+
1300
+ Returns:
1301
+ StorageTelemetry dict with row/byte counts.
1302
+
1303
+ """
1304
+ return build_ingest_telemetry(table, format_label=format_label)
1305
+
1306
+ def _attach_partition_telemetry(
1307
+ self, telemetry: "StorageTelemetry", partitioner: "dict[str, object] | None"
1308
+ ) -> None:
1309
+ """Attach partitioner info to telemetry dict.
1310
+
1311
+ Args:
1312
+ telemetry: Telemetry dict to update.
1313
+ partitioner: Partitioner configuration or None.
1314
+
1315
+ """
1316
+ attach_partition_telemetry(telemetry, partitioner)
1317
+
1318
+ def _create_storage_job(
1319
+ self, produced: "StorageTelemetry", provided: "StorageTelemetry | None" = None, *, status: str = "completed"
1320
+ ) -> "StorageBridgeJob":
1321
+ """Create a StorageBridgeJob from telemetry data.
1322
+
1323
+ Args:
1324
+ produced: Telemetry from the production side of the operation.
1325
+ provided: Optional telemetry from the source side.
1326
+ status: Job status string.
1327
+
1328
+ Returns:
1329
+ StorageBridgeJob instance.
1330
+
1331
+ """
1332
+ return create_storage_job(produced, provided, status=status)
1333
+
1334
+
1335
@mypyc_attr(allow_interpreted_subclasses=True, native_class=False)
class SyncDataDictionaryBase:
    """Base class for synchronous data dictionary implementations.

    Uses Python-compatible class layouts for cross-module inheritance.
    Child classes define dialect as a class attribute.
    """

    # Detected versions keyed by id(driver); only populated on success.
    _version_cache: "dict[int, VersionInfo | None]"
    # Driver ids for which detection was already attempted (even if it
    # failed), so detection is not retried on every call.
    _version_fetch_attempted: "set[int]"

    dialect: "ClassVar[str]"
    """Dialect identifier. Must be defined by subclasses as a class attribute."""

    def __init__(self) -> None:
        self._version_cache = {}
        self._version_fetch_attempted = set()
1352
+
1353
+ # ─────────────────────────────────────────────────────────────────────────────
1354
+ # DIALECT SQL METHODS (merged from DialectSQLMixin)
1355
+ # ─────────────────────────────────────────────────────────────────────────────
1356
+
1357
+ def get_dialect_config(self) -> "DialectConfig":
1358
+ """Return the dialect configuration for this data dictionary."""
1359
+ return get_dialect_config(type(self).dialect)
1360
+
1361
+ def get_query(self, name: str) -> "SQL":
1362
+ """Return a named SQL query for this dialect."""
1363
+ loader = get_data_dictionary_loader()
1364
+ return loader.get_query(type(self).dialect, name)
1365
+
1366
+ def get_query_text(self, name: str) -> str:
1367
+ """Return raw SQL text for a named query for this dialect."""
1368
+ loader = get_data_dictionary_loader()
1369
+ return loader.get_query_text(type(self).dialect, name)
1370
+
1371
+ def get_query_text_or_none(self, name: str) -> "str | None":
1372
+ """Return raw SQL text for a named query or None if missing."""
1373
+ try:
1374
+ return self.get_query_text(name)
1375
+ except SQLFileNotFoundError:
1376
+ return None
1377
+
1378
+ def resolve_schema(self, schema: "str | None") -> "str | None":
1379
+ """Return a schema name using dialect defaults when missing."""
1380
+ if schema is not None:
1381
+ return schema
1382
+ config = self.get_dialect_config()
1383
+ return config.default_schema
1384
+
1385
+ def resolve_feature_flag(self, feature: str, version: "VersionInfo | None") -> bool:
1386
+ """Resolve a feature flag using dialect config and version info."""
1387
+ config = self.get_dialect_config()
1388
+ flag = config.get_feature_flag(feature)
1389
+ if flag is not None:
1390
+ return flag
1391
+ required_version = config.get_feature_version(feature)
1392
+ if required_version is None or version is None:
1393
+ return False
1394
+ return bool(version >= required_version)
1395
+
1396
+ # ─────────────────────────────────────────────────────────────────────────────
1397
+ # VERSION CACHING METHODS (inlined from DataDictionaryMixin)
1398
+ # ─────────────────────────────────────────────────────────────────────────────
1399
+
1400
+ def get_cached_version(self, driver_id: int) -> object:
1401
+ """Get cached version info for a driver.
1402
+
1403
+ Args:
1404
+ driver_id: The id() of the driver instance.
1405
+
1406
+ Returns:
1407
+ Tuple of (was_cached, version_info). If was_cached is False,
1408
+ the caller should fetch the version and call cache_version().
1409
+ """
1410
+ if driver_id in self._version_fetch_attempted:
1411
+ return True, self._version_cache.get(driver_id)
1412
+ return False, None
1413
+
1414
+ def cache_version(self, driver_id: int, version: "VersionInfo | None") -> None:
1415
+ """Cache version info for a driver.
1416
+
1417
+ Args:
1418
+ driver_id: The id() of the driver instance.
1419
+ version: The version info to cache (can be None if detection failed).
1420
+ """
1421
+ self._version_fetch_attempted.add(driver_id)
1422
+ if version is not None:
1423
+ self._version_cache[driver_id] = version
1424
+
1425
+ def parse_version_string(self, version_str: str) -> "VersionInfo | None":
1426
+ """Parse version string into VersionInfo.
1427
+
1428
+ Args:
1429
+ version_str: Raw version string from database
1430
+
1431
+ Returns:
1432
+ VersionInfo instance or None if parsing fails
1433
+ """
1434
+ patterns = [r"(\d+)\.(\d+)\.(\d+)", r"(\d+)\.(\d+)", r"(\d+)"]
1435
+ for pattern in patterns:
1436
+ match = re.search(pattern, version_str)
1437
+ if match:
1438
+ groups = match.groups()
1439
+ major = int(groups[0])
1440
+ minor = int(groups[1]) if len(groups) > VERSION_GROUPS_MIN_FOR_MINOR else 0
1441
+ patch = int(groups[2]) if len(groups) > VERSION_GROUPS_MIN_FOR_PATCH else 0
1442
+ return VersionInfo(major, minor, patch)
1443
+ return None
1444
+
1445
+ def parse_version_with_pattern(self, pattern: "re.Pattern[str]", version_str: str) -> "VersionInfo | None":
1446
+ """Parse version string using a specific regex pattern.
1447
+
1448
+ Args:
1449
+ pattern: Compiled regex pattern for the version format
1450
+ version_str: Raw version string from database
1451
+
1452
+ Returns:
1453
+ VersionInfo instance or None if parsing fails
1454
+ """
1455
+ match = pattern.search(version_str)
1456
+ if not match:
1457
+ return None
1458
+ groups = match.groups()
1459
+ if not groups:
1460
+ return None
1461
+ major = int(groups[0])
1462
+ minor = int(groups[1]) if len(groups) > VERSION_GROUPS_MIN_FOR_MINOR and groups[1] else 0
1463
+ patch = int(groups[2]) if len(groups) > VERSION_GROUPS_MIN_FOR_PATCH and groups[2] else 0
1464
+ return VersionInfo(major, minor, patch)
1465
+
1466
+ def _resolve_log_adapter(self) -> str:
1467
+ """Resolve adapter identifier for logging."""
1468
+ return str(type(self).dialect)
1469
+
1470
+ def _log_version_detected(self, adapter: str, version: "VersionInfo") -> None:
1471
+ """Log detected database version with db.system context."""
1472
+ logger.debug(
1473
+ "Detected database version", extra={"db.system": resolve_db_system(adapter), "db.version": str(version)}
1474
+ )
1475
+
1476
+ def _log_version_unavailable(self, adapter: str, reason: str) -> None:
1477
+ """Log that database version could not be determined."""
1478
+ logger.debug("Database version unavailable", extra={"db.system": resolve_db_system(adapter), "reason": reason})
1479
+
1480
+ def _log_schema_introspect(
1481
+ self, driver: Any, *, schema_name: "str | None", table_name: "str | None", operation: str
1482
+ ) -> None:
1483
+ """Log schema-level introspection activity."""
1484
+ log_with_context(
1485
+ logger,
1486
+ logging.DEBUG,
1487
+ "schema.introspect",
1488
+ db_system=resolve_db_system(type(driver).__name__),
1489
+ schema_name=schema_name,
1490
+ table_name=table_name,
1491
+ operation=operation,
1492
+ )
1493
+
1494
+ def _log_table_describe(self, driver: Any, *, schema_name: "str | None", table_name: str, operation: str) -> None:
1495
+ """Log table-level introspection activity."""
1496
+ log_with_context(
1497
+ logger,
1498
+ logging.DEBUG,
1499
+ "table.describe",
1500
+ db_system=resolve_db_system(type(driver).__name__),
1501
+ schema_name=schema_name,
1502
+ table_name=table_name,
1503
+ operation=operation,
1504
+ )
1505
+
1506
+ def detect_version_with_queries(self, driver: "HasExecuteProtocol", queries: "list[str]") -> "VersionInfo | None":
1507
+ """Try multiple version queries to detect database version.
1508
+
1509
+ Args:
1510
+ driver: Database driver with execute support
1511
+ queries: List of SQL queries to try
1512
+
1513
+ Returns:
1514
+ Version information or None if detection fails
1515
+ """
1516
+ for query in queries:
1517
+ with suppress(Exception):
1518
+ result: HasDataProtocol = driver.execute(query)
1519
+ result_data = result.data
1520
+ if result_data:
1521
+ first_row = result_data[0]
1522
+ version_str = str(first_row)
1523
+ if isinstance(first_row, dict):
1524
+ version_str = str(next(iter(first_row.values())))
1525
+ elif isinstance(first_row, (list, tuple)):
1526
+ version_str = str(first_row[0])
1527
+
1528
+ parsed_version = self.parse_version_string(version_str)
1529
+ if parsed_version:
1530
+ self._log_version_detected(self._resolve_log_adapter(), parsed_version)
1531
+ return parsed_version
1532
+
1533
+ self._log_version_unavailable(self._resolve_log_adapter(), "queries_exhausted")
1534
+ return None
1535
+
1536
+ def get_default_type_mapping(self) -> "dict[str, str]":
1537
+ """Get default type mappings for common categories.
1538
+
1539
+ Returns:
1540
+ Dictionary mapping type categories to generic SQL types
1541
+ """
1542
+ return {
1543
+ "json": "TEXT",
1544
+ "uuid": "VARCHAR(36)",
1545
+ "boolean": "INTEGER",
1546
+ "timestamp": "TIMESTAMP",
1547
+ "text": "TEXT",
1548
+ "blob": "BLOB",
1549
+ }
1550
+
1551
+ def get_default_features(self) -> "list[str]":
1552
+ """Get default feature flags supported by most databases.
1553
+
1554
+ Returns:
1555
+ List of commonly supported feature names
1556
+ """
1557
+ return ["supports_transactions", "supports_prepared_statements"]
1558
+
1559
+ def sort_tables_topologically(self, tables: "list[str]", foreign_keys: "list[ForeignKeyMetadata]") -> "list[str]":
1560
+ """Sort tables topologically based on foreign key dependencies.
1561
+
1562
+ Args:
1563
+ tables: List of table names.
1564
+ foreign_keys: List of foreign key metadata.
1565
+
1566
+ Returns:
1567
+ List of table names in topological order (dependencies first).
1568
+ """
1569
+ sorter: graphlib.TopologicalSorter[str] = graphlib.TopologicalSorter()
1570
+ for table in tables:
1571
+ sorter.add(table)
1572
+ for fk in foreign_keys:
1573
+ if fk.table_name == fk.referenced_table:
1574
+ continue
1575
+ sorter.add(fk.table_name, fk.referenced_table)
1576
+ return list(sorter.static_order())
1577
+
1578
+ def get_cached_version_for_driver(self, driver: Any) -> object:
1579
+ """Get cached version info for a driver instance.
1580
+
1581
+ Args:
1582
+ driver: Sync database driver instance.
1583
+
1584
+ Returns:
1585
+ Tuple of (was_cached, version_info).
1586
+
1587
+ """
1588
+ return self.get_cached_version(id(driver))
1589
+
1590
+ def cache_version_for_driver(self, driver: Any, version: "VersionInfo | None") -> None:
1591
+ """Cache version info for a driver instance.
1592
+
1593
+ Args:
1594
+ driver: Sync database driver instance.
1595
+ version: Parsed version info or None.
1596
+
1597
+ """
1598
+ self.cache_version(id(driver), version)
1599
+
1600
+ @abstractmethod
1601
+ def get_version(self, driver: Any) -> "VersionInfo | None":
1602
+ """Get database version information.
1603
+
1604
+ Args:
1605
+ driver: Sync database driver instance
1606
+
1607
+ Returns:
1608
+ Version information or None if detection fails
1609
+
1610
+ """
1611
+
1612
+ @abstractmethod
1613
+ def get_feature_flag(self, driver: Any, feature: str) -> bool:
1614
+ """Check if database supports a specific feature.
1615
+
1616
+ Args:
1617
+ driver: Sync database driver instance
1618
+ feature: Feature name to check
1619
+
1620
+ Returns:
1621
+ True if feature is supported, False otherwise
1622
+
1623
+ """
1624
+
1625
+ @abstractmethod
1626
+ def get_optimal_type(self, driver: Any, type_category: str) -> str:
1627
+ """Get optimal database type for a category.
1628
+
1629
+ Args:
1630
+ driver: Sync database driver instance
1631
+ type_category: Type category (e.g., 'json', 'uuid', 'boolean')
1632
+
1633
+ Returns:
1634
+ Database-specific type name
1635
+
1636
+ """
1637
+
1638
+ @abstractmethod
1639
+ def get_tables(self, driver: Any, schema: "str | None" = None) -> "list[TableMetadata]":
1640
+ """Get list of tables in schema.
1641
+
1642
+ Args:
1643
+ driver: Sync database driver instance
1644
+ schema: Schema name (None for default)
1645
+
1646
+ Returns:
1647
+ List of table metadata dictionaries
1648
+
1649
+ """
1650
+
1651
+ @abstractmethod
1652
+ def get_columns(
1653
+ self, driver: Any, table: "str | None" = None, schema: "str | None" = None
1654
+ ) -> "list[ColumnMetadata]":
1655
+ """Get column information for a table or schema.
1656
+
1657
+ Args:
1658
+ driver: Sync database driver instance
1659
+ table: Table name (None to fetch columns for all tables in schema)
1660
+ schema: Schema name (None for default)
1661
+
1662
+ Returns:
1663
+ List of column metadata dictionaries
1664
+
1665
+ """
1666
+
1667
+ @abstractmethod
1668
+ def get_indexes(
1669
+ self, driver: Any, table: "str | None" = None, schema: "str | None" = None
1670
+ ) -> "list[IndexMetadata]":
1671
+ """Get index information for a table or schema.
1672
+
1673
+ Args:
1674
+ driver: Sync database driver instance
1675
+ table: Table name (None to fetch indexes for all tables in schema)
1676
+ schema: Schema name (None for default)
1677
+
1678
+ Returns:
1679
+ List of index metadata dictionaries
1680
+
1681
+ """
1682
+
1683
+ @abstractmethod
1684
+ def get_foreign_keys(
1685
+ self, driver: Any, table: "str | None" = None, schema: "str | None" = None
1686
+ ) -> "list[ForeignKeyMetadata]":
1687
+ """Get foreign key metadata.
1688
+
1689
+ Args:
1690
+ driver: Sync database driver instance
1691
+ table: Optional table name filter
1692
+ schema: Optional schema name filter
1693
+
1694
+ Returns:
1695
+ List of foreign key metadata
1696
+
1697
+ """
1698
+
1699
+ def list_available_features(self) -> "list[str]":
1700
+ """List all features that can be checked via get_feature_flag.
1701
+
1702
+ Returns:
1703
+ List of feature names this data dictionary supports
1704
+
1705
+ """
1706
+ config = self.get_dialect_config()
1707
+ features = set(self.get_default_features())
1708
+ features.update(config.feature_flags.keys())
1709
+ features.update(config.feature_versions.keys())
1710
+ return sorted(features)