sqlalchemy-jdbcapi 2.0.0.post2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlalchemy_jdbcapi/__init__.py +128 -0
- sqlalchemy_jdbcapi/_version.py +34 -0
- sqlalchemy_jdbcapi/dialects/__init__.py +30 -0
- sqlalchemy_jdbcapi/dialects/base.py +879 -0
- sqlalchemy_jdbcapi/dialects/db2.py +134 -0
- sqlalchemy_jdbcapi/dialects/mssql.py +117 -0
- sqlalchemy_jdbcapi/dialects/mysql.py +152 -0
- sqlalchemy_jdbcapi/dialects/oceanbase.py +218 -0
- sqlalchemy_jdbcapi/dialects/odbc_base.py +389 -0
- sqlalchemy_jdbcapi/dialects/odbc_mssql.py +69 -0
- sqlalchemy_jdbcapi/dialects/odbc_mysql.py +101 -0
- sqlalchemy_jdbcapi/dialects/odbc_oracle.py +80 -0
- sqlalchemy_jdbcapi/dialects/odbc_postgresql.py +63 -0
- sqlalchemy_jdbcapi/dialects/oracle.py +180 -0
- sqlalchemy_jdbcapi/dialects/postgresql.py +110 -0
- sqlalchemy_jdbcapi/dialects/sqlite.py +141 -0
- sqlalchemy_jdbcapi/jdbc/__init__.py +98 -0
- sqlalchemy_jdbcapi/jdbc/connection.py +244 -0
- sqlalchemy_jdbcapi/jdbc/cursor.py +329 -0
- sqlalchemy_jdbcapi/jdbc/dataframe.py +198 -0
- sqlalchemy_jdbcapi/jdbc/driver_manager.py +353 -0
- sqlalchemy_jdbcapi/jdbc/exceptions.py +53 -0
- sqlalchemy_jdbcapi/jdbc/jvm.py +176 -0
- sqlalchemy_jdbcapi/jdbc/type_converter.py +292 -0
- sqlalchemy_jdbcapi/jdbc/types.py +72 -0
- sqlalchemy_jdbcapi/odbc/__init__.py +46 -0
- sqlalchemy_jdbcapi/odbc/connection.py +136 -0
- sqlalchemy_jdbcapi/odbc/exceptions.py +48 -0
- sqlalchemy_jdbcapi/py.typed +2 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/METADATA +825 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/RECORD +36 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/WHEEL +5 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/entry_points.txt +20 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/licenses/AUTHORS +7 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/licenses/LICENSE +13 -0
- sqlalchemy_jdbcapi-2.0.0.post2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,879 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Base JDBC dialect implementation following SOLID principles.
|
|
3
|
+
|
|
4
|
+
This module provides the abstract base class for all JDBC dialects,
|
|
5
|
+
implementing common functionality and defining the interface that
|
|
6
|
+
database-specific dialects must implement.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import logging
|
|
12
|
+
from abc import ABC, abstractmethod
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from typing import Any, ClassVar
|
|
15
|
+
|
|
16
|
+
from sqlalchemy import pool
|
|
17
|
+
from sqlalchemy.engine import Connection, Dialect, reflection
|
|
18
|
+
from sqlalchemy.engine.url import URL
|
|
19
|
+
from sqlalchemy.types import (
|
|
20
|
+
BIGINT,
|
|
21
|
+
BINARY,
|
|
22
|
+
BOOLEAN,
|
|
23
|
+
CHAR,
|
|
24
|
+
DATE,
|
|
25
|
+
DECIMAL,
|
|
26
|
+
FLOAT,
|
|
27
|
+
INTEGER,
|
|
28
|
+
NUMERIC,
|
|
29
|
+
REAL,
|
|
30
|
+
SMALLINT,
|
|
31
|
+
TIME,
|
|
32
|
+
TIMESTAMP,
|
|
33
|
+
VARBINARY,
|
|
34
|
+
VARCHAR,
|
|
35
|
+
)
|
|
36
|
+
|
|
37
|
+
from ..jdbc.exceptions import DatabaseError, OperationalError
|
|
38
|
+
|
|
39
|
+
logger = logging.getLogger(__name__)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass(frozen=True)
class JDBCDriverConfig:
    """Immutable bundle of JDBC driver settings.

    Groups everything a dialect needs to know about its driver — class
    name, URL shape, default port, and capability flags — into a single
    frozen value object (Single Responsibility Principle).
    """

    #: Fully qualified Java class name of the JDBC driver.
    driver_class: str
    #: Template for the JDBC URL, e.g. 'jdbc:postgresql://{host}:{port}/{database}'.
    jdbc_url_template: str
    #: Port used when the SQLAlchemy URL does not specify one.
    default_port: int
    #: Whether the database supports transactions.
    supports_transactions: bool = True
    #: Whether the database supports schemas.
    supports_schemas: bool = True
    #: Whether the database supports sequences.
    supports_sequences: bool = True

    def format_jdbc_url(
        self,
        host: str,
        port: int | None,
        database: str | None,
        query_params: dict[str, Any] | None = None,
    ) -> str:
        """Render a JDBC connection URL from the template.

        Args:
            host: Database host.
            port: Database port; falls back to ``default_port`` when falsy.
            database: Database name (rendered as "" when None).
            query_params: Extra key/value pairs appended as query arguments.

        Returns:
            The fully formatted JDBC URL.
        """
        effective_port = port if port else self.default_port
        rendered = self.jdbc_url_template.format(
            host=host, port=effective_port, database=database or ""
        )

        if not query_params:
            return rendered

        # Append query args, respecting any '?' already in the template.
        joiner = "&" if "?" in rendered else "?"
        query = "&".join(f"{key}={value}" for key, value in query_params.items())
        return f"{rendered}{joiner}{query}"
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class BaseJDBCDialect(Dialect, ABC):
    """
    Abstract base class for JDBC-based SQLAlchemy dialects.

    This class implements the Template Method pattern, providing common
    JDBC functionality while allowing database-specific customization
    through abstract methods.

    Subclasses must implement:
    - get_driver_config(): Return driver configuration
    - _get_server_version_info(): Parse database version

    SQLAlchemy 2.0+ compatible with full type hints.
    """

    # DB-API module (our custom JDBC bridge)
    driver = "jdbcapi"

    # SQLAlchemy capabilities
    supports_native_decimal = True
    # JDBC drivers do not reliably report affected row counts.
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False
    supports_unicode_binds = True
    supports_statement_cache = True
    supports_server_side_cursors = False

    # Connection pooling
    supports_native_boolean = True
    poolclass = pool.QueuePool

    # Execution options
    # NOTE(review): left as None — SQLAlchemy falls back to its default
    # execution context when no class is provided.
    execution_ctx_cls: ClassVar[type | None] = None
|
|
133
|
+
|
|
134
|
+
@classmethod
|
|
135
|
+
def dbapi(cls) -> type:
|
|
136
|
+
"""
|
|
137
|
+
Return the DB-API module.
|
|
138
|
+
|
|
139
|
+
Returns our custom JDBC bridge module that implements
|
|
140
|
+
the Python DB-API 2.0 specification.
|
|
141
|
+
"""
|
|
142
|
+
from .. import jdbc
|
|
143
|
+
|
|
144
|
+
return jdbc # type: ignore
|
|
145
|
+
|
|
146
|
+
    @classmethod
    @abstractmethod
    def get_driver_config(cls) -> JDBCDriverConfig:
        """
        Get JDBC driver configuration for this dialect.

        This method must be implemented by each database dialect
        to provide driver-specific configuration (driver class name,
        JDBC URL template, default port and capability flags).

        Returns:
            JDBCDriverConfig instance
        """
        ...
|
|
159
|
+
|
|
160
|
+
def create_connect_args(self, url: URL) -> tuple[list[Any], dict[str, Any]]:
|
|
161
|
+
"""
|
|
162
|
+
Create connection arguments from SQLAlchemy URL.
|
|
163
|
+
|
|
164
|
+
Converts a SQLAlchemy URL into arguments for our JDBC connect()
|
|
165
|
+
function, following the Adapter pattern.
|
|
166
|
+
|
|
167
|
+
Args:
|
|
168
|
+
url: SQLAlchemy connection URL
|
|
169
|
+
|
|
170
|
+
Returns:
|
|
171
|
+
Tuple of (args, kwargs) for jdbc.connect()
|
|
172
|
+
"""
|
|
173
|
+
config = self.get_driver_config()
|
|
174
|
+
|
|
175
|
+
# Build JDBC URL
|
|
176
|
+
jdbc_url = config.format_jdbc_url(
|
|
177
|
+
host=url.host or "localhost",
|
|
178
|
+
port=url.port,
|
|
179
|
+
database=url.database,
|
|
180
|
+
query_params=dict(url.query) if url.query else None,
|
|
181
|
+
)
|
|
182
|
+
|
|
183
|
+
logger.debug(f"Creating connection to: {jdbc_url}")
|
|
184
|
+
|
|
185
|
+
# Build driver arguments
|
|
186
|
+
driver_args: dict[str, Any] = {}
|
|
187
|
+
|
|
188
|
+
if url.username:
|
|
189
|
+
driver_args["user"] = url.username
|
|
190
|
+
if url.password:
|
|
191
|
+
driver_args["password"] = url.password
|
|
192
|
+
|
|
193
|
+
# Add query parameters as connection properties
|
|
194
|
+
if url.query:
|
|
195
|
+
driver_args.update(url.query)
|
|
196
|
+
|
|
197
|
+
# Connection arguments for jdbc.connect()
|
|
198
|
+
kwargs = {
|
|
199
|
+
"jclassname": config.driver_class,
|
|
200
|
+
"url": jdbc_url,
|
|
201
|
+
"driver_args": driver_args if driver_args else None,
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
return ([], kwargs)
|
|
205
|
+
|
|
206
|
+
def initialize(self, connection: Connection) -> None:
|
|
207
|
+
"""
|
|
208
|
+
Initialize a new connection.
|
|
209
|
+
|
|
210
|
+
Called when a new connection is established to set up
|
|
211
|
+
connection-specific settings.
|
|
212
|
+
|
|
213
|
+
Args:
|
|
214
|
+
connection: SQLAlchemy connection object
|
|
215
|
+
"""
|
|
216
|
+
super().initialize(connection)
|
|
217
|
+
|
|
218
|
+
# Set up server version
|
|
219
|
+
if not hasattr(self, "_server_version_info"):
|
|
220
|
+
self._server_version_info = self._get_server_version_info(connection)
|
|
221
|
+
logger.debug(f"Server version: {self._server_version_info}")
|
|
222
|
+
|
|
223
|
+
    @abstractmethod
    def _get_server_version_info(self, connection: Connection) -> tuple[int, ...]:
        """
        Get database server version information.

        This must be implemented by each dialect to parse version
        information in a database-specific way (typically by issuing a
        version query and splitting the result into integers).

        Args:
            connection: SQLAlchemy connection

        Returns:
            Tuple of version numbers (e.g., (14, 5, 0))
        """
        ...
|
|
238
|
+
|
|
239
|
+
def is_disconnect(self, e: Exception, connection: Any, cursor: Any) -> bool:
|
|
240
|
+
"""
|
|
241
|
+
Check if an exception indicates a database disconnect.
|
|
242
|
+
|
|
243
|
+
Args:
|
|
244
|
+
e: Exception that occurred
|
|
245
|
+
connection: Database connection
|
|
246
|
+
cursor: Database cursor
|
|
247
|
+
|
|
248
|
+
Returns:
|
|
249
|
+
True if this is a disconnect error
|
|
250
|
+
"""
|
|
251
|
+
if isinstance(e, (DatabaseError, OperationalError)):
|
|
252
|
+
error_str = str(e).lower()
|
|
253
|
+
disconnect_indicators = [
|
|
254
|
+
"connection is closed",
|
|
255
|
+
"cursor is closed",
|
|
256
|
+
"connection reset",
|
|
257
|
+
"broken pipe",
|
|
258
|
+
"connection refused",
|
|
259
|
+
"connection lost",
|
|
260
|
+
"can't connect",
|
|
261
|
+
"connection terminated",
|
|
262
|
+
]
|
|
263
|
+
return any(indicator in error_str for indicator in disconnect_indicators)
|
|
264
|
+
|
|
265
|
+
return False
|
|
266
|
+
|
|
267
|
+
def do_rollback(self, dbapi_connection: Any) -> None:
|
|
268
|
+
"""
|
|
269
|
+
Perform a rollback on the connection.
|
|
270
|
+
|
|
271
|
+
Some JDBC drivers have issues with rollback,
|
|
272
|
+
this can be overridden by subclasses.
|
|
273
|
+
|
|
274
|
+
Args:
|
|
275
|
+
dbapi_connection: DB-API connection object
|
|
276
|
+
"""
|
|
277
|
+
try:
|
|
278
|
+
dbapi_connection.rollback()
|
|
279
|
+
except Exception as e:
|
|
280
|
+
logger.warning(f"Rollback failed: {e}")
|
|
281
|
+
|
|
282
|
+
def do_commit(self, dbapi_connection: Any) -> None:
|
|
283
|
+
"""
|
|
284
|
+
Perform a commit on the connection.
|
|
285
|
+
|
|
286
|
+
Args:
|
|
287
|
+
dbapi_connection: DB-API connection object
|
|
288
|
+
"""
|
|
289
|
+
dbapi_connection.commit()
|
|
290
|
+
|
|
291
|
+
def do_close(self, dbapi_connection: Any) -> None:
|
|
292
|
+
"""
|
|
293
|
+
Close the connection.
|
|
294
|
+
|
|
295
|
+
Args:
|
|
296
|
+
dbapi_connection: DB-API connection object
|
|
297
|
+
"""
|
|
298
|
+
dbapi_connection.close()
|
|
299
|
+
|
|
300
|
+
def do_ping(self, dbapi_connection: Any) -> bool:
|
|
301
|
+
"""
|
|
302
|
+
Check if connection is alive.
|
|
303
|
+
|
|
304
|
+
Args:
|
|
305
|
+
dbapi_connection: DB-API connection object
|
|
306
|
+
|
|
307
|
+
Returns:
|
|
308
|
+
True if connection is alive, False otherwise
|
|
309
|
+
"""
|
|
310
|
+
try:
|
|
311
|
+
cursor = dbapi_connection.cursor()
|
|
312
|
+
cursor.execute("SELECT 1")
|
|
313
|
+
cursor.close()
|
|
314
|
+
return True
|
|
315
|
+
except Exception as e:
|
|
316
|
+
logger.debug(f"Ping failed: {e}")
|
|
317
|
+
return False
|
|
318
|
+
|
|
319
|
+
def get_isolation_level(self, dbapi_connection: Any) -> str | None:
|
|
320
|
+
"""
|
|
321
|
+
Get the current transaction isolation level.
|
|
322
|
+
|
|
323
|
+
Args:
|
|
324
|
+
dbapi_connection: DB-API connection object
|
|
325
|
+
|
|
326
|
+
Returns:
|
|
327
|
+
Isolation level name or None
|
|
328
|
+
"""
|
|
329
|
+
# This would need JDBC-specific implementation
|
|
330
|
+
# Most JDBC connections support getTransactionIsolation()
|
|
331
|
+
return None
|
|
332
|
+
|
|
333
|
+
    def set_isolation_level(self, dbapi_connection: Any, level: str) -> None:
        """
        Set the transaction isolation level.

        Currently a no-op in the base dialect; subclasses should
        override this to apply the level on the underlying connection.

        Args:
            dbapi_connection: DB-API connection object
            level: Isolation level to set
        """
        # This would need JDBC-specific implementation
        # Most JDBC connections support setTransactionIsolation()
|
|
343
|
+
|
|
344
|
+
# ========================================================================
|
|
345
|
+
# JDBC Reflection Methods - Using DatabaseMetaData API
|
|
346
|
+
# ========================================================================
|
|
347
|
+
|
|
348
|
+
def _get_jdbc_metadata(self, connection: Connection) -> Any:
|
|
349
|
+
"""
|
|
350
|
+
Get JDBC DatabaseMetaData object from connection.
|
|
351
|
+
|
|
352
|
+
Args:
|
|
353
|
+
connection: SQLAlchemy connection
|
|
354
|
+
|
|
355
|
+
Returns:
|
|
356
|
+
JDBC DatabaseMetaData object
|
|
357
|
+
"""
|
|
358
|
+
# Get the raw JDBC connection
|
|
359
|
+
dbapi_conn = connection.connection.dbapi_connection
|
|
360
|
+
if hasattr(dbapi_conn, "_jdbc_connection"):
|
|
361
|
+
jdbc_conn = dbapi_conn._jdbc_connection
|
|
362
|
+
return jdbc_conn.getMetaData()
|
|
363
|
+
raise OperationalError("Cannot access JDBC connection metadata")
|
|
364
|
+
|
|
365
|
+
def _jdbc_type_to_sqlalchemy(self, jdbc_type_name: str, jdbc_type: int) -> Any:
|
|
366
|
+
"""
|
|
367
|
+
Convert JDBC type to SQLAlchemy type.
|
|
368
|
+
|
|
369
|
+
Args:
|
|
370
|
+
jdbc_type_name: JDBC type name (e.g., 'VARCHAR')
|
|
371
|
+
jdbc_type: JDBC type code (from java.sql.Types)
|
|
372
|
+
|
|
373
|
+
Returns:
|
|
374
|
+
SQLAlchemy type instance
|
|
375
|
+
"""
|
|
376
|
+
# Map common JDBC type codes to SQLAlchemy types
|
|
377
|
+
type_map = {
|
|
378
|
+
-7: BOOLEAN, # BIT
|
|
379
|
+
-6: SMALLINT, # TINYINT
|
|
380
|
+
-5: BIGINT, # BIGINT
|
|
381
|
+
-4: VARBINARY, # LONGVARBINARY
|
|
382
|
+
-3: VARBINARY, # VARBINARY
|
|
383
|
+
-2: BINARY, # BINARY
|
|
384
|
+
-1: VARCHAR, # LONGVARCHAR
|
|
385
|
+
1: CHAR, # CHAR
|
|
386
|
+
2: NUMERIC, # NUMERIC
|
|
387
|
+
3: DECIMAL, # DECIMAL
|
|
388
|
+
4: INTEGER, # INTEGER
|
|
389
|
+
5: SMALLINT, # SMALLINT
|
|
390
|
+
6: FLOAT, # FLOAT
|
|
391
|
+
7: REAL, # REAL
|
|
392
|
+
8: FLOAT, # DOUBLE
|
|
393
|
+
12: VARCHAR, # VARCHAR
|
|
394
|
+
16: BOOLEAN, # BOOLEAN
|
|
395
|
+
91: DATE, # DATE
|
|
396
|
+
92: TIME, # TIME
|
|
397
|
+
93: TIMESTAMP, # TIMESTAMP
|
|
398
|
+
}
|
|
399
|
+
|
|
400
|
+
return type_map.get(jdbc_type, VARCHAR())
|
|
401
|
+
|
|
402
|
+
@reflection.cache
|
|
403
|
+
def get_schema_names(self, connection: Connection, **kw: Any) -> list[str]:
|
|
404
|
+
"""
|
|
405
|
+
Get list of schema names using JDBC DatabaseMetaData.
|
|
406
|
+
|
|
407
|
+
Args:
|
|
408
|
+
connection: SQLAlchemy connection
|
|
409
|
+
**kw: Additional keyword arguments
|
|
410
|
+
|
|
411
|
+
Returns:
|
|
412
|
+
List of schema names
|
|
413
|
+
"""
|
|
414
|
+
try:
|
|
415
|
+
metadata = self._get_jdbc_metadata(connection)
|
|
416
|
+
schemas = []
|
|
417
|
+
|
|
418
|
+
rs = metadata.getSchemas()
|
|
419
|
+
while rs.next():
|
|
420
|
+
schema_name = rs.getString("TABLE_SCHEM")
|
|
421
|
+
if schema_name:
|
|
422
|
+
schemas.append(schema_name)
|
|
423
|
+
rs.close()
|
|
424
|
+
|
|
425
|
+
logger.debug(f"Found {len(schemas)} schemas")
|
|
426
|
+
return schemas
|
|
427
|
+
|
|
428
|
+
except Exception as e:
|
|
429
|
+
logger.warning(f"Failed to get schema names: {e}")
|
|
430
|
+
return []
|
|
431
|
+
|
|
432
|
+
@reflection.cache
|
|
433
|
+
def get_table_names(
|
|
434
|
+
self, connection: Connection, schema: str | None = None, **kw: Any
|
|
435
|
+
) -> list[str]:
|
|
436
|
+
"""
|
|
437
|
+
Get list of table names using JDBC DatabaseMetaData.
|
|
438
|
+
|
|
439
|
+
Args:
|
|
440
|
+
connection: SQLAlchemy connection
|
|
441
|
+
schema: Schema name (None for default schema)
|
|
442
|
+
**kw: Additional keyword arguments
|
|
443
|
+
|
|
444
|
+
Returns:
|
|
445
|
+
List of table names
|
|
446
|
+
"""
|
|
447
|
+
try:
|
|
448
|
+
metadata = self._get_jdbc_metadata(connection)
|
|
449
|
+
tables = []
|
|
450
|
+
|
|
451
|
+
# getTables(catalog, schemaPattern, tableNamePattern, types[])
|
|
452
|
+
rs = metadata.getTables(None, schema, "%", ["TABLE"])
|
|
453
|
+
while rs.next():
|
|
454
|
+
table_name = rs.getString("TABLE_NAME")
|
|
455
|
+
if table_name:
|
|
456
|
+
tables.append(table_name)
|
|
457
|
+
rs.close()
|
|
458
|
+
|
|
459
|
+
logger.debug(f"Found {len(tables)} tables in schema '{schema}'")
|
|
460
|
+
return tables
|
|
461
|
+
|
|
462
|
+
except Exception as e:
|
|
463
|
+
logger.warning(f"Failed to get table names: {e}")
|
|
464
|
+
return []
|
|
465
|
+
|
|
466
|
+
@reflection.cache
|
|
467
|
+
def get_view_names(
|
|
468
|
+
self, connection: Connection, schema: str | None = None, **kw: Any
|
|
469
|
+
) -> list[str]:
|
|
470
|
+
"""
|
|
471
|
+
Get list of view names using JDBC DatabaseMetaData.
|
|
472
|
+
|
|
473
|
+
Args:
|
|
474
|
+
connection: SQLAlchemy connection
|
|
475
|
+
schema: Schema name (None for default schema)
|
|
476
|
+
**kw: Additional keyword arguments
|
|
477
|
+
|
|
478
|
+
Returns:
|
|
479
|
+
List of view names
|
|
480
|
+
"""
|
|
481
|
+
try:
|
|
482
|
+
metadata = self._get_jdbc_metadata(connection)
|
|
483
|
+
views = []
|
|
484
|
+
|
|
485
|
+
rs = metadata.getTables(None, schema, "%", ["VIEW"])
|
|
486
|
+
while rs.next():
|
|
487
|
+
view_name = rs.getString("TABLE_NAME")
|
|
488
|
+
if view_name:
|
|
489
|
+
views.append(view_name)
|
|
490
|
+
rs.close()
|
|
491
|
+
|
|
492
|
+
logger.debug(f"Found {len(views)} views in schema '{schema}'")
|
|
493
|
+
return views
|
|
494
|
+
|
|
495
|
+
except Exception as e:
|
|
496
|
+
logger.warning(f"Failed to get view names: {e}")
|
|
497
|
+
return []
|
|
498
|
+
|
|
499
|
+
def has_table(
|
|
500
|
+
self,
|
|
501
|
+
connection: Connection,
|
|
502
|
+
table_name: str,
|
|
503
|
+
schema: str | None = None,
|
|
504
|
+
**kw: Any,
|
|
505
|
+
) -> bool:
|
|
506
|
+
"""
|
|
507
|
+
Check if a table exists using JDBC DatabaseMetaData.
|
|
508
|
+
|
|
509
|
+
Args:
|
|
510
|
+
connection: SQLAlchemy connection
|
|
511
|
+
table_name: Table name to check
|
|
512
|
+
schema: Schema name (None for default schema)
|
|
513
|
+
**kw: Additional keyword arguments
|
|
514
|
+
|
|
515
|
+
Returns:
|
|
516
|
+
True if table exists, False otherwise
|
|
517
|
+
"""
|
|
518
|
+
try:
|
|
519
|
+
metadata = self._get_jdbc_metadata(connection)
|
|
520
|
+
|
|
521
|
+
rs = metadata.getTables(None, schema, table_name, ["TABLE"])
|
|
522
|
+
exists = rs.next()
|
|
523
|
+
rs.close()
|
|
524
|
+
|
|
525
|
+
logger.debug(f"Table '{schema}.{table_name}' exists: {exists}")
|
|
526
|
+
return exists
|
|
527
|
+
|
|
528
|
+
except Exception as e:
|
|
529
|
+
logger.warning(f"Failed to check table existence: {e}")
|
|
530
|
+
return False
|
|
531
|
+
|
|
532
|
+
@reflection.cache
|
|
533
|
+
def get_columns(
|
|
534
|
+
self,
|
|
535
|
+
connection: Connection,
|
|
536
|
+
table_name: str,
|
|
537
|
+
schema: str | None = None,
|
|
538
|
+
**kw: Any,
|
|
539
|
+
) -> list[dict[str, Any]]:
|
|
540
|
+
"""
|
|
541
|
+
Get column definitions for a table using JDBC DatabaseMetaData.
|
|
542
|
+
|
|
543
|
+
Args:
|
|
544
|
+
connection: SQLAlchemy connection
|
|
545
|
+
table_name: Table name
|
|
546
|
+
schema: Schema name (None for default schema)
|
|
547
|
+
**kw: Additional keyword arguments
|
|
548
|
+
|
|
549
|
+
Returns:
|
|
550
|
+
List of column dictionaries with keys:
|
|
551
|
+
- name: Column name
|
|
552
|
+
- type: SQLAlchemy type instance
|
|
553
|
+
- nullable: Boolean
|
|
554
|
+
- default: Default value (string or None)
|
|
555
|
+
- autoincrement: Boolean
|
|
556
|
+
"""
|
|
557
|
+
try:
|
|
558
|
+
metadata = self._get_jdbc_metadata(connection)
|
|
559
|
+
columns = []
|
|
560
|
+
|
|
561
|
+
rs = metadata.getColumns(None, schema, table_name, "%")
|
|
562
|
+
while rs.next():
|
|
563
|
+
column_name = rs.getString("COLUMN_NAME")
|
|
564
|
+
data_type = rs.getInt("DATA_TYPE")
|
|
565
|
+
type_name = rs.getString("TYPE_NAME")
|
|
566
|
+
column_size = rs.getInt("COLUMN_SIZE")
|
|
567
|
+
nullable = rs.getInt("NULLABLE") == 1 # DatabaseMetaData.columnNullable
|
|
568
|
+
column_def = rs.getString("COLUMN_DEF")
|
|
569
|
+
is_autoincrement = rs.getString("IS_AUTOINCREMENT")
|
|
570
|
+
|
|
571
|
+
# Convert JDBC type to SQLAlchemy type
|
|
572
|
+
sa_type = self._jdbc_type_to_sqlalchemy(type_name, data_type)
|
|
573
|
+
|
|
574
|
+
# Apply size for character/binary types
|
|
575
|
+
if hasattr(sa_type, "length") and column_size:
|
|
576
|
+
if isinstance(sa_type, (VARCHAR, CHAR, VARBINARY, BINARY)):
|
|
577
|
+
sa_type = type(sa_type)(length=column_size)
|
|
578
|
+
|
|
579
|
+
columns.append(
|
|
580
|
+
{
|
|
581
|
+
"name": column_name,
|
|
582
|
+
"type": sa_type,
|
|
583
|
+
"nullable": nullable,
|
|
584
|
+
"default": column_def,
|
|
585
|
+
"autoincrement": is_autoincrement == "YES"
|
|
586
|
+
if is_autoincrement
|
|
587
|
+
else False,
|
|
588
|
+
}
|
|
589
|
+
)
|
|
590
|
+
|
|
591
|
+
rs.close()
|
|
592
|
+
|
|
593
|
+
logger.debug(
|
|
594
|
+
f"Found {len(columns)} columns for table '{schema}.{table_name}'"
|
|
595
|
+
)
|
|
596
|
+
return columns
|
|
597
|
+
|
|
598
|
+
except Exception as e:
|
|
599
|
+
logger.warning(f"Failed to get columns: {e}")
|
|
600
|
+
return []
|
|
601
|
+
|
|
602
|
+
    @reflection.cache
    def get_pk_constraint(
        self,
        connection: Connection,
        table_name: str,
        schema: str | None = None,
        **kw: Any,
    ) -> dict[str, Any]:
        """
        Get primary key constraint for a table using JDBC DatabaseMetaData.

        Args:
            connection: SQLAlchemy connection
            table_name: Table name
            schema: Schema name (None for default schema)
            **kw: Additional keyword arguments

        Returns:
            Dictionary with keys:
            - name: Constraint name
            - constrained_columns: List of column names (in key order)
        """
        try:
            metadata = self._get_jdbc_metadata(connection)
            pk_columns = []
            pk_name = None

            # getPrimaryKeys returns one row per key column.
            rs = metadata.getPrimaryKeys(None, schema, table_name)
            while rs.next():
                column_name = rs.getString("COLUMN_NAME")
                # PK_NAME is identical across rows; last value wins.
                pk_name = rs.getString("PK_NAME")
                key_seq = rs.getInt("KEY_SEQ")
                pk_columns.append((key_seq, column_name))

            rs.close()

            # Sort by KEY_SEQ to maintain correct column order
            pk_columns.sort(key=lambda x: x[0])
            constrained_columns = [col for _, col in pk_columns]

            result = {
                "name": pk_name,
                "constrained_columns": constrained_columns,
            }

            logger.debug(f"Primary key for '{schema}.{table_name}': {result}")
            return result

        except Exception as e:
            # Best-effort: report "no primary key" on any metadata failure.
            logger.warning(f"Failed to get primary key: {e}")
            return {"name": None, "constrained_columns": []}
|
|
653
|
+
|
|
654
|
+
    @reflection.cache
    def get_foreign_keys(
        self,
        connection: Connection,
        table_name: str,
        schema: str | None = None,
        **kw: Any,
    ) -> list[dict[str, Any]]:
        """
        Get foreign key constraints for a table using JDBC DatabaseMetaData.

        Args:
            connection: SQLAlchemy connection
            table_name: Table name
            schema: Schema name (None for default schema)
            **kw: Additional keyword arguments

        Returns:
            List of dictionaries with keys:
            - name: Constraint name
            - constrained_columns: List of local column names
            - referred_schema: Referenced schema name
            - referred_table: Referenced table name
            - referred_columns: List of referenced column names
        """
        try:
            metadata = self._get_jdbc_metadata(connection)
            # Keyed by constraint name: a multi-column FK spans several rows.
            fks: dict[str, dict[str, Any]] = {}

            rs = metadata.getImportedKeys(None, schema, table_name)
            while rs.next():
                fk_name = rs.getString("FK_NAME")
                fk_column = rs.getString("FKCOLUMN_NAME")
                pk_table = rs.getString("PKTABLE_NAME")
                pk_schema = rs.getString("PKTABLE_SCHEM")
                pk_column = rs.getString("PKCOLUMN_NAME")
                key_seq = rs.getInt("KEY_SEQ")

                if fk_name not in fks:
                    fks[fk_name] = {
                        "name": fk_name,
                        "constrained_columns": [],
                        "referred_schema": pk_schema,
                        "referred_table": pk_table,
                        "referred_columns": [],
                        # Internal: KEY_SEQ values for post-hoc ordering;
                        # removed before returning.
                        "_seq": [],
                    }

                fks[fk_name]["_seq"].append(key_seq)
                fks[fk_name]["constrained_columns"].append(fk_column)
                fks[fk_name]["referred_columns"].append(pk_column)

            rs.close()

            # Sort columns by KEY_SEQ
            result = []
            for fk_name, fk_data in fks.items():
                # Sort by sequence
                sorted_data = sorted(
                    zip(
                        fk_data["_seq"],
                        fk_data["constrained_columns"],
                        fk_data["referred_columns"],
                    )
                )
                fk_data["constrained_columns"] = [col for _, col, _ in sorted_data]
                fk_data["referred_columns"] = [col for _, _, col in sorted_data]
                del fk_data["_seq"]
                result.append(fk_data)

            logger.debug(
                f"Found {len(result)} foreign keys for '{schema}.{table_name}'"
            )
            return result

        except Exception as e:
            logger.warning(f"Failed to get foreign keys: {e}")
            return []
|
|
732
|
+
|
|
733
|
+
    @reflection.cache
    def get_indexes(
        self,
        connection: Connection,
        table_name: str,
        schema: str | None = None,
        **kw: Any,
    ) -> list[dict[str, Any]]:
        """
        Get indexes for a table using JDBC DatabaseMetaData.

        Args:
            connection: SQLAlchemy connection
            table_name: Table name
            schema: Schema name (None for default schema)
            **kw: Additional keyword arguments

        Returns:
            List of dictionaries with keys:
            - name: Index name
            - column_names: List of column names
            - unique: Boolean
        """
        try:
            metadata = self._get_jdbc_metadata(connection)
            # Keyed by index name: a multi-column index spans several rows.
            indexes: dict[str, dict[str, Any]] = {}

            # getIndexInfo(catalog, schema, table, unique, approximate)
            rs = metadata.getIndexInfo(None, schema, table_name, False, True)
            while rs.next():
                index_name = rs.getString("INDEX_NAME")

                # Skip statistics
                # (tableIndexStatistic rows carry a null INDEX_NAME)
                if not index_name:
                    continue

                column_name = rs.getString("COLUMN_NAME")
                non_unique = rs.getBoolean("NON_UNIQUE")
                ordinal_position = rs.getInt("ORDINAL_POSITION")

                if index_name not in indexes:
                    indexes[index_name] = {
                        "name": index_name,
                        "column_names": [],
                        "unique": not non_unique,
                        # Internal: ordinal positions for post-hoc ordering;
                        # removed before returning.
                        "_positions": [],
                    }

                indexes[index_name]["_positions"].append(ordinal_position)
                indexes[index_name]["column_names"].append(column_name)

            rs.close()

            # Sort columns by ordinal position
            result = []
            for idx_name, idx_data in indexes.items():
                sorted_data = sorted(
                    zip(idx_data["_positions"], idx_data["column_names"])
                )
                idx_data["column_names"] = [col for _, col in sorted_data]
                del idx_data["_positions"]
                result.append(idx_data)

            logger.debug(f"Found {len(result)} indexes for '{schema}.{table_name}'")
            return result

        except Exception as e:
            logger.warning(f"Failed to get indexes: {e}")
            return []
|
|
802
|
+
|
|
803
|
+
@reflection.cache
|
|
804
|
+
def get_unique_constraints(
|
|
805
|
+
self,
|
|
806
|
+
connection: Connection,
|
|
807
|
+
table_name: str,
|
|
808
|
+
schema: str | None = None,
|
|
809
|
+
**kw: Any,
|
|
810
|
+
) -> list[dict[str, Any]]:
|
|
811
|
+
"""
|
|
812
|
+
Get unique constraints for a table.
|
|
813
|
+
|
|
814
|
+
This is extracted from get_indexes() by filtering for unique indexes.
|
|
815
|
+
|
|
816
|
+
Args:
|
|
817
|
+
connection: SQLAlchemy connection
|
|
818
|
+
table_name: Table name
|
|
819
|
+
schema: Schema name (None for default schema)
|
|
820
|
+
**kw: Additional keyword arguments
|
|
821
|
+
|
|
822
|
+
Returns:
|
|
823
|
+
List of dictionaries with keys:
|
|
824
|
+
- name: Constraint name
|
|
825
|
+
- column_names: List of column names
|
|
826
|
+
"""
|
|
827
|
+
try:
|
|
828
|
+
indexes = self.get_indexes(connection, table_name, schema, **kw)
|
|
829
|
+
unique_constraints = [
|
|
830
|
+
{"name": idx["name"], "column_names": idx["column_names"]}
|
|
831
|
+
for idx in indexes
|
|
832
|
+
if idx.get("unique", False)
|
|
833
|
+
]
|
|
834
|
+
|
|
835
|
+
logger.debug(
|
|
836
|
+
f"Found {len(unique_constraints)} unique constraints for '{schema}.{table_name}'"
|
|
837
|
+
)
|
|
838
|
+
return unique_constraints
|
|
839
|
+
|
|
840
|
+
except Exception as e:
|
|
841
|
+
logger.warning(f"Failed to get unique constraints: {e}")
|
|
842
|
+
return []
|
|
843
|
+
|
|
844
|
+
@reflection.cache
|
|
845
|
+
def get_check_constraints(
|
|
846
|
+
self,
|
|
847
|
+
connection: Connection,
|
|
848
|
+
table_name: str,
|
|
849
|
+
schema: str | None = None,
|
|
850
|
+
**kw: Any,
|
|
851
|
+
) -> list[dict[str, Any]]:
|
|
852
|
+
"""
|
|
853
|
+
Get check constraints for a table.
|
|
854
|
+
|
|
855
|
+
Note: JDBC DatabaseMetaData doesn't have a standard method for check constraints.
|
|
856
|
+
This returns an empty list. Database-specific dialects should override this
|
|
857
|
+
method to query system tables if check constraint information is needed.
|
|
858
|
+
|
|
859
|
+
Args:
|
|
860
|
+
connection: SQLAlchemy connection
|
|
861
|
+
table_name: Table name
|
|
862
|
+
schema: Schema name (None for default schema)
|
|
863
|
+
**kw: Additional keyword arguments
|
|
864
|
+
|
|
865
|
+
Returns:
|
|
866
|
+
List of dictionaries with keys:
|
|
867
|
+
- name: Constraint name
|
|
868
|
+
- sqltext: Check constraint SQL expression
|
|
869
|
+
"""
|
|
870
|
+
# JDBC doesn't provide standard access to check constraints
|
|
871
|
+
# Subclasses can override to query database-specific system tables
|
|
872
|
+
logger.debug(
|
|
873
|
+
f"Check constraints not available via JDBC for '{schema}.{table_name}'"
|
|
874
|
+
)
|
|
875
|
+
return []
|
|
876
|
+
|
|
877
|
+
def __repr__(self) -> str:
|
|
878
|
+
"""String representation of the dialect."""
|
|
879
|
+
return f"<{self.__class__.__name__}>"
|