velocity-python 0.0.129__py3-none-any.whl → 0.0.132__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of velocity-python might be problematic. Click here for more details.
- velocity/__init__.py +1 -1
- velocity/aws/handlers/mixins/__init__.py +16 -0
- velocity/aws/handlers/mixins/activity_tracker.py +142 -0
- velocity/aws/handlers/mixins/error_handler.py +192 -0
- velocity/aws/handlers/mixins/legacy_mixin.py +53 -0
- velocity/aws/handlers/mixins/standard_mixin.py +73 -0
- velocity/db/servers/base/__init__.py +9 -0
- velocity/db/servers/base/initializer.py +69 -0
- velocity/db/servers/base/operators.py +98 -0
- velocity/db/servers/base/sql.py +503 -0
- velocity/db/servers/base/types.py +135 -0
- velocity/db/servers/mysql/__init__.py +64 -0
- velocity/db/servers/mysql/operators.py +54 -0
- velocity/db/servers/{mysql_reserved.py → mysql/reserved.py} +2 -14
- velocity/db/servers/mysql/sql.py +569 -0
- velocity/db/servers/mysql/types.py +107 -0
- velocity/db/servers/postgres/__init__.py +40 -0
- velocity/db/servers/postgres/operators.py +34 -0
- velocity/db/servers/postgres/sql.py +4 -3
- velocity/db/servers/postgres/types.py +88 -2
- velocity/db/servers/sqlite/__init__.py +52 -0
- velocity/db/servers/sqlite/operators.py +52 -0
- velocity/db/servers/sqlite/reserved.py +20 -0
- velocity/db/servers/sqlite/sql.py +530 -0
- velocity/db/servers/sqlite/types.py +92 -0
- velocity/db/servers/sqlserver/__init__.py +64 -0
- velocity/db/servers/sqlserver/operators.py +47 -0
- velocity/db/servers/sqlserver/reserved.py +32 -0
- velocity/db/servers/sqlserver/sql.py +625 -0
- velocity/db/servers/sqlserver/types.py +114 -0
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/METADATA +1 -1
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/RECORD +35 -16
- velocity/db/servers/mysql.py +0 -640
- velocity/db/servers/sqlite.py +0 -968
- velocity/db/servers/sqlite_reserved.py +0 -208
- velocity/db/servers/sqlserver.py +0 -921
- velocity/db/servers/sqlserver_reserved.py +0 -314
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/WHEEL +0 -0
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/licenses/LICENSE +0 -0
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,625 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import hashlib
|
|
3
|
+
import decimal
|
|
4
|
+
import datetime
|
|
5
|
+
from typing import Any, Dict, List, Optional, Tuple, Union
|
|
6
|
+
from collections.abc import Mapping, Sequence
|
|
7
|
+
|
|
8
|
+
from velocity.db import exceptions
|
|
9
|
+
from ..base.sql import BaseSQLDialect
|
|
10
|
+
from .reserved import reserved_words
|
|
11
|
+
from .types import TYPES
|
|
12
|
+
from .operators import OPERATORS, SQLServerOperators
|
|
13
|
+
from ..tablehelper import TableHelper
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Configure TableHelper for SQL Server
# Module-import side effect: installs this dialect's reserved-word list and
# operator map on the shared TableHelper so identifier quoting and operator
# translation elsewhere in velocity.db match SQL Server semantics.
TableHelper.reserved = reserved_words
TableHelper.operators = OPERATORS
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# Any character outside this safe identifier set forces bracket-quoting.
_UNSAFE_IDENT_CHARS = re.compile(r"[^A-Za-z0-9_]")


def quote(data):
    """Bracket-quote SQL Server identifiers.

    Accepts a single dotted identifier (e.g. ``schema.table``) or a list of
    them (quoted element-wise).  Each dot-separated part is wrapped in
    ``[...]`` when it is a reserved word or contains characters outside
    ``A-Za-z0-9_``; parts that already contain a bracket are passed through
    untouched.
    """
    if isinstance(data, list):
        return [quote(item) for item in data]

    quoted_parts = []
    for part in data.split("."):
        if "[" in part:
            # Already quoted by the caller; leave as-is.
            quoted_parts.append(part)
        elif part.upper() in reserved_words:
            quoted_parts.append("[" + part + "]")
        elif _UNSAFE_IDENT_CHARS.search(part):
            # BUGFIX/generalization: previously only "/" triggered quoting,
            # leaving names containing spaces, hyphens, etc. unbracketed
            # and therefore invalid in generated T-SQL.
            quoted_parts.append("[" + part + "]")
        else:
            quoted_parts.append(part)
    return ".".join(quoted_parts)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class SQL(BaseSQLDialect):
    """T-SQL (Microsoft SQL Server) dialect for velocity.db.

    Class methods build SQL statement strings plus positional parameter
    lists (``?`` placeholders, pytds/ODBC style) for the driver layer.
    """

    server = "SQL Server"
    # Column of INFORMATION_SCHEMA.COLUMNS that holds a column's data type.
    type_column_identifier = "DATA_TYPE"
    is_nullable = "IS_NULLABLE"

    default_schema = "dbo"

    # SQL Server error numbers, grouped by the exception category the
    # caller maps them to.  Values are compared as strings.
    ApplicationErrorCodes = []
    DatabaseMissingErrorCodes = ["911"]  # Database not found
    TableMissingErrorCodes = ["208"]  # Invalid object name
    ColumnMissingErrorCodes = ["207"]  # Invalid column name
    ForeignKeyMissingErrorCodes = ["1759"]  # Foreign key error
    ConnectionErrorCodes = ["2", "53", "1326"]  # Connection errors
    DuplicateKeyErrorCodes = ["2627", "2601"]  # Primary key / unique constraint
    RetryTransactionCodes = ["1205"]  # Deadlock
    TruncationErrorCodes = ["8152"]  # String truncation
    LockTimeoutErrorCodes = ["1222"]  # Lock request timeout
    DatabaseObjectExistsErrorCodes = ["2714"]  # Object already exists
    DataIntegrityErrorCodes = ["547", "515"]  # Foreign key, null constraint

    # Abstract-type -> SQL Server type mapping, imported from .types.
    types = TYPES

    @classmethod
    def get_error(cls, e):
        """Extract ``(error_number, error_message)`` from a driver exception.

        Falls back to ``None`` for the number and ``str(e)`` for the message
        when the expected attributes are absent.
        """
        # pytds exceptions have different attributes
        error_number = getattr(e, "number", None) or getattr(e, "msgno", None)
        error_message = getattr(e, "message", None) or str(e)
        return error_number, error_message
|
|
70
|
+
|
|
71
|
+
@classmethod
|
|
72
|
+
def select(
|
|
73
|
+
cls,
|
|
74
|
+
tx,
|
|
75
|
+
columns=None,
|
|
76
|
+
table=None,
|
|
77
|
+
where=None,
|
|
78
|
+
orderby=None,
|
|
79
|
+
groupby=None,
|
|
80
|
+
having=None,
|
|
81
|
+
start=None,
|
|
82
|
+
qty=None,
|
|
83
|
+
lock=None,
|
|
84
|
+
skip_locked=None,
|
|
85
|
+
):
|
|
86
|
+
"""Generate a SQL Server SELECT statement."""
|
|
87
|
+
if not table:
|
|
88
|
+
raise ValueError("Table name is required")
|
|
89
|
+
|
|
90
|
+
sql_parts = []
|
|
91
|
+
vals = []
|
|
92
|
+
|
|
93
|
+
# SELECT clause with TOP (SQL Server pagination)
|
|
94
|
+
sql_parts.append("SELECT")
|
|
95
|
+
|
|
96
|
+
# Handle TOP clause for SQL Server pagination
|
|
97
|
+
if qty is not None and start is None:
|
|
98
|
+
sql_parts.append(f"TOP {qty}")
|
|
99
|
+
|
|
100
|
+
# Column selection
|
|
101
|
+
if columns is None:
|
|
102
|
+
columns = ["*"]
|
|
103
|
+
elif isinstance(columns, str):
|
|
104
|
+
columns = [columns]
|
|
105
|
+
|
|
106
|
+
sql_parts.append(", ".join(columns))
|
|
107
|
+
|
|
108
|
+
# FROM clause
|
|
109
|
+
sql_parts.append("FROM")
|
|
110
|
+
sql_parts.append(quote(table))
|
|
111
|
+
|
|
112
|
+
# WHERE clause
|
|
113
|
+
if where:
|
|
114
|
+
where_sql, where_vals = cls._build_where(where)
|
|
115
|
+
sql_parts.append("WHERE")
|
|
116
|
+
sql_parts.append(where_sql)
|
|
117
|
+
vals.extend(where_vals)
|
|
118
|
+
|
|
119
|
+
# GROUP BY clause
|
|
120
|
+
if groupby:
|
|
121
|
+
if isinstance(groupby, str):
|
|
122
|
+
groupby = [groupby]
|
|
123
|
+
sql_parts.append("GROUP BY")
|
|
124
|
+
sql_parts.append(", ".join(quote(col) for col in groupby))
|
|
125
|
+
|
|
126
|
+
# HAVING clause
|
|
127
|
+
if having:
|
|
128
|
+
having_sql, having_vals = cls._build_where(having)
|
|
129
|
+
sql_parts.append("HAVING")
|
|
130
|
+
sql_parts.append(having_sql)
|
|
131
|
+
vals.extend(having_vals)
|
|
132
|
+
|
|
133
|
+
# ORDER BY clause (required for OFFSET/FETCH)
|
|
134
|
+
if orderby:
|
|
135
|
+
if isinstance(orderby, str):
|
|
136
|
+
orderby = [orderby]
|
|
137
|
+
elif isinstance(orderby, dict):
|
|
138
|
+
orderby_list = []
|
|
139
|
+
for col, direction in orderby.items():
|
|
140
|
+
orderby_list.append(f"{quote(col)} {direction.upper()}")
|
|
141
|
+
orderby = orderby_list
|
|
142
|
+
sql_parts.append("ORDER BY")
|
|
143
|
+
sql_parts.append(", ".join(orderby))
|
|
144
|
+
elif start is not None:
|
|
145
|
+
# ORDER BY is required for OFFSET/FETCH in SQL Server
|
|
146
|
+
sql_parts.append("ORDER BY")
|
|
147
|
+
sql_parts.append("(SELECT NULL)")
|
|
148
|
+
|
|
149
|
+
# OFFSET and FETCH (SQL Server 2012+)
|
|
150
|
+
if start is not None:
|
|
151
|
+
sql_parts.append(f"OFFSET {start} ROWS")
|
|
152
|
+
if qty is not None:
|
|
153
|
+
sql_parts.append(f"FETCH NEXT {qty} ROWS ONLY")
|
|
154
|
+
|
|
155
|
+
# Locking hints
|
|
156
|
+
if lock:
|
|
157
|
+
sql_parts.append("WITH (UPDLOCK)")
|
|
158
|
+
if skip_locked:
|
|
159
|
+
sql_parts.append("WITH (READPAST)")
|
|
160
|
+
|
|
161
|
+
return " ".join(sql_parts), vals
|
|
162
|
+
|
|
163
|
+
    @classmethod
    def _build_where(cls, where):
        """Build a WHERE clause for SQL Server.

        *where* may be a raw SQL string (returned untouched with no params),
        a dict, or a list/tuple of ``(key, value)`` pairs.  Operator hints
        are embedded in the key text: ``!`` negates (IS NOT NULL / NOT IN /
        ``<>``), ``<>``/``!=`` mean not-equal, ``%`` means LIKE.  The hint
        characters are stripped from the key before quoting.

        Returns ``(sql_fragment, values)`` where values align with the ``?``
        placeholders.  Raises ValueError on an unsupported *where* type.
        """
        if isinstance(where, str):
            return where, []

        if isinstance(where, dict):
            where = list(where.items())

        if not isinstance(where, (list, tuple)):
            raise ValueError("WHERE clause must be string, dict, or list")

        conditions = []
        vals = []

        for key, val in where:
            if val is None:
                # NULL comparisons use IS / IS NOT rather than =/<>.
                if "!" in key:
                    key = key.replace("!", "")
                    conditions.append(f"{quote(key)} IS NOT NULL")
                else:
                    conditions.append(f"{quote(key)} IS NULL")
            elif isinstance(val, (list, tuple)):
                # Collection values become (NOT) IN with one ? per element.
                if "!" in key:
                    key = key.replace("!", "")
                    conditions.append(f"{quote(key)} NOT IN ({', '.join(['?'] * len(val))})")
                else:
                    conditions.append(f"{quote(key)} IN ({', '.join(['?'] * len(val))})")
                vals.extend(val)
            else:
                # Handle operators
                # NOTE: the check order matters — "<>" and "!=" must be
                # tested before the bare "!" so the two-character forms are
                # not partially consumed.
                op = "="
                if "<>" in key:
                    key = key.replace("<>", "")
                    op = "<>"
                elif "!=" in key:
                    key = key.replace("!=", "")
                    op = "<>"
                elif "%" in key:
                    key = key.replace("%", "")
                    op = "LIKE"
                elif "!" in key:
                    key = key.replace("!", "")
                    op = "<>"

                conditions.append(f"{quote(key)} {op} ?")
                vals.append(val)

        return " AND ".join(conditions), vals
|
|
212
|
+
|
|
213
|
+
@classmethod
|
|
214
|
+
def insert(cls, table, data):
|
|
215
|
+
"""Generate an INSERT statement for SQL Server."""
|
|
216
|
+
if not data:
|
|
217
|
+
raise ValueError("Data cannot be empty")
|
|
218
|
+
|
|
219
|
+
columns = list(data.keys())
|
|
220
|
+
values = list(data.values())
|
|
221
|
+
|
|
222
|
+
sql_parts = [
|
|
223
|
+
"INSERT INTO",
|
|
224
|
+
quote(table),
|
|
225
|
+
f"({', '.join(quote(col) for col in columns)})",
|
|
226
|
+
"VALUES",
|
|
227
|
+
f"({', '.join(['?'] * len(values))})" # SQL Server uses ? placeholders
|
|
228
|
+
]
|
|
229
|
+
|
|
230
|
+
return " ".join(sql_parts), values
|
|
231
|
+
|
|
232
|
+
@classmethod
|
|
233
|
+
def update(cls, tx, table, data, where=None, pk=None, excluded=False):
|
|
234
|
+
"""Generate an UPDATE statement for SQL Server."""
|
|
235
|
+
if not data:
|
|
236
|
+
raise ValueError("Data cannot be empty")
|
|
237
|
+
|
|
238
|
+
if not where and not pk:
|
|
239
|
+
raise ValueError("Either WHERE clause or primary key must be provided")
|
|
240
|
+
|
|
241
|
+
# Build SET clause
|
|
242
|
+
set_clauses = []
|
|
243
|
+
vals = []
|
|
244
|
+
|
|
245
|
+
for col, val in data.items():
|
|
246
|
+
set_clauses.append(f"{quote(col)} = ?")
|
|
247
|
+
vals.append(val)
|
|
248
|
+
|
|
249
|
+
# Build WHERE clause
|
|
250
|
+
if pk:
|
|
251
|
+
if where:
|
|
252
|
+
# Merge pk into where
|
|
253
|
+
if isinstance(where, dict):
|
|
254
|
+
where.update(pk)
|
|
255
|
+
else:
|
|
256
|
+
# Convert to dict for merging
|
|
257
|
+
where_dict = dict(where) if isinstance(where, (list, tuple)) else {}
|
|
258
|
+
where_dict.update(pk)
|
|
259
|
+
where = where_dict
|
|
260
|
+
else:
|
|
261
|
+
where = pk
|
|
262
|
+
|
|
263
|
+
where_sql, where_vals = cls._build_where(where) if where else ("", [])
|
|
264
|
+
|
|
265
|
+
sql_parts = [
|
|
266
|
+
"UPDATE",
|
|
267
|
+
quote(table),
|
|
268
|
+
"SET",
|
|
269
|
+
", ".join(set_clauses)
|
|
270
|
+
]
|
|
271
|
+
|
|
272
|
+
if where_sql:
|
|
273
|
+
sql_parts.extend(["WHERE", where_sql])
|
|
274
|
+
vals.extend(where_vals)
|
|
275
|
+
|
|
276
|
+
return " ".join(sql_parts), vals
|
|
277
|
+
|
|
278
|
+
@classmethod
|
|
279
|
+
def delete(cls, tx, table, where):
|
|
280
|
+
"""Generate a DELETE statement for SQL Server."""
|
|
281
|
+
if not where:
|
|
282
|
+
raise ValueError("WHERE clause is required for DELETE")
|
|
283
|
+
|
|
284
|
+
where_sql, where_vals = cls._build_where(where)
|
|
285
|
+
|
|
286
|
+
sql_parts = [
|
|
287
|
+
"DELETE FROM",
|
|
288
|
+
quote(table),
|
|
289
|
+
"WHERE",
|
|
290
|
+
where_sql
|
|
291
|
+
]
|
|
292
|
+
|
|
293
|
+
return " ".join(sql_parts), where_vals
|
|
294
|
+
|
|
295
|
+
@classmethod
|
|
296
|
+
def merge(cls, tx, table, data, pk, on_conflict_do_nothing, on_conflict_update):
|
|
297
|
+
"""Generate a MERGE statement for SQL Server."""
|
|
298
|
+
# SQL Server MERGE is complex - simplified version
|
|
299
|
+
if on_conflict_do_nothing:
|
|
300
|
+
# Use IF NOT EXISTS pattern
|
|
301
|
+
pk_conditions = " AND ".join([f"{quote(k)} = ?" for k in pk.keys()])
|
|
302
|
+
pk_values = list(pk.values())
|
|
303
|
+
|
|
304
|
+
insert_sql, insert_vals = cls.insert(table, data)
|
|
305
|
+
wrapped_sql = f"""
|
|
306
|
+
IF NOT EXISTS (SELECT 1 FROM {quote(table)} WHERE {pk_conditions})
|
|
307
|
+
BEGIN
|
|
308
|
+
{insert_sql}
|
|
309
|
+
END
|
|
310
|
+
"""
|
|
311
|
+
return wrapped_sql, pk_values + insert_vals
|
|
312
|
+
elif on_conflict_update:
|
|
313
|
+
# Use actual MERGE statement
|
|
314
|
+
pk_columns = list(pk.keys())
|
|
315
|
+
data_columns = [k for k in data.keys() if k not in pk_columns]
|
|
316
|
+
|
|
317
|
+
# Build MERGE statement
|
|
318
|
+
merge_parts = [
|
|
319
|
+
f"MERGE {quote(table)} AS target",
|
|
320
|
+
f"USING (SELECT {', '.join(['?' for _ in data])} AS ({', '.join(quote(k) for k in data.keys())})) AS source",
|
|
321
|
+
f"ON ({' AND '.join([f'target.{quote(k)} = source.{quote(k)}' for k in pk_columns])})",
|
|
322
|
+
"WHEN MATCHED THEN",
|
|
323
|
+
f"UPDATE SET {', '.join([f'{quote(k)} = source.{quote(k)}' for k in data_columns])}",
|
|
324
|
+
"WHEN NOT MATCHED THEN",
|
|
325
|
+
f"INSERT ({', '.join(quote(k) for k in data.keys())})",
|
|
326
|
+
f"VALUES ({', '.join([f'source.{quote(k)}' for k in data.keys()])});",
|
|
327
|
+
]
|
|
328
|
+
|
|
329
|
+
return " ".join(merge_parts), list(data.values())
|
|
330
|
+
else:
|
|
331
|
+
return cls.insert(table, data)
|
|
332
|
+
|
|
333
|
+
    # Metadata queries
    @classmethod
    def version(cls):
        """Return SQL selecting the server version string."""
        return "SELECT @@VERSION"

    @classmethod
    def timestamp(cls):
        """Return SQL selecting the current server date/time."""
        return "SELECT GETDATE()"

    @classmethod
    def user(cls):
        """Return SQL selecting the current login name."""
        return "SELECT SYSTEM_USER"

    @classmethod
    def databases(cls):
        """Return SQL listing user databases (ids 1-4 are system DBs)."""
        return "SELECT name FROM sys.databases WHERE database_id > 4"

    @classmethod
    def schemas(cls):
        """Return SQL listing schema names."""
        return "SELECT name FROM sys.schemas"

    @classmethod
    def current_schema(cls):
        """Return SQL selecting the caller's default schema."""
        return "SELECT SCHEMA_NAME()"

    @classmethod
    def current_database(cls):
        """Return SQL selecting the current database name."""
        return "SELECT DB_NAME()"

    @classmethod
    def tables(cls, system=False):
        """Return SQL listing tables; MS-shipped ones are excluded unless
        *system* is true."""
        if system:
            return "SELECT name FROM sys.tables"
        else:
            return "SELECT name FROM sys.tables WHERE is_ms_shipped = 0"

    @classmethod
    def views(cls, system=False):
        """Return SQL listing views; MS-shipped ones are excluded unless
        *system* is true."""
        if system:
            return "SELECT name FROM sys.views"
        else:
            return "SELECT name FROM sys.views WHERE is_ms_shipped = 0"

    @classmethod
    def create_database(cls, name):
        """Return SQL creating database *name*."""
        return f"CREATE DATABASE {quote(name)}"

    @classmethod
    def drop_database(cls, name):
        """Return SQL dropping database *name*."""
        return f"DROP DATABASE {quote(name)}"

    @classmethod
    def create_table(cls, name, columns=None, drop=False):
        """Return SQL creating table *name* with an identity primary key.

        NOTE(review): the *columns* argument is currently ignored, and with
        ``drop=True`` a DROP TABLE IF EXISTS is returned instead — confirm
        callers add columns via alter_add afterwards.
        """
        if drop:
            return f"DROP TABLE IF EXISTS {quote(name)}"

        # Basic CREATE TABLE
        return f"CREATE TABLE {quote(name)} (id INT IDENTITY(1,1) PRIMARY KEY)"

    @classmethod
    def drop_table(cls, name):
        """Return SQL dropping table *name*."""
        return f"DROP TABLE {quote(name)}"

    @classmethod
    def truncate(cls, table):
        """Return SQL truncating *table*."""
        return f"TRUNCATE TABLE {quote(table)}"
|
|
399
|
+
|
|
400
|
+
@classmethod
|
|
401
|
+
def columns(cls, name):
|
|
402
|
+
return f"""
|
|
403
|
+
SELECT
|
|
404
|
+
COLUMN_NAME,
|
|
405
|
+
DATA_TYPE,
|
|
406
|
+
IS_NULLABLE,
|
|
407
|
+
COLUMN_DEFAULT,
|
|
408
|
+
CHARACTER_MAXIMUM_LENGTH
|
|
409
|
+
FROM INFORMATION_SCHEMA.COLUMNS
|
|
410
|
+
WHERE TABLE_NAME = '{name}'
|
|
411
|
+
ORDER BY ORDINAL_POSITION
|
|
412
|
+
"""
|
|
413
|
+
|
|
414
|
+
@classmethod
|
|
415
|
+
def column_info(cls, table, name):
|
|
416
|
+
return f"""
|
|
417
|
+
SELECT
|
|
418
|
+
COLUMN_NAME,
|
|
419
|
+
DATA_TYPE,
|
|
420
|
+
IS_NULLABLE,
|
|
421
|
+
COLUMN_DEFAULT,
|
|
422
|
+
CHARACTER_MAXIMUM_LENGTH
|
|
423
|
+
FROM INFORMATION_SCHEMA.COLUMNS
|
|
424
|
+
WHERE TABLE_NAME = '{table}' AND COLUMN_NAME = '{name}'
|
|
425
|
+
"""
|
|
426
|
+
|
|
427
|
+
    @classmethod
    def drop_column(cls, table, name, cascade=True):
        """Return SQL dropping column *name* from *table*.

        NOTE(review): `cascade` is accepted for cross-dialect signature
        parity but ignored here — confirm that is intended.
        """
        return f"ALTER TABLE {quote(table)} DROP COLUMN {quote(name)}"

    @classmethod
    def alter_add(cls, table, columns, null_allowed=True):
        """Return SQL adding columns; *columns* maps name -> SQL type text."""
        alter_parts = []
        for col, col_type in columns.items():
            null_clause = "NULL" if null_allowed else "NOT NULL"
            alter_parts.append(f"ADD {quote(col)} {col_type} {null_clause}")

        return f"ALTER TABLE {quote(table)} {', '.join(alter_parts)}"

    @classmethod
    def alter_drop(cls, table, columns):
        """Return SQL dropping each column in *columns* from *table*."""
        drop_parts = [f"DROP COLUMN {quote(col)}" for col in columns]
        return f"ALTER TABLE {quote(table)} {', '.join(drop_parts)}"

    @classmethod
    def alter_column_by_type(cls, table, column, value, nullable=True):
        """Return SQL changing *column* to SQL type *value* with the given
        nullability."""
        null_clause = "NULL" if nullable else "NOT NULL"
        return f"ALTER TABLE {quote(table)} ALTER COLUMN {quote(column)} {value} {null_clause}"

    @classmethod
    def alter_column_by_sql(cls, table, column, value):
        """Return SQL altering *column* using the raw definition *value*."""
        return f"ALTER TABLE {quote(table)} ALTER COLUMN {quote(column)} {value}"

    @classmethod
    def rename_column(cls, table, orig, new):
        """Return an sp_rename call renaming a column.

        NOTE(review): names are interpolated into quoted literals
        unescaped; a name containing a single quote breaks the statement.
        """
        return f"EXEC sp_rename '{table}.{orig}', '{new}', 'COLUMN'"

    @classmethod
    def rename_table(cls, table, new):
        """Return an sp_rename call renaming a table.

        NOTE(review): same unescaped-interpolation caveat as above.
        """
        return f"EXEC sp_rename '{table}', '{new}'"
|
|
461
|
+
|
|
462
|
+
    @classmethod
    def primary_keys(cls, table):
        """Return SQL listing the primary-key column names of *table*.

        NOTE(review): the table name is interpolated unescaped into a
        quoted literal; a name with an embedded single quote breaks it.
        """
        return f"""
        SELECT COLUMN_NAME
        FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
        WHERE OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + CONSTRAINT_NAME), 'IsPrimaryKey') = 1
        AND TABLE_NAME = '{table}'
        """

    @classmethod
    def foreign_key_info(cls, table=None, column=None, schema=None):
        """Return SQL describing foreign-key relationships.

        Joins INFORMATION_SCHEMA views to pair each FK column with the
        table/column it references.  Optional *table* / *column* narrow the
        result; *schema* is accepted but currently unused.
        """
        sql = """
        SELECT
            FK.TABLE_NAME,
            CU.COLUMN_NAME,
            PK.TABLE_NAME AS REFERENCED_TABLE_NAME,
            PT.COLUMN_NAME AS REFERENCED_COLUMN_NAME,
            C.CONSTRAINT_NAME
        FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS C
        INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS FK ON C.CONSTRAINT_NAME = FK.CONSTRAINT_NAME
        INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS PK ON C.UNIQUE_CONSTRAINT_NAME = PK.CONSTRAINT_NAME
        INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE CU ON C.CONSTRAINT_NAME = CU.CONSTRAINT_NAME
        INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE PT ON PK.CONSTRAINT_NAME = PT.CONSTRAINT_NAME
        """
        if table:
            sql += f" WHERE FK.TABLE_NAME = '{table}'"
        if column:
            # Choose the conjunction based on whether a WHERE clause was
            # already started by the table filter above.
            conjunction = " AND" if table else " WHERE"
            sql += f"{conjunction} CU.COLUMN_NAME = '{column}'"
        return sql
|
|
492
|
+
|
|
493
|
+
    @classmethod
    def create_foreign_key(cls, table, columns, key_to_table, key_to_columns, name=None, schema=None):
        """Return SQL adding a foreign-key constraint.

        *columns* on *table* reference *key_to_columns* on *key_to_table*.
        When *name* is omitted a deterministic ``FK_<table>_<cols>`` name is
        generated so drop_foreign_key can find it later.  *schema* is
        accepted but currently unused.
        """
        if name is None:
            name = f"FK_{table}_{'_'.join(columns)}"

        col_list = ", ".join(quote(col) for col in columns)
        ref_col_list = ", ".join(quote(col) for col in key_to_columns)

        return f"""
        ALTER TABLE {quote(table)}
        ADD CONSTRAINT {quote(name)}
        FOREIGN KEY ({col_list})
        REFERENCES {quote(key_to_table)} ({ref_col_list})
        """

    @classmethod
    def drop_foreign_key(cls, table, columns, key_to_table=None, key_to_columns=None, name=None, schema=None):
        """Return SQL dropping a foreign-key constraint.

        Defaults to the same generated ``FK_<table>_<cols>`` name used by
        create_foreign_key when *name* is omitted.
        """
        if name is None:
            name = f"FK_{table}_{'_'.join(columns)}"

        return f"ALTER TABLE {quote(table)} DROP CONSTRAINT {quote(name)}"
|
|
514
|
+
|
|
515
|
+
    @classmethod
    def create_index(cls, tx, table=None, columns=None, unique=False, direction=None, where=None, name=None, schema=None, trigram=None, lower=None):
        """Return SQL creating an index, optionally unique and/or filtered.

        A filtered index is produced when *where* (raw SQL) is supplied.
        When *name* is omitted a deterministic ``IX_<table>_<cols>`` name is
        generated.

        NOTE(review): `direction`, `schema`, `trigram` and `lower` are
        accepted for cross-dialect parity but ignored here — confirm.
        """
        if name is None:
            name = f"IX_{table}_{'_'.join(columns)}"

        index_type = "UNIQUE INDEX" if unique else "INDEX"
        col_list = ", ".join(quote(col) for col in columns)

        sql = f"CREATE {index_type} {quote(name)} ON {quote(table)} ({col_list})"

        if where:
            sql += f" WHERE {where}"

        return sql

    @classmethod
    def drop_index(cls, table=None, columns=None, name=None, schema=None, trigram=None):
        """Return SQL dropping an index; defaults to the generated
        ``IX_<table>_<cols>`` name used by create_index."""
        if name is None:
            name = f"IX_{table}_{'_'.join(columns)}"

        return f"DROP INDEX {quote(name)} ON {quote(table)}"

    @classmethod
    def indexes(cls, table):
        """Return SQL listing index/column pairs for *table* from sys views.

        NOTE(review): the table name is interpolated unescaped into
        OBJECT_ID('...'); embedded single quotes break the statement.
        """
        return f"""
        SELECT
            i.name AS index_name,
            c.name AS column_name,
            i.is_unique
        FROM sys.indexes i
        INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
        INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
        WHERE i.object_id = OBJECT_ID('{table}')
        ORDER BY i.name, ic.key_ordinal
        """
|
|
550
|
+
|
|
551
|
+
@classmethod
|
|
552
|
+
def create_savepoint(cls, sp):
|
|
553
|
+
return f"SAVE TRANSACTION {sp}"
|
|
554
|
+
|
|
555
|
+
@classmethod
|
|
556
|
+
def release_savepoint(cls, sp):
|
|
557
|
+
return f"-- SQL Server doesn't support RELEASE SAVEPOINT {sp}"
|
|
558
|
+
|
|
559
|
+
@classmethod
|
|
560
|
+
def rollback_savepoint(cls, sp):
|
|
561
|
+
return f"ROLLBACK TRANSACTION {sp}"
|
|
562
|
+
|
|
563
|
+
    @classmethod
    def create_view(cls, name, query, temp=False, silent=True):
        """Return SQL creating view *name* defined by the raw *query* text.

        NOTE(review): `temp` and `silent` are accepted for cross-dialect
        parity but ignored here — confirm callers expect that.
        """
        # SQL Server doesn't support temporary views in the same way
        return f"CREATE VIEW {quote(name)} AS {query}"

    @classmethod
    def drop_view(cls, name, silent=True):
        """Return SQL dropping view *name*; with *silent* (default) the
        IF EXISTS form is used so a missing view is not an error."""
        if silent:
            return f"DROP VIEW IF EXISTS {quote(name)}"
        else:
            return f"DROP VIEW {quote(name)}"
|
|
574
|
+
|
|
575
|
+
    @classmethod
    def last_id(cls, table):
        """Return SQL selecting the last identity value in this session.

        NOTE(review): @@IDENTITY is session-scoped and includes values
        produced by triggers; SCOPE_IDENTITY() is usually preferred —
        confirm before changing.  The *table* argument is unused here.
        """
        return "SELECT @@IDENTITY"

    @classmethod
    def current_id(cls, table):
        """Return SQL selecting the current identity value of *table*."""
        return f"SELECT IDENT_CURRENT('{table}')"

    @classmethod
    def set_id(cls, table, start):
        """Return SQL reseeding *table*'s identity counter to *start*."""
        return f"DBCC CHECKIDENT('{table}', RESEED, {start})"

    @classmethod
    def set_sequence(cls, table, next_value):
        """Same reseed operation as set_id, exposed under the sequence-style
        name used by other dialects."""
        return f"DBCC CHECKIDENT('{table}', RESEED, {next_value})"

    @classmethod
    def massage_data(cls, data):
        """Massage data before insert/update operations.

        Currently a pass-through hook for SQL Server-specific value
        transformations.
        """
        # SQL Server-specific data transformations
        return data

    @classmethod
    def alter_trigger(cls, table, state="ENABLE", name="USER"):
        """Return SQL enabling/disabling triggers on *table*.

        NOTE(review): the *name* argument is ignored — ALL triggers are
        toggled regardless; anything other than "ENABLE" disables.
        """
        state_cmd = "ENABLE" if state.upper() == "ENABLE" else "DISABLE"
        return f"ALTER TABLE {quote(table)} {state_cmd} TRIGGER ALL"
|
|
601
|
+
|
|
602
|
+
@classmethod
|
|
603
|
+
def missing(cls, tx, table, list_values, column="SYS_ID", where=None):
|
|
604
|
+
"""Generate query to find missing values from a list."""
|
|
605
|
+
# SQL Server version using VALUES clause
|
|
606
|
+
value_rows = ", ".join([f"(?)" for _ in list_values])
|
|
607
|
+
|
|
608
|
+
sql = f"""
|
|
609
|
+
SELECT value_column FROM (
|
|
610
|
+
VALUES {value_rows}
|
|
611
|
+
) AS input_values(value_column)
|
|
612
|
+
WHERE value_column NOT IN (
|
|
613
|
+
SELECT {quote(column)} FROM {quote(table)}
|
|
614
|
+
"""
|
|
615
|
+
|
|
616
|
+
vals = list_values
|
|
617
|
+
|
|
618
|
+
if where:
|
|
619
|
+
where_sql, where_vals = cls._build_where(where)
|
|
620
|
+
sql += f" WHERE {where_sql}"
|
|
621
|
+
vals.extend(where_vals)
|
|
622
|
+
|
|
623
|
+
sql += ")"
|
|
624
|
+
|
|
625
|
+
return sql, vals
|