velocity-python 0.0.34__py3-none-any.whl → 0.0.64__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries, and is provided for informational purposes only.

Potentially problematic release: this version of velocity-python might be problematic (see the registry listing for details).
- velocity/__init__.py +1 -1
- velocity/db/core/column.py +25 -105
- velocity/db/core/database.py +79 -23
- velocity/db/core/decorators.py +84 -47
- velocity/db/core/engine.py +179 -184
- velocity/db/core/result.py +94 -49
- velocity/db/core/row.py +81 -46
- velocity/db/core/sequence.py +112 -22
- velocity/db/core/table.py +660 -243
- velocity/db/core/transaction.py +75 -77
- velocity/db/servers/mysql.py +5 -237
- velocity/db/servers/mysql_reserved.py +237 -0
- velocity/db/servers/postgres/__init__.py +19 -0
- velocity/db/servers/postgres/operators.py +23 -0
- velocity/db/servers/postgres/reserved.py +254 -0
- velocity/db/servers/postgres/sql.py +1041 -0
- velocity/db/servers/postgres/types.py +109 -0
- velocity/db/servers/sqlite.py +1 -210
- velocity/db/servers/sqlite_reserved.py +208 -0
- velocity/db/servers/sqlserver.py +1 -316
- velocity/db/servers/sqlserver_reserved.py +314 -0
- velocity/db/servers/tablehelper.py +277 -0
- velocity/misc/conv/iconv.py +277 -91
- velocity/misc/conv/oconv.py +5 -4
- velocity/misc/db.py +2 -2
- velocity/misc/format.py +2 -2
- {velocity_python-0.0.34.dist-info → velocity_python-0.0.64.dist-info}/METADATA +6 -6
- velocity_python-0.0.64.dist-info/RECORD +47 -0
- {velocity_python-0.0.34.dist-info → velocity_python-0.0.64.dist-info}/WHEEL +1 -1
- velocity/db/servers/postgres.py +0 -1396
- velocity_python-0.0.34.dist-info/RECORD +0 -39
- {velocity_python-0.0.34.dist-info → velocity_python-0.0.64.dist-info}/LICENSE +0 -0
- {velocity_python-0.0.34.dist-info → velocity_python-0.0.64.dist-info}/top_level.txt +0 -0
velocity/db/servers/postgres/sql.py (new file)

@@ -0,0 +1,1041 @@

```python
import re
import hashlib
import sqlparse

from velocity.db.core import exceptions

from .reserved import reserved_words
from .types import TYPES
from .operators import OPERATORS
from ..tablehelper import TableHelper
from collections.abc import Mapping, Sequence


TableHelper.reserved = reserved_words
TableHelper.operators = OPERATORS


system_fields = [
    "sys_id",
    "sys_created",
    "sys_modified",
    "sys_modified_by",
    "sys_dirty",
    "sys_table",
    "description",
]


class SQL:
    server = "PostGreSQL"
    type_column_identifier = "data_type"
    is_nullable = "is_nullable"

    default_schema = "public"

    ApplicationErrorCodes = ["22P02", "42883"]

    DatabaseMissingErrorCodes = []
    TableMissingErrorCodes = ["42P01"]
    ColumnMissingErrorCodes = ["42703"]
    ForeignKeyMissingErrorCodes = ["42704"]

    ConnectionErrorCodes = ["08001", "08S01", "57P03", "08006", "53300"]
    DuplicateKeyErrorCodes = []  # Handled in regex check.
    RetryTransactionCodes = []
    TruncationErrorCodes = ["22001"]
    LockTimeoutErrorCodes = ["55P03"]
    DatabaseObjectExistsErrorCodes = ["42710", "42P07", "42P04"]
    DataIntegrityErrorCodes = ["23503"]

    @classmethod
    def get_error(cls, e):
        error_code = getattr(e, "pgcode", None)
        error_mesg = getattr(e, "pgerror", None)
        return error_code, error_mesg

    types = TYPES

    @classmethod
    def select(
        cls,
        tx,
        columns=None,
        table=None,
        where=None,
        orderby=None,
        groupby=None,
        having=None,
        start=None,
        qty=None,
        lock=None,
        skip_locked=None,
    ):
        if not table:
            raise ValueError("Table name is required.")

        sql_parts = {
            "SELECT": [],
            "FROM": [],
            "WHERE": [],
            "GROUP BY": [],
            "HAVING": [],
            "ORDER BY": [],
        }

        sql = []
        vals = []

        # Assume these helpers and functions exist externally
        th = TableHelper(tx, table)

        # Handle columns and DISTINCT before aliasing
        if columns is None:
            # No columns specified - select all
            columns = ["*"]
        elif isinstance(columns, str):
            columns = th.split_columns(columns)

        if not isinstance(columns, Sequence):
            raise Exception(
                f"variable `columns` must be a sequence, but {type(columns)} was found"
            )

        columns = [c.strip() for c in columns]  # Preserve original case
        distinct = False

        if any(
            "distinct" in c.lower() for c in columns
        ):  # Check if "distinct" exists in any entry
            distinct = True
            columns = [
                c.replace("distinct", "", 1).strip() if "distinct" in c.lower() else c
                for c in columns
            ]

        processed_columns = []
        for col in columns:
            processed_columns.append(
                th.resolve_references(
                    col, options={"alias_column": True, "alias_table": True}
                )
            )

        columns = processed_columns

        # Handle WHERE conditions
        if isinstance(where, Mapping):
            new_where = []
            for key, val in where.items():
                new_where.append(th.make_predicate(key, val))
            where = new_where

        new_orderby = []
        if isinstance(orderby, str):
            orderby = th.split_columns(orderby)
        # Handle orderby references
        if isinstance(orderby, Sequence):
            for column in orderby:
                if " " in column:
                    col_name, direction = column.split(" ", 1)
                    col_name = th.resolve_references(
                        col_name, options={"alias_only": True}
                    )
                    new_orderby.append(f"{col_name} {direction}")
                else:
                    new_orderby.append(
                        th.resolve_references(
                            column.strip(), options={"alias_only": True}
                        )
                    )

        if isinstance(orderby, Mapping):
            for key, val in orderby.items():
                parsed_key = th.resolve_references(key, options={"alias_only": True})
                new_orderby.append(f"{parsed_key} {val}")
        orderby = new_orderby

        # Handle groupby
        if isinstance(groupby, str):
            groupby = th.split_columns(groupby)
        if isinstance(groupby, Sequence):
            new_groupby = []
            for gcol in groupby:
                new_groupby.append(
                    th.resolve_references(gcol, options={"alias_only": True})
                )
            groupby = new_groupby

        # Handle having
        if isinstance(having, Mapping):
            new_having = []
            for key, val in having.items():
                new_having.append(th.make_predicate(key, val))
            having = new_having

        # SELECT clause
        # columns is a list/tuple of already processed references
        sql_parts["SELECT"].extend(columns)
        alias = th.get_table_alias("current_table")
        if not alias:
            raise ValueError("Main table alias resolution failed.")

        # FROM clause
        if th.foreign_keys:
            sql_parts["FROM"].append(f"{th.quote(table)} AS {th.quote(alias)}")
            # Handle joins
            done = []
            for key, ref_info in th.foreign_keys.items():
                ref_table = ref_info["ref_table"]
                if ref_table in done:
                    continue
                done.append(ref_table)
                if not all(
                    k in ref_info
                    for k in ("alias", "local_column", "ref_table", "ref_column")
                ):
                    raise ValueError(f"Invalid table alias info for {ref_table}.")
                sql_parts["FROM"].append(
                    f"LEFT JOIN {th.quote(ref_table)} AS {th.quote(ref_info['alias'])} "
                    f"ON {th.quote(alias)}.{th.quote(ref_info['local_column'])} = {th.quote(ref_info['alias'])}.{th.quote(ref_info['ref_column'])}"
                )
        else:
            sql_parts["FROM"].append(th.quote(table))

        # WHERE
        if where:
            if isinstance(where, str):
                sql_parts["WHERE"].append(where)
            else:
                for pred, val in where:
                    sql_parts["WHERE"].append(pred)
                    if val is None:
                        pass
                    elif isinstance(val, tuple):
                        vals.extend(val)
                    else:
                        vals.append(val)

        # GROUP BY
        if groupby:
            sql_parts["GROUP BY"].append(",".join(groupby))

        # HAVING
        if having:
            if isinstance(having, str):
                sql_parts["HAVING"].append(having)
            else:
                for pred, val in having:
                    sql_parts["HAVING"].append(pred)
                    if val is None:
                        pass
                    elif isinstance(val, tuple):
                        vals.extend(val)
                    else:
                        vals.append(val)

        # ORDER BY
        if orderby:
            sql_parts["ORDER BY"].append(",".join(orderby))

        # Construct final SQL
        if sql_parts["SELECT"]:
            sql.append("SELECT")
            if distinct:
                sql.append("DISTINCT")
            sql.append(", ".join(sql_parts["SELECT"]))

        if sql_parts["FROM"]:
            sql.append("FROM")
            sql.append(" ".join(sql_parts["FROM"]))

        if sql_parts["WHERE"]:
            sql.append("WHERE " + " AND ".join(sql_parts["WHERE"]))

        if sql_parts["GROUP BY"]:
            sql.append("GROUP BY " + " ".join(sql_parts["GROUP BY"]))

        if sql_parts["HAVING"]:
            sql.append("HAVING " + " AND ".join(sql_parts["HAVING"]))

        if sql_parts["ORDER BY"]:
            sql.append("ORDER BY " + " ".join(sql_parts["ORDER BY"]))

        # OFFSET/FETCH
        if start is not None:
            if not isinstance(start, int):
                raise ValueError("Start (OFFSET) must be an integer.")
            sql.append(f"OFFSET {start} ROWS")

        if qty is not None:
            if not isinstance(qty, int):
                raise ValueError("Qty (FETCH) must be an integer.")
            sql.append(f"FETCH NEXT {qty} ROWS ONLY")

        # FOR UPDATE and SKIP LOCKED
        if lock or skip_locked:
            sql.append("FOR UPDATE")
            if skip_locked:
                sql.append("SKIP LOCKED")

        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)
```
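For orientation, a minimal sketch of how `SQL.select` is typically driven — assuming a live velocity transaction `tx`, a `public.users` table, and the `where`-key operator form accepted by `TableHelper.make_predicate` (all assumptions, not part of this diff):

```python
# Illustrative only: table name, tx, and operator syntax are assumed.
sql, vals = SQL.select(
    tx,
    columns="distinct name, age",  # "distinct" is detected and stripped above
    table="public.users",
    where={"age >": 21},           # key/operator form assumed per make_predicate
    orderby="name desc",
    start=10,                      # becomes OFFSET 10 ROWS
    qty=25,                        # becomes FETCH NEXT 25 ROWS ONLY
)
# sql  -> a sqlparse-reindented "SELECT DISTINCT ... FROM ... WHERE ..." string
# vals -> the bound parameters, e.g. (21,), ready for cursor.execute(sql, vals)
```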
```python
    @classmethod
    def update(cls, tx, table, data, where=None, pk=None, excluded=False):
        if not table:
            raise ValueError("Table name is required.")
        if not pk and not where:
            raise ValueError("Where clause (where) or primary key (pk) is required.")
        if not isinstance(data, Mapping) or not data:
            raise ValueError("data must be a non-empty mapping of column-value pairs.")

        th = TableHelper(tx, table)

        set_clauses = []
        vals = []

        if pk:
            if where:
                where.update(pk)
            else:
                where = pk

        # Handle data columns (SET clause)
        for col, val in data.items():
            col = th.resolve_references(
                col, options={"alias_column": False, "alias_table": False}
            )

            # Normal column
            if excluded:
                set_clauses.append(f"{col} = EXCLUDED.{col}")
            else:
                set_clauses.append(f"{col} = %s")
                vals.append(val)

        # Extract the final where conditions and values
        where_clauses = []
        if not excluded:
            # First handle user-provided WHERE conditions
            if isinstance(where, Mapping):
                for key, val in where.items():
                    col, value = th.make_predicate(key, val)
                    where_clauses.append(col)
                    if isinstance(value, tuple):
                        vals.extend(value)
                    else:
                        vals.append(value)

        # Final assembly of SQL
        sql = []
        sql.append("UPDATE")
        if not excluded:
            if th.foreign_keys:
                sql.append(
                    f"{th.quote(table)} AS {th.quote(th.get_table_alias('current_table'))}"
                )
            else:
                sql.append(TableHelper.quote(table))
        sql.append("SET " + ", ".join(set_clauses))
        if not excluded:
            if th.foreign_keys:
                for key, ref_info in th.foreign_keys.items():
                    ref_table = ref_info["ref_table"]
                    sql.append(
                        f"LEFT JOIN {th.quote(ref_table)} AS {th.quote(ref_info['alias'])} "
                    )
                    where_clauses.append(
                        f"{th.quote(th.get_table_alias('current_table'))}.{th.quote(ref_info['local_column'])} = {th.quote(ref_info['alias'])}.{th.quote(ref_info['ref_column'])}"
                    )

        if not excluded:
            if where_clauses:
                sql.append("WHERE " + " AND ".join(where_clauses))
            else:
                # Without a WHERE, this would update all rows, so raise instead.
                raise ValueError(
                    "No WHERE clause could be constructed. Update would affect all rows."
                )

        # Final assembled query
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)
```
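`update` has two modes worth noting: the normal parameterized UPDATE, and `excluded=True`, which emits only an `UPDATE SET col = EXCLUDED.col` fragment for `merge` to splice in after `DO`. A hedged sketch, under the same `tx`/table assumptions as above:

```python
# Normal mode: the WHERE is built from the pk/where mappings.
sql, vals = SQL.update(tx, "public.users", {"name": "Bo"}, pk={"sys_id": 42})
# roughly: UPDATE public.users SET name = %s WHERE sys_id = %s; vals == ("Bo", 42)

# excluded mode: no table, no WHERE -- just the upsert SET fragment.
frag, _ = SQL.update(tx, "public.users", {"name": "Bo"}, pk={"sys_id": 42}, excluded=True)
# roughly: UPDATE SET name = EXCLUDED.name   (consumed after "DO" in merge below)
```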
```python
    @classmethod
    def insert(cls, table, data):
        keys = []
        vals = []
        args = []
        for key, val in data.items():
            keys.append(TableHelper.quote(key.lower()))
            if isinstance(val, str) and len(val) > 2 and val[:2] == "@@" and val[2:]:
                vals.append(val[2:])
            else:
                vals.append("%s")
                args.append(val)

        sql = ["INSERT INTO"]
        sql.append(TableHelper.quote(table))
        sql.append("(")
        sql.append(",".join(keys))
        sql.append(")")
        sql.append("VALUES")
        sql.append("(")
        sql.append(",".join(vals))
        sql.append(")")
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(args)
```
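Note the `@@` escape in `insert`: a string value beginning with `@@` is spliced into the statement verbatim rather than bound as a `%s` parameter. A sketch of the effect (table and column names are illustrative):

```python
sql, args = SQL.insert("public.users", {
    "name": "Alice",                      # bound as %s
    "last_seen": "@@CURRENT_TIMESTAMP",   # "@@" stripped, inlined as raw SQL
})
# roughly: INSERT INTO public.users (name, last_seen) VALUES (%s, CURRENT_TIMESTAMP)
# args == ("Alice",)
```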
```python
    @classmethod
    def merge(cls, tx, table, data, pk, on_conflict_do_nothing, on_conflict_update):
        if pk is None:
            pkeys = tx.table(table).primary_keys()
            if not pkeys:
                raise ValueError("Primary key required for merge.")
            # If there are multiple primary keys, we need to use all of them
            if len(pkeys) > 1:
                pk = {pk: data[pk] for pk in pkeys}
            else:
                pk = {pkeys[0]: data[pkeys[0]]}
            # remove primary key from data
            data = {k: v for k, v in data.items() if k not in pk}

        full_data = {}
        full_data.update(data)
        full_data.update(pk)

        sql, vals = cls.insert(table, full_data)
        sql = [sql]
        vals = list(vals)
        if on_conflict_do_nothing != on_conflict_update:
            sql.append("ON CONFLICT")
            sql.append("(")
            sql.append(",".join(pk.keys()))
            sql.append(")")
            sql.append("DO")
            if on_conflict_do_nothing:
                sql.append("NOTHING")
            elif on_conflict_update:
                sql2, vals2 = cls.update(tx, table, data, pk, excluded=True)
                sql.append(sql2)
                vals.extend(vals2)
        else:
            raise Exception(
                "Update on conflict must have one and only one option to complete on conflict."
            )
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)
```
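`merge` composes `insert` with the `excluded` form of `update` into a PostgreSQL upsert; exactly one of the two conflict flags may be set, otherwise it raises. A sketch, assuming `sys_id` is the table's primary key:

```python
sql, vals = SQL.merge(
    tx, "public.users",
    data={"sys_id": 42, "name": "Alice"},
    pk=None,                        # resolved via tx.table(...).primary_keys()
    on_conflict_do_nothing=False,
    on_conflict_update=True,
)
# roughly: INSERT INTO public.users (name, sys_id) VALUES (%s, %s)
#          ON CONFLICT (sys_id) DO UPDATE SET name = EXCLUDED.name
```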
```python
    @classmethod
    def version(cls):
        return "select version()", tuple()

    @classmethod
    def timestamp(cls):
        return "select current_timestamp", tuple()

    @classmethod
    def user(cls):
        return "select current_user", tuple()

    @classmethod
    def databases(cls):
        return "select datname from pg_database where datistemplate = false", tuple()

    @classmethod
    def schemas(cls):
        return "select schema_name from information_schema.schemata", tuple()

    @classmethod
    def current_schema(cls):
        return "select current_schema", tuple()

    @classmethod
    def current_database(cls):
        return "select current_database()", tuple()

    @classmethod
    def tables(cls, system=False):
        if system:
            return (
                "select table_schema,table_name from information_schema.tables where table_type = 'BASE TABLE' order by table_schema,table_name",
                tuple(),
            )
        else:
            return (
                "select table_schema, table_name from information_schema.tables where table_type = 'BASE TABLE' and table_schema NOT IN ('pg_catalog', 'information_schema')",
                tuple(),
            )

    @classmethod
    def views(cls, system=False):
        if system:
            return (
                "select table_schema, table_name from information_schema.views order by table_schema,table_name",
                tuple(),
            )
        else:
            return (
                "select table_schema, table_name from information_schema.views where table_schema = any (current_schemas(false)) order by table_schema,table_name",
                tuple(),
            )

    @classmethod
    def create_database(cls, name):
        return f"create database {name}", tuple()

    @classmethod
    def last_id(cls, table):
        return "SELECT CURRVAL(PG_GET_SERIAL_SEQUENCE(%s, 'sys_id'))", tuple([table])

    @classmethod
    def current_id(cls, table):
        return (
            "SELECT pg_sequence_last_value(PG_GET_SERIAL_SEQUENCE(%s, 'sys_id'))",
            tuple([table]),
        )

    @classmethod
    def set_id(cls, table, start):
        return "SELECT SETVAL(PG_GET_SERIAL_SEQUENCE(%s, 'sys_id'), %s)", tuple(
            [table, start]
        )

    @classmethod
    def drop_database(cls, name):
        return f"drop database if exists {name}", tuple()

    @classmethod
    def create_table(cls, name, columns={}, drop=False):
        if "." in name:
            fqtn = name
        else:
            fqtn = f"public.{name}"
        schema, table = fqtn.split(".")
        name = fqtn.replace(".", "_")
        sql = []
        if drop:
            sql.append(cls.drop_table(fqtn)[0])
        sql.append(
            f"""
            CREATE TABLE {fqtn} (
                sys_id BIGSERIAL PRIMARY KEY,
                sys_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                sys_created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                sys_modified_by TEXT,
                sys_dirty BOOLEAN NOT NULL DEFAULT FALSE,
                sys_table TEXT,
                description TEXT
            );

            SELECT SETVAL(PG_GET_SERIAL_SEQUENCE('{fqtn}', 'sys_id'),1000,TRUE);

            CREATE OR REPLACE FUNCTION {schema}.on_sys_modified()
                RETURNS TRIGGER AS
            $BODY$
            BEGIN
                -- Update sys_modified on each insert/update.
                NEW.sys_modified := now();
                IF (TG_OP = 'INSERT') THEN
                    NEW.sys_created := now();
                ELSEIF (TG_OP = 'UPDATE') THEN
                    -- Do not allow sys_created to be modified.
                    NEW.sys_created := OLD.sys_created;
                END IF;
                -- Insert table name to row
                NEW.sys_table := TG_TABLE_NAME;
                RETURN NEW;
            END;
            $BODY$
            LANGUAGE plpgsql VOLATILE
            COST 100;

            CREATE TRIGGER on_update_row_{fqtn.replace('.', '_')}
            BEFORE INSERT OR UPDATE ON {fqtn}
            FOR EACH ROW EXECUTE PROCEDURE {schema}.on_sys_modified();
            """
        )

        for key, val in columns.items():
            key = re.sub(r"[<>!=%]", "", key.lower())  # strip operator suffixes
            if key in system_fields:
                continue
            sql.append(
                f"ALTER TABLE {TableHelper.quote(fqtn)} ADD COLUMN {TableHelper.quote(key)} {TYPES.get_type(val)};"
            )

        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()
```
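`create_table` always provisions the `sys_*` audit columns, seeds the `sys_id` sequence at 1000, and installs the `on_sys_modified` trigger; user columns are then added one `ALTER TABLE` at a time, typed from sample Python values by `TYPES.get_type`. A sketch — the sample-value-to-type mapping is an assumption inferred from the loop above, not documented in this diff:

```python
import datetime

sql, _ = SQL.create_table(
    "public.orders",
    columns={
        "customer": "",                     # sample str      -> a text-ish column
        "total": 0.0,                       # sample float    -> a numeric-ish column
        "placed": datetime.datetime.now(),  # sample datetime -> a timestamp-ish column
    },
)
# sql contains the CREATE TABLE with the sys_* columns, the SETVAL seeding,
# the trigger DDL, and one ALTER TABLE ... ADD COLUMN per non-system key.
```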
```python
    @classmethod
    def drop_table(cls, name):
        return f"drop table if exists {TableHelper.quote(name)} cascade;", tuple()

    @classmethod
    def drop_column(cls, table, name, cascade=True):
        if cascade:
            return (
                f"ALTER TABLE {TableHelper.quote(table)} DROP COLUMN {TableHelper.quote(name)} CASCADE",
                tuple(),
            )
        else:
            return (
                f"ALTER TABLE {TableHelper.quote(table)} DROP COLUMN {TableHelper.quote(name)}",
                tuple(),
            )

    @classmethod
    def columns(cls, name):
        if "." in name:
            return """
                select column_name
                from information_schema.columns
                where UPPER(table_schema) = UPPER(%s)
                and UPPER(table_name) = UPPER(%s)
                """, tuple(name.split("."))
        else:
            return """
                select column_name
                from information_schema.columns
                where UPPER(table_name) = UPPER(%s)
                """, tuple([name])

    @classmethod
    def column_info(cls, table, name):
        params = table.split(".")
        params.append(name)
        if "." in table:
            return """
                select *
                from information_schema.columns
                where UPPER(table_schema) = UPPER(%s)
                and UPPER(table_name) = UPPER(%s)
                and UPPER(column_name) = UPPER(%s)
                """, tuple(params)
        else:
            return """
                select *
                from information_schema.columns
                where UPPER(table_name) = UPPER(%s)
                and UPPER(column_name) = UPPER(%s)
                """, tuple(params)

    @classmethod
    def primary_keys(cls, table):
        params = table.split(".")
        params.reverse()
        if "." in table:
            return """
                SELECT
                    pg_attribute.attname
                FROM pg_index, pg_class, pg_attribute, pg_namespace
                WHERE
                    pg_class.oid = %s::regclass AND
                    indrelid = pg_class.oid AND
                    nspname = %s AND
                    pg_class.relnamespace = pg_namespace.oid AND
                    pg_attribute.attrelid = pg_class.oid AND
                    pg_attribute.attnum = any(pg_index.indkey)
                    AND indisprimary
                """, tuple(params)
        else:
            return """
                SELECT
                    pg_attribute.attname
                FROM pg_index, pg_class, pg_attribute, pg_namespace
                WHERE
                    pg_class.oid = %s::regclass AND
                    indrelid = pg_class.oid AND
                    pg_class.relnamespace = pg_namespace.oid AND
                    pg_attribute.attrelid = pg_class.oid AND
                    pg_attribute.attnum = any(pg_index.indkey)
                    AND indisprimary
                """, tuple(params)

    @classmethod
    def foreign_key_info(cls, table=None, column=None, schema=None):
        if "." in table:
            schema, table = table.split(".")

        sql = [
            """
            SELECT
                KCU1.CONSTRAINT_NAME AS "FK_CONSTRAINT_NAME"
                , KCU1.CONSTRAINT_SCHEMA AS "FK_CONSTRAINT_SCHEMA"
                , KCU1.CONSTRAINT_CATALOG AS "FK_CONSTRAINT_CATALOG"
                , KCU1.TABLE_NAME AS "FK_TABLE_NAME"
                , KCU1.COLUMN_NAME AS "FK_COLUMN_NAME"
                , KCU1.ORDINAL_POSITION AS "FK_ORDINAL_POSITION"
                , KCU2.CONSTRAINT_NAME AS "UQ_CONSTRAINT_NAME"
                , KCU2.CONSTRAINT_SCHEMA AS "UQ_CONSTRAINT_SCHEMA"
                , KCU2.CONSTRAINT_CATALOG AS "UQ_CONSTRAINT_CATALOG"
                , KCU2.TABLE_NAME AS "UQ_TABLE_NAME"
                , KCU2.COLUMN_NAME AS "UQ_COLUMN_NAME"
                , KCU2.ORDINAL_POSITION AS "UQ_ORDINAL_POSITION"
                , KCU1.CONSTRAINT_NAME AS "CONSTRAINT_NAME"
                , KCU2.CONSTRAINT_SCHEMA AS "REFERENCED_TABLE_SCHEMA"
                , KCU2.TABLE_NAME AS "REFERENCED_TABLE_NAME"
                , KCU2.COLUMN_NAME AS "REFERENCED_COLUMN_NAME"
            FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS RC
            JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE KCU1
                ON KCU1.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG
                AND KCU1.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA
                AND KCU1.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
            JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE KCU2
                ON KCU2.CONSTRAINT_CATALOG = RC.UNIQUE_CONSTRAINT_CATALOG
                AND KCU2.CONSTRAINT_SCHEMA = RC.UNIQUE_CONSTRAINT_SCHEMA
                AND KCU2.CONSTRAINT_NAME = RC.UNIQUE_CONSTRAINT_NAME
                AND KCU2.ORDINAL_POSITION = KCU1.ORDINAL_POSITION
            """
        ]
        vals = []
        where = {}
        if schema:
            where["LOWER(KCU1.CONSTRAINT_SCHEMA)"] = schema.lower()
        if table:
            where["LOWER(KCU1.TABLE_NAME)"] = table.lower()
        if column:
            where["LOWER(KCU1.COLUMN_NAME)"] = column.lower()
        sql.append("WHERE")
        connect = ""
        for key, val in where.items():
            if connect:
                sql.append(connect)
            sql.append(f"{key} = %s")
            vals.append(val)
            connect = "AND"

        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)

    @classmethod
    def create_foreign_key(
        cls, table, columns, key_to_table, key_to_columns, name=None, schema=None
    ):
        if "." not in table and schema:
            table = f"{schema}.{table}"
        if isinstance(key_to_columns, str):
            key_to_columns = [key_to_columns]
        if isinstance(columns, str):
            columns = [columns]
        if not name:
            # Derive a stable constraint name from the key definition.
            m = hashlib.md5()
            m.update(table.encode("utf-8"))
            m.update(" ".join(columns).encode("utf-8"))
            m.update(key_to_table.encode("utf-8"))
            m.update(" ".join(key_to_columns).encode("utf-8"))
            name = f"FK_{m.hexdigest()}"
        sql = f"ALTER TABLE {table} ADD CONSTRAINT {name} FOREIGN KEY ({','.join(columns)}) REFERENCES {key_to_table} ({','.join(key_to_columns)});"
        sql = sqlparse.format(sql, reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def drop_foreign_key(
        cls,
        table,
        columns,
        key_to_table=None,
        key_to_columns=None,
        name=None,
        schema=None,
    ):
        if "." not in table and schema:
            table = f"{schema}.{table}"
        if isinstance(key_to_columns, str):
            key_to_columns = [key_to_columns]
        if isinstance(columns, str):
            columns = [columns]
        if not name:
            # Recompute the same MD5-derived name used by create_foreign_key.
            m = hashlib.md5()
            m.update(table.encode("utf-8"))
            m.update(" ".join(columns).encode("utf-8"))
            m.update(key_to_table.encode("utf-8"))
            m.update(" ".join(key_to_columns).encode("utf-8"))
            name = f"FK_{m.hexdigest()}"
        sql = f"ALTER TABLE {table} DROP CONSTRAINT {name};"
        return sql, tuple()

    @classmethod
    def create_index(
        cls,
        tx,
        table=None,
        columns=None,
        unique=False,
        direction=None,
        where=None,
        name=None,
        schema=None,
        trigram=None,
        lower=None,
    ):
        """
        The following statements must be executed on the database instance
        once to enable the respective trigram features:
        CREATE EXTENSION pg_trgm; is required to use gin.
        CREATE EXTENSION btree_gist; is required to use gist.
        """
        if "." not in table and schema:
            table = f"{schema}.{table}"
        if isinstance(columns, (list, set)):
            columns = ",".join([TableHelper.quote(c.lower()) for c in columns])
        else:
            columns = TableHelper.quote(columns)
        sql = ["CREATE"]
        if unique:
            sql.append("UNIQUE")
        sql.append("INDEX")
        tablename = TableHelper.quote(table)
        if not name:
            name = re.sub(
                r"\([^)]*\)",
                "",
                columns.replace(" ", "").replace(",", "_").replace('"', ""),
            )
        if trigram:
            sql.append(f"IDX__TRGM_{table.replace('.', '_')}_{trigram}__{name}".upper())
        else:
            sql.append(f"IDX__{table.replace('.', '_')}__{name}".upper())
        sql.append("ON")
        sql.append(TableHelper.quote(tablename))

        if trigram:
            sql.append("USING")
            sql.append(trigram)
        sql.append("(")
        join = ""
        for column_name in columns.split(","):
            column_name = column_name.replace('"', "")
            if join:
                sql.append(join)
            column = tx.table(table).column(column_name)
            if not column.exists():
                raise Exception(
                    f"Column {column_name} does not exist in table {table}."
                )
            if column.py_type == str:
                if lower:
                    sql.append(f"lower({TableHelper.quote(column_name)})")
                else:
                    sql.append(TableHelper.quote(column_name))
            else:
                sql.append(TableHelper.quote(column_name))
            join = ","

        if trigram:
            sql.append(f"{trigram.lower()}_trgm_ops")
        sql.append(")")
        vals = []
        s, v = TableHelper(tx, table).make_where(where)
        sql.append(s)
        vals.extend(v)

        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)
```
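As the docstring notes, trigram indexes require one-time extensions on the database instance. A sketch of a trigram index build (same `tx` assumption as earlier; `lower=True` wraps text columns in `lower(...)` so case-insensitive searches can use the index):

```python
# One-time, per database instance (from the docstring above):
#   CREATE EXTENSION pg_trgm;      -- required for USING gin
#   CREATE EXTENSION btree_gist;   -- required for USING gist
sql, vals = SQL.create_index(
    tx,
    table="public.users",
    columns=["name"],
    trigram="gin",    # emits USING gin (... gin_trgm_ops)
    lower=True,
)
```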
```python
    @classmethod
    def drop_index(cls, table=None, columns=None, name=None, schema=None, trigram=None):
        if "." not in table and schema:
            table = f"{schema}.{table}"
        if isinstance(columns, (list, set)):
            columns = ",".join([TableHelper.quote(c.lower()) for c in columns])
        else:
            columns = TableHelper.quote(columns)
        sql = ["DROP"]
        sql.append("INDEX IF EXISTS")
        tablename = TableHelper.quote(table)
        if not name:
            name = re.sub(
                r"\([^)]*\)",
                "",
                columns.replace(" ", "").replace(",", "_").replace('"', ""),
            )
        if trigram:
            sql.append(f"IDX__TRGM_{table.replace('.', '_')}_{trigram.upper()}__{name}")
        else:
            sql.append(f"IDX__{table.replace('.', '_')}__{name}")

        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def massage_data(cls, data):
        data = {key.lower(): val for key, val in data.items()}
        primaryKey = set(cls.GetPrimaryKeyColumnNames())
        if not primaryKey:
            if not cls.Exists():
                raise exceptions.DbTableMissingError
        dataKeys = set(data.keys()).intersection(primaryKey)
        dataColumns = set(data.keys()).difference(primaryKey)
        pk = {}
        pk.update([(k, data[k]) for k in dataKeys])
        d = {}
        d.update([(k, data[k]) for k in dataColumns])
        return d, pk

    @classmethod
    def alter_add(cls, table, columns, null_allowed=True):
        sql = []
        null = "NOT NULL" if not null_allowed else ""
        if isinstance(columns, dict):
            for key, val in columns.items():
                key = re.sub(r"[<>!=%]", "", key.lower())  # strip operator suffixes
                sql.append(
                    f"ALTER TABLE {TableHelper.quote(table)} ADD {TableHelper.quote(key)} {TYPES.get_type(val)} {null};"
                )
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def alter_drop(cls, table, columns):
        sql = [f"ALTER TABLE {TableHelper.quote(table)} DROP COLUMN"]
        if isinstance(columns, dict):
            for key, val in columns.items():
                key = re.sub(r"[<>!=%]", "", key.lower())  # strip operator suffixes
                sql.append(f"{key},")
        if sql[-1][-1] == ",":
            sql[-1] = sql[-1][:-1]
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def alter_column_by_type(cls, table, column, value, nullable=True):
        sql = [f"ALTER TABLE {TableHelper.quote(table)} ALTER COLUMN"]
        sql.append(f"{TableHelper.quote(column)} TYPE {TYPES.get_type(value)}")
        sql.append(f"USING {TableHelper.quote(column)}::{TYPES.get_conv(value)}")
        if not nullable:
            sql.append("NOT NULL")
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def alter_column_by_sql(cls, table, column, value):
        sql = [f"ALTER TABLE {TableHelper.quote(table)} ALTER COLUMN"]
        sql.append(f"{TableHelper.quote(column)} {value}")
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def rename_column(cls, table, orig, new):
        return (
            f"ALTER TABLE {TableHelper.quote(table)} RENAME COLUMN {TableHelper.quote(orig)} TO {TableHelper.quote(new)};",
            tuple(),
        )

    @classmethod
    def rename_table(cls, table, new):
        return (
            f"ALTER TABLE {TableHelper.quote(table)} RENAME TO {TableHelper.quote(new)};",
            tuple(),
        )

    @classmethod
    def create_savepoint(cls, sp):
        return f'SAVEPOINT "{sp}"', tuple()

    @classmethod
    def release_savepoint(cls, sp):
        return f'RELEASE SAVEPOINT "{sp}"', tuple()

    @classmethod
    def rollback_savepoint(cls, sp):
        return f'ROLLBACK TO SAVEPOINT "{sp}"', tuple()

    @classmethod
    def delete(cls, tx, table, where):
        sql = [f"DELETE FROM {table}"]
        vals = []
        if where:
            s, v = TableHelper(tx, table).make_where(where)
            sql.append(s)
            vals.extend(v)
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)

    @classmethod
    def truncate(cls, table):
        return f"truncate table {TableHelper.quote(table)}", tuple()

    @classmethod
    def create_view(cls, name, query, temp=False, silent=True):
        sql = ["CREATE"]
        if silent:
            sql.append("OR REPLACE")
        if temp:
            sql.append("TEMPORARY")
        sql.append("VIEW")
        sql.append(name)
        sql.append("AS")
        sql.append(query)
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def drop_view(cls, name, silent=True):
        sql = ["DROP VIEW"]
        if silent:
            sql.append("IF EXISTS")
        sql.append(name)
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple()

    @classmethod
    def alter_trigger(cls, table, state="ENABLE", name="USER"):
        return f"ALTER TABLE {table} {state} TRIGGER {name}", tuple()

    @classmethod
    def set_sequence(cls, table, next_value):
        return (
            f"SELECT SETVAL(PG_GET_SERIAL_SEQUENCE('{table}', 'sys_id'),{next_value},FALSE)",
            tuple(),
        )

    @classmethod
    def missing(cls, tx, table, list, column="SYS_ID", where=None):
        sql = [
            "SELECT * FROM",
            f"UNNEST('{{{','.join([str(x) for x in list])}}}'::int[]) id",
            "EXCEPT ALL",
            f"SELECT {column} FROM {table}",
        ]
        vals = []
        if where:
            s, v = TableHelper(tx, table).make_where(where)
            sql.append(s)
            vals.extend(v)
        sql = sqlparse.format(" ".join(sql), reindent=True, keyword_case="upper")
        return sql, tuple(vals)

    @classmethod
    def indexes(cls, table):
        """
        Returns SQL for retrieving all indexes on a given table with detailed attributes.
        """
        return (
            """
            SELECT indexname, tablename, schemaname, indexdef
            FROM pg_indexes
            WHERE tablename = %s
            """,
            (table,),
        )
```