velocity-python 0.0.129-py3-none-any.whl → 0.0.132-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of velocity-python might be problematic.
- velocity/__init__.py +1 -1
- velocity/aws/handlers/mixins/__init__.py +16 -0
- velocity/aws/handlers/mixins/activity_tracker.py +142 -0
- velocity/aws/handlers/mixins/error_handler.py +192 -0
- velocity/aws/handlers/mixins/legacy_mixin.py +53 -0
- velocity/aws/handlers/mixins/standard_mixin.py +73 -0
- velocity/db/servers/base/__init__.py +9 -0
- velocity/db/servers/base/initializer.py +69 -0
- velocity/db/servers/base/operators.py +98 -0
- velocity/db/servers/base/sql.py +503 -0
- velocity/db/servers/base/types.py +135 -0
- velocity/db/servers/mysql/__init__.py +64 -0
- velocity/db/servers/mysql/operators.py +54 -0
- velocity/db/servers/{mysql_reserved.py → mysql/reserved.py} +2 -14
- velocity/db/servers/mysql/sql.py +569 -0
- velocity/db/servers/mysql/types.py +107 -0
- velocity/db/servers/postgres/__init__.py +40 -0
- velocity/db/servers/postgres/operators.py +34 -0
- velocity/db/servers/postgres/sql.py +4 -3
- velocity/db/servers/postgres/types.py +88 -2
- velocity/db/servers/sqlite/__init__.py +52 -0
- velocity/db/servers/sqlite/operators.py +52 -0
- velocity/db/servers/sqlite/reserved.py +20 -0
- velocity/db/servers/sqlite/sql.py +530 -0
- velocity/db/servers/sqlite/types.py +92 -0
- velocity/db/servers/sqlserver/__init__.py +64 -0
- velocity/db/servers/sqlserver/operators.py +47 -0
- velocity/db/servers/sqlserver/reserved.py +32 -0
- velocity/db/servers/sqlserver/sql.py +625 -0
- velocity/db/servers/sqlserver/types.py +114 -0
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/METADATA +1 -1
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/RECORD +35 -16
- velocity/db/servers/mysql.py +0 -640
- velocity/db/servers/sqlite.py +0 -968
- velocity/db/servers/sqlite_reserved.py +0 -208
- velocity/db/servers/sqlserver.py +0 -921
- velocity/db/servers/sqlserver_reserved.py +0 -314
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/WHEEL +0 -0
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/licenses/LICENSE +0 -0
- {velocity_python-0.0.129.dist-info → velocity_python-0.0.132.dist-info}/top_level.txt +0 -0
velocity/db/servers/sqlserver.py
DELETED
@@ -1,921 +0,0 @@
import decimal
import hashlib
import datetime
import re
from velocity.db import exceptions
from .sqlserver_reserved import reserved_words


def initialize(config):
    import pytds
    from velocity.db.core.engine import Engine

    return Engine(pytds, config, SQL)


def make_where(where, sql, vals, is_join=False):
    if not where:
        return
    sql.append("WHERE")
    if isinstance(where, str):
        sql.append(where)
        return
    if isinstance(where, dict):
        where = where.items()
    if isinstance(where, list):
        join = ""
        for key, val in where:
            if join:
                sql.append(join)
            if is_join:
                if "." not in key:
                    key = "A." + key
            if val is None:
                if "!" in key:
                    key = key.replace("!", "")
                    sql.append("{} is not NULL".format(quote(key.lower())))
                else:
                    sql.append("{} is NULL".format(quote(key.lower())))
            elif isinstance(val, (list, tuple)):
                if "!" in key:
                    key = key.replace("!", "")
                    sql.append("{} not in %s".format(quote(key.lower())))
                    vals.append(tuple(val))
                else:
                    sql.append("{} in %s".format(quote(key.lower())))
                    vals.append(tuple(val))
            else:
                if "<>" in key:
                    key = key.replace("<>", "")
                    op = "<>"
                elif "!=" in key:
                    key = key.replace("!=", "")
                    op = "<>"
                elif "!%" in key:
                    key = key.replace("!%", "")
                    op = "not like"
                elif "%%" in key:
                    key = key.replace("%%", "")
                    op = "%"
                elif "%>" in key:
                    key = key.replace("%>", "")
                    op = "%>"
                elif "<%" in key:
                    key = key.replace("<%", "")
                    op = "<%"
                elif "==" in key:
                    key = key.replace("==", "")
                    op = "="
                elif "<=" in key:
                    key = key.replace("<=", "")
                    op = "<="
                elif ">=" in key:
                    key = key.replace(">=", "")
                    op = ">="
                elif "<" in key:
                    key = key.replace("<", "")
                    op = "<"
                elif ">" in key:
                    key = key.replace(">", "")
                    op = ">"
                elif "%" in key:
                    key = key.replace("%", "")
                    op = "like"
                elif "!" in key:
                    key = key.replace("!", "")
                    op = "<>"
                elif "=" in key:
                    key = key.replace("=", "")
                    op = "="
                else:
                    op = "="
                if isinstance(val, str) and val[:2] == "@@":
                    sql.append("{} {} {}".format(quote(key.lower()), op, val[2:]))
                else:
                    if "like" in op:
                        sql.append(
                            "lower({}) {} lower(%s)".format(quote(key.lower()), op)
                        )
                    else:
                        sql.append("{} {} %s".format(quote(key.lower()), op))
                    vals.append(val)
            join = "AND"
    # for index, value in enumerate(vals):
    #     print "In loop..."
    #     if isinstance(value, (bytearray,buffer)):
    #         print "Converting bytearray to pytds.Binary..."
    #         print value
    #         vals[index] = pytds.Binary(str(value))


def quote(data):
    if isinstance(data, list):
        new = []
        for item in data:
            new.append(quote(item))
        return new
    else:
        parts = data.split(".")
        new = []
        for part in parts:
            if "[" in part:
                new.append(part)
            elif part.upper() in reserved_words:
                new.append("[" + part + "]")
            elif re.findall("[/]", part):
                new.append("[" + part + "]")
            else:
                new.append(part)
        return ".".join(new)


class SQL:
    server = "SQL Server"
    type_column_identifier = "data_type"
    default_schema = "dbo"

    ApplicationErrorCodes = []

    DatabaseMissingErrorCodes = []
    TableMissingErrorCodes = [
        208,
    ]
    ColumnMissingErrorCodes = [207, 1911]
    ForeignKeyMissingErrorCodes = []

    ConnectionErrorCodes = []
    DuplicateKeyErrorCodes = []
    RetryTransactionCodes = []
    TruncationErrorCodes = [
        8152,
    ]
    LockTimeoutErrorCodes = []
    DatabaseObjectExistsErrorCodes = []

    @classmethod
    def version(cls):
        return "select @@version", tuple()

    @classmethod
    def timestamp(cls):
        return "select current_timestamp", tuple()

    @classmethod
    def user(cls):
        return "select current_user", tuple()

    @classmethod
    def databases(cls):
        return "select name from master.dbo.sysdatabases", tuple()

    @classmethod
    def schemas(cls):
        return "select schema_name from information_schema.schemata", tuple()

    @classmethod
    def current_schema(cls):
        return "select schema_name()", tuple()

    @classmethod
    def current_database(cls):
        return "select db_name() as current_database", tuple()

    @classmethod
    def tables(cls, system=False):
        return (
            """
            select table_schema, table_name
            from information_schema.tables
            where table_type = 'BASE TABLE'
            order by table_schema,table_name
            """,
            tuple(),
        )

    @classmethod
    def views(cls, system=False):
        return (
            "SELECT s.name , v.name FROM sys.views v inner join sys.schemas s on s.schema_id = v.schema_id",
            tuple(),
        )

    @classmethod
    def __has_pointer(cls, columns):
        if columns:
            if isinstance(columns, list):
                columns = ",".join(columns)
            if ">" in columns:
                return True
        return False

    @classmethod
    def select(
        cls,
        columns=None,
        table=None,
        where=None,
        orderby=None,
        groupby=None,
        having=None,
        start=None,
        qty=None,
        tbl=None,
    ):
        is_join = False

        if isinstance(columns, str) and "distinct" in columns.lower():
            sql = [
                "SELECT",
                columns,
                "FROM",
                quote(table),
            ]
        elif cls.__has_pointer(columns):
            is_join = True
            if isinstance(columns, str):
                columns = columns.split(",")
            letter = 65
            tables = {table: chr(letter)}
            letter += 1
            __select = []
            __from = ["{} AS {}".format(quote(table), tables.get(table))]
            __left_join = []

            for column in columns:
                if ">" in column:
                    is_join = True
                    parts = column.split(">")
                    foreign = tbl.foreign_key_info(parts[0])
                    if not foreign:
                        raise exceptions.DbApplicationError("Foreign key not defined")
                    ref_table = foreign["referenced_table_name"]
                    ref_schema = foreign["referenced_table_schema"]
                    ref_column = foreign["referenced_column_name"]
                    lookup = "{}:{}".format(ref_table, parts[0])
                    if tables.has_key(lookup):
                        __select.append(
                            '{}."{}" as "{}"'.format(
                                tables.get(lookup), parts[1], "_".join(parts)
                            )
                        )
                    else:
                        tables[lookup] = chr(letter)
                        letter += 1
                        __select.append(
                            '{}."{}" as "{}"'.format(
                                tables.get(lookup), parts[1], "_".join(parts)
                            )
                        )
                        __left_join.append(
                            'LEFT OUTER JOIN "{}"."{}" AS {}'.format(
                                ref_schema, ref_table, tables.get(lookup)
                            )
                        )
                        __left_join.append(
                            'ON {}."{}" = {}."{}"'.format(
                                tables.get(table),
                                parts[0],
                                tables.get(lookup),
                                ref_column,
                            )
                        )
                    if orderby and column in orderby:
                        orderby = orderby.replace(
                            column, "{}.{}".format(tables.get(lookup), parts[1])
                        )

                else:
                    if "(" in column:
                        __select.append(column)
                    else:
                        __select.append("{}.{}".format(tables.get(table), column))
            sql = ["SELECT"]
            sql.append(",".join(__select))
            sql.append("FROM")
            sql.extend(__from)
            sql.extend(__left_join)
        else:
            if columns:
                if isinstance(columns, str):
                    columns = columns.split(",")
                if isinstance(columns, list):
                    columns = quote(columns)
                    columns = ",".join(columns)
            else:
                columns = "*"
            sql = [
                "SELECT",
                columns,
                "FROM",
                quote(table),
            ]
        vals = []
        make_where(where, sql, vals, is_join)
        if groupby:
            sql.append("GROUP BY")
            if isinstance(groupby, (list, tuple)):
                groupby = ",".join(groupby)
            sql.append(groupby)
        if having:
            sql.append("HAVING")
            if isinstance(having, (list, tuple)):
                having = ",".join(having)
            sql.append(having)
        if orderby:
            sql.append("ORDER BY")
            if isinstance(orderby, (list, tuple)):
                orderby = ",".join(orderby)
            sql.append(orderby)
        if start and qty:
            sql.append("OFFSET {} ROWS FETCH NEXT {} ROWS ONLY".format(start, qty))
        elif start:
            sql.append("OFFSET {} ROWS".format(start))
        elif qty:
            sql.append("FETCH NEXT {} ROWS ONLY".format(qty))
        sql = " ".join(sql)
        return sql, tuple(vals)

    @classmethod
    def create_database(cls, name):
        return "create database " + name, tuple()

    @classmethod
    def last_id(cls, table):
        return "SELECT @@IDENTITY", tuple()

    @classmethod
    def drop_database(cls, name):
        return "drop database " + name, tuple()

    @classmethod
    def foreign_key_info(cls, table=None, column=None, schema=None):
        if "." in table:
            schema, table = table.split(".")

        sql = [
            """
            SELECT
                KCU1.CONSTRAINT_NAME AS FK_CONSTRAINT_NAME
                ,KCU1.TABLE_NAME AS FK_TABLE_NAME
                ,KCU1.COLUMN_NAME AS FK_COLUMN_NAME
                ,KCU1.ORDINAL_POSITION AS FK_ORDINAL_POSITION
                ,KCU2.CONSTRAINT_NAME AS REFERENCED_CONSTRAINT_NAME
                ,KCU2.TABLE_NAME AS referenced_table_name
                ,KCU2.COLUMN_NAME AS referenced_column_name
                ,KCU2.ORDINAL_POSITION AS REFERENCED_ORDINAL_POSITION
                ,KCU2.CONSTRAINT_SCHEMA AS referenced_table_schema
            FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC

            INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU1
                ON KCU1.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG
                AND KCU1.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA
                AND KCU1.CONSTRAINT_NAME = RC.CONSTRAINT_NAME

            INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU2
                ON KCU2.CONSTRAINT_CATALOG = RC.UNIQUE_CONSTRAINT_CATALOG
                AND KCU2.CONSTRAINT_SCHEMA = RC.UNIQUE_CONSTRAINT_SCHEMA
                AND KCU2.CONSTRAINT_NAME = RC.UNIQUE_CONSTRAINT_NAME
                AND KCU2.ORDINAL_POSITION = KCU1.ORDINAL_POSITION
            """
        ]
        vals = []
        where = {}
        if schema:
            where["LOWER(KCU1.CONSTRAINT_SCHEMA)"] = schema.lower()
        if table:
            where["LOWER(KCU1.TABLE_NAME)"] = table.lower()
        if column:
            where["LOWER(KCU1.COLUMN_NAME)"] = column.lower()
        make_where(where, sql, vals)
        return " ".join(sql), tuple(vals)

    @classmethod
    def create_foreign_key(
        cls, table, columns, key_to_table, key_to_columns, name=None, schema=None
    ):
        if "." not in table and schema:
            if schema is None:
                schema = cls.default_schema
            table = "{}.{}".format(schema, table)
        if isinstance(key_to_columns, str):
            key_to_columns = [key_to_columns]
        if isinstance(columns, str):
            columns = [columns]
        if not name:
            m = hashlib.md5()
            m.update(table)
            m.update(" ".join(columns))
            m.update(key_to_table)
            m.update(" ".join(key_to_columns))
            name = "FK_" + m.hexdigest()
        sql = "ALTER TABLE {} ADD CONSTRAINT {} FOREIGN KEY ({}) REFERENCES {} ({}) ON DELETE CASCADE ON UPDATE CASCADE;".format(
            table, name, ",".join(columns), key_to_table, ",".join(key_to_columns)
        )

        return sql, tuple()

    @classmethod
    def create_table(cls, name, columns={}, drop=False):
        if "." in name:
            fqtn = name
        else:
            fqtn = cls.default_schema + "." + name
        schema, table = fqtn.split(".")
        name = fqtn.replace(".", "_")
        trigger = "on_update_row_{0}".format(name)
        sql = []
        sql.append("DECLARE @script1 nVarChar(MAX);")
        sql.append("DECLARE @script2 nVarChar(MAX);")
        if drop:
            sql.append(cls.drop_table(fqtn))
        sql.append(
            """
            SET @script1 = '
            CREATE TABLE {0} (
                sys_id int identity(1000,1) primary key,
                sys_modified datetime not null default(getdate()),
                sys_created datetime not null default(getdate())
            )'
            """.format(
                fqtn, table, trigger
            )
        )
        sql.append(
            """
            SET @script2 = '
            CREATE TRIGGER {2}
            ON {0}
            AFTER UPDATE
            AS
            BEGIN
                UPDATE t
                SET t.sys_modified = CURRENT_TIMESTAMP,
                    t.sys_created = d.sys_created
                FROM {0} AS t
                INNER JOIN deleted AS d on t.sys_id=i.sys_id
            END'
            """.format(
                fqtn, table, trigger
            )
        )
        sql.append("EXEC (@script1);")
        sql.append("EXEC (@script2);")
        for key, val in columns.items():
            sql.append("ALTER TABLE {} ADD {} {};".format(fqtn, key, cls.get_type(val)))
        return "\n\t".join(sql), tuple()

    @classmethod
    def drop_table(cls, name):
        return (
            "IF OBJECT_ID('%s', 'U') IS NOT NULL DROP TABLE %s;"
            % (
                quote(cls.default_schema + "." + name),
                quote(cls.default_schema + "." + name),
            ),
            tuple(),
        )

    @classmethod
    def columns(cls, name):
        if "." in name:
            return """
            select column_name
            from information_schema.columns
            where table_schema = %s
            and table_name = %s
            """, tuple(
                name.split(".")
            )
        else:
            return """
            select column_name
            from information_schema.columns
            where table_name = %s
            """, tuple(
                [name]
            )

    @classmethod
    def column_info(cls, table, name):
        params = table.split(".")
        params.append(name)
        if "." in table:
            return """
            select *
            from information_schema.columns
            where table_schema = %s
            and table_name = %s
            and column_name = %s
            """, tuple(
                params
            )
        else:
            return """
            select *
            from information_schema.columns
            where table_name = %s
            and column_name = %s
            """, tuple(
                params
            )

    @classmethod
    def primary_keys(cls, table):
        params = table.split(".")
        if "." in table:
            return """
            SELECT COLUMN_NAME
            FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
            WHERE OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + QUOTENAME(CONSTRAINT_NAME)), 'IsPrimaryKey') = 1
            AND TABLE_SCHEMA = %s AND TABLE_NAME = %s
            """, tuple(
                params
            )
        else:
            return """
            SELECT COLUMN_NAME
            FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
            WHERE OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + QUOTENAME(CONSTRAINT_NAME)), 'IsPrimaryKey') = 1
            AND TABLE_NAME = %s
            """, tuple(
                params
            )

    @classmethod
    def xforeign_keys(cls, table):
        params = table.split(".")
        if "." in table:
            return """
            SELECT COLUMN_NAME
            FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
            WHERE OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + QUOTENAME(CONSTRAINT_NAME)), 'IsPrimaryKey') = 1
            AND TABLE_SCHEMA = %s AND TABLE_NAME = %s
            """, tuple(
                params
            )
        else:
            return """
            SELECT COLUMN_NAME
            FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
            WHERE OBJECTPROPERTY(OBJECT_ID(CONSTRAINT_SCHEMA + '.' + QUOTENAME(CONSTRAINT_NAME)), 'IsPrimaryKey') = 1
            AND TABLE_NAME = %s
            """, tuple(
                params
            )

    @classmethod
    def insert(cls, table, data):
        import pytds

        keys = []
        vals = []
        args = []
        for key, val in data.items():
            keys.append(quote(key.lower()))
            if isinstance(val, str) and len(val) > 2 and val[:2] == "@@":
                vals.append(val[2:])
            elif isinstance(val, (bytearray, bytes)):
                vals.append("%s")
                args.append(pytds.Binary(str(val)))
            else:
                vals.append("%s")
                args.append(val)

        sql = []
        if "sys_id" in data:
            sql.append("SET IDENTITY_INSERT {} ON;".format(table))
        sql.append("INSERT INTO")
        sql.append(quote(table))
        sql.append("(")
        sql.append(",".join(keys))
        sql.append(")")
        sql.append("VALUES")
        sql.append("(")
        sql.append(",".join(vals))
        sql.append(");")
        if "sys_id" in data:
            sql.append("SET IDENTITY_INSERT {} OFF;".format(table))
        sql = " ".join(sql)
        return sql, tuple(args)

    @classmethod
    def update(cls, table, data, pk):
        import pytds

        sql = ["UPDATE"]
        sql.append(quote(table))
        sql.append("SET")
        vals = []
        join = ""
        for key in sorted(data.keys()):
            val = data[key]
            if join:
                sql.append(join)
            sql.append("{} = %s".format(quote(key.lower())))
            if isinstance(val, (bytearray, bytes)):
                vals.append(pytds.Binary(str(val)))
            else:
                vals.append(val)
            join = ","
        if pk:
            sql.append("\nWHERE")
            join = ""
            for key in sorted(pk.keys()):
                val = pk[key]
                if join:
                    sql.append(join)
                if val is None:
                    sql.append("{} is null".format(quote(key.lower())))
                else:
                    sql.append("{} = %s".format(quote(key.lower())))
                    vals.append(val)
                join = "AND"
        sql = " ".join(sql)
        return sql, tuple(vals)

    @classmethod
    def create_index(
        cls,
        table=None,
        columns=None,
        unique=False,
        direction=None,
        where=None,
        name=None,
        schema=None,
        trigram=None,
        tbl=None,
    ):
        if "." not in table and schema:
            table = "{}.{}".format(schema, table)
        if isinstance(columns, (list, set)):
            columns = ",".join([quote(c.lower()) for c in sorted(columns)])
        else:
            columns = quote(columns)
        sql = ["CREATE"]
        if unique:
            sql.append("UNIQUE")
        sql.append("INDEX")
        tablename = quote(table)
        if not name:
            name = re.sub(r"\([^)]*\)", "", columns.replace(",", "_"))
        sql.append("IDX__{}__{}".format(table.replace(".", "_"), name))
        sql.append("ON")
        sql.append(tablename)
        sql.append("(")
        sql.append(columns)
        sql.append(")")
        return " ".join(sql), tuple()

    @classmethod
    def drop_index(cls, table=None, columns=None, name=None, schema=None):
        if "." not in table and schema:
            table = "{}.{}".format(schema, table)
        if isinstance(columns, (list, set)):
            columns = ",".join([quote(c.lower()) for c in sorted(columns)])
        else:
            columns = quote(columns)
        sql = ["DROP"]
        sql.append("INDEX IF EXISTS")
        tablename = quote(table)
        if not name:
            name = re.sub(r"\([^)]*\)", "", columns.replace(",", "_"))
        sql.append("IDX__{}__{}".format(table.replace(".", "_"), name))
        print(" ".join(sql))
        return " ".join(sql), tuple()

    @classmethod
    def get_type(cls, v):
        if isinstance(v, str):
            if v[:2] == "@@":
                return v[2:]
        elif isinstance(v, (str, bytes)) or v is str or v is bytes:
            return cls.TYPES.TEXT
        elif isinstance(v, bool) or v is bool:
            return cls.TYPES.BOOLEAN
        elif isinstance(v, int) or v is int:
            if v is int:
                return cls.TYPES.INTEGER
            if v > 2147483647 or v < -2147483648:
                return cls.TYPES.BIGINT
            else:
                return cls.TYPES.INTEGER
        elif isinstance(v, float) or v is float:
            return cls.TYPES.NUMERIC + "(19, 6)"
        elif isinstance(v, decimal.Decimal) or v is decimal.Decimal:
            return cls.TYPES.NUMERIC + "(19, 6)"
        elif isinstance(v, datetime.datetime) or v is datetime.datetime:
            return cls.TYPES.DATETIME
        elif isinstance(v, datetime.date) or v is datetime.date:
            return cls.TYPES.DATE
        elif isinstance(v, datetime.time) or v is datetime.time:
            return cls.TYPES.TIME
        elif isinstance(v, (bytearray, bytes)) or v is bytearray or v is bytes:
            return cls.TYPES.BINARY
        # Everything else defaults to TEXT, incl. None
        return cls.TYPES.TEXT

    @classmethod
    def py_type(cls, v):
        v = str(v).upper()
        if v == cls.TYPES.INTEGER:
            return int
        elif v in cls.TYPES.TEXT:
            return str
        elif v == cls.TYPES.BOOLEAN:
            return bool
        elif v == cls.TYPES.DATE:
            return datetime.date
        elif v == cls.TYPES.TIME:
            return datetime.time
        elif v == cls.TYPES.DATETIME:
            return datetime.datetime
        else:
            raise Exception("unmapped type %s" % v)

    @classmethod
    def massage_data(cls, data):
        """

        :param :
        :param :
        :param :
        :returns:
        """
        data = {key.lower(): val for key, val in data.items()}
        primaryKey = set(cls.GetPrimaryKeyColumnNames())
        if not primaryKey:
            if not cls.Exists():
                raise exceptions.DbTableMissingError
        dataKeys = set(data.keys()).intersection(primaryKey)
        dataColumns = set(data.keys()).difference(primaryKey)
        pk = {}
        pk.update([(k, data[k]) for k in dataKeys])
        d = {}
        d.update([(k, data[k]) for k in dataColumns])
        return d, pk

    @classmethod
    def alter_add(cls, table, columns, null_allowed=True):
        sql = []
        null = "NOT NULL" if not null_allowed else ""
        if isinstance(columns, dict):
            for key, val in columns.items():
                sql.append(
                    "ALTER TABLE {} ADD {} {} {};".format(
                        quote(table), quote(key), cls.get_type(val), null
                    )
                )
        return "\n\t".join(sql), tuple()

    @classmethod
    def alter_drop(cls, table, columns):
        sql = ["ALTER TABLE {} DROP COLUMN".format(quote(table))]
        if isinstance(columns, dict):
            for key, val in columns.items():
                sql.append("{},".format(key))
        if sql[-1][-1] == ",":
            sql[-1] = sql[-1][:-1]
        return "\n\t".join(sql), tuple()

    @classmethod
    def alter_column_by_type(cls, table, column, value, null_allowed=True):
        sql = ["ALTER TABLE {} ALTER COLUMN".format(quote(table))]
        sql.append("{} {}".format(quote(column), cls.get_type(value)))
        if not null_allowed:
            sql.append("NOT NULL")
        return "\n\t".join(sql), tuple()

    @classmethod
    def alter_column_by_sql(cls, table, column, value):
        sql = ["ALTER TABLE {} ALTER COLUMN".format(quote(table))]
        sql.append("{} {}".format(quote(column), value))
        return " ".join(sql), tuple()

    @classmethod
    def rename_column(cls, table, orig, new):
        if "." in table:
            schema, table = table.split(".")
        else:
            schema = cls.default_schema
        return (
            "sp_rename '{}.{}.{}', '{}', 'COLUMN';".format(
                quote(schema), quote(table), quote(orig), new
            ),
            tuple(),
        )

    @classmethod
    def rename_table(cls, table, name, new):
        if "." in table:
            schema, table = table.split(".")
        else:
            schema = cls.default_schema
        return (
            "sp_rename '{}.{}', '{}';".format(quote(schema), quote(name), new),
            tuple(),
        )

    @classmethod
    def create_savepoint(cls, sp):
        return None, tuple()

    @classmethod
    def release_savepoint(cls, sp):
        return None, tuple()

    @classmethod
    def rollback_savepoint(cls, sp):
        return None, tuple()

    @classmethod
    def find_duplicates(cls, table, columns, key):
        if isinstance(columns, str):
            columns = [columns]
        return (
            """
            SELECT {2}
            FROM (SELECT {2},
                    ROW_NUMBER() OVER (partition BY {1} ORDER BY {2}) AS rnum
                FROM {0}) t
            WHERE t.rnum > 1;
            """.format(
                table, ",".join(quote(columns)), key
            ),
            tuple(),
        )

    @classmethod
    def delete_duplicates(cls, table, columns, key):
        if isinstance(columns, str):
            columns = [columns]
        return (
            """
            DELETE FROM {0}
            WHERE {2} IN (SELECT {2}
                FROM (SELECT {2},
                        ROW_NUMBER() OVER (partition BY {1} ORDER BY {2}) AS rnum
                    FROM {0}) t
                WHERE t.rnum > 1);
            """.format(
                table, ",".join(quote(columns)), key
            ),
            tuple(),
        )

    @classmethod
    def delete(cls, table, where):
        sql = ["DELETE FROM {}".format(table)]
        sql.append("WHERE")
        vals = []
        if isinstance(where, dict):
            join = ""
            for key in sorted(where.keys()):
                if join:
                    sql.append(join)
                if where[key] is None:
                    sql.append("{} is NULL".format(quote(key.lower())))
                else:
                    sql.append("{} = %s".format(quote(key.lower())))
                    vals.append(where[key])
                join = "AND"
        else:
            sql.append(where)
        return " ".join(sql), tuple(vals)

    @classmethod
    def truncate(cls, table):
        return "truncate table {}".format(quote(table)), tuple()

    @classmethod
    def create_view(cls, name, query, temp=False, silent=True):
        sql = ["CREATE"]
        if silent:
            sql.append("OR REPLACE")
        if temp:
            sql.append("TEMPORARY")
        sql.append("VIEW")
        sql.append(cls.default_schema + "." + name)
        sql.append("AS")
        sql.append(query)
        return " ".join(sql), tuple()

    @classmethod
    def drop_view(cls, name, silent=True):
        sql = ["DROP VIEW"]
        if silent:
            sql.append("IF EXISTS")
        sql.append(cls.default_schema + "." + name)
        return " ".join(sql), tuple()

    class TYPES(object):
        TEXT = "VARCHAR(MAX)"
        INTEGER = "INT"
        NUMERIC = "NUMERIC"
        DATETIME = "DATETIME"
        TIMESTAMP = "DATETIME"
        DATE = "DATE"
        TIME = "TIME"
        BIGINT = "BIGINT"
        BOOLEAN = "BIT"
        BINARY = "VARBINARY(MAX)"
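The least obvious part of the deleted module is the operator-suffix convention that make_where() applies to where-clause keys: a trailing marker on a column name selects the SQL comparison operator (for example "name%" becomes a LIKE comparison and "qty>=" becomes >=), while the value is passed separately as a query parameter. The sketch below is illustrative only and is not code from the package: split_key and OPERATOR_SUFFIXES are hypothetical names, and only a subset of the suffixes handled above is covered.

# Illustrative sketch (not from velocity-python): a simplified version of the
# key-suffix parsing performed by make_where() in the deleted sqlserver.py.
OPERATOR_SUFFIXES = [
    ("<>", "<>"),
    ("!=", "<>"),
    ("!%", "not like"),
    ("<=", "<="),
    (">=", ">="),
    ("<", "<"),
    (">", ">"),
    ("%", "like"),
    ("!", "<>"),
    ("=", "="),
]


def split_key(key):
    """Return (column, sql_operator) for a where-clause key such as 'name%' or 'qty>='."""
    for suffix, op in OPERATOR_SUFFIXES:
        if suffix in key:
            return key.replace(suffix, ""), op
    return key, "="  # a bare column name defaults to equality


if __name__ == "__main__":
    # A where dict like {'name%': 'acme%', 'qty>=': 10} would render roughly as
    #   WHERE lower(name) like lower(%s) AND qty >= %s   with params ('acme%', 10)
    print(split_key("name%"))   # ('name', 'like')
    print(split_key("qty>="))   # ('qty', '>=')
    print(split_key("status"))  # ('status', '=')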