teradataml 20.0.0.2-py3-none-any.whl → 20.0.0.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of teradataml has been flagged as potentially problematic.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/README.md +196 -2
- teradataml/__init__.py +4 -0
- teradataml/_version.py +1 -1
- teradataml/analytics/analytic_function_executor.py +79 -4
- teradataml/analytics/json_parser/metadata.py +12 -3
- teradataml/analytics/json_parser/utils.py +7 -2
- teradataml/analytics/sqle/__init__.py +1 -0
- teradataml/analytics/table_operator/__init__.py +1 -1
- teradataml/analytics/uaf/__init__.py +1 -1
- teradataml/analytics/utils.py +4 -0
- teradataml/automl/data_preparation.py +3 -2
- teradataml/automl/feature_engineering.py +15 -7
- teradataml/automl/model_training.py +39 -33
- teradataml/common/__init__.py +2 -1
- teradataml/common/constants.py +35 -0
- teradataml/common/garbagecollector.py +2 -1
- teradataml/common/messagecodes.py +8 -2
- teradataml/common/messages.py +3 -1
- teradataml/common/sqlbundle.py +25 -3
- teradataml/common/utils.py +134 -9
- teradataml/context/context.py +20 -10
- teradataml/data/SQL_Fundamentals.pdf +0 -0
- teradataml/data/dataframe_example.json +18 -2
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/Shap.py +7 -1
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +4 -4
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +3 -3
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +18 -21
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +1 -1
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +10 -19
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +3 -1
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +15 -5
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/scripts/lightgbm/dataset.template +157 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +247 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +216 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +159 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +194 -167
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +136 -115
- teradataml/data/scripts/sklearn/sklearn_function.template +14 -19
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +155 -137
- teradataml/data/scripts/sklearn/sklearn_transform.py +129 -42
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/templates/open_source_ml.json +3 -2
- teradataml/data/vectordistance_example.json +4 -0
- teradataml/dataframe/dataframe.py +543 -175
- teradataml/dataframe/functions.py +553 -25
- teradataml/dataframe/sql.py +184 -15
- teradataml/dbutils/dbutils.py +556 -18
- teradataml/dbutils/filemgr.py +48 -1
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/opensource/__init__.py +1 -1
- teradataml/opensource/{sklearn/_class.py → _class.py} +102 -17
- teradataml/opensource/_lightgbm.py +950 -0
- teradataml/opensource/{sklearn/_wrapper_utils.py → _wrapper_utils.py} +1 -2
- teradataml/opensource/{sklearn/constants.py → constants.py} +13 -10
- teradataml/opensource/sklearn/__init__.py +0 -1
- teradataml/opensource/sklearn/_sklearn_wrapper.py +798 -438
- teradataml/options/__init__.py +7 -23
- teradataml/options/configure.py +29 -3
- teradataml/scriptmgmt/UserEnv.py +3 -3
- teradataml/scriptmgmt/lls_utils.py +74 -21
- teradataml/store/__init__.py +13 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +291 -0
- teradataml/store/feature_store/feature_store.py +2223 -0
- teradataml/store/feature_store/models.py +1505 -0
- teradataml/store/vector_store/__init__.py +1586 -0
- teradataml/table_operators/query_generator.py +3 -0
- teradataml/table_operators/table_operator_query_generator.py +3 -1
- teradataml/table_operators/table_operator_util.py +37 -38
- teradataml/table_operators/templates/dataframe_register.template +69 -0
- teradataml/utils/dtypes.py +4 -2
- teradataml/utils/validators.py +33 -1
- {teradataml-20.0.0.2.dist-info → teradataml-20.0.0.3.dist-info}/METADATA +200 -5
- {teradataml-20.0.0.2.dist-info → teradataml-20.0.0.3.dist-info}/RECORD +88 -65
- {teradataml-20.0.0.2.dist-info → teradataml-20.0.0.3.dist-info}/WHEEL +0 -0
- {teradataml-20.0.0.2.dist-info → teradataml-20.0.0.3.dist-info}/top_level.txt +0 -0
- {teradataml-20.0.0.2.dist-info → teradataml-20.0.0.3.dist-info}/zip-safe +0 -0
teradataml/dbutils/dbutils.py
CHANGED
@@ -37,7 +37,7 @@ from teradataml.utils.internal_buffer import _InternalBuffer
 
 
 @collect_queryband(queryband='DrpTbl')
-def db_drop_table(table_name, schema_name=None):
+def db_drop_table(table_name, schema_name=None, suppress_error=False):
     """
     DESCRIPTION:
         Drops the table from the given schema.
@@ -55,6 +55,12 @@ def db_drop_table(table_name, schema_name=None):
             Default Value: None
            Types: str
 
+        suppress_error:
+            Optional Argument
+            Specifies whether to raise error or not.
+            Default Value: False
+            Types: str
+
     RETURNS:
         True - if the operation is successful.
 
@@ -83,14 +89,18 @@ def db_drop_table(table_name, schema_name=None):
 
     try:
         return UtilFuncs._drop_table(table_name)
-    except TeradataMlException:
-
-
-
+    except (TeradataMlException, OperationalError):
+        if suppress_error:
+            pass
+        else:
+            raise
     except Exception as err:
-
-
-
+        if suppress_error:
+            pass
+        else:
+            raise TeradataMlException(Messages.get_message(MessageCodes.DROP_FAILED, "table",
+                                                           table_name),
+                                      MessageCodes.DROP_FAILED) from err
 
 
 @collect_queryband(queryband='DrpVw')
@@ -173,6 +183,8 @@ def db_list_tables(schema_name=None, object_name=None, object_type='all'):
            a replacement for the percent.
            A '_' represents exactly one arbitrary character. Any single character is acceptable in the position in
            which the underscore character appears.
+            Note:
+                * If '%' is specified in 'object_name', then the '_' character is not evaluated for an arbitrary character.
            Default Value: None
            Types: str
            Example:
@@ -207,15 +219,15 @@ def db_list_tables(schema_name=None, object_name=None, object_type='all'):
        >>> execute_sql("create view temporary_view as (select 1 as dummy_col1, 2 as dummy_col2);")
        >>> db_list_tables(None , None, 'view')
 
-       # Example 3 - List all the object types in the default schema whose names begin with 'abc' followed by
-       #
+       # Example 3 - List all the object types in the default schema whose names begin with 'abc' followed by any number
+       # of characters in the end.
        >>> execute_sql("create view abcd123 as (select 1 as dummy_col1, 2 as dummy_col2);")
-       >>> db_list_tables(None, '
+       >>> db_list_tables(None, 'abc%', None)
 
-       # Example 4 - List all the tables in the default schema whose names begin with '
-       #
+       # Example 4 - List all the tables in the default schema whose names begin with 'adm' followed by any number of
+       # characters and ends with 'train'.
        >>> load_example_data("dataframe", "admissions_train")
-       >>> db_list_tables(None, '
+       >>> db_list_tables(None, 'adm%train', 'table')
 
        # Example 5 - List all the views in the default schema whose names begin with any character but ends with 'abc'
        >>> execute_sql("create view view_abc as (select 1 as dummy_col1, 2 as dummy_col2);")
@@ -390,7 +402,7 @@ def _execute_transaction(queries):
         for query in queries:
             cur.execute(query)
 
-        # Try committing the
+        # Try committing the transaction
        con.commit()
     except Exception:
         # Let's first rollback
@@ -402,6 +414,71 @@ def _execute_transaction(queries):
        cur.execute(auto_commit_on)
 
 
+def db_transaction(func):
+    """
+    DESCRIPTION:
+        Function to execute another function in a transaction.
+
+    PARAMETERS:
+        func:
+            Required Argument.
+            Specifies the function to be executed in a single transaction.
+            Types: function
+
+    RETURNS:
+        The object returned by "func".
+
+    RAISES:
+        TeradataMlException, OperationalError
+
+    EXAMPLES:
+        # Example: Declare a function to delete all the records from two tables
+        # and execute the function in a transaction.
+        >>> @db_transaction
+        ... def insert_data(table1, table2):
+        ...     execute_sql("delete from {}".format(table1))
+        ...     execute_sql("delete from {}".format(table2))
+        ...     return True
+        >>> # Executing the above function in a transaction.
+        >>> insert_data("sales", "admissions_train")
+        True
+        >>>
+    """
+    def execute_transaction(*args, **kwargs):
+        auto_commit_off = "{fn teradata_nativesql}{fn teradata_autocommit_off}"
+        auto_commit_on = "{fn teradata_nativesql}{fn teradata_autocommit_on}"
+        con = None
+        cur = None
+
+        result = None
+        try:
+            con = tdmlctx.td_connection
+            if con is None:
+                raise TeradataMlException(Messages.get_message(MessageCodes.CONNECTION_FAILURE),
+                                          MessageCodes.CONNECTION_FAILURE)
+            con = con.connection
+            cur = con.cursor()
+            # Set auto_commit to OFF.
+            cur.execute(auto_commit_off)
+
+            # Execute function.
+            result = func(*args, **kwargs)
+
+            # Try committing the transaction.
+            con.commit()
+        except Exception:
+            # Let's first rollback.
+            con.rollback()
+            # Now, let's raise the error as is.
+            raise
+        finally:
+            # Finally, we must set auto_commit to ON.
+            cur.execute(auto_commit_on)
+
+        return result
+
+    return execute_transaction
+
 def _execute_stored_procedure(function_call, fetchWarnings=True, expect_none_result=False):
     """
     DESCRIPTION:
@@ -984,6 +1061,7 @@ def _create_table(table_name,
                pti = pti.no_primary_index()
 
        con_form=[]
+        foreign_constraints = []
        for c_name, parameters in kwargs.items():
            _Validators._validate_function_arguments([["constraint_type", c_name, True, str,
                                                       True, SQLConstants.CONSTRAINT.value]])
@@ -992,9 +1070,21 @@ def _create_table(table_name,
                [con_form.append("{}('{}')".format("CheckConstraint", col)) for col in parameters]
            if c_name in 'foreign_key_constraint':
                parameters = parameters if isinstance(parameters[0], tuple) else [tuple(parameters)]
-
-
-
+                # Every element in parameter is 3 elements.
+                # 1st element and 2nd element also a list. 3rd element is name of ForeignKey.
+                for fk_columns, fk_ref_columns, fk_name in parameters:
+                    fk_ref_column_objs = []
+
+                    # fk_ref_columns is in this format - table_name.column_name .
+                    # There is no provision for schema name here.
+                    # sqlalchemy is not accepting this notation here - schema_name.table_name.column_name
+                    # So, create Column Object and bind schema name and table name to it.
+                    for fk_ref_column in fk_ref_columns:
+                        ref_column_table, ref_column = fk_ref_column.split(".")
+                        t = Table(ref_column_table, MetaData(), Column(ref_column), schema=schema_name)
+                        fk_ref_column_objs.append(getattr(t, "c")[ref_column])
+                    foreign_constraints.append(ForeignKeyConstraint(fk_columns, fk_ref_column_objs, fk_name))
+
            if c_name in ['primary_key_constraint', 'unique_key_constraint']:
                c_name = "UniqueConstraint" if c_name in 'unique_key_constraint' else 'PrimaryKeyConstraint'
                parameters = UtilFuncs._as_list(parameters)
@@ -1008,6 +1098,8 @@ def _create_table(table_name,
                    "schema=schema_name)".format("" if con_form is None else ",".join(con_form))
 
        table=eval(table_str)
+        for foreign_constraint in foreign_constraints:
+            table.append_constraint(foreign_constraint)
        table.create(bind=tdmlctx.get_context())
 
    except Exception as err:
@@ -1015,6 +1107,333 @@ def _create_table(table_name,
        raise TeradataMlException(Messages.get_message(msg_code, "create table", str(err)), msg_code)
 
 
+def _create_database(schema_name, size='10e6', spool_size=None):
+    """
+    DESCRIPTION:
+        Internal function to create a database with the specified name and size.
+
+    PARAMETERS:
+        schema_name:
+            Required Argument.
+            Specifies the name of the database to create.
+            Types: str
+
+        size:
+            Optional Argument.
+            Specifies the number of bytes to allocate to new database.
+            Note:
+                Exponential notation can also be used.
+            Types: str or int
+
+        spool_size:
+            Optional Argument.
+            Specifies the number of bytes to allocate to new database
+            for spool space.
+            Note:
+                Exponential notation can also be used.
+            Types: str or int
+
+    RETURNS:
+        bool
+
+    RAISES:
+        TeradataMlException.
+
+    EXAMPLES:
+        >>> from teradataml.dbutils.dbutils import _create_database
+        >>> _create_database("db_name1", "10e5")
+    """
+    sql = "CREATE DATABASE {} FROM {} AS PERM = {}".format(
+        schema_name, tdmlctx._get_database_username(), size)
+
+    # If user pass spool size, create it with specified space.
+    if spool_size:
+        sql = "{} , SPOOL = {}".format(sql, spool_size)
+
+    execute_sql(sql)
+    return True
+
+
+def _update_data(update_columns_values, table_name, schema_name, datalake_name=None, update_conditions=None):
+    """
+    DESCRIPTION:
+        Internal function to update the data in a table.
+
+    PARAMETERS:
+        update_columns_values:
+            Required Argument.
+            Specifies the columns and it's values to update.
+            Types: dict
+
+        table_name:
+            Required Argument.
+            Specifies the name of the table to update.
+            Types: str
+
+        schema_name:
+            Required Argument.
+            Specifies the name of the database to update the data in the
+            table "table_name".
+            Types: str
+
+        datalake_name:
+            Optional Argument.
+            Specifies the name of the datalake to look for "schema_name".
+            Types: str
+
+        update_conditions:
+            Optional Argument.
+            Specifies the key columns and it's values which is used as condition
+            for updating the records.
+            Types: dict
+
+    RETURNS:
+        bool
+
+    RAISES:
+        TeradataMlException.
+
+    EXAMPLES:
+        >>> from teradataml.dbutils.dbutils import _update_data
+        >>> _update_data("db_name1", "tbl", update_conditions={"column1": "value1"})
+    """
+    # Prepare the update clause.
+    update_clause = ", ".join(("{} = ?".format(col) for col in update_columns_values))
+    update_values = tuple((_value for _value in update_columns_values.values()))
+
+    # If key_columns_values is passed, then prepare the SQL with where clause.
+    # Else, simply update every thing.
+    schema_name = "{}.{}".format(datalake_name, schema_name) if datalake_name else schema_name
+
+    get_str_ = lambda val: "'{}'".format(val) if isinstance(val, str) else val
+    if update_conditions:
+
+        # Prepare where clause.
+        where_ = []
+        for column, col_value in update_conditions.items():
+            if isinstance(col_value, list):
+                col_value = ", ".join(get_str_(val) for val in col_value)
+                col_value = "({})".format(col_value)
+                where_.append("{} IN {}".format(column, col_value))
+            else:
+                where_.append("{} = {}".format(column, col_value))
+
+        where_clause = " AND ".join(where_)
+
+        sql = f"""UPDATE {schema_name}.{table_name} SET {update_clause}
+                  WHERE {where_clause}
+               """
+
+        execute_sql(sql, (*update_values, ))
+
+    else:
+        sql = f"""UPDATE {schema_name}.{table_name} SET {update_clause}"""
+
+        execute_sql(sql, update_values)
+    return True
+
+
+def _insert_data(table_name, values, columns=None, schema_name=None, datalake_name=None):
+    """
+    DESCRIPTION:
+        Internal function to insert the data in a table.
+
+    PARAMETERS:
+        table_name:
+            Required Argument.
+            Specifies the name of the table to insert.
+            Types: str
+
+        values:
+            Required Argument.
+            Specifies the values to insert.
+            Types: tuple or list of tuple
+
+        columns:
+            Optional Argument.
+            Specifies the name of columns to be involved in insert.
+            Types: list
+
+        schema_name:
+            Optional Argument.
+            Specifies the name of the database to insert the data in the
+            table "table_name".
+            Types: str
+
+        datalake_name:
+            Optional Argument.
+            Specifies the name of the datalake to look for "schema_name".
+            Types: str
+
+    RETURNS:
+        bool
+
+    RAISES:
+        TeradataMlException.
+
+    EXAMPLES:
+        >>> from teradataml.dbutils.dbutils import _insert_data
+        >>> _insert_data("tbl", (1, 2, 3))
+    """
+    # Prepare the update clause.
+    if schema_name:
+        table_name = '"{}"."{}"'.format(schema_name, table_name)
+    if datalake_name:
+        table_name = '"{}"."{}"'.format(datalake_name, table_name)
+
+    values = UtilFuncs._as_list(values)
+
+    # Prepare columns clause.
+    if columns:
+        # Prepare question marks.
+        _q_marks = ["?"] * len(columns)
+        columns = "({})".format(", ".join(columns))
+    else:
+        columns = ""
+        _q_marks = ["?"] * (len(values[0]))
+
+    sql = "insert into {} {} values ({});".format(table_name, columns, ", ".join(_q_marks))
+    execute_sql(sql, values)
+
+    return True
+
+
+def _upsert_data(update_columns_values,
+                 insert_columns_values,
+                 upsert_conditions,
+                 table_name,
+                 schema_name,
+                 datalake_name=None):
+    """
+    DESCRIPTION:
+        Internal function to either insert or update the data to a table.
+
+    PARAMETERS:
+        update_columns_values:
+            Required Argument.
+            Specifies the columns and it's values to update.
+            Types: dict
+
+        insert_columns_values:
+            Required Argument.
+            Specifies the columns and it's values to insert.
+            Types: dict
+
+        upsert_conditions:
+            Required Argument.
+            Specifies the key columns and it's values which is used as condition
+            for updating the records.
+            Types: tuple
+
+        table_name:
+            Required Argument.
+            Specifies the name of the table to insert.
+            Types: str
+
+        schema_name:
+            Required Argument.
+            Specifies the name of the database to update the data in the
+            table "table_name".
+            Types: str
+
+        datalake_name:
+            Optional Argument.
+            Specifies the name of the datalake to look for "schema_name".
+            Types: str
+
+    RETURNS:
+        bool
+
+    RAISES:
+        TeradataMlException.
+
+    EXAMPLES:
+        >>> from teradataml.dbutils.dbutils import _upsert_data
+        >>> _upsert_data("db_name1",
+                         "tbl",
+                         update_columns_values={"column1": "value1"},
+                         insert_columns_values={"column1": "value2"},
+                         upsert_conditions={"key1": "val1"}
+                         )
+    """
+    # If user passes datalake name, then append the same to schema name.
+    if datalake_name:
+        schema_name = "{}.{}".format(datalake_name, schema_name)
+
+    # Prepare the update clause.
+    update_clause = ", ".join(("{} = ?".format(col) for col in update_columns_values))
+    update_values = tuple((_value for _value in update_columns_values.values()))
+
+    # Prepare the where clause and it's values.
+    where_clause = " AND ".join(("{} = ?".format(col) for col in upsert_conditions))
+    where_values = tuple((_value for _value in upsert_conditions.values()))
+
+    # Prepare the insert clause and it's values.
+    insert_values_clause = ", ".join(("?" for _ in range(len(insert_columns_values))))
+    insert_clause = "({}) values ({})".format(", ".join(insert_columns_values), insert_values_clause)
+    insert_values = tuple((_value for _value in insert_columns_values.values()))
+
+    sql = f"""UPDATE {schema_name}.{table_name} SET {update_clause}
+              WHERE {where_clause}
+              ELSE INSERT {schema_name}.{table_name} {insert_clause}
+           """
+    execute_sql(sql, (*update_values, *where_values, *insert_values))
+
+def _delete_data(table_name, schema_name=None, datalake_name=None, delete_conditions=None):
+    """
+    DESCRIPTION:
+        Internal function to delete the data in a table.
+
+    PARAMETERS:
+        table_name:
+            Required Argument.
+            Specifies the name of the table to delete.
+            Types: str
+
+        schema_name:
+            Optional Argument.
+            Specifies the name of the database to delete the data in the
+            table "table_name".
+            Types: str
+
+        datalake_name:
+            Optional Argument.
+            Specifies the name of the datalake to look for "schema_name".
+            Types: str
+
+        delete_conditions:
+            Optional Argument.
+            Specifies the ColumnExpression to use for removing the data.
+            Types: ColumnExpression
+
+    RETURNS:
+        int, specifies the number of records those are deleted.
+
+    RAISES:
+        TeradataMlException.
+
+    EXAMPLES:
+        >>> from teradataml.dbutils.dbutils import _delete_data
+        >>> _delete_data("tbl", "db_name1", delete_conditions={"column1": "value1"})
+    """
+    if schema_name:
+        table_name = '"{}"."{}"'.format(schema_name, table_name)
+
+    if datalake_name:
+        table_name = "{}.{}".format(datalake_name, table_name)
+
+    sqlbundle = SQLBundle()
+
+    sql = sqlbundle._get_sql_query(SQLConstants.SQL_DELETE_ALL_ROWS).format(table_name)
+
+    # If condition exist, the prepare where clause.
+    if delete_conditions:
+        where_clause = delete_conditions.compile()
+        sql = sqlbundle._get_sql_query(SQLConstants.SQL_DELETE_SPECIFIC_ROW).format(table_name, where_clause)
+
+    res = execute_sql(sql)
+    return res.rowcount
+
 @collect_queryband(queryband='LstKwrds')
 def list_td_reserved_keywords(key=None, raise_error=False):
     """
@@ -1471,3 +1890,122 @@ def unset_session_param(name):
 
     return True
 
+class _Authorize:
+    """ Parent class to either provide or revoke access on table(s). """
+    _property = None
+
+    def __init__(self, objects):
+        """
+        DESCRIPTION:
+            Constructor for creating Authorize object.
+
+        PARAMETERS:
+            objects:
+                Required Argument.
+                Specifies the name(s) of the database objects to be authorized.
+                Types: str OR list of str.
+
+        RETURNS:
+            Object of _Authorize.
+
+        RAISES:
+            None
+
+        EXAMPLES:
+            >>> auth = _Authorize('vfs_v1')
+        """
+        # Store the objects here. Then use this where ever required.
+        self._objects = objects
+        self._access_method = self.__class__.__name__.upper()
+
+    def read(self, user):
+        """
+        DESCRIPTION:
+            Authorize the read access.
+            Note:
+                One must have admin access to give read access to other "user".
+
+        PARAMETERS:
+            user:
+                Required Argument.
+                Specifies the name of the user to have read only access.
+                Types: str
+
+        RETURNS:
+            bool.
+
+        RAISES:
+            None
+
+        EXAMPLES:
+            >>> _Authorize('repo').read('BoB')
+        """
+        for object in self._objects:
+            sql = "{} SELECT ON {} {} {}".format(self._access_method, object, self._property, user)
+            execute_sql(sql)
+
+        return True
+
+    def write(self, user):
+        """
+        DESCRIPTION:
+            Authorize the write access.
+            Note:
+                One must have admin access to give write access to other "user".
+
+        PARAMETERS:
+            user:
+                Required Argument.
+                Specifies the name of the user to have write only access.
+                Types: str
+
+        RETURNS:
+            bool.
+
+        RAISES:
+            None
+
+        EXAMPLES:
+            >>> _Authorize('repo').write('BoB')
+        """
+        for access_type in ["INSERT", "UPDATE", "DELETE"]:
+            for object in self._objects:
+                sql = "{} {} ON {} {} {}".format(self._access_method, access_type, object, self._property, user)
+                execute_sql(sql)
+
+        return True
+
+    def read_write(self, user):
+        """
+        DESCRIPTION:
+            Authorize the read and write access.
+            Note:
+                One must have admin access to give read and write access to other "user".
+
+        PARAMETERS:
+            user:
+                Required Argument.
+                Specifies the name of the user to have read and write access.
+                Types: str
+
+        RETURNS:
+            bool.
+
+        RAISES:
+            None
+
+        EXAMPLES:
+            >>> _Authorize('repo').read_write('BoB')
+        """
+        self.read(user)
+        return self.write(user)
+
+
+class Grant(_Authorize):
+    """ Class to grant access to tables."""
+    _property = "TO"
+
+
+class Revoke(_Authorize):
+    """ Class to revoke access from tables."""
+    _property = "FROM"