teradataml-20.0.0.6-py3-none-any.whl → teradataml-20.0.0.7-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of teradataml might be problematic.

Files changed (96)
  1. teradataml/README.md +210 -0
  2. teradataml/__init__.py +1 -1
  3. teradataml/_version.py +1 -1
  4. teradataml/analytics/analytic_function_executor.py +162 -76
  5. teradataml/analytics/byom/__init__.py +1 -1
  6. teradataml/analytics/json_parser/__init__.py +2 -0
  7. teradataml/analytics/json_parser/analytic_functions_argument.py +95 -2
  8. teradataml/analytics/json_parser/metadata.py +22 -4
  9. teradataml/analytics/sqle/DecisionTreePredict.py +3 -2
  10. teradataml/analytics/sqle/NaiveBayesPredict.py +3 -2
  11. teradataml/analytics/sqle/__init__.py +3 -0
  12. teradataml/analytics/utils.py +4 -1
  13. teradataml/automl/__init__.py +2369 -464
  14. teradataml/automl/autodataprep/__init__.py +15 -0
  15. teradataml/automl/custom_json_utils.py +184 -112
  16. teradataml/automl/data_preparation.py +113 -58
  17. teradataml/automl/data_transformation.py +154 -53
  18. teradataml/automl/feature_engineering.py +113 -53
  19. teradataml/automl/feature_exploration.py +548 -25
  20. teradataml/automl/model_evaluation.py +260 -32
  21. teradataml/automl/model_training.py +399 -206
  22. teradataml/clients/auth_client.py +2 -2
  23. teradataml/common/aed_utils.py +11 -2
  24. teradataml/common/bulk_exposed_utils.py +4 -2
  25. teradataml/common/constants.py +62 -2
  26. teradataml/common/garbagecollector.py +50 -21
  27. teradataml/common/messagecodes.py +47 -2
  28. teradataml/common/messages.py +19 -1
  29. teradataml/common/sqlbundle.py +23 -6
  30. teradataml/common/utils.py +116 -10
  31. teradataml/context/aed_context.py +16 -10
  32. teradataml/data/Employee.csv +5 -0
  33. teradataml/data/Employee_Address.csv +4 -0
  34. teradataml/data/Employee_roles.csv +5 -0
  35. teradataml/data/JulesBelvezeDummyData.csv +100 -0
  36. teradataml/data/byom_example.json +5 -0
  37. teradataml/data/creditcard_data.csv +284618 -0
  38. teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
  39. teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +1 -1
  40. teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +1 -1
  41. teradataml/data/docs/sqle/docs_17_20/TextParser.py +1 -1
  42. teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
  43. teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +3 -7
  44. teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +3 -7
  45. teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +3 -7
  46. teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +3 -7
  47. teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +3 -7
  48. teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +3 -7
  49. teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +3 -7
  50. teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +3 -7
  51. teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +3 -7
  52. teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +3 -7
  53. teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +3 -7
  54. teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
  55. teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
  56. teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
  57. teradataml/data/load_example_data.py +29 -11
  58. teradataml/data/payment_fraud_dataset.csv +10001 -0
  59. teradataml/data/teradataml_example.json +67 -0
  60. teradataml/dataframe/copy_to.py +714 -54
  61. teradataml/dataframe/dataframe.py +1153 -33
  62. teradataml/dataframe/dataframe_utils.py +8 -3
  63. teradataml/dataframe/functions.py +168 -1
  64. teradataml/dataframe/setop.py +4 -1
  65. teradataml/dataframe/sql.py +141 -9
  66. teradataml/dbutils/dbutils.py +470 -35
  67. teradataml/dbutils/filemgr.py +1 -1
  68. teradataml/hyperparameter_tuner/optimizer.py +456 -142
  69. teradataml/lib/aed_0_1.dll +0 -0
  70. teradataml/lib/libaed_0_1.dylib +0 -0
  71. teradataml/lib/libaed_0_1.so +0 -0
  72. teradataml/lib/libaed_0_1_aarch64.so +0 -0
  73. teradataml/scriptmgmt/UserEnv.py +234 -34
  74. teradataml/scriptmgmt/lls_utils.py +43 -17
  75. teradataml/sdk/_json_parser.py +1 -1
  76. teradataml/sdk/api_client.py +9 -6
  77. teradataml/sdk/modelops/_client.py +3 -0
  78. teradataml/series/series.py +12 -7
  79. teradataml/store/feature_store/constants.py +601 -234
  80. teradataml/store/feature_store/feature_store.py +2886 -616
  81. teradataml/store/feature_store/mind_map.py +639 -0
  82. teradataml/store/feature_store/models.py +5831 -214
  83. teradataml/store/feature_store/utils.py +390 -0
  84. teradataml/table_operators/table_operator_util.py +1 -1
  85. teradataml/table_operators/templates/dataframe_register.template +6 -2
  86. teradataml/table_operators/templates/dataframe_udf.template +6 -2
  87. teradataml/utils/docstring.py +527 -0
  88. teradataml/utils/dtypes.py +93 -0
  89. teradataml/utils/internal_buffer.py +2 -2
  90. teradataml/utils/utils.py +41 -2
  91. teradataml/utils/validators.py +694 -17
  92. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/METADATA +213 -2
  93. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/RECORD +96 -81
  94. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/WHEEL +0 -0
  95. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/top_level.txt +0 -0
  96. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/zip-safe +0 -0
@@ -97,8 +97,8 @@ class _AuthWorkflow:
  "pat": self.state['pat_token'],
  "sub": self.state['username']
  }
- # Add iat if required.
- if self.state['valid_from']:
+ # Add iat if applicable.
+ if self.state['valid_from'] is not None:
  payload.update({"iat": self.state['valid_from']})
  return payload
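
The guard change above matters because a truthiness test and an `is not None` test differ when `valid_from` is 0, i.e. the Unix epoch, which is falsy but still a legitimate `iat` claim. A minimal sketch of the distinction (the payload keys mirror the hunk; the values are illustrative):

    # Minimal sketch: why "is not None" differs from a truthiness check for the iat claim.
    def build_payload(pat_token, username, valid_from=None):
        payload = {"pat": pat_token, "sub": username}
        if valid_from is not None:   # 0 (the Unix epoch) is falsy but is a valid timestamp
            payload["iat"] = valid_from
        return payload

    print(build_payload("token", "alice", valid_from=0))  # keeps "iat": 0
    print(build_payload("token", "alice"))                # omits "iat"
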
@@ -150,7 +150,7 @@ class AedUtils:

  return self.aed_context._validate_aed_return_code(ret_code[0], nodeid_out[0].decode("utf-8"))

- def _aed_select(self, nodeid, select_expr, distinct=False):
+ def _aed_select(self, nodeid, select_expr, distinct=False, timestamp_expr=None):
  """
  This wrapper function facilitates a integration with 'aed_select',
  a C++ function, in AED library, with Python tdml library.
@@ -162,6 +162,7 @@ class AedUtils:
  nodeid - A DAG node, a input to the select API.
  select_expr - Columns, to be selected from the data frame.
  distinct - Boolean, to decide addition of Distinct clause.
+ timestamp_expr - A string, to specify the temporal table clause.

  EXAMPLES:
  aed_table_nodeid = AedObj._aed_table("dbname.tablename")
@@ -181,11 +182,16 @@ class AedUtils:
  POINTER(c_char_p),
  POINTER(c_char_p),
  POINTER(c_char_p),
+ POINTER(c_int),
  POINTER(c_int)
  ]

  arg_name = ["projection"]
  arg_value = ["DISTINCT " + select_expr if distinct else select_expr]
+ if timestamp_expr:
+ arg_name.append("timestamp_expr")
+ arg_value.append(timestamp_expr)
+
  temp_table_name = UtilFuncs._generate_temp_table_name(prefix="select_", use_default_database=True, quote=False)
  output_table = [UtilFuncs._extract_table_name(temp_table_name)]
  output_schema = [UtilFuncs._extract_db_name(temp_table_name)]
@@ -195,6 +201,8 @@ class AedUtils:

  # return code
  ret_code = self.aed_context._int_array1(0)
+
+ length = self.aed_context._int_array1(len(arg_value))
  try:
  # *** AED request to select columns
  self.aed_context.ele_common_lib.aed_select(self.aed_context._arr_c([nodeid]),
@@ -203,6 +211,7 @@ class AedUtils:
  self.aed_context._arr_c(output_table),
  self.aed_context._arr_c(output_schema),
  nodeid_out,
+ length,
  ret_code)
  except Exception as emsg:
  raise TeradataMlException(Messages.get_message(MessageCodes.AED_EXEC_FAILED, "(aed_select)" + str(emsg)),
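
Taken together, the `_aed_select` hunks above add an optional `timestamp_expr` that is handed to the AED library as an extra ("timestamp_expr", value) pair next to "projection", with the argument count now passed separately. A rough, pure-Python sketch of just the argument assembly (the temporal clause shown is a hypothetical example; the real call goes through the AED C library, not this stand-in):

    # Sketch of how the name/value argument lists grow when timestamp_expr is given.
    def assemble_select_args(select_expr, distinct=False, timestamp_expr=None):
        arg_name = ["projection"]
        arg_value = ["DISTINCT " + select_expr if distinct else select_expr]
        if timestamp_expr:
            arg_name.append("timestamp_expr")
            arg_value.append(timestamp_expr)
        return arg_name, arg_value, len(arg_value)  # the length is now passed to aed_select

    # Hypothetical temporal clause; the actual syntax depends on the temporal table queried.
    print(assemble_select_args("col1, col2",
                               timestamp_expr="VALIDTIME AS OF TIMESTAMP '2024-01-01 00:00:00'"))
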
@@ -2050,7 +2059,7 @@ class AedUtils:
  POINTER(c_int)
  ]
  # Get number of parent nodes present to allocate enough memory
- number_of_parent_nodes = self._aed_get_parent_node_count(nodeid);
+ number_of_parent_nodes = self._aed_get_parent_node_count(nodeid)
  parent_nodeids = self.aed_context._arr_c(["00000000000000000000"] * number_of_parent_nodes)
  ret_code = self.aed_context._int_array1(0)

@@ -98,7 +98,9 @@ def _validate_unimplemented_function(func_name, func_params, *args, **kwargs):
  function_signature,
  return_statement)
  # Creating function object from the string in locals.
- exec(function_expression)
+ global_scope = {}
+ namespace = {}
+ exec(function_expression, global_scope, namespace)

  # kwargs may contain other properties too. So, before we call the function,
  # copying kwargs to another variable and remove additional properties.
@@ -107,5 +109,5 @@ def _validate_unimplemented_function(func_name, func_params, *args, **kwargs):
  if not param in TDMLFrameworkKeywords.AGGREGATE_FUNCTION_DEFAULT_ARGUMENTS.value:
  kw[param] = kwargs[param]

- return locals().get(func_name)(*args, **kw)
+ return namespace[func_name](*args, **kw)
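
Replacing the bare `exec(function_expression)` addresses a CPython detail: names created by `exec` inside a function are not reliably retrievable through `locals()` afterwards, so the generated function is now executed into explicit dictionaries and fetched from there. A self-contained sketch of the pattern (the generated source below is illustrative, not the framework's real generated signature):

    # Sketch: exec a dynamically built function into an explicit namespace, then call it.
    function_expression = (
        "def my_generated_func(x, y=1):\n"
        "    return x + y\n"
    )

    global_scope = {}
    namespace = {}
    exec(function_expression, global_scope, namespace)

    # The function is retrieved from the explicit namespace, not from locals().
    result = namespace["my_generated_func"](10, y=5)
    print(result)  # 15
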
@@ -63,10 +63,14 @@ class SQLConstants(Enum):
  SQL_SELECT_COLUMNNAMES_WITH_WHERE = 32
  SQL_HELP_DATABASE = 33
  SQL_HELP_DATALAKE = 34
+ SQL_INSERT_INTO_TABLE_VALUES_WITH_COLUMN_NAMES = 35
  CONSTRAINT = ["check_constraint", "primary_key_constraint",
  "foreign_key_constraint", "unique_key_constraint"]
  SQL_TD_OTF_METADATA = 35
  SQL_TD_OTF_SNAPSHOT = 36
+ SQL_LIST_TRIGGERS = 37
+ SQL_SHOW_TABLE = 38
+ SQL_SHOW_VIEW = 39


  class TeradataConstants(Enum):
@@ -143,6 +147,9 @@ class TeradataTypes(Enum):
  INTERVAL_DAY_TO_SECOND, INTERVAL_HOUR,
  INTERVAL_HOUR_TO_MINUTE, INTERVAL_HOUR_TO_SECOND,
  INTERVAL_MINUTE, INTERVAL_MINUTE_TO_SECOND, INTERVAL_SECOND)
+ TD_RANGE_N_CLAUSE_TYPES = (INTERVAL_YEAR, INTERVAL_DAY, INTERVAL_MONTH,
+ INTERVAL_MINUTE, INTERVAL_SECOND, INTERVAL_HOUR)
+


  class TeradataTableKindConstants(Enum):
@@ -155,6 +162,21 @@ class TeradataTableKindConstants(Enum):
  VOLATILE_TABLE_NAME = 'Table Name'
  REGULAR_TABLE_NAME = 'TableName'

+ class DataFrameTypes(Enum):
+ VIEW = "VIEW"
+ VALID_TIME_VIEW = "VALID_TIME_VIEW"
+ TRANSACTION_TIME_VIEW = "TRANSACTION_TIME_VIEW"
+ BI_TEMPORAL_VIEW = "BI_TEMPORAL_VIEW"
+ REGULAR_TABLE = "TABLE"
+ OTF_TABLE = "OTF"
+ BI_TEMPORAL = "BI_TEMPORAL"
+ TRANSACTION_TIME= "TRANSACTION_TIME"
+ VALID_TIME = "VALID_TIME"
+ ART_TABLE = "ART"
+ VOLATILE_TABLE = "VOLATILE_TABLE"
+ VALID_TIME_VOLATILE_TABLE = "VALID_TIME_VOLATILE_TABLE"
+ TRANSACTION_TIME_VOLATILE_TABLE = "TRANSACTION_TIME_VOLATILE_TABLE"
+ BI_TEMPORAL_VOLATILE_TABLE = "BI_TEMPORAL_VOLATILE_TABLE"

  class SQLPattern(Enum):
  SQLMR = re.compile(r"SELECT \* FROM .*\((\s*.*)*\) as .*", re.IGNORECASE)
@@ -1434,7 +1456,8 @@ class TeradataReservedKeywords(Enum):
  "SUMMARY",
  "HASH",
  "METHOD",
- "TYPE"
+ "TYPE",
+ "CATALOG"
  ]


@@ -1509,10 +1532,21 @@ class AsyncStatusColumns(Enum):


  class AsyncOpStatus(Enum):
- # Holds valid status for asynchronous operatiosns in UES.
+ # Holds valid status for asynchronous operations in UES.
  FILE_INSTALLED = "File Installed"
  ERRED = "Errored"
  FINISHED = "Finished"
+ MODEL_INSTALLED = "ModelInstalled"
+
+
+ class AsyncOpStatusOAFColumns(Enum):
+ # Holds column names of dataframe representing status of given claim-id.
+ CLAIM_ID = "Claim Id"
+ FILE_LIB_MODEL_NAME = "File/Libs/Model"
+ METHOD_NAME = "Method Name"
+ STAGE = "Stage"
+ TIMESTAMP = "Timestamp"
+ ADDITIONAL_DETAILS = "Additional Details"


  class CloudProvider(Enum):
@@ -1559,6 +1593,23 @@ class SessionParamsPythonNames(Enum):
  class AutoMLConstants(Enum):
  # List stores feature selection methods
  FEATURE_SELECTION_MTDS = ["lasso", "rfe", "pca"]
+ # Model lists
+ SUPERVISED_MODELS = ["glm", "svm", "knn", "decision_forest", "xgboost"]
+ CLUSTERING_MODELS = ["KMeans", "GaussianMixture"]
+ ALL_MODELS = SUPERVISED_MODELS + CLUSTERING_MODELS
+
+ # Metric lists
+ CLASSIFICATION_METRICS = ["MICRO-F1", "MACRO-F1", "MICRO-RECALL", "MACRO-RECALL",
+ "MICRO-PRECISION", "MACRO-PRECISION", "WEIGHTED-PRECISION",
+ "WEIGHTED-RECALL", "WEIGHTED-F1", "ACCURACY"]
+
+ REGRESSION_METRICS = ["R2", "MAE", "MSE", "MSLE", "MAPE", "MPE",
+ "RMSE", "RMSLE", "ME", "EV", "MPD", "MGD"]
+
+ CLUSTERING_METRICS = ["SILHOUETTE", "CALINSKI", "DAVIES"]
+
+ # Combined for default case
+ ALL_METRICS = REGRESSION_METRICS + CLASSIFICATION_METRICS + CLUSTERING_METRICS


  class AuthMechs(Enum):
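
The new `AutoMLConstants` lists are presumably used to validate user-supplied model and metric names across the AutoML changes in this release. A hedged sketch of such a check, under the assumption that matching is case-insensitive (only the metric lists themselves come from the hunk above; the validation logic is an assumption):

    # Hedged sketch: validating a performance metric against the new AutoML metric lists.
    CLASSIFICATION_METRICS = ["MICRO-F1", "MACRO-F1", "MICRO-RECALL", "MACRO-RECALL",
                              "MICRO-PRECISION", "MACRO-PRECISION", "WEIGHTED-PRECISION",
                              "WEIGHTED-RECALL", "WEIGHTED-F1", "ACCURACY"]
    REGRESSION_METRICS = ["R2", "MAE", "MSE", "MSLE", "MAPE", "MPE",
                          "RMSE", "RMSLE", "ME", "EV", "MPD", "MGD"]
    CLUSTERING_METRICS = ["SILHOUETTE", "CALINSKI", "DAVIES"]
    ALL_METRICS = REGRESSION_METRICS + CLASSIFICATION_METRICS + CLUSTERING_METRICS

    def validate_metric(metric, task=None):
        # Pick the task-specific list, or fall back to the combined default list.
        allowed = {"classification": CLASSIFICATION_METRICS,
                   "regression": REGRESSION_METRICS,
                   "clustering": CLUSTERING_METRICS}.get(task, ALL_METRICS)
        if metric.upper() not in allowed:
            raise ValueError("Unsupported metric '{}'. Choose one of: {}".format(metric, allowed))
        return metric.upper()

    print(validate_metric("accuracy", task="classification"))  # ACCURACY
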
@@ -1577,3 +1628,12 @@ class TDServices(Enum):
  """
  VECTORSTORE = "vectorstore"
  MOPS = "MODELOPS" # For future reference
+
+ class AccessQueries(Enum):
+ """
+ Enum to hold permitted access queries.
+ """
+ read = ["{grant_revoke_} SELECT ON {database_} {to_from_} {user_}"]
+ write= ["{grant_revoke_} CREATE TABLE ON {database_} {to_from_} {user_}",
+ "{grant_revoke_} CREATE VIEW ON {database_} {to_from_} {user_}",
+ "{grant_revoke_} DELETE, UPDATE, INSERT ON {database_} {to_from_} {user_}"]
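
The `AccessQueries` templates are plain `str.format` strings whose placeholders (`grant_revoke_`, `database_`, `to_from_`, `user_`) are presumably filled by the dbutils grant/revoke helpers; the call site is not part of this hunk. A hedged sketch of expanding the read template (database and user names are illustrative):

    # Sketch: expanding one AccessQueries template into a concrete GRANT/REVOKE statement.
    READ_TEMPLATES = ["{grant_revoke_} SELECT ON {database_} {to_from_} {user_}"]

    def build_access_sql(templates, grant=True, database="sales_db", user="analyst_user"):
        # GRANT ... TO ... vs REVOKE ... FROM ...
        return [t.format(grant_revoke_="GRANT" if grant else "REVOKE",
                         database_=database,
                         to_from_="TO" if grant else "FROM",
                         user_=user)
                for t in templates]

    print(build_access_sql(READ_TEMPLATES))
    # ['GRANT SELECT ON sales_db TO analyst_user']
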
@@ -325,23 +325,26 @@ class GarbageCollector():
  raise

  @staticmethod
- def _delete_object_entry(object_to_delete,
+ def _delete_object_entry(objects_to_delete,
  object_type=TeradataConstants.TERADATA_TABLE,
  remove_entry_from_gc_list=False):
  """
  DESCRIPTION:
  Deletes an entry of table/view/script from persisted file.
- This makes sure that the object is not garbage collected.
+ This makes sure that the object(s) is/are not garbage collected.

  PARAMETERS:
- object_to_delete:
+ objects_to_delete:
  Required Argument.
- Specifies the name of the table/view/script to be deleted.
- Types: str
+ Specifies the names of the table/view/script to be deleted.
+ Types: str or list of str

  object_type:
  Optional Argument.
  Specifies the type of the object (table/view/script) to be deleted.
+ Note:
+ Pass None, when object type is not available. In this case, the given object will be
+ removed from GC file based on just object name and current process id.
  Default Value: TeradataConstants.TERADATA_TABLE
  Types: TeradataConstants

@@ -365,8 +368,11 @@ class GarbageCollector():
  None.

  EXAMPLES:
- GarbageCollector._delete_table_view_entry(object_to_delete = 'temp.temp_table1')
+ GarbageCollector._delete_table_view_entry(objects_to_delete = 'temp.temp_table1')
  """
+ from teradataml.common.utils import UtilFuncs
+ objects_to_delete = UtilFuncs._as_list(objects_to_delete)
+
  try:
  tempfilename = GarbageCollector.__make_temp_file_name()
  if not os.path.isfile(tempfilename):
@@ -383,25 +389,37 @@ class GarbageCollector():
  db_object_type = int(record_parts[2].strip())
  db_object = record_parts[3].strip()

+ _added_in_gc_file = False # Set to True if the entry is added to GC file.
+
  # Avoid substring matches by comparing object names in full.
  # Also make sure to check for the pid.
- if not (object_to_delete == db_object
+ if object_type and not (db_object in objects_to_delete
  and object_type.value == db_object_type
  and int(os.getpid()) == contentpid):
  fgc.write(db_object_entry)
- else:
- if remove_entry_from_gc_list and configure._validate_gc:
- # Delete the entry from gc lists if required.
- GarbageCollector.__delete_object_from_gc_list(object_to_delete,
- object_type)
-
- # If object is a script, also delete the local copy of the file.
- if object_type in \
- [TeradataConstants.TERADATA_SCRIPT,
- TeradataConstants.TERADATA_APPLY,
- TeradataConstants.TERADATA_TEXT_FILE,
- TeradataConstants.TERADATA_LOCAL_SCRIPT]:
- GarbageCollector.__delete_gc_tempdir_local_file(db_object, object_type)
+ _added_in_gc_file = True
+
+ elif object_type is None:
+ if db_object in objects_to_delete and int(os.getpid()) == contentpid:
+ # Skip adding to GC file if the object is being deleted but object_type is passed as None.
+ pass
+ else:
+ fgc.write(db_object_entry)
+ _added_in_gc_file = True
+
+
+ if not _added_in_gc_file and remove_entry_from_gc_list and configure._validate_gc:
+ # Delete the entry from gc lists if required.
+ GarbageCollector.__delete_object_from_gc_list(db_object, object_type)
+
+
+ # If object is a script, also delete the local copy of the file.
+ if not _added_in_gc_file and object_type in \
+ [TeradataConstants.TERADATA_SCRIPT,
+ TeradataConstants.TERADATA_APPLY,
+ TeradataConstants.TERADATA_TEXT_FILE,
+ TeradataConstants.TERADATA_LOCAL_SCRIPT]:
+ GarbageCollector.__delete_gc_tempdir_local_file(db_object, object_type)
  fgc.truncate()
  except Exception as e:
  raise
@@ -456,8 +474,19 @@ class GarbageCollector():
  GarbageCollector.__gc_container.remove(object_name)
  elif TeradataConstants.TERADATA_APPLY == object_type:
  GarbageCollector.__gc_apply.remove(object_name)
- else:
+ elif TeradataConstants.TERADATA_SCRIPT == object_type:
  GarbageCollector.__gc_scripts.remove(object_name)
+ else:
+ # If none of the conditions met, then try removing from all.
+ _all_gc_lists = [GarbageCollector.__gc_tables, GarbageCollector.__gc_views,
+ GarbageCollector.__gc_scripts, GarbageCollector.__gc_container,
+ GarbageCollector.__gc_apply]
+ for _list in _all_gc_lists:
+ try:
+ _list.remove(object_name)
+ except ValueError:
+ # If the object is not found in the list, just ignore.
+ pass

  @staticmethod
  def _delete_local_file(file_path):
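
Per the updated docstring, `_delete_object_entry` now takes either a single name or a list of names, and passing `object_type=None` removes matching entries based only on the object name and the current process id; the new fallback in `__delete_object_from_gc_list` then tries every internal list and swallows `ValueError`. A hedged sketch of how the reworked internal helper might be called (object names are illustrative, and this is private teradataml machinery rather than public API):

    # Sketch only: exercising the widened internal signature described in the hunks above.
    from teradataml.common.garbagecollector import GarbageCollector
    from teradataml.common.constants import TeradataConstants

    # Single object with an explicit type, as before.
    GarbageCollector._delete_object_entry("tempdb.temp_table1",
                                          object_type=TeradataConstants.TERADATA_TABLE)

    # Several objects at once with no type: matched on name and current process id only.
    GarbageCollector._delete_object_entry(["tempdb.temp_table2", "tempdb.temp_view1"],
                                          object_type=None)
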
@@ -126,6 +126,8 @@ class ErrorInfoCodes(Enum):
  CANNOT_USE_TOGETHER_WITH = 'TDML_2042'
  TABLE_DOES_NOT_EXIST = 'TDML_2046'
  DEPENDENT_METHOD = 'TDML_2113'
+ TDMLDF_COLUMN_IN_ARG_FOUND = 'TDML_2114'
+ EITHER_ANY_ARGUMENT = 'TDML_2115'

  # Reserved for Generic Error Messages: 2121 - 2199
  RESERVED_KEYWORD = 'TDML_2121'
@@ -227,6 +229,7 @@ class ErrorInfoCodes(Enum):

  # OpenAF Error codes starting from 2551 - Reserved till 2560.
  SET_REQUIRED_PARAMS = 'TDML_2551'
+ INVALID_USAGE = 'TDML_2552'

  # Error codes for OTF. Reserved till 2570.
  OTF_TABLE_REQUIRED = 'TDML_2561'
@@ -242,6 +245,22 @@ class ErrorInfoCodes(Enum):
  # Python SDK Error codes starting from 2580 - Reserved till 2590.
  INFO_NOT_PROVIDED_USE_DEFAULT = 'TDML_W_2580' # Logger warning.

+ # EFS Error codes starting from 2600 - Reserved till 2650.
+ EFS_COMPONENT_NOT_EXIST = 'TDML_2600'
+ EFS_INVALID_PROCESS_TYPE = 'TDML_2601'
+ EFS_INVALID_FEATURE_TYPE = 'TDML_2602'
+ EFS_FEATURE_IN_DATASET = 'TDML_2603'
+ EFS_FEATURE_IN_CATALOG = 'TDML_2604'
+ EFS_ENTITY_IN_CATALOG = 'TDML_2605'
+ DF_DUPLICATE_VALUES = 'TDML_2606'
+ DF_NULL_VALUES = 'TDML_2607'
+ EFS_FEATURE_ENTITY_MISMATCH = 'TDML_2608'
+ FEATURES_ARCHIVED = 'TDML_2609'
+ EFS_DELETE_BEFORE_ARCHIVE = 'TDML_2610'
+ EFS_OBJ_IN_FEATURE_PROCESS = 'TDML_2611'
+ EFS_OBJECT_NOT_EXIST = 'TDML_2612'
+ EFS_OBJECT_IN_OTHER_DOMAIN = 'TDML_2613'
+

  class MessageCodes(Enum):
  """
@@ -388,7 +407,7 @@ class MessageCodes(Enum):
  VANTAGE_WARNING = "Following warning raised from Vantage with warning code: {}\n{}"
  FASTLOAD_FAILS = "fastload() failed to load pandas dataframe to Teradata Vantage."
  REMOVE_FILE_FAILED = "Failed to remove {} from Teradata Vantage"
- INPUT_FILE_NOT_FOUND = "Input file '{}' not found. Please check the file path."
+ INPUT_FILE_NOT_FOUND = "Input file(s) '{}' not found. Please check the file path(s)."
  INSTALL_FILE_FAILED = "File '{}' cannot be installed."
  REPLACE_FILE_FAILED = "Unable to replace '{}'"
  URL_UNREACHABLE = "URL '{}' is unreachable."
@@ -411,6 +430,7 @@ class MessageCodes(Enum):
  NO_ENVIRONMENT_FOUND = "No {} environment(s) found."
  UNSUPPORTED_FILE_EXTENSION = "Unsupported file extension specified. Supported file extensions is/are {}."
  FILE_EMPTY = "Input file {} is empty."
+ EITHER_ANY_ARGUMENT = "Provide either {} argument(s)."

  PYTHON_NOT_INSTALLED = "Python is not installed on Vantage. " \
  "Please install Python interpreter and add-on packages on Vantage."
@@ -457,6 +477,8 @@ class MessageCodes(Enum):
  "explicitly passed to function or specified using a configuration file, or setting up " \
  "the environment variables."
  DEPENDENT_METHOD = "Method(s) {} must be called before calling '{}'."
+ TDMLDF_COLUMN_IN_ARG_FOUND = "Column '{}' provided in '{}' argument, exist in {} {}."
+ INVALID_USAGE = "Invalid usage of {0} {1}. Use {0} {1} only {2}."
  REST_HTTP_ERROR = "Failed to run rest API:\n{}"
  REST_AUTH_MISSING_ARG = "For '{}' authentication, '{}' is/are not provided in config file or "\
  "environment variable or through constructor argument 'auth'."
@@ -468,4 +490,27 @@ class MessageCodes(Enum):

  INFO_NOT_PROVIDED_USE_DEFAULT = "{} is not provided in path '{}' method '{}' for operationID '{}' using default {}."
  OTF_TABLE_REQUIRED = "{} is supported only with OTF table."
-
+ EFS_COMPONENT_NOT_EXIST = "{} '{}' does not exist. Use {} to list valid {}."
+ EFS_INVALID_PROCESS_TYPE = "Invalid process type '{}' detected. Valid types are: {}."
+ EFS_INVALID_FEATURE_TYPE = ("Invalid feature type '{}' detected for feature ''. "
+ "Features cannot ignest for types: {}.")
+ EFS_FEATURE_IN_DATASET = ("Feature(s) {} is/are associated with an existing dataset(s) {}. "
+ "Feature(s) can be {} only when they are not associated with any dataset. "
+ "Use 'DatasetCatalog.list_datasets()' to see the list of features associated with datasets.")
+ EFS_FEATURE_IN_CATALOG = ("Feature '{}' exists in feature catalog. "
+ "Delete the feature first using FeatureCatalog.delete_features().")
+ EFS_ENTITY_IN_CATALOG = ("Entity '{}' is associated with feature(s) {} in Feature catalog. "
+ "Delete these features using FeatureCatalog.delete_features().")
+ DF_DUPLICATE_VALUES = "Duplicate {} are not allowed. Found the duplicate value(s) {}."
+ DF_NULL_VALUES = ("Null value(s) are not allowed in {} while {}. "
+ "Found the null value(s) {}.")
+ EFS_FEATURE_ENTITY_MISMATCH = ("Feature(s) {} is/are associated with entities {}. One cannot "
+ "ingest same feature for another entity in the same data domain. "
+ "Either choose a different feature name or choose a different data domain.")
+ FEATURES_ARCHIVED = "Feature(s) {} is/are archived. {}"
+ EFS_DELETE_BEFORE_ARCHIVE = ("{0} '{1}' is not archived. Archive the {0} before deleting it."
+ "Use 'FeatureStore.archive_{2}()' to archive the {0}.")
+ EFS_OBJ_IN_FEATURE_PROCESS = ("{0} '{1}' is associated with {2}. {0} can be modified only when it is "
+ "not associated with {2}. Archive the {3} using {4} and try again.")
+ EFS_OBJECT_NOT_EXIST = "{} with {} does not exist in data domain '{}'."
+ EFS_OBJECT_IN_OTHER_DOMAIN = "{} with {} does not exist in data domain '{}'. It exists in other data domain(s): {}."
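
The new `MessageCodes` entries are ordinary `str.format` templates; `INVALID_USAGE` reuses positional indices so the same pair of values appears twice in the rendered text. A small sketch of how two of them render (the templates are copied from the hunk above; the fill-in values are hypothetical, in the library they come from the validators):

    INVALID_USAGE = "Invalid usage of {0} {1}. Use {0} {1} only {2}."
    TDMLDF_COLUMN_IN_ARG_FOUND = "Column '{}' provided in '{}' argument, exist in {} {}."

    print(INVALID_USAGE.format("argument", "'lock_rows'", "while creating a DataFrame on a table"))
    print(TDMLDF_COLUMN_IN_ARG_FOUND.format("emp_id", "columns", "table", "employees"))
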
@@ -197,6 +197,9 @@ class Messages():
  [ErrorInfoCodes.SET_REQUIRED_PARAMS, MessageCodes.SET_REQUIRED_PARAMS],
  [ErrorInfoCodes.MISSING_ARGS, MessageCodes.CONNECTION_PARAMS],
  [ErrorInfoCodes.DEPENDENT_METHOD, MessageCodes.DEPENDENT_METHOD],
+ [ErrorInfoCodes.TDMLDF_COLUMN_IN_ARG_FOUND, MessageCodes.TDMLDF_COLUMN_IN_ARG_FOUND],
+ [ErrorInfoCodes.INVALID_USAGE, MessageCodes.INVALID_USAGE],
+ [ErrorInfoCodes.DEPENDENT_METHOD, MessageCodes.DEPENDENT_METHOD],
  [ErrorInfoCodes.REST_HTTP_ERROR, MessageCodes.REST_HTTP_ERROR],
  [ErrorInfoCodes.REST_AUTH_MISSING_ARG, MessageCodes.REST_AUTH_MISSING_ARG],
  [ErrorInfoCodes.REST_NOT_CONFIGURED, MessageCodes.REST_NOT_CONFIGURED],
@@ -204,7 +207,22 @@ class Messages():
  [ErrorInfoCodes.REST_DEVICE_CODE_GEN_FAILED, MessageCodes.REST_DEVICE_CODE_GEN_FAILED],
  [ErrorInfoCodes.REST_DEVICE_CODE_AUTH_FAILED, MessageCodes.REST_DEVICE_CODE_AUTH_FAILED],
  [ErrorInfoCodes.INFO_NOT_PROVIDED_USE_DEFAULT, MessageCodes.INFO_NOT_PROVIDED_USE_DEFAULT],
- [ErrorInfoCodes.OTF_TABLE_REQUIRED, MessageCodes.OTF_TABLE_REQUIRED]
+ [ErrorInfoCodes.OTF_TABLE_REQUIRED, MessageCodes.OTF_TABLE_REQUIRED],
+ [ErrorInfoCodes.EFS_COMPONENT_NOT_EXIST, MessageCodes.EFS_COMPONENT_NOT_EXIST],
+ [ErrorInfoCodes.EFS_INVALID_PROCESS_TYPE, MessageCodes.EFS_INVALID_PROCESS_TYPE],
+ [ErrorInfoCodes.EFS_FEATURE_IN_DATASET, MessageCodes.EFS_FEATURE_IN_DATASET],
+ [ErrorInfoCodes.EFS_FEATURE_IN_CATALOG, MessageCodes.EFS_FEATURE_IN_CATALOG],
+ [ErrorInfoCodes.EFS_ENTITY_IN_CATALOG, MessageCodes.EFS_ENTITY_IN_CATALOG],
+ [ErrorInfoCodes.DF_DUPLICATE_VALUES, MessageCodes.DF_DUPLICATE_VALUES],
+ [ErrorInfoCodes.DF_NULL_VALUES, MessageCodes.DF_NULL_VALUES],
+ [ErrorInfoCodes.EFS_FEATURE_ENTITY_MISMATCH, MessageCodes.EFS_FEATURE_ENTITY_MISMATCH],
+ [ErrorInfoCodes.FEATURES_ARCHIVED, MessageCodes.FEATURES_ARCHIVED],
+ [ErrorInfoCodes.EFS_DELETE_BEFORE_ARCHIVE, MessageCodes.EFS_DELETE_BEFORE_ARCHIVE],
+ [ErrorInfoCodes.EFS_OBJ_IN_FEATURE_PROCESS, MessageCodes.EFS_OBJ_IN_FEATURE_PROCESS],
+ [ErrorInfoCodes.EFS_OBJECT_NOT_EXIST, MessageCodes.EFS_OBJECT_NOT_EXIST],
+ [ErrorInfoCodes.EFS_OBJECT_IN_OTHER_DOMAIN, MessageCodes.EFS_OBJECT_IN_OTHER_DOMAIN],
+ [ErrorInfoCodes.EITHER_ANY_ARGUMENT, MessageCodes.EITHER_ANY_ARGUMENT],
+
  ]

  @staticmethod
@@ -71,8 +71,13 @@ class SQLBundle:
  [SQLConstants.SQL_SELECT_COLUMNNAMES_WITH_WHERE, "sel {0} from {1} where {2}"],
  [SQLConstants.SQL_HELP_DATABASE, "HELP DATABASE {0}"],
  [SQLConstants.SQL_HELP_DATALAKE, "HELP DATALAKE {0}"],
+ [SQLConstants.SQL_INSERT_INTO_TABLE_VALUES_WITH_COLUMN_NAMES, "insert into {0} ({1}) values({2})"],
+ [SQLConstants.SQL_HELP_DATALAKE, "HELP DATALAKE {0}"],
  [SQLConstants.SQL_TD_OTF_METADATA, "SELECT * FROM {0}(ON ({1})) D;"],
- [SQLConstants.SQL_TD_OTF_SNAPSHOT, "SELECT * FROM {0} FOR SNAPSHOT AS OF {1};"]
+ [SQLConstants.SQL_TD_OTF_SNAPSHOT, "SELECT * FROM {0} FOR SNAPSHOT AS OF {1};"],
+ [SQLConstants.SQL_LIST_TRIGGERS, "SELECT TriggerName FROM DBC.TRIGGERSV WHERE DatabaseName = '{0}' AND TriggerName LIKE '{1}'"],
+ [SQLConstants.SQL_SHOW_TABLE, "SHOW TABLE {0}"],
+ [SQLConstants.SQL_SHOW_VIEW, "SHOW VIEW {0}"]
  ]
  self._add_sql_version()
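
The three new query templates registered above are plain `str.format` strings keyed by the `SQLConstants` members added earlier in this diff. A hedged sketch of what they expand to (the database name, trigger pattern and table name are illustrative):

    # Sketch: the new templates are ordinary format strings filled at call time.
    LIST_TRIGGERS = ("SELECT TriggerName FROM DBC.TRIGGERSV "
                     "WHERE DatabaseName = '{0}' AND TriggerName LIKE '{1}'")
    SHOW_TABLE = "SHOW TABLE {0}"

    print(LIST_TRIGGERS.format("sales_db", "td_%"))
    print(SHOW_TABLE.format("sales_db.orders"))
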
@@ -158,7 +163,7 @@ class SQLBundle:
  return sqlbundle._get_sql_query(SQLConstants.SQL_BASE_QUERY).format(name)

  @staticmethod
- def _build_create_view(view_name, select_expression):
+ def _build_create_view(view_name, select_expression, lock_rows=False):
  """
  Builds a CREATE VIEW DDL statement.
  For Example,
@@ -167,6 +172,11 @@ class SQLBundle:
  PARAMETERS:
  view_name - Viewname to be created
  select_expression - A SQL from which a view is to be created. (SELECT query)
+ lock_rows - When set to True, teradataml DataFrame locks the corresponding row(s)
+ in underlying table(s) while accessing the data. Otherwise,
+ teradataml DataFrame access the data without locking the rows.
+ Default is False.
+

  RETURNS:
  A CREATE VIEW DDL statement
@@ -180,6 +190,8 @@ class SQLBundle:
  """
  sqlbundle = SQLBundle()
  query = sqlbundle._get_sql_query(SQLConstants.SQL_CREATE_VIEW)
+ if lock_rows:
+ select_expression = "LOCKING ROW FOR ACCESS {}".format(select_expression)
  return query.format(view_name, select_expression)

  @staticmethod
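
With `lock_rows=True`, the SELECT expression is prefixed with LOCKING ROW FOR ACCESS before it is substituted into the CREATE VIEW template. A minimal sketch of the transformation; only the prefix comes from the hunk above, the CREATE VIEW wrapper shown here is an assumption since the SQL_CREATE_VIEW template is not part of this diff:

    def build_create_view(view_name, select_expression, lock_rows=False):
        # Prefix the SELECT with the access lock when requested (from the hunk above).
        if lock_rows:
            select_expression = "LOCKING ROW FOR ACCESS {}".format(select_expression)
        # Assumed wrapper; the real template comes from SQLBundle.
        return "CREATE VIEW {} AS {}".format(view_name, select_expression)

    print(build_create_view("mydb.my_view", "SELECT * FROM mydb.my_table", lock_rows=True))
    # CREATE VIEW mydb.my_view AS LOCKING ROW FOR ACCESS SELECT * FROM mydb.my_table
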
@@ -636,7 +648,7 @@ class SQLBundle:
  return ddlstmt.format(tablename, columns_datatypes)

  @staticmethod
- def _build_insert_into_table_records(tablename, columns):
+ def _build_insert_into_table_records(tablename, columns, column_names=None):
  """
  Builds a prepared statement with parameter markers for a table.
  This is an internal function.
@@ -644,6 +656,7 @@ class SQLBundle:
  PARAMETERS:
  tablename - Table name to insert data.
  columns - The parameter markers for the prepared statement
+ column_names - The column names to be inserted.

  RETURNS:
  Returns a prepared statement.
@@ -653,11 +666,15 @@ class SQLBundle:

  EXAMPLES:
  preprdstmt = SQLBundle.SQL_INSERT_INTO_TABLE_VALUES('mytab', '?, ?')
-
+ preprdstmt = SQLBundle.SQL_INSERT_INTO_TABLE_VALUES_WITH_COLUMN_NAMES('mytab', 'column1, column2', '?, ?')
+
  """
  sqlbundle = SQLBundle()
- query = sqlbundle._get_sql_query(SQLConstants.SQL_INSERT_INTO_TABLE_VALUES)
- return query.format(tablename, columns)
+ sqlkey = (SQLConstants.SQL_INSERT_INTO_TABLE_VALUES_WITH_COLUMN_NAMES
+ if column_names else SQLConstants.SQL_INSERT_INTO_TABLE_VALUES)
+ query = sqlbundle._get_sql_query(sqlkey)
+ return (query.format(tablename, column_names, columns)
+ if column_names else query.format(tablename, columns))

  @staticmethod
  def _build_delete_all_rows_from_table(tablename):
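
When `column_names` is supplied, the builder now switches to the "insert into {0} ({1}) values({2})" template registered earlier in this diff; otherwise it keeps the original behaviour. A hedged sketch of both paths; the WITH_COLUMN_NAMES template is taken from the SQLBundle hunk above, while the plain template shown here is an assumption:

    # Sketch mirroring _build_insert_into_table_records.
    INSERT_VALUES = "insert into {0} values({1})"                     # assumed base template
    INSERT_VALUES_WITH_COLUMNS = "insert into {0} ({1}) values({2})"  # from the hunk above

    def build_insert(tablename, columns, column_names=None):
        if column_names:
            return INSERT_VALUES_WITH_COLUMNS.format(tablename, column_names, columns)
        return INSERT_VALUES.format(tablename, columns)

    print(build_insert("mytab", "?, ?"))                          # insert into mytab values(?, ?)
    print(build_insert("mytab", "?, ?", "column1, column2"))      # insert into mytab (column1, column2) values(?, ?)
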