teradataml 17.20.0.7 (py3-none-any.whl) → 20.0.0.0 (py3-none-any.whl)

This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.

Potentially problematic release: this version of teradataml has been flagged by the registry scan; see the registry's report for details.

Files changed (1285)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +1864 -1640
  4. teradataml/__init__.py +70 -60
  5. teradataml/_version.py +11 -11
  6. teradataml/analytics/Transformations.py +2995 -2995
  7. teradataml/analytics/__init__.py +81 -83
  8. teradataml/analytics/analytic_function_executor.py +2013 -2010
  9. teradataml/analytics/analytic_query_generator.py +958 -958
  10. teradataml/analytics/byom/H2OPredict.py +514 -514
  11. teradataml/analytics/byom/PMMLPredict.py +437 -437
  12. teradataml/analytics/byom/__init__.py +14 -14
  13. teradataml/analytics/json_parser/__init__.py +130 -130
  14. teradataml/analytics/json_parser/analytic_functions_argument.py +1707 -1707
  15. teradataml/analytics/json_parser/json_store.py +191 -191
  16. teradataml/analytics/json_parser/metadata.py +1637 -1637
  17. teradataml/analytics/json_parser/utils.py +804 -803
  18. teradataml/analytics/meta_class.py +196 -196
  19. teradataml/analytics/sqle/DecisionTreePredict.py +455 -470
  20. teradataml/analytics/sqle/NaiveBayesPredict.py +419 -428
  21. teradataml/analytics/sqle/__init__.py +97 -110
  22. teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -78
  23. teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -62
  24. teradataml/analytics/table_operator/__init__.py +10 -10
  25. teradataml/analytics/uaf/__init__.py +63 -63
  26. teradataml/analytics/utils.py +693 -692
  27. teradataml/analytics/valib.py +1603 -1600
  28. teradataml/automl/__init__.py +1628 -0
  29. teradataml/automl/custom_json_utils.py +1270 -0
  30. teradataml/automl/data_preparation.py +993 -0
  31. teradataml/automl/data_transformation.py +727 -0
  32. teradataml/automl/feature_engineering.py +1648 -0
  33. teradataml/automl/feature_exploration.py +547 -0
  34. teradataml/automl/model_evaluation.py +163 -0
  35. teradataml/automl/model_training.py +887 -0
  36. teradataml/catalog/__init__.py +1 -3
  37. teradataml/catalog/byom.py +1759 -1716
  38. teradataml/catalog/function_argument_mapper.py +859 -861
  39. teradataml/catalog/model_cataloging_utils.py +491 -1510
  40. teradataml/clients/pkce_client.py +481 -481
  41. teradataml/common/aed_utils.py +6 -2
  42. teradataml/common/bulk_exposed_utils.py +111 -111
  43. teradataml/common/constants.py +1433 -1441
  44. teradataml/common/deprecations.py +160 -0
  45. teradataml/common/exceptions.py +73 -73
  46. teradataml/common/formula.py +742 -742
  47. teradataml/common/garbagecollector.py +592 -635
  48. teradataml/common/messagecodes.py +422 -431
  49. teradataml/common/messages.py +227 -231
  50. teradataml/common/sqlbundle.py +693 -693
  51. teradataml/common/td_coltype_code_to_tdtype.py +48 -48
  52. teradataml/common/utils.py +2418 -2500
  53. teradataml/common/warnings.py +25 -25
  54. teradataml/common/wrapper_utils.py +1 -110
  55. teradataml/config/dummy_file1.cfg +4 -4
  56. teradataml/config/dummy_file2.cfg +2 -2
  57. teradataml/config/sqlengine_alias_definitions_v1.0 +13 -13
  58. teradataml/config/sqlengine_alias_definitions_v1.1 +19 -19
  59. teradataml/config/sqlengine_alias_definitions_v1.3 +18 -18
  60. teradataml/context/aed_context.py +217 -217
  61. teradataml/context/context.py +1071 -999
  62. teradataml/data/A_loan.csv +19 -19
  63. teradataml/data/BINARY_REALS_LEFT.csv +11 -11
  64. teradataml/data/BINARY_REALS_RIGHT.csv +11 -11
  65. teradataml/data/B_loan.csv +49 -49
  66. teradataml/data/BuoyData2.csv +17 -17
  67. teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -5
  68. teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -5
  69. teradataml/data/Convolve2RealsLeft.csv +5 -5
  70. teradataml/data/Convolve2RealsRight.csv +5 -5
  71. teradataml/data/Convolve2ValidLeft.csv +11 -11
  72. teradataml/data/Convolve2ValidRight.csv +11 -11
  73. teradataml/data/DFFTConv_Real_8_8.csv +65 -65
  74. teradataml/data/Orders1_12mf.csv +24 -24
  75. teradataml/data/Pi_loan.csv +7 -7
  76. teradataml/data/SMOOTHED_DATA.csv +7 -7
  77. teradataml/data/TestDFFT8.csv +9 -9
  78. teradataml/data/TestRiver.csv +109 -109
  79. teradataml/data/Traindata.csv +28 -28
  80. teradataml/data/acf.csv +17 -17
  81. teradataml/data/adaboost_example.json +34 -34
  82. teradataml/data/adaboostpredict_example.json +24 -24
  83. teradataml/data/additional_table.csv +10 -10
  84. teradataml/data/admissions_test.csv +21 -21
  85. teradataml/data/admissions_train.csv +41 -41
  86. teradataml/data/admissions_train_nulls.csv +41 -41
  87. teradataml/data/ageandheight.csv +13 -13
  88. teradataml/data/ageandpressure.csv +31 -31
  89. teradataml/data/antiselect_example.json +36 -36
  90. teradataml/data/antiselect_input.csv +8 -8
  91. teradataml/data/antiselect_input_mixed_case.csv +8 -8
  92. teradataml/data/applicant_external.csv +6 -6
  93. teradataml/data/applicant_reference.csv +6 -6
  94. teradataml/data/arima_example.json +9 -9
  95. teradataml/data/assortedtext_input.csv +8 -8
  96. teradataml/data/attribution_example.json +33 -33
  97. teradataml/data/attribution_sample_table.csv +27 -27
  98. teradataml/data/attribution_sample_table1.csv +6 -6
  99. teradataml/data/attribution_sample_table2.csv +11 -11
  100. teradataml/data/bank_churn.csv +10001 -0
  101. teradataml/data/bank_web_clicks1.csv +42 -42
  102. teradataml/data/bank_web_clicks2.csv +91 -91
  103. teradataml/data/bank_web_url.csv +85 -85
  104. teradataml/data/barrier.csv +2 -2
  105. teradataml/data/barrier_new.csv +3 -3
  106. teradataml/data/betweenness_example.json +13 -13
  107. teradataml/data/bin_breaks.csv +8 -8
  108. teradataml/data/bin_fit_ip.csv +3 -3
  109. teradataml/data/binary_complex_left.csv +11 -11
  110. teradataml/data/binary_complex_right.csv +11 -11
  111. teradataml/data/binary_matrix_complex_left.csv +21 -21
  112. teradataml/data/binary_matrix_complex_right.csv +21 -21
  113. teradataml/data/binary_matrix_real_left.csv +21 -21
  114. teradataml/data/binary_matrix_real_right.csv +21 -21
  115. teradataml/data/blood2ageandweight.csv +26 -26
  116. teradataml/data/bmi.csv +501 -0
  117. teradataml/data/boston.csv +507 -507
  118. teradataml/data/buoydata_mix.csv +11 -11
  119. teradataml/data/burst_data.csv +5 -5
  120. teradataml/data/burst_example.json +20 -20
  121. teradataml/data/byom_example.json +17 -17
  122. teradataml/data/bytes_table.csv +3 -3
  123. teradataml/data/cal_housing_ex_raw.csv +70 -70
  124. teradataml/data/callers.csv +7 -7
  125. teradataml/data/calls.csv +10 -10
  126. teradataml/data/cars_hist.csv +33 -33
  127. teradataml/data/cat_table.csv +24 -24
  128. teradataml/data/ccm_example.json +31 -31
  129. teradataml/data/ccm_input.csv +91 -91
  130. teradataml/data/ccm_input2.csv +13 -13
  131. teradataml/data/ccmexample.csv +101 -101
  132. teradataml/data/ccmprepare_example.json +8 -8
  133. teradataml/data/ccmprepare_input.csv +91 -91
  134. teradataml/data/cfilter_example.json +12 -12
  135. teradataml/data/changepointdetection_example.json +18 -18
  136. teradataml/data/changepointdetectionrt_example.json +8 -8
  137. teradataml/data/chi_sq.csv +2 -2
  138. teradataml/data/churn_data.csv +14 -14
  139. teradataml/data/churn_emission.csv +35 -35
  140. teradataml/data/churn_initial.csv +3 -3
  141. teradataml/data/churn_state_transition.csv +5 -5
  142. teradataml/data/citedges_2.csv +745 -745
  143. teradataml/data/citvertices_2.csv +1210 -1210
  144. teradataml/data/clicks2.csv +16 -16
  145. teradataml/data/clickstream.csv +12 -12
  146. teradataml/data/clickstream1.csv +11 -11
  147. teradataml/data/closeness_example.json +15 -15
  148. teradataml/data/complaints.csv +21 -21
  149. teradataml/data/complaints_mini.csv +3 -3
  150. teradataml/data/complaints_testtoken.csv +224 -224
  151. teradataml/data/complaints_tokens_test.csv +353 -353
  152. teradataml/data/complaints_traintoken.csv +472 -472
  153. teradataml/data/computers_category.csv +1001 -1001
  154. teradataml/data/computers_test1.csv +1252 -1252
  155. teradataml/data/computers_train1.csv +5009 -5009
  156. teradataml/data/computers_train1_clustered.csv +5009 -5009
  157. teradataml/data/confusionmatrix_example.json +9 -9
  158. teradataml/data/conversion_event_table.csv +3 -3
  159. teradataml/data/corr_input.csv +17 -17
  160. teradataml/data/correlation_example.json +11 -11
  161. teradataml/data/coxhazardratio_example.json +39 -39
  162. teradataml/data/coxph_example.json +15 -15
  163. teradataml/data/coxsurvival_example.json +28 -28
  164. teradataml/data/cpt.csv +41 -41
  165. teradataml/data/credit_ex_merged.csv +45 -45
  166. teradataml/data/customer_loyalty.csv +301 -301
  167. teradataml/data/customer_loyalty_newseq.csv +31 -31
  168. teradataml/data/dataframe_example.json +146 -146
  169. teradataml/data/decisionforest_example.json +37 -37
  170. teradataml/data/decisionforestpredict_example.json +38 -38
  171. teradataml/data/decisiontree_example.json +21 -21
  172. teradataml/data/decisiontreepredict_example.json +45 -45
  173. teradataml/data/dfft2_size4_real.csv +17 -17
  174. teradataml/data/dfft2_test_matrix16.csv +17 -17
  175. teradataml/data/dfft2conv_real_4_4.csv +65 -65
  176. teradataml/data/diabetes.csv +443 -443
  177. teradataml/data/diabetes_test.csv +89 -89
  178. teradataml/data/dict_table.csv +5 -5
  179. teradataml/data/docperterm_table.csv +4 -4
  180. teradataml/data/docs/__init__.py +1 -1
  181. teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -180
  182. teradataml/data/docs/byom/docs/DataikuPredict.py +177 -177
  183. teradataml/data/docs/byom/docs/H2OPredict.py +324 -324
  184. teradataml/data/docs/byom/docs/ONNXPredict.py +283 -283
  185. teradataml/data/docs/byom/docs/PMMLPredict.py +277 -277
  186. teradataml/data/docs/sqle/docs_17_10/Antiselect.py +82 -82
  187. teradataml/data/docs/sqle/docs_17_10/Attribution.py +199 -199
  188. teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +171 -171
  189. teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -130
  190. teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -86
  191. teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -90
  192. teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +85 -85
  193. teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +95 -95
  194. teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -139
  195. teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +151 -151
  196. teradataml/data/docs/sqle/docs_17_10/FTest.py +160 -160
  197. teradataml/data/docs/sqle/docs_17_10/FillRowId.py +82 -82
  198. teradataml/data/docs/sqle/docs_17_10/Fit.py +87 -87
  199. teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -144
  200. teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +84 -84
  201. teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +81 -81
  202. teradataml/data/docs/sqle/docs_17_10/Histogram.py +164 -164
  203. teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -134
  204. teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +208 -208
  205. teradataml/data/docs/sqle/docs_17_10/NPath.py +265 -265
  206. teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -116
  207. teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -176
  208. teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -147
  209. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +132 -132
  210. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +103 -103
  211. teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +165 -165
  212. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +101 -101
  213. teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -128
  214. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +111 -111
  215. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -102
  216. teradataml/data/docs/sqle/docs_17_10/QQNorm.py +104 -104
  217. teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +109 -109
  218. teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +117 -117
  219. teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -98
  220. teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +152 -152
  221. teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -197
  222. teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -98
  223. teradataml/data/docs/sqle/docs_17_10/Sessionize.py +113 -113
  224. teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -116
  225. teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -98
  226. teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -187
  227. teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +145 -145
  228. teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -104
  229. teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +141 -141
  230. teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -214
  231. teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -83
  232. teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -83
  233. teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -155
  234. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +126 -126
  235. teradataml/data/docs/sqle/docs_17_20/Antiselect.py +82 -82
  236. teradataml/data/docs/sqle/docs_17_20/Attribution.py +200 -200
  237. teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +171 -171
  238. teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -138
  239. teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -86
  240. teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -90
  241. teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -166
  242. teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +85 -85
  243. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +243 -243
  244. teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -113
  245. teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +279 -279
  246. teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -144
  247. teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +135 -135
  248. teradataml/data/docs/sqle/docs_17_20/FTest.py +160 -160
  249. teradataml/data/docs/sqle/docs_17_20/FillRowId.py +82 -82
  250. teradataml/data/docs/sqle/docs_17_20/Fit.py +87 -87
  251. teradataml/data/docs/sqle/docs_17_20/GLM.py +380 -380
  252. teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +414 -414
  253. teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -144
  254. teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -234
  255. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +123 -123
  256. teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +108 -108
  257. teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +105 -105
  258. teradataml/data/docs/sqle/docs_17_20/Histogram.py +223 -223
  259. teradataml/data/docs/sqle/docs_17_20/KMeans.py +204 -204
  260. teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -143
  261. teradataml/data/docs/sqle/docs_17_20/KNN.py +214 -214
  262. teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -134
  263. teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +208 -208
  264. teradataml/data/docs/sqle/docs_17_20/NPath.py +265 -265
  265. teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -116
  266. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -176
  267. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +126 -126
  268. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +117 -117
  269. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -112
  270. teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -147
  271. teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -307
  272. teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -184
  273. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +225 -225
  274. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +115 -115
  275. teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +219 -219
  276. teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -127
  277. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +189 -189
  278. teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -112
  279. teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -128
  280. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +111 -111
  281. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -111
  282. teradataml/data/docs/sqle/docs_17_20/QQNorm.py +104 -104
  283. teradataml/data/docs/sqle/docs_17_20/ROC.py +163 -163
  284. teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +154 -154
  285. teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -106
  286. teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -120
  287. teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -211
  288. teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +108 -108
  289. teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +117 -117
  290. teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -110
  291. teradataml/data/docs/sqle/docs_17_20/SVM.py +413 -413
  292. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +202 -202
  293. teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +152 -152
  294. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +197 -197
  295. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +110 -109
  296. teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -206
  297. teradataml/data/docs/sqle/docs_17_20/Sessionize.py +113 -113
  298. teradataml/data/docs/sqle/docs_17_20/Silhouette.py +152 -152
  299. teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -116
  300. teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -108
  301. teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -187
  302. teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +145 -145
  303. teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -207
  304. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +171 -171
  305. teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +266 -266
  306. teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -140
  307. teradataml/data/docs/sqle/docs_17_20/TextParser.py +172 -172
  308. teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +159 -159
  309. teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -123
  310. teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +141 -141
  311. teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -214
  312. teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +168 -168
  313. teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -83
  314. teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -83
  315. teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +236 -236
  316. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +353 -353
  317. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +275 -275
  318. teradataml/data/docs/sqle/docs_17_20/ZTest.py +155 -155
  319. teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +429 -429
  320. teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +429 -429
  321. teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +347 -347
  322. teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +428 -428
  323. teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +347 -347
  324. teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +439 -439
  325. teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +386 -386
  326. teradataml/data/docs/uaf/docs_17_20/ACF.py +195 -195
  327. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +369 -369
  328. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +142 -142
  329. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +159 -159
  330. teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +247 -247
  331. teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -252
  332. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +177 -177
  333. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +174 -174
  334. teradataml/data/docs/uaf/docs_17_20/Convolve.py +226 -226
  335. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +214 -214
  336. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +183 -183
  337. teradataml/data/docs/uaf/docs_17_20/DFFT.py +203 -203
  338. teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -216
  339. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +215 -215
  340. teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +191 -191
  341. teradataml/data/docs/uaf/docs_17_20/DTW.py +179 -179
  342. teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +144 -144
  343. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +183 -183
  344. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +184 -184
  345. teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -172
  346. teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +205 -205
  347. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +142 -142
  348. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +258 -258
  349. teradataml/data/docs/uaf/docs_17_20/IDFFT.py +164 -164
  350. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +198 -198
  351. teradataml/data/docs/uaf/docs_17_20/InputValidator.py +120 -120
  352. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +155 -155
  353. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +214 -214
  354. teradataml/data/docs/uaf/docs_17_20/MAMean.py +173 -173
  355. teradataml/data/docs/uaf/docs_17_20/MInfo.py +133 -133
  356. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +135 -135
  357. teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +190 -190
  358. teradataml/data/docs/uaf/docs_17_20/PACF.py +158 -158
  359. teradataml/data/docs/uaf/docs_17_20/Portman.py +216 -216
  360. teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +154 -154
  361. teradataml/data/docs/uaf/docs_17_20/Resample.py +228 -228
  362. teradataml/data/docs/uaf/docs_17_20/SInfo.py +122 -122
  363. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +165 -165
  364. teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +173 -173
  365. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +170 -170
  366. teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +163 -163
  367. teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +179 -179
  368. teradataml/data/docs/uaf/docs_17_20/Smoothma.py +207 -207
  369. teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +150 -150
  370. teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -171
  371. teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +201 -201
  372. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +169 -169
  373. teradataml/data/dtw_example.json +17 -17
  374. teradataml/data/dtw_t1.csv +11 -11
  375. teradataml/data/dtw_t2.csv +4 -4
  376. teradataml/data/dwt2d_example.json +15 -15
  377. teradataml/data/dwt_example.json +14 -14
  378. teradataml/data/dwt_filter_dim.csv +5 -5
  379. teradataml/data/emission.csv +9 -9
  380. teradataml/data/emp_table_by_dept.csv +19 -19
  381. teradataml/data/employee_info.csv +4 -4
  382. teradataml/data/employee_table.csv +6 -6
  383. teradataml/data/excluding_event_table.csv +2 -2
  384. teradataml/data/finance_data.csv +6 -6
  385. teradataml/data/finance_data2.csv +61 -61
  386. teradataml/data/finance_data3.csv +93 -93
  387. teradataml/data/fish.csv +160 -0
  388. teradataml/data/fm_blood2ageandweight.csv +26 -26
  389. teradataml/data/fmeasure_example.json +11 -11
  390. teradataml/data/followers_leaders.csv +10 -10
  391. teradataml/data/fpgrowth_example.json +12 -12
  392. teradataml/data/frequentpaths_example.json +29 -29
  393. teradataml/data/friends.csv +9 -9
  394. teradataml/data/fs_input.csv +33 -33
  395. teradataml/data/fs_input1.csv +33 -33
  396. teradataml/data/genData.csv +513 -513
  397. teradataml/data/geodataframe_example.json +39 -39
  398. teradataml/data/glass_types.csv +215 -0
  399. teradataml/data/glm_admissions_model.csv +12 -12
  400. teradataml/data/glm_example.json +29 -29
  401. teradataml/data/glml1l2_example.json +28 -28
  402. teradataml/data/glml1l2predict_example.json +54 -54
  403. teradataml/data/glmpredict_example.json +54 -54
  404. teradataml/data/gq_t1.csv +21 -21
  405. teradataml/data/hconvolve_complex_right.csv +5 -5
  406. teradataml/data/hconvolve_complex_rightmulti.csv +5 -5
  407. teradataml/data/histogram_example.json +11 -11
  408. teradataml/data/hmmdecoder_example.json +78 -78
  409. teradataml/data/hmmevaluator_example.json +24 -24
  410. teradataml/data/hmmsupervised_example.json +10 -10
  411. teradataml/data/hmmunsupervised_example.json +7 -7
  412. teradataml/data/house_values.csv +12 -12
  413. teradataml/data/house_values2.csv +13 -13
  414. teradataml/data/housing_cat.csv +7 -7
  415. teradataml/data/housing_data.csv +9 -9
  416. teradataml/data/housing_test.csv +47 -47
  417. teradataml/data/housing_test_binary.csv +47 -47
  418. teradataml/data/housing_train.csv +493 -493
  419. teradataml/data/housing_train_attribute.csv +4 -4
  420. teradataml/data/housing_train_binary.csv +437 -437
  421. teradataml/data/housing_train_parameter.csv +2 -2
  422. teradataml/data/housing_train_response.csv +493 -493
  423. teradataml/data/ibm_stock.csv +370 -370
  424. teradataml/data/ibm_stock1.csv +370 -370
  425. teradataml/data/identitymatch_example.json +21 -21
  426. teradataml/data/idf_table.csv +4 -4
  427. teradataml/data/impressions.csv +101 -101
  428. teradataml/data/inflation.csv +21 -21
  429. teradataml/data/initial.csv +3 -3
  430. teradataml/data/insect_sprays.csv +12 -12
  431. teradataml/data/insurance.csv +1339 -1339
  432. teradataml/data/interpolator_example.json +12 -12
  433. teradataml/data/iris_altinput.csv +481 -481
  434. teradataml/data/iris_attribute_output.csv +8 -8
  435. teradataml/data/iris_attribute_test.csv +121 -121
  436. teradataml/data/iris_attribute_train.csv +481 -481
  437. teradataml/data/iris_category_expect_predict.csv +31 -31
  438. teradataml/data/iris_data.csv +151 -0
  439. teradataml/data/iris_input.csv +151 -151
  440. teradataml/data/iris_response_train.csv +121 -121
  441. teradataml/data/iris_test.csv +31 -31
  442. teradataml/data/iris_train.csv +121 -121
  443. teradataml/data/join_table1.csv +4 -4
  444. teradataml/data/join_table2.csv +4 -4
  445. teradataml/data/jsons/anly_function_name.json +6 -6
  446. teradataml/data/jsons/byom/dataikupredict.json +147 -147
  447. teradataml/data/jsons/byom/datarobotpredict.json +146 -146
  448. teradataml/data/jsons/byom/h2opredict.json +194 -194
  449. teradataml/data/jsons/byom/onnxpredict.json +186 -186
  450. teradataml/data/jsons/byom/pmmlpredict.json +146 -146
  451. teradataml/data/jsons/paired_functions.json +435 -435
  452. teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -56
  453. teradataml/data/jsons/sqle/16.20/Attribution.json +249 -249
  454. teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -156
  455. teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -170
  456. teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -122
  457. teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -367
  458. teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -239
  459. teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -136
  460. teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -235
  461. teradataml/data/jsons/sqle/16.20/Pack.json +98 -98
  462. teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -162
  463. teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -105
  464. teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -86
  465. teradataml/data/jsons/sqle/16.20/Unpack.json +166 -166
  466. teradataml/data/jsons/sqle/16.20/nPath.json +269 -269
  467. teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -56
  468. teradataml/data/jsons/sqle/17.00/Attribution.json +249 -249
  469. teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -156
  470. teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -170
  471. teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -122
  472. teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -367
  473. teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -239
  474. teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -136
  475. teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -235
  476. teradataml/data/jsons/sqle/17.00/Pack.json +98 -98
  477. teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -162
  478. teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -105
  479. teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -86
  480. teradataml/data/jsons/sqle/17.00/Unpack.json +166 -166
  481. teradataml/data/jsons/sqle/17.00/nPath.json +269 -269
  482. teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -56
  483. teradataml/data/jsons/sqle/17.05/Attribution.json +249 -249
  484. teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -156
  485. teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -170
  486. teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -122
  487. teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -367
  488. teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -239
  489. teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -136
  490. teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -235
  491. teradataml/data/jsons/sqle/17.05/Pack.json +98 -98
  492. teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -162
  493. teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -105
  494. teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -86
  495. teradataml/data/jsons/sqle/17.05/Unpack.json +166 -166
  496. teradataml/data/jsons/sqle/17.05/nPath.json +269 -269
  497. teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -56
  498. teradataml/data/jsons/sqle/17.10/Attribution.json +249 -249
  499. teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -185
  500. teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +171 -171
  501. teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -151
  502. teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -368
  503. teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -239
  504. teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -149
  505. teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -288
  506. teradataml/data/jsons/sqle/17.10/Pack.json +133 -133
  507. teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -193
  508. teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -105
  509. teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -86
  510. teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -239
  511. teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -70
  512. teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +53 -53
  513. teradataml/data/jsons/sqle/17.10/TD_Chisq.json +67 -67
  514. teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +53 -53
  515. teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +68 -68
  516. teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -187
  517. teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +51 -51
  518. teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -46
  519. teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -71
  520. teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +52 -52
  521. teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +52 -52
  522. teradataml/data/jsons/sqle/17.10/TD_Histogram.json +132 -132
  523. teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -147
  524. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +182 -182
  525. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +65 -64
  526. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +196 -196
  527. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -47
  528. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -114
  529. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -71
  530. teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +111 -111
  531. teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -93
  532. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +127 -127
  533. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +70 -69
  534. teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +156 -156
  535. teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +70 -69
  536. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +147 -147
  537. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -47
  538. teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -240
  539. teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +118 -118
  540. teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +52 -52
  541. teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +52 -52
  542. teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -171
  543. teradataml/data/jsons/sqle/17.10/Unpack.json +188 -188
  544. teradataml/data/jsons/sqle/17.10/nPath.json +269 -269
  545. teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -56
  546. teradataml/data/jsons/sqle/17.20/Attribution.json +249 -249
  547. teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -185
  548. teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -172
  549. teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -151
  550. teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -367
  551. teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -239
  552. teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -149
  553. teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -287
  554. teradataml/data/jsons/sqle/17.20/Pack.json +133 -133
  555. teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -192
  556. teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -105
  557. teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -86
  558. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +76 -76
  559. teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -239
  560. teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -71
  561. teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -53
  562. teradataml/data/jsons/sqle/17.20/TD_Chisq.json +67 -67
  563. teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +145 -145
  564. teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -53
  565. teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -218
  566. teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -92
  567. teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +259 -259
  568. teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -139
  569. teradataml/data/jsons/sqle/17.20/TD_FTest.json +186 -186
  570. teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -52
  571. teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -46
  572. teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -72
  573. teradataml/data/jsons/sqle/17.20/TD_GLM.json +431 -431
  574. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +125 -125
  575. teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -411
  576. teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -146
  577. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +91 -91
  578. teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -76
  579. teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -76
  580. teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -152
  581. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +211 -211
  582. teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +86 -86
  583. teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -262
  584. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -137
  585. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +101 -101
  586. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -71
  587. teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -147
  588. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +315 -315
  589. teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +123 -123
  590. teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -271
  591. teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -65
  592. teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -229
  593. teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -75
  594. teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -217
  595. teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -48
  596. teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -114
  597. teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -72
  598. teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -111
  599. teradataml/data/jsons/sqle/17.20/TD_ROC.json +177 -177
  600. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +178 -178
  601. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +73 -73
  602. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -74
  603. teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +137 -137
  604. teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -93
  605. teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +127 -127
  606. teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +70 -70
  607. teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -389
  608. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +124 -124
  609. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +156 -156
  610. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +70 -70
  611. teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +193 -193
  612. teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +142 -142
  613. teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -147
  614. teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -48
  615. teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -240
  616. teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -248
  617. teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -75
  618. teradataml/data/jsons/sqle/17.20/TD_TextParser.json +192 -192
  619. teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -142
  620. teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -117
  621. teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +182 -182
  622. teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +52 -52
  623. teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +52 -52
  624. teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -241
  625. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +312 -312
  626. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +182 -182
  627. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +170 -170
  628. teradataml/data/jsons/sqle/17.20/Unpack.json +188 -188
  629. teradataml/data/jsons/sqle/17.20/nPath.json +269 -269
  630. teradataml/data/jsons/tableoperator/17.00/read_nos.json +197 -197
  631. teradataml/data/jsons/tableoperator/17.05/read_nos.json +197 -197
  632. teradataml/data/jsons/tableoperator/17.05/write_nos.json +194 -194
  633. teradataml/data/jsons/tableoperator/17.10/read_nos.json +183 -183
  634. teradataml/data/jsons/tableoperator/17.10/write_nos.json +194 -194
  635. teradataml/data/jsons/tableoperator/17.20/read_nos.json +182 -182
  636. teradataml/data/jsons/tableoperator/17.20/write_nos.json +223 -223
  637. teradataml/data/jsons/uaf/17.20/TD_ACF.json +149 -149
  638. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +409 -409
  639. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +79 -79
  640. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +151 -151
  641. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +109 -109
  642. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +107 -107
  643. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +87 -87
  644. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +106 -106
  645. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +80 -80
  646. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +67 -67
  647. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +91 -91
  648. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +136 -136
  649. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +148 -148
  650. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -108
  651. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +109 -109
  652. teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +86 -86
  653. teradataml/data/jsons/uaf/17.20/TD_DIFF.json +91 -91
  654. teradataml/data/jsons/uaf/17.20/TD_DTW.json +116 -116
  655. teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +100 -100
  656. teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +38 -38
  657. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +100 -100
  658. teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +84 -84
  659. teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +70 -70
  660. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +152 -152
  661. teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECAST.json +313 -313
  662. teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +57 -57
  663. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +94 -94
  664. teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +63 -63
  665. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +181 -181
  666. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +102 -102
  667. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +182 -182
  668. teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +67 -67
  669. teradataml/data/jsons/uaf/17.20/TD_MINFO.json +66 -66
  670. teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +178 -178
  671. teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -114
  672. teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +118 -118
  673. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -175
  674. teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +97 -97
  675. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +173 -173
  676. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +136 -136
  677. teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +89 -89
  678. teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +79 -79
  679. teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +67 -67
  680. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -184
  681. teradataml/data/jsons/uaf/17.20/TD_SINFO.json +57 -57
  682. teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +162 -162
  683. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +100 -100
  684. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +111 -111
  685. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -95
  686. teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +77 -77
  687. teradataml/data/kmeans_example.json +17 -17
  688. teradataml/data/kmeans_us_arrests_data.csv +0 -0
  689. teradataml/data/knn_example.json +18 -18
  690. teradataml/data/knnrecommender_example.json +6 -6
  691. teradataml/data/knnrecommenderpredict_example.json +12 -12
  692. teradataml/data/lar_example.json +17 -17
  693. teradataml/data/larpredict_example.json +30 -30
  694. teradataml/data/lc_new_predictors.csv +5 -5
  695. teradataml/data/lc_new_reference.csv +9 -9
  696. teradataml/data/lda_example.json +8 -8
  697. teradataml/data/ldainference_example.json +14 -14
  698. teradataml/data/ldatopicsummary_example.json +8 -8
  699. teradataml/data/levendist_input.csv +13 -13
  700. teradataml/data/levenshteindistance_example.json +10 -10
  701. teradataml/data/linreg_example.json +9 -9
  702. teradataml/data/load_example_data.py +326 -323
  703. teradataml/data/loan_prediction.csv +295 -295
  704. teradataml/data/lungcancer.csv +138 -138
  705. teradataml/data/mappingdata.csv +12 -12
  706. teradataml/data/milk_timeseries.csv +157 -157
  707. teradataml/data/min_max_titanic.csv +4 -4
  708. teradataml/data/minhash_example.json +6 -6
  709. teradataml/data/ml_ratings.csv +7547 -7547
  710. teradataml/data/ml_ratings_10.csv +2445 -2445
  711. teradataml/data/model1_table.csv +5 -5
  712. teradataml/data/model2_table.csv +5 -5
  713. teradataml/data/models/iris_db_glm_model.pmml +56 -56
  714. teradataml/data/models/iris_db_xgb_model.pmml +4471 -4471
  715. teradataml/data/modularity_example.json +12 -12
  716. teradataml/data/movavg_example.json +7 -7
  717. teradataml/data/mtx1.csv +7 -7
  718. teradataml/data/mtx2.csv +13 -13
  719. teradataml/data/multi_model_classification.csv +401 -0
  720. teradataml/data/multi_model_regression.csv +401 -0
  721. teradataml/data/mvdfft8.csv +9 -9
  722. teradataml/data/naivebayes_example.json +9 -9
  723. teradataml/data/naivebayespredict_example.json +19 -19
  724. teradataml/data/naivebayestextclassifier2_example.json +6 -6
  725. teradataml/data/naivebayestextclassifier_example.json +8 -8
  726. teradataml/data/naivebayestextclassifierpredict_example.json +20 -20
  727. teradataml/data/name_Find_configure.csv +10 -10
  728. teradataml/data/namedentityfinder_example.json +14 -14
  729. teradataml/data/namedentityfinderevaluator_example.json +10 -10
  730. teradataml/data/namedentityfindertrainer_example.json +6 -6
  731. teradataml/data/nb_iris_input_test.csv +31 -31
  732. teradataml/data/nb_iris_input_train.csv +121 -121
  733. teradataml/data/nbp_iris_model.csv +13 -13
  734. teradataml/data/ner_extractor_text.csv +2 -2
  735. teradataml/data/ner_sports_test2.csv +29 -29
  736. teradataml/data/ner_sports_train.csv +501 -501
  737. teradataml/data/nerevaluator_example.json +5 -5
  738. teradataml/data/nerextractor_example.json +18 -18
  739. teradataml/data/nermem_sports_test.csv +17 -17
  740. teradataml/data/nermem_sports_train.csv +50 -50
  741. teradataml/data/nertrainer_example.json +6 -6
  742. teradataml/data/ngrams_example.json +6 -6
  743. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -1455
  744. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -1993
  745. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -1492
  746. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -536
  747. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -570
  748. teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -2559
  749. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -2911
  750. teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -698
  751. teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -784
  752. teradataml/data/npath_example.json +23 -23
  753. teradataml/data/ntree_example.json +14 -14
  754. teradataml/data/numeric_strings.csv +4 -4
  755. teradataml/data/numerics.csv +4 -4
  756. teradataml/data/ocean_buoy.csv +17 -17
  757. teradataml/data/ocean_buoy2.csv +17 -17
  758. teradataml/data/ocean_buoys.csv +27 -27
  759. teradataml/data/ocean_buoys2.csv +10 -10
  760. teradataml/data/ocean_buoys_nonpti.csv +28 -28
  761. teradataml/data/ocean_buoys_seq.csv +29 -29
  762. teradataml/data/openml_example.json +63 -0
  763. teradataml/data/optional_event_table.csv +4 -4
  764. teradataml/data/orders1.csv +11 -11
  765. teradataml/data/orders1_12.csv +12 -12
  766. teradataml/data/orders_ex.csv +4 -4
  767. teradataml/data/pack_example.json +8 -8
  768. teradataml/data/package_tracking.csv +19 -19
  769. teradataml/data/package_tracking_pti.csv +18 -18
  770. teradataml/data/pagerank_example.json +13 -13
  771. teradataml/data/paragraphs_input.csv +6 -6
  772. teradataml/data/pathanalyzer_example.json +7 -7
  773. teradataml/data/pathgenerator_example.json +7 -7
  774. teradataml/data/phrases.csv +7 -7
  775. teradataml/data/pivot_example.json +8 -8
  776. teradataml/data/pivot_input.csv +22 -22
  777. teradataml/data/playerRating.csv +31 -31
  778. teradataml/data/postagger_example.json +6 -6
  779. teradataml/data/posttagger_output.csv +44 -44
  780. teradataml/data/production_data.csv +16 -16
  781. teradataml/data/production_data2.csv +7 -7
  782. teradataml/data/randomsample_example.json +31 -31
  783. teradataml/data/randomwalksample_example.json +8 -8
  784. teradataml/data/rank_table.csv +6 -6
  785. teradataml/data/ref_mobile_data.csv +4 -4
  786. teradataml/data/ref_mobile_data_dense.csv +2 -2
  787. teradataml/data/ref_url.csv +17 -17
  788. teradataml/data/restaurant_reviews.csv +7 -7
  789. teradataml/data/river_data.csv +145 -145
  790. teradataml/data/roc_example.json +7 -7
  791. teradataml/data/roc_input.csv +101 -101
  792. teradataml/data/rule_inputs.csv +6 -6
  793. teradataml/data/rule_table.csv +2 -2
  794. teradataml/data/sales.csv +7 -7
  795. teradataml/data/sales_transaction.csv +501 -501
  796. teradataml/data/salesdata.csv +342 -342
  797. teradataml/data/sample_cities.csv +2 -2
  798. teradataml/data/sample_shapes.csv +10 -10
  799. teradataml/data/sample_streets.csv +2 -2
  800. teradataml/data/sampling_example.json +15 -15
  801. teradataml/data/sax_example.json +8 -8
  802. teradataml/data/scale_example.json +23 -23
  803. teradataml/data/scale_housing.csv +11 -11
  804. teradataml/data/scale_housing_test.csv +6 -6
  805. teradataml/data/scale_stat.csv +11 -11
  806. teradataml/data/scalebypartition_example.json +13 -13
  807. teradataml/data/scalemap_example.json +13 -13
  808. teradataml/data/scalesummary_example.json +12 -12
  809. teradataml/data/score_category.csv +101 -101
  810. teradataml/data/score_summary.csv +4 -4
  811. teradataml/data/script_example.json +9 -9
  812. teradataml/data/scripts/deploy_script.py +65 -0
  813. teradataml/data/scripts/mapper.R +20 -0
  814. teradataml/data/scripts/mapper.py +15 -15
  815. teradataml/data/scripts/mapper_replace.py +15 -15
  816. teradataml/data/scripts/sklearn/__init__.py +0 -0
  817. teradataml/data/scripts/sklearn/sklearn_fit.py +175 -0
  818. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +135 -0
  819. teradataml/data/scripts/sklearn/sklearn_function.template +113 -0
  820. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +158 -0
  821. teradataml/data/scripts/sklearn/sklearn_neighbors.py +152 -0
  822. teradataml/data/scripts/sklearn/sklearn_score.py +128 -0
  823. teradataml/data/scripts/sklearn/sklearn_transform.py +179 -0
  824. teradataml/data/seeds.csv +10 -10
  825. teradataml/data/sentenceextractor_example.json +6 -6
  826. teradataml/data/sentiment_extract_input.csv +11 -11
  827. teradataml/data/sentiment_train.csv +16 -16
  828. teradataml/data/sentiment_word.csv +20 -20
  829. teradataml/data/sentiment_word_input.csv +19 -19
  830. teradataml/data/sentimentextractor_example.json +24 -24
  831. teradataml/data/sentimenttrainer_example.json +8 -8
  832. teradataml/data/sequence_table.csv +10 -10
  833. teradataml/data/seriessplitter_example.json +7 -7
  834. teradataml/data/sessionize_example.json +17 -17
  835. teradataml/data/sessionize_table.csv +116 -116
  836. teradataml/data/setop_test1.csv +24 -24
  837. teradataml/data/setop_test2.csv +22 -22
  838. teradataml/data/soc_nw_edges.csv +10 -10
  839. teradataml/data/soc_nw_vertices.csv +7 -7
  840. teradataml/data/souvenir_timeseries.csv +167 -167
  841. teradataml/data/sparse_iris_attribute.csv +5 -5
  842. teradataml/data/sparse_iris_test.csv +121 -121
  843. teradataml/data/sparse_iris_train.csv +601 -601
  844. teradataml/data/star1.csv +6 -6
  845. teradataml/data/state_transition.csv +5 -5
  846. teradataml/data/stock_data.csv +53 -53
  847. teradataml/data/stock_movement.csv +11 -11
  848. teradataml/data/stock_vol.csv +76 -76
  849. teradataml/data/stop_words.csv +8 -8
  850. teradataml/data/store_sales.csv +37 -37
  851. teradataml/data/stringsimilarity_example.json +7 -7
  852. teradataml/data/strsimilarity_input.csv +13 -13
  853. teradataml/data/students.csv +101 -101
  854. teradataml/data/svm_iris_input_test.csv +121 -121
  855. teradataml/data/svm_iris_input_train.csv +481 -481
  856. teradataml/data/svm_iris_model.csv +7 -7
  857. teradataml/data/svmdense_example.json +9 -9
  858. teradataml/data/svmdensepredict_example.json +18 -18
  859. teradataml/data/svmsparse_example.json +7 -7
  860. teradataml/data/svmsparsepredict_example.json +13 -13
  861. teradataml/data/svmsparsesummary_example.json +7 -7
  862. teradataml/data/target_mobile_data.csv +13 -13
  863. teradataml/data/target_mobile_data_dense.csv +5 -5
  864. teradataml/data/templatedata.csv +1201 -1201
  865. teradataml/data/templates/open_source_ml.json +9 -0
  866. teradataml/data/teradataml_example.json +73 -1
  867. teradataml/data/test_classification.csv +101 -0
  868. teradataml/data/test_loan_prediction.csv +53 -53
  869. teradataml/data/test_pacf_12.csv +37 -37
  870. teradataml/data/test_prediction.csv +101 -0
  871. teradataml/data/test_regression.csv +101 -0
  872. teradataml/data/test_river2.csv +109 -109
  873. teradataml/data/text_inputs.csv +6 -6
  874. teradataml/data/textchunker_example.json +7 -7
  875. teradataml/data/textclassifier_example.json +6 -6
  876. teradataml/data/textclassifier_input.csv +7 -7
  877. teradataml/data/textclassifiertrainer_example.json +6 -6
  878. teradataml/data/textmorph_example.json +5 -5
  879. teradataml/data/textparser_example.json +15 -15
  880. teradataml/data/texttagger_example.json +11 -11
  881. teradataml/data/texttokenizer_example.json +6 -6
  882. teradataml/data/texttrainer_input.csv +11 -11
  883. teradataml/data/tf_example.json +6 -6
  884. teradataml/data/tfidf_example.json +13 -13
  885. teradataml/data/tfidf_input1.csv +201 -201
  886. teradataml/data/tfidf_train.csv +6 -6
  887. teradataml/data/time_table1.csv +535 -535
  888. teradataml/data/time_table2.csv +14 -14
  889. teradataml/data/timeseriesdata.csv +1601 -1601
  890. teradataml/data/timeseriesdatasetsd4.csv +105 -105
  891. teradataml/data/titanic.csv +892 -892
  892. teradataml/data/token_table.csv +696 -696
  893. teradataml/data/train_multiclass.csv +101 -0
  894. teradataml/data/train_regression.csv +101 -0
  895. teradataml/data/train_regression_multiple_labels.csv +101 -0
  896. teradataml/data/train_tracking.csv +27 -27
  897. teradataml/data/transformation_table.csv +5 -5
  898. teradataml/data/transformation_table_new.csv +1 -1
  899. teradataml/data/tv_spots.csv +16 -16
  900. teradataml/data/twod_climate_data.csv +117 -117
  901. teradataml/data/uaf_example.json +475 -475
  902. teradataml/data/univariatestatistics_example.json +8 -8
  903. teradataml/data/unpack_example.json +9 -9
  904. teradataml/data/unpivot_example.json +9 -9
  905. teradataml/data/unpivot_input.csv +8 -8
  906. teradataml/data/us_air_pass.csv +36 -36
  907. teradataml/data/us_population.csv +624 -624
  908. teradataml/data/us_states_shapes.csv +52 -52
  909. teradataml/data/varmax_example.json +17 -17
  910. teradataml/data/vectordistance_example.json +25 -25
  911. teradataml/data/ville_climatedata.csv +121 -121
  912. teradataml/data/ville_tempdata.csv +12 -12
  913. teradataml/data/ville_tempdata1.csv +12 -12
  914. teradataml/data/ville_temperature.csv +11 -11
  915. teradataml/data/waveletTable.csv +1605 -1605
  916. teradataml/data/waveletTable2.csv +1605 -1605
  917. teradataml/data/weightedmovavg_example.json +8 -8
  918. teradataml/data/wft_testing.csv +5 -5
  919. teradataml/data/wine_data.csv +1600 -0
  920. teradataml/data/word_embed_input_table1.csv +5 -5
  921. teradataml/data/word_embed_input_table2.csv +4 -4
  922. teradataml/data/word_embed_model.csv +22 -22
  923. teradataml/data/words_input.csv +13 -13
  924. teradataml/data/xconvolve_complex_left.csv +6 -6
  925. teradataml/data/xconvolve_complex_leftmulti.csv +6 -6
  926. teradataml/data/xgboost_example.json +35 -35
  927. teradataml/data/xgboostpredict_example.json +31 -31
  928. teradataml/dataframe/copy_to.py +1764 -1698
  929. teradataml/dataframe/data_transfer.py +2753 -2745
  930. teradataml/dataframe/dataframe.py +17545 -16946
  931. teradataml/dataframe/dataframe_utils.py +1837 -1740
  932. teradataml/dataframe/fastload.py +611 -603
  933. teradataml/dataframe/indexer.py +424 -424
  934. teradataml/dataframe/setop.py +1179 -1166
  935. teradataml/dataframe/sql.py +10090 -6432
  936. teradataml/dataframe/sql_function_parameters.py +439 -388
  937. teradataml/dataframe/sql_functions.py +652 -652
  938. teradataml/dataframe/sql_interfaces.py +220 -220
  939. teradataml/dataframe/vantage_function_types.py +674 -630
  940. teradataml/dataframe/window.py +693 -692
  941. teradataml/dbutils/__init__.py +3 -3
  942. teradataml/dbutils/dbutils.py +1167 -1150
  943. teradataml/dbutils/filemgr.py +267 -267
  944. teradataml/gen_ai/__init__.py +2 -2
  945. teradataml/gen_ai/convAI.py +472 -472
  946. teradataml/geospatial/__init__.py +3 -3
  947. teradataml/geospatial/geodataframe.py +1105 -1094
  948. teradataml/geospatial/geodataframecolumn.py +392 -387
  949. teradataml/geospatial/geometry_types.py +925 -925
  950. teradataml/hyperparameter_tuner/__init__.py +1 -1
  951. teradataml/hyperparameter_tuner/optimizer.py +3783 -2993
  952. teradataml/hyperparameter_tuner/utils.py +281 -187
  953. teradataml/lib/aed_0_1.dll +0 -0
  954. teradataml/lib/libaed_0_1.dylib +0 -0
  955. teradataml/lib/libaed_0_1.so +0 -0
  956. teradataml/libaed_0_1.dylib +0 -0
  957. teradataml/libaed_0_1.so +0 -0
  958. teradataml/opensource/__init__.py +1 -0
  959. teradataml/opensource/sklearn/__init__.py +1 -0
  960. teradataml/opensource/sklearn/_class.py +255 -0
  961. teradataml/opensource/sklearn/_sklearn_wrapper.py +1668 -0
  962. teradataml/opensource/sklearn/_wrapper_utils.py +268 -0
  963. teradataml/opensource/sklearn/constants.py +54 -0
  964. teradataml/options/__init__.py +121 -124
  965. teradataml/options/configure.py +337 -336
  966. teradataml/options/display.py +176 -176
  967. teradataml/plot/__init__.py +2 -2
  968. teradataml/plot/axis.py +1388 -1388
  969. teradataml/plot/constants.py +15 -15
  970. teradataml/plot/figure.py +398 -398
  971. teradataml/plot/plot.py +760 -760
  972. teradataml/plot/query_generator.py +83 -83
  973. teradataml/plot/subplot.py +216 -216
  974. teradataml/scriptmgmt/UserEnv.py +3788 -3761
  975. teradataml/scriptmgmt/__init__.py +3 -3
  976. teradataml/scriptmgmt/lls_utils.py +1616 -1604
  977. teradataml/series/series.py +532 -532
  978. teradataml/series/series_utils.py +71 -71
  979. teradataml/table_operators/Apply.py +949 -917
  980. teradataml/table_operators/Script.py +1719 -1982
  981. teradataml/table_operators/TableOperator.py +1207 -1616
  982. teradataml/table_operators/__init__.py +2 -3
  983. teradataml/table_operators/apply_query_generator.py +262 -262
  984. teradataml/table_operators/query_generator.py +507 -507
  985. teradataml/table_operators/table_operator_query_generator.py +460 -460
  986. teradataml/table_operators/table_operator_util.py +631 -639
  987. teradataml/table_operators/templates/dataframe_apply.template +184 -184
  988. teradataml/table_operators/templates/dataframe_map.template +176 -176
  989. teradataml/table_operators/templates/script_executor.template +170 -170
  990. teradataml/utils/dtypes.py +684 -684
  991. teradataml/utils/internal_buffer.py +84 -84
  992. teradataml/utils/print_versions.py +205 -205
  993. teradataml/utils/utils.py +410 -410
  994. teradataml/utils/validators.py +2239 -2115
  995. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.0.dist-info}/METADATA +270 -41
  996. teradataml-20.0.0.0.dist-info/RECORD +1038 -0
  997. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.0.dist-info}/WHEEL +1 -1
  998. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.0.dist-info}/zip-safe +1 -1
  999. teradataml/analytics/mle/AdaBoost.py +0 -651
  1000. teradataml/analytics/mle/AdaBoostPredict.py +0 -564
  1001. teradataml/analytics/mle/Antiselect.py +0 -342
  1002. teradataml/analytics/mle/Arima.py +0 -641
  1003. teradataml/analytics/mle/ArimaPredict.py +0 -477
  1004. teradataml/analytics/mle/Attribution.py +0 -1070
  1005. teradataml/analytics/mle/Betweenness.py +0 -658
  1006. teradataml/analytics/mle/Burst.py +0 -711
  1007. teradataml/analytics/mle/CCM.py +0 -600
  1008. teradataml/analytics/mle/CCMPrepare.py +0 -324
  1009. teradataml/analytics/mle/CFilter.py +0 -460
  1010. teradataml/analytics/mle/ChangePointDetection.py +0 -572
  1011. teradataml/analytics/mle/ChangePointDetectionRT.py +0 -477
  1012. teradataml/analytics/mle/Closeness.py +0 -737
  1013. teradataml/analytics/mle/ConfusionMatrix.py +0 -420
  1014. teradataml/analytics/mle/Correlation.py +0 -477
  1015. teradataml/analytics/mle/Correlation2.py +0 -573
  1016. teradataml/analytics/mle/CoxHazardRatio.py +0 -679
  1017. teradataml/analytics/mle/CoxPH.py +0 -556
  1018. teradataml/analytics/mle/CoxSurvival.py +0 -478
  1019. teradataml/analytics/mle/CumulativeMovAvg.py +0 -363
  1020. teradataml/analytics/mle/DTW.py +0 -623
  1021. teradataml/analytics/mle/DWT.py +0 -564
  1022. teradataml/analytics/mle/DWT2D.py +0 -599
  1023. teradataml/analytics/mle/DecisionForest.py +0 -716
  1024. teradataml/analytics/mle/DecisionForestEvaluator.py +0 -363
  1025. teradataml/analytics/mle/DecisionForestPredict.py +0 -561
  1026. teradataml/analytics/mle/DecisionTree.py +0 -830
  1027. teradataml/analytics/mle/DecisionTreePredict.py +0 -528
  1028. teradataml/analytics/mle/ExponentialMovAvg.py +0 -418
  1029. teradataml/analytics/mle/FMeasure.py +0 -402
  1030. teradataml/analytics/mle/FPGrowth.py +0 -734
  1031. teradataml/analytics/mle/FrequentPaths.py +0 -695
  1032. teradataml/analytics/mle/GLM.py +0 -558
  1033. teradataml/analytics/mle/GLML1L2.py +0 -547
  1034. teradataml/analytics/mle/GLML1L2Predict.py +0 -519
  1035. teradataml/analytics/mle/GLMPredict.py +0 -529
  1036. teradataml/analytics/mle/HMMDecoder.py +0 -945
  1037. teradataml/analytics/mle/HMMEvaluator.py +0 -901
  1038. teradataml/analytics/mle/HMMSupervised.py +0 -521
  1039. teradataml/analytics/mle/HMMUnsupervised.py +0 -572
  1040. teradataml/analytics/mle/Histogram.py +0 -561
  1041. teradataml/analytics/mle/IDWT.py +0 -476
  1042. teradataml/analytics/mle/IDWT2D.py +0 -493
  1043. teradataml/analytics/mle/IdentityMatch.py +0 -763
  1044. teradataml/analytics/mle/Interpolator.py +0 -918
  1045. teradataml/analytics/mle/KMeans.py +0 -485
  1046. teradataml/analytics/mle/KNN.py +0 -627
  1047. teradataml/analytics/mle/KNNRecommender.py +0 -488
  1048. teradataml/analytics/mle/KNNRecommenderPredict.py +0 -581
  1049. teradataml/analytics/mle/LAR.py +0 -439
  1050. teradataml/analytics/mle/LARPredict.py +0 -478
  1051. teradataml/analytics/mle/LDA.py +0 -548
  1052. teradataml/analytics/mle/LDAInference.py +0 -492
  1053. teradataml/analytics/mle/LDATopicSummary.py +0 -464
  1054. teradataml/analytics/mle/LevenshteinDistance.py +0 -450
  1055. teradataml/analytics/mle/LinReg.py +0 -433
  1056. teradataml/analytics/mle/LinRegPredict.py +0 -438
  1057. teradataml/analytics/mle/MinHash.py +0 -544
  1058. teradataml/analytics/mle/Modularity.py +0 -587
  1059. teradataml/analytics/mle/NEREvaluator.py +0 -410
  1060. teradataml/analytics/mle/NERExtractor.py +0 -595
  1061. teradataml/analytics/mle/NERTrainer.py +0 -458
  1062. teradataml/analytics/mle/NGrams.py +0 -570
  1063. teradataml/analytics/mle/NPath.py +0 -634
  1064. teradataml/analytics/mle/NTree.py +0 -549
  1065. teradataml/analytics/mle/NaiveBayes.py +0 -462
  1066. teradataml/analytics/mle/NaiveBayesPredict.py +0 -513
  1067. teradataml/analytics/mle/NaiveBayesTextClassifier.py +0 -607
  1068. teradataml/analytics/mle/NaiveBayesTextClassifier2.py +0 -531
  1069. teradataml/analytics/mle/NaiveBayesTextClassifierPredict.py +0 -799
  1070. teradataml/analytics/mle/NamedEntityFinder.py +0 -529
  1071. teradataml/analytics/mle/NamedEntityFinderEvaluator.py +0 -414
  1072. teradataml/analytics/mle/NamedEntityFinderTrainer.py +0 -396
  1073. teradataml/analytics/mle/POSTagger.py +0 -417
  1074. teradataml/analytics/mle/Pack.py +0 -411
  1075. teradataml/analytics/mle/PageRank.py +0 -535
  1076. teradataml/analytics/mle/PathAnalyzer.py +0 -426
  1077. teradataml/analytics/mle/PathGenerator.py +0 -367
  1078. teradataml/analytics/mle/PathStart.py +0 -464
  1079. teradataml/analytics/mle/PathSummarizer.py +0 -470
  1080. teradataml/analytics/mle/Pivot.py +0 -471
  1081. teradataml/analytics/mle/ROC.py +0 -425
  1082. teradataml/analytics/mle/RandomSample.py +0 -637
  1083. teradataml/analytics/mle/RandomWalkSample.py +0 -490
  1084. teradataml/analytics/mle/SAX.py +0 -779
  1085. teradataml/analytics/mle/SVMDense.py +0 -677
  1086. teradataml/analytics/mle/SVMDensePredict.py +0 -536
  1087. teradataml/analytics/mle/SVMDenseSummary.py +0 -437
  1088. teradataml/analytics/mle/SVMSparse.py +0 -557
  1089. teradataml/analytics/mle/SVMSparsePredict.py +0 -553
  1090. teradataml/analytics/mle/SVMSparseSummary.py +0 -435
  1091. teradataml/analytics/mle/Sampling.py +0 -549
  1092. teradataml/analytics/mle/Scale.py +0 -565
  1093. teradataml/analytics/mle/ScaleByPartition.py +0 -496
  1094. teradataml/analytics/mle/ScaleMap.py +0 -378
  1095. teradataml/analytics/mle/ScaleSummary.py +0 -320
  1096. teradataml/analytics/mle/SentenceExtractor.py +0 -363
  1097. teradataml/analytics/mle/SentimentEvaluator.py +0 -432
  1098. teradataml/analytics/mle/SentimentExtractor.py +0 -578
  1099. teradataml/analytics/mle/SentimentTrainer.py +0 -405
  1100. teradataml/analytics/mle/SeriesSplitter.py +0 -641
  1101. teradataml/analytics/mle/Sessionize.py +0 -475
  1102. teradataml/analytics/mle/SimpleMovAvg.py +0 -397
  1103. teradataml/analytics/mle/StringSimilarity.py +0 -425
  1104. teradataml/analytics/mle/TF.py +0 -389
  1105. teradataml/analytics/mle/TFIDF.py +0 -504
  1106. teradataml/analytics/mle/TextChunker.py +0 -414
  1107. teradataml/analytics/mle/TextClassifier.py +0 -399
  1108. teradataml/analytics/mle/TextClassifierEvaluator.py +0 -413
  1109. teradataml/analytics/mle/TextClassifierTrainer.py +0 -565
  1110. teradataml/analytics/mle/TextMorph.py +0 -494
  1111. teradataml/analytics/mle/TextParser.py +0 -623
  1112. teradataml/analytics/mle/TextTagger.py +0 -530
  1113. teradataml/analytics/mle/TextTokenizer.py +0 -502
  1114. teradataml/analytics/mle/UnivariateStatistics.py +0 -488
  1115. teradataml/analytics/mle/Unpack.py +0 -526
  1116. teradataml/analytics/mle/Unpivot.py +0 -438
  1117. teradataml/analytics/mle/VarMax.py +0 -776
  1118. teradataml/analytics/mle/VectorDistance.py +0 -762
  1119. teradataml/analytics/mle/WeightedMovAvg.py +0 -400
  1120. teradataml/analytics/mle/XGBoost.py +0 -842
  1121. teradataml/analytics/mle/XGBoostPredict.py +0 -627
  1122. teradataml/analytics/mle/__init__.py +0 -123
  1123. teradataml/analytics/mle/json/adaboost_mle.json +0 -135
  1124. teradataml/analytics/mle/json/adaboostpredict_mle.json +0 -85
  1125. teradataml/analytics/mle/json/antiselect_mle.json +0 -34
  1126. teradataml/analytics/mle/json/antiselect_mle_mle.json +0 -34
  1127. teradataml/analytics/mle/json/arima_mle.json +0 -172
  1128. teradataml/analytics/mle/json/arimapredict_mle.json +0 -52
  1129. teradataml/analytics/mle/json/attribution_mle_mle.json +0 -143
  1130. teradataml/analytics/mle/json/betweenness_mle.json +0 -97
  1131. teradataml/analytics/mle/json/burst_mle.json +0 -140
  1132. teradataml/analytics/mle/json/ccm_mle.json +0 -124
  1133. teradataml/analytics/mle/json/ccmprepare_mle.json +0 -14
  1134. teradataml/analytics/mle/json/cfilter_mle.json +0 -93
  1135. teradataml/analytics/mle/json/changepointdetection_mle.json +0 -92
  1136. teradataml/analytics/mle/json/changepointdetectionrt_mle.json +0 -78
  1137. teradataml/analytics/mle/json/closeness_mle.json +0 -104
  1138. teradataml/analytics/mle/json/confusionmatrix_mle.json +0 -79
  1139. teradataml/analytics/mle/json/correlation_mle.json +0 -86
  1140. teradataml/analytics/mle/json/correlationreduce_mle.json +0 -49
  1141. teradataml/analytics/mle/json/coxhazardratio_mle.json +0 -89
  1142. teradataml/analytics/mle/json/coxph_mle.json +0 -98
  1143. teradataml/analytics/mle/json/coxsurvival_mle.json +0 -79
  1144. teradataml/analytics/mle/json/cumulativemovavg_mle.json +0 -34
  1145. teradataml/analytics/mle/json/decisionforest_mle.json +0 -167
  1146. teradataml/analytics/mle/json/decisionforestevaluator_mle.json +0 -33
  1147. teradataml/analytics/mle/json/decisionforestpredict_mle_mle.json +0 -74
  1148. teradataml/analytics/mle/json/decisiontree_mle.json +0 -194
  1149. teradataml/analytics/mle/json/decisiontreepredict_mle_mle.json +0 -86
  1150. teradataml/analytics/mle/json/dtw_mle.json +0 -97
  1151. teradataml/analytics/mle/json/dwt2d_mle.json +0 -116
  1152. teradataml/analytics/mle/json/dwt_mle.json +0 -101
  1153. teradataml/analytics/mle/json/exponentialmovavg_mle.json +0 -55
  1154. teradataml/analytics/mle/json/fmeasure_mle.json +0 -58
  1155. teradataml/analytics/mle/json/fpgrowth_mle.json +0 -159
  1156. teradataml/analytics/mle/json/frequentpaths_mle.json +0 -129
  1157. teradataml/analytics/mle/json/glm_mle.json +0 -111
  1158. teradataml/analytics/mle/json/glml1l2_mle.json +0 -106
  1159. teradataml/analytics/mle/json/glml1l2predict_mle.json +0 -57
  1160. teradataml/analytics/mle/json/glmpredict_mle_mle.json +0 -74
  1161. teradataml/analytics/mle/json/histogram_mle.json +0 -100
  1162. teradataml/analytics/mle/json/hmmdecoder_mle.json +0 -192
  1163. teradataml/analytics/mle/json/hmmevaluator_mle.json +0 -206
  1164. teradataml/analytics/mle/json/hmmsupervised_mle.json +0 -91
  1165. teradataml/analytics/mle/json/hmmunsupervised_mle.json +0 -114
  1166. teradataml/analytics/mle/json/identitymatch_mle.json +0 -88
  1167. teradataml/analytics/mle/json/idwt2d_mle.json +0 -73
  1168. teradataml/analytics/mle/json/idwt_mle.json +0 -66
  1169. teradataml/analytics/mle/json/interpolator_mle.json +0 -151
  1170. teradataml/analytics/mle/json/kmeans_mle.json +0 -97
  1171. teradataml/analytics/mle/json/knn_mle.json +0 -141
  1172. teradataml/analytics/mle/json/knnrecommender_mle.json +0 -111
  1173. teradataml/analytics/mle/json/knnrecommenderpredict_mle.json +0 -75
  1174. teradataml/analytics/mle/json/lar_mle.json +0 -78
  1175. teradataml/analytics/mle/json/larpredict_mle.json +0 -69
  1176. teradataml/analytics/mle/json/lda_mle.json +0 -130
  1177. teradataml/analytics/mle/json/ldainference_mle.json +0 -78
  1178. teradataml/analytics/mle/json/ldatopicsummary_mle.json +0 -64
  1179. teradataml/analytics/mle/json/levenshteindistance_mle.json +0 -92
  1180. teradataml/analytics/mle/json/linreg_mle.json +0 -42
  1181. teradataml/analytics/mle/json/linregpredict_mle.json +0 -56
  1182. teradataml/analytics/mle/json/minhash_mle.json +0 -113
  1183. teradataml/analytics/mle/json/modularity_mle.json +0 -91
  1184. teradataml/analytics/mle/json/naivebayespredict_mle_mle.json +0 -85
  1185. teradataml/analytics/mle/json/naivebayesreduce_mle.json +0 -52
  1186. teradataml/analytics/mle/json/naivebayestextclassifierpredict_mle_mle.json +0 -147
  1187. teradataml/analytics/mle/json/naivebayestextclassifiertrainer2_mle.json +0 -108
  1188. teradataml/analytics/mle/json/naivebayestextclassifiertrainer_mle.json +0 -102
  1189. teradataml/analytics/mle/json/namedentityfinder_mle.json +0 -84
  1190. teradataml/analytics/mle/json/namedentityfinderevaluatorreduce_mle.json +0 -43
  1191. teradataml/analytics/mle/json/namedentityfindertrainer_mle.json +0 -64
  1192. teradataml/analytics/mle/json/nerevaluator_mle.json +0 -54
  1193. teradataml/analytics/mle/json/nerextractor_mle.json +0 -87
  1194. teradataml/analytics/mle/json/nertrainer_mle.json +0 -89
  1195. teradataml/analytics/mle/json/ngrams_mle.json +0 -137
  1196. teradataml/analytics/mle/json/ngramsplitter_mle_mle.json +0 -137
  1197. teradataml/analytics/mle/json/npath@coprocessor_mle.json +0 -73
  1198. teradataml/analytics/mle/json/ntree@coprocessor_mle.json +0 -123
  1199. teradataml/analytics/mle/json/pack_mle.json +0 -58
  1200. teradataml/analytics/mle/json/pack_mle_mle.json +0 -58
  1201. teradataml/analytics/mle/json/pagerank_mle.json +0 -81
  1202. teradataml/analytics/mle/json/pathanalyzer_mle.json +0 -63
  1203. teradataml/analytics/mle/json/pathgenerator_mle.json +0 -40
  1204. teradataml/analytics/mle/json/pathstart_mle.json +0 -62
  1205. teradataml/analytics/mle/json/pathsummarizer_mle.json +0 -72
  1206. teradataml/analytics/mle/json/pivoting_mle.json +0 -71
  1207. teradataml/analytics/mle/json/postagger_mle.json +0 -51
  1208. teradataml/analytics/mle/json/randomsample_mle.json +0 -131
  1209. teradataml/analytics/mle/json/randomwalksample_mle.json +0 -85
  1210. teradataml/analytics/mle/json/roc_mle.json +0 -73
  1211. teradataml/analytics/mle/json/sampling_mle.json +0 -75
  1212. teradataml/analytics/mle/json/sax_mle.json +0 -154
  1213. teradataml/analytics/mle/json/scale_mle.json +0 -93
  1214. teradataml/analytics/mle/json/scalebypartition_mle.json +0 -89
  1215. teradataml/analytics/mle/json/scalemap_mle.json +0 -44
  1216. teradataml/analytics/mle/json/scalesummary_mle.json +0 -14
  1217. teradataml/analytics/mle/json/sentenceextractor_mle.json +0 -41
  1218. teradataml/analytics/mle/json/sentimentevaluator_mle.json +0 -43
  1219. teradataml/analytics/mle/json/sentimentextractor_mle.json +0 -100
  1220. teradataml/analytics/mle/json/sentimenttrainer_mle.json +0 -68
  1221. teradataml/analytics/mle/json/seriessplitter_mle.json +0 -133
  1222. teradataml/analytics/mle/json/sessionize_mle_mle.json +0 -62
  1223. teradataml/analytics/mle/json/simplemovavg_mle.json +0 -48
  1224. teradataml/analytics/mle/json/stringsimilarity_mle.json +0 -50
  1225. teradataml/analytics/mle/json/stringsimilarity_mle_mle.json +0 -50
  1226. teradataml/analytics/mle/json/svmdense_mle.json +0 -165
  1227. teradataml/analytics/mle/json/svmdensepredict_mle.json +0 -95
  1228. teradataml/analytics/mle/json/svmdensesummary_mle.json +0 -58
  1229. teradataml/analytics/mle/json/svmsparse_mle.json +0 -148
  1230. teradataml/analytics/mle/json/svmsparsepredict_mle_mle.json +0 -103
  1231. teradataml/analytics/mle/json/svmsparsesummary_mle.json +0 -57
  1232. teradataml/analytics/mle/json/textchunker_mle.json +0 -40
  1233. teradataml/analytics/mle/json/textclassifier_mle.json +0 -51
  1234. teradataml/analytics/mle/json/textclassifierevaluator_mle.json +0 -43
  1235. teradataml/analytics/mle/json/textclassifiertrainer_mle.json +0 -103
  1236. teradataml/analytics/mle/json/textmorph_mle.json +0 -63
  1237. teradataml/analytics/mle/json/textparser_mle.json +0 -166
  1238. teradataml/analytics/mle/json/texttagger_mle.json +0 -81
  1239. teradataml/analytics/mle/json/texttokenizer_mle.json +0 -91
  1240. teradataml/analytics/mle/json/tf_mle.json +0 -33
  1241. teradataml/analytics/mle/json/tfidf_mle.json +0 -34
  1242. teradataml/analytics/mle/json/univariatestatistics_mle.json +0 -81
  1243. teradataml/analytics/mle/json/unpack_mle.json +0 -91
  1244. teradataml/analytics/mle/json/unpack_mle_mle.json +0 -91
  1245. teradataml/analytics/mle/json/unpivoting_mle.json +0 -63
  1246. teradataml/analytics/mle/json/varmax_mle.json +0 -176
  1247. teradataml/analytics/mle/json/vectordistance_mle.json +0 -179
  1248. teradataml/analytics/mle/json/weightedmovavg_mle.json +0 -48
  1249. teradataml/analytics/mle/json/xgboost_mle.json +0 -178
  1250. teradataml/analytics/mle/json/xgboostpredict_mle.json +0 -104
  1251. teradataml/analytics/sqle/Antiselect.py +0 -321
  1252. teradataml/analytics/sqle/Attribution.py +0 -603
  1253. teradataml/analytics/sqle/DecisionForestPredict.py +0 -408
  1254. teradataml/analytics/sqle/GLMPredict.py +0 -430
  1255. teradataml/analytics/sqle/MovingAverage.py +0 -543
  1256. teradataml/analytics/sqle/NGramSplitter.py +0 -548
  1257. teradataml/analytics/sqle/NPath.py +0 -632
  1258. teradataml/analytics/sqle/NaiveBayesTextClassifierPredict.py +0 -515
  1259. teradataml/analytics/sqle/Pack.py +0 -388
  1260. teradataml/analytics/sqle/SVMSparsePredict.py +0 -464
  1261. teradataml/analytics/sqle/Sessionize.py +0 -390
  1262. teradataml/analytics/sqle/StringSimilarity.py +0 -400
  1263. teradataml/analytics/sqle/Unpack.py +0 -503
  1264. teradataml/analytics/sqle/json/antiselect_sqle.json +0 -21
  1265. teradataml/analytics/sqle/json/attribution_sqle.json +0 -92
  1266. teradataml/analytics/sqle/json/decisionforestpredict_sqle.json +0 -48
  1267. teradataml/analytics/sqle/json/glmpredict_sqle.json +0 -48
  1268. teradataml/analytics/sqle/json/h2opredict_sqle.json +0 -63
  1269. teradataml/analytics/sqle/json/movingaverage_sqle.json +0 -58
  1270. teradataml/analytics/sqle/json/naivebayestextclassifierpredict_sqle.json +0 -76
  1271. teradataml/analytics/sqle/json/ngramsplitter_sqle.json +0 -126
  1272. teradataml/analytics/sqle/json/npath_sqle.json +0 -67
  1273. teradataml/analytics/sqle/json/pack_sqle.json +0 -47
  1274. teradataml/analytics/sqle/json/pmmlpredict_sqle.json +0 -55
  1275. teradataml/analytics/sqle/json/sessionize_sqle.json +0 -43
  1276. teradataml/analytics/sqle/json/stringsimilarity_sqle.json +0 -39
  1277. teradataml/analytics/sqle/json/svmsparsepredict_sqle.json +0 -74
  1278. teradataml/analytics/sqle/json/unpack_sqle.json +0 -80
  1279. teradataml/catalog/model_cataloging.py +0 -980
  1280. teradataml/config/mlengine_alias_definitions_v1.0 +0 -118
  1281. teradataml/config/mlengine_alias_definitions_v1.1 +0 -127
  1282. teradataml/config/mlengine_alias_definitions_v1.3 +0 -129
  1283. teradataml/table_operators/sandbox_container_util.py +0 -643
  1284. teradataml-17.20.0.7.dist-info/RECORD +0 -1280
  1285. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.0.dist-info}/top_level.txt +0 -0
teradataml/table_operators/TableOperator.py
@@ -1,1616 +1,1207 @@
- #!/usr/bin/python
- # ##################################################################
- #
- # Copyright 2020 Teradata. All rights reserved.
- # TERADATA CONFIDENTIAL AND TRADE SECRET
- #
- # Primary Owner: Trupti Purohit (trupti.purohit@teradata.com)
- # Secondary Owner: Gouri Patwardhan (gouri.patwardhan@teradata.com)
- #
- # Function Version: 1.0
- #
- # Description: Base class for Teradata's Table Operators
- # ##################################################################
-
- import os
- import tarfile
- import subprocess
- from pathlib import Path
- import teradataml.dataframe as tdmldf
- from teradataml.common.constants import OutputStyle, TeradataConstants
- from teradataml.common.constants import TableOperatorConstants
- from teradataml.common.garbagecollector import GarbageCollector
- from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
- from teradataml.common.utils import UtilFuncs
- from teradataml.dataframe.dataframe_utils import DataFrameUtils as df_utils
-
- from teradataml.common.exceptions import TeradataMlException
- from teradataml.common.messages import Messages
- from teradataml.common.messagecodes import MessageCodes
- from teradataml.options.configure import configure
- from teradataml.utils.utils import execute_sql
- from teradataml.utils.validators import _Validators
- from teradatasqlalchemy import (BYTEINT, SMALLINT, INTEGER, BIGINT, DECIMAL, FLOAT, NUMBER)
- from teradatasqlalchemy import (TIMESTAMP, DATE, TIME)
- from teradatasqlalchemy import (CHAR, VARCHAR, CLOB)
- from teradatasqlalchemy import (BYTE, VARBYTE, BLOB)
- from teradatasqlalchemy import (PERIOD_DATE, PERIOD_TIME, PERIOD_TIMESTAMP)
- from teradatasqlalchemy import (INTERVAL_YEAR, INTERVAL_YEAR_TO_MONTH, INTERVAL_MONTH, INTERVAL_DAY,
-                                 INTERVAL_DAY_TO_HOUR, INTERVAL_DAY_TO_MINUTE, INTERVAL_DAY_TO_SECOND,
-                                 INTERVAL_HOUR, INTERVAL_HOUR_TO_MINUTE, INTERVAL_HOUR_TO_SECOND,
-                                 INTERVAL_MINUTE, INTERVAL_MINUTE_TO_SECOND, INTERVAL_SECOND)
- from teradataml.context.context import _get_current_databasename, get_context, get_connection
- from io import StringIO
-
-
- class TableOperator:
-
-     def __init__(self,
-                  data=None,
-                  script_name=None,
-                  files_local_path=None,
-                  delimiter="\t",
-                  returns=None,
-                  quotechar=None,
-                  data_partition_column=None,
-                  data_hash_column=None,
-                  data_order_column=None,
-                  is_local_order=False,
-                  sort_ascending=True,
-                  nulls_first=True):
-         """
-         DESCRIPTION:
-             Table Operators are a type of User-Defined Function, only available when
-             connected to Vantage.
-
-         PARAMETERS:
-             data:
-                 Optional Argument.
-                 Specifies a teradataml DataFrame containing the input data for the script.
-
-             script_name:
-                 Required Argument.
-                 Specifies the name of the user script.
-                 Types: str
-
-             files_local_path:
-                 Required Argument.
-                 Specifies the absolute local path where the user script and all supporting files,
-                 like model files and input data files, reside.
-                 Types: str
-
-             delimiter:
-                 Optional Argument.
-                 Specifies a delimiter to use when reading columns from a row and
-                 writing result columns.
-                 The delimiter is a single character chosen from the set of punctuation characters.
-                 Types: str
-
-             returns:
-                 Required Argument.
-                 Specifies the output column definition.
-                 Types: Dictionary specifying column name to teradatasqlalchemy type mapping.
-                 Default: None
-
-             data_hash_column:
-                 Optional Argument.
-                 Specifies the column to be used for hashing.
-                 The rows in the data are redistributed to AMPs based on the hash value of the
-                 column specified. The user-installed script file then runs once on each AMP.
-                 If there is no data_hash_column, then the entire result set,
-                 delivered by the function, constitutes a single group or partition.
-                 Types: str
-                 Note:
-                     "data_hash_column" can not be specified along with "data_partition_column",
-                     "is_local_order" and "data_order_column".
-
-             data_partition_column:
-                 Optional Argument.
-                 Specifies Partition By columns for data.
-                 Values to this argument can be provided as a list, if multiple
-                 columns are used for partition.
-                 Default Value: ANY
-                 Types: str OR list of Strings (str)
-                 Notes:
-                     1) "data_partition_column" can not be specified along with "data_hash_column".
-                     2) "data_partition_column" can not be specified along with "is_local_order = True".
-
-             is_local_order:
-                 Optional Argument.
-                 Specifies a boolean value to determine whether the input data is to be ordered locally
-                 or not. 'sort_ascending' specifies the order in which the values in a group, or partition,
-                 are sorted. This argument is ignored, if data_order_column is None.
-                 When set to 'True', qualified rows are ordered locally in preparation to be input
-                 to the function.
-                 Default Value: False
-                 Types: bool
-                 Note:
-                     "is_local_order" can not be specified along with "data_hash_column".
-                     When "is_local_order" is set to 'True', "data_order_column" should be specified,
-                     and the columns specified in "data_order_column" are used for local ordering.
-
-             data_order_column:
-                 Optional Argument.
-                 Specifies Order By columns for data.
-                 Values to this argument can be provided as a list, if multiple
-                 columns are used for ordering.
-                 This argument is used in both cases: "is_local_order = True"
-                 and "is_local_order = False".
-                 Types: str OR list of Strings (str)
-                 Note:
-                     "data_order_column" can not be specified along with "data_hash_column".
-
-             sort_ascending:
-                 Optional Argument.
-                 Specifies a boolean value to determine if the input data is to be sorted on
-                 the data_order_column column in ascending or descending order.
-                 When this is set to 'True' data is sorted in ascending order,
-                 otherwise data is sorted in descending order.
-                 This argument is ignored, if data_order_column is None.
-                 Default Value: True
-                 Types: bool
-
-             nulls_first:
-                 Optional Argument.
-                 Specifies a boolean value to determine whether NULLS from input data are listed
-                 first or last during ordering.
-                 When this is set to 'True' NULLS are listed first, otherwise NULLS are listed last.
-                 This argument is ignored, if data_order_column is None.
-                 Default Value: True
-                 Types: bool
-
-         RETURNS:
-             An instance of TableOperator class.
-
-         RAISES:
-             TeradataMlException
-
-         EXAMPLES:
-             # Apply class extends this base class.
-             apply_obj = Apply(data=barrierdf,
-                               script_name='mapper.py',
-                               files_local_path='/root/data/scripts/',
-                               apply_command='python3 mapper.py',
-                               data_order_column="Id",
-                               is_local_order=False,
-                               nulls_first=False,
-                               sort_ascending=False,
-                               env_name="test_env",
-                               returns={"word": VARCHAR(15), "count_input": VARCHAR(2)},
-                               style='csv',
-                               delimiter=',')
-         """
-         self.result = None
-         self._tblop_query = None
-         self.data = data
-         self.script_name = script_name
-         self.files_local_path = files_local_path
-         self.delimiter = delimiter
-         self.quotechar = quotechar
-         self.returns = returns
-         self.data_partition_column = data_partition_column
-         self.data_hash_column = data_hash_column
-         self.data_order_column = data_order_column
-         self.is_local_order = is_local_order
-         self.sort_ascending = sort_ascending
-         self.nulls_first = nulls_first
-
-         # Datatypes supported in returns clause of a table operator.
-         self._supported_returns_datatypes = (BYTEINT, SMALLINT, INTEGER, BIGINT, DECIMAL, FLOAT, NUMBER,
-                                              TIMESTAMP, DATE, TIME, CHAR, VARCHAR, CLOB, BYTE, VARBYTE,
-                                              BLOB, PERIOD_DATE, PERIOD_TIME, PERIOD_TIMESTAMP, INTERVAL_YEAR,
-                                              INTERVAL_YEAR_TO_MONTH, INTERVAL_MONTH, INTERVAL_DAY, INTERVAL_DAY_TO_HOUR,
-                                              INTERVAL_DAY_TO_MINUTE, INTERVAL_DAY_TO_SECOND, INTERVAL_HOUR,
-                                              INTERVAL_HOUR_TO_MINUTE, INTERVAL_HOUR_TO_SECOND, INTERVAL_MINUTE,
-                                              INTERVAL_MINUTE_TO_SECOND, INTERVAL_SECOND
-                                              )
-
-         # Create AnalyticsWrapperUtils instance which contains validation functions.
-         # This is required for is_default_or_not check.
-         # All remaining validation is done using _Validators.
-         self.__awu = AnalyticsWrapperUtils()
-
-         self.awu_matrix = []
-         self.awu_matrix.append(["data", self.data, True, (tdmldf.dataframe.DataFrame)])
-         self.awu_matrix.append(["data_partition_column", self.data_partition_column, True, (str, list), True])
-         self.awu_matrix.append(["data_hash_column", self.data_hash_column, True, (str, list), True])
-         self.awu_matrix.append(["data_order_column", self.data_order_column, True, (str, list), True])
-         self.awu_matrix.append(["is_local_order", self.is_local_order, True, (bool)])
-         self.awu_matrix.append(["sort_ascending", self.sort_ascending, True, (bool)])
-         self.awu_matrix.append(["nulls_first", self.nulls_first, True, (bool)])
-         self.awu_matrix.append(["script_name", self.script_name, True, (str), True])
-         self.awu_matrix.append(["files_local_path", self.files_local_path, True, (str), True])
-         self.awu_matrix.append(["delimiter", self.delimiter, True, (str), False])
-         self.awu_matrix.append(["quotechar", self.quotechar, True, (str), False])
-
-         # Perform the function validations.
-         self._validate()
-
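In practice this base class is exercised through its subclasses (Script, Apply); the sketch below instantiates it directly only to illustrate the argument rules it enforces, assuming a connected teradataml session and the barrier demo table used in the docstrings (variable names are hypothetical):

>>> from teradataml import DataFrame
>>> from teradatasqlalchemy import VARCHAR
>>> from teradataml.table_operators.TableOperator import TableOperator
>>> barrierdf = DataFrame.from_table("barrier")
>>> # "data_partition_column" excludes "data_hash_column" and "is_local_order=True",
>>> # so only the partition column is supplied here.
>>> op = TableOperator(data=barrierdf,
...                    script_name='mapper.py',
...                    files_local_path='/root/data/scripts/',
...                    data_partition_column='Id',
...                    returns={"word": VARCHAR(15), "count_input": VARCHAR(2)},
...                    delimiter=',')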
-     def _validate(self, for_data_args=False):
-         """
-         Function to validate Table Operator Function arguments, which verifies missing
-         arguments, input argument and table types. Also processes the
-         argument values.
-         @param: for_data_args: Specifies whether the validation is for only arguments related to data or not.
-                 When set to True, validation is only for data arguments. Otherwise, validation
-                 is for all arguments. By default, system validates all the arguments.
-         """
-
-         if not for_data_args:
-             # Make sure that a non-NULL value has been supplied for all mandatory arguments
-             _Validators._validate_missing_required_arguments(self.awu_matrix)
-
-             # Validate argument types
-             _Validators._validate_function_arguments(self.awu_matrix,
-                                                      skip_empty_check={"quotechar": ["\n", "\t"],
-                                                                        "delimiter": ["\n"]})
-
-         if self.data is not None:
-             # Hash and order by can be used together as long as is_local_order = True.
-             if all([self.data_hash_column,
-                     self.data_order_column]) and not self.is_local_order:
-                 raise TeradataMlException(
-                     Messages.get_message(MessageCodes.CANNOT_USE_TOGETHER_WITH,
-                                          "data_hash_column' and 'data_order_column",
-                                          "is_local_order=False"),
-                     MessageCodes.CANNOT_USE_TOGETHER_WITH)
-
-             # Either hash or partition can be used.
-             if all([self.data_hash_column, self.data_partition_column]):
-                 raise TeradataMlException(Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
-                                                                "data_hash_column", "data_partition_column"),
-                                           MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
-
-             # Either local order by or partition by can be used.
-             if all([self.is_local_order, self.data_partition_column]):
-                 raise TeradataMlException(Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
-                                                                "is_local_order=True",
-                                                                "data_partition_column"),
-                                           MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
-
-             # local order by requires column name.
-             if self.is_local_order and self.data_order_column is None:
-                 raise TeradataMlException(Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING,
-                                                                "data_order_column",
-                                                                "is_local_order=True"),
-                                           MessageCodes.DEPENDENT_ARG_MISSING)
-
-             if self.__awu._is_default_or_not(self.data_partition_column, "ANY"):
-                 _Validators._validate_dataframe_has_argument_columns(self.data_partition_column, "data_partition_column",
-                                                                      self.data, "data", True)
-
-             _Validators._validate_dataframe_has_argument_columns(self.data_order_column, "data_order_column",
-                                                                  self.data, "data", False)
-
-             _Validators._validate_dataframe_has_argument_columns(self.data_hash_column, "data_hash_column",
-                                                                  self.data, "data", False)
-
-         if not for_data_args:
-             # Check for length of the arguments "delimiter" and "quotechar".
-             if self.delimiter is not None:
-                 _Validators._validate_str_arg_length('delimiter', self.delimiter, 'EQ', 1)
-
-             if self.quotechar is not None:
-                 _Validators._validate_str_arg_length('quotechar', self.quotechar, 'EQ', 1)
-
-             # The arguments 'quotechar' and 'delimiter' cannot take newline character.
-             if self.delimiter == '\n':
-                 raise TeradataMlException(Messages.get_message(MessageCodes.NOT_ALLOWED_VALUES,
-                                                                "\n", "delimiter"),
-                                           MessageCodes.NOT_ALLOWED_VALUES)
-             if self.quotechar == '\n':
-                 raise TeradataMlException(Messages.get_message(MessageCodes.NOT_ALLOWED_VALUES,
-                                                                "\n", "quotechar"),
-                                           MessageCodes.NOT_ALLOWED_VALUES)
-
-             # The arguments 'quotechar' and 'delimiter' cannot have the same value.
-             if self.delimiter == self.quotechar:
-                 raise TeradataMlException(Messages.get_message(MessageCodes.ARGUMENT_VALUE_SAME,
-                                                                "delimiter", "quotechar"),
-                                           MessageCodes.ARGUMENT_VALUE_SAME)
-
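Stripped of the framework plumbing, the data-related checks above reduce to three mutual exclusions plus one dependency. A condensed, framework-free restatement (a hypothetical helper, not part of the package):

>>> def check_data_args(hash_col, partition_col, order_col, is_local_order):
...     # HASH BY may combine with ORDER BY only when ordering is local.
...     assert not (hash_col and order_col and not is_local_order)
...     # HASH BY and PARTITION BY are mutually exclusive.
...     assert not (hash_col and partition_col)
...     # LOCAL ORDER BY and PARTITION BY are mutually exclusive.
...     assert not (is_local_order and partition_col)
...     # LOCAL ORDER BY requires an ordering column.
...     assert not (is_local_order and not order_col)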
-     def set_data(self,
-                  data,
-                  data_partition_column=None,
-                  data_hash_column=None,
-                  data_order_column=None,
-                  is_local_order=False,
-                  sort_ascending=True,
-                  nulls_first=True):
-         """
-         DESCRIPTION:
-             Function enables user to set data and data related arguments without having to
-             re-create Script object.
-
-         PARAMETERS:
-             data:
-                 Required Argument.
-                 Specifies a teradataml DataFrame containing the input data for the script.
-
-             data_hash_column:
-                 Optional Argument.
-                 Specifies the column to be used for hashing.
-                 The rows in the data are redistributed to AMPs based on the
-                 hash value of the column specified.
-                 The user installed script then runs once on each AMP.
-                 If there is no data_partition_column, then the entire result set delivered
-                 by the function, constitutes a single group or partition.
-                 Types: str
-                 Note:
-                     "data_hash_column" can not be specified along with
-                     "data_partition_column", "is_local_order" and "data_order_column".
-
-             data_partition_column:
-                 Optional Argument.
-                 Specifies Partition By columns for data.
-                 Values to this argument can be provided as a list, if multiple
-                 columns are used for partition.
-                 Default Value: ANY
-                 Types: str OR list of Strings (str)
-                 Note:
-                     1) "data_partition_column" can not be specified along with
-                        "data_hash_column".
-                     2) "data_partition_column" can not be specified along with
-                        "is_local_order = True".
-
-             is_local_order:
-                 Optional Argument.
-                 Specifies a boolean value to determine whether the input data is to be
-                 ordered locally or not. Order by specifies the order in which the
-                 values in a group or partition are sorted. Local Order By
-                 orders qualified rows on each AMP in preparation to be input to a table
-                 function. This argument is ignored, if "data_order_column" is None. When
-                 set to True, data is ordered locally.
-                 Default Value: False
-                 Types: bool
-                 Note:
-                     1) "is_local_order" can not be specified along with
-                        "data_hash_column".
-                     2) When "is_local_order" is set to True, "data_order_column" should be
-                        specified, and the columns specified in "data_order_column" are
-                        used for local ordering.
-
-             data_order_column:
-                 Optional Argument.
-                 Specifies Order By columns for data.
-                 Values to this argument can be provided as a list, if multiple
-                 columns are used for ordering.
-                 This argument is used in both cases:
-                 "is_local_order = True" and "is_local_order = False".
-                 Types: str OR list of Strings (str)
-                 Note:
-                     "data_order_column" can not be specified along with
-                     "data_hash_column".
-
-             sort_ascending:
-                 Optional Argument.
-                 Specifies a boolean value to determine if the result set is to be sorted
-                 on the column specified in "data_order_column", in ascending or descending
-                 order.
-                 The sorting is ascending when this argument is set to True, and descending
-                 when set to False.
-                 This argument is ignored, if "data_order_column" is None.
-                 Default Value: True
-                 Types: bool
-
-             nulls_first:
-                 Optional Argument.
-                 Specifies a boolean value to determine whether NULLS are listed first or
-                 last during ordering.
-                 This argument is ignored, if "data_order_column" is None.
-                 NULLS are listed first when this argument is set to True, and
-                 last when set to False.
-                 Default Value: True
-                 Types: bool
-
-         RETURNS:
-             None.
-
-         RAISES:
-             TeradataMlException
-
-         EXAMPLES:
-             >>> self.set_data(df)
-         """
-
-         awu_matrix_setter = []
-         awu_matrix_setter.append(["data", data, True, (tdmldf.dataframe.DataFrame)])
-         awu_matrix_setter.append(["data_partition_column", data_partition_column,
-                                   True, (str, list), True])
-         awu_matrix_setter.append(["data_hash_column", data_hash_column, True,
-                                   (str, list), True])
-         awu_matrix_setter.append(["data_order_column", data_order_column, True,
-                                   (str, list), True])
-         awu_matrix_setter.append(["is_local_order", is_local_order, True, (bool)])
-         awu_matrix_setter.append(["sort_ascending", sort_ascending, True, (bool)])
-         awu_matrix_setter.append(["nulls_first", nulls_first, True, (bool)])
-
-         # Perform the function validations
-         _Validators._validate_missing_required_arguments([["data", data, False,
-                                                            (tdmldf.dataframe.DataFrame)]])
-         _Validators._validate_function_arguments(awu_matrix_setter)
-
-         self.data = data
-         self.data_partition_column = data_partition_column
-         self.data_hash_column = data_hash_column
-         self.data_order_column = data_order_column
-         self.is_local_order = is_local_order
-         self.sort_ascending = sort_ascending
-         self.nulls_first = nulls_first
-
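set_data() re-points an existing operator at new input without rebuilding it, under the same hash/partition/order constraints documented above. A sketch, assuming op from the earlier example and a second teradataml DataFrame new_df (both hypothetical):

>>> op.set_data(data=new_df,
...             data_order_column="Id",
...             is_local_order=True,   # local ordering requires data_order_column
...             sort_ascending=False,
...             nulls_first=False)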
-     def _execute(self, output_style='VIEW'):
-         """
-         Function to execute Table Operator queries.
-         Create DataFrames for the required Table Operator output.
-         """
-         table_type = TeradataConstants.TERADATA_VIEW
-         if output_style == OutputStyle.OUTPUT_TABLE.value:
-             table_type = TeradataConstants.TERADATA_TABLE
-
-         # Generate STDOUT table name and add it to the output table list.
-         tblop_stdout_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_tblop_out_",
-                                                                           use_default_database=True, gc_on_quit=True,
-                                                                           quote=False,
-                                                                           table_type=table_type
-                                                                           )
-
-         try:
-             if output_style == OutputStyle.OUTPUT_TABLE.value:
-                 UtilFuncs._create_table(tblop_stdout_temp_tablename, self._tblop_query)
-             else:
-                 UtilFuncs._create_view(tblop_stdout_temp_tablename, self._tblop_query)
-         except Exception as emsg:
-             raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_EXEC_SQL_FAILED, str(emsg)),
-                                       MessageCodes.TDMLDF_EXEC_SQL_FAILED)
-
-
-         self.result = self.__awu._create_data_set_object(
-             df_input=UtilFuncs._extract_table_name(tblop_stdout_temp_tablename), source_type="table",
-             database_name=UtilFuncs._extract_db_name(tblop_stdout_temp_tablename))
-
-         return self.result
-
-     def _returns_clause_validation(self):
-         """
-         DESCRIPTION:
-             Function validates 'returns' clause for a table operator query.
-
-         PARAMETERS:
-             None.
-
-         RETURNS:
-             None
-
-         RAISES:
-             Error if argument is not of valid datatype.
-
-         EXAMPLES:
-             self._returns_clause_validation()
-         """
-         # Validate keys and datatypes in returns.
-         if self.returns is not None:
-             awu_matrix_returns = []
-             for key in self.returns.keys():
-                 awu_matrix_returns.append(["keys in returns", key, False, (str), True])
-                 awu_matrix_returns.append(["values in returns", self.returns[key], False, self._supported_returns_datatypes])
-             _Validators._validate_function_arguments(awu_matrix_returns)
-
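The returns definition this check guards is a mapping from output column names to instances of the types listed in _supported_returns_datatypes; when column order matters, an OrderedDict can be used, as in the Script example further below:

>>> from collections import OrderedDict
>>> from teradatasqlalchemy import VARCHAR
>>> returns = OrderedDict([("word", VARCHAR(15)), ("count_input", VARCHAR(2))])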
-     def setup_test_env(self, docker_image_location):
-         """
-         DESCRIPTION:
-             Function enables user to load already downloaded sandbox image.
-             This will enable users to run the Python scripts on client machine outside of
-             Open Analytics Framework.
-
-         PARAMETERS:
-             docker_image_location:
-                 Required Argument.
-                 Specifies the location of image on user's system.
-                 Types: str
-                 Note:
-                     For location to download docker image refer teradataml User Guide.
-
-         RETURNS:
-             None.
-
-         RAISES:
-             TeradataMlException
-
-         EXAMPLES:
-             # Load example data.
-             load_example_data("Script", ["barrier"])
-
-             # Example - The script mapper.py reads in a line of text input ("Old Macdonald Had A Farm") from csv and
-             # splits the line into individual words, emitting a new row for each word.
-
-             # Create teradataml DataFrame objects.
-             >>> barrierdf = DataFrame.from_table("barrier")
-
-             # Create remote user environment.
-             >>> test_env = create_env('test_env', 'python_3.7.9', 'Demo environment')
-             User environment test_env created.
-
-             # Create an Apply object that allows user to execute script using Open Analytics Framework.
-             >>> apply_obj = Apply(data=barrierdf,
-                                   script_name='mapper.py',
-                                   files_local_path='data/scripts',
-                                   apply_command='python mapper.py',
-                                   delimiter=',',
-                                   env_name="test_env",
-                                   data_partition_column="Id",
-                                   returns={"word": VARCHAR(15), "count_input": VARCHAR(2)}
-                                   )
-
-             # Run user script locally within docker container and using data from csv.
-             # This helps the user to fix script level issues outside of Open Analytics Framework.
-             # Setup the environment by providing local path to docker image file.
-             >>> apply_obj.setup_test_env(docker_image_location='/tmp/sto_sandbox_docker_image.tar')
-             Loading image from /tmp/sto_sandbox_docker_image.tar. It may take a few minutes.
-             Image loaded successfully.
-         """
-         self.awu_matrix_setup = []
-         self.awu_matrix_setup.append((["docker_image_location", docker_image_location, False, (str), True]))
-
-         # Validate missing arguments
-         _Validators._validate_missing_required_arguments(self.awu_matrix_setup)
-
-         # Validate argument types
-         _Validators._validate_function_arguments(self.awu_matrix_setup)
-
-         # Get the frame object of the function.
-         import inspect
-         frame = inspect.currentframe()
-
-         # Validate presence of the 'docker' module.
-         _Validators._validate_module_presence('docker', frame.f_code.co_name)
-
-         import docker
-         # Load image from user provided location.
-         client = docker.from_env()
-         if not Path(docker_image_location).exists():
-             raise TeradataMlException(
-                 Messages.get_message(MessageCodes.INPUT_FILE_NOT_FOUND).format(docker_image_location),
-                 MessageCodes.INPUT_FILE_NOT_FOUND)
-         else:
-             try:
-                 print("Loading image from {0}. It may take a few minutes.".format(docker_image_location))
-                 with open(docker_image_location, 'rb') as f:
-                     client.images.load(f)
-                 print("Image loaded successfully.")
-             except:
-                 raise
-
-         # Set _latest_sandbox_exists to True - which indicates sandbox image for STO exists on the system.
-         configure._latest_sandbox_exists = True
-
-
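The image-load step above goes through the docker SDK; on its own it is roughly the following, using the same image path as in the docstring example:

>>> import docker
>>> client = docker.from_env()
>>> with open('/tmp/sto_sandbox_docker_image.tar', 'rb') as f:
...     client.images.load(f)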
-     def setup_sto_env(self, docker_image_location):
-         """
-         DESCRIPTION:
-             Function enables user to load already downloaded sandbox image.
-
-         PARAMETERS:
-             docker_image_location:
-                 Required Argument.
-                 Specifies the location of image on user's system.
-                 Types: str
-                 Note:
-                     For location to download docker image refer teradataml User Guide.
-
-         RETURNS:
-             None.
-
-         RAISES:
-             TeradataMlException
-
-         EXAMPLES:
-             # Note - Refer to User Guide for setting search path and required permissions.
-             # Load example data.
-             load_example_data("Script", ["barrier"])
-
-             # Example - The script mapper.py reads in a line of text input
-             # ("Old Macdonald Had A Farm") from csv and
-             # splits the line into individual words, emitting a new row for each word.
-
-             # Create teradataml DataFrame objects.
-             >>> barrierdf = DataFrame.from_table("barrier")
-
-             # Set SEARCHUIFDBPATH.
-             >>> execute_sql("SET SESSION SEARCHUIFDBPATH = alice;")
-
-             # Create a Script object that allows us to execute script on Vantage.
-             >>> import os
-             >>> td_path = os.path.dirname(teradataml.__file__)
-             >>> from teradatasqlalchemy import VARCHAR
-             >>> sto = Script(data=barrierdf,
-             ...              script_name='mapper.py',
-             ...              files_local_path=os.path.join(td_path, 'data', 'scripts'),
-             ...              script_command='python ./alice/mapper.py',
-             ...              data_order_column="Id",
-             ...              is_local_order=False,
-             ...              nulls_first=False,
-             ...              sort_ascending=False,
-             ...              charset='latin',
-             ...              returns=OrderedDict([("word", VARCHAR(15)), ("count_input", VARCHAR(2))]))
-
-             # Run user script locally within docker container and using data from csv.
-             # This helps the user to fix script level issues outside Vantage.
-             # Setup the environment by providing local path to docker image file.
-             >>> sto.setup_sto_env(docker_image_location='/tmp/sto_sandbox_docker_image.tar')
-             Loading image from /tmp/sto_sandbox_docker_image.tar. It may take a few minutes.
-             Image loaded successfully.
-             Starting a container for stosandbox:1.0 image.
-             Container d7c73cb498c79a082180576bb5b10bb07b52efdd3026856146fc15e91147b19f
-             started successfully.
-
-         """
-         self.awu_matrix_setup = []
-         self.awu_matrix_setup.append((["docker_image_location", docker_image_location,
-                                        False, (str), True]))
-
-         # Validate missing arguments.
-         _Validators._validate_missing_required_arguments(self.awu_matrix_setup)
-
-         # Validate argument types.
-         _Validators._validate_function_arguments(self.awu_matrix_setup)
-
-         from teradataml.table_operators.sandbox_container_util import setup_sandbox_env
-         setup_sandbox_env(sandbox_image_location=docker_image_location,
-                           sandbox_image_name='stosandbox:1.0')
-
-         # Set _latest_sandbox_exists to True - which indicates sandbox image for STO
-         # exists on the system.
-         from teradataml.options.configure import configure
-         configure._latest_sandbox_exists = True
-
667
- def test_script(self, supporting_files=None, input_data_file=None, script_args="",
668
- exec_mode='sandbox', **kwargs):
669
- """
670
- DESCRIPTION:
671
- Function enables user to run script in docker container environment outside
672
- Vantage.
673
- Input data for user script is read from file.
674
-
675
- PARAMETERS:
676
- supporting_files:
677
- Optional Argument
678
- Specifies a file or list of supporting files like model files to be
679
- copied to the container.
680
- Types: string or list of str
681
-
682
- input_data_file:
683
- Required Argument.
684
- Specifies the name of the input data file.
685
- It should have a path relative to the location specified in
686
- "files_local_path" argument.
687
- If set to None, read data from AMP, else from file passed in the argument
688
- 'input_data_file'.
689
- File should have at least permissions of mode 644.
690
- Types: str
691
-
692
- script_args:
693
- Optional Argument.
694
- Specifies command line arguments required by the user script.
695
- Types: str
696
-
697
- exec_mode:
698
- Optional Argument.
699
- Specifies the mode in which user wants to test the script.
700
- If set to 'sandbox', the user script will run within the sandbox
701
- environment, else it will run locally on user's system.
702
- Permitted Values: 'sandbox', 'local'
703
- Default Value: 'sandbox'
704
- Types: str
705
-
706
- kwargs:
707
- Optional Argument.
708
- Specifies the keyword arguments required for testing.
709
- Keys can be:
710
- data_row_limit:
711
- Optional Argument. Ignored when data is read from file.
712
- Specifies the number of rows to be taken from all amps when
713
- reading from a table or view on Vantage.
714
- Default Value: 1000
715
- Types: int
716
-
717
- password:
718
- Optional Argument. Required when reading from database.
719
- Specifies the password to connect to vantage where the data
720
- resides.
721
- Types: str
722
-
723
- data_file_delimiter:
724
- Optional Argument.
725
- Specifies the delimiter used in the input data file. This
726
- argument can be specified when data is read from file.
727
- Default Value: '\t'
728
- Types: str
729
-
730
- data_file_header:
731
- Optional Argument.
732
- Specifies whether the input data file contains header. This
733
- argument can be specified when data is read from file.
734
- Default Value: True
735
- Types: bool
736
-
737
- timeout:
738
- Optional Argument.
739
- Specifies the timeout for docker API calls when running in
740
- sandbox mode.
741
- Default Value: 5000
742
- Types: int
743
-
744
- data_file_quote_char:
745
- Optional Argument.
746
- Specifies the quotechar used in the input data file.
747
- This argument can be specified when data is read from file.
748
- Default Value: '"'
749
-
750
- logmech:
751
- Optional Argument.
752
- Specifies the type of logon mechanism to establish a connection to
753
- Teradata Vantage.
754
- Permitted Values: 'TD2', 'TDNEGO', 'LDAP', 'KRB5' & 'JWT'.
755
- TD2:
756
- The Teradata 2 (TD2) mechanism provides authentication
757
- using a Vantage username and password. This is the default
758
- logon mechanism using which the connection is established
759
- to Vantage.
760
-
761
- TDNEGO:
762
- A security mechanism that automatically determines the
763
- actual mechanism required, based on policy, without user's
764
- involvement. The actual mechanism is determined by the
765
- TDGSS server configuration and by the security policy's
766
- mechanism restrictions.
767
-
768
- LDAP:
769
- A directory-based user logon to Vantage with a directory
770
- username and password and is authenticated by the directory.
771
-
772
- KRB5 (Kerberos):
773
- A directory-based user logon to Vantage with a domain
774
- username and password and is authenticated by
775
- Kerberos (KRB5 mechanism).
776
- Note:
777
- User must have a valid ticket-granting ticket in
778
- order to use this logon mechanism.
779
-
780
- JWT:
781
- The JSON Web Token (JWT) authentication mechanism enables
782
- single sign-on (SSO) to the Vantage after the user
783
- successfully authenticates to Teradata UDA User Service.
784
- Note:
785
- User must use logdata parameter when using 'JWT' as
786
- the logon mechanism.
787
- Default Value: TD2
788
- Types: str
789
-
790
- Note:
791
- teradataml expects the client environments are already setup with appropriate
792
- security mechanisms and are in working conditions.
793
- For more information please refer Teradata Vantage™ - Advanced SQL Engine
794
- Security Administration at https://www.info.teradata.com/
795
-
796
- logdata:
797
- Optional Argument.
798
- Specifies parameters to the LOGMECH command beyond those needed by
799
- the logon mechanism, such as user ID, password and tokens
800
- (in case of JWT) to successfully authenticate the user.
801
- Types: str
802
-
803
- Types: dict
804
-
805
- RETURNS:
806
- Output from user script.
807
-
808
- RAISES:
809
- TeradataMlException
810
-
811
- EXAMPLES:
812
- # Assumption - sto is Script() object. Please refer to help(Script)
813
- # for creating Script object.
814
- # Run user script in sandbox mode with input from data file.
815
-
816
- >>> sto.test_script(input_data_file='../barrier.csv',
817
- ... data_file_delimiter=',',
818
- ... data_file_quote_char='"',
819
- ... data_file_header=True,
820
- ... exec_mode='sandbox')
821
-
822
- ############ STDOUT Output ############
823
- word count_input
824
- 0 1 1
825
- 1 Old 1
826
- 2 Macdonald 1
827
- 3 Had 1
828
- 4 A 1
829
- 5 Farm 1
830
- >>>
831
-
832
- # Run user script in local mode with input from table.
833
- >>> sto.test_script(data_row_limit=300, password='alice', exec_mode='local')
834
-
835
- ############ STDOUT Output ############
836
- word count_input
837
- 0 1 1
838
- 1 Old 1
839
- 2 Macdonald 1
840
- 3 Had 1
841
- 4 A 1
842
- 5 Farm 1
843
-
844
- # Run user script in sandbox mode with logmech as 'TD2'.
845
- >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="TD2")
846
-
847
- # Run user script in sandbox mode with logmech as 'TDNEGO'.
848
- >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="TDNEGO")
849
-
850
- # Run user script in sandbox mode with logmech as 'LDAP'.
851
- >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="LDAP")
852
-
853
- # Run user script in sandbox mode with logmech as 'KRB5'.
854
- >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="KRB5")
855
-
856
- # Run user script in sandbox mode with logmech as 'JWT'.
857
- >>> sto.test_script(script_args="4 5 10 6 480", password="alice",
858
- logmech='JWT', logdata='token=eyJpc...h8dA')
859
-
860
- """
861
- logmech_valid_values = ['TD2', 'TDNEGO', 'LDAP', 'KRB5', 'JWT']
862
-
863
- awu_matrix_test = []
864
- awu_matrix_test.append((["supporting_files", supporting_files, True,
865
- (str, list), True]))
866
- awu_matrix_test.append((["input_data_file", input_data_file, True, (str), True]))
867
- awu_matrix_test.append((["script_args", script_args, True, (str), False]))
868
- awu_matrix_test.append((["exec_mode", exec_mode, True, (str), True,
869
- [TableOperatorConstants.SANDBOX_EXEC.value,
870
- TableOperatorConstants.LOCAL_EXEC.value]]))
871
-
872
- data_row_limit = kwargs.pop("data_row_limit", 1000)
873
- awu_matrix_test.append((["data_row_limit", data_row_limit, True, (int), True]))
874
-
875
- data_file_delimiter = kwargs.pop("data_file_delimiter", '\t')
876
- awu_matrix_test.append((["data_file_delimiter", data_file_delimiter, True,
877
- (str), False]))
878
-
879
- data_file_quote_char = kwargs.pop("data_file_quote_char", '"')
880
- awu_matrix_test.append((["data_file_quote_char", data_file_quote_char, True,
881
- (str), False]))
882
-
883
- data_file_header = kwargs.pop("data_file_header", True)
884
- awu_matrix_test.append((["data_file_header", data_file_header, True, (bool)]))
885
-
886
- timeout = kwargs.pop("timeout", 5000)
887
- awu_matrix_test.append((["timeout", timeout, True, (int), True]))
888
-
889
- logmech = kwargs.pop("logmech", "TD2")
890
- awu_matrix_test.append(
891
- ["logmech", logmech, True, (str), True, logmech_valid_values])
892
-
893
- logdata = kwargs.pop("logdata", None)
894
- awu_matrix_test.append(["logdata", logdata, True, (str), True])
895
-
896
- # Validate argument types.
897
- _Validators._validate_function_arguments(awu_matrix_test)
898
-
899
- # Validate timeout value.
900
- _Validators._validate_positive_int(timeout, "timeout")
901
-
902
- self._validate()
903
-
904
- if logmech == "JWT" and not logdata:
905
- raise TeradataMlException(
906
- Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING, 'logdata',
907
- 'logmech=JWT'),
908
- MessageCodes.DEPENDENT_ARG_MISSING)
909
-
910
- if data_row_limit <= 0:
911
- raise ValueError(Messages.get_message(MessageCodes.TDMLDF_POSITIVE_INT).
912
- format("data_row_limit", "greater than"))
913
-
914
- # Either of 'input_data_file' or 'password' argument is required.
915
- password = kwargs.pop("password", None)
916
-
917
- # The check of EITHER_THIS_OR_THAT_ARGUMENT is applicable only when the exec_mode is sandbox.
918
- # Hence adding the check exec_mode != "local".
919
- # When exec_mode is local, the connection object is used to get the values in the table.
920
- if exec_mode != "local" and not (input_data_file or (self.data and password)):
921
- message = Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
922
- "input_data_file", "Script data and password")
923
- raise TeradataMlException(message, MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
924
- elif exec_mode == "local" and not (input_data_file or self.data):
925
- message = Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
926
- "input_data_file", "Script data")
927
- raise TeradataMlException(message, MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
928
-
929
- if not (self.script_name and self.files_local_path):
930
- message = Messages.get_message(MessageCodes.MISSING_ARGS,
931
- "script_name and files_local_path")
932
- raise TeradataMlException(message, MessageCodes.MISSING_ARGS)
933
-
934
- if input_data_file:
935
- if self.files_local_path is None:
936
- message = Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING,
937
- "files_local_path", "input_data_file")
938
- raise TeradataMlException(message, MessageCodes.DEPENDENT_ARG_MISSING)
939
- else:
940
- # Check if file exists.
941
- fpath = os.path.join(self.files_local_path,
942
- input_data_file)
943
- _Validators._validate_file_exists(fpath)
944
-
945
- if self.script_name and self.files_local_path:
946
- # Check if file exists.
947
- fpath = os.path.join(self.files_local_path,
948
- os.path.basename(self.script_name))
949
- _Validators._validate_file_exists(fpath)
950
-
951
- if exec_mode.upper() == TableOperatorConstants.LOCAL_EXEC.value:
952
- user_script_path = os.path.join(self.files_local_path, self.script_name)
953
- import sys
954
- cmd = [str(sys.executable), user_script_path]
955
- cmd.extend(script_args.split())
956
-
957
- if input_data_file is not None:
958
- input_file_path = os.path.join(self.files_local_path, input_data_file)
959
-
960
- # Run user script locally with input from a file.
961
- exec_cmd_output = self.__local_run_user_script_input_file(
962
- cmd, input_file_path, data_file_delimiter, data_file_quote_char, data_file_header)
963
- try:
964
- return self.__process_test_script_output(exec_cmd_output)
965
- except Exception as exp:
966
- raise
967
-
968
- else:
969
- if self.data.shape[0] > data_row_limit:
970
- raise ValueError(
971
- Messages.get_message(MessageCodes.DATAFRAME_LIMIT_ERROR,
972
- 'data_row_limit', 'data_row_limit',
973
- data_row_limit))
974
-
975
- if not self.data._table_name:
976
- self.data._table_name = df_utils._execute_node_return_db_object_name(
977
- self.data._nodeid, self.data._metaexpr)
978
-
979
- table_name = UtilFuncs._extract_table_name(self.data._table_name)
980
-
981
- # Run user script locally with input from db.
982
- exec_cmd_output = self.__local_run_user_script_input_db(cmd, table_name)
983
- try:
984
- return self.__process_test_script_output(exec_cmd_output)
985
- except Exception as exp:
986
- raise
987
- else:
988
- # Execution Mode - sandbox.
989
-
990
- # get the frame object of the function.
991
- import inspect
992
- frame = inspect.currentframe()
993
-
994
- # Validate argument types.
995
- _Validators._validate_module_presence('docker', frame.f_code.co_name)
996
-
997
- # Read container_id from configure.sandbox_container_id, if it is None then
998
- # raise an exception
999
- container_id = configure.sandbox_container_id
1000
- if container_id is None:
1001
- message = Messages.get_message(MessageCodes.SANDBOX_CONTAINER_NOT_FOUND)
1002
- raise TeradataMlException(message,
1003
- MessageCodes.SANDBOX_CONTAINER_NOT_FOUND)
1004
-
1005
- # Set path inside docker container. This is where files will be copied to.
1006
- # os.path.join() will not work here because the path is not dependent on
1007
- # client platform. The sandbox environment is Linux-based here.
1008
- _path_in_docker_container = "/home/tdatuser"
1009
- user_script_path = "{}/{}".format(_path_in_docker_container, self.script_name)
1010
-
1011
- if input_data_file is not None:
1012
- input_file_name = os.path.basename(input_data_file)
1013
- input_file_path = "{}/{}".format(_path_in_docker_container,
1014
- input_file_name)
1015
- # Create script_executor.
1016
- self._create_executor_script(user_script_path=user_script_path,
1017
- user_script_args=script_args,
1018
- data_file_path=input_file_path,
1019
- data_file_delimiter=data_file_delimiter,
1020
- data_file_quote_char=data_file_quote_char,
1021
- data_file_header=data_file_header)
1022
- else:
1023
- # Read input from db.
1024
- if self.data.shape[0] > data_row_limit:
1025
- raise ValueError(
1026
- Messages.get_message(MessageCodes.DATAFRAME_LIMIT_ERROR,
1027
- 'data_row_limit', 'data_row_limit',
1028
- data_row_limit))
1029
- db_host = get_context().url.host
1030
-
1031
- user_name = get_context().url.username
1032
-
1033
- if not self.data._table_name:
1034
- self.data._table_name = df_utils._execute_node_return_db_object_name(
1035
- self.data._nodeid, self.data._metaexpr)
1036
- table_name = UtilFuncs._extract_table_name(self.data._table_name)
1037
-
1038
- db_name = _get_current_databasename()
1039
-
1040
- # Create script_executor.
1041
- self._create_executor_script(user_script_path=user_script_path,
1042
- user_script_args=script_args,
1043
- db_host=db_host,
1044
- user_name=user_name,
1045
- passwd=password,
1046
- table_name=table_name,
1047
- db_name=db_name,
1048
- logmech=logmech,
1049
- logdata=logdata)
1050
-
1051
- import docker
1052
- client = docker.APIClient(timeout=timeout)
1053
-
1054
- # Copy files to container indicated in configure.sandbox_container_id.
1055
- files_to_copy = [self.script_name]
1056
-
1057
- if supporting_files is not None:
1058
- if isinstance(supporting_files, str):
1059
- supporting_files = [supporting_files]
1060
-
1061
- if len(supporting_files) == 0 \
1062
- or any(file in [None, "None", ""] for file in supporting_files):
1063
- raise ValueError(
1064
- Messages.get_message(MessageCodes.LIST_SELECT_NONE_OR_EMPTY,
1065
- 'supporting_files'))
1066
- else:
1067
- files_to_copy.extend(supporting_files)
1068
-
1069
- if input_data_file is not None:
1070
- files_to_copy.append(input_data_file)
1071
-
1072
- for filename in files_to_copy:
1073
- file_path = os.path.join(self.files_local_path, filename)
1074
- # Check if file exists.
1075
- _Validators._validate_file_exists(file_path)
1076
-
1077
- # Copy file to docker container.
1078
-
1079
- self._copy_to_docker_container(client, file_path,
1080
- _path_in_docker_container,
1081
- container_id)
1082
-
1083
- # Copy script_executor to docker container.
1084
- self._copy_to_docker_container(client, self.script_path,
1085
- _path_in_docker_container,
1086
- container_id)
1087
-
1088
- script_executor_file_name = os.path.basename(self.script_path)
1089
- exec_cmd = ("python3 {0}/{1}".format(_path_in_docker_container,
1090
- script_executor_file_name))
1091
-
1092
- try:
1093
- # Setup an exec instance in the container.
1094
- exec_cmd_create = client.exec_create(container_id, exec_cmd)
1095
-
1096
- # Start exec instance and run user script.
1097
- exec_cmd_output = client.exec_start(exec_cmd_create, demux=True)
1098
-
1099
- # Inspect the output for success or failure.
1100
- inspect_out = client.exec_inspect(exec_cmd_create)
1101
-
1102
- # Extract the exit code.
1103
- exit_code = inspect_out['ExitCode']
1104
-
1105
- if exec_cmd_output[0] is not None:
1106
- executor_output = exec_cmd_output[0].decode()
1107
-
1108
- executor_error = ""
1109
- if exec_cmd_output[1] is not None:
1110
- executor_error = exec_cmd_output[1].decode()
1111
-
1112
- # Exit code 1 indicates any error thrown by subprocess.
1113
- # Exit code 126 indicates permission problem or command is not executable.
1114
- # Exit code 127 indicates possible typos in shell script with
1115
- # unrecognizable characters.
1116
- if exit_code == 1 or exit_code == 126 or exit_code == 127:
1117
- message = Messages.get_message(
1118
- MessageCodes.SANDBOX_SCRIPT_ERROR).format(executor_error)
1119
- raise TeradataMlException(message,
1120
- MessageCodes.SANDBOX_SCRIPT_ERROR)
1121
- # Exit code 2 indicates either username or password is invalid.
1122
- elif exit_code == 2:
1123
- message = Messages.get_message(
1124
- MessageCodes.SANDBOX_CONNECTION_ERROR).format(executor_error)
1125
- raise TeradataMlException(message,
1126
- MessageCodes.SANDBOX_CONNECTION_ERROR)
1127
- # Exit code 3 indicates problem with query.
1128
- elif exit_code == 3:
1129
- message = Messages.get_message(
1130
- MessageCodes.SANDBOX_QUERY_ERROR).format(executor_error)
1131
- raise TeradataMlException(message,
1132
- MessageCodes.SANDBOX_QUERY_ERROR)
1133
- # Exit code 4 indicates all other exceptions / errors.
1134
- elif exit_code == 4:
1135
- message = Messages.get_message(
1136
- MessageCodes.SANDBOX_CONTAINER_ERROR).format(executor_error)
1137
- raise TeradataMlException(message,
1138
- MessageCodes.SANDBOX_CONTAINER_ERROR)
1139
- elif exit_code != 0:
1140
- # Any error other than exit code 1, 2, 3, 4
1141
- message = Messages.get_message(
1142
- MessageCodes.SANDBOX_CONTAINER_ERROR).format(executor_error)
1143
- raise TeradataMlException(message,
1144
- MessageCodes.SANDBOX_CONTAINER_ERROR)
1145
- else:
1146
- return self.__process_test_script_output(executor_output)
1147
- except Exception as exp:
1148
- message = Messages.get_message(
1149
- MessageCodes.SANDBOX_CONTAINER_ERROR).format(str(exp))
1150
- raise TeradataMlException(message,
1151
- MessageCodes.SANDBOX_CONTAINER_ERROR)
1152
-
1153
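The exit-code ladder above encodes a fixed mapping from sandbox exit codes to teradataml error categories. A table-driven sketch of the same dispatch, with illustrative names that are not teradataml APIs:

# Minimal sketch, assuming the convention documented in the comments above:
# 1/126/127 -> script error, 2 -> connection error, 3 -> query error,
# any other non-zero code -> container error.
_EXIT_CODE_CATEGORY = {1: "SCRIPT_ERROR", 126: "SCRIPT_ERROR",
                       127: "SCRIPT_ERROR", 2: "CONNECTION_ERROR",
                       3: "QUERY_ERROR", 4: "CONTAINER_ERROR"}

def categorize_exit_code(exit_code):
    """Return an error category for a sandbox exit code, or None on success."""
    if exit_code == 0:
        return None
    return _EXIT_CODE_CATEGORY.get(exit_code, "CONTAINER_ERROR")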
- def __local_run_user_script_input_file(self, cmd, input_file_path,
1154
- data_file_delimiter='\t',
1155
- data_file_quote_char='"',
1156
- data_file_header=True):
1157
- """
1158
- DESCRIPTION:
1159
- Function to run the user script in local mode with input from file.
1160
-
1161
- PARAMETERS:
1162
- cmd:
1163
- Required Argument.
1164
- Specifies the command for running the user script.
1165
- Types: str
1166
-
1167
- input_file_path:
1168
- Required Argument.
1169
- Specifies the absolute local path of input data file.
1170
- Types: str
1171
-
1172
- data_file_delimiter:
1173
- Optional Argument.
1174
- Specifies the delimiter used in input data file.
1175
- Default Value: '\t'
1176
- Types: str
1177
-
1178
- data_file_quote_char:
1179
- Optional Argument.
1180
- Specifies the quote character used in input data file.
1181
- Default Value: '"'
1182
- Types: str
1183
-
1184
- data_file_header:
1185
- Optional Argument.
1186
- Specifies whether the input data file has a header.
1187
- Default Value: True
1188
- Types: bool
1189
-
1190
- RETURNS:
1191
- The string output of the command that is run on input data file.
1192
-
1193
- RAISES:
1194
- Exception.
1195
-
1196
- EXAMPLES:
1197
- self.__local_run_user_script_input_file(cmd ="cmd",
1198
- input_file_path = "input_file_path",
1199
- data_file_delimiter = "data_file_delimiter",
1200
- data_file_quote_char = "data_file_quote_char",
1201
- data_file_header = True)
1202
-
1203
- """
1204
- with open(input_file_path) as data_file:
1205
- import csv
1206
- from pandas import isna as pd_isna
1207
-
1208
- data_handle = StringIO()
1209
-
1210
- # Read data from input file.
1211
- ip_data = csv.reader(data_file,
1212
- delimiter=data_file_delimiter,
1213
- quotechar=data_file_quote_char)
1214
- # Skip the first row of input file if data_file_header is True.
1215
- if data_file_header:
1216
- next(ip_data)
1217
- for row in ip_data:
1218
- if self.quotechar is not None:
1219
- # A NULL value should not be enclosed in quotes.
1220
- # The CSV module has no support for such output with writer,
1221
- # and hence the custom formatting.
1222
- line = ['' if pd_isna(s) else "{}{}{}".format(self.quotechar,
1223
- str(s),
1224
- self.quotechar)
1225
- for s in row]
1226
- else:
1227
- line = ['' if pd_isna(s) else str(s) for s in row]
1228
-
1229
- complete_line = (self.delimiter.join(line))
1230
-
1231
- data_handle.write(complete_line)
1232
- data_handle.write("\n")
1233
-
1234
- return self.__run_user_script_subprocess(cmd, data_handle)
1235
-
1236
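The quoting loop above (and its twin in __local_run_user_script_input_db below) hand-formats each row because csv.writer cannot emit quoted fields while leaving NULLs unquoted. A self-contained sketch of that formatting rule; format_row is illustrative, not a teradataml API:

from pandas import isna

def format_row(row, delimiter="\t", quotechar=None):
    # A NULL (None/NaN) becomes an empty, unquoted field; every other value
    # is stringified and, when a quotechar is given, wrapped in quotes.
    if quotechar is not None:
        fields = ['' if isna(s) else "{0}{1}{0}".format(quotechar, s) for s in row]
    else:
        fields = ['' if isna(s) else str(s) for s in row]
    return delimiter.join(fields)

# format_row(["Old", None, 1], ",", '"') returns '"Old",,"1"'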
- def __run_user_script_subprocess(self, cmd, data_handle):
1237
- """
1238
- DESCRIPTION:
1239
- Function to run the user script in a new process and return the output.
1240
-
1241
- PARAMETERS:
1242
- cmd:
1243
- Required Argument.
1244
- Specifies the command for running the script.
1245
- Types: str
1246
-
1247
- data_handle:
1248
- Required Argument.
1249
- Specifies the data handle for the input data required by the user script.
1250
-
1251
- RETURNS:
1252
- Output of user script on input data supplied in data_handle.
1253
-
1254
- RAISES:
1255
- None.
1256
-
1257
- EXAMPLES:
1258
- self.__run_user_script_subprocess(cmd = "exec_cmd_output",
1259
- data_handle = data_handle)
1260
-
1261
- """
1262
- # Launching new process to run the user script.
1263
- try:
1264
- proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
1265
- stdout=subprocess.PIPE,
1266
- stderr=subprocess.PIPE)
1267
- process_output, process_error = proc.communicate(data_handle.getvalue().encode())
1268
- data_handle.close()
1269
-
1270
- if proc.returncode == 0:
1271
- return process_output.decode("utf-8").rstrip("\r\n")
1272
- else:
1273
- message = Messages.get_message(MessageCodes.SCRIPT_LOCAL_RUN_ERROR).\
1274
- format(process_error)
1275
- raise TeradataMlException(message, MessageCodes.SCRIPT_LOCAL_RUN_ERROR)
1276
- except Exception as e:
1277
- raise e
1278
-
1279
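The runner above stages the input in a StringIO handle, pipes it to the user script's stdin, and treats a non-zero return code as a local-run error. A minimal usage sketch of the same Popen/communicate pattern; mapper.py is a hypothetical user script:

import subprocess
import sys
from io import StringIO

# Two tab-delimited input rows for the (hypothetical) mapper.py.
data_handle = StringIO("Old\t1\nFarm\t1\n")
proc = subprocess.Popen([sys.executable, "mapper.py"],
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)
out, err = proc.communicate(data_handle.getvalue().encode())
if proc.returncode == 0:
    print(out.decode("utf-8").rstrip("\r\n"))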
- def __process_test_script_output(self, exec_cmd_output):
1280
- """
1281
- DESCRIPTION:
1282
- Function to format the output of the user script.
1283
-
1284
- PARAMETERS:
1285
- exec_cmd_output:
1286
- Required Argument.
1287
- Specifies the output returned by the user script.
1288
- Types: str
1289
-
1290
- RETURNS:
1291
- The test script output as Pandas DataFrame.
1292
-
1293
- RAISES:
1294
- Exception.
1295
-
1296
- EXAMPLES:
1297
- self.__process_test_script_output(exec_cmd_output = "exec_cmd_output")
1298
- """
1299
- try:
1300
- kwargs = dict()
1301
- if self.quotechar is not None:
1302
- kwargs['quotechar'] = self.quotechar
1303
- kwargs['quoting'] = 1 # QUOTE_ALL
1304
-
1305
- output = StringIO(exec_cmd_output)
1306
-
1307
- from pandas import read_csv as pd_read_csv
1308
-
1309
- # Form a pandas dataframe.
1310
- df = pd_read_csv(output, sep=self.delimiter, index_col=False, header=None,
1311
- names=list(self.returns.keys()), **kwargs)
1312
- return df
1313
-
1314
- except Exception as exp:
1315
- raise exp
1316
-
1317
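The user script's stdout comes back as delimited text, and __process_test_script_output rebuilds it into a pandas DataFrame with the keys of the 'returns' dictionary supplying the column names. A standalone sketch under those assumptions; the sample data is illustrative:

from io import StringIO
from pandas import read_csv

raw_output = "Old\t1\nMacdonald\t1\n"     # what the user script printed
column_names = ["word", "count_input"]    # i.e. list(self.returns.keys())

df = read_csv(StringIO(raw_output), sep="\t", index_col=False,
              header=None, names=column_names)
print(df)
#         word  count_input
# 0        Old            1
# 1  Macdonald            1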
- def __local_run_user_script_input_db(self, cmd, table_name):
1318
- """
1319
- DESCRIPTION:
1320
- Function to run the user script in local mode with input from db.
1321
-
1322
- PARAMETERS:
1323
- cmd:
1324
- Required Argument.
1325
- Specifies the command for running the user script.
1326
- Types: str
1327
-
1328
- table_name:
1329
- Required Argument.
1330
- Specifies the table name for input to user script.
1331
- Types: str
1332
-
1333
- RETURNS:
1334
- The string output of the command that is run on the Vantage table.
1335
-
1336
- RAISES:
1337
- Exception.
1338
-
1339
- EXAMPLES:
1340
- self.__local_run_user_script_input_db(cmd = "cmd", table_name = "table_name")
1341
-
1342
- """
1343
- db_data_handle = StringIO()
1344
- try:
1345
- con = get_connection()
1346
- # Query for reading data from DB.
1347
- query = ("SELECT * FROM {} ORDER BY 1;".format(table_name))
1348
- cur = execute_sql(query)
1349
- row = cur.fetchone()
1350
- from pandas import isna as pd_isna
1351
- while row:
1352
- if self.quotechar is not None:
1353
- # A NULL value should not be enclosed in quotes.
1354
- # The CSV module has no support for such output with writer,
1355
- # and hence the custom formatting.
1356
- line = ['' if pd_isna(s) else "{}{}{}".format(self.quotechar,
1357
- str(s),
1358
- self.quotechar)
1359
- for s in row]
1360
- else:
1361
- line = ['' if pd_isna(s) else str(s) for s in row]
1362
-
1363
- complete_line = (self.delimiter.join(line))
1364
- db_data_handle.write(complete_line)
1365
- db_data_handle.write("\n")
1366
- row = cur.fetchone()
1367
- except Exception as exp:
1368
- raise exp
1369
-
1370
- return self.__run_user_script_subprocess(cmd, db_data_handle)
1371
-
1372
- def _create_executor_script(self, user_script_path,
1373
- user_script_args=None,
1374
- data_file_path=None,
1375
- data_file_delimiter='\t',
1376
- data_file_quote_char='"',
1377
- data_file_header=True,
1378
- db_name=None,
1379
- db_host=None,
1380
- user_name=None,
1381
- passwd=None,
1382
- logmech=None,
1383
- logdata=None,
1384
- table_name=None):
1385
- """
1386
- DESCRIPTION:
1387
- Internal function that will generate 'script_executor.py' to be copied to
1388
- sandbox environment.
1389
-
1390
- PARAMETERS:
1391
- user_script_path:
1392
- Required Argument.
1393
- Specifies the path to user script inside docker container.
1394
- Types: str
1395
-
1396
- user_script_args:
1397
- Optional Argument.
1398
- Specifies command line arguments required by the user script.
1399
- Types: str
1400
-
1401
- data_file_path:
1402
- Optional Argument.
1403
- Specifies the path to input data file inside docker container.
1404
- Types: str
1405
-
1406
- data_file_delimiter:
1407
- Optional Argument.
1408
- Specifies the delimiter used in input data file.
1409
- Default Value: "\t" (tab)
1410
- Types: character of length 1
1411
-
1412
- data_file_quote_char:
1413
- Optional Argument.
1414
- Specifies the quote character used in input data file.
1415
- Default Value: '"'
1416
- Types: character of length 1
1417
-
1418
- data_file_header:
1419
- Optional Argument.
1420
- Specifies whether the input data file has a header.
1421
- Default Value: True
1422
- Types: bool
1423
-
1424
- db_name:
1425
- Optional Argument.
1426
- Specifies the current database name.
1427
- Default Value: None
1428
- Types: str
1429
-
1430
- db_host:
1431
- Optional Argument.
1432
- Specifies the host name.
1433
- Default Value: None
1434
- Types: str
1435
-
1436
- user_name:
1437
- Optional Argument.
1438
- Specifies the user name.
1439
- Default Value: None
1440
- Types: str
1441
-
1442
- passwd:
1443
- Optional Argument.
1444
- Specifies the password for user name in "user_name".
1445
- Default Value: None
1446
- Types: str
1447
-
1448
- table_name:
1449
- Optional Argument.
1450
- Specifies the table name where input data is present.
1451
- Default Value: None
1452
- Types: str
1453
-
1454
- RETURNS:
1455
- None.
1456
-
1457
- RAISES:
1458
- None.
1459
-
1460
- EXAMPLES:
1461
- # Example 1: Create executor script when input data is to be read from a file.
1462
-
1463
- self._create_executor_script(user_script_path=user_script_path,
1464
- user_script_args=script_args,
1465
- data_file_path=input_file_path,
1466
- data_file_delimiter=data_file_delimiter,
1467
- data_file_quote_char=data_file_quote_char,
1468
- data_file_header=data_file_header)
1469
-
1470
- # Example 2: Create executor script when input data is to be read from db.
1471
-
1472
- self._create_executor_script(user_script_path=user_script_path,
1473
- user_script_args=script_args,
1474
- db_host=db_host,
1475
- user_name=user_name,
1476
- passwd=password,
1477
- table_name=table_name,
1478
- db_name=db_name)
1479
-
1480
- """
1481
- __data_source = "db"
1482
- if data_file_path:
1483
- __data_source = "file"
1484
-
1485
- temp_script_name = UtilFuncs._generate_temp_script_name(prefix="script_executor",
1486
- use_default_database=True,
1487
- gc_on_quit=True,
1488
- quote=True,
1489
- script_type=TeradataConstants.TERADATA_LOCAL_SCRIPT)
1490
-
1491
- # Remove quotes from the file name after removing the database name.
1492
- script_alias = UtilFuncs._teradata_unquote_arg(
1493
- UtilFuncs._extract_table_name(temp_script_name), quote='"')
1494
-
1495
- # script_name is the actual file name (basename).
1496
- script_name = "{script_name}.py".format(script_name=script_alias)
1497
-
1498
- # Create script in .teradataml directory.
1499
- ###
1500
-
1501
- script_dir = GarbageCollector._get_temp_dir_name()
1502
-
1503
- # script_path is the actual path where we want to generate the user script at.
1504
- self.script_path = os.path.join(script_dir, script_name)
1505
-
1506
- template_dir = os.path.join(os.path.dirname(
1507
- os.path.dirname(os.path.abspath(__file__))),
1508
- "table_operators", "templates")
1509
- try:
1510
- # Write to the script based on the template.
1511
- #
1512
- from teradataml.common.constants import TableOperatorConstants
1513
- executor_file = os.path.join(template_dir,
1514
- TableOperatorConstants.SCRIPT_TEMPLATE.value)
1515
- with open(executor_file, 'r') as input_file:
1516
- with open(self.script_path, 'w') as output_file:
1517
- os.chmod(self.script_path, 0o644)
1518
- output_file.write(
1519
- input_file.read().format(
1520
- DATA_SOURCE=UtilFuncs._serialize_and_encode(__data_source),
1521
- DELIMITER=UtilFuncs._serialize_and_encode(self.delimiter),
1522
- QUOTECHAR=UtilFuncs._serialize_and_encode(self.quotechar),
1523
- USER_SCRIPT_PATH=UtilFuncs._serialize_and_encode(
1524
- user_script_path),
1525
- SCRIPT_ARGS=UtilFuncs._serialize_and_encode(user_script_args),
1526
- DATA_FILE_PATH=UtilFuncs._serialize_and_encode(
1527
- data_file_path),
1528
- INPUT_DATA_FILE_DELIMITER=UtilFuncs._serialize_and_encode(
1529
- data_file_delimiter),
1530
- INPUT_DATA_FILE_QUOTE_CHAR=UtilFuncs._serialize_and_encode(
1531
- data_file_quote_char),
1532
- INPUT_DATA_FILE_HEADER=UtilFuncs._serialize_and_encode(
1533
- data_file_header),
1534
- DB_HOST=UtilFuncs._serialize_and_encode(db_host),
1535
- DB_USER=UtilFuncs._serialize_and_encode(user_name),
1536
- DB_PASS=UtilFuncs._serialize_and_encode(passwd),
1537
- DB_NAME=UtilFuncs._serialize_and_encode(db_name),
1538
- TABLE_NAME=UtilFuncs._serialize_and_encode(table_name),
1539
- LOGMECH=UtilFuncs._serialize_and_encode(logmech),
1540
- LOGDATA=UtilFuncs._serialize_and_encode(logdata)
1541
- ))
1542
- except Exception:
1543
- # Cleanup if we end up here in case of an error.
1544
- GarbageCollector._delete_local_file(self.script_path)
1545
- raise
1546
-
1547
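_create_executor_script fills a script template with serialized, encoded argument values so that arbitrary Python values (delimiters, argument lists, None) survive the trip through a text file. A sketch of that placeholder-substitution idea; the pickle-plus-base64 encoding here is an assumption, not necessarily what UtilFuncs._serialize_and_encode does:

import base64
import pickle

def serialize_and_encode(value):
    # Assumption: pickle then base64, so any Python value fits in a text template.
    return base64.b64encode(pickle.dumps(value)).decode("ascii")

# A two-placeholder stand-in for the real executor template.
template = "DATA_SOURCE = {DATA_SOURCE!r}\nDELIMITER = {DELIMITER!r}\n"
rendered = template.format(DATA_SOURCE=serialize_and_encode("file"),
                           DELIMITER=serialize_and_encode("\t"))
print(rendered)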
- def _copy_to_docker_container(self, client,
1548
- local_file_path,
1549
- path_in_docker_container,
1550
- container):
1551
- """
1552
- DESCRIPTION:
1553
- Function to copy files to docker container.
1554
-
1555
- PARAMETERS:
1556
- client:
1557
- Required Argument.
1558
- Specifies the connection object for docker.
1559
- Types: str
1560
-
1561
- local_file_path:
1562
- Required Argument.
1563
- Specifies the path to the file to be copied.
1564
- Types: str
1565
-
1566
- path_in_docker_container:
1567
- Required Argument.
1568
- Specifies destination path in the docker container where file will be
1569
- copied to.
1570
- Types: str
1571
-
1572
- container:
1573
- Required Argument.
1574
- Specifies container id.
1575
- Types: str
1576
-
1577
- RETURNS:
1578
- None.
1579
-
1580
- RAISES:
1581
- TeradataMlException.
1582
-
1583
- """
1584
- # Create tar file.
1585
- tar_file_path = "{}.tar".format(local_file_path)
1586
- file_name = os.path.basename(local_file_path)
1587
- tar = tarfile.open(tar_file_path, mode='w')
1588
- try:
1589
- tar.add(local_file_path, arcname=file_name)
1590
- finally:
1591
- tar.close()
1592
- with open(tar_file_path, 'rb') as tar_file:
-     data = tar_file.read()
1593
-
1594
- try:
1595
- # Copy file to docker container.
1596
- copy_status = client.put_archive(container, path_in_docker_container, data)
1597
- os.remove(tar_file_path)
1598
-
1599
- if copy_status:
1600
- return
1601
- except Exception as exp:
1602
- message = Messages.get_message(
1603
- MessageCodes.SANDBOX_CONTAINER_ERROR).format(str(exp))
1604
- raise TeradataMlException(message, MessageCodes.SANDBOX_CONTAINER_ERROR)
1605
-
1606
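docker's put_archive API only accepts tar-format bytes, which is why the method above tars each file before copying it in. A sketch that builds the archive in memory instead of on disk; the container id and paths are placeholders:

import io
import os
import tarfile
import docker

def copy_file_to_container(client, container_id, local_path, dest_dir):
    # Build the tar archive in memory; put_archive expects tar-format bytes.
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tar:
        tar.add(local_path, arcname=os.path.basename(local_path))
    buf.seek(0)
    return client.put_archive(container_id, dest_dir, buf.read())

# client = docker.APIClient()
# copy_file_to_container(client, "<container_id>", "./mapper.py", "/home/tdatuser")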
- def __repr__(self):
1607
- """
1608
- Returns the string representation for the class instance.
1609
- """
1610
- if self.result is None:
1611
- repr_string = "Result is empty. Please run execute_script first."
1612
- else:
1613
- repr_string = "############ STDOUT Output ############"
1614
- repr_string = "{}\n\n{}".format(repr_string, self.result)
1615
- return repr_string
1616
-
1
+ #!/usr/bin/python
2
+ # ##################################################################
3
+ #
4
+ # Copyright 2020 Teradata. All rights reserved.
5
+ # TERADATA CONFIDENTIAL AND TRADE SECRET
6
+ #
7
+ # Primary Owner: Trupti Purohit (trupti.purohit@teradata.com)
8
+ # Secondary Owner: Gouri Patwardhan (gouri.patwardhan@teradata.com)
9
+ #
10
+ # Function Version: 1.0
11
+ #
12
+ # Description: Base class for Teradata's Table Operators
13
+ # ##################################################################
14
+
15
+ import os
16
+ import time
17
+ import uuid
18
+ from math import floor
19
+ import tarfile
20
+ import subprocess
21
+ from pathlib import Path
22
+ import teradataml.dataframe as tdmldf
23
+ from teradataml.common.constants import OutputStyle, TeradataConstants
24
+ from teradataml.common.constants import TableOperatorConstants
25
+ from teradataml.common.garbagecollector import GarbageCollector
26
+ from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
27
+ from teradataml.common.utils import UtilFuncs
28
+ from teradataml.dataframe.dataframe_utils import DataFrameUtils as df_utils
29
+
30
+ from teradataml.common.exceptions import TeradataMlException
31
+ from teradataml.common.messages import Messages
32
+ from teradataml.common.messagecodes import MessageCodes
33
+ from teradataml.options.configure import configure
34
+ from teradataml.utils.utils import execute_sql
35
+ from teradataml.utils.validators import _Validators
36
+ from teradatasqlalchemy import (BYTEINT, SMALLINT, INTEGER, BIGINT, DECIMAL, FLOAT, NUMBER)
37
+ from teradatasqlalchemy import (TIMESTAMP, DATE, TIME)
38
+ from teradatasqlalchemy import (CHAR, VARCHAR, CLOB)
39
+ from teradatasqlalchemy import (BYTE, VARBYTE, BLOB)
40
+ from teradatasqlalchemy import (PERIOD_DATE, PERIOD_TIME, PERIOD_TIMESTAMP)
41
+ from teradatasqlalchemy import (INTERVAL_YEAR, INTERVAL_YEAR_TO_MONTH, INTERVAL_MONTH, INTERVAL_DAY,
42
+ INTERVAL_DAY_TO_HOUR, INTERVAL_DAY_TO_MINUTE, INTERVAL_DAY_TO_SECOND,
43
+ INTERVAL_HOUR, INTERVAL_HOUR_TO_MINUTE, INTERVAL_HOUR_TO_SECOND,
44
+ INTERVAL_MINUTE, INTERVAL_MINUTE_TO_SECOND, INTERVAL_SECOND)
45
+ from teradataml.context.context import _get_current_databasename, get_context, get_connection
46
+ from io import StringIO
47
+
48
+
49
+ class TableOperator:
50
+
51
+ def __init__(self,
52
+ data=None,
53
+ script_name=None,
54
+ files_local_path=None,
55
+ delimiter="\t",
56
+ returns=None,
57
+ quotechar=None,
58
+ data_partition_column=None,
59
+ data_hash_column=None,
60
+ data_order_column=None,
61
+ is_local_order=False,
62
+ sort_ascending=True,
63
+ nulls_first=True):
64
+ """
65
+ DESCRIPTION:
66
+ Table Operators are a type of User-Defined Function, only available when connected to
67
+ Vantage.
68
+
69
+ PARAMETERS:
70
+ data:
71
+ Optional Argument.
72
+ Specifies a teradataml DataFrame containing the input data for the script.
73
+
74
+ script_name:
75
+ Required Argument.
76
+ Specifies the name of the user script.
77
+ Types: str
78
+
79
+ files_local_path:
80
+ Required Argument.
81
+ Specifies the absolute local path where the user script and all supporting files
82
+ such as model files and input data files reside.
83
+ Types: str
84
+
85
+ delimiter:
86
+ Optional Argument.
87
+ Specifies a delimiter to use when reading columns from a row and
88
+ writing result columns.
89
+ The delimiter is a single character chosen from the set of punctuation characters.
90
+ Types: str
91
+
92
+ returns:
93
+ Required Argument.
94
+ Specifies the output column definition.
95
+ Types: Dictionary specifying column name to teradatasqlalchemy type mapping.
96
+ Default: None
97
+
98
+ data_hash_column:
99
+ Optional Argument.
100
+ Specifies the column to be used for hashing.
101
+ The rows in the data are redistributed to AMPs based on the hash value of the
102
+ column specified. The user-installed script file then runs once on each AMP.
103
+ If there is no data_hash_column, then the entire result set,
104
+ delivered by the function, constitutes a single group or partition.
105
+ Types: str
106
+ Note:
107
+ "data_hash_column" can not be specified along with "data_partition_column",
108
+ "is_local_order" and "data_order_column".
109
+
110
+ data_partition_column:
111
+ Optional Argument.
112
+ Specifies Partition By columns for data.
113
+ Values to this argument can be provided as a list, if multiple
114
+ columns are used for partitioning.
115
+ Default Value: ANY
116
+ Types: str OR list of Strings (str)
117
+ Notes:
118
+ 1) "data_partition_column" can not be specified along with "data_hash_column".
119
+ 2) "data_partition_column" can not be specified along with "is_local_order = True".
120
+
121
+ is_local_order:
122
+ Optional Argument.
123
+ Specifies a boolean value to determine whether the input data is to be ordered locally
124
+ or not. 'sort_ascending' specifies the order in which the values in a group, or partition,
125
+ are sorted. This argument is ignored if "data_order_column" is None.
126
+ When set to 'True', qualified rows are ordered locally in preparation to be input
127
+ to the function.
128
+ Default Value: False
129
+ Types: bool
130
+ Note:
131
+ "is_local_order" can not be specified along with "data_hash_column".
132
+ When "is_local_order" is set to 'True', "data_order_column" should be specified,
133
+ and the columns specified in "data_order_column" are used for local ordering.
134
+
135
+ data_order_column:
136
+ Optional Argument.
137
+ Specifies Order By columns for data.
138
+ Values to this argument can be provided as a list, if multiple
139
+ columns are used for ordering.
140
+ This argument is used in both cases: "is_local_order = True"
141
+ and "is_local_order = False".
142
+ Types: str OR list of Strings (str)
143
+ Note:
144
+ "data_order_column" can not be specified along with "data_hash_column".
145
+
146
+ sort_ascending:
147
+ Optional Argument.
148
+ Specifies a boolean value to determine if the input data is to be sorted on
149
+ the data_order_column column in ascending or descending order.
150
+ When this is set to 'True' data is sorted in ascending order,
151
+ otherwise data is sorted in descending order.
152
+ This argument is ignored if "data_order_column" is None.
153
+ Default Value: True
154
+ Types: bool
155
+
156
+ nulls_first:
157
+ Optional Argument.
158
+ Specifies a boolean value to determine whether NULLS from input data are listed
159
+ first or last during ordering.
160
+ When this is set to 'True' NULLS are listed first, otherwise NULLS are listed last.
161
+ This argument is ignored if "data_order_column" is None.
162
+ Default Value: True
163
+ Types: bool
164
+
165
+ RETURNS:
166
+ An instance of TableOperator class.
167
+
168
+ RAISES:
169
+ TeradataMlException
170
+
171
+ EXAMPLES:
172
+ # Apply class extends this base class.
173
+ apply_obj = Apply(data=barrierdf,
174
+ script_name='mapper.py',
175
+ files_local_path= '/root/data/scripts/',
176
+ apply_command='python3 mapper.py',
177
+ data_order_column="Id",
178
+ is_local_order=False,
179
+ nulls_first=False,
180
+ sort_ascending=False,
181
+ env_name = "test_env",
182
+ returns={"word": VARCHAR(15), "count_input": VARCHAR(2)},
183
+ style='csv',
184
+ delimiter=',')
185
+ """
186
+ self.result = None
187
+ self._tblop_query = None
188
+ self.data = data
189
+ self.script_name = script_name
190
+ self.files_local_path = files_local_path
191
+ self.delimiter = delimiter
192
+ self.quotechar = quotechar
193
+ self.returns = returns
194
+ self.data_partition_column = data_partition_column
195
+ self.data_hash_column = data_hash_column
196
+ self.data_order_column = data_order_column
197
+ self.is_local_order = is_local_order
198
+ self.sort_ascending = sort_ascending
199
+ self.nulls_first = nulls_first
200
+
201
+ # Datatypes supported in returns clause of a table operator.
202
+ self._supported_returns_datatypes = (BYTEINT, SMALLINT, INTEGER, BIGINT, DECIMAL, FLOAT, NUMBER,
203
+ TIMESTAMP, DATE, TIME, CHAR, VARCHAR, CLOB, BYTE, VARBYTE,
204
+ BLOB, PERIOD_DATE, PERIOD_TIME, PERIOD_TIMESTAMP, INTERVAL_YEAR,
205
+ INTERVAL_YEAR_TO_MONTH, INTERVAL_MONTH, INTERVAL_DAY, INTERVAL_DAY_TO_HOUR,
206
+ INTERVAL_DAY_TO_MINUTE, INTERVAL_DAY_TO_SECOND, INTERVAL_HOUR,
207
+ INTERVAL_HOUR_TO_MINUTE, INTERVAL_HOUR_TO_SECOND, INTERVAL_MINUTE,
208
+ INTERVAL_MINUTE_TO_SECOND, INTERVAL_SECOND
209
+ )
210
+
211
+ # Create AnalyticsWrapperUtils instance which contains validation functions.
212
+ # This is required for is_default_or_not check.
213
+ # Rest all validation is done using _Validators.
214
+ self.__awu = AnalyticsWrapperUtils()
215
+
216
+ self.awu_matrix = []
217
+ self.awu_matrix.append(["data", self.data, True, (tdmldf.dataframe.DataFrame)])
218
+ self.awu_matrix.append(["data_partition_column", self.data_partition_column, True, (str, list), True])
219
+ self.awu_matrix.append(["data_hash_column", self.data_hash_column, True, (str, list), True])
220
+ self.awu_matrix.append(["data_order_column", self.data_order_column, True, (str, list), True])
221
+ self.awu_matrix.append(["is_local_order", self.is_local_order, True, (bool)])
222
+ self.awu_matrix.append(["sort_ascending", self.sort_ascending, True, (bool)])
223
+ self.awu_matrix.append(["nulls_first", self.nulls_first, True, (bool)])
224
+ self.awu_matrix.append(["script_name", self.script_name, True, (str), True])
225
+ self.awu_matrix.append(["files_local_path", self.files_local_path, True, (str), True])
226
+ self.awu_matrix.append(["delimiter", self.delimiter, True, (str), False])
227
+ self.awu_matrix.append(["quotechar", self.quotechar, True, (str), False])
228
+
229
+ # Perform the function validations.
230
+ self._validate()
231
+
232
+ def _validate(self, for_data_args=False):
233
+ """
234
+ Function to validate Table Operator Function arguments, which verifies missing
235
+ arguments, input argument and table types. Also processes the
236
+ argument values.
237
+ @param: for_data_args: Specifies whether the validation is for only arguments related to data or not.
238
+ When set to True, validation is only for data arguments. Otherwise, validation
239
+ is for all arguments. By default, system validates all the arguments.
240
+ """
241
+
242
+ if not for_data_args:
243
+ # Make sure that a non-NULL value has been supplied for all mandatory arguments
244
+ _Validators._validate_missing_required_arguments(self.awu_matrix)
245
+
246
+ # Validate argument types
247
+ _Validators._validate_function_arguments(self.awu_matrix,
248
+ skip_empty_check={"quotechar": ["\n", "\t"],
249
+ "delimiter": ["\n"]})
250
+
251
+ if self.data is not None:
252
+ # Hash and order by can be used together as long as is_local_order = True.
253
+ if all([self.data_hash_column,
254
+ self.data_order_column]) and not self.is_local_order:
255
+ raise TeradataMlException(
256
+ Messages.get_message(MessageCodes.CANNOT_USE_TOGETHER_WITH,
257
+ "data_hash_column' and 'data_order_column",
258
+ "is_local_order=False"),
259
+ MessageCodes.CANNOT_USE_TOGETHER_WITH)
260
+
261
+ # Either hash or partition can be used.
262
+ if all([self.data_hash_column, self.data_partition_column]):
263
+ raise TeradataMlException(Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
264
+ "data_hash_column", "data_partition_column"),
265
+ MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
266
+
267
+ # Either local order by or partition by can be used.
268
+ if all([self.is_local_order, self.data_partition_column]):
269
+ raise TeradataMlException(Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
270
+ "is_local_order=True",
271
+ "data_partition_column"),
272
+ MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
273
+
274
+ # local order by requires column name.
275
+ if self.is_local_order and self.data_order_column is None:
276
+ raise TeradataMlException(Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING,
277
+ "data_order_column",
278
+ "is_local_order=True"),
279
+ MessageCodes.DEPENDENT_ARG_MISSING)
280
+
281
+ if self.__awu._is_default_or_not(self.data_partition_column, "ANY"):
282
+ _Validators._validate_dataframe_has_argument_columns(self.data_partition_column, "data_partition_column",
283
+ self.data, "data", True)
284
+
285
+ _Validators._validate_dataframe_has_argument_columns(self.data_order_column, "data_order_column",
286
+ self.data, "data", False)
287
+
288
+ _Validators._validate_dataframe_has_argument_columns(self.data_hash_column, "data_hash_column",
289
+ self.data, "data", False)
290
+
291
+ if not for_data_args:
292
+ # Check for length of the arguments "delimiter" and "quotechar".
293
+ if self.delimiter is not None:
294
+ _Validators._validate_str_arg_length('delimiter', self.delimiter, 'EQ', 1)
295
+
296
+ if self.quotechar is not None:
297
+ _Validators._validate_str_arg_length('quotechar', self.quotechar, 'EQ', 1)
298
+
299
+ # The arguments 'quotechar' and 'delimiter' cannot take newline character.
300
+ if self.delimiter == '\n':
301
+ raise TeradataMlException(Messages.get_message(MessageCodes.NOT_ALLOWED_VALUES,
302
+ "\n", "delimiter"),
303
+ MessageCodes.NOT_ALLOWED_VALUES)
304
+ if self.quotechar == '\n':
305
+ raise TeradataMlException(Messages.get_message(MessageCodes.NOT_ALLOWED_VALUES,
306
+ "\n", "quotechar"),
307
+ MessageCodes.NOT_ALLOWED_VALUES)
308
+
309
+ # The arguments 'quotechar' and 'delimiter' cannot have the same value.
310
+ if self.delimiter == self.quotechar:
311
+ raise TeradataMlException(Messages.get_message(MessageCodes.ARGUMENT_VALUE_SAME,
312
+ "delimiter", "quotechar"),
313
+ MessageCodes.ARGUMENT_VALUE_SAME)
314
+
315
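The checks above reduce to a small compatibility matrix between the data arguments: hashing excludes partitioning and, without local ordering, excludes ordering; local ordering excludes partitioning and requires an order column. A compact sketch of the same rules in isolation; this mirrors _validate() but is not the teradataml validator itself:

def check_data_arg_combination(hash_col=None, partition_col=None,
                               order_col=None, is_local_order=False):
    # Mirrors the mutual-exclusion rules enforced by TableOperator._validate().
    if hash_col and order_col and not is_local_order:
        raise ValueError("data_hash_column with data_order_column needs is_local_order=True")
    if hash_col and partition_col:
        raise ValueError("use either data_hash_column or data_partition_column")
    if is_local_order and partition_col:
        raise ValueError("use either is_local_order=True or data_partition_column")
    if is_local_order and order_col is None:
        raise ValueError("is_local_order=True requires data_order_column")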
+ def set_data(self,
316
+ data,
317
+ data_partition_column=None,
318
+ data_hash_column=None,
319
+ data_order_column=None,
320
+ is_local_order=False,
321
+ sort_ascending=True,
322
+ nulls_first=True):
323
+ """
324
+ DESCRIPTION:
325
+ Function enables the user to set data and data-related arguments without having to
326
+ re-create the Script object.
327
+
328
+ PARAMETERS:
329
+ data:
330
+ Required Argument.
331
+ Specifies a teradataml DataFrame containing the input data for the script.
332
+
333
+ data_hash_column:
334
+ Optional Argument.
335
+ Specifies the column to be used for hashing.
336
+ The rows in the data are redistributed to AMPs based on the
337
+ hash value of the column specified.
338
+ The user-installed script then runs once on each AMP.
339
+ If there is no data_hash_column, then the entire result set delivered
340
+ by the function constitutes a single group or partition.
341
+ Types: str
342
+ Note:
343
+ "data_hash_column" can not be specified along with
344
+ "data_partition_column", "is_local_order" and "data_order_column".
345
+
346
+ data_partition_column:
347
+ Optional Argument.
348
+ Specifies Partition By columns for data.
349
+ Values to this argument can be provided as a list, if multiple
350
+ columns are used for partitioning.
351
+ Default Value: ANY
352
+ Types: str OR list of Strings (str)
353
+ Note:
354
+ 1) "data_partition_column" can not be specified along with
355
+ "data_hash_column".
356
+ 2) "data_partition_column" can not be specified along with
357
+ "is_local_order = True".
358
+
359
+ is_local_order:
360
+ Optional Argument.
361
+ Specifies a boolean value to determine whether the input data is to be
362
+ ordered locally or not. Order by specifies the order in which the
363
+ values in a group or partition are sorted. Local Order By
364
+ orders qualified rows on each AMP in preparation to be input to a table
365
+ function. This argument is ignored, if "data_order_column" is None. When
366
+ set to True, data is ordered locally.
367
+ Default Value: False
368
+ Types: bool
369
+ Note:
370
+ 1) "is_local_order" can not be specified along with
371
+ "data_hash_column".
372
+ 2) When "is_local_order" is set to True, "data_order_column" should be
373
+ specified, and the columns specified in "data_order_column" are
374
+ used for local ordering.
375
+
376
+ data_order_column:
377
+ Optional Argument.
378
+ Specifies Order By columns for data.
379
+ Values to this argument can be provided as a list, if multiple
380
+ columns are used for ordering.
381
+ This argument is used in both cases:
382
+ "is_local_order = True" and "is_local_order = False".
383
+ Types: str OR list of Strings (str)
384
+ Note:
385
+ "data_order_column" can not be specified along with
386
+ "data_hash_column".
387
+
388
+ sort_ascending:
389
+ Optional Argument.
390
+ Specifies a boolean value to determine if the result set is to be sorted
391
+ on the column specified in "data_order_column", in ascending or descending
392
+ order.
393
+ The sorting is ascending when this argument is set to True, and descending
394
+ when set to False.
395
+ This argument is ignored, if "data_order_column" is None.
396
+ Default Value: True
397
+ Types: bool
398
+
399
+ nulls_first:
400
+ Optional Argument.
401
+ Specifies a boolean value to determine whether NULLS are listed first or
402
+ last during ordering.
403
+ This argument is ignored, if "data_order_column" is None.
404
+ NULLS are listed first when this argument is set to True, and
405
+ last when set to False.
406
+ Default Value: True
407
+ Types: bool
408
+
409
+ RETURNS:
410
+ None.
411
+
412
+ RAISES:
413
+ TeradataMlException
414
+
415
+ EXAMPLES:
416
+ >>> self.set_data(df)
417
+ """
418
+
419
+ awu_matrix_setter = []
420
+ awu_matrix_setter.append(["data", data, True, (tdmldf.dataframe.DataFrame)])
421
+ awu_matrix_setter.append(["data_partition_column", data_partition_column,
422
+ True, (str, list), True])
423
+ awu_matrix_setter.append(["data_hash_column", data_hash_column, True,
424
+ (str, list), True])
425
+ awu_matrix_setter.append(["data_order_column", data_order_column, True,
426
+ (str, list), True])
427
+ awu_matrix_setter.append(["is_local_order", is_local_order, True, (bool)])
428
+ awu_matrix_setter.append(["sort_ascending", sort_ascending, True, (bool)])
429
+ awu_matrix_setter.append(["nulls_first", nulls_first, True, (bool)])
430
+
431
+ # Perform the function validations
432
+ _Validators._validate_missing_required_arguments([["data", data, False,
433
+ (tdmldf.dataframe.DataFrame)]])
434
+ _Validators._validate_function_arguments(awu_matrix_setter)
435
+
436
+ self.data = data
437
+ self.data_partition_column = data_partition_column
438
+ self.data_hash_column = data_hash_column
439
+ self.data_order_column = data_order_column
440
+ self.is_local_order = is_local_order
441
+ self.sort_ascending = sort_ascending
442
+ self.nulls_first = nulls_first
443
+
444
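A usage sketch for set_data: reuse one object across inputs by swapping the data arguments instead of rebuilding it. The object, DataFrame, and column names below are hypothetical, and a live Vantage connection is required, so the sketch is left commented:

# sto = Script(...)                  # previously constructed object
# new_df = DataFrame("barrier_new")  # hypothetical input table
# sto.set_data(data=new_df,
#              data_partition_column="Id",
#              data_order_column="Name",
#              sort_ascending=False)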
+ def _execute(self, output_style='VIEW'):
445
+ """
446
+ Function to execute Table Operator queries.
447
+ Create DataFrames for the required Table Operator output.
448
+ """
449
+ table_type = TeradataConstants.TERADATA_VIEW
450
+ if output_style == OutputStyle.OUTPUT_TABLE.value:
451
+ table_type = TeradataConstants.TERADATA_TABLE
452
+
453
+ # Generate STDOUT table name and add it to the output table list.
454
+ tblop_stdout_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_tblop_out_",
455
+ use_default_database=True, gc_on_quit=True,
456
+ quote=False,
457
+ table_type=table_type
458
+ )
459
+
460
+ try:
461
+ if output_style == OutputStyle.OUTPUT_TABLE.value:
462
+ UtilFuncs._create_table(tblop_stdout_temp_tablename, self._tblop_query)
463
+ else:
464
+ UtilFuncs._create_view(tblop_stdout_temp_tablename, self._tblop_query)
465
+ except Exception as emsg:
466
+ raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_EXEC_SQL_FAILED, str(emsg)),
467
+ MessageCodes.TDMLDF_EXEC_SQL_FAILED)
468
+
469
+
470
+ self.result = self.__awu._create_data_set_object(
471
+ df_input=UtilFuncs._extract_table_name(tblop_stdout_temp_tablename), source_type="table",
472
+ database_name=UtilFuncs._extract_db_name(tblop_stdout_temp_tablename))
473
+
474
+ return self.result
475
+
476
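_execute materializes self._tblop_query as either a view or a table and wraps the result in a teradataml DataFrame. The two DDL shapes it chooses between look roughly like this; the query text and object name are illustrative, not what the generator emits verbatim:

# Sketch of the DDL _execute() issues, assuming self._tblop_query holds the
# table-operator SELECT. Real object names are generated, gc-registered temp names.
tblop_query = "SELECT * FROM SCRIPT(...)"  # placeholder query text
view_ddl = "CREATE VIEW td_tblop_out_1 AS {}".format(tblop_query)
table_ddl = "CREATE TABLE td_tblop_out_1 AS ({}) WITH DATA".format(tblop_query)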
+ def _returns_clause_validation(self):
477
+ """
478
+ DESCRIPTION:
479
+ Function validates 'returns' clause for a table operator query.
480
+
481
+ PARAMETERS:
482
+ None.
483
+
484
+ RETURNS:
485
+ None
486
+
487
+ RAISES:
488
+ Error if argument is not of valid datatype.
489
+
490
+ EXAMPLES:
491
+ self._returns_clause_validation()
492
+ """
493
+ # Validate keys and datatypes in returns.
494
+ if self.returns is not None:
495
+ awu_matrix_returns = []
496
+ for key in self.returns.keys():
497
+ awu_matrix_returns.append(["keys in returns", key, False, (str), True])
498
+ awu_matrix_returns.append(["values in returns", self.returns[key], False, self._supported_returns_datatypes])
499
+ _Validators._validate_function_arguments(awu_matrix_returns)
500
+
501
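The 'returns' dictionary validated above maps each output column name to a teradatasqlalchemy type, which is also how callers build it. A minimal well-formed example using types this module already imports:

from teradatasqlalchemy import VARCHAR, INTEGER

# Output column definition: column name -> teradatasqlalchemy type instance.
returns = {"word": VARCHAR(15), "count_input": INTEGER()}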
+
502
+ def test_script(self, supporting_files=None, input_data_file=None, script_args="",
503
+ exec_mode='local', **kwargs):
504
+ """
505
+ DESCRIPTION:
506
+ Function enables the user to run the script locally, outside
507
+ Vantage.
508
+ Input data for the user script is read from a file or a table.
509
+
510
+ PARAMETERS:
511
+ supporting_files:
512
+ Optional Argument.
513
+ Specifies a file or list of supporting files like model files to be
514
+ copied to the container.
515
+ Types: str OR list of Strings (str)
516
+
517
+ input_data_file:
518
+ Optional Argument.
519
+ Specifies the name of the input data file.
520
+ It should have a path relative to the location specified in
521
+ "files_local_path" argument.
522
+ If set to None, data is read from the table on Vantage, else from the file passed in the argument
523
+ 'input_data_file'.
524
+ File should have at least permissions of mode 644.
525
+ Types: str
526
+
527
+ script_args:
528
+ Optional Argument.
529
+ Specifies command line arguments required by the user script.
530
+ Types: str
531
+
532
+ exec_mode:
533
+ Optional Argument.
534
+ Specifies the mode in which the user wants to test the script.
535
+ If set to 'local', the user script runs locally on the user's system.
536
+ Permitted Values: 'local'
537
+ Default Value: 'local'
538
+ Types: str
539
+
540
+ kwargs:
541
+ Optional Argument.
542
+ Specifies the keyword arguments required for testing.
543
+ Keys can be:
544
+ data_row_limit:
545
+ Optional Argument. Ignored when data is read from file.
546
+ Specifies the number of rows to be taken from all AMPs when
547
+ reading from a table or view on Vantage.
548
+ Default Value: 1000
549
+ Types: int
550
+
551
+ password:
552
+ Optional Argument. Required when reading from database.
553
+ Specifies the password to connect to Vantage where the data
554
+ resides.
555
+ Types: str
556
+
557
+ data_file_delimiter:
558
+ Optional Argument.
559
+ Specifies the delimiter used in the input data file. This
560
+ argument can be specified when data is read from file.
561
+ Default Value: '\t'
562
+ Types: str
563
+
564
+ data_file_header:
565
+ Optional Argument.
566
+ Specifies whether the input data file contains header. This
567
+ argument can be specified when data is read from file.
568
+ Default Value: True
569
+ Types: bool
570
+
571
+ data_file_quote_char:
572
+ Optional Argument.
573
+ Specifies the quotechar used in the input data file.
574
+ This argument can be specified when data is read from file.
575
+ Default Value: '"'
576
+
577
+ logmech:
578
+ Optional Argument.
579
+ Specifies the type of logon mechanism to establish a connection to
580
+ Teradata Vantage.
581
+ Permitted Values: 'TD2', 'TDNEGO', 'LDAP', 'KRB5' & 'JWT'.
582
+ TD2:
583
+ The Teradata 2 (TD2) mechanism provides authentication
584
+ using a Vantage username and password. This is the default
585
+ logon mechanism using which the connection is established
586
+ to Vantage.
587
+
588
+ TDNEGO:
589
+ A security mechanism that automatically determines the
590
+ actual mechanism required, based on policy, without the user's
591
+ involvement. The actual mechanism is determined by the
592
+ TDGSS server configuration and by the security policy's
593
+ mechanism restrictions.
594
+
595
+ LDAP:
596
+ A directory-based user logon to Vantage with a directory
597
+ username and password, authenticated by the directory.
598
+
599
+ KRB5 (Kerberos):
600
+ A directory-based user logon to Vantage with a domain
601
+ username and password, authenticated by
602
+ Kerberos (KRB5 mechanism).
603
+ Note:
604
+ User must have a valid ticket-granting ticket in
605
+ order to use this logon mechanism.
606
+
607
+ JWT:
608
+ The JSON Web Token (JWT) authentication mechanism enables
609
+ single sign-on (SSO) to Vantage after the user
610
+ successfully authenticates to Teradata UDA User Service.
611
+ Note:
612
+ User must use logdata parameter when using 'JWT' as
613
+ the logon mechanism.
614
+ Default Value: TD2
615
+ Types: str
616
+
617
+ Note:
618
+ teradataml expects that the client environments are already set up with appropriate
619
+ security mechanisms and are in working condition.
620
+ For more information, please refer to Teradata Vantage™ - Advanced SQL Engine
621
+ Security Administration at https://www.info.teradata.com/
622
+
623
+ logdata:
624
+ Optional Argument.
625
+ Specifies parameters to the LOGMECH command beyond those needed by
626
+ the logon mechanism, such as user ID, password and tokens
627
+ (in case of JWT) to successfully authenticate the user.
628
+ Types: str
629
+
630
+ Types: dict
631
+
632
+ RETURNS:
633
+ Output from user script.
634
+
635
+ RAISES:
636
+ TeradataMlException
637
+
638
+ EXAMPLES:
639
+ # Assumption - sto is a Script() object. Please refer to help(Script)
640
+ # for creating Script object.
641
+
642
+ # Run user script in local mode with input from table.
643
+ >>> sto.test_script(data_row_limit=300, password='alice', exec_mode='local')
644
+
645
+ ############ STDOUT Output ############
646
+ word count_input
647
+ 0 1 1
648
+ 1 Old 1
649
+ 2 Macdonald 1
650
+ 3 Had 1
651
+ 4 A 1
652
+ 5 Farm 1
653
+
654
+ # Run user script in local mode with logmech as 'TD2'.
655
+ >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="TD2")
656
+
657
+ # Run user script in local mode with logmech as 'TDNEGO'.
658
+ >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="TDNEGO")
659
+
660
+ # Run user script in local mode with logmech as 'LDAP'.
661
+ >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="LDAP")
662
+
663
+ # Run user script in local mode with logmech as 'KRB5'.
664
+ >>> sto.test_script(script_args="4 5 10 6 480", password="alice", logmech="KRB5")
665
+
666
+ # Run user script in local mode with logmech as 'JWT'.
667
+ >>> sto.test_script(script_args="4 5 10 6 480", password="alice",
668
+ logmech='JWT', logdata='token=eyJpc...h8dA')
669
+
670
+ """
671
+ logmech_valid_values = ['TD2', 'TDNEGO', 'LDAP', 'KRB5', 'JWT']
672
+
673
+ awu_matrix_test = []
674
+ awu_matrix_test.append((["supporting_files", supporting_files, True,
675
+ (str, list), True]))
676
+ awu_matrix_test.append((["input_data_file", input_data_file, True, (str), True]))
677
+ awu_matrix_test.append((["script_args", script_args, True, (str), False]))
678
+ awu_matrix_test.append((["exec_mode", exec_mode, True, (str), True,
679
+ [TableOperatorConstants.LOCAL_EXEC.value]]))
680
+
681
+ data_row_limit = kwargs.pop("data_row_limit", 1000)
682
+ awu_matrix_test.append((["data_row_limit", data_row_limit, True, (int), True]))
683
+
684
+ data_file_delimiter = kwargs.pop("data_file_delimiter", '\t')
685
+ awu_matrix_test.append((["data_file_delimiter", data_file_delimiter, True,
686
+ (str), False]))
687
+
688
+ data_file_quote_char = kwargs.pop("data_file_quote_char", '"')
689
+ awu_matrix_test.append((["data_file_quote_char", data_file_quote_char, True,
690
+ (str), False]))
691
+
692
+ data_file_header = kwargs.pop("data_file_header", True)
693
+ awu_matrix_test.append((["data_file_header", data_file_header, True, (bool)]))
694
+
695
+ logmech = kwargs.pop("logmech", "TD2")
696
+ awu_matrix_test.append(
697
+ ["logmech", logmech, True, (str), True, logmech_valid_values])
698
+
699
+ logdata = kwargs.pop("logdata", None)
700
+ awu_matrix_test.append(["logdata", logdata, True, (str), True])
701
+
702
+ # Validate argument types.
703
+ _Validators._validate_function_arguments(awu_matrix_test)
704
+
705
+ self._validate()
706
+
707
+ if logmech == "JWT" and not logdata:
708
+ raise TeradataMlException(
709
+ Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING, 'logdata',
710
+ 'logmech=JWT'),
711
+ MessageCodes.DEPENDENT_ARG_MISSING)
712
+
713
+ if data_row_limit <= 0:
714
+ raise ValueError(Messages.get_message(MessageCodes.TDMLDF_POSITIVE_INT).
715
+ format("data_row_limit", "greater than"))
716
+
717
+ # Either of 'input_data_file' or 'password' argument is required.
718
+ password = kwargs.pop("password", None)
719
+
720
+ # When exec_mode is local, the connection object is used to get the values in the table.
721
+ if exec_mode == "local" and not (input_data_file or self.data):
722
+ message = Messages.get_message(MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT,
723
+ "input_data_file", "Script data")
724
+ raise TeradataMlException(message, MessageCodes.EITHER_THIS_OR_THAT_ARGUMENT)
725
+
726
+ if not (self.script_name and self.files_local_path):
727
+ message = Messages.get_message(MessageCodes.MISSING_ARGS,
728
+ "script_name and files_local_path")
729
+ raise TeradataMlException(message, MessageCodes.MISSING_ARGS)
730
+
731
+ if input_data_file:
732
+ if self.files_local_path is None:
733
+ message = Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING,
734
+ "files_local_path", "input_data_file")
735
+ raise TeradataMlException(message, MessageCodes.DEPENDENT_ARG_MISSING)
736
+ else:
737
+ # Check if file exists.
738
+ fpath = os.path.join(self.files_local_path,
739
+ input_data_file)
740
+ _Validators._validate_file_exists(fpath)
741
+
742
+ if self.script_name and self.files_local_path:
743
+ # Check if file exists.
744
+ fpath = os.path.join(self.files_local_path,
745
+ os.path.basename(self.script_name))
746
+ _Validators._validate_file_exists(fpath)
747
+
748
+ if exec_mode.upper() == TableOperatorConstants.LOCAL_EXEC.value:
749
+ user_script_path = os.path.join(self.files_local_path, self.script_name)
750
+ import sys
751
+ cmd = [str(sys.executable), user_script_path]
752
+ cmd.extend(script_args.split())
753
+
754
+ if input_data_file is not None:
755
+ input_file_path = os.path.join(self.files_local_path, input_data_file)
756
+
757
+ # Run user script locally with input from a file.
758
+ exec_cmd_output = self.__local_run_user_script_input_file(
759
+ cmd, input_file_path, data_file_delimiter, data_file_quote_char, data_file_header)
760
+ try:
761
+ return self.__process_test_script_output(exec_cmd_output)
762
+ except Exception as exp:
763
+ raise
764
+
765
+ else:
766
+ if self.data.shape[0] > data_row_limit:
767
+ raise ValueError(
768
+ Messages.get_message(MessageCodes.DATAFRAME_LIMIT_ERROR,
769
+ 'data_row_limit', 'data_row_limit',
770
+ data_row_limit))
771
+
772
+ if not self.data._table_name:
773
+ self.data._table_name = df_utils._execute_node_return_db_object_name(
774
+ self.data._nodeid, self.data._metaexpr)
775
+
776
+ table_name = UtilFuncs._extract_table_name(self.data._table_name)
777
+
778
+ # Run user script locally with input from db.
779
+ exec_cmd_output = self.__local_run_user_script_input_db(cmd, table_name)
780
+ try:
781
+ return self.__process_test_script_output(exec_cmd_output)
782
+ except Exception as exp:
783
+ raise
784
+
785
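Taken together, the local-execution branch stages input rows (from a file or from the database), runs the user script under the current Python interpreter, and parses its stdout into a pandas DataFrame. A hedged usage sketch, with hypothetical table, script, and column names; the keyword names mirror the validation code above, and exact signatures vary across teradataml versions:

```python
# Hypothetical usage of the local execution path above. The table name,
# script name, paths, and returns spec are illustrative assumptions.
from collections import OrderedDict
from teradatasqlalchemy import VARCHAR
from teradataml import DataFrame, Script

sto = Script(data=DataFrame("barrier"),
             script_name="mapper.py",
             files_local_path="/tmp/scripts",
             script_command="python3 ./mydb/mapper.py",
             delimiter="\t",
             returns=OrderedDict([("word", VARCHAR(32)),
                                  ("count_input", VARCHAR(10))]))

# Runs /tmp/scripts/mapper.py with sys.executable, feeding rows from
# input.csv on stdin, and returns the parsed output as a pandas DataFrame.
out_df = sto.test_script(exec_mode="local",
                         input_data_file="input.csv",
                         data_file_delimiter=",",
                         data_file_header=True)
print(out_df)
```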
+    def __local_run_user_script_input_file(self, cmd, input_file_path,
+                                           data_file_delimiter='\t',
+                                           data_file_quote_char='"',
+                                           data_file_header=True):
+        """
+        DESCRIPTION:
+            Function to run the user script in local mode with input from a file.
+
+        PARAMETERS:
+            cmd:
+                Required Argument.
+                Specifies the command for running the user script.
+                Types: list of str
+
+            input_file_path:
+                Required Argument.
+                Specifies the absolute local path of the input data file.
+                Types: str
+
+            data_file_delimiter:
+                Optional Argument.
+                Specifies the delimiter used in the input data file.
+                Default Value: '\t'
+                Types: str
+
+            data_file_quote_char:
+                Optional Argument.
+                Specifies the quote character used in the input data file.
+                Default Value: '"'
+                Types: str
+
+            data_file_header:
+                Optional Argument.
+                Specifies whether the input data file has a header row.
+                Default Value: True
+                Types: bool
+
+        RETURNS:
+            The string output of the command that is run on the input data file.
+
+        RAISES:
+            Exception.
+
+        EXAMPLES:
+            self.__local_run_user_script_input_file(cmd=cmd,
+                                                    input_file_path="input_file_path",
+                                                    data_file_delimiter="data_file_delimiter",
+                                                    data_file_quote_char="data_file_quote_char",
+                                                    data_file_header=True)
+        """
+        with open(input_file_path) as data_file:
+            import csv
+            from pandas import isna as pd_isna
+
+            data_handle = StringIO()
+
+            # Read data from the input file.
+            ip_data = csv.reader(data_file,
+                                 delimiter=data_file_delimiter,
+                                 quotechar=data_file_quote_char)
+            # Skip the first row of the input file if data_file_header is True.
+            if data_file_header:
+                next(ip_data)
+            for row in ip_data:
+                if self.quotechar is not None:
+                    # A NULL value should not be enclosed in quotes.
+                    # The csv module's writer has no support for such output,
+                    # hence the custom formatting.
+                    line = ['' if pd_isna(s) else "{}{}{}".format(self.quotechar,
+                                                                  str(s),
+                                                                  self.quotechar)
+                            for s in row]
+                else:
+                    line = ['' if pd_isna(s) else str(s) for s in row]
+
+                complete_line = self.delimiter.join(line)
+
+                data_handle.write(complete_line)
+                data_handle.write("\n")
+
+            return self.__run_user_script_subprocess(cmd, data_handle)
+
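The per-row formatting above is hand-rolled because csv.writer cannot emit quoted values and unquoted empty fields (for NULLs) in the same row. A standalone sketch of the same staging idea:

```python
from io import StringIO
from pandas import isna

# Quote every non-NULL value and emit NULLs as empty, unquoted fields,
# mirroring the custom staging loop above.
def stage_rows(rows, delimiter=",", quotechar='"'):
    handle = StringIO()
    for row in rows:
        line = ['' if isna(s) else "{}{}{}".format(quotechar, s, quotechar)
                for s in row]
        handle.write(delimiter.join(line))
        handle.write("\n")
    return handle

print(stage_rows([[1, None, "a,b"]]).getvalue())   # '"1",,"a,b"\n'
```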
+    def __run_user_script_subprocess(self, cmd, data_handle):
+        """
+        DESCRIPTION:
+            Function to run the user script in a new process and return the output.
+
+        PARAMETERS:
+            cmd:
+                Required Argument.
+                Specifies the command for running the script.
+                Types: list of str
+
+            data_handle:
+                Required Argument.
+                Specifies the data handle for the input data required by the user script.
+
+        RETURNS:
+            Output of the user script on the input data supplied in data_handle.
+
+        RAISES:
+            TeradataMlException.
+
+        EXAMPLES:
+            self.__run_user_script_subprocess(cmd=cmd,
+                                              data_handle=data_handle)
+        """
+        # Launch a new process to run the user script, feeding the staged
+        # input data to its stdin.
+        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE)
+        process_output, process_error = proc.communicate(data_handle.getvalue().encode())
+        data_handle.close()
+
+        if proc.returncode == 0:
+            return process_output.decode("utf-8").rstrip("\r\n")
+        else:
+            message = Messages.get_message(MessageCodes.SCRIPT_LOCAL_RUN_ERROR).\
+                format(process_error)
+            raise TeradataMlException(message, MessageCodes.SCRIPT_LOCAL_RUN_ERROR)
+
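The subprocess handling is a standard Popen/communicate round trip: encode the staged buffer, feed it to the child's stdin, and decode stdout on success. A minimal self-contained equivalent (the child command here is illustrative):

```python
import subprocess
import sys

# Feed staged text to a child process's stdin and capture its stdout,
# mirroring the Popen/communicate pattern above.
cmd = [sys.executable, "-c",
       "import sys; sys.stdout.write(sys.stdin.read().upper())"]
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate("hello\n".encode())
if proc.returncode == 0:
    print(out.decode("utf-8").rstrip("\r\n"))   # HELLO
else:
    raise RuntimeError(err.decode("utf-8"))
```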
+    def __process_test_script_output(self, exec_cmd_output):
+        """
+        DESCRIPTION:
+            Function to format the output of the user script.
+
+        PARAMETERS:
+            exec_cmd_output:
+                Required Argument.
+                Specifies the output returned by the user script.
+                Types: str
+
+        RETURNS:
+            The test script output as a pandas DataFrame.
+
+        RAISES:
+            Exception.
+
+        EXAMPLES:
+            self.__process_test_script_output(exec_cmd_output="exec_cmd_output")
+        """
+        kwargs = dict()
+        if self.quotechar is not None:
+            kwargs['quotechar'] = self.quotechar
+            kwargs['quoting'] = 1  # csv.QUOTE_ALL
+
+        output = StringIO(exec_cmd_output)
+
+        from pandas import read_csv as pd_read_csv
+
+        # Form a pandas DataFrame from the delimited script output.
+        df = pd_read_csv(output, sep=self.delimiter, index_col=False, header=None,
+                         names=list(self.returns.keys()), **kwargs)
+        return df
+
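Output parsing leans on pandas.read_csv over an in-memory buffer; when a quote character is configured, quoting=1 corresponds to csv.QUOTE_ALL, matching the quote-everything staging format. A standalone sketch with illustrative column names standing in for self.returns.keys():

```python
import csv
from io import StringIO
from pandas import read_csv

# Parse quote-everything, tab-delimited script output into a DataFrame,
# as __process_test_script_output does above.
exec_cmd_output = '"1"\t"alice"\n"2"\t"bob"'
df = read_csv(StringIO(exec_cmd_output), sep="\t", index_col=False, header=None,
              names=["id", "name"], quotechar='"', quoting=csv.QUOTE_ALL)
print(df)
```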
+    def __local_run_user_script_input_db(self, cmd, table_name):
+        """
+        DESCRIPTION:
+            Function to run the user script in local mode with input from the database.
+
+        PARAMETERS:
+            cmd:
+                Required Argument.
+                Specifies the command for running the user script.
+                Types: list of str
+
+            table_name:
+                Required Argument.
+                Specifies the table name for input to the user script.
+                Types: str
+
+        RETURNS:
+            The string output of the command that is run on the Vantage table.
+
+        RAISES:
+            Exception.
+
+        EXAMPLES:
+            self.__local_run_user_script_input_db(cmd=cmd, table_name="table_name")
+        """
+        db_data_handle = StringIO()
+        con = get_connection()
+        # Query for reading data from the database.
+        query = "SELECT * FROM {} ORDER BY 1;".format(table_name)
+        cur = execute_sql(query)
+        row = cur.fetchone()
+        from pandas import isna as pd_isna
+        while row:
+            if self.quotechar is not None:
+                # A NULL value should not be enclosed in quotes.
+                # The csv module's writer has no support for such output,
+                # hence the custom formatting.
+                line = ['' if pd_isna(s) else "{}{}{}".format(self.quotechar,
+                                                              str(s),
+                                                              self.quotechar)
+                        for s in row]
+            else:
+                line = ['' if pd_isna(s) else str(s) for s in row]
+
+            complete_line = self.delimiter.join(line)
+            db_data_handle.write(complete_line)
+            db_data_handle.write("\n")
+            row = cur.fetchone()
+
+        return self.__run_user_script_subprocess(cmd, db_data_handle)
+
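The database-fed path streams rows with fetchone() into the same kind of staging buffer. A hedged sketch of that loop over a generic DB-API cursor (cur is assumed to hold an executed SELECT):

```python
from io import StringIO

# Stream DB-API cursor rows into an in-memory buffer, one delimited line
# per row, as __local_run_user_script_input_db does; NULLs become empty
# fields.
def stage_cursor(cur, delimiter="\t"):
    handle = StringIO()
    row = cur.fetchone()
    while row:
        handle.write(delimiter.join('' if s is None else str(s) for s in row))
        handle.write("\n")
        row = cur.fetchone()
    return handle
```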
+    def __repr__(self):
+        """
+        Returns the string representation for the class instance.
+        """
+        if self.result is None:
+            repr_string = "Result is empty. Please run execute_script first."
+        else:
+            repr_string = "############ STDOUT Output ############"
+            repr_string = "{}\n\n{}".format(repr_string, self.result)
+        return repr_string
+
+    def deploy(self, model_column, partition_columns=None, model_file_prefix=None):
+        """
+        DESCRIPTION:
+            Function to deploy the model(s) generated by `execute_script()` to the
+            database, or to the user environment on the lake.
+
+        PARAMETERS:
+            model_column:
+                Required Argument.
+                Specifies the name of the column in which the model is present.
+                Supported types of model in this column are CLOB and BLOB.
+                Note:
+                    The column mentioned in this argument should be present in
+                    <apply_obj/script_obj>.result.
+                Types: str
+
+            partition_columns:
+                Optional Argument.
+                Specifies the columns on which the data is partitioned.
+                Note:
+                    The columns mentioned in this argument should be present in
+                    <apply_obj/script_obj>.result.
+                Types: str OR list of str
+
+            model_file_prefix:
+                Optional Argument.
+                Specifies the prefix to be used for the generated model file(s).
+                If this argument is None, the prefix is auto-generated.
+                If the column "model_column" contains multiple models and
+                * "partition_columns" is None - the model file prefix is appended
+                  with an underscore (_) and numbers starting from one (1) to get
+                  the model file names.
+                * "partition_columns" is NOT None - the model file prefix is
+                  appended with an underscore (_) and the unique values in
+                  "partition_columns" are joined with an underscore (_) to
+                  generate the model file names.
+                Types: str
+
+        RETURNS:
+            List of generated file names.
+
+        RAISES:
+            TeradataMlException
+
+        EXAMPLES:
+            >>> load_example_data("openml", "multi_model_classification")
+
+            >>> df = DataFrame("multi_model_classification")
+            >>> df
+                           col2      col3      col4  label  group_column  partition_column_1  partition_column_2
+            col1
+            -1.013454  0.855765 -0.256920 -0.085301      1             9                   0                  10
+            -3.146552 -1.805530 -0.071515 -2.093998      0            10                   0                  10
+            -1.175097 -0.950745  0.018280 -0.895335      1            10                   0                  11
+             0.218497 -0.968924  0.183037 -0.303142      0            11                   0                  11
+            -1.471908 -0.029195 -0.166141 -0.645309      1            11                   1                  10
+             1.082336  0.846357 -0.012063  0.812633      1            11                   1                  11
+            -1.132068 -1.209750  0.065422 -0.982986      0            10                   1                  10
+            -0.440339  2.290676 -0.423878  0.749467      1             8                   1                  10
+            -0.615226 -0.546472  0.017496 -0.488720      0            12                   0                  10
+             0.579671 -0.573365  0.160603  0.014404      0             9                   1                  10
+
+            # Install the script file.
+            >>> file_location = os.path.join(os.path.dirname(teradataml.__file__), "data", "scripts", "deploy_script.py")
+            >>> install_file("deploy_script", file_location, replace=True)
+
+            # Variables needed for Script execution.
+            >>> script_command = '/opt/teradata/languages/Python/bin/python3 ./ALICE/deploy_script.py'
+            >>> partition_columns = ["partition_column_1", "partition_column_2"]
+            >>> columns = ["col1", "col2", "col3", "col4", "label",
+                           "partition_column_1", "partition_column_2"]
+            >>> returns = OrderedDict([("partition_column_1", INTEGER()),
+                                       ("partition_column_2", INTEGER()),
+                                       ("model", CLOB())])
+
+            # Script execution.
+            >>> obj = Script(data=df.select(columns),
+                             script_command=script_command,
+                             data_partition_column=partition_columns,
+                             returns=returns)
+            >>> opt = obj.execute_script()
+            >>> opt
+            partition_column_1  partition_column_2                model
+                             0                  10  b'gAejc1.....drIr'
+                             0                  11  b'gANjcw.....qWIu'
+                             1                  10  b'abdwcd.....dWIz'
+                             1                  11  b'gA4jc4.....agfu'
+
+            # Example 1: Provide only the "partition_columns" argument. Here,
+            # "model_file_prefix" is auto-generated.
+            >>> obj.deploy(model_column="model",
+                           partition_columns=["partition_column_1", "partition_column_2"])
+            ['model_file_1710436227163427__0_10',
+             'model_file_1710436227163427__1_10',
+             'model_file_1710436227163427__0_11',
+             'model_file_1710436227163427__1_11']
+
+            # Example 2: Provide only the "model_file_prefix" argument. Here, file names
+            # are suffixed with 1, 2, 3, ... for multiple models.
+            >>> obj.deploy(model_column="model", model_file_prefix="my_prefix_new_")
+            ['my_prefix_new__1',
+             'my_prefix_new__2',
+             'my_prefix_new__3',
+             'my_prefix_new__4']
+
+            # Example 3: Without both the "partition_columns" and "model_file_prefix" arguments.
+            >>> obj.deploy(model_column="model")
+            ['model_file_1710438346528596__1',
+             'model_file_1710438346528596__2',
+             'model_file_1710438346528596__3',
+             'model_file_1710438346528596__4']
+
+            # Example 4: Provide both the "partition_columns" and "model_file_prefix" arguments.
+            >>> obj.deploy(model_column="model", model_file_prefix="my_prefix_new_",
+                           partition_columns=["partition_column_1", "partition_column_2"])
+            ['my_prefix_new__0_10',
+             'my_prefix_new__0_11',
+             'my_prefix_new__1_10',
+             'my_prefix_new__1_11']
+        """
+
+        arg_info_matrix = []
+        arg_info_matrix.append(["model_column", model_column, False, (str)])
+        arg_info_matrix.append(["partition_columns", partition_columns, True, (str, list)])
+        arg_info_matrix.append(["model_file_prefix", model_file_prefix, True, (str)])
+        _Validators._validate_function_arguments(arg_info_matrix)
+
+        if self.result is None:
+            return "Result is empty. Please run execute_script first."
+
+        if partition_columns is None:
+            partition_columns = []
+        partition_columns = UtilFuncs._as_list(partition_columns)
+
+        req_columns = [model_column] + partition_columns
+
+        _Validators._validate_column_exists_in_dataframe(columns=req_columns,
+                                                         metaexpr=self.result._metaexpr)
+
+        data = self.result.select(req_columns)
+        # Without this, the first column, i.e. the model column, would become the index column.
+        data._index_column = None
+
+        if model_file_prefix is None:
+            timestamp = time.time()
+            tmp = "{}{}".format(floor(timestamp / 1000000),
+                                floor(timestamp % 1000000 * 1000000 +
+                                      int(str(uuid.uuid4().fields[-1])[:10])))
+            model_file_prefix = f"model_file_{tmp}_"
+
+        vals = data.get_values()
+
+        model_column_type = data._td_column_names_and_sqlalchemy_types[
+            model_column.lower()].__class__.__name__
+
+        n_models = len(vals)
+        all_files = []
+
+        for i, row in enumerate(vals):
+            model = row[0]
+            partition_values = ""
+            if partition_columns:
+                partition_values = "_".join([str(x) for x in row[1:]])
+            elif n_models > 1:
+                partition_values = str(i + 1)
+
+            model_file = f"{model_file_prefix}_{partition_values}"
+            model_file_path = os.path.join(os.path.expanduser("~"), ".teradataml", model_file)
+
+            if model_column_type == "CLOB":
+                import base64
+                model = base64.b64decode(model.partition("'")[2])
+            elif model_column_type == "BLOB":
+                # No operation needed.
+                # Apply model training returns the BLOB type.
+                pass
+            else:
+                raise ValueError(f"Model column type {model_column_type} is not supported.")
+
+            with open(model_file_path, "wb") as f:
+                f.write(model)
+
+            if self.__class__.__name__ == "Script":
+                from teradataml import install_file
+                install_file(file_identifier=model_file, file_path=model_file_path,
+                             is_binary=True, suppress_output=True)
+            elif self.__class__.__name__ == "Apply":
+                self.env.install_file(file_name=model_file_path)
+
+            all_files.append(model_file)
+
+            os.remove(model_file_path)
+
+        return all_files
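One detail of deploy() worth calling out: a CLOB model column arrives as text whose payload is base64-encoded (hence the partition("'")[2] slice before decoding), while a BLOB is already raw bytes. A sketch of just the CLOB decode step, with a fabricated literal:

```python
import base64

# A CLOB model value arrives as text like "b'<base64-payload>'"; take the
# text after the first quote and base64-decode it, as deploy() does above.
# The literal below is fabricated for illustration.
clob_value = "b'" + base64.b64encode(b"model-bytes").decode() + "'"
payload = clob_value.partition("'")[2]      # "bW9kZWwtYnl0ZXM='"
model_bytes = base64.b64decode(payload)     # trailing "'" is ignored by default
assert model_bytes == b"model-bytes"
```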