teradataml 17.20.0.7__py3-none-any.whl → 20.0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of teradataml might be problematic.

Files changed (1303)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +1935 -1640
  4. teradataml/__init__.py +70 -60
  5. teradataml/_version.py +11 -11
  6. teradataml/analytics/Transformations.py +2995 -2995
  7. teradataml/analytics/__init__.py +81 -83
  8. teradataml/analytics/analytic_function_executor.py +2040 -2010
  9. teradataml/analytics/analytic_query_generator.py +958 -958
  10. teradataml/analytics/byom/H2OPredict.py +514 -514
  11. teradataml/analytics/byom/PMMLPredict.py +437 -437
  12. teradataml/analytics/byom/__init__.py +14 -14
  13. teradataml/analytics/json_parser/__init__.py +130 -130
  14. teradataml/analytics/json_parser/analytic_functions_argument.py +1707 -1707
  15. teradataml/analytics/json_parser/json_store.py +191 -191
  16. teradataml/analytics/json_parser/metadata.py +1637 -1637
  17. teradataml/analytics/json_parser/utils.py +798 -803
  18. teradataml/analytics/meta_class.py +196 -196
  19. teradataml/analytics/sqle/DecisionTreePredict.py +455 -470
  20. teradataml/analytics/sqle/NaiveBayesPredict.py +419 -428
  21. teradataml/analytics/sqle/__init__.py +97 -110
  22. teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -78
  23. teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -62
  24. teradataml/analytics/table_operator/__init__.py +10 -10
  25. teradataml/analytics/uaf/__init__.py +63 -63
  26. teradataml/analytics/utils.py +693 -692
  27. teradataml/analytics/valib.py +1603 -1600
  28. teradataml/automl/__init__.py +1683 -0
  29. teradataml/automl/custom_json_utils.py +1270 -0
  30. teradataml/automl/data_preparation.py +1011 -0
  31. teradataml/automl/data_transformation.py +789 -0
  32. teradataml/automl/feature_engineering.py +1580 -0
  33. teradataml/automl/feature_exploration.py +554 -0
  34. teradataml/automl/model_evaluation.py +151 -0
  35. teradataml/automl/model_training.py +1026 -0
  36. teradataml/catalog/__init__.py +1 -3
  37. teradataml/catalog/byom.py +1759 -1716
  38. teradataml/catalog/function_argument_mapper.py +859 -861
  39. teradataml/catalog/model_cataloging_utils.py +491 -1510
  40. teradataml/clients/auth_client.py +133 -0
  41. teradataml/clients/pkce_client.py +481 -481
  42. teradataml/common/aed_utils.py +7 -2
  43. teradataml/common/bulk_exposed_utils.py +111 -111
  44. teradataml/common/constants.py +1438 -1441
  45. teradataml/common/deprecations.py +160 -0
  46. teradataml/common/exceptions.py +73 -73
  47. teradataml/common/formula.py +742 -742
  48. teradataml/common/garbagecollector.py +597 -635
  49. teradataml/common/messagecodes.py +424 -431
  50. teradataml/common/messages.py +228 -231
  51. teradataml/common/sqlbundle.py +693 -693
  52. teradataml/common/td_coltype_code_to_tdtype.py +48 -48
  53. teradataml/common/utils.py +2424 -2500
  54. teradataml/common/warnings.py +25 -25
  55. teradataml/common/wrapper_utils.py +1 -110
  56. teradataml/config/dummy_file1.cfg +4 -4
  57. teradataml/config/dummy_file2.cfg +2 -2
  58. teradataml/config/sqlengine_alias_definitions_v1.0 +13 -13
  59. teradataml/config/sqlengine_alias_definitions_v1.1 +19 -19
  60. teradataml/config/sqlengine_alias_definitions_v1.3 +18 -18
  61. teradataml/context/aed_context.py +217 -217
  62. teradataml/context/context.py +1091 -999
  63. teradataml/data/A_loan.csv +19 -19
  64. teradataml/data/BINARY_REALS_LEFT.csv +11 -11
  65. teradataml/data/BINARY_REALS_RIGHT.csv +11 -11
  66. teradataml/data/B_loan.csv +49 -49
  67. teradataml/data/BuoyData2.csv +17 -17
  68. teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -5
  69. teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -5
  70. teradataml/data/Convolve2RealsLeft.csv +5 -5
  71. teradataml/data/Convolve2RealsRight.csv +5 -5
  72. teradataml/data/Convolve2ValidLeft.csv +11 -11
  73. teradataml/data/Convolve2ValidRight.csv +11 -11
  74. teradataml/data/DFFTConv_Real_8_8.csv +65 -65
  75. teradataml/data/Orders1_12mf.csv +24 -24
  76. teradataml/data/Pi_loan.csv +7 -7
  77. teradataml/data/SMOOTHED_DATA.csv +7 -7
  78. teradataml/data/TestDFFT8.csv +9 -9
  79. teradataml/data/TestRiver.csv +109 -109
  80. teradataml/data/Traindata.csv +28 -28
  81. teradataml/data/acf.csv +17 -17
  82. teradataml/data/adaboost_example.json +34 -34
  83. teradataml/data/adaboostpredict_example.json +24 -24
  84. teradataml/data/additional_table.csv +10 -10
  85. teradataml/data/admissions_test.csv +21 -21
  86. teradataml/data/admissions_train.csv +41 -41
  87. teradataml/data/admissions_train_nulls.csv +41 -41
  88. teradataml/data/advertising.csv +201 -0
  89. teradataml/data/ageandheight.csv +13 -13
  90. teradataml/data/ageandpressure.csv +31 -31
  91. teradataml/data/antiselect_example.json +36 -36
  92. teradataml/data/antiselect_input.csv +8 -8
  93. teradataml/data/antiselect_input_mixed_case.csv +8 -8
  94. teradataml/data/applicant_external.csv +6 -6
  95. teradataml/data/applicant_reference.csv +6 -6
  96. teradataml/data/arima_example.json +9 -9
  97. teradataml/data/assortedtext_input.csv +8 -8
  98. teradataml/data/attribution_example.json +33 -33
  99. teradataml/data/attribution_sample_table.csv +27 -27
  100. teradataml/data/attribution_sample_table1.csv +6 -6
  101. teradataml/data/attribution_sample_table2.csv +11 -11
  102. teradataml/data/bank_churn.csv +10001 -0
  103. teradataml/data/bank_marketing.csv +11163 -0
  104. teradataml/data/bank_web_clicks1.csv +42 -42
  105. teradataml/data/bank_web_clicks2.csv +91 -91
  106. teradataml/data/bank_web_url.csv +85 -85
  107. teradataml/data/barrier.csv +2 -2
  108. teradataml/data/barrier_new.csv +3 -3
  109. teradataml/data/betweenness_example.json +13 -13
  110. teradataml/data/bike_sharing.csv +732 -0
  111. teradataml/data/bin_breaks.csv +8 -8
  112. teradataml/data/bin_fit_ip.csv +3 -3
  113. teradataml/data/binary_complex_left.csv +11 -11
  114. teradataml/data/binary_complex_right.csv +11 -11
  115. teradataml/data/binary_matrix_complex_left.csv +21 -21
  116. teradataml/data/binary_matrix_complex_right.csv +21 -21
  117. teradataml/data/binary_matrix_real_left.csv +21 -21
  118. teradataml/data/binary_matrix_real_right.csv +21 -21
  119. teradataml/data/blood2ageandweight.csv +26 -26
  120. teradataml/data/bmi.csv +501 -0
  121. teradataml/data/boston.csv +507 -507
  122. teradataml/data/boston2cols.csv +721 -0
  123. teradataml/data/breast_cancer.csv +570 -0
  124. teradataml/data/buoydata_mix.csv +11 -11
  125. teradataml/data/burst_data.csv +5 -5
  126. teradataml/data/burst_example.json +20 -20
  127. teradataml/data/byom_example.json +17 -17
  128. teradataml/data/bytes_table.csv +3 -3
  129. teradataml/data/cal_housing_ex_raw.csv +70 -70
  130. teradataml/data/callers.csv +7 -7
  131. teradataml/data/calls.csv +10 -10
  132. teradataml/data/cars_hist.csv +33 -33
  133. teradataml/data/cat_table.csv +24 -24
  134. teradataml/data/ccm_example.json +31 -31
  135. teradataml/data/ccm_input.csv +91 -91
  136. teradataml/data/ccm_input2.csv +13 -13
  137. teradataml/data/ccmexample.csv +101 -101
  138. teradataml/data/ccmprepare_example.json +8 -8
  139. teradataml/data/ccmprepare_input.csv +91 -91
  140. teradataml/data/cfilter_example.json +12 -12
  141. teradataml/data/changepointdetection_example.json +18 -18
  142. teradataml/data/changepointdetectionrt_example.json +8 -8
  143. teradataml/data/chi_sq.csv +2 -2
  144. teradataml/data/churn_data.csv +14 -14
  145. teradataml/data/churn_emission.csv +35 -35
  146. teradataml/data/churn_initial.csv +3 -3
  147. teradataml/data/churn_state_transition.csv +5 -5
  148. teradataml/data/citedges_2.csv +745 -745
  149. teradataml/data/citvertices_2.csv +1210 -1210
  150. teradataml/data/clicks2.csv +16 -16
  151. teradataml/data/clickstream.csv +12 -12
  152. teradataml/data/clickstream1.csv +11 -11
  153. teradataml/data/closeness_example.json +15 -15
  154. teradataml/data/complaints.csv +21 -21
  155. teradataml/data/complaints_mini.csv +3 -3
  156. teradataml/data/complaints_testtoken.csv +224 -224
  157. teradataml/data/complaints_tokens_test.csv +353 -353
  158. teradataml/data/complaints_traintoken.csv +472 -472
  159. teradataml/data/computers_category.csv +1001 -1001
  160. teradataml/data/computers_test1.csv +1252 -1252
  161. teradataml/data/computers_train1.csv +5009 -5009
  162. teradataml/data/computers_train1_clustered.csv +5009 -5009
  163. teradataml/data/confusionmatrix_example.json +9 -9
  164. teradataml/data/conversion_event_table.csv +3 -3
  165. teradataml/data/corr_input.csv +17 -17
  166. teradataml/data/correlation_example.json +11 -11
  167. teradataml/data/coxhazardratio_example.json +39 -39
  168. teradataml/data/coxph_example.json +15 -15
  169. teradataml/data/coxsurvival_example.json +28 -28
  170. teradataml/data/cpt.csv +41 -41
  171. teradataml/data/credit_ex_merged.csv +45 -45
  172. teradataml/data/customer_loyalty.csv +301 -301
  173. teradataml/data/customer_loyalty_newseq.csv +31 -31
  174. teradataml/data/customer_segmentation_test.csv +2628 -0
  175. teradataml/data/customer_segmentation_train.csv +8069 -0
  176. teradataml/data/dataframe_example.json +146 -146
  177. teradataml/data/decisionforest_example.json +37 -37
  178. teradataml/data/decisionforestpredict_example.json +38 -38
  179. teradataml/data/decisiontree_example.json +21 -21
  180. teradataml/data/decisiontreepredict_example.json +45 -45
  181. teradataml/data/dfft2_size4_real.csv +17 -17
  182. teradataml/data/dfft2_test_matrix16.csv +17 -17
  183. teradataml/data/dfft2conv_real_4_4.csv +65 -65
  184. teradataml/data/diabetes.csv +443 -443
  185. teradataml/data/diabetes_test.csv +89 -89
  186. teradataml/data/dict_table.csv +5 -5
  187. teradataml/data/docperterm_table.csv +4 -4
  188. teradataml/data/docs/__init__.py +1 -1
  189. teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -180
  190. teradataml/data/docs/byom/docs/DataikuPredict.py +177 -177
  191. teradataml/data/docs/byom/docs/H2OPredict.py +324 -324
  192. teradataml/data/docs/byom/docs/ONNXPredict.py +283 -283
  193. teradataml/data/docs/byom/docs/PMMLPredict.py +277 -277
  194. teradataml/data/docs/sqle/docs_17_10/Antiselect.py +82 -82
  195. teradataml/data/docs/sqle/docs_17_10/Attribution.py +199 -199
  196. teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +171 -171
  197. teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -130
  198. teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -86
  199. teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -90
  200. teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +85 -85
  201. teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +95 -95
  202. teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -139
  203. teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +151 -151
  204. teradataml/data/docs/sqle/docs_17_10/FTest.py +160 -160
  205. teradataml/data/docs/sqle/docs_17_10/FillRowId.py +82 -82
  206. teradataml/data/docs/sqle/docs_17_10/Fit.py +87 -87
  207. teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -144
  208. teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +84 -84
  209. teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +81 -81
  210. teradataml/data/docs/sqle/docs_17_10/Histogram.py +164 -164
  211. teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -134
  212. teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +208 -208
  213. teradataml/data/docs/sqle/docs_17_10/NPath.py +265 -265
  214. teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -116
  215. teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -176
  216. teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -147
  217. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +134 -132
  218. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -103
  219. teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +165 -165
  220. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -101
  221. teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -128
  222. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +111 -111
  223. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -102
  224. teradataml/data/docs/sqle/docs_17_10/QQNorm.py +104 -104
  225. teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +109 -109
  226. teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +117 -117
  227. teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -98
  228. teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +152 -152
  229. teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -197
  230. teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -98
  231. teradataml/data/docs/sqle/docs_17_10/Sessionize.py +113 -113
  232. teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -116
  233. teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -98
  234. teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -187
  235. teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +145 -145
  236. teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -104
  237. teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +141 -141
  238. teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -214
  239. teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -83
  240. teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -83
  241. teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -155
  242. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -126
  243. teradataml/data/docs/sqle/docs_17_20/Antiselect.py +82 -82
  244. teradataml/data/docs/sqle/docs_17_20/Attribution.py +200 -200
  245. teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +171 -171
  246. teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -138
  247. teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -86
  248. teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -90
  249. teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -166
  250. teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +85 -85
  251. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +245 -243
  252. teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -113
  253. teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +279 -279
  254. teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -144
  255. teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +135 -135
  256. teradataml/data/docs/sqle/docs_17_20/FTest.py +239 -160
  257. teradataml/data/docs/sqle/docs_17_20/FillRowId.py +82 -82
  258. teradataml/data/docs/sqle/docs_17_20/Fit.py +87 -87
  259. teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -380
  260. teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +414 -414
  261. teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -144
  262. teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -234
  263. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -123
  264. teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +108 -108
  265. teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +105 -105
  266. teradataml/data/docs/sqle/docs_17_20/Histogram.py +223 -223
  267. teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -204
  268. teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -143
  269. teradataml/data/docs/sqle/docs_17_20/KNN.py +214 -214
  270. teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -134
  271. teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +208 -208
  272. teradataml/data/docs/sqle/docs_17_20/NPath.py +265 -265
  273. teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -116
  274. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -176
  275. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +126 -126
  276. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +118 -117
  277. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -112
  278. teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -147
  279. teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -307
  280. teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -184
  281. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +230 -225
  282. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -115
  283. teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +219 -219
  284. teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -127
  285. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +189 -189
  286. teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -112
  287. teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -128
  288. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +111 -111
  289. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -111
  290. teradataml/data/docs/sqle/docs_17_20/QQNorm.py +104 -104
  291. teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -163
  292. teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +154 -154
  293. teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -106
  294. teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -120
  295. teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -211
  296. teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +108 -108
  297. teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +117 -117
  298. teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -110
  299. teradataml/data/docs/sqle/docs_17_20/SVM.py +413 -413
  300. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -202
  301. teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +152 -152
  302. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -197
  303. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -109
  304. teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -206
  305. teradataml/data/docs/sqle/docs_17_20/Sessionize.py +113 -113
  306. teradataml/data/docs/sqle/docs_17_20/Silhouette.py +152 -152
  307. teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -116
  308. teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -108
  309. teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -187
  310. teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +145 -145
  311. teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -207
  312. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -171
  313. teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +266 -266
  314. teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -140
  315. teradataml/data/docs/sqle/docs_17_20/TextParser.py +172 -172
  316. teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +159 -159
  317. teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -123
  318. teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +141 -141
  319. teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -214
  320. teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +168 -168
  321. teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -83
  322. teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -83
  323. teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +236 -236
  324. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +361 -353
  325. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -275
  326. teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -155
  327. teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +429 -429
  328. teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +429 -429
  329. teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +347 -347
  330. teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +428 -428
  331. teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +347 -347
  332. teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +439 -439
  333. teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +386 -386
  334. teradataml/data/docs/uaf/docs_17_20/ACF.py +195 -195
  335. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +369 -369
  336. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +142 -142
  337. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +159 -159
  338. teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +247 -247
  339. teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -252
  340. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +177 -177
  341. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +174 -174
  342. teradataml/data/docs/uaf/docs_17_20/Convolve.py +226 -226
  343. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +214 -214
  344. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +183 -183
  345. teradataml/data/docs/uaf/docs_17_20/DFFT.py +203 -203
  346. teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -216
  347. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +215 -215
  348. teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +191 -191
  349. teradataml/data/docs/uaf/docs_17_20/DTW.py +179 -179
  350. teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +144 -144
  351. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +183 -183
  352. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +184 -184
  353. teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -172
  354. teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +205 -205
  355. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +142 -142
  356. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +258 -258
  357. teradataml/data/docs/uaf/docs_17_20/IDFFT.py +164 -164
  358. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +198 -198
  359. teradataml/data/docs/uaf/docs_17_20/InputValidator.py +120 -120
  360. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +155 -155
  361. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +214 -214
  362. teradataml/data/docs/uaf/docs_17_20/MAMean.py +173 -173
  363. teradataml/data/docs/uaf/docs_17_20/MInfo.py +133 -133
  364. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +135 -135
  365. teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +190 -190
  366. teradataml/data/docs/uaf/docs_17_20/PACF.py +158 -158
  367. teradataml/data/docs/uaf/docs_17_20/Portman.py +216 -216
  368. teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +154 -154
  369. teradataml/data/docs/uaf/docs_17_20/Resample.py +228 -228
  370. teradataml/data/docs/uaf/docs_17_20/SInfo.py +122 -122
  371. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +165 -165
  372. teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +173 -173
  373. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +170 -170
  374. teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +163 -163
  375. teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +179 -179
  376. teradataml/data/docs/uaf/docs_17_20/Smoothma.py +207 -207
  377. teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +150 -150
  378. teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -171
  379. teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +201 -201
  380. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +169 -169
  381. teradataml/data/dtw_example.json +17 -17
  382. teradataml/data/dtw_t1.csv +11 -11
  383. teradataml/data/dtw_t2.csv +4 -4
  384. teradataml/data/dwt2d_example.json +15 -15
  385. teradataml/data/dwt_example.json +14 -14
  386. teradataml/data/dwt_filter_dim.csv +5 -5
  387. teradataml/data/emission.csv +9 -9
  388. teradataml/data/emp_table_by_dept.csv +19 -19
  389. teradataml/data/employee_info.csv +4 -4
  390. teradataml/data/employee_table.csv +6 -6
  391. teradataml/data/excluding_event_table.csv +2 -2
  392. teradataml/data/finance_data.csv +6 -6
  393. teradataml/data/finance_data2.csv +61 -61
  394. teradataml/data/finance_data3.csv +93 -93
  395. teradataml/data/fish.csv +160 -0
  396. teradataml/data/fm_blood2ageandweight.csv +26 -26
  397. teradataml/data/fmeasure_example.json +11 -11
  398. teradataml/data/followers_leaders.csv +10 -10
  399. teradataml/data/fpgrowth_example.json +12 -12
  400. teradataml/data/frequentpaths_example.json +29 -29
  401. teradataml/data/friends.csv +9 -9
  402. teradataml/data/fs_input.csv +33 -33
  403. teradataml/data/fs_input1.csv +33 -33
  404. teradataml/data/genData.csv +513 -513
  405. teradataml/data/geodataframe_example.json +39 -39
  406. teradataml/data/glass_types.csv +215 -0
  407. teradataml/data/glm_admissions_model.csv +12 -12
  408. teradataml/data/glm_example.json +56 -29
  409. teradataml/data/glml1l2_example.json +28 -28
  410. teradataml/data/glml1l2predict_example.json +54 -54
  411. teradataml/data/glmpredict_example.json +54 -54
  412. teradataml/data/gq_t1.csv +21 -21
  413. teradataml/data/hconvolve_complex_right.csv +5 -5
  414. teradataml/data/hconvolve_complex_rightmulti.csv +5 -5
  415. teradataml/data/histogram_example.json +11 -11
  416. teradataml/data/hmmdecoder_example.json +78 -78
  417. teradataml/data/hmmevaluator_example.json +24 -24
  418. teradataml/data/hmmsupervised_example.json +10 -10
  419. teradataml/data/hmmunsupervised_example.json +7 -7
  420. teradataml/data/house_values.csv +12 -12
  421. teradataml/data/house_values2.csv +13 -13
  422. teradataml/data/housing_cat.csv +7 -7
  423. teradataml/data/housing_data.csv +9 -9
  424. teradataml/data/housing_test.csv +47 -47
  425. teradataml/data/housing_test_binary.csv +47 -47
  426. teradataml/data/housing_train.csv +493 -493
  427. teradataml/data/housing_train_attribute.csv +4 -4
  428. teradataml/data/housing_train_binary.csv +437 -437
  429. teradataml/data/housing_train_parameter.csv +2 -2
  430. teradataml/data/housing_train_response.csv +493 -493
  431. teradataml/data/housing_train_segment.csv +201 -0
  432. teradataml/data/ibm_stock.csv +370 -370
  433. teradataml/data/ibm_stock1.csv +370 -370
  434. teradataml/data/identitymatch_example.json +21 -21
  435. teradataml/data/idf_table.csv +4 -4
  436. teradataml/data/impressions.csv +101 -101
  437. teradataml/data/inflation.csv +21 -21
  438. teradataml/data/initial.csv +3 -3
  439. teradataml/data/insect2Cols.csv +61 -0
  440. teradataml/data/insect_sprays.csv +12 -12
  441. teradataml/data/insurance.csv +1339 -1339
  442. teradataml/data/interpolator_example.json +12 -12
  443. teradataml/data/iris_altinput.csv +481 -481
  444. teradataml/data/iris_attribute_output.csv +8 -8
  445. teradataml/data/iris_attribute_test.csv +121 -121
  446. teradataml/data/iris_attribute_train.csv +481 -481
  447. teradataml/data/iris_category_expect_predict.csv +31 -31
  448. teradataml/data/iris_data.csv +151 -0
  449. teradataml/data/iris_input.csv +151 -151
  450. teradataml/data/iris_response_train.csv +121 -121
  451. teradataml/data/iris_test.csv +31 -31
  452. teradataml/data/iris_train.csv +121 -121
  453. teradataml/data/join_table1.csv +4 -4
  454. teradataml/data/join_table2.csv +4 -4
  455. teradataml/data/jsons/anly_function_name.json +6 -6
  456. teradataml/data/jsons/byom/dataikupredict.json +147 -147
  457. teradataml/data/jsons/byom/datarobotpredict.json +146 -146
  458. teradataml/data/jsons/byom/h2opredict.json +194 -194
  459. teradataml/data/jsons/byom/onnxpredict.json +186 -186
  460. teradataml/data/jsons/byom/pmmlpredict.json +146 -146
  461. teradataml/data/jsons/paired_functions.json +435 -435
  462. teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -56
  463. teradataml/data/jsons/sqle/16.20/Attribution.json +249 -249
  464. teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -156
  465. teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -170
  466. teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -122
  467. teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -367
  468. teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -239
  469. teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -136
  470. teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -235
  471. teradataml/data/jsons/sqle/16.20/Pack.json +98 -98
  472. teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -162
  473. teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -105
  474. teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -86
  475. teradataml/data/jsons/sqle/16.20/Unpack.json +166 -166
  476. teradataml/data/jsons/sqle/16.20/nPath.json +269 -269
  477. teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -56
  478. teradataml/data/jsons/sqle/17.00/Attribution.json +249 -249
  479. teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -156
  480. teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -170
  481. teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -122
  482. teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -367
  483. teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -239
  484. teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -136
  485. teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -235
  486. teradataml/data/jsons/sqle/17.00/Pack.json +98 -98
  487. teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -162
  488. teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -105
  489. teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -86
  490. teradataml/data/jsons/sqle/17.00/Unpack.json +166 -166
  491. teradataml/data/jsons/sqle/17.00/nPath.json +269 -269
  492. teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -56
  493. teradataml/data/jsons/sqle/17.05/Attribution.json +249 -249
  494. teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -156
  495. teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -170
  496. teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -122
  497. teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -367
  498. teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -239
  499. teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -136
  500. teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -235
  501. teradataml/data/jsons/sqle/17.05/Pack.json +98 -98
  502. teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -162
  503. teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -105
  504. teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -86
  505. teradataml/data/jsons/sqle/17.05/Unpack.json +166 -166
  506. teradataml/data/jsons/sqle/17.05/nPath.json +269 -269
  507. teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -56
  508. teradataml/data/jsons/sqle/17.10/Attribution.json +249 -249
  509. teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -185
  510. teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +171 -171
  511. teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -151
  512. teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -368
  513. teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -239
  514. teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -149
  515. teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -288
  516. teradataml/data/jsons/sqle/17.10/Pack.json +133 -133
  517. teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -193
  518. teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -105
  519. teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -86
  520. teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -239
  521. teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -70
  522. teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +53 -53
  523. teradataml/data/jsons/sqle/17.10/TD_Chisq.json +67 -67
  524. teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +53 -53
  525. teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +68 -68
  526. teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -187
  527. teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +51 -51
  528. teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -46
  529. teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -71
  530. teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +52 -52
  531. teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +52 -52
  532. teradataml/data/jsons/sqle/17.10/TD_Histogram.json +132 -132
  533. teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -147
  534. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +182 -182
  535. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +65 -64
  536. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +196 -196
  537. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -47
  538. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -114
  539. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -71
  540. teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +111 -111
  541. teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -93
  542. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +127 -127
  543. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +70 -69
  544. teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +156 -156
  545. teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +70 -69
  546. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +147 -147
  547. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -47
  548. teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -240
  549. teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +118 -118
  550. teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +52 -52
  551. teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +52 -52
  552. teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -171
  553. teradataml/data/jsons/sqle/17.10/Unpack.json +188 -188
  554. teradataml/data/jsons/sqle/17.10/nPath.json +269 -269
  555. teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -56
  556. teradataml/data/jsons/sqle/17.20/Attribution.json +249 -249
  557. teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -185
  558. teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -172
  559. teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -151
  560. teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -367
  561. teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -239
  562. teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -149
  563. teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -287
  564. teradataml/data/jsons/sqle/17.20/Pack.json +133 -133
  565. teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -192
  566. teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -105
  567. teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -86
  568. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +148 -76
  569. teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -239
  570. teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -71
  571. teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -53
  572. teradataml/data/jsons/sqle/17.20/TD_Chisq.json +67 -67
  573. teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +145 -145
  574. teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -53
  575. teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -218
  576. teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -92
  577. teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +259 -259
  578. teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -139
  579. teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -186
  580. teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -52
  581. teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -46
  582. teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -72
  583. teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -431
  584. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -125
  585. teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -411
  586. teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -146
  587. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -91
  588. teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -76
  589. teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -76
  590. teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -152
  591. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +231 -211
  592. teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +86 -86
  593. teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -262
  594. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -137
  595. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -101
  596. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -71
  597. teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -147
  598. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +315 -315
  599. teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +123 -123
  600. teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -271
  601. teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -65
  602. teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -229
  603. teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -75
  604. teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -217
  605. teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -48
  606. teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -114
  607. teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -72
  608. teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -111
  609. teradataml/data/jsons/sqle/17.20/TD_ROC.json +178 -177
  610. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +178 -178
  611. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +73 -73
  612. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -74
  613. teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +137 -137
  614. teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -93
  615. teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +127 -127
  616. teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +70 -70
  617. teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -389
  618. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -124
  619. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +309 -156
  620. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +119 -70
  621. teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +193 -193
  622. teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +142 -142
  623. teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -147
  624. teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -48
  625. teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -240
  626. teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -248
  627. teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -75
  628. teradataml/data/jsons/sqle/17.20/TD_TextParser.json +192 -192
  629. teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -142
  630. teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -117
  631. teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +182 -182
  632. teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +52 -52
  633. teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +52 -52
  634. teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -241
  635. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -312
  636. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -182
  637. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -170
  638. teradataml/data/jsons/sqle/17.20/Unpack.json +188 -188
  639. teradataml/data/jsons/sqle/17.20/nPath.json +269 -269
  640. teradataml/data/jsons/tableoperator/17.00/read_nos.json +197 -197
  641. teradataml/data/jsons/tableoperator/17.05/read_nos.json +197 -197
  642. teradataml/data/jsons/tableoperator/17.05/write_nos.json +194 -194
  643. teradataml/data/jsons/tableoperator/17.10/read_nos.json +183 -183
  644. teradataml/data/jsons/tableoperator/17.10/write_nos.json +194 -194
  645. teradataml/data/jsons/tableoperator/17.20/read_nos.json +182 -182
  646. teradataml/data/jsons/tableoperator/17.20/write_nos.json +223 -223
  647. teradataml/data/jsons/uaf/17.20/TD_ACF.json +149 -149
  648. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +409 -409
  649. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +79 -79
  650. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +151 -151
  651. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +109 -109
  652. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +107 -107
  653. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +87 -87
  654. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +106 -106
  655. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +80 -80
  656. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +67 -67
  657. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +91 -91
  658. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +136 -136
  659. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +148 -148
  660. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -108
  661. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +109 -109
  662. teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +86 -86
  663. teradataml/data/jsons/uaf/17.20/TD_DIFF.json +91 -91
  664. teradataml/data/jsons/uaf/17.20/TD_DTW.json +116 -116
  665. teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +100 -100
  666. teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +38 -38
  667. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +100 -100
  668. teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +84 -84
  669. teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +70 -70
  670. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +152 -152
  671. teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECAST.json +313 -313
  672. teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +57 -57
  673. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +94 -94
  674. teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +63 -63
  675. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +181 -181
  676. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +102 -102
  677. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +182 -182
  678. teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +67 -67
  679. teradataml/data/jsons/uaf/17.20/TD_MINFO.json +66 -66
  680. teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +178 -178
  681. teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -114
  682. teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +118 -118
  683. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -175
  684. teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +97 -97
  685. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +173 -173
  686. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +136 -136
  687. teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +89 -89
  688. teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +79 -79
  689. teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +67 -67
  690. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -184
  691. teradataml/data/jsons/uaf/17.20/TD_SINFO.json +57 -57
  692. teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +162 -162
  693. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +100 -100
  694. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +111 -111
  695. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -95
  696. teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +77 -77
  697. teradataml/data/kmeans_example.json +22 -17
  698. teradataml/data/kmeans_table.csv +10 -0
  699. teradataml/data/kmeans_us_arrests_data.csv +0 -0
  700. teradataml/data/knn_example.json +18 -18
  701. teradataml/data/knnrecommender_example.json +6 -6
  702. teradataml/data/knnrecommenderpredict_example.json +12 -12
  703. teradataml/data/lar_example.json +17 -17
  704. teradataml/data/larpredict_example.json +30 -30
  705. teradataml/data/lc_new_predictors.csv +5 -5
  706. teradataml/data/lc_new_reference.csv +9 -9
  707. teradataml/data/lda_example.json +8 -8
  708. teradataml/data/ldainference_example.json +14 -14
  709. teradataml/data/ldatopicsummary_example.json +8 -8
  710. teradataml/data/levendist_input.csv +13 -13
  711. teradataml/data/levenshteindistance_example.json +10 -10
  712. teradataml/data/linreg_example.json +9 -9
  713. teradataml/data/load_example_data.py +326 -323
  714. teradataml/data/loan_prediction.csv +295 -295
  715. teradataml/data/lungcancer.csv +138 -138
  716. teradataml/data/mappingdata.csv +12 -12
  717. teradataml/data/milk_timeseries.csv +157 -157
  718. teradataml/data/min_max_titanic.csv +4 -4
  719. teradataml/data/minhash_example.json +6 -6
  720. teradataml/data/ml_ratings.csv +7547 -7547
  721. teradataml/data/ml_ratings_10.csv +2445 -2445
  722. teradataml/data/model1_table.csv +5 -5
  723. teradataml/data/model2_table.csv +5 -5
  724. teradataml/data/models/iris_db_glm_model.pmml +56 -56
  725. teradataml/data/models/iris_db_xgb_model.pmml +4471 -4471
  726. teradataml/data/modularity_example.json +12 -12
  727. teradataml/data/movavg_example.json +7 -7
  728. teradataml/data/mtx1.csv +7 -7
  729. teradataml/data/mtx2.csv +13 -13
  730. teradataml/data/multi_model_classification.csv +401 -0
  731. teradataml/data/multi_model_regression.csv +401 -0
  732. teradataml/data/mvdfft8.csv +9 -9
  733. teradataml/data/naivebayes_example.json +9 -9
  734. teradataml/data/naivebayespredict_example.json +19 -19
  735. teradataml/data/naivebayestextclassifier2_example.json +6 -6
  736. teradataml/data/naivebayestextclassifier_example.json +8 -8
  737. teradataml/data/naivebayestextclassifierpredict_example.json +20 -20
  738. teradataml/data/name_Find_configure.csv +10 -10
  739. teradataml/data/namedentityfinder_example.json +14 -14
  740. teradataml/data/namedentityfinderevaluator_example.json +10 -10
  741. teradataml/data/namedentityfindertrainer_example.json +6 -6
  742. teradataml/data/nb_iris_input_test.csv +31 -31
  743. teradataml/data/nb_iris_input_train.csv +121 -121
  744. teradataml/data/nbp_iris_model.csv +13 -13
  745. teradataml/data/ner_extractor_text.csv +2 -2
  746. teradataml/data/ner_sports_test2.csv +29 -29
  747. teradataml/data/ner_sports_train.csv +501 -501
  748. teradataml/data/nerevaluator_example.json +5 -5
  749. teradataml/data/nerextractor_example.json +18 -18
  750. teradataml/data/nermem_sports_test.csv +17 -17
  751. teradataml/data/nermem_sports_train.csv +50 -50
  752. teradataml/data/nertrainer_example.json +6 -6
  753. teradataml/data/ngrams_example.json +6 -6
  754. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -1455
  755. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -1993
  756. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -1492
  757. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -536
  758. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -570
  759. teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -2559
  760. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -2911
  761. teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -698
  762. teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -784
  763. teradataml/data/npath_example.json +23 -23
  764. teradataml/data/ntree_example.json +14 -14
  765. teradataml/data/numeric_strings.csv +4 -4
  766. teradataml/data/numerics.csv +4 -4
  767. teradataml/data/ocean_buoy.csv +17 -17
  768. teradataml/data/ocean_buoy2.csv +17 -17
  769. teradataml/data/ocean_buoys.csv +27 -27
  770. teradataml/data/ocean_buoys2.csv +10 -10
  771. teradataml/data/ocean_buoys_nonpti.csv +28 -28
  772. teradataml/data/ocean_buoys_seq.csv +29 -29
  773. teradataml/data/onehot_encoder_train.csv +4 -0
  774. teradataml/data/openml_example.json +92 -0
  775. teradataml/data/optional_event_table.csv +4 -4
  776. teradataml/data/orders1.csv +11 -11
  777. teradataml/data/orders1_12.csv +12 -12
  778. teradataml/data/orders_ex.csv +4 -4
  779. teradataml/data/pack_example.json +8 -8
  780. teradataml/data/package_tracking.csv +19 -19
  781. teradataml/data/package_tracking_pti.csv +18 -18
  782. teradataml/data/pagerank_example.json +13 -13
  783. teradataml/data/paragraphs_input.csv +6 -6
  784. teradataml/data/pathanalyzer_example.json +7 -7
  785. teradataml/data/pathgenerator_example.json +7 -7
  786. teradataml/data/phrases.csv +7 -7
  787. teradataml/data/pivot_example.json +8 -8
  788. teradataml/data/pivot_input.csv +22 -22
  789. teradataml/data/playerRating.csv +31 -31
  790. teradataml/data/postagger_example.json +6 -6
  791. teradataml/data/posttagger_output.csv +44 -44
  792. teradataml/data/production_data.csv +16 -16
  793. teradataml/data/production_data2.csv +7 -7
  794. teradataml/data/randomsample_example.json +31 -31
  795. teradataml/data/randomwalksample_example.json +8 -8
  796. teradataml/data/rank_table.csv +6 -6
  797. teradataml/data/ref_mobile_data.csv +4 -4
  798. teradataml/data/ref_mobile_data_dense.csv +2 -2
  799. teradataml/data/ref_url.csv +17 -17
  800. teradataml/data/restaurant_reviews.csv +7 -7
  801. teradataml/data/river_data.csv +145 -145
  802. teradataml/data/roc_example.json +7 -7
  803. teradataml/data/roc_input.csv +101 -101
  804. teradataml/data/rule_inputs.csv +6 -6
  805. teradataml/data/rule_table.csv +2 -2
  806. teradataml/data/sales.csv +7 -7
  807. teradataml/data/sales_transaction.csv +501 -501
  808. teradataml/data/salesdata.csv +342 -342
  809. teradataml/data/sample_cities.csv +2 -2
  810. teradataml/data/sample_shapes.csv +10 -10
  811. teradataml/data/sample_streets.csv +2 -2
  812. teradataml/data/sampling_example.json +15 -15
  813. teradataml/data/sax_example.json +8 -8
  814. teradataml/data/scale_attributes.csv +3 -0
  815. teradataml/data/scale_example.json +74 -23
  816. teradataml/data/scale_housing.csv +11 -11
  817. teradataml/data/scale_housing_test.csv +6 -6
  818. teradataml/data/scale_input_part_sparse.csv +31 -0
  819. teradataml/data/scale_input_partitioned.csv +16 -0
  820. teradataml/data/scale_input_sparse.csv +11 -0
  821. teradataml/data/scale_parameters.csv +3 -0
  822. teradataml/data/scale_stat.csv +11 -11
  823. teradataml/data/scalebypartition_example.json +13 -13
  824. teradataml/data/scalemap_example.json +13 -13
  825. teradataml/data/scalesummary_example.json +12 -12
  826. teradataml/data/score_category.csv +101 -101
  827. teradataml/data/score_summary.csv +4 -4
  828. teradataml/data/script_example.json +9 -9
  829. teradataml/data/scripts/deploy_script.py +84 -0
  830. teradataml/data/scripts/mapper.R +20 -0
  831. teradataml/data/scripts/mapper.py +15 -15
  832. teradataml/data/scripts/mapper_replace.py +15 -15
  833. teradataml/data/scripts/sklearn/__init__.py +0 -0
  834. teradataml/data/scripts/sklearn/sklearn_fit.py +171 -0
  835. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +127 -0
  836. teradataml/data/scripts/sklearn/sklearn_function.template +108 -0
  837. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +148 -0
  838. teradataml/data/scripts/sklearn/sklearn_neighbors.py +143 -0
  839. teradataml/data/scripts/sklearn/sklearn_score.py +119 -0
  840. teradataml/data/scripts/sklearn/sklearn_transform.py +171 -0
  841. teradataml/data/seeds.csv +10 -10
  842. teradataml/data/sentenceextractor_example.json +6 -6
  843. teradataml/data/sentiment_extract_input.csv +11 -11
  844. teradataml/data/sentiment_train.csv +16 -16
  845. teradataml/data/sentiment_word.csv +20 -20
  846. teradataml/data/sentiment_word_input.csv +19 -19
  847. teradataml/data/sentimentextractor_example.json +24 -24
  848. teradataml/data/sentimenttrainer_example.json +8 -8
  849. teradataml/data/sequence_table.csv +10 -10
  850. teradataml/data/seriessplitter_example.json +7 -7
  851. teradataml/data/sessionize_example.json +17 -17
  852. teradataml/data/sessionize_table.csv +116 -116
  853. teradataml/data/setop_test1.csv +24 -24
  854. teradataml/data/setop_test2.csv +22 -22
  855. teradataml/data/soc_nw_edges.csv +10 -10
  856. teradataml/data/soc_nw_vertices.csv +7 -7
  857. teradataml/data/souvenir_timeseries.csv +167 -167
  858. teradataml/data/sparse_iris_attribute.csv +5 -5
  859. teradataml/data/sparse_iris_test.csv +121 -121
  860. teradataml/data/sparse_iris_train.csv +601 -601
  861. teradataml/data/star1.csv +6 -6
  862. teradataml/data/state_transition.csv +5 -5
  863. teradataml/data/stock_data.csv +53 -53
  864. teradataml/data/stock_movement.csv +11 -11
  865. teradataml/data/stock_vol.csv +76 -76
  866. teradataml/data/stop_words.csv +8 -8
  867. teradataml/data/store_sales.csv +37 -37
  868. teradataml/data/stringsimilarity_example.json +7 -7
  869. teradataml/data/strsimilarity_input.csv +13 -13
  870. teradataml/data/students.csv +101 -101
  871. teradataml/data/svm_iris_input_test.csv +121 -121
  872. teradataml/data/svm_iris_input_train.csv +481 -481
  873. teradataml/data/svm_iris_model.csv +7 -7
  874. teradataml/data/svmdense_example.json +9 -9
  875. teradataml/data/svmdensepredict_example.json +18 -18
  876. teradataml/data/svmsparse_example.json +7 -7
  877. teradataml/data/svmsparsepredict_example.json +13 -13
  878. teradataml/data/svmsparsesummary_example.json +7 -7
  879. teradataml/data/target_mobile_data.csv +13 -13
  880. teradataml/data/target_mobile_data_dense.csv +5 -5
  881. teradataml/data/templatedata.csv +1201 -1201
  882. teradataml/data/templates/open_source_ml.json +9 -0
  883. teradataml/data/teradataml_example.json +150 -1
  884. teradataml/data/test_classification.csv +101 -0
  885. teradataml/data/test_loan_prediction.csv +53 -53
  886. teradataml/data/test_pacf_12.csv +37 -37
  887. teradataml/data/test_prediction.csv +101 -0
  888. teradataml/data/test_regression.csv +101 -0
  889. teradataml/data/test_river2.csv +109 -109
  890. teradataml/data/text_inputs.csv +6 -6
  891. teradataml/data/textchunker_example.json +7 -7
  892. teradataml/data/textclassifier_example.json +6 -6
  893. teradataml/data/textclassifier_input.csv +7 -7
  894. teradataml/data/textclassifiertrainer_example.json +6 -6
  895. teradataml/data/textmorph_example.json +5 -5
  896. teradataml/data/textparser_example.json +15 -15
  897. teradataml/data/texttagger_example.json +11 -11
  898. teradataml/data/texttokenizer_example.json +6 -6
  899. teradataml/data/texttrainer_input.csv +11 -11
  900. teradataml/data/tf_example.json +6 -6
  901. teradataml/data/tfidf_example.json +13 -13
  902. teradataml/data/tfidf_input1.csv +201 -201
  903. teradataml/data/tfidf_train.csv +6 -6
  904. teradataml/data/time_table1.csv +535 -535
  905. teradataml/data/time_table2.csv +14 -14
  906. teradataml/data/timeseriesdata.csv +1601 -1601
  907. teradataml/data/timeseriesdatasetsd4.csv +105 -105
  908. teradataml/data/titanic.csv +892 -892
  909. teradataml/data/token_table.csv +696 -696
  910. teradataml/data/train_multiclass.csv +101 -0
  911. teradataml/data/train_regression.csv +101 -0
  912. teradataml/data/train_regression_multiple_labels.csv +101 -0
  913. teradataml/data/train_tracking.csv +27 -27
  914. teradataml/data/transformation_table.csv +5 -5
  915. teradataml/data/transformation_table_new.csv +1 -1
  916. teradataml/data/tv_spots.csv +16 -16
  917. teradataml/data/twod_climate_data.csv +117 -117
  918. teradataml/data/uaf_example.json +475 -475
  919. teradataml/data/univariatestatistics_example.json +8 -8
  920. teradataml/data/unpack_example.json +9 -9
  921. teradataml/data/unpivot_example.json +9 -9
  922. teradataml/data/unpivot_input.csv +8 -8
  923. teradataml/data/us_air_pass.csv +36 -36
  924. teradataml/data/us_population.csv +624 -624
  925. teradataml/data/us_states_shapes.csv +52 -52
  926. teradataml/data/varmax_example.json +17 -17
  927. teradataml/data/vectordistance_example.json +25 -25
  928. teradataml/data/ville_climatedata.csv +121 -121
  929. teradataml/data/ville_tempdata.csv +12 -12
  930. teradataml/data/ville_tempdata1.csv +12 -12
  931. teradataml/data/ville_temperature.csv +11 -11
  932. teradataml/data/waveletTable.csv +1605 -1605
  933. teradataml/data/waveletTable2.csv +1605 -1605
  934. teradataml/data/weightedmovavg_example.json +8 -8
  935. teradataml/data/wft_testing.csv +5 -5
  936. teradataml/data/wine_data.csv +1600 -0
  937. teradataml/data/word_embed_input_table1.csv +5 -5
  938. teradataml/data/word_embed_input_table2.csv +4 -4
  939. teradataml/data/word_embed_model.csv +22 -22
  940. teradataml/data/words_input.csv +13 -13
  941. teradataml/data/xconvolve_complex_left.csv +6 -6
  942. teradataml/data/xconvolve_complex_leftmulti.csv +6 -6
  943. teradataml/data/xgboost_example.json +35 -35
  944. teradataml/data/xgboostpredict_example.json +31 -31
  945. teradataml/data/ztest_example.json +16 -0
  946. teradataml/dataframe/copy_to.py +1769 -1698
  947. teradataml/dataframe/data_transfer.py +2812 -2745
  948. teradataml/dataframe/dataframe.py +17630 -16946
  949. teradataml/dataframe/dataframe_utils.py +1875 -1740
  950. teradataml/dataframe/fastload.py +794 -603
  951. teradataml/dataframe/indexer.py +424 -424
  952. teradataml/dataframe/setop.py +1179 -1166
  953. teradataml/dataframe/sql.py +10174 -6432
  954. teradataml/dataframe/sql_function_parameters.py +439 -388
  955. teradataml/dataframe/sql_functions.py +652 -652
  956. teradataml/dataframe/sql_interfaces.py +220 -220
  957. teradataml/dataframe/vantage_function_types.py +674 -630
  958. teradataml/dataframe/window.py +693 -692
  959. teradataml/dbutils/__init__.py +3 -3
  960. teradataml/dbutils/dbutils.py +1167 -1150
  961. teradataml/dbutils/filemgr.py +267 -267
  962. teradataml/gen_ai/__init__.py +2 -2
  963. teradataml/gen_ai/convAI.py +472 -472
  964. teradataml/geospatial/__init__.py +3 -3
  965. teradataml/geospatial/geodataframe.py +1105 -1094
  966. teradataml/geospatial/geodataframecolumn.py +392 -387
  967. teradataml/geospatial/geometry_types.py +925 -925
  968. teradataml/hyperparameter_tuner/__init__.py +1 -1
  969. teradataml/hyperparameter_tuner/optimizer.py +3783 -2993
  970. teradataml/hyperparameter_tuner/utils.py +281 -187
  971. teradataml/lib/aed_0_1.dll +0 -0
  972. teradataml/lib/libaed_0_1.dylib +0 -0
  973. teradataml/lib/libaed_0_1.so +0 -0
  974. teradataml/libaed_0_1.dylib +0 -0
  975. teradataml/libaed_0_1.so +0 -0
  976. teradataml/opensource/__init__.py +1 -0
  977. teradataml/opensource/sklearn/__init__.py +1 -0
  978. teradataml/opensource/sklearn/_class.py +255 -0
  979. teradataml/opensource/sklearn/_sklearn_wrapper.py +1715 -0
  980. teradataml/opensource/sklearn/_wrapper_utils.py +268 -0
  981. teradataml/opensource/sklearn/constants.py +54 -0
  982. teradataml/options/__init__.py +130 -124
  983. teradataml/options/configure.py +358 -336
  984. teradataml/options/display.py +176 -176
  985. teradataml/plot/__init__.py +2 -2
  986. teradataml/plot/axis.py +1388 -1388
  987. teradataml/plot/constants.py +15 -15
  988. teradataml/plot/figure.py +398 -398
  989. teradataml/plot/plot.py +760 -760
  990. teradataml/plot/query_generator.py +83 -83
  991. teradataml/plot/subplot.py +216 -216
  992. teradataml/scriptmgmt/UserEnv.py +3791 -3761
  993. teradataml/scriptmgmt/__init__.py +3 -3
  994. teradataml/scriptmgmt/lls_utils.py +1719 -1604
  995. teradataml/series/series.py +532 -532
  996. teradataml/series/series_utils.py +71 -71
  997. teradataml/table_operators/Apply.py +949 -917
  998. teradataml/table_operators/Script.py +1718 -1982
  999. teradataml/table_operators/TableOperator.py +1255 -1616
  1000. teradataml/table_operators/__init__.py +2 -3
  1001. teradataml/table_operators/apply_query_generator.py +262 -262
  1002. teradataml/table_operators/query_generator.py +507 -507
  1003. teradataml/table_operators/table_operator_query_generator.py +460 -460
  1004. teradataml/table_operators/table_operator_util.py +631 -639
  1005. teradataml/table_operators/templates/dataframe_apply.template +184 -184
  1006. teradataml/table_operators/templates/dataframe_map.template +176 -176
  1007. teradataml/table_operators/templates/script_executor.template +170 -170
  1008. teradataml/utils/dtypes.py +684 -684
  1009. teradataml/utils/internal_buffer.py +84 -84
  1010. teradataml/utils/print_versions.py +205 -205
  1011. teradataml/utils/utils.py +410 -410
  1012. teradataml/utils/validators.py +2277 -2115
  1013. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.1.dist-info}/METADATA +346 -45
  1014. teradataml-20.0.0.1.dist-info/RECORD +1056 -0
  1015. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.1.dist-info}/WHEEL +1 -1
  1016. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.1.dist-info}/zip-safe +1 -1
  1017. teradataml/analytics/mle/AdaBoost.py +0 -651
  1018. teradataml/analytics/mle/AdaBoostPredict.py +0 -564
  1019. teradataml/analytics/mle/Antiselect.py +0 -342
  1020. teradataml/analytics/mle/Arima.py +0 -641
  1021. teradataml/analytics/mle/ArimaPredict.py +0 -477
  1022. teradataml/analytics/mle/Attribution.py +0 -1070
  1023. teradataml/analytics/mle/Betweenness.py +0 -658
  1024. teradataml/analytics/mle/Burst.py +0 -711
  1025. teradataml/analytics/mle/CCM.py +0 -600
  1026. teradataml/analytics/mle/CCMPrepare.py +0 -324
  1027. teradataml/analytics/mle/CFilter.py +0 -460
  1028. teradataml/analytics/mle/ChangePointDetection.py +0 -572
  1029. teradataml/analytics/mle/ChangePointDetectionRT.py +0 -477
  1030. teradataml/analytics/mle/Closeness.py +0 -737
  1031. teradataml/analytics/mle/ConfusionMatrix.py +0 -420
  1032. teradataml/analytics/mle/Correlation.py +0 -477
  1033. teradataml/analytics/mle/Correlation2.py +0 -573
  1034. teradataml/analytics/mle/CoxHazardRatio.py +0 -679
  1035. teradataml/analytics/mle/CoxPH.py +0 -556
  1036. teradataml/analytics/mle/CoxSurvival.py +0 -478
  1037. teradataml/analytics/mle/CumulativeMovAvg.py +0 -363
  1038. teradataml/analytics/mle/DTW.py +0 -623
  1039. teradataml/analytics/mle/DWT.py +0 -564
  1040. teradataml/analytics/mle/DWT2D.py +0 -599
  1041. teradataml/analytics/mle/DecisionForest.py +0 -716
  1042. teradataml/analytics/mle/DecisionForestEvaluator.py +0 -363
  1043. teradataml/analytics/mle/DecisionForestPredict.py +0 -561
  1044. teradataml/analytics/mle/DecisionTree.py +0 -830
  1045. teradataml/analytics/mle/DecisionTreePredict.py +0 -528
  1046. teradataml/analytics/mle/ExponentialMovAvg.py +0 -418
  1047. teradataml/analytics/mle/FMeasure.py +0 -402
  1048. teradataml/analytics/mle/FPGrowth.py +0 -734
  1049. teradataml/analytics/mle/FrequentPaths.py +0 -695
  1050. teradataml/analytics/mle/GLM.py +0 -558
  1051. teradataml/analytics/mle/GLML1L2.py +0 -547
  1052. teradataml/analytics/mle/GLML1L2Predict.py +0 -519
  1053. teradataml/analytics/mle/GLMPredict.py +0 -529
  1054. teradataml/analytics/mle/HMMDecoder.py +0 -945
  1055. teradataml/analytics/mle/HMMEvaluator.py +0 -901
  1056. teradataml/analytics/mle/HMMSupervised.py +0 -521
  1057. teradataml/analytics/mle/HMMUnsupervised.py +0 -572
  1058. teradataml/analytics/mle/Histogram.py +0 -561
  1059. teradataml/analytics/mle/IDWT.py +0 -476
  1060. teradataml/analytics/mle/IDWT2D.py +0 -493
  1061. teradataml/analytics/mle/IdentityMatch.py +0 -763
  1062. teradataml/analytics/mle/Interpolator.py +0 -918
  1063. teradataml/analytics/mle/KMeans.py +0 -485
  1064. teradataml/analytics/mle/KNN.py +0 -627
  1065. teradataml/analytics/mle/KNNRecommender.py +0 -488
  1066. teradataml/analytics/mle/KNNRecommenderPredict.py +0 -581
  1067. teradataml/analytics/mle/LAR.py +0 -439
  1068. teradataml/analytics/mle/LARPredict.py +0 -478
  1069. teradataml/analytics/mle/LDA.py +0 -548
  1070. teradataml/analytics/mle/LDAInference.py +0 -492
  1071. teradataml/analytics/mle/LDATopicSummary.py +0 -464
  1072. teradataml/analytics/mle/LevenshteinDistance.py +0 -450
  1073. teradataml/analytics/mle/LinReg.py +0 -433
  1074. teradataml/analytics/mle/LinRegPredict.py +0 -438
  1075. teradataml/analytics/mle/MinHash.py +0 -544
  1076. teradataml/analytics/mle/Modularity.py +0 -587
  1077. teradataml/analytics/mle/NEREvaluator.py +0 -410
  1078. teradataml/analytics/mle/NERExtractor.py +0 -595
  1079. teradataml/analytics/mle/NERTrainer.py +0 -458
  1080. teradataml/analytics/mle/NGrams.py +0 -570
  1081. teradataml/analytics/mle/NPath.py +0 -634
  1082. teradataml/analytics/mle/NTree.py +0 -549
  1083. teradataml/analytics/mle/NaiveBayes.py +0 -462
  1084. teradataml/analytics/mle/NaiveBayesPredict.py +0 -513
  1085. teradataml/analytics/mle/NaiveBayesTextClassifier.py +0 -607
  1086. teradataml/analytics/mle/NaiveBayesTextClassifier2.py +0 -531
  1087. teradataml/analytics/mle/NaiveBayesTextClassifierPredict.py +0 -799
  1088. teradataml/analytics/mle/NamedEntityFinder.py +0 -529
  1089. teradataml/analytics/mle/NamedEntityFinderEvaluator.py +0 -414
  1090. teradataml/analytics/mle/NamedEntityFinderTrainer.py +0 -396
  1091. teradataml/analytics/mle/POSTagger.py +0 -417
  1092. teradataml/analytics/mle/Pack.py +0 -411
  1093. teradataml/analytics/mle/PageRank.py +0 -535
  1094. teradataml/analytics/mle/PathAnalyzer.py +0 -426
  1095. teradataml/analytics/mle/PathGenerator.py +0 -367
  1096. teradataml/analytics/mle/PathStart.py +0 -464
  1097. teradataml/analytics/mle/PathSummarizer.py +0 -470
  1098. teradataml/analytics/mle/Pivot.py +0 -471
  1099. teradataml/analytics/mle/ROC.py +0 -425
  1100. teradataml/analytics/mle/RandomSample.py +0 -637
  1101. teradataml/analytics/mle/RandomWalkSample.py +0 -490
  1102. teradataml/analytics/mle/SAX.py +0 -779
  1103. teradataml/analytics/mle/SVMDense.py +0 -677
  1104. teradataml/analytics/mle/SVMDensePredict.py +0 -536
  1105. teradataml/analytics/mle/SVMDenseSummary.py +0 -437
  1106. teradataml/analytics/mle/SVMSparse.py +0 -557
  1107. teradataml/analytics/mle/SVMSparsePredict.py +0 -553
  1108. teradataml/analytics/mle/SVMSparseSummary.py +0 -435
  1109. teradataml/analytics/mle/Sampling.py +0 -549
  1110. teradataml/analytics/mle/Scale.py +0 -565
  1111. teradataml/analytics/mle/ScaleByPartition.py +0 -496
  1112. teradataml/analytics/mle/ScaleMap.py +0 -378
  1113. teradataml/analytics/mle/ScaleSummary.py +0 -320
  1114. teradataml/analytics/mle/SentenceExtractor.py +0 -363
  1115. teradataml/analytics/mle/SentimentEvaluator.py +0 -432
  1116. teradataml/analytics/mle/SentimentExtractor.py +0 -578
  1117. teradataml/analytics/mle/SentimentTrainer.py +0 -405
  1118. teradataml/analytics/mle/SeriesSplitter.py +0 -641
  1119. teradataml/analytics/mle/Sessionize.py +0 -475
  1120. teradataml/analytics/mle/SimpleMovAvg.py +0 -397
  1121. teradataml/analytics/mle/StringSimilarity.py +0 -425
  1122. teradataml/analytics/mle/TF.py +0 -389
  1123. teradataml/analytics/mle/TFIDF.py +0 -504
  1124. teradataml/analytics/mle/TextChunker.py +0 -414
  1125. teradataml/analytics/mle/TextClassifier.py +0 -399
  1126. teradataml/analytics/mle/TextClassifierEvaluator.py +0 -413
  1127. teradataml/analytics/mle/TextClassifierTrainer.py +0 -565
  1128. teradataml/analytics/mle/TextMorph.py +0 -494
  1129. teradataml/analytics/mle/TextParser.py +0 -623
  1130. teradataml/analytics/mle/TextTagger.py +0 -530
  1131. teradataml/analytics/mle/TextTokenizer.py +0 -502
  1132. teradataml/analytics/mle/UnivariateStatistics.py +0 -488
  1133. teradataml/analytics/mle/Unpack.py +0 -526
  1134. teradataml/analytics/mle/Unpivot.py +0 -438
  1135. teradataml/analytics/mle/VarMax.py +0 -776
  1136. teradataml/analytics/mle/VectorDistance.py +0 -762
  1137. teradataml/analytics/mle/WeightedMovAvg.py +0 -400
  1138. teradataml/analytics/mle/XGBoost.py +0 -842
  1139. teradataml/analytics/mle/XGBoostPredict.py +0 -627
  1140. teradataml/analytics/mle/__init__.py +0 -123
  1141. teradataml/analytics/mle/json/adaboost_mle.json +0 -135
  1142. teradataml/analytics/mle/json/adaboostpredict_mle.json +0 -85
  1143. teradataml/analytics/mle/json/antiselect_mle.json +0 -34
  1144. teradataml/analytics/mle/json/antiselect_mle_mle.json +0 -34
  1145. teradataml/analytics/mle/json/arima_mle.json +0 -172
  1146. teradataml/analytics/mle/json/arimapredict_mle.json +0 -52
  1147. teradataml/analytics/mle/json/attribution_mle_mle.json +0 -143
  1148. teradataml/analytics/mle/json/betweenness_mle.json +0 -97
  1149. teradataml/analytics/mle/json/burst_mle.json +0 -140
  1150. teradataml/analytics/mle/json/ccm_mle.json +0 -124
  1151. teradataml/analytics/mle/json/ccmprepare_mle.json +0 -14
  1152. teradataml/analytics/mle/json/cfilter_mle.json +0 -93
  1153. teradataml/analytics/mle/json/changepointdetection_mle.json +0 -92
  1154. teradataml/analytics/mle/json/changepointdetectionrt_mle.json +0 -78
  1155. teradataml/analytics/mle/json/closeness_mle.json +0 -104
  1156. teradataml/analytics/mle/json/confusionmatrix_mle.json +0 -79
  1157. teradataml/analytics/mle/json/correlation_mle.json +0 -86
  1158. teradataml/analytics/mle/json/correlationreduce_mle.json +0 -49
  1159. teradataml/analytics/mle/json/coxhazardratio_mle.json +0 -89
  1160. teradataml/analytics/mle/json/coxph_mle.json +0 -98
  1161. teradataml/analytics/mle/json/coxsurvival_mle.json +0 -79
  1162. teradataml/analytics/mle/json/cumulativemovavg_mle.json +0 -34
  1163. teradataml/analytics/mle/json/decisionforest_mle.json +0 -167
  1164. teradataml/analytics/mle/json/decisionforestevaluator_mle.json +0 -33
  1165. teradataml/analytics/mle/json/decisionforestpredict_mle_mle.json +0 -74
  1166. teradataml/analytics/mle/json/decisiontree_mle.json +0 -194
  1167. teradataml/analytics/mle/json/decisiontreepredict_mle_mle.json +0 -86
  1168. teradataml/analytics/mle/json/dtw_mle.json +0 -97
  1169. teradataml/analytics/mle/json/dwt2d_mle.json +0 -116
  1170. teradataml/analytics/mle/json/dwt_mle.json +0 -101
  1171. teradataml/analytics/mle/json/exponentialmovavg_mle.json +0 -55
  1172. teradataml/analytics/mle/json/fmeasure_mle.json +0 -58
  1173. teradataml/analytics/mle/json/fpgrowth_mle.json +0 -159
  1174. teradataml/analytics/mle/json/frequentpaths_mle.json +0 -129
  1175. teradataml/analytics/mle/json/glm_mle.json +0 -111
  1176. teradataml/analytics/mle/json/glml1l2_mle.json +0 -106
  1177. teradataml/analytics/mle/json/glml1l2predict_mle.json +0 -57
  1178. teradataml/analytics/mle/json/glmpredict_mle_mle.json +0 -74
  1179. teradataml/analytics/mle/json/histogram_mle.json +0 -100
  1180. teradataml/analytics/mle/json/hmmdecoder_mle.json +0 -192
  1181. teradataml/analytics/mle/json/hmmevaluator_mle.json +0 -206
  1182. teradataml/analytics/mle/json/hmmsupervised_mle.json +0 -91
  1183. teradataml/analytics/mle/json/hmmunsupervised_mle.json +0 -114
  1184. teradataml/analytics/mle/json/identitymatch_mle.json +0 -88
  1185. teradataml/analytics/mle/json/idwt2d_mle.json +0 -73
  1186. teradataml/analytics/mle/json/idwt_mle.json +0 -66
  1187. teradataml/analytics/mle/json/interpolator_mle.json +0 -151
  1188. teradataml/analytics/mle/json/kmeans_mle.json +0 -97
  1189. teradataml/analytics/mle/json/knn_mle.json +0 -141
  1190. teradataml/analytics/mle/json/knnrecommender_mle.json +0 -111
  1191. teradataml/analytics/mle/json/knnrecommenderpredict_mle.json +0 -75
  1192. teradataml/analytics/mle/json/lar_mle.json +0 -78
  1193. teradataml/analytics/mle/json/larpredict_mle.json +0 -69
  1194. teradataml/analytics/mle/json/lda_mle.json +0 -130
  1195. teradataml/analytics/mle/json/ldainference_mle.json +0 -78
  1196. teradataml/analytics/mle/json/ldatopicsummary_mle.json +0 -64
  1197. teradataml/analytics/mle/json/levenshteindistance_mle.json +0 -92
  1198. teradataml/analytics/mle/json/linreg_mle.json +0 -42
  1199. teradataml/analytics/mle/json/linregpredict_mle.json +0 -56
  1200. teradataml/analytics/mle/json/minhash_mle.json +0 -113
  1201. teradataml/analytics/mle/json/modularity_mle.json +0 -91
  1202. teradataml/analytics/mle/json/naivebayespredict_mle_mle.json +0 -85
  1203. teradataml/analytics/mle/json/naivebayesreduce_mle.json +0 -52
  1204. teradataml/analytics/mle/json/naivebayestextclassifierpredict_mle_mle.json +0 -147
  1205. teradataml/analytics/mle/json/naivebayestextclassifiertrainer2_mle.json +0 -108
  1206. teradataml/analytics/mle/json/naivebayestextclassifiertrainer_mle.json +0 -102
  1207. teradataml/analytics/mle/json/namedentityfinder_mle.json +0 -84
  1208. teradataml/analytics/mle/json/namedentityfinderevaluatorreduce_mle.json +0 -43
  1209. teradataml/analytics/mle/json/namedentityfindertrainer_mle.json +0 -64
  1210. teradataml/analytics/mle/json/nerevaluator_mle.json +0 -54
  1211. teradataml/analytics/mle/json/nerextractor_mle.json +0 -87
  1212. teradataml/analytics/mle/json/nertrainer_mle.json +0 -89
  1213. teradataml/analytics/mle/json/ngrams_mle.json +0 -137
  1214. teradataml/analytics/mle/json/ngramsplitter_mle_mle.json +0 -137
  1215. teradataml/analytics/mle/json/npath@coprocessor_mle.json +0 -73
  1216. teradataml/analytics/mle/json/ntree@coprocessor_mle.json +0 -123
  1217. teradataml/analytics/mle/json/pack_mle.json +0 -58
  1218. teradataml/analytics/mle/json/pack_mle_mle.json +0 -58
  1219. teradataml/analytics/mle/json/pagerank_mle.json +0 -81
  1220. teradataml/analytics/mle/json/pathanalyzer_mle.json +0 -63
  1221. teradataml/analytics/mle/json/pathgenerator_mle.json +0 -40
  1222. teradataml/analytics/mle/json/pathstart_mle.json +0 -62
  1223. teradataml/analytics/mle/json/pathsummarizer_mle.json +0 -72
  1224. teradataml/analytics/mle/json/pivoting_mle.json +0 -71
  1225. teradataml/analytics/mle/json/postagger_mle.json +0 -51
  1226. teradataml/analytics/mle/json/randomsample_mle.json +0 -131
  1227. teradataml/analytics/mle/json/randomwalksample_mle.json +0 -85
  1228. teradataml/analytics/mle/json/roc_mle.json +0 -73
  1229. teradataml/analytics/mle/json/sampling_mle.json +0 -75
  1230. teradataml/analytics/mle/json/sax_mle.json +0 -154
  1231. teradataml/analytics/mle/json/scale_mle.json +0 -93
  1232. teradataml/analytics/mle/json/scalebypartition_mle.json +0 -89
  1233. teradataml/analytics/mle/json/scalemap_mle.json +0 -44
  1234. teradataml/analytics/mle/json/scalesummary_mle.json +0 -14
  1235. teradataml/analytics/mle/json/sentenceextractor_mle.json +0 -41
  1236. teradataml/analytics/mle/json/sentimentevaluator_mle.json +0 -43
  1237. teradataml/analytics/mle/json/sentimentextractor_mle.json +0 -100
  1238. teradataml/analytics/mle/json/sentimenttrainer_mle.json +0 -68
  1239. teradataml/analytics/mle/json/seriessplitter_mle.json +0 -133
  1240. teradataml/analytics/mle/json/sessionize_mle_mle.json +0 -62
  1241. teradataml/analytics/mle/json/simplemovavg_mle.json +0 -48
  1242. teradataml/analytics/mle/json/stringsimilarity_mle.json +0 -50
  1243. teradataml/analytics/mle/json/stringsimilarity_mle_mle.json +0 -50
  1244. teradataml/analytics/mle/json/svmdense_mle.json +0 -165
  1245. teradataml/analytics/mle/json/svmdensepredict_mle.json +0 -95
  1246. teradataml/analytics/mle/json/svmdensesummary_mle.json +0 -58
  1247. teradataml/analytics/mle/json/svmsparse_mle.json +0 -148
  1248. teradataml/analytics/mle/json/svmsparsepredict_mle_mle.json +0 -103
  1249. teradataml/analytics/mle/json/svmsparsesummary_mle.json +0 -57
  1250. teradataml/analytics/mle/json/textchunker_mle.json +0 -40
  1251. teradataml/analytics/mle/json/textclassifier_mle.json +0 -51
  1252. teradataml/analytics/mle/json/textclassifierevaluator_mle.json +0 -43
  1253. teradataml/analytics/mle/json/textclassifiertrainer_mle.json +0 -103
  1254. teradataml/analytics/mle/json/textmorph_mle.json +0 -63
  1255. teradataml/analytics/mle/json/textparser_mle.json +0 -166
  1256. teradataml/analytics/mle/json/texttagger_mle.json +0 -81
  1257. teradataml/analytics/mle/json/texttokenizer_mle.json +0 -91
  1258. teradataml/analytics/mle/json/tf_mle.json +0 -33
  1259. teradataml/analytics/mle/json/tfidf_mle.json +0 -34
  1260. teradataml/analytics/mle/json/univariatestatistics_mle.json +0 -81
  1261. teradataml/analytics/mle/json/unpack_mle.json +0 -91
  1262. teradataml/analytics/mle/json/unpack_mle_mle.json +0 -91
  1263. teradataml/analytics/mle/json/unpivoting_mle.json +0 -63
  1264. teradataml/analytics/mle/json/varmax_mle.json +0 -176
  1265. teradataml/analytics/mle/json/vectordistance_mle.json +0 -179
  1266. teradataml/analytics/mle/json/weightedmovavg_mle.json +0 -48
  1267. teradataml/analytics/mle/json/xgboost_mle.json +0 -178
  1268. teradataml/analytics/mle/json/xgboostpredict_mle.json +0 -104
  1269. teradataml/analytics/sqle/Antiselect.py +0 -321
  1270. teradataml/analytics/sqle/Attribution.py +0 -603
  1271. teradataml/analytics/sqle/DecisionForestPredict.py +0 -408
  1272. teradataml/analytics/sqle/GLMPredict.py +0 -430
  1273. teradataml/analytics/sqle/MovingAverage.py +0 -543
  1274. teradataml/analytics/sqle/NGramSplitter.py +0 -548
  1275. teradataml/analytics/sqle/NPath.py +0 -632
  1276. teradataml/analytics/sqle/NaiveBayesTextClassifierPredict.py +0 -515
  1277. teradataml/analytics/sqle/Pack.py +0 -388
  1278. teradataml/analytics/sqle/SVMSparsePredict.py +0 -464
  1279. teradataml/analytics/sqle/Sessionize.py +0 -390
  1280. teradataml/analytics/sqle/StringSimilarity.py +0 -400
  1281. teradataml/analytics/sqle/Unpack.py +0 -503
  1282. teradataml/analytics/sqle/json/antiselect_sqle.json +0 -21
  1283. teradataml/analytics/sqle/json/attribution_sqle.json +0 -92
  1284. teradataml/analytics/sqle/json/decisionforestpredict_sqle.json +0 -48
  1285. teradataml/analytics/sqle/json/glmpredict_sqle.json +0 -48
  1286. teradataml/analytics/sqle/json/h2opredict_sqle.json +0 -63
  1287. teradataml/analytics/sqle/json/movingaverage_sqle.json +0 -58
  1288. teradataml/analytics/sqle/json/naivebayestextclassifierpredict_sqle.json +0 -76
  1289. teradataml/analytics/sqle/json/ngramsplitter_sqle.json +0 -126
  1290. teradataml/analytics/sqle/json/npath_sqle.json +0 -67
  1291. teradataml/analytics/sqle/json/pack_sqle.json +0 -47
  1292. teradataml/analytics/sqle/json/pmmlpredict_sqle.json +0 -55
  1293. teradataml/analytics/sqle/json/sessionize_sqle.json +0 -43
  1294. teradataml/analytics/sqle/json/stringsimilarity_sqle.json +0 -39
  1295. teradataml/analytics/sqle/json/svmsparsepredict_sqle.json +0 -74
  1296. teradataml/analytics/sqle/json/unpack_sqle.json +0 -80
  1297. teradataml/catalog/model_cataloging.py +0 -980
  1298. teradataml/config/mlengine_alias_definitions_v1.0 +0 -118
  1299. teradataml/config/mlengine_alias_definitions_v1.1 +0 -127
  1300. teradataml/config/mlengine_alias_definitions_v1.3 +0 -129
  1301. teradataml/table_operators/sandbox_container_util.py +0 -643
  1302. teradataml-17.20.0.7.dist-info/RECORD +0 -1280
  1303. {teradataml-17.20.0.7.dist-info → teradataml-20.0.0.1.dist-info}/top_level.txt +0 -0
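The listing above shows 20.0.0.1 dropping the entire teradataml/analytics/mle wrapper tree, the mlengine_alias_definitions configs, model_cataloging.py and sandbox_container_util.py, while adding new automl, opensource/sklearn and clients/auth_client modules. Code written against the removed MLE wrappers therefore fails to import on 20.0.0.1. Below is a minimal sketch of a version guard using only the Python standard library; the teradataml module paths are taken from the file listing above, not from teradataml documentation, and the 20.x replacement APIs are not shown in this diff.

    # Minimal sketch: detect whether the installed teradataml still ships the
    # analytics.mle wrapper package that the listing above shows as removed in
    # 20.0.0.1. Standard library only; module paths come from the listing.
    from importlib import metadata, util

    def mle_wrappers_available() -> bool:
        """Return True if teradataml is installed and still bundles analytics.mle."""
        try:
            metadata.version("teradataml")      # raises if the package is not installed
        except metadata.PackageNotFoundError:
            return False
        # Present in 17.20.0.7, absent in 20.0.0.1 according to the listing above.
        return util.find_spec("teradataml.analytics.mle") is not None

    if mle_wrappers_available():
        from teradataml.analytics import mle    # 17.20.x code path
    else:
        mle = None                              # 20.x: use the remaining sqle/uaf/automl modules

Because the guard checks only for module presence, it keeps working even if later 20.x releases reorganize the replacement interfaces.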
@@ -1,918 +1,950 @@
1
- #!/usr/bin/python
2
- # ##################################################################
3
- #
4
- # Copyright 2020 Teradata. All rights reserved.
5
- # TERADATA CONFIDENTIAL AND TRADE SECRET
6
- #
7
- # Primary Owner: Trupti Purohit (trupti.purohit@teradata.com)
8
- # Secondary Owner: Gouri Patwardhan (gouri.patwardhan@teradata.com)
9
- #
10
- # Function Version: 1.0
11
- #
12
- # Description: Apply is a TeradataML wrapper around Teradata's
13
- # Apply Table Operator
14
- # ##################################################################
15
-
16
- import os, re
17
- from collections import OrderedDict
18
- from teradataml.common.utils import UtilFuncs
19
- from teradataml.common.constants import OutputStyle
20
- from teradataml.options.display import display
21
- from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
22
- from teradataml.scriptmgmt.UserEnv import UserEnv
23
- from teradataml.scriptmgmt.lls_utils import get_user_env, get_env
24
- from teradataml.common.constants import TeradataConstants
25
- from teradataml.common.exceptions import TeradataMlException
26
- from teradataml.common.messages import Messages
27
- from teradataml.common.messagecodes import MessageCodes
28
- from teradataml.common.sqlbundle import SQLBundle
29
- from teradataml.table_operators.TableOperator import TableOperator
30
- from teradataml.table_operators.apply_query_generator import ApplyTableOperatorQueryGenerator
31
- from teradatasqlalchemy.dialect import dialect as td_dialect
32
- from teradataml.utils.validators import _Validators
33
- from teradatasqlalchemy import (BYTEINT, SMALLINT, INTEGER, BIGINT, DECIMAL, FLOAT, NUMBER)
34
- from teradatasqlalchemy import (CHAR, VARCHAR)
35
-
36
-
37
- class Apply(TableOperator):
38
-
39
- def __init__(self,
40
- data=None,
41
- script_name=None,
42
- files_local_path=None,
43
- apply_command=None,
44
- delimiter=",",
45
- returns=None,
46
- quotechar=None,
47
- env_name=None,
48
- style="csv",
49
- data_partition_column=None,
50
- data_hash_column=None,
51
- data_order_column=None,
52
- is_local_order=False,
53
- sort_ascending=True,
54
- nulls_first=True):
55
- """
56
- DESCRIPTION:
57
- The fastpath Apply table operator executes a user-installed script or
58
- any Linux command inside the remote user environment using Open Analytics Framework.
59
- The installed script will be executed in parallel with data from Advanced SQL Engine.
60
-
61
- PARAMETERS:
62
- apply_command:
63
- Required Argument.
64
- Specifies the command/script to run.
65
- Note:
66
- * 'Rscript --vanilla ..' helps user to run R script without saving or restoring anything in
67
- the process and keep things clean.
68
- Types: str
69
-
70
- script_name:
71
- Required Argument.
72
- Specifies the name of the user script.
73
- Types: str
74
-
75
- files_local_path:
76
- Required Argument.
77
- Specifies the absolute local path where user script and all supporting files
78
- like model files, input data file reside.
79
- Types: str
80
-
81
- env_name:
82
- Required Argument.
83
- Specifies the name of the remote user environment or an object of class UserEnv.
84
- Types: str or object of class UserEnv.
85
-
86
- returns:
87
- Optional Argument.
88
- Specifies the output column definition.
89
- Data argument is required when "returns" is not specified.
90
- When "returns" is not specified, output column definition should match
91
- with column definition of table specified in the data argument.
92
- Types: Dictionary specifying column name to teradatasqlalchemy type mapping.
93
- Default: None
94
-
95
- data:
96
- Optional Argument.
97
- Specifies a teradataml DataFrame containing the input data for the script.
98
-
99
- data_hash_column:
100
- Optional Argument.
101
- Specifies the column to be used for hashing.
102
- The rows in the input data are redistributed to AMPs based on the hash value of the
103
- column specified.
104
- If there is no "data_hash_column", then the entire result set,
105
- delivered by the function, constitutes a single group or partition.
106
- Types: str
107
- Notes:
108
- 1. "data_hash_column" can not be specified along with "data_partition_column".
109
- 2. "data_hash_column" can not be specified along with "is_local_order=False" and
110
- "data_order_column".
111
-
112
- data_partition_column:
113
- Optional Argument.
114
- Specifies Partition By columns for data.
115
- Values to this argument can be provided as a list, if multiple
116
- columns are used for partition. If there is no "data_partition_column",
117
- then the entire result set delivered by the function, constitutes a single
118
- group or partition.
119
- Default Value: ANY
120
- Types: str OR list of Strings (str)
121
- Notes:
122
- 1) "data_partition_column" can not be specified along with "data_hash_column".
123
- 2) "data_partition_column" can not be specified along with "is_local_order = True".
124
-
125
- is_local_order:
126
- Optional Argument.
127
- Specifies a boolean value to determine whether the input data is to be ordered locally
128
- or not. 'sort_ascending' specifies the order in which the values in a group, or partition,
129
- are sorted. This argument is ignored, if data_order_column is None.
130
- When set to 'True', qualified rows are ordered locally in preparation to be input
131
- to the function.
132
- Default Value: False
133
- Types: bool
134
- Note:
135
- When "is_local_order" is set to 'True', "data_order_column" should be
136
- specified, and the columns specified in "data_order_column"
137
- are used for local ordering.
138
-
139
-
140
- data_order_column:
141
- Optional Argument.
142
- Specifies Order By columns for data.
143
- Values to this argument can be provided as a list, if multiple
144
- columns are used for ordering.
145
- This argument is used in both cases: "is_local_order = True"
146
- and "is_local_order = False".
147
- Types: str OR list of Strings (str)
148
- Note:
149
- "data_order_column" can not be specified along with "data_hash_column".
150
-
151
- sort_ascending:
152
- Optional Argument.
153
- Specifies a boolean value to determine if the input data is to be sorted on
154
- the data_order_column column in ascending or descending order.
155
- When this is set to 'True' data is sorted in ascending order,
156
- otherwise data is sorted in descending order.
157
- This argument is ignored, if data_order_column is None.
158
- Default Value: True
159
- Types: bool
160
-
161
- nulls_first:
162
- Optional Argument.
163
- Specifies a boolean value to determine whether NULLS from input data are listed
164
- first or last during ordering.
165
- When this is set to 'True' NULLS are listed first, otherwise NULLS are listed last.
166
- This argument is ignored, if data_order_column is None.
167
- Default Value: True
168
- Types: bool
169
-
170
- delimiter:
171
- Optional Argument.
172
- Specifies a delimiter to use when reading columns from a row and
173
- writing result columns. Delimiter must be a valid Unicode code point.
174
- Notes:
175
- 1) The Quotechar cannot be the same as the Delimiter.
176
- 2) The value of delimiter cannot be an empty string, newline and carriage return.
177
- Default value: comma (,)
178
- Types: str
179
-
180
- quotechar:
181
- Optional Argument.
182
- Specifies the character used to quote all input and output values for the script.
183
- Note: The Quotechar cannot be the same as the Delimiter.
184
- Default value: double quote (")
185
- Types: str
186
-
187
- style:
188
- Optional Argument.
189
- Specifies how input is passed to and output is generated by the 'apply_command'
190
- respectively.
191
- Note:
192
- This clause only supports 'csv' value for Apply.
193
- Default value: "csv"
194
- Types: str
195
-
196
- RETURNS:
197
- Apply Object
198
-
199
- RAISES:
200
- TeradataMlException
201
-
202
- EXAMPLES:
203
- # Note - Refer to User Guide for setting required permissions.
204
- # Load example data.
205
- >>> load_example_data("Script", ["barrier"])
206
-
207
- # Example 1 - The Python script mapper.py reads in a line of text input ("Old Macdonald Had A Farm")
208
- # from csv and splits the line into individual words, emitting a new row for each word.
209
-
210
- # Create teradataml DataFrame objects.
211
- >>> barrierdf = DataFrame.from_table("barrier")
212
-
213
- # Create remote user environment.
214
- >>> testenv = create_env('testenv', 'python_3.7.13', 'Demo environment')
215
- User environment testenv created.
216
-
217
- >>> import os, teradataml
218
- >>> teradataml_dir = os.path.dirname(teradataml.__file__)
219
-
220
- # Create an Apply object that allows us to execute script.
221
- >>> apply_obj = Apply(data=barrierdf,
222
- script_name='mapper.py',
223
- files_local_path= os.path.join(teradataml_dir, 'data', 'scripts'),
224
- apply_command='python3 mapper.py',
225
- data_order_column="Id",
226
- is_local_order=False,
227
- nulls_first=False,
228
- sort_ascending=False,
229
- returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
230
- env_name=testenv,
231
- delimiter='\t')
232
-
233
- # Run user script locally within docker container and using data from csv.
234
- # This helps the user to fix script level issues outside Open Analytics
235
- # Framework.
236
- # Setup the environment by providing local path to docker image file.
237
- >>> apply_obj.setup_sto_env(docker_image_location='/tmp/sto_sandbox_docker_image.tar')
238
- Loading image from /tmp/sto_sandbox_docker_image.tar. It may take few minutes.
239
- Image loaded successfully.
240
-
241
- >>> apply_obj.test_script(input_data_file=os.path.join(teradataml_dir, 'data', 'barrier.csv'))
242
- ############ STDOUT Output ############
243
-
244
- word count_input
245
- 0 Macdonald 1
246
- 1 A 1
247
- 2 Farm 1
248
- 3 Had 1
249
- 4 Old 1
250
- 5 1 1
251
-
252
- # Install file in remote user environment.
253
- >>> apply_obj.install_file(file_name=os.path.join(teradataml_dir, 'data', 'mapper.py'))
254
- File 'mapper.py' installed successfully in the remote user environment 'testenv'.
255
-
256
- # Execute the user script in the Open Analytics Framework.
257
- >>> apply_obj.execute_script()
258
- word count_input
259
- 0 Macdonald 1
260
- 1 A 1
261
- 2 Farm 1
262
- 3 Had 1
263
- 4 Old 1
264
- 5 1 1
265
-
266
- # Remove the installed file from remote user environment.
267
- >>> apply_obj.remove_file(file_name='mapper.py')
268
- File 'mapper.py' removed successfully from the remote user environment 'testenv'.
269
-
270
- # Example 2 - The R script mapper.R reads in a line of text input ("Old Macdonald Had A Farm")
271
- # from csv and splits the line into individual words, emitting a new row for each word.
272
-
273
- # Create teradataml DataFrame object.
274
- >>> barrierdf = DataFrame.from_table("barrier")
275
-
276
- # Create remote user environment.
277
- >>> testenv = create_env('test_env_for_r', 'r_4.1', 'Demo environment')
278
- User environment test_env_for_r created.
279
-
280
- >>> import os, teradataml
281
-
282
- # Install file in remote user environment.
283
- >>> testenv.install_file(file_path=os.path.join(os.path.dirname(teradataml.__file__), "data", "scripts", "mapper.R"))
284
- File 'mapper.R' installed successfully in the remote user environment 'test_env_for_r'.
285
-
286
- # Create an Apply object that allows us to execute script.
287
- >>> apply_obj = Apply(data=barrierdf,
288
- apply_command='Rscript --vanilla mapper.R',
289
- data_order_column="Id",
290
- is_local_order=False,
291
- nulls_first=False,
292
- sort_ascending=False,
293
- returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
294
- env_name=testenv,
295
- delimiter='\t')
296
-
297
- # Execute the user script in the Open Analytics Framework.
298
- >>> apply_obj.execute_script()
299
- word count_input
300
- 0 Macdonald 1
301
- 1 A 1
302
- 2 Farm 1
303
- 3 Had 1
304
- 4 Old 1
305
- 5 1 1
306
-
307
- # Remove the installed file from remote user environment.
308
- >>> apply_obj.remove_file(file_name='mapper.R')
309
- File 'mapper.R' removed successfully from the remote user environment 'test_env_for_r'.
310
- """
311
- # Common variables and their validation in base class.
312
- super(Apply, self).__init__(data,
313
- script_name,
314
- files_local_path,
315
- delimiter,
316
- returns,
317
- quotechar,
318
- data_partition_column,
319
- data_hash_column,
320
- data_order_column,
321
- is_local_order,
322
- sort_ascending,
323
- nulls_first)
324
-
325
- # Set the variable specific to this child class.
326
- self.apply_command = apply_command
327
- self.env_name = env_name if env_name is not None else get_user_env()
328
- self.style = style
329
- self.returns = returns
330
-
331
- # Create AnalyticsWrapperUtils instance which contains validation functions.
332
- # This is required for is_default_or_not check.
333
- # All remaining validation is done using _Validators.
334
- self.__awu = AnalyticsWrapperUtils()
335
-
336
- # Perform argument validation for arguments specific to this class.
337
- self.__arg_info_matrix = []
338
-
339
- self.__arg_info_matrix.append(["style", self.style, True, (str), True, ['CSV']])
340
- self.__arg_info_matrix.append(["env_name", self.env_name, False, (str, UserEnv), True])
341
- self.__arg_info_matrix.append(["apply_command", self.apply_command, False, (str), True])
342
- self.__arg_info_matrix.append(["returns", self.returns, True, (dict), True])
343
-
344
- # Perform the function argument validations.
345
- self.__apply__validate()
346
-
347
- self.env = self.env_name if isinstance(self.env_name, UserEnv) else get_env(self.env_name)
348
-
349
- # User can specify object of UserEnv class. Or if environment is already created just pass
350
- # remote user environment name as string.
351
- if isinstance(self.env_name, UserEnv):
352
- self.env_name = self.env_name.env_name
353
-
354
- def __apply__validate(self):
355
-
356
- # Make sure that a non-NULL value has been supplied for all mandatory arguments.
357
- _Validators._validate_missing_required_arguments(self.__arg_info_matrix)
358
-
359
- # Validate argument types.
360
- _Validators._validate_function_arguments(self.__arg_info_matrix)
361
-
362
- if all([self.returns is None, self.data is None]):
363
- raise TeradataMlException(Messages.get_message(MessageCodes.SPECIFY_AT_LEAST_ONE_ARG,
364
- "data",
365
- "returns"),
366
- MessageCodes.SPECIFY_AT_LEAST_ONE_ARG)
367
-
368
- if self.returns is None:
369
- self.returns = OrderedDict(zip(self.data.columns,
370
- [col.type for col in
371
- self.data._metaexpr.c]))
372
-
373
- def install_file(self, file_name, replace=False):
374
- """
375
- DESCRIPTION:
376
- Function to install script in remote user environment specified in env_name
377
- argument of an Apply class object.
378
- On success, prints a message that file is installed or replaced.
379
- The installed script can be executed via the execute_script() function.
380
-
381
- PARAMETERS:
382
- file_name:
383
- Required Argument.
384
- Specifies the name of the file including file extension to be installed
385
- or replaced.
386
- Note:
387
- File names are case sensitive.
388
- Types: str
389
-
390
- replace:
391
- Optional Argument.
392
- Specifies if the file is to be installed or replaced.
393
- Default Value: False
394
- Types: bool
395
-
396
- RETURNS:
397
- True, if successful.
398
-
399
- RAISES:
400
- TeradataMLException, SqlOperationalError
401
-
402
- EXAMPLES:
403
- # Example 1: Install the file mapper.py found at the relative path data/scripts/ using
404
- # the default text mode.
405
-
406
- # In order to run example 1, "mapper.py" is required to be present on client.
407
- # Provide the path of "mapper.py" in "file_path" argument.
408
- # Create a file named "mapper.py" with content as follows:
409
- -----------------------------------------------------------
410
- #!/usr/bin/python
411
- import sys
412
- for line in sys.stdin:
413
- line = line.strip()
414
- words = line.split()
415
- for word in words:
416
- print ('%s\t%s' % (word, 1))
417
- ------------------------------------------------------------
418
-
419
- # Create teradataml DataFrame objects.
420
- >>> barrierdf = DataFrame.from_table("barrier")
421
-
422
- # Create remote user environment.
423
- >>> from teradataml import create_env
424
- >>> test_env = create_env('test_env', 'python_3.7.9', 'Demo environment')
425
- User environment testenv created.
426
-
427
- >>> import teradataml, os
428
- >>> teradataml_dir = os.path.dirname(teradataml.__file__)
429
- # Create an Apply object that allows user to execute script using Open Analytics Framework.
430
- >>> apply_obj = Apply(data=barrierdf,
431
- files_local_path='data/scripts/',
432
- script_name='mapper.py',
433
- apply_command='python3 mapper.py',
434
- data_order_column="Id",
435
- env_name=test_env,
436
- returns={"word": VARCHAR(15), "count_input": VARCHAR(2)}
437
- )
438
-
439
- # Install file in remote user environment.
440
- >>> apply_obj.install_file(file_name='mapper.py')
441
- File 'mapper.py' installed successfully in the remote user environment 'test_env'.
442
-
443
- # Replace file in remote user environment.
444
- >>> apply_obj.install_file(file_name='mapper.py', replace=True)
445
- File 'mapper.py' replaced successfully in the remote user environment 'test_env'.
446
- """
447
- # Install/Replace file in the remote user environment.
448
- try:
449
- __arg_info_matrix = []
450
- __arg_info_matrix.append(["file_name", file_name, False, (str), True])
451
-
452
- # Validate arguments
453
- _Validators._validate_missing_required_arguments(__arg_info_matrix)
454
- _Validators._validate_function_arguments(__arg_info_matrix)
455
-
456
- file_path = os.path.join(self.files_local_path, file_name)
457
-
458
- # Install file in remote user environment.
459
- self.env.install_file(file_path=file_path, replace=replace)
460
- except:
461
- raise
462
-
463
- def remove_file(self, file_name):
464
- """
465
- DESCRIPTION:
466
- Function to remove user installed files/scripts from remote user environment.
467
-
468
- PARAMETERS:
469
- file_name:
470
- Required Argument.
471
- Specifies the name of user-installed file with extension.
472
- Note:
473
- File names are case sensitive.
474
- Types: str
475
-
476
- RETURNS:
477
- True, if successful.
478
-
479
- RAISES:
480
- TeradataMLException, SqlOperationalError
481
-
482
- EXAMPLES:
483
- # Refer install_file example to create mapper.py script and install the file
484
- # in remote user environment.
485
-
486
- # Remove the installed file.
487
- >>> apply_obj.remove_file(file_name='mapper.py')
488
- File mapper.py removed successfully from the remote user environment test_env.
489
-
490
- """
491
- # Remove file from remote user environment.
492
- self.env.remove_file(file_name)
493
-
494
- def set_data(self,
495
- data,
496
- data_partition_column=None,
497
- data_hash_column=None,
498
- data_order_column=None,
499
- is_local_order=False,
500
- sort_ascending=True,
501
- nulls_first=True):
502
- """
503
- DESCRIPTION:
504
- Function enables the user to set data and data-related arguments without having to
505
- re-create the Apply object.
506
-
507
- PARAMETERS:
508
- data:
509
- Required Argument.
510
- Specifies a teradataml DataFrame containing the input data.
511
-
512
- data_partition_column:
513
- Optional Argument.
514
- Specifies Partition By columns for data.
515
- Values to this argument can be provided as a list, if multiple
516
- columns are used for partition. If there is no "data_partition_column",
517
- then the entire result set delivered by the function, constitutes a single
518
- group or partition.
519
- Default Value: ANY
520
- Types: str OR list of Strings (str)
521
- Notes:
522
- 1) "data_partition_column" can not be specified along with
523
- "data_hash_column".
524
- 2) "data_partition_column" can not be specified along with
525
- "is_local_order = True".
526
-
527
- data_hash_column:
528
- Optional Argument.
529
- Specifies the column to be used for hashing.
530
- The rows in the input data are redistributed to AMPs based on the hash value of the
531
- column specified.
532
- If there is no data_hash_column, then the entire result set,
533
- delivered by the function, constitutes a single group or partition.
534
- Types: str
535
- Note:
536
- "data_hash_column" can not be specified along with "data_partition_column",
537
- "is_local_order" and "data_order_column".
538
-
539
- data_order_column:
540
- Optional Argument.
541
- Specifies Order By columns for data.
542
- Values to this argument can be provided as a list, if multiple
543
- columns are used for ordering.
544
- This argument is used in both cases:
545
- "is_local_order = True" and "is_local_order = False".
546
- Types: str OR list of Strings (str)
547
- Note:
548
- "data_order_column" can not be specified along with
549
- "data_hash_column".
550
-
551
- is_local_order:
552
- Optional Argument.
553
- Specifies a boolean value to determine whether the input data is to be
554
- ordered locally or not. Order by specifies the order in which the
555
- values in a group or partition are sorted. Local Order By orders
556
- qualified rows on each AMP in preparation to be input to a table
557
- function. This argument is ignored, if "data_order_column" is None. When
558
- set to True, data is ordered locally.
559
- Default Value: False
560
- Types: bool
561
- Note:
562
- 1) "is_local_order" can not be specified along with
563
- "data_hash_column".
564
- 2) When "is_local_order" is set to True, "data_order_column" should be
565
- specified, and the columns specified in "data_order_column" are
566
- used for local ordering.
567
-
568
- sort_ascending:
569
- Optional Argument.
570
- Specifies a boolean value to determine if the result set is to be sorted
571
- on the column specified in "data_order_column", in ascending or descending
572
- order.
573
- The sorting is ascending when this argument is set to True, and descending
574
- when set to False.
575
- This argument is ignored, if "data_order_column" is None.
576
- Default Value: True
577
- Types: bool
578
-
579
- nulls_first:
580
- Optional Argument.
581
- Specifies a boolean value to determine whether NULLS are listed first or
582
- last during ordering.
583
- This argument is ignored, if "data_order_column" is None.
584
- NULLS are listed first when this argument is set to True, and
585
- last when set to False.
586
- Default Value: True
587
- Types: bool
588
-
589
- RETURNS:
590
- None.
591
-
592
- RAISES:
593
- TeradataMlException
594
-
595
- EXAMPLES:
596
- # Load example data.
597
- >>> load_example_data("Script", ["barrier", "barrier_new"])
598
-
599
- # Create teradataml DataFrame objects.
600
- >>> barrierdf = DataFrame.from_table("barrier")
601
- >>> barrierdf
602
- Name
603
- Id
604
- 1 Old Macdonald Had A Farm
605
- >>>
606
-
607
- # List base environments.
608
- >>> from teradataml import list_base_envs, create_env
609
- >>> list_base_envs()
610
- base_name language version
611
- 0 python_3.7.13 Python 3.7.13
612
- 1 python_3.8.13 Python 3.8.13
613
- 2 python_3.9.13 Python 3.9.13
614
- >>>
615
-
616
- # Create an environment.
617
- >>> demo_env = create_env(env_name = 'demo_env', base_env = 'python_3.8.13', desc = 'Demo Environment')
618
- User environment 'demo_env' created.
619
- >>>
620
-
621
- >>> import teradataml
622
- >>> from teradatasqlalchemy import VARCHAR
623
- >>> td_path = os.path.dirname(teradataml.__file__)
624
-
625
- # The script mapper.py reads in a line of text input
626
- # ("Old Macdonald Had A Farm") from csv and
627
- # splits the line into individual words, emitting a new row for each word.
628
- # Create an APPLY object with data and its arguments.
629
- >>> apply_obj = Apply(data = barrierdf,
630
- ... script_name='mapper.py',
631
- ... files_local_path= os.path.join(td_path,'data', 'scripts'),
632
- ... apply_command='python3 mapper.py',
633
- ... data_order_column="Id",
634
- ... is_local_order=False,
635
- ... nulls_first=False,
636
- ... sort_ascending=False,
637
- ... returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
638
- ... env_name=demo_env,
639
- ... delimiter='\t')
640
-
641
- # Install file in environment.
642
- >>> apply_obj.install_file('mapper.py')
643
- File 'mapper.py' installed successfully in the remote user environment 'demo_env'.
644
- >>>
645
-
646
- >>> apply_obj.execute_script()
647
- word count_input
648
- 0 Macdonald 1
649
- 1 A 1
650
- 2 Farm 1
651
- 3 Had 1
652
- 4 Old 1
653
- 5 1 1
654
- >>>
655
-
656
- # Now run the script on a new DataFrame.
657
- >>> barrierdf_new = DataFrame.from_table("barrier_new")
658
- >>> barrierdf_new
659
- Name
660
- Id
661
- 1 Old Macdonald Had A Farm
662
- 2 On his farm he had a cow
663
- >>>
664
-
665
- # Note:
666
- # All data related arguments that are not specified in set_data() are
667
- # reset to default values.
668
- >>> apply_obj.set_data(data=barrierdf_new,
669
- ... data_order_column='Id',
670
- ... nulls_first = True)
671
- >>>
672
-
673
- # Execute the user script on Vantage.
674
- >>> apply_obj.execute_script()
675
- word count_input
676
- 0 his 1
677
- 1 he 1
678
- 2 had 1
679
- 3 a 1
680
- 4 1 1
681
- 5 Old 1
682
- 6 Macdonald 1
683
- 7 Had 1
684
- 8 A 1
685
- 9 Farm 1
686
- >>>
687
- """
688
- super(Apply, self).set_data(data,
689
- data_partition_column,
690
- data_hash_column,
691
- data_order_column,
692
- is_local_order,
693
- sort_ascending,
694
- nulls_first)
695
-
696
- self._validate(for_data_args=True)
697
-
698
- def __form_table_operator_query(self):
699
- """
700
- Function to generate the Table Operator queries. The function defines
701
- variables and list of arguments required to form the query.
702
- """
703
- # Output table arguments list
704
- self.__func_output_args_sql_names = []
705
- self.__func_output_args = []
706
-
707
- # Generate lists for rest of the function arguments
708
- self.__func_other_arg_sql_names = []
709
- self.__func_other_args = []
710
- self.__func_other_arg_json_datatypes = []
711
-
712
- self.__func_args_before_using_clause_names = []
713
- self.__func_args_before_using_clause_values = []
714
- self.__func_args_before_using_clause_types = []
715
-
716
- self.__func_other_arg_sql_names.append("APPLY_COMMAND")
717
- self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.apply_command, "'"))
718
- self.__func_other_arg_json_datatypes.append("STRING")
719
-
720
- self.__func_other_arg_sql_names.append("ENVIRONMENT")
721
- self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.env_name, "'"))
722
- self.__func_other_arg_json_datatypes.append("STRING")
723
-
724
- self.__func_other_arg_sql_names.append("STYLE")
725
- self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.style, "'"))
726
- self.__func_other_arg_json_datatypes.append("STRING")
727
-
728
- if self.delimiter is not None:
729
- self.__func_other_arg_sql_names.append("delimiter")
730
- self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.delimiter, "'"))
731
- self.__func_other_arg_json_datatypes.append("STRING")
732
-
733
- # Generate returns clause
734
- if self.returns is not None:
735
- if isinstance(self.returns, dict):
736
- returns_clause = ', '.join(
737
- '{} {}'.format(key, self.returns[key].compile(td_dialect())) for key in self.returns.keys())
738
- self.__func_other_arg_sql_names.append("returns")
739
- self.__func_other_args.append(returns_clause)
740
- self.__func_other_arg_json_datatypes.append("STRING")
741
-
742
- if self.quotechar is not None:
743
- self.__func_other_arg_sql_names.append("quotechar")
744
- self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.quotechar, "'"))
745
- self.__func_other_arg_json_datatypes.append("STRING")
746
-
747
- # Declare empty lists to hold input table information.
748
- self.__func_input_arg_sql_names = []
749
- self.__func_input_table_view_query = []
750
- self.__func_input_dataframe_type = []
751
- self.__func_input_distribution = []
752
- self.__func_input_partition_by_cols = []
753
- self.__func_input_order_by_cols = []
754
- self.__func_input_order_by_type = []
755
- self.__func_input_sort_ascending = self.sort_ascending
756
- self.__func_input_nulls_first = None
757
-
758
- # Process data
759
- if self.data is not None:
760
- data_distribution = "FACT"
761
- if self.data_hash_column is not None:
762
- data_distribution = "HASH"
763
- self.data_partition_column = UtilFuncs._teradata_collapse_arglist(self.data_hash_column, "\"")
764
- else:
765
- if self.__awu._is_default_or_not(self.data_partition_column, "ANY"):
766
- self.data_partition_column = UtilFuncs._teradata_collapse_arglist(
767
- self.data_partition_column, "\"")
768
- else:
769
- self.data_partition_column = None
770
- if self.data_order_column is not None:
771
- if self.is_local_order:
772
- self.__func_input_order_by_type.append("LOCAL")
773
- if not self.data_hash_column:
774
- data_distribution = None
775
- else:
776
- self.__func_input_order_by_type.append(None)
777
- self.__func_input_order_by_cols.append(
778
- UtilFuncs._teradata_collapse_arglist(self.data_order_column, "\""))
779
- else:
780
- self.__func_input_order_by_type.append(None)
781
- self.__func_input_order_by_cols.append("NA_character_")
782
-
783
- self.__table_ref = self.__awu._teradata_on_clause_from_dataframe(self.data, False)
784
- self.__func_input_distribution.append(data_distribution)
785
- self.__func_input_arg_sql_names.append("input")
786
- self.__func_input_table_view_query.append(self.__table_ref["ref"])
787
- self.__func_input_dataframe_type.append(self.__table_ref["ref_type"])
788
- self.__func_input_partition_by_cols.append(self.data_partition_column)
789
- self.__func_input_nulls_first = self.nulls_first
790
-
791
- function_name = "Apply"
792
- # Create instance to generate Table Operator Query.
793
- applyqg_obj = ApplyTableOperatorQueryGenerator(function_name
794
- , self.__func_input_arg_sql_names
795
- , self.__func_input_table_view_query
796
- , self.__func_input_dataframe_type
797
- , self.__func_input_distribution
798
- , self.__func_input_partition_by_cols
799
- , self.__func_input_order_by_cols
800
- , self.__func_other_arg_sql_names
801
- , self.__func_other_args
802
- , self.__func_other_arg_json_datatypes
803
- , self.__func_output_args_sql_names
804
- , self.__func_output_args
805
- , self.__func_input_order_by_type
806
- , self.__func_input_sort_ascending
807
- , self.__func_input_nulls_first
808
- , engine="ENGINE_SQL"
809
- )
810
-
811
- # Invoke call to Apply Table operator query generation.
812
- self._tblop_query = applyqg_obj._gen_table_operator_select_stmt_sql()
813
-
814
- # Print Table Operator query if requested to do so.
815
- if display.print_sqlmr_query:
816
- print(self._tblop_query)
817
-
818
- def execute_script(self, output_style='VIEW'):
819
- """
820
- DESCRIPTION:
821
- Function enables the user to execute Python scripts using the Open Analytics Framework.
822
-
823
- PARAMETERS:
824
- output_style:
825
- Specifies the type of output object to create - a table or a view.
826
- Permitted values: 'VIEW', 'TABLE'.
827
- Default value: 'VIEW'
828
- Types: str
829
-
830
- RETURNS:
831
- Output teradataml DataFrames can be accessed using attribute
832
- references, such as ScriptObj.<attribute_name>.
833
- Output teradataml DataFrame attribute name is:
834
- result
835
-
836
- RAISES:
837
- TeradataMlException
838
-
839
- EXAMPLES:
840
- Refer to help(Apply)
841
- """
842
- # Validate the output_style.
843
- permitted_values = [OutputStyle.OUTPUT_TABLE.value,
844
- OutputStyle.OUTPUT_VIEW.value]
845
- _Validators._validate_permitted_values(output_style, permitted_values, 'output_style',
846
- case_insensitive=False, includeNone=False)
847
-
848
- # Generate the Table Operator query
849
- self.__form_table_operator_query()
850
-
851
- # Execute Table Operator query and return results
852
- return self._execute(output_style)
853
-
854
- # TODO: Remove the function with ELE-5010.
855
- def _execute(self, output_style="TABLE"):
856
- """
857
- DESCRIPTION:
858
- Function to execute APPLY Query and store the result in a table.
859
-
860
- PARAMETERS:
861
- output_style:
862
- Specifies the type of output object to create - a table or a view.
863
- Permitted values: 'VIEW', 'TABLE'.
864
- Default value: 'VIEW'
865
- Types: str
866
-
867
- RETURNS:
868
- Output teradataml DataFrames can be accessed using attribute
869
- references, such as ScriptObj.<attribute_name>.
870
- Output teradataml DataFrame attribute name is:
871
- result
872
-
873
- RAISES:
874
- TeradataMlException
875
-
876
- EXAMPLES:
877
- self._execute("VIEW")
878
- """
879
- # Generate STDOUT table name and add it to the output table list.
880
- tblop_stdout_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_tblop_out_",
881
- use_default_database=True, gc_on_quit=True,
882
- quote=False,
883
- table_type=TeradataConstants.TERADATA_TABLE
884
- )
885
-
886
- try:
887
- # Create table.
888
- columns_clause = ', '.join(
889
- '{} {}'.format(key, self.returns[key].compile(td_dialect())) for key in self.returns.keys())
890
- UtilFuncs._create_table_using_columns(tblop_stdout_temp_tablename,
891
- columns_datatypes=columns_clause,
892
- storage="TD_OFSSTORAGE")
893
-
894
- # Use insert with select to populate the data to table.
895
- # Insert with select accepts a table and columns as its second and
897
- # third parameters. So, convert the query to a subquery so that
898
- # the query acts as a table.
898
- query = "({}) as apply_result".format(self._tblop_query)
899
- ins_table = SQLBundle._build_insert_from_table_query(tblop_stdout_temp_tablename,
900
- query,
901
- "*")
902
- UtilFuncs._execute_query(ins_table)
903
-
904
- except Exception as emsg:
905
- emsg = str(emsg)
906
- pattern = r'\b\d{18}\b'
907
- query_id = re.findall(pattern, emsg)
908
- print("-----------------------------------------------------------------------")
909
- print("User should run view_log() to download the logs with the query id \"{}\".".format(query_id[0]))
910
- print("-----------------------------------------------------------------------")
911
- raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_EXEC_SQL_FAILED, emsg),
912
- MessageCodes.TDMLDF_EXEC_SQL_FAILED)
913
-
914
- self.result = self.__awu._create_data_set_object(
915
- df_input=UtilFuncs._extract_table_name(tblop_stdout_temp_tablename), source_type="table",
916
- database_name=UtilFuncs._extract_db_name(tblop_stdout_temp_tablename))
917
-
1
+ #!/usr/bin/python
2
+ # ##################################################################
3
+ #
4
+ # Copyright 2020 Teradata. All rights reserved.
5
+ # TERADATA CONFIDENTIAL AND TRADE SECRET
6
+ #
7
+ # Primary Owner: Trupti Purohit (trupti.purohit@teradata.com)
8
+ # Secondary Owner: Gouri Patwardhan (gouri.patwardhan@teradata.com)
9
+ #
10
+ # Function Version: 1.0
11
+ #
12
+ # Description: Apply is a TeradataML wrapper around Teradata's
13
+ # Apply Table Operator
14
+ # ##################################################################
15
+
16
+ import os, re
17
+ from collections import OrderedDict
18
+ from teradataml.common.utils import UtilFuncs
19
+ from teradataml.common.constants import OutputStyle
20
+ from teradataml.options.display import display
21
+ from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
22
+ from teradataml.scriptmgmt.UserEnv import UserEnv
23
+ from teradataml.scriptmgmt.lls_utils import get_user_env, get_env
24
+ from teradataml.common.constants import TeradataConstants
25
+ from teradataml.common.exceptions import TeradataMlException
26
+ from teradataml.common.messages import Messages
27
+ from teradataml.common.messagecodes import MessageCodes
28
+ from teradataml.common.sqlbundle import SQLBundle
29
+ from teradataml.table_operators.TableOperator import TableOperator
30
+ from teradataml.table_operators.apply_query_generator import ApplyTableOperatorQueryGenerator
31
+ from teradatasqlalchemy.dialect import dialect as td_dialect
32
+ from teradataml.utils.validators import _Validators
33
+ from teradatasqlalchemy import (BYTEINT, SMALLINT, INTEGER, BIGINT, DECIMAL, FLOAT, NUMBER)
34
+ from teradatasqlalchemy import (CHAR, VARCHAR)
35
+
36
+
37
+ class Apply(TableOperator):
38
+
39
+ def __init__(self,
40
+ data=None,
41
+ script_name=None,
42
+ files_local_path=None,
43
+ apply_command=None,
44
+ delimiter=",",
45
+ returns=None,
46
+ quotechar=None,
47
+ env_name=None,
48
+ style="csv",
49
+ data_partition_column=None,
50
+ data_hash_column=None,
51
+ data_order_column=None,
52
+ is_local_order=False,
53
+ sort_ascending=True,
54
+ nulls_first=True):
55
+ """
56
+ DESCRIPTION:
57
+ The fastpath Apply table operator executes a user-installed script or
59
+ any Linux command inside the remote user environment using the Open Analytics Framework.
60
+ The installed script is executed in parallel with data from the Advanced SQL Engine.
60
+
61
+ PARAMETERS:
62
+ apply_command:
63
+ Required Argument.
64
+ Specifies the command/script to run.
65
+ Note:
66
+ * 'Rscript --vanilla ..' helps the user run an R script without saving or restoring anything in
67
+ the process and keeps things clean.
68
+ Types: str
69
+
70
+ script_name:
71
+ Required Argument.
72
+ Specifies the name of the user script.
73
+ Types: str
74
+
75
+ files_local_path:
76
+ Required Argument.
77
+ Specifies the absolute local path where user script and all supporting files
78
+ like model files, input data file reside.
79
+ Types: str
80
+
81
+ env_name:
82
+ Required Argument.
83
+ Specifies the name of the remote user environment or an object of class UserEnv.
84
+ Types: str OR object of class UserEnv.
85
+
86
+ returns:
87
+ Optional Argument.
88
+ Specifies the output column definition.
89
+ The "data" argument is required when "returns" is not specified.
90
+ When "returns" is not specified, the output column definition defaults to
91
+ the column definition of the table specified in the "data" argument.
92
+ Types: Dictionary specifying column name to teradatasqlalchemy type mapping.
93
+ Default: None
94
+
95
+ data:
96
+ Optional Argument.
97
+ Specifies a teradataml DataFrame containing the input data for the script.
98
+
99
+ data_hash_column:
100
+ Optional Argument.
101
+ Specifies the column to be used for hashing.
102
+ The rows in the input data are redistributed to AMPs based on the hash value of the
103
+ column specified.
104
+ If there is no "data_hash_column", then the entire result set,
105
+ delivered by the function, constitutes a single group or partition.
106
+ Types: str
107
+ Notes:
108
+ 1. "data_hash_column" can not be specified along with "data_partition_column".
109
+ 2. "data_hash_column" can not be specified along with "is_local_order=False" and
110
+ "data_order_column".
111
+
112
+ data_partition_column:
113
+ Optional Argument.
114
+ Specifies Partition By columns for data.
115
+ Values to this argument can be provided as a list, if multiple
116
+ columns are used for partition. If there is no "data_partition_column",
117
+ then the entire result set delivered by the function, constitutes a single
118
+ group or partition.
119
+ Default Value: ANY
120
+ Types: str OR list of Strings (str)
121
+ Notes:
122
+ 1) "data_partition_column" can not be specified along with "data_hash_column".
123
+ 2) "data_partition_column" can not be specified along with "is_local_order = True".
124
+
125
+ is_local_order:
126
+ Optional Argument.
127
+ Specifies a boolean value to determine whether the input data is to be ordered locally
128
+ or not. When set to 'True', qualified rows are ordered locally on each AMP
129
+ in preparation to be input to the function; the sort direction within each group,
130
+ or partition, is controlled by "sort_ascending".
131
+ This argument is ignored, if "data_order_column" is None.
132
+ Default Value: False
133
+ Types: bool
134
+ Note:
135
+ When "is_local_order" is set to 'True', "data_order_column" should be
136
+ specified, and the columns specified in "data_order_column"
137
+ are used for local ordering.
138
+
139
+
140
+ data_order_column:
141
+ Optional Argument.
142
+ Specifies Order By columns for data.
143
+ Values to this argument can be provided as a list, if multiple
144
+ columns are used for ordering.
145
+ This argument is used in both cases: "is_local_order = True"
146
+ and "is_local_order = False".
147
+ Types: str OR list of Strings (str)
148
+ Note:
149
+ "data_order_column" can not be specified along with "data_hash_column".
150
+
151
+ sort_ascending:
152
+ Optional Argument.
153
+ Specifies a boolean value to determine if the input data is to be sorted on
154
+ the data_order_column column in ascending or descending order.
155
+ When this is set to 'True' data is sorted in ascending order,
156
+ otherwise data is sorted in descending order.
157
+ This argument is ignored, if data_order_column is None.
158
+ Default Value: True
159
+ Types: bool
160
+
161
+ nulls_first:
162
+ Optional Argument.
163
+ Specifies a boolean value to determine whether NULLS from input data are listed
164
+ first or last during ordering.
165
+ When this is set to 'True' NULLS are listed first, otherwise NULLS are listed last.
166
+ This argument is ignored, if data_order_column is None.
167
+ Default Value: True
168
+ Types: bool
169
+
170
+ delimiter:
171
+ Optional Argument.
172
+ Specifies a delimiter to use when reading columns from a row and
173
+ writing result columns. Delimiter must be a valid Unicode code point.
174
+ Notes:
175
+ 1) The Quotechar cannot be the same as the Delimiter.
176
+ 2) The value of delimiter cannot be an empty string, a newline, or a carriage return.
177
+ Default value: comma (,)
178
+ Types: str
179
+
180
+ quotechar:
181
+ Optional Argument.
182
+ Specifies the character used to quote all input and output values for the script.
183
+ Note: The Quotechar cannot be the same as the Delimiter.
184
+ Default value: double quote (")
185
+ Types: str
186
+
187
+ style:
188
+ Optional Argument.
189
+ Specifies how input is passed to, and how output is generated by,
190
+ the 'apply_command'.
191
+ Note:
192
+ This clause only supports 'csv' value for Apply.
193
+ Default value: "csv"
194
+ Types: str
195
+
196
+ RETURNS:
197
+ Apply Object
198
+
199
+ RAISES:
200
+ TeradataMlException
201
+
202
+ EXAMPLES:
203
+ # Note - Refer to User Guide for setting required permissions.
204
+ # Load example data.
205
+ >>> load_example_data("Script", ["barrier"])
206
+
207
+ # Example 1 - The Python script mapper.py reads in a line of text input ("Old Macdonald Had A Farm")
208
+ # from csv and splits the line into individual words, emitting a new row for each word.
209
+
210
+ # Create teradataml DataFrame objects.
211
+ >>> barrierdf = DataFrame.from_table("barrier")
212
+
213
+ # Create remote user environment.
214
+ >>> testenv = create_env('testenv', 'python_3.7.13', 'Demo environment')
215
+ User environment testenv created.
216
+
217
+ >>> import os, teradataml
218
+ >>> teradataml_dir = os.path.dirname(teradataml.__file__)
219
+
220
+ # Create an Apply object that allows us to execute script.
221
+ >>> apply_obj = Apply(data=barrierdf,
222
+ script_name='mapper.py',
223
+ files_local_path= os.path.join(teradataml_dir, 'data', 'scripts'),
224
+ apply_command='python3 mapper.py',
225
+ data_order_column="Id",
226
+ is_local_order=False,
227
+ nulls_first=False,
228
+ sort_ascending=False,
229
+ returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
230
+ env_name=testenv,
231
+ delimiter='\t')
232
+
233
+ # Run user script locally using data from csv.
234
+ # This helps the user to fix script level issues outside Open Analytics
235
+ # Framework.
236
+ >>> apply_obj.test_script(input_data_file=os.path.join(teradataml_dir, 'data', 'barrier.csv'))
237
+ ############ STDOUT Output ############
238
+
239
+ word count_input
240
+ 0 Macdonald 1
241
+ 1 A 1
242
+ 2 Farm 1
243
+ 3 Had 1
244
+ 4 Old 1
245
+ 5 1 1
246
+
247
+ # Install file in remote user environment.
248
+ >>> apply_obj.install_file(file_name=os.path.join(teradataml_dir, 'data', 'mapper.py'))
249
+ File 'mapper.py' installed successfully in the remote user environment 'testenv'.
250
+
251
+ # Execute the user script in the Open Analytics Framework.
252
+ >>> apply_obj.execute_script()
253
+ word count_input
254
+ 0 Macdonald 1
255
+ 1 A 1
256
+ 2 Farm 1
257
+ 3 Had 1
258
+ 4 Old 1
259
+ 5 1 1
260
+
261
+ # Remove the installed file from remote user environment.
262
+ >>> apply_obj.remove_file(file_name='mapper.py')
263
+ File 'mapper.py' removed successfully from the remote user environment 'testenv'.
264
+
265
+ # Example 2 - The R script mapper.R reads in a line of text input ("Old Macdonald Had A Farm")
266
+ # from csv and splits the line into individual words, emitting a new row for each word.
267
+
268
+ # Create teradataml DataFrame object.
269
+ >>> barrierdf = DataFrame.from_table("barrier")
270
+
271
+ # Create remote user environment.
272
+ >>> testenv = create_env('test_env_for_r', 'r_4.1', 'Demo environment')
273
+ User environment test_env_for_r created.
274
+
275
+ >>> import os, teradataml
276
+
277
+ # Install file in remote user environment.
278
+ >>> testenv.install_file(file_path=os.path.join(os.path.dirname(teradataml.__file__), "data", "scripts", "mapper.R"))
279
+ File 'mapper.R' installed successfully in the remote user environment 'test_env_for_r'.
280
+
281
+ # Create an Apply object that allows us to execute script.
282
+ >>> apply_obj = Apply(data=barrierdf,
283
+ apply_command='Rscript --vanilla mapper.R',
284
+ data_order_column="Id",
285
+ is_local_order=False,
286
+ nulls_first=False,
287
+ sort_ascending=False,
288
+ returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
289
+ env_name=testenv,
290
+ delimiter='\t')
291
+
292
+ # Execute the user script in the Open Analytics Framework.
293
+ >>> apply_obj.execute_script()
294
+ word count_input
295
+ 0 Macdonald 1
296
+ 1 A 1
297
+ 2 Farm 1
298
+ 3 Had 1
299
+ 4 Old 1
300
+ 5 1 1
301
+
302
+ # Remove the installed file from remote user environment.
303
+ >>> apply_obj.remove_file(file_name='mapper.R')
304
+ File 'mapper.R' removed successfully from the remote user environment 'test_env_for_r'.
305
+ """
306
+ # Common variables and their validation in base class.
307
+ super(Apply, self).__init__(data,
308
+ script_name,
309
+ files_local_path,
310
+ delimiter,
311
+ returns,
312
+ quotechar,
313
+ data_partition_column,
314
+ data_hash_column,
315
+ data_order_column,
316
+ is_local_order,
317
+ sort_ascending,
318
+ nulls_first)
319
+
320
+ # Set the variable specific to this child class.
321
+ self.apply_command = apply_command
322
+ self.env_name = env_name if env_name is not None else get_user_env()
323
+ self.style = style
324
+ self.returns = returns
325
+ self._skip_argument_validation = False
326
+
327
+ # Create AnalyticsWrapperUtils instance which contains validation functions.
328
+ # This is required for is_default_or_not check.
329
+ # Rest all validation is done using _Validators
330
+ self.__awu = AnalyticsWrapperUtils()
331
+
332
+ # Perform argument validation for arguments specific to this class.
333
+ self.__arg_info_matrix = []
334
+
335
+ self.__arg_info_matrix.append(["style", self.style, True, (str), True, ['CSV']])
336
+ self.__arg_info_matrix.append(["env_name", self.env_name, False, (str, UserEnv), True])
337
+ self.__arg_info_matrix.append(["apply_command", self.apply_command, False, (str), True])
338
+ self.__arg_info_matrix.append(["returns", self.returns, True, (dict), True])
339
+
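# Note: each entry above appears to follow the _Validators matrix layout of
# [arg_name, arg_value, is_optional, expected_types, check_empty, permitted_values (optional)].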
340
+ # Perform the function argument validations.
341
+ self.__apply__validate()
342
+
343
+ self.env = self.env_name if isinstance(self.env_name, UserEnv) else get_env(self.env_name)
344
+
345
+ # User can specify object of UserEnv class. Or if environment is already created just pass
346
+ # remote user environment name as string.
347
+ if isinstance(self.env_name, UserEnv):
348
+ self.env_name = self.env_name.env_name
349
+
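None of the EXAMPLES above exercise the hash-redistribution path described under "data_hash_column". A minimal hypothetical sketch, re-using the 'barrierdf', 'testenv' and 'teradataml_dir' names from those examples (not part of the shipped examples), might look like this:

import os
from teradatasqlalchemy import VARCHAR

# Rows are redistributed to AMPs by the hash of "Id". Per the notes above, this
# cannot be combined with data_partition_column, or with data_order_column when
# is_local_order is False.
apply_obj_hash = Apply(data=barrierdf,
                       script_name='mapper.py',
                       files_local_path=os.path.join(teradataml_dir, 'data', 'scripts'),
                       apply_command='python3 mapper.py',
                       data_hash_column='Id',
                       returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
                       env_name=testenv,
                       delimiter='\t')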
350
+ @property
351
+ def skip_argument_validation(self):
352
+ """
353
+ DESCRIPTION:
354
+ Getter for self._skip_argument_validation.
355
+
356
+ RETURNS:
357
+ bool
358
+
359
+ RAISES:
360
+ None
361
+ """
362
+ return self._skip_argument_validation
363
+
364
+ @skip_argument_validation.setter
365
+ def skip_argument_validation(self, flag):
366
+ """
367
+ DESCRIPTION:
368
+ Setter for self._skip_argument_validation
369
+
370
+ PARAMETERS:
371
+ flag:
372
+ Required Argument.
373
+ Specifies whether the argument validation should be skipped or not.
374
+ Types: bool
375
+
376
+ RETURNS:
377
+ None
378
+
379
+ RAISES:
380
+ None
381
+ """
382
+ self._skip_argument_validation = flag
383
+
384
+ def __apply__validate(self):
385
+
386
+ if self._skip_argument_validation:
387
+ return
388
+ # Make sure that a non-NULL value has been supplied for all mandatory arguments.
389
+ _Validators._validate_missing_required_arguments(self.__arg_info_matrix)
390
+
391
+ # Validate argument types.
392
+ _Validators._validate_function_arguments(self.__arg_info_matrix)
393
+
394
+ if all([self.returns is None, self.data is None]):
395
+ raise TeradataMlException(Messages.get_message(MessageCodes.SPECIFY_AT_LEAST_ONE_ARG,
396
+ "data",
397
+ "returns"),
398
+ MessageCodes.SPECIFY_AT_LEAST_ONE_ARG)
399
+
400
+ if self.returns is None:
401
+ self.returns = OrderedDict(zip(self.data.columns,
402
+ [col.type for col in
403
+ self.data._metaexpr.c]))
404
+
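As the code above shows, omitting "returns" makes the output column definition default to the input DataFrame's columns. A minimal hypothetical sketch; the 'cat' command and the 'barrierdf'/'testenv' names are assumptions re-used from the class EXAMPLES, not part of the shipped examples:

from teradataml import DataFrame

barrierdf = DataFrame.from_table("barrier")
# 'cat' simply echoes each CSV input row, so the output columns match the input
# columns and no explicit "returns" dictionary is needed.
apply_obj_echo = Apply(data=barrierdf,
                       apply_command='cat',
                       env_name=testenv,
                       data_order_column='Id')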
405
+ def install_file(self, file_name, replace=False):
406
+ """
407
+ DESCRIPTION:
408
+ Function to install a script in the remote user environment specified in the "env_name"
409
+ argument of an Apply class object.
410
+ On success, prints a message that the file is installed or replaced.
411
+ The installed script can then be executed via the execute_script() function.
412
+
413
+ PARAMETERS:
414
+ file_name:
415
+ Required Argument.
416
+ Specifies the name of the file including file extension to be installed
417
+ or replaced.
418
+ Note:
419
+ File names are case sensitive.
420
+ Types: str
421
+
422
+ replace:
423
+ Optional Argument.
424
+ Specifies if the file is to be installed or replaced.
425
+ Default Value: False
426
+ Types: bool
427
+
428
+ RETURNS:
429
+ True, if successful.
430
+
431
+ RAISES:
432
+ TeradataMLException, SqlOperationalError
433
+
434
+ EXAMPLES:
435
+ # Example 1: Install the file mapper.py found at the relative path data/scripts/ using
436
+ # the default text mode.
437
+
438
+ # In order to run example 1, "mapper.py" is required to be present on client.
439
+ # Provide the path of "mapper.py" in "file_path" argument.
440
+ # Create a file named "mapper.py" with content as follows:
441
+ -----------------------------------------------------------
442
+ #!/usr/bin/python
443
+ import sys
444
+ for line in sys.stdin:
445
+ line = line.strip()
446
+ words = line.split()
447
+ for word in words:
448
+ print ('%s\t%s' % (word, 1))
449
+ ------------------------------------------------------------
450
+
451
+ # Create teradataml DataFrame objects.
452
+ >>> barrierdf = DataFrame.from_table("barrier")
453
+
454
+ # Create remote user environment.
455
+ >>> from teradataml import create_env
456
+ >>> test_env = create_env('test_env', 'python_3.7.9', 'Demo environment')
457
+ User environment test_env created.
458
+
459
+ >>> import teradataml, os
460
+ >>> teradataml_dir = os.path.dirname(teradataml.__file__)
461
+ # Create an Apply object that allows user to execute script using Open Analytics Framework.
462
+ >>> apply_obj = Apply(data=barrierdf,
463
+ files_local_path='data/scripts/',
464
+ script_name='mapper.py',
465
+ apply_command='python3 mapper.py',
466
+ data_order_column="Id",
467
+ env_name=test_env,
468
+ returns={"word": VARCHAR(15), "count_input": VARCHAR(2)}
469
+ )
470
+
471
+ # Install file in remote user environment.
472
+ >>> apply_obj.install_file(file_name='mapper.py')
473
+ File 'mapper.py' installed successfully in the remote user environment 'test_env'.
474
+
475
+ # Replace file in remote user environment.
476
+ >>> apply_obj.install_file(file_name='mapper.py', replace=True)
477
+ File 'mapper.py' replaced successfully in the remote user environment 'test_env'.
478
+ """
479
+ # Install/Replace file in the remote user environment.
480
+ try:
481
+ __arg_info_matrix = []
482
+ __arg_info_matrix.append(["file_name", file_name, False, (str), True])
483
+
484
+ # Validate arguments
485
+ _Validators._validate_missing_required_arguments(__arg_info_matrix)
486
+ _Validators._validate_function_arguments(__arg_info_matrix)
487
+
488
+ file_path = os.path.join(self.files_local_path, file_name)
489
+
490
+ # Install file in remote user environment.
491
+ self.env.install_file(file_path=file_path, replace=replace)
492
+ except:
493
+ raise
494
+
495
+ def remove_file(self, file_name):
496
+ """
497
+ DESCRIPTION:
498
+ Function to remove user installed files/scripts from remote user environment.
499
+
500
+ PARAMETERS:
501
+ file_name:
502
+ Required Argument.
503
+ Specifies the name of user-installed file with extension.
504
+ Note:
505
+ File names are case sensitive.
506
+ Types: str
507
+
508
+ RETURNS:
509
+ True, if successful.
510
+
511
+ RAISES:
512
+ TeradataMLException, SqlOperationalError
513
+
514
+ EXAMPLES:
515
+ # Refer install_file example to create mapper.py script and install the file
516
+ # in remote user environment.
517
+
518
+ # Remove the installed file.
519
+ >>> apply_obj.remove_file(file_name='mapper.py')
520
+ File 'mapper.py' removed successfully from the remote user environment 'test_env'.
521
+
522
+ """
523
+ # Remove file from remote user environment.
524
+ self.env.remove_file(file_name)
525
+
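Since remove_file() above simply forwards to the environment object held by the Apply instance, the same cleanup can also be issued directly on the UserEnv object; a small sketch re-using 'testenv' from the class EXAMPLES:

# Either call removes the installed script; both go through the same UserEnv method.
apply_obj.remove_file(file_name='mapper.py')
# ... or, equivalently, directly on the environment object:
# testenv.remove_file('mapper.py')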
526
+ def set_data(self,
527
+ data,
528
+ data_partition_column=None,
529
+ data_hash_column=None,
530
+ data_order_column=None,
531
+ is_local_order=False,
532
+ sort_ascending=True,
533
+ nulls_first=True):
534
+ """
535
+ DESCRIPTION:
536
+ Function enables user to set data and data related arguments without having to
537
+ re-create the Apply object.
538
+
539
+ PARAMETERS:
540
+ data:
541
+ Required Argument.
542
+ Specifies a teradataml DataFrame containing the input data.
543
+
544
+ data_partition_column:
545
+ Optional Argument.
546
+ Specifies Partition By columns for data.
547
+ Values to this argument can be provided as a list, if multiple
548
+ columns are used for partition. If there is no "data_partition_column",
549
+ then the entire result set delivered by the function, constitutes a single
550
+ group or partition.
551
+ Default Value: ANY
552
+ Types: str OR list of Strings (str)
553
+ Notes:
554
+ 1) "data_partition_column" can not be specified along with
555
+ "data_hash_column".
556
+ 2) "data_partition_column" can not be specified along with
557
+ "is_local_order = True".
558
+
559
+ data_hash_column:
560
+ Optional Argument.
561
+ Specifies the column to be used for hashing.
562
+ The rows in the input data are redistributed to AMPs based on the hash value of the
563
+ column specified.
564
+ If there is no data_hash_column, then the entire result set,
565
+ delivered by the function, constitutes a single group or partition.
566
+ Types: str
567
+ Note:
568
+ "data_hash_column" can not be specified along with "data_partition_column",
569
+ "is_local_order" and "data_order_column".
570
+
571
+ data_order_column:
572
+ Optional Argument.
573
+ Specifies Order By columns for data.
574
+ Values to this argument can be provided as a list, if multiple
575
+ columns are used for ordering.
576
+ This argument is used in both cases:
577
+ "is_local_order = True" and "is_local_order = False".
578
+ Types: str OR list of Strings (str)
579
+ Note:
580
+ "data_order_column" can not be specified along with
581
+ "data_hash_column".
582
+
583
+ is_local_order:
584
+ Optional Argument.
585
+ Specifies a boolean value to determine whether the input data is to be
586
+ ordered locally or not. Order by specifies the order in which the
587
+ values in a group or partition are sorted. Local Order By
588
+ orders qualified rows on each AMP in preparation to be input to a table
589
+ function. This argument is ignored, if "data_order_column" is None. When
590
+ set to True, data is ordered locally.
591
+ Default Value: False
592
+ Types: bool
593
+ Note:
594
+ 1) "is_local_order" can not be specified along with
595
+ "data_hash_column".
596
+ 2) When "is_local_order" is set to True, "data_order_column" should be
597
+ specified, and the columns specified in "data_order_column" are
598
+ used for local ordering.
599
+
600
+ sort_ascending:
601
+ Optional Argument.
602
+ Specifies a boolean value to determine if the result set is to be sorted
603
+ on the column specified in "data_order_column", in ascending or descending
604
+ order.
605
+ The sorting is ascending when this argument is set to True, and descending
606
+ when set to False.
607
+ This argument is ignored, if "data_order_column" is None.
608
+ Default Value: True
609
+ Types: bool
610
+
611
+ nulls_first:
612
+ Optional Argument.
613
+ Specifies a boolean value to determine whether NULLS are listed first or
614
+ last during ordering.
615
+ This argument is ignored, if "data_order_column" is None.
616
+ NULLS are listed first when this argument is set to True, and
617
+ last when set to False.
618
+ Default Value: True
619
+ Types: bool
620
+
621
+ RETURNS:
622
+ None.
623
+
624
+ RAISES:
625
+ TeradataMlException
626
+
627
+ EXAMPLES:
628
+ # Load example data.
629
+ >>> load_example_data("Script", ["barrier", "barrier_new"])
630
+
631
+ # Create teradataml DataFrame objects.
632
+ >>> barrierdf = DataFrame.from_table("barrier")
633
+ >>> barrierdf
634
+ Name
635
+ Id
636
+ 1 Old Macdonald Had A Farm
637
+ >>>
638
+
639
+ # List base environments.
640
+ >>> from teradataml import list_base_envs, create_env
641
+ >>> list_base_envs()
642
+ base_name language version
643
+ 0 python_3.7.13 Python 3.7.13
644
+ 1 python_3.8.13 Python 3.8.13
645
+ 2 python_3.9.13 Python 3.9.13
646
+ >>>
647
+
648
+ # Create an environment.
649
+ >>> demo_env = create_env(env_name = 'demo_env', base_env = 'python_3.8.13', desc = 'Demo Environment')
650
+ User environment 'demo_env' created.
651
+ >>>
652
+
653
+ >>> import teradataml
654
+ >>> from teradatasqlalchemy import VARCHAR
655
+ >>> td_path = os.path.dirname(teradataml.__file__)
656
+
657
+ # The script mapper.py reads in a line of text input
658
+ # ("Old Macdonald Had A Farm") from csv and
659
+ # splits the line into individual words, emitting a new row for each word.
660
+ # Create an APPLY object with data and its arguments.
661
+ >>> apply_obj = Apply(data = barrierdf,
662
+ ... script_name='mapper.py',
663
+ ... files_local_path= os.path.join(td_path,'data', 'scripts'),
664
+ ... apply_command='python3 mapper.py',
665
+ ... data_order_column="Id",
666
+ ... is_local_order=False,
667
+ ... nulls_first=False,
668
+ ... sort_ascending=False,
669
+ ... returns={"word": VARCHAR(15), "count_input": VARCHAR(10)},
670
+ ... env_name=demo_env,
671
+ ... delimiter='\t')
672
+
673
+ # Install file in environment.
674
+ >>> apply_obj.install_file('mapper.py')
675
+ File 'mapper.py' installed successfully in the remote user environment 'demo_env'.
676
+ >>>
677
+
678
+ >>> apply_obj.execute_script()
679
+ word count_input
680
+ 0 Macdonald 1
681
+ 1 A 1
682
+ 2 Farm 1
683
+ 3 Had 1
684
+ 4 Old 1
685
+ 5 1 1
686
+ >>>
687
+
688
+ # Now run the script on a new DataFrame.
689
+ >>> barrierdf_new = DataFrame.from_table("barrier_new")
690
+ >>> barrierdf_new
691
+ Name
692
+ Id
693
+ 1 Old Macdonald Had A Farm
694
+ 2 On his farm he had a cow
695
+ >>>
696
+
697
+ # Note:
698
+ # All data related arguments that are not specified in set_data() are
699
+ # reset to default values.
700
+ >>> apply_obj.set_data(data=barrierdf_new,
701
+ ... data_order_column='Id',
702
+ ... nulls_first = True)
703
+ >>>
704
+
705
+ # Execute the user script on Vantage.
706
+ >>> apply_obj.execute_script()
707
+ word count_input
708
+ 0 his 1
709
+ 1 he 1
710
+ 2 had 1
711
+ 3 a 1
712
+ 4 1 1
713
+ 5 Old 1
714
+ 6 Macdonald 1
715
+ 7 Had 1
716
+ 8 A 1
717
+ 9 Farm 1
718
+ >>>
719
+ """
720
+ super(Apply, self).set_data(data,
721
+ data_partition_column,
722
+ data_hash_column,
723
+ data_order_column,
724
+ is_local_order,
725
+ sort_ascending,
726
+ nulls_first)
727
+
728
+ self._validate(for_data_args=True)
729
+
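The note in the docstring above is easy to miss: every data-related argument that is not passed to set_data() reverts to its default. A small sketch, re-using 'apply_obj' and the 'barrier_new' table from the EXAMPLES, of what that means in practice:

from teradataml import DataFrame

barrierdf_new = DataFrame.from_table("barrier_new")
# The original object was built with sort_ascending=False and nulls_first=False;
# both must be restated here, otherwise they silently reset to True.
apply_obj.set_data(data=barrierdf_new,
                   data_order_column="Id",
                   sort_ascending=False,
                   nulls_first=False)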
730
+ def __form_table_operator_query(self):
731
+ """
732
+ Function to generate the Table Operator queries. The function defines
733
+ variables and list of arguments required to form the query.
734
+ """
735
+ # Output table arguments list
736
+ self.__func_output_args_sql_names = []
737
+ self.__func_output_args = []
738
+
739
+ # Generate lists for rest of the function arguments
740
+ self.__func_other_arg_sql_names = []
741
+ self.__func_other_args = []
742
+ self.__func_other_arg_json_datatypes = []
743
+
744
+ self.__func_args_before_using_clause_names = []
745
+ self.__func_args_before_using_clause_values = []
746
+ self.__func_args_before_using_clause_types = []
747
+
748
+ self.__func_other_arg_sql_names.append("APPLY_COMMAND")
749
+ self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.apply_command, "'"))
750
+ self.__func_other_arg_json_datatypes.append("STRING")
751
+
752
+ self.__func_other_arg_sql_names.append("ENVIRONMENT")
753
+ self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.env_name, "'"))
754
+ self.__func_other_arg_json_datatypes.append("STRING")
755
+
756
+ self.__func_other_arg_sql_names.append("STYLE")
757
+ self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.style, "'"))
758
+ self.__func_other_arg_json_datatypes.append("STRING")
759
+
760
+ if self.delimiter is not None:
761
+ self.__func_other_arg_sql_names.append("delimiter")
762
+ self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.delimiter, "'"))
763
+ self.__func_other_arg_json_datatypes.append("STRING")
764
+
765
+ # Generate returns clause
766
+ if self.returns is not None:
767
+ if isinstance(self.returns, dict):
768
+ returns_clause = ', '.join(
769
+ '{} {}'.format(key, self.returns[key].compile(td_dialect())) for key in self.returns.keys())
770
+ self.__func_other_arg_sql_names.append("returns")
771
+ self.__func_other_args.append(returns_clause)
772
+ self.__func_other_arg_json_datatypes.append("STRING")
773
+
774
+ if self.quotechar is not None:
775
+ self.__func_other_arg_sql_names.append("quotechar")
776
+ self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.quotechar, "'"))
777
+ self.__func_other_arg_json_datatypes.append("STRING")
778
+
779
+ # Declare empty lists to hold input table information.
780
+ self.__func_input_arg_sql_names = []
781
+ self.__func_input_table_view_query = []
782
+ self.__func_input_dataframe_type = []
783
+ self.__func_input_distribution = []
784
+ self.__func_input_partition_by_cols = []
785
+ self.__func_input_order_by_cols = []
786
+ self.__func_input_order_by_type = []
787
+ self.__func_input_sort_ascending = self.sort_ascending
788
+ self.__func_input_nulls_first = None
789
+
790
+ # Process data
791
+ if self.data is not None:
792
+ data_distribution = "FACT"
793
+ if self.data_hash_column is not None:
794
+ data_distribution = "HASH"
795
+ self.data_partition_column = UtilFuncs._teradata_collapse_arglist(self.data_hash_column, "\"")
796
+ else:
797
+ if self.__awu._is_default_or_not(self.data_partition_column, "ANY"):
798
+ self.data_partition_column = UtilFuncs._teradata_collapse_arglist(
799
+ self.data_partition_column, "\"")
800
+ else:
801
+ self.data_partition_column = None
802
+ if self.data_order_column is not None:
803
+ if self.is_local_order:
804
+ self.__func_input_order_by_type.append("LOCAL")
805
+ if not self.data_hash_column:
806
+ data_distribution = None
807
+ else:
808
+ self.__func_input_order_by_type.append(None)
809
+ self.__func_input_order_by_cols.append(
810
+ UtilFuncs._teradata_collapse_arglist(self.data_order_column, "\""))
811
+ else:
812
+ self.__func_input_order_by_type.append(None)
813
+ self.__func_input_order_by_cols.append("NA_character_")
814
+
815
+ self.__table_ref = self.__awu._teradata_on_clause_from_dataframe(self.data, False)
816
+ self.__func_input_distribution.append(data_distribution)
817
+ self.__func_input_arg_sql_names.append("input")
818
+ self.__func_input_table_view_query.append(self.__table_ref["ref"])
819
+ self.__func_input_dataframe_type.append(self.__table_ref["ref_type"])
820
+ self.__func_input_partition_by_cols.append(self.data_partition_column)
821
+ self.__func_input_nulls_first = self.nulls_first
822
+
823
+ function_name = "Apply"
824
+ # Create instance to generate Table Operator Query.
825
+ applyqg_obj = ApplyTableOperatorQueryGenerator(function_name
826
+ , self.__func_input_arg_sql_names
827
+ , self.__func_input_table_view_query
828
+ , self.__func_input_dataframe_type
829
+ , self.__func_input_distribution
830
+ , self.__func_input_partition_by_cols
831
+ , self.__func_input_order_by_cols
832
+ , self.__func_other_arg_sql_names
833
+ , self.__func_other_args
834
+ , self.__func_other_arg_json_datatypes
835
+ , self.__func_output_args_sql_names
836
+ , self.__func_output_args
837
+ , self.__func_input_order_by_type
838
+ , self.__func_input_sort_ascending
839
+ , self.__func_input_nulls_first
840
+ , engine="ENGINE_SQL"
841
+ )
842
+
843
+ # Invoke call to Apply Table operator query generation.
844
+ self._tblop_query = applyqg_obj._gen_table_operator_select_stmt_sql()
845
+
846
+ # Print Table Operator query if requested to do so.
847
+ if display.print_sqlmr_query:
848
+ print(self._tblop_query)
849
+
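The last few lines above honour the global display option, so the generated APPLY SELECT can be inspected without touching private methods. A small usage sketch, assuming an existing 'apply_obj' as in the EXAMPLES:

from teradataml.options.display import display

display.print_sqlmr_query = True    # print the generated APPLY query before it runs
apply_obj.execute_script()
display.print_sqlmr_query = False   # restore the default behaviour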
850
+ def execute_script(self, output_style='VIEW'):
851
+ """
852
+ DESCRIPTION:
853
+ Function enables user to execute the user script in the remote user environment using the Open Analytics Framework.
854
+
855
+ PARAMETERS:
856
+ output_style:
857
+ Specifies the type of output object to create - a table or a view.
858
+ Permitted values: 'VIEW', 'TABLE'.
859
+ Default value: 'VIEW'
860
+ Types: str
861
+
862
+ RETURNS:
863
+ Output teradataml DataFrames can be accessed using attribute
864
+ references, such as ScriptObj.<attribute_name>.
865
+ Output teradataml DataFrame attribute name is:
866
+ result
867
+
868
+ RAISES:
869
+ TeradataMlException
870
+
871
+ EXAMPLES:
872
+ Refer to help(Apply)
873
+ """
874
+ # Validate the output_style.
875
+ permitted_values = [OutputStyle.OUTPUT_TABLE.value,
876
+ OutputStyle.OUTPUT_VIEW.value]
877
+ _Validators._validate_permitted_values(output_style, permitted_values, 'output_style',
878
+ case_insensitive=False, includeNone=False)
879
+
880
+ # Generate the Table Operator query
881
+ self.__form_table_operator_query()
882
+
883
+ # Execute Table Operator query and return results
884
+ return self._execute(output_style)
885
+
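A minimal usage sketch for the method above, re-using 'apply_obj' from the class EXAMPLES; the returned object and the 'result' attribute refer to the same output DataFrame:

result_df = apply_obj.execute_script(output_style='TABLE')   # 'VIEW' (default) or 'TABLE'
print(result_df.columns)       # column names come from the "returns" definition
same_df = apply_obj.result     # the output is also exposed as an attribute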
886
+ # TODO: Remove the function with ELE-5010.
887
+ def _execute(self, output_style="TABLE"):
888
+ """
889
+ DESCRIPTION:
890
+ Function to execute APPLY Query and store the result in a table.
891
+
892
+ PARAMETERS:
893
+ output_style:
894
+ Specifies the type of output object to create - a table or a view.
895
+ Permitted values: 'VIEW', 'TABLE'.
896
+ Default value: 'TABLE'
897
+ Types: str
898
+
899
+ RETURNS:
900
+ Output teradataml DataFrames can be accessed using attribute
901
+ references, such as ScriptObj.<attribute_name>.
902
+ Output teradataml DataFrame attribute name is:
903
+ result
904
+
905
+ RAISES:
906
+ TeradataMlException
907
+
908
+ EXAMPLES:
909
+ self._execute("VIEW")
910
+ """
911
+ # Generate STDOUT table name and add it to the output table list.
912
+ tblop_stdout_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_tblop_out_",
913
+ use_default_database=True, gc_on_quit=True,
914
+ quote=False,
915
+ table_type=TeradataConstants.TERADATA_TABLE
916
+ )
917
+
918
+ try:
919
+ # Create table.
920
+ columns_clause = ', '.join(
921
+ '{} {}'.format(key, self.returns[key].compile(td_dialect())) for key in self.returns.keys())
922
+ UtilFuncs._create_table_using_columns(tblop_stdout_temp_tablename,
923
+ columns_datatypes=columns_clause,
924
+ storage="TD_OFSSTORAGE")
925
+
926
+ # Use insert with select to populate the data into the table.
927
+ # Insert with select accepts a source table and columns as the
928
+ # second and third parameters, so the query is wrapped in a subquery
929
+ # that acts as a table.
930
+ query = "({}) as apply_result".format(self._tblop_query)
931
+ ins_table = SQLBundle._build_insert_from_table_query(tblop_stdout_temp_tablename,
932
+ query,
933
+ "*")
934
+ UtilFuncs._execute_query(ins_table)
935
+
936
+ except Exception as emsg:
937
+ emsg = str(emsg)
938
+ pattern = r'\b\d{18}\b'
939
+ query_id = re.findall(pattern, emsg)
940
+ print("-----------------------------------------------------------------------")
941
+ print("User should run view_log() to download the logs with the query id \"{}\".".format(query_id[0]))
942
+ print("-----------------------------------------------------------------------")
943
+ raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_EXEC_SQL_FAILED, emsg),
944
+ MessageCodes.TDMLDF_EXEC_SQL_FAILED)
945
+
946
+ self.result = self.__awu._create_data_set_object(
947
+ df_input=UtilFuncs._extract_table_name(tblop_stdout_temp_tablename), source_type="table",
948
+ database_name=UtilFuncs._extract_db_name(tblop_stdout_temp_tablename))
949
+
918
950
  return self.result
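For reference, the column-clause construction used by _execute() above can be reproduced on its own; a small sketch with a "returns" dictionary shaped like the ones in the class EXAMPLES:

from teradatasqlalchemy import VARCHAR
from teradatasqlalchemy.dialect import dialect as td_dialect

returns = {"word": VARCHAR(15), "count_input": VARCHAR(10)}
# Mirrors the ', '.join(...) expression in _execute(): each column name is paired
# with its type compiled for the Teradata dialect, yielding the DDL fragment that
# goes into the CREATE TABLE statement.
columns_clause = ', '.join('{} {}'.format(name, col_type.compile(td_dialect()))
                           for name, col_type in returns.items())
print(columns_clause)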