teradataml 17.20.0.6__py3-none-any.whl → 20.0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of teradataml might be problematic.

Files changed (432)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +238 -1
  4. teradataml/__init__.py +13 -3
  5. teradataml/_version.py +1 -1
  6. teradataml/analytics/Transformations.py +4 -4
  7. teradataml/analytics/__init__.py +0 -2
  8. teradataml/analytics/analytic_function_executor.py +3 -0
  9. teradataml/analytics/json_parser/utils.py +13 -12
  10. teradataml/analytics/sqle/DecisionTreePredict.py +15 -30
  11. teradataml/analytics/sqle/NaiveBayesPredict.py +11 -20
  12. teradataml/analytics/sqle/__init__.py +0 -13
  13. teradataml/analytics/utils.py +1 -0
  14. teradataml/analytics/valib.py +3 -0
  15. teradataml/automl/__init__.py +1628 -0
  16. teradataml/automl/custom_json_utils.py +1270 -0
  17. teradataml/automl/data_preparation.py +993 -0
  18. teradataml/automl/data_transformation.py +727 -0
  19. teradataml/automl/feature_engineering.py +1648 -0
  20. teradataml/automl/feature_exploration.py +547 -0
  21. teradataml/automl/model_evaluation.py +163 -0
  22. teradataml/automl/model_training.py +887 -0
  23. teradataml/catalog/__init__.py +0 -2
  24. teradataml/catalog/byom.py +49 -6
  25. teradataml/catalog/function_argument_mapper.py +0 -2
  26. teradataml/catalog/model_cataloging_utils.py +2 -1021
  27. teradataml/common/aed_utils.py +6 -2
  28. teradataml/common/constants.py +50 -58
  29. teradataml/common/deprecations.py +160 -0
  30. teradataml/common/garbagecollector.py +61 -104
  31. teradataml/common/messagecodes.py +27 -36
  32. teradataml/common/messages.py +11 -15
  33. teradataml/common/utils.py +205 -287
  34. teradataml/common/wrapper_utils.py +1 -110
  35. teradataml/context/context.py +150 -78
  36. teradataml/data/bank_churn.csv +10001 -0
  37. teradataml/data/bmi.csv +501 -0
  38. teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +3 -3
  39. teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +6 -5
  40. teradataml/data/docs/sqle/docs_17_10/Fit.py +1 -1
  41. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +1 -1
  42. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +1 -1
  43. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +2 -2
  44. teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +2 -1
  45. teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +1 -0
  46. teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +1 -1
  47. teradataml/data/docs/sqle/docs_17_10/Transform.py +2 -1
  48. teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +3 -3
  49. teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +6 -5
  50. teradataml/data/docs/sqle/docs_17_20/Fit.py +1 -1
  51. teradataml/data/docs/sqle/docs_17_20/GLM.py +1 -1
  52. teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +9 -10
  53. teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +3 -2
  54. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +16 -15
  55. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +2 -2
  56. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +2 -2
  57. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +8 -8
  58. teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +21 -20
  59. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +1 -1
  60. teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +8 -3
  61. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +6 -5
  62. teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +6 -6
  63. teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +2 -1
  64. teradataml/data/docs/sqle/docs_17_20/SVM.py +1 -1
  65. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +16 -16
  66. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +1 -0
  67. teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +3 -2
  68. teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +4 -4
  69. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +19 -19
  70. teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +5 -4
  71. teradataml/data/docs/sqle/docs_17_20/Transform.py +2 -2
  72. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +9 -9
  73. teradataml/data/fish.csv +160 -0
  74. teradataml/data/glass_types.csv +215 -0
  75. teradataml/data/insurance.csv +1 -1
  76. teradataml/data/iris_data.csv +151 -0
  77. teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +1 -0
  78. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +1 -0
  79. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +1 -0
  80. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +1 -0
  81. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +1 -0
  82. teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +1 -0
  83. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +1 -0
  84. teradataml/data/load_example_data.py +3 -0
  85. teradataml/data/multi_model_classification.csv +401 -0
  86. teradataml/data/multi_model_regression.csv +401 -0
  87. teradataml/data/openml_example.json +63 -0
  88. teradataml/data/scripts/deploy_script.py +65 -0
  89. teradataml/data/scripts/mapper.R +20 -0
  90. teradataml/data/scripts/sklearn/__init__.py +0 -0
  91. teradataml/data/scripts/sklearn/sklearn_fit.py +175 -0
  92. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +135 -0
  93. teradataml/data/scripts/sklearn/sklearn_function.template +113 -0
  94. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +158 -0
  95. teradataml/data/scripts/sklearn/sklearn_neighbors.py +152 -0
  96. teradataml/data/scripts/sklearn/sklearn_score.py +128 -0
  97. teradataml/data/scripts/sklearn/sklearn_transform.py +179 -0
  98. teradataml/data/templates/open_source_ml.json +9 -0
  99. teradataml/data/teradataml_example.json +73 -1
  100. teradataml/data/test_classification.csv +101 -0
  101. teradataml/data/test_prediction.csv +101 -0
  102. teradataml/data/test_regression.csv +101 -0
  103. teradataml/data/train_multiclass.csv +101 -0
  104. teradataml/data/train_regression.csv +101 -0
  105. teradataml/data/train_regression_multiple_labels.csv +101 -0
  106. teradataml/data/wine_data.csv +1600 -0
  107. teradataml/dataframe/copy_to.py +79 -13
  108. teradataml/dataframe/data_transfer.py +8 -0
  109. teradataml/dataframe/dataframe.py +910 -311
  110. teradataml/dataframe/dataframe_utils.py +102 -5
  111. teradataml/dataframe/fastload.py +11 -3
  112. teradataml/dataframe/setop.py +15 -2
  113. teradataml/dataframe/sql.py +3735 -77
  114. teradataml/dataframe/sql_function_parameters.py +56 -5
  115. teradataml/dataframe/vantage_function_types.py +45 -1
  116. teradataml/dataframe/window.py +30 -29
  117. teradataml/dbutils/dbutils.py +18 -1
  118. teradataml/geospatial/geodataframe.py +18 -7
  119. teradataml/geospatial/geodataframecolumn.py +5 -0
  120. teradataml/hyperparameter_tuner/optimizer.py +910 -120
  121. teradataml/hyperparameter_tuner/utils.py +131 -37
  122. teradataml/lib/aed_0_1.dll +0 -0
  123. teradataml/lib/libaed_0_1.dylib +0 -0
  124. teradataml/lib/libaed_0_1.so +0 -0
  125. teradataml/libaed_0_1.dylib +0 -0
  126. teradataml/libaed_0_1.so +0 -0
  127. teradataml/opensource/__init__.py +1 -0
  128. teradataml/opensource/sklearn/__init__.py +1 -0
  129. teradataml/opensource/sklearn/_class.py +255 -0
  130. teradataml/opensource/sklearn/_sklearn_wrapper.py +1668 -0
  131. teradataml/opensource/sklearn/_wrapper_utils.py +268 -0
  132. teradataml/opensource/sklearn/constants.py +54 -0
  133. teradataml/options/__init__.py +3 -6
  134. teradataml/options/configure.py +21 -20
  135. teradataml/scriptmgmt/UserEnv.py +61 -5
  136. teradataml/scriptmgmt/lls_utils.py +135 -53
  137. teradataml/table_operators/Apply.py +38 -6
  138. teradataml/table_operators/Script.py +45 -308
  139. teradataml/table_operators/TableOperator.py +182 -591
  140. teradataml/table_operators/__init__.py +0 -1
  141. teradataml/table_operators/table_operator_util.py +32 -40
  142. teradataml/utils/validators.py +127 -3
  143. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/METADATA +243 -3
  144. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/RECORD +147 -391
  145. teradataml/analytics/mle/AdaBoost.py +0 -651
  146. teradataml/analytics/mle/AdaBoostPredict.py +0 -564
  147. teradataml/analytics/mle/Antiselect.py +0 -342
  148. teradataml/analytics/mle/Arima.py +0 -641
  149. teradataml/analytics/mle/ArimaPredict.py +0 -477
  150. teradataml/analytics/mle/Attribution.py +0 -1070
  151. teradataml/analytics/mle/Betweenness.py +0 -658
  152. teradataml/analytics/mle/Burst.py +0 -711
  153. teradataml/analytics/mle/CCM.py +0 -600
  154. teradataml/analytics/mle/CCMPrepare.py +0 -324
  155. teradataml/analytics/mle/CFilter.py +0 -460
  156. teradataml/analytics/mle/ChangePointDetection.py +0 -572
  157. teradataml/analytics/mle/ChangePointDetectionRT.py +0 -477
  158. teradataml/analytics/mle/Closeness.py +0 -737
  159. teradataml/analytics/mle/ConfusionMatrix.py +0 -420
  160. teradataml/analytics/mle/Correlation.py +0 -477
  161. teradataml/analytics/mle/Correlation2.py +0 -573
  162. teradataml/analytics/mle/CoxHazardRatio.py +0 -679
  163. teradataml/analytics/mle/CoxPH.py +0 -556
  164. teradataml/analytics/mle/CoxSurvival.py +0 -478
  165. teradataml/analytics/mle/CumulativeMovAvg.py +0 -363
  166. teradataml/analytics/mle/DTW.py +0 -623
  167. teradataml/analytics/mle/DWT.py +0 -564
  168. teradataml/analytics/mle/DWT2D.py +0 -599
  169. teradataml/analytics/mle/DecisionForest.py +0 -716
  170. teradataml/analytics/mle/DecisionForestEvaluator.py +0 -363
  171. teradataml/analytics/mle/DecisionForestPredict.py +0 -561
  172. teradataml/analytics/mle/DecisionTree.py +0 -830
  173. teradataml/analytics/mle/DecisionTreePredict.py +0 -528
  174. teradataml/analytics/mle/ExponentialMovAvg.py +0 -418
  175. teradataml/analytics/mle/FMeasure.py +0 -402
  176. teradataml/analytics/mle/FPGrowth.py +0 -734
  177. teradataml/analytics/mle/FrequentPaths.py +0 -695
  178. teradataml/analytics/mle/GLM.py +0 -558
  179. teradataml/analytics/mle/GLML1L2.py +0 -547
  180. teradataml/analytics/mle/GLML1L2Predict.py +0 -519
  181. teradataml/analytics/mle/GLMPredict.py +0 -529
  182. teradataml/analytics/mle/HMMDecoder.py +0 -945
  183. teradataml/analytics/mle/HMMEvaluator.py +0 -901
  184. teradataml/analytics/mle/HMMSupervised.py +0 -521
  185. teradataml/analytics/mle/HMMUnsupervised.py +0 -572
  186. teradataml/analytics/mle/Histogram.py +0 -561
  187. teradataml/analytics/mle/IDWT.py +0 -476
  188. teradataml/analytics/mle/IDWT2D.py +0 -493
  189. teradataml/analytics/mle/IdentityMatch.py +0 -763
  190. teradataml/analytics/mle/Interpolator.py +0 -918
  191. teradataml/analytics/mle/KMeans.py +0 -485
  192. teradataml/analytics/mle/KNN.py +0 -627
  193. teradataml/analytics/mle/KNNRecommender.py +0 -488
  194. teradataml/analytics/mle/KNNRecommenderPredict.py +0 -581
  195. teradataml/analytics/mle/LAR.py +0 -439
  196. teradataml/analytics/mle/LARPredict.py +0 -478
  197. teradataml/analytics/mle/LDA.py +0 -548
  198. teradataml/analytics/mle/LDAInference.py +0 -492
  199. teradataml/analytics/mle/LDATopicSummary.py +0 -464
  200. teradataml/analytics/mle/LevenshteinDistance.py +0 -450
  201. teradataml/analytics/mle/LinReg.py +0 -433
  202. teradataml/analytics/mle/LinRegPredict.py +0 -438
  203. teradataml/analytics/mle/MinHash.py +0 -544
  204. teradataml/analytics/mle/Modularity.py +0 -587
  205. teradataml/analytics/mle/NEREvaluator.py +0 -410
  206. teradataml/analytics/mle/NERExtractor.py +0 -595
  207. teradataml/analytics/mle/NERTrainer.py +0 -458
  208. teradataml/analytics/mle/NGrams.py +0 -570
  209. teradataml/analytics/mle/NPath.py +0 -634
  210. teradataml/analytics/mle/NTree.py +0 -549
  211. teradataml/analytics/mle/NaiveBayes.py +0 -462
  212. teradataml/analytics/mle/NaiveBayesPredict.py +0 -513
  213. teradataml/analytics/mle/NaiveBayesTextClassifier.py +0 -607
  214. teradataml/analytics/mle/NaiveBayesTextClassifier2.py +0 -531
  215. teradataml/analytics/mle/NaiveBayesTextClassifierPredict.py +0 -799
  216. teradataml/analytics/mle/NamedEntityFinder.py +0 -529
  217. teradataml/analytics/mle/NamedEntityFinderEvaluator.py +0 -414
  218. teradataml/analytics/mle/NamedEntityFinderTrainer.py +0 -396
  219. teradataml/analytics/mle/POSTagger.py +0 -417
  220. teradataml/analytics/mle/Pack.py +0 -411
  221. teradataml/analytics/mle/PageRank.py +0 -535
  222. teradataml/analytics/mle/PathAnalyzer.py +0 -426
  223. teradataml/analytics/mle/PathGenerator.py +0 -367
  224. teradataml/analytics/mle/PathStart.py +0 -464
  225. teradataml/analytics/mle/PathSummarizer.py +0 -470
  226. teradataml/analytics/mle/Pivot.py +0 -471
  227. teradataml/analytics/mle/ROC.py +0 -425
  228. teradataml/analytics/mle/RandomSample.py +0 -637
  229. teradataml/analytics/mle/RandomWalkSample.py +0 -490
  230. teradataml/analytics/mle/SAX.py +0 -779
  231. teradataml/analytics/mle/SVMDense.py +0 -677
  232. teradataml/analytics/mle/SVMDensePredict.py +0 -536
  233. teradataml/analytics/mle/SVMDenseSummary.py +0 -437
  234. teradataml/analytics/mle/SVMSparse.py +0 -557
  235. teradataml/analytics/mle/SVMSparsePredict.py +0 -553
  236. teradataml/analytics/mle/SVMSparseSummary.py +0 -435
  237. teradataml/analytics/mle/Sampling.py +0 -549
  238. teradataml/analytics/mle/Scale.py +0 -565
  239. teradataml/analytics/mle/ScaleByPartition.py +0 -496
  240. teradataml/analytics/mle/ScaleMap.py +0 -378
  241. teradataml/analytics/mle/ScaleSummary.py +0 -320
  242. teradataml/analytics/mle/SentenceExtractor.py +0 -363
  243. teradataml/analytics/mle/SentimentEvaluator.py +0 -432
  244. teradataml/analytics/mle/SentimentExtractor.py +0 -578
  245. teradataml/analytics/mle/SentimentTrainer.py +0 -405
  246. teradataml/analytics/mle/SeriesSplitter.py +0 -641
  247. teradataml/analytics/mle/Sessionize.py +0 -475
  248. teradataml/analytics/mle/SimpleMovAvg.py +0 -397
  249. teradataml/analytics/mle/StringSimilarity.py +0 -425
  250. teradataml/analytics/mle/TF.py +0 -389
  251. teradataml/analytics/mle/TFIDF.py +0 -504
  252. teradataml/analytics/mle/TextChunker.py +0 -414
  253. teradataml/analytics/mle/TextClassifier.py +0 -399
  254. teradataml/analytics/mle/TextClassifierEvaluator.py +0 -413
  255. teradataml/analytics/mle/TextClassifierTrainer.py +0 -565
  256. teradataml/analytics/mle/TextMorph.py +0 -494
  257. teradataml/analytics/mle/TextParser.py +0 -623
  258. teradataml/analytics/mle/TextTagger.py +0 -530
  259. teradataml/analytics/mle/TextTokenizer.py +0 -502
  260. teradataml/analytics/mle/UnivariateStatistics.py +0 -488
  261. teradataml/analytics/mle/Unpack.py +0 -526
  262. teradataml/analytics/mle/Unpivot.py +0 -438
  263. teradataml/analytics/mle/VarMax.py +0 -776
  264. teradataml/analytics/mle/VectorDistance.py +0 -762
  265. teradataml/analytics/mle/WeightedMovAvg.py +0 -400
  266. teradataml/analytics/mle/XGBoost.py +0 -842
  267. teradataml/analytics/mle/XGBoostPredict.py +0 -627
  268. teradataml/analytics/mle/__init__.py +0 -123
  269. teradataml/analytics/mle/json/adaboost_mle.json +0 -135
  270. teradataml/analytics/mle/json/adaboostpredict_mle.json +0 -85
  271. teradataml/analytics/mle/json/antiselect_mle.json +0 -34
  272. teradataml/analytics/mle/json/antiselect_mle_mle.json +0 -34
  273. teradataml/analytics/mle/json/arima_mle.json +0 -172
  274. teradataml/analytics/mle/json/arimapredict_mle.json +0 -52
  275. teradataml/analytics/mle/json/attribution_mle_mle.json +0 -143
  276. teradataml/analytics/mle/json/betweenness_mle.json +0 -97
  277. teradataml/analytics/mle/json/burst_mle.json +0 -140
  278. teradataml/analytics/mle/json/ccm_mle.json +0 -124
  279. teradataml/analytics/mle/json/ccmprepare_mle.json +0 -14
  280. teradataml/analytics/mle/json/cfilter_mle.json +0 -93
  281. teradataml/analytics/mle/json/changepointdetection_mle.json +0 -92
  282. teradataml/analytics/mle/json/changepointdetectionrt_mle.json +0 -78
  283. teradataml/analytics/mle/json/closeness_mle.json +0 -104
  284. teradataml/analytics/mle/json/confusionmatrix_mle.json +0 -79
  285. teradataml/analytics/mle/json/correlation_mle.json +0 -86
  286. teradataml/analytics/mle/json/correlationreduce_mle.json +0 -49
  287. teradataml/analytics/mle/json/coxhazardratio_mle.json +0 -89
  288. teradataml/analytics/mle/json/coxph_mle.json +0 -98
  289. teradataml/analytics/mle/json/coxsurvival_mle.json +0 -79
  290. teradataml/analytics/mle/json/cumulativemovavg_mle.json +0 -34
  291. teradataml/analytics/mle/json/decisionforest_mle.json +0 -167
  292. teradataml/analytics/mle/json/decisionforestevaluator_mle.json +0 -33
  293. teradataml/analytics/mle/json/decisionforestpredict_mle_mle.json +0 -74
  294. teradataml/analytics/mle/json/decisiontree_mle.json +0 -194
  295. teradataml/analytics/mle/json/decisiontreepredict_mle_mle.json +0 -86
  296. teradataml/analytics/mle/json/dtw_mle.json +0 -97
  297. teradataml/analytics/mle/json/dwt2d_mle.json +0 -116
  298. teradataml/analytics/mle/json/dwt_mle.json +0 -101
  299. teradataml/analytics/mle/json/exponentialmovavg_mle.json +0 -55
  300. teradataml/analytics/mle/json/fmeasure_mle.json +0 -58
  301. teradataml/analytics/mle/json/fpgrowth_mle.json +0 -159
  302. teradataml/analytics/mle/json/frequentpaths_mle.json +0 -129
  303. teradataml/analytics/mle/json/glm_mle.json +0 -111
  304. teradataml/analytics/mle/json/glml1l2_mle.json +0 -106
  305. teradataml/analytics/mle/json/glml1l2predict_mle.json +0 -57
  306. teradataml/analytics/mle/json/glmpredict_mle_mle.json +0 -74
  307. teradataml/analytics/mle/json/histogram_mle.json +0 -100
  308. teradataml/analytics/mle/json/hmmdecoder_mle.json +0 -192
  309. teradataml/analytics/mle/json/hmmevaluator_mle.json +0 -206
  310. teradataml/analytics/mle/json/hmmsupervised_mle.json +0 -91
  311. teradataml/analytics/mle/json/hmmunsupervised_mle.json +0 -114
  312. teradataml/analytics/mle/json/identitymatch_mle.json +0 -88
  313. teradataml/analytics/mle/json/idwt2d_mle.json +0 -73
  314. teradataml/analytics/mle/json/idwt_mle.json +0 -66
  315. teradataml/analytics/mle/json/interpolator_mle.json +0 -151
  316. teradataml/analytics/mle/json/kmeans_mle.json +0 -97
  317. teradataml/analytics/mle/json/knn_mle.json +0 -141
  318. teradataml/analytics/mle/json/knnrecommender_mle.json +0 -111
  319. teradataml/analytics/mle/json/knnrecommenderpredict_mle.json +0 -75
  320. teradataml/analytics/mle/json/lar_mle.json +0 -78
  321. teradataml/analytics/mle/json/larpredict_mle.json +0 -69
  322. teradataml/analytics/mle/json/lda_mle.json +0 -130
  323. teradataml/analytics/mle/json/ldainference_mle.json +0 -78
  324. teradataml/analytics/mle/json/ldatopicsummary_mle.json +0 -64
  325. teradataml/analytics/mle/json/levenshteindistance_mle.json +0 -92
  326. teradataml/analytics/mle/json/linreg_mle.json +0 -42
  327. teradataml/analytics/mle/json/linregpredict_mle.json +0 -56
  328. teradataml/analytics/mle/json/minhash_mle.json +0 -113
  329. teradataml/analytics/mle/json/modularity_mle.json +0 -91
  330. teradataml/analytics/mle/json/naivebayespredict_mle_mle.json +0 -85
  331. teradataml/analytics/mle/json/naivebayesreduce_mle.json +0 -52
  332. teradataml/analytics/mle/json/naivebayestextclassifierpredict_mle_mle.json +0 -147
  333. teradataml/analytics/mle/json/naivebayestextclassifiertrainer2_mle.json +0 -108
  334. teradataml/analytics/mle/json/naivebayestextclassifiertrainer_mle.json +0 -102
  335. teradataml/analytics/mle/json/namedentityfinder_mle.json +0 -84
  336. teradataml/analytics/mle/json/namedentityfinderevaluatorreduce_mle.json +0 -43
  337. teradataml/analytics/mle/json/namedentityfindertrainer_mle.json +0 -64
  338. teradataml/analytics/mle/json/nerevaluator_mle.json +0 -54
  339. teradataml/analytics/mle/json/nerextractor_mle.json +0 -87
  340. teradataml/analytics/mle/json/nertrainer_mle.json +0 -89
  341. teradataml/analytics/mle/json/ngrams_mle.json +0 -137
  342. teradataml/analytics/mle/json/ngramsplitter_mle_mle.json +0 -137
  343. teradataml/analytics/mle/json/npath@coprocessor_mle.json +0 -73
  344. teradataml/analytics/mle/json/ntree@coprocessor_mle.json +0 -123
  345. teradataml/analytics/mle/json/pack_mle.json +0 -58
  346. teradataml/analytics/mle/json/pack_mle_mle.json +0 -58
  347. teradataml/analytics/mle/json/pagerank_mle.json +0 -81
  348. teradataml/analytics/mle/json/pathanalyzer_mle.json +0 -63
  349. teradataml/analytics/mle/json/pathgenerator_mle.json +0 -40
  350. teradataml/analytics/mle/json/pathstart_mle.json +0 -62
  351. teradataml/analytics/mle/json/pathsummarizer_mle.json +0 -72
  352. teradataml/analytics/mle/json/pivoting_mle.json +0 -71
  353. teradataml/analytics/mle/json/postagger_mle.json +0 -51
  354. teradataml/analytics/mle/json/randomsample_mle.json +0 -131
  355. teradataml/analytics/mle/json/randomwalksample_mle.json +0 -85
  356. teradataml/analytics/mle/json/roc_mle.json +0 -73
  357. teradataml/analytics/mle/json/sampling_mle.json +0 -75
  358. teradataml/analytics/mle/json/sax_mle.json +0 -154
  359. teradataml/analytics/mle/json/scale_mle.json +0 -93
  360. teradataml/analytics/mle/json/scalebypartition_mle.json +0 -89
  361. teradataml/analytics/mle/json/scalemap_mle.json +0 -44
  362. teradataml/analytics/mle/json/scalesummary_mle.json +0 -14
  363. teradataml/analytics/mle/json/sentenceextractor_mle.json +0 -41
  364. teradataml/analytics/mle/json/sentimentevaluator_mle.json +0 -43
  365. teradataml/analytics/mle/json/sentimentextractor_mle.json +0 -100
  366. teradataml/analytics/mle/json/sentimenttrainer_mle.json +0 -68
  367. teradataml/analytics/mle/json/seriessplitter_mle.json +0 -133
  368. teradataml/analytics/mle/json/sessionize_mle_mle.json +0 -62
  369. teradataml/analytics/mle/json/simplemovavg_mle.json +0 -48
  370. teradataml/analytics/mle/json/stringsimilarity_mle.json +0 -50
  371. teradataml/analytics/mle/json/stringsimilarity_mle_mle.json +0 -50
  372. teradataml/analytics/mle/json/svmdense_mle.json +0 -165
  373. teradataml/analytics/mle/json/svmdensepredict_mle.json +0 -95
  374. teradataml/analytics/mle/json/svmdensesummary_mle.json +0 -58
  375. teradataml/analytics/mle/json/svmsparse_mle.json +0 -148
  376. teradataml/analytics/mle/json/svmsparsepredict_mle_mle.json +0 -103
  377. teradataml/analytics/mle/json/svmsparsesummary_mle.json +0 -57
  378. teradataml/analytics/mle/json/textchunker_mle.json +0 -40
  379. teradataml/analytics/mle/json/textclassifier_mle.json +0 -51
  380. teradataml/analytics/mle/json/textclassifierevaluator_mle.json +0 -43
  381. teradataml/analytics/mle/json/textclassifiertrainer_mle.json +0 -103
  382. teradataml/analytics/mle/json/textmorph_mle.json +0 -63
  383. teradataml/analytics/mle/json/textparser_mle.json +0 -166
  384. teradataml/analytics/mle/json/texttagger_mle.json +0 -81
  385. teradataml/analytics/mle/json/texttokenizer_mle.json +0 -91
  386. teradataml/analytics/mle/json/tf_mle.json +0 -33
  387. teradataml/analytics/mle/json/tfidf_mle.json +0 -34
  388. teradataml/analytics/mle/json/univariatestatistics_mle.json +0 -81
  389. teradataml/analytics/mle/json/unpack_mle.json +0 -91
  390. teradataml/analytics/mle/json/unpack_mle_mle.json +0 -91
  391. teradataml/analytics/mle/json/unpivoting_mle.json +0 -63
  392. teradataml/analytics/mle/json/varmax_mle.json +0 -176
  393. teradataml/analytics/mle/json/vectordistance_mle.json +0 -179
  394. teradataml/analytics/mle/json/weightedmovavg_mle.json +0 -48
  395. teradataml/analytics/mle/json/xgboost_mle.json +0 -178
  396. teradataml/analytics/mle/json/xgboostpredict_mle.json +0 -104
  397. teradataml/analytics/sqle/Antiselect.py +0 -321
  398. teradataml/analytics/sqle/Attribution.py +0 -603
  399. teradataml/analytics/sqle/DecisionForestPredict.py +0 -408
  400. teradataml/analytics/sqle/GLMPredict.py +0 -430
  401. teradataml/analytics/sqle/MovingAverage.py +0 -543
  402. teradataml/analytics/sqle/NGramSplitter.py +0 -548
  403. teradataml/analytics/sqle/NPath.py +0 -632
  404. teradataml/analytics/sqle/NaiveBayesTextClassifierPredict.py +0 -515
  405. teradataml/analytics/sqle/Pack.py +0 -388
  406. teradataml/analytics/sqle/SVMSparsePredict.py +0 -464
  407. teradataml/analytics/sqle/Sessionize.py +0 -390
  408. teradataml/analytics/sqle/StringSimilarity.py +0 -400
  409. teradataml/analytics/sqle/Unpack.py +0 -503
  410. teradataml/analytics/sqle/json/antiselect_sqle.json +0 -21
  411. teradataml/analytics/sqle/json/attribution_sqle.json +0 -92
  412. teradataml/analytics/sqle/json/decisionforestpredict_sqle.json +0 -48
  413. teradataml/analytics/sqle/json/glmpredict_sqle.json +0 -48
  414. teradataml/analytics/sqle/json/h2opredict_sqle.json +0 -63
  415. teradataml/analytics/sqle/json/movingaverage_sqle.json +0 -58
  416. teradataml/analytics/sqle/json/naivebayestextclassifierpredict_sqle.json +0 -76
  417. teradataml/analytics/sqle/json/ngramsplitter_sqle.json +0 -126
  418. teradataml/analytics/sqle/json/npath_sqle.json +0 -67
  419. teradataml/analytics/sqle/json/pack_sqle.json +0 -47
  420. teradataml/analytics/sqle/json/pmmlpredict_sqle.json +0 -55
  421. teradataml/analytics/sqle/json/sessionize_sqle.json +0 -43
  422. teradataml/analytics/sqle/json/stringsimilarity_sqle.json +0 -39
  423. teradataml/analytics/sqle/json/svmsparsepredict_sqle.json +0 -74
  424. teradataml/analytics/sqle/json/unpack_sqle.json +0 -80
  425. teradataml/catalog/model_cataloging.py +0 -980
  426. teradataml/config/mlengine_alias_definitions_v1.0 +0 -118
  427. teradataml/config/mlengine_alias_definitions_v1.1 +0 -127
  428. teradataml/config/mlengine_alias_definitions_v1.3 +0 -129
  429. teradataml/table_operators/sandbox_container_util.py +0 -643
  430. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/WHEEL +0 -0
  431. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/top_level.txt +0 -0
  432. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/zip-safe +0 -0
teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py

@@ -219,16 +219,15 @@ def GLMPredictPerSegment(newdata=None, object=None, id_column=None, accumulate=N
  iter_max=100)

  # Predict the homestyle using GLMPredictPerSegment().
- GLMPredictPerSegment_out_2 = GLMPredictPerSegment(
- newdata=housing_test_ordinal_encodingtransform.result,
- newdata_partition_column="stories",
- object=GLMPerSegment_out_2,
- object_partition_column="stories",
- id_column="sn",
- output_prob=True,
- output_responses=["0", "1"]
- )
+ GLMPredictPerSegment_out_2 = GLMPredictPerSegment(newdata=housing_test_ordinal_encodingtransform.result,
+ newdata_partition_column="stories",
+ object=GLMPerSegment_out_2,
+ object_partition_column="stories",
+ id_column="sn",
+ output_prob=True,
+ output_responses=["0", "1"]
+ )

  # Print the result DataFrame.
  print(GLMPredictPerSegment_out_2.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py

@@ -123,8 +123,9 @@ def KMeansPredict(data=None, object=None, accumulate=None, output_distance=False
  # using the model generated by the KMeans() function.
  # Note that teradataml DataFrame representing the model
  # is passed as input to "object".
- KMeansPredict_out = KMeansPredict(object=KMeans_out.result,
- data=computers_train1)
+ KMeansPredict_out = KMeansPredict(data=computers_train1,
+ object=KMeans_out.result
+ )

  # Print the result DataFrames.
  print(KMeansPredict_out.result)
teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py

@@ -16,6 +16,7 @@ def NaiveBayesTextClassifierPredict(object=None, newdata=None,
  NaiveBayesTextClassifierTrainer() function to predict the outcomes for a test set
  of data.

+
  PARAMETERS:
  object:
  Required Argument.

@@ -151,24 +152,24 @@ def NaiveBayesTextClassifierPredict(object=None, newdata=None,
  display_analytic_functions()

  # Create a model which is output of NaiveBayesTextClassifierTrainer.
- nbt_out = NaiveBayesTextClassifierTrainer(data = token_table,
- token_column = 'token',
- doc_id_column = 'doc_id',
- doc_category_column = 'category',
- model_type = "Bernoulli",
- data_partition_column = 'category')
+ nbt_out = NaiveBayesTextClassifierTrainer(data=token_table,
+ token_column='token',
+ doc_id_column='doc_id',
+ doc_category_column='category',
+ model_type="Bernoulli",
+ data_partition_column='category')

  # Example: Run NaiveBayesTextClassifierPredict() on model generated by
  # NaiveBayesTextClassifierTrainer() where model_type is "Bernoulli".
- nbt_predict_out = NaiveBayesTextClassifierPredict(object = nbt_out,
- newdata = complaints_tokens_test,
- input_token_column = 'token',
- doc_id_columns = 'doc_id',
- model_type = "Bernoulli",
- model_token_column = 'token',
- model_category_column = 'category',
- model_prob_column = 'prob',
- newdata_partition_column = 'doc_id')
+ nbt_predict_out = NaiveBayesTextClassifierPredict(newdata=complaints_tokens_test,
+ object=nbt_out,
+ input_token_column='token',
+ doc_id_columns='doc_id',
+ model_type="Bernoulli",
+ model_token_column='token',
+ model_category_column='category',
+ model_prob_column='prob',
+ newdata_partition_column='doc_id')

  # Print the result DataFrame.
  print(nbt_predict_out.result)
teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py

@@ -1,5 +1,5 @@
- def NaiveBayesTextClassifierTrainer(data = None, doc_category_column = None, token_column = None, doc_id_column = None,
- model_type = "MULTINOMIAL", **generic_arguments):
+ def NaiveBayesTextClassifierTrainer(data=None, doc_category_column=None, token_column=None, doc_id_column=None,
+ model_type="MULTINOMIAL", **generic_arguments):
  """
  DESCRIPTION:
  The NaiveBayesTextClassifierTrainer() function calculates the conditional probabilities for
teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py

@@ -1,5 +1,5 @@
- def NonLinearCombineFit(data = None, target_columns = None, formula = None,
- result_column = None, **generic_arguments):
+ def NonLinearCombineFit(data=None, target_columns=None, formula=None,
+ result_column=None, **generic_arguments):
  """
  DESCRIPTION:
  The NonLinearCombineFit() function returns the target columns and a
teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py

@@ -1,24 +1,24 @@
- def NonLinearCombineTransform(data = None, object = None, accumulate = None, **generic_arguments):
+ def NonLinearCombineTransform(data=None, object=None, accumulate=None, **generic_arguments):
  """
  DESCRIPTION:
  The NonLinearCombineTransform() function generates a new feature
  by taking a non-linear combination of existing features using the
  parameters from the output of NonLinearCombineFit() function.
-
-
+
+
  PARAMETERS:
  data:
  Required Argument.
  Specifies the input teradataml DataFrame.
  Types: teradataml DataFrame
-
+
  object:
  Required Argument.
  Specifies the teradataml DataFrame containing the fit parameters
  and target columns, generated by the NonLinearCombineFit() function
  or the instance of NonLinearCombineFit.
  Types: teradataml DataFrame or NonLinearCombineFit
-
+
  accumulate:
  Optional Argument.
  Specifies the name(s) of input teradataml DataFrame column(s)

@@ -26,7 +26,7 @@ def NonLinearCombineTransform(data = None, object = None, accumulate = None, **g
  By default, the function copies no input teradataml
  DataFrame columns to the output.
  Types: str OR list of Strings (str)
-
+
  **generic_arguments:
  Specifies the generic keyword arguments SQLE functions accept. Below
  are the generic keyword arguments:

@@ -100,12 +100,12 @@ def NonLinearCombineTransform(data = None, object = None, accumulate = None, **g
  target_columns = ["sibsp", "parch", "fare"],
  formula = "Y=(X0+X1+1)*X2",
  result_column = "total_cost")
-
+
  # Example 1 : Get the total cost for each passenger.
  NonLinearCombineTransform_out = NonLinearCombineTransform(data=titanic,
  object=Fit_out,
  accumulate="passenger")
-
+
  # Print the result DataFrame.
  print(NonLinearCombineTransform_out.result)

teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py

@@ -4,7 +4,7 @@ def OneClassSVMPredict(object=None, newdata=None, id_column=None,
  """
  DESCRIPTION:
  The OneClassSVMPredict() function uses the model generated
- by the function OneClassSVM() to predicts target class labels
+ by the function On eClassSVM() to predicts target class labels
  (classification) on new input data. Output values are 0 and 1.
  A value of 1 corresponds to a 'normal' observation, and a value
  of 0 is assigned to 'outlier' observations.

@@ -142,27 +142,27 @@ def OneClassSVMPredict(object=None, newdata=None, id_column=None,

  # Train the input data by OneClassSVM which helps model
  # to find anomalies in transformed data.
- one_class_svm=OneClassSVM(data=transform_obj.result,
- input_columns=['MedInc', 'HouseAge', 'AveRooms',
- 'AveBedrms', 'Population', 'AveOccup',
- 'Latitude', 'Longitude'],
- local_sgd_iterations=537,
- batch_size=1,
- learning_rate='constant',
- initial_eta=0.01,
- lambda1=0.1,
- alpha=0.0,
- momentum=0.0,
- iter_max=1
- )
+ one_class_svm = OneClassSVM(data=transform_obj.result,
+ input_columns=['MedInc', 'HouseAge', 'AveRooms',
+ 'AveBedrms', 'Population', 'AveOccup',
+ 'Latitude', 'Longitude'],
+ local_sgd_iterations=537,
+ batch_size=1,
+ learning_rate='constant',
+ initial_eta=0.01,
+ lambda1=0.1,
+ alpha=0.0,
+ momentum=0.0,
+ iter_max=1
+ )



  # Example 1 : Using trained data by OneClassSVM model, predict whether observation
  # is outlier or normal in the form of '0' or '1' on "newdata".
- OneClassSVMPredict_out1 = OneClassSVMPredict(object = one_class_svm.result,
- newdata = transform_obj.result,
- id_column = "id"
+ OneClassSVMPredict_out1 = OneClassSVMPredict(newdata=transform_obj.result,
+ object=one_class_svm.result,
+ id_column="id"
  )

  # Print the result DataFrame.

@@ -171,13 +171,14 @@ def OneClassSVMPredict(object=None, newdata=None, id_column=None,
  # Example 2 : Using trained data by OneClassSVM model, predict whether observation
  # is outlier or normal in the form of '0' or '1' also provides probability
  # of outcome of '0' and '1' on "newdata".
- OneClassSVMPredict_out2 = OneClassSVMPredict(object = one_class_svm,
- newdata = transform_obj.result,
- id_column = "id",
+ OneClassSVMPredict_out2 = OneClassSVMPredict(newdata=transform_obj.result,
+ object=one_class_svm,
+ id_column="id",
  accumulate="MedInc",
  output_prob=True,
  output_responses=["0", "1"]
  )
+
  # Print the result DataFrame.
  print(OneClassSVMPredict_out2.result)

teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py

@@ -112,4 +112,4 @@ def OneHotEncodingTransform(data=None, object=None, is_input_dense=None, **gener

  # Print the result DataFrame.
  print(obj1.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py

@@ -5,7 +5,11 @@ def OutlierFilterTransform(data=None, object=None, **generic_arguments):
  OutlierFilterTransform() uses the result DataFrame from OutlierFilterFit() function to get
  statistics like median, count of rows, lower percentile and upper percentile for every column
  specified in target columns argument and filters the outliers in the input data.
-
+ Notes:
+ * Partitioning of input data and model is allowed using 'data_partition_column' and
+ 'object_partition_column' only if 'group_columns' are passed while creating model
+ using OutlierFilterFit() function.
+ * Neither 'data_partition_column' nor 'object_partition_column' can be used independently.

  PARAMETERS:
  data:

@@ -105,8 +109,9 @@ def OutlierFilterTransform(data=None, object=None, **generic_arguments):
  # method. Note that model is passed as instance of
  # OutlierFilterFit to "object".
  obj1 = OutlierFilterTransform(data=titanic_data,
- object=fit_obj)
+ object=fit_obj
+ )

  # Print the result DataFrame.
  print(obj1.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py

@@ -5,19 +5,19 @@ def PolynomialFeaturesTransform(data=None, object=None, accumulate=None, **gener
  combinations of the feature by extracting the target column, degree, bias and interaction
  information from the output of the PolynomialFeaturesFit() function.

-
+
  PARAMETERS:
  data:
  Required Argument.
  Specifies the input teradataml DataFrame.
  Types: teradataml DataFrame
-
+
  object:
  Required Argument.
  Specifies the teradataml DataFrame containing the output of generated by
  PolynomialFeaturesFit() function or the instance of PolynomialFeaturesFit.
  Types: teradataml DataFrame or PolynomialFeaturesFit
-
+
  accumulate:
  Optional Argument.
  Specifies the names of input teradataml DataFrame columns to copy to the output.

@@ -104,8 +104,9 @@ def PolynomialFeaturesTransform(data=None, object=None, accumulate=None, **gener
  # Example 2: Generate feature matrix. Note that model is passed as instance of
  # PolynomialFeaturesFit to "object".
  obj1 = PolynomialFeaturesTransform(data=numerics,
- object=fit_obj)
+ object=fit_obj
+ )

  # Print the result DataFrame.
  print(obj1.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py

@@ -4,26 +4,26 @@ def RandomProjectionTransform(object=None, data=None, accumulate=None, **generic
  The RandomProjectionTransform() function converts the
  high-dimensional input data to a low-dimensional space
  using the RandomProjectionFit() function output.
-
-
+
+
  PARAMETERS:
  object:
  Required Argument.
  Specifies the teradataml DataFrame containing the output generated by
  RandomProjectionFit() function or the instance of RandomProjectionFit.
  Types: teradataml DataFrame or RandomProjectionFit
-
+
  data:
  Required Argument.
  Specifies the input teradataml DataFrame.
  Types: teradataml DataFrame
-
+
  accumulate:
  Optional Argument.
  Specifies the name(s) of input teradataml DataFrame column(s) to copy to the
  output. By default, only transformed columns are present in the output.
  Types: str OR list of Strings (str)
-
+
  **generic_arguments:
  Specifies the generic keyword arguments SQLE functions accept. Below
  are the generic keyword arguments:

@@ -45,7 +45,7 @@ def RandomProjectionTransform(object=None, data=None, accumulate=None, **generic
  otherwise not.
  Default Value: False
  Types: bool
-
+
  Function allows the user to partition, hash, order or local
  order the input data. These generic arguments are available
  for each argument that accepts teradataml DataFrame as
teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py

@@ -4,6 +4,7 @@ def RowNormalizeTransform(data=None, object=None, accumulate=None, **generic_arg
  RowNormalizeTransform() function normalizes input columns row-wise, using
  RowNormalizeFit() function output.

+
  PARAMETERS:
  data:
  Required Argument.

@@ -107,4 +108,4 @@ def RowNormalizeTransform(data=None, object=None, accumulate=None, **generic_arg

  # Print the result DataFrame.
  print(obj1.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/SVM.py

@@ -1,4 +1,4 @@
- def SVM(formula=None, data = None, input_columns=None, response_column=None, model_type="Classification",
+ def SVM(formula=None, data=None, input_columns=None, response_column=None, model_type="Classification",
  iter_max=300, epsilon=0.1, batch_size=10, lambda1=0.02, alpha=0.15, iter_num_no_change=50,
  tolerance=0.001, intercept=True, class_weights="0:1.0, 1:1.0", learning_rate=None,
  initial_eta=0.05, decay_rate=0.25, decay_steps=5, momentum=0.0, nesterov=False,
teradataml/data/docs/sqle/docs_17_20/SVMPredict.py

@@ -1,5 +1,5 @@
- def SVMPredict(object = None, newdata = None, id_column = None, accumulate = None,
- output_prob = False, output_responses = None, **generic_arguments):
+ def SVMPredict(object=None, newdata=None, id_column=None, accumulate=None,
+ output_prob=False, output_responses=None, **generic_arguments):
  """
  DESCRIPTION:
  The SVMPredict() function uses the model generated by the function SVM() to

@@ -150,12 +150,13 @@ def SVMPredict(object = None, newdata = None, id_column = None, accumulate = Non
  response_column="MedHouseVal",
  model_type="Regression"
  )
+
  # SVMPredict() predicts target values using regression model by SVM().
- SVMPredict_out1 = SVMPredict(object = svm_obj1.result,
- newdata = transform_obj.result,
- id_column = "id",
- accumulate = "MedHouseVal"
- )
+ SVMPredict_out1 = SVMPredict(newdata=transform_obj.result,
+ object=svm_obj1.result,
+ id_column="id",
+ accumulate="MedHouseVal"
+ )

  # Print the result DataFrame.
  print(SVMPredict_out1.result)

@@ -185,18 +186,17 @@ def SVMPredict(object = None, newdata = None, id_column = None, accumulate = Non
  nesterov_optimization=True,
  local_sgd_iterations=1,
  )
+
  # SVMPredict() predicts target values using classification model by SVM() and
  # instance of SVM passed to SVMPredict.
- SVMPredict_out2 = SVMPredict(object = svm_obj2,
- newdata = transform_obj.result,
- id_column = "id",
- accumulate = "MedHouseVal",
- output_prob = True,
- output_responses = ["0", "1"]
- )
+ SVMPredict_out2 = SVMPredict(newdata=transform_obj.result,
+ object=svm_obj2,
+ id_column="id",
+ accumulate="MedHouseVal",
+ output_prob=True,
+ output_responses=["0", "1"]
+ )

  # Print the result DataFrame.
  print(SVMPredict_out2.result)
  """
-
-
teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py

@@ -3,6 +3,7 @@ def ScaleTransform(data=None, object=None, accumulate=None, **generic_arguments)
  DESCRIPTION:
  ScaleTransform() function scales specified columns in input data, using ScaleFit() function output.

+
  PARAMETERS:
  data:
  Required Argument.
teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py

@@ -101,8 +101,9 @@ def SimpleImputeTransform(data=None, object=None, **generic_arguments):
  # Example 2: Impute the values for missing values. Note that model is passed
  # as instance of SimpleImputeFit to "object".
  obj1 = SimpleImputeTransform(data=titanic,
- object=fit_obj)
+ object=fit_obj
+ )

  # Print the result DataFrame.
  print(obj1.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py

@@ -1,6 +1,6 @@
- def TDDecisionForestPredict(newdata = None, object = None, id_column = None,
- detailed = False, output_prob = False, output_responses = None,
- accumulate = None, **generic_arguments):
+ def TDDecisionForestPredict(newdata=None, object=None, id_column=None,
+ detailed=False, output_prob=False, output_responses=None,
+ accumulate=None, **generic_arguments):
  """
  DESCRIPTION:
  TDDecisionForestPredict() function uses the model output by DecisionForest()

@@ -134,7 +134,7 @@ def TDDecisionForestPredict(newdata = None, object = None, id_column = None,

  # Example 1 : This example takes boston data as input, and generates the Regression
  # model using DecisionForest(). Using TDDecisionForestPredict() function
- # to predict the medv with the Regression model generated by XGBoost().
+ # to predict the medv with the Regression model generated by DecisionForest().

  # Create 2 samples of input data - sample 1 will have 80% of total rows and
  # sample 2 will have 20% of total rows.
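The corrected comment above pairs TDDecisionForestPredict() with a model from DecisionForest() rather than XGBoost(). A minimal sketch of that pairing, assuming a connected session and boston-style train/test DataFrames; the table names, column names, and the tree_type value are assumptions for illustration, not taken from this diff:

    # Sketch only: boston_train/boston_test and their columns are assumed.
    from teradataml import DecisionForest, TDDecisionForestPredict

    # Train a regression forest on the assumed training sample.
    model = DecisionForest(data=boston_train,
                           input_columns=["crim", "rm", "lstat"],
                           response_column="medv",
                           tree_type="REGRESSION")

    # Predict with the matching predictor, passing the model DataFrame
    # output by DecisionForest() to "object".
    pred = TDDecisionForestPredict(newdata=boston_test,
                                   object=model.result,
                                   id_column="id",
                                   accumulate="medv")
    print(pred.result)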
teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py

@@ -13,33 +13,33 @@ def TDGLMPredict(object=None, newdata=None, id_column=None, accumulate=None, out
  * User can use RegressionEvaluator(), ClassificationEvaluator(), or ROC() function as a
  post-processing step for evaluating prediction results.
  * The TDGLMPredict() function accepts models from GLM() function in SQLE.
-
-
+
+
  PARAMETERS:
  object:
  Required Argument.
  Specifies the teradataml DataFrame containing the model data generated by GLM()
  function or the instance of GLM.
  Types: teradataml DataFrame or GLM
-
+
  newdata:
  Required Argument.
  Specifies the teradataml DataFrame containing the input data.
  Types: teradataml DataFrame
-
+
  id_column:
  Required Argument.
  Specifies the name of the column that uniquely identifies an
  observation in the test data.
  Types: str
-
+
  accumulate:
  Optional Argument.
  Specifies the name(s) of input teradataml DataFrame column(s) to copy to the
  output. By default, the function copies no input teradataml DataFrame columns
  to the output.
  Types: str OR list of Strings (str)
-
+
  output_prob:
  Optional Argument.
  Specifies whether the function should output the probability for each

@@ -48,7 +48,7 @@ def TDGLMPredict(object=None, newdata=None, id_column=None, accumulate=None, out
  Only applicable if the "family" is 'Binomial'.
  Default Value: False
  Types: bool
-
+
  output_responses:
  Optional Argument.
  Specifies the class labels for which to output probabilities.

@@ -57,7 +57,7 @@ def TDGLMPredict(object=None, newdata=None, id_column=None, accumulate=None, out
  Note:
  Only applicable if "output_prob" is True.
  Types: str OR list of strs
-
+
  **generic_arguments:
  Specifies the generic keyword arguments SQLE functions accept. Below
  are the generic keyword arguments:

@@ -143,29 +143,29 @@ def TDGLMPredict(object=None, newdata=None, id_column=None, accumulate=None, out
  accumulate=["id","MedHouseVal"])

  # Generate regression model using generalized linear model(GLM).
- answer=GLM(input_columns=["MedInc", "HouseAge", "AveRooms", "AveBedrms", "Population", "AveOccup",
- "Latitude", "Longitude"],
- response_column="MedHouseVal",
- data=obj.result,
- nesterov=False)
+ answer = GLM(input_columns=["MedInc", "HouseAge", "AveRooms", "AveBedrms", "Population", "AveOccup",
+ "Latitude", "Longitude"],
+ response_column="MedHouseVal",
+ data=obj.result,
+ nesterov=False)

  # TDGLMPredict() predicts 'MedHouseVal' using generated regression model by GLM and newdata.
  # Note that teradataml DataFrame representing the model is passed as input to "object".
- TDGLMPredict_out = TDGLMPredict(object=answer.result,
- newdata=obj.result,
+ TDGLMPredict_out = TDGLMPredict(newdata=obj.result,
+ object=answer.result,
  accumulate="MedHouseVal",
  id_column="id")
-
+
  # Print the result DataFrame.
  print(TDGLMPredict_out.result)

  # Example 2: TDGLMPredict() predicts the 'MedHouseVal' using generated regression model
  # by GLM and newdata. Note that model is passed as instance of GLM to "object".
- TDGLMPredict_out1 = TDGLMPredict(object=answer,
- newdata=obj.result,
+ TDGLMPredict_out1 = TDGLMPredict(newdata=obj.result,
+ object=answer,
  accumulate="MedHouseVal",
  id_column="id")

  # Print the result DataFrame.
  print(TDGLMPredict_out1.result)
- """
+ """
teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py

@@ -17,6 +17,7 @@ def TargetEncodingTransform(data=None, object=None, accumulate=None, **generic_a
  the "default_values" argument is also not used during
  TargetEncodingFit() function.

+
  PARAMETERS:
  data:
  Required Argument.

@@ -115,9 +116,9 @@ def TargetEncodingTransform(data=None, object=None, accumulate=None, **generic_a
  # Find the distinct count of 'sex' and 'embarked' in which only 2 column should be present
  # name 'ColumnName' and 'CategoryCount'.
  category_data=categorical_summ.result.groupby('ColumnName').count()
- category_data = category_data.assign(drop_columns = True,
- ColumnName = category_data.ColumnName,
- CategoryCount = category_data.count_DistinctValue)
+ category_data = category_data.assign(drop_columns=True,
+ ColumnName=category_data.ColumnName,
+ CategoryCount=category_data.count_DistinctValue)

  # Generates the required hyperparameters when "encoder_method" is 'CBM_BETA'.
  TargetEncodingFit_out = TargetEncodingFit(data=data_input,

@@ -137,4 +138,4 @@ def TargetEncodingTransform(data=None, object=None, accumulate=None, **generic_a
  # Print the result DataFrame.
  print(TargetEncodingTransform_out.result)

- """
+ """
teradataml/data/docs/sqle/docs_17_20/Transform.py

@@ -4,6 +4,7 @@ def Transform(data=None, object=None, id_columns=None, **generic_arguments):
  The Transform() function applies numeric transformations to input columns,
  using Fit() output.

+
  PARAMETERS:
  data:
  Required Argument.

@@ -119,5 +120,4 @@ def Transform(data=None, object=None, id_columns=None, **generic_arguments):

  # Print the result DataFrame.
  print(transform_result1.result)
-
- """
+ """