teradataml 17.20.0.6__py3-none-any.whl → 20.0.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

This version of teradataml has been flagged as potentially problematic.
Files changed (432)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +238 -1
  4. teradataml/__init__.py +13 -3
  5. teradataml/_version.py +1 -1
  6. teradataml/analytics/Transformations.py +4 -4
  7. teradataml/analytics/__init__.py +0 -2
  8. teradataml/analytics/analytic_function_executor.py +3 -0
  9. teradataml/analytics/json_parser/utils.py +13 -12
  10. teradataml/analytics/sqle/DecisionTreePredict.py +15 -30
  11. teradataml/analytics/sqle/NaiveBayesPredict.py +11 -20
  12. teradataml/analytics/sqle/__init__.py +0 -13
  13. teradataml/analytics/utils.py +1 -0
  14. teradataml/analytics/valib.py +3 -0
  15. teradataml/automl/__init__.py +1628 -0
  16. teradataml/automl/custom_json_utils.py +1270 -0
  17. teradataml/automl/data_preparation.py +993 -0
  18. teradataml/automl/data_transformation.py +727 -0
  19. teradataml/automl/feature_engineering.py +1648 -0
  20. teradataml/automl/feature_exploration.py +547 -0
  21. teradataml/automl/model_evaluation.py +163 -0
  22. teradataml/automl/model_training.py +887 -0
  23. teradataml/catalog/__init__.py +0 -2
  24. teradataml/catalog/byom.py +49 -6
  25. teradataml/catalog/function_argument_mapper.py +0 -2
  26. teradataml/catalog/model_cataloging_utils.py +2 -1021
  27. teradataml/common/aed_utils.py +6 -2
  28. teradataml/common/constants.py +50 -58
  29. teradataml/common/deprecations.py +160 -0
  30. teradataml/common/garbagecollector.py +61 -104
  31. teradataml/common/messagecodes.py +27 -36
  32. teradataml/common/messages.py +11 -15
  33. teradataml/common/utils.py +205 -287
  34. teradataml/common/wrapper_utils.py +1 -110
  35. teradataml/context/context.py +150 -78
  36. teradataml/data/bank_churn.csv +10001 -0
  37. teradataml/data/bmi.csv +501 -0
  38. teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +3 -3
  39. teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +6 -5
  40. teradataml/data/docs/sqle/docs_17_10/Fit.py +1 -1
  41. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +1 -1
  42. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +1 -1
  43. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +2 -2
  44. teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +2 -1
  45. teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +1 -0
  46. teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +1 -1
  47. teradataml/data/docs/sqle/docs_17_10/Transform.py +2 -1
  48. teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +3 -3
  49. teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +6 -5
  50. teradataml/data/docs/sqle/docs_17_20/Fit.py +1 -1
  51. teradataml/data/docs/sqle/docs_17_20/GLM.py +1 -1
  52. teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +9 -10
  53. teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +3 -2
  54. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +16 -15
  55. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +2 -2
  56. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +2 -2
  57. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +8 -8
  58. teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +21 -20
  59. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +1 -1
  60. teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +8 -3
  61. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +6 -5
  62. teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +6 -6
  63. teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +2 -1
  64. teradataml/data/docs/sqle/docs_17_20/SVM.py +1 -1
  65. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +16 -16
  66. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +1 -0
  67. teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +3 -2
  68. teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +4 -4
  69. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +19 -19
  70. teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +5 -4
  71. teradataml/data/docs/sqle/docs_17_20/Transform.py +2 -2
  72. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +9 -9
  73. teradataml/data/fish.csv +160 -0
  74. teradataml/data/glass_types.csv +215 -0
  75. teradataml/data/insurance.csv +1 -1
  76. teradataml/data/iris_data.csv +151 -0
  77. teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +1 -0
  78. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +1 -0
  79. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +1 -0
  80. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +1 -0
  81. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +1 -0
  82. teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +1 -0
  83. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +1 -0
  84. teradataml/data/load_example_data.py +3 -0
  85. teradataml/data/multi_model_classification.csv +401 -0
  86. teradataml/data/multi_model_regression.csv +401 -0
  87. teradataml/data/openml_example.json +63 -0
  88. teradataml/data/scripts/deploy_script.py +65 -0
  89. teradataml/data/scripts/mapper.R +20 -0
  90. teradataml/data/scripts/sklearn/__init__.py +0 -0
  91. teradataml/data/scripts/sklearn/sklearn_fit.py +175 -0
  92. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +135 -0
  93. teradataml/data/scripts/sklearn/sklearn_function.template +113 -0
  94. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +158 -0
  95. teradataml/data/scripts/sklearn/sklearn_neighbors.py +152 -0
  96. teradataml/data/scripts/sklearn/sklearn_score.py +128 -0
  97. teradataml/data/scripts/sklearn/sklearn_transform.py +179 -0
  98. teradataml/data/templates/open_source_ml.json +9 -0
  99. teradataml/data/teradataml_example.json +73 -1
  100. teradataml/data/test_classification.csv +101 -0
  101. teradataml/data/test_prediction.csv +101 -0
  102. teradataml/data/test_regression.csv +101 -0
  103. teradataml/data/train_multiclass.csv +101 -0
  104. teradataml/data/train_regression.csv +101 -0
  105. teradataml/data/train_regression_multiple_labels.csv +101 -0
  106. teradataml/data/wine_data.csv +1600 -0
  107. teradataml/dataframe/copy_to.py +79 -13
  108. teradataml/dataframe/data_transfer.py +8 -0
  109. teradataml/dataframe/dataframe.py +910 -311
  110. teradataml/dataframe/dataframe_utils.py +102 -5
  111. teradataml/dataframe/fastload.py +11 -3
  112. teradataml/dataframe/setop.py +15 -2
  113. teradataml/dataframe/sql.py +3735 -77
  114. teradataml/dataframe/sql_function_parameters.py +56 -5
  115. teradataml/dataframe/vantage_function_types.py +45 -1
  116. teradataml/dataframe/window.py +30 -29
  117. teradataml/dbutils/dbutils.py +18 -1
  118. teradataml/geospatial/geodataframe.py +18 -7
  119. teradataml/geospatial/geodataframecolumn.py +5 -0
  120. teradataml/hyperparameter_tuner/optimizer.py +910 -120
  121. teradataml/hyperparameter_tuner/utils.py +131 -37
  122. teradataml/lib/aed_0_1.dll +0 -0
  123. teradataml/lib/libaed_0_1.dylib +0 -0
  124. teradataml/lib/libaed_0_1.so +0 -0
  125. teradataml/libaed_0_1.dylib +0 -0
  126. teradataml/libaed_0_1.so +0 -0
  127. teradataml/opensource/__init__.py +1 -0
  128. teradataml/opensource/sklearn/__init__.py +1 -0
  129. teradataml/opensource/sklearn/_class.py +255 -0
  130. teradataml/opensource/sklearn/_sklearn_wrapper.py +1668 -0
  131. teradataml/opensource/sklearn/_wrapper_utils.py +268 -0
  132. teradataml/opensource/sklearn/constants.py +54 -0
  133. teradataml/options/__init__.py +3 -6
  134. teradataml/options/configure.py +21 -20
  135. teradataml/scriptmgmt/UserEnv.py +61 -5
  136. teradataml/scriptmgmt/lls_utils.py +135 -53
  137. teradataml/table_operators/Apply.py +38 -6
  138. teradataml/table_operators/Script.py +45 -308
  139. teradataml/table_operators/TableOperator.py +182 -591
  140. teradataml/table_operators/__init__.py +0 -1
  141. teradataml/table_operators/table_operator_util.py +32 -40
  142. teradataml/utils/validators.py +127 -3
  143. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/METADATA +243 -3
  144. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/RECORD +147 -391
  145. teradataml/analytics/mle/AdaBoost.py +0 -651
  146. teradataml/analytics/mle/AdaBoostPredict.py +0 -564
  147. teradataml/analytics/mle/Antiselect.py +0 -342
  148. teradataml/analytics/mle/Arima.py +0 -641
  149. teradataml/analytics/mle/ArimaPredict.py +0 -477
  150. teradataml/analytics/mle/Attribution.py +0 -1070
  151. teradataml/analytics/mle/Betweenness.py +0 -658
  152. teradataml/analytics/mle/Burst.py +0 -711
  153. teradataml/analytics/mle/CCM.py +0 -600
  154. teradataml/analytics/mle/CCMPrepare.py +0 -324
  155. teradataml/analytics/mle/CFilter.py +0 -460
  156. teradataml/analytics/mle/ChangePointDetection.py +0 -572
  157. teradataml/analytics/mle/ChangePointDetectionRT.py +0 -477
  158. teradataml/analytics/mle/Closeness.py +0 -737
  159. teradataml/analytics/mle/ConfusionMatrix.py +0 -420
  160. teradataml/analytics/mle/Correlation.py +0 -477
  161. teradataml/analytics/mle/Correlation2.py +0 -573
  162. teradataml/analytics/mle/CoxHazardRatio.py +0 -679
  163. teradataml/analytics/mle/CoxPH.py +0 -556
  164. teradataml/analytics/mle/CoxSurvival.py +0 -478
  165. teradataml/analytics/mle/CumulativeMovAvg.py +0 -363
  166. teradataml/analytics/mle/DTW.py +0 -623
  167. teradataml/analytics/mle/DWT.py +0 -564
  168. teradataml/analytics/mle/DWT2D.py +0 -599
  169. teradataml/analytics/mle/DecisionForest.py +0 -716
  170. teradataml/analytics/mle/DecisionForestEvaluator.py +0 -363
  171. teradataml/analytics/mle/DecisionForestPredict.py +0 -561
  172. teradataml/analytics/mle/DecisionTree.py +0 -830
  173. teradataml/analytics/mle/DecisionTreePredict.py +0 -528
  174. teradataml/analytics/mle/ExponentialMovAvg.py +0 -418
  175. teradataml/analytics/mle/FMeasure.py +0 -402
  176. teradataml/analytics/mle/FPGrowth.py +0 -734
  177. teradataml/analytics/mle/FrequentPaths.py +0 -695
  178. teradataml/analytics/mle/GLM.py +0 -558
  179. teradataml/analytics/mle/GLML1L2.py +0 -547
  180. teradataml/analytics/mle/GLML1L2Predict.py +0 -519
  181. teradataml/analytics/mle/GLMPredict.py +0 -529
  182. teradataml/analytics/mle/HMMDecoder.py +0 -945
  183. teradataml/analytics/mle/HMMEvaluator.py +0 -901
  184. teradataml/analytics/mle/HMMSupervised.py +0 -521
  185. teradataml/analytics/mle/HMMUnsupervised.py +0 -572
  186. teradataml/analytics/mle/Histogram.py +0 -561
  187. teradataml/analytics/mle/IDWT.py +0 -476
  188. teradataml/analytics/mle/IDWT2D.py +0 -493
  189. teradataml/analytics/mle/IdentityMatch.py +0 -763
  190. teradataml/analytics/mle/Interpolator.py +0 -918
  191. teradataml/analytics/mle/KMeans.py +0 -485
  192. teradataml/analytics/mle/KNN.py +0 -627
  193. teradataml/analytics/mle/KNNRecommender.py +0 -488
  194. teradataml/analytics/mle/KNNRecommenderPredict.py +0 -581
  195. teradataml/analytics/mle/LAR.py +0 -439
  196. teradataml/analytics/mle/LARPredict.py +0 -478
  197. teradataml/analytics/mle/LDA.py +0 -548
  198. teradataml/analytics/mle/LDAInference.py +0 -492
  199. teradataml/analytics/mle/LDATopicSummary.py +0 -464
  200. teradataml/analytics/mle/LevenshteinDistance.py +0 -450
  201. teradataml/analytics/mle/LinReg.py +0 -433
  202. teradataml/analytics/mle/LinRegPredict.py +0 -438
  203. teradataml/analytics/mle/MinHash.py +0 -544
  204. teradataml/analytics/mle/Modularity.py +0 -587
  205. teradataml/analytics/mle/NEREvaluator.py +0 -410
  206. teradataml/analytics/mle/NERExtractor.py +0 -595
  207. teradataml/analytics/mle/NERTrainer.py +0 -458
  208. teradataml/analytics/mle/NGrams.py +0 -570
  209. teradataml/analytics/mle/NPath.py +0 -634
  210. teradataml/analytics/mle/NTree.py +0 -549
  211. teradataml/analytics/mle/NaiveBayes.py +0 -462
  212. teradataml/analytics/mle/NaiveBayesPredict.py +0 -513
  213. teradataml/analytics/mle/NaiveBayesTextClassifier.py +0 -607
  214. teradataml/analytics/mle/NaiveBayesTextClassifier2.py +0 -531
  215. teradataml/analytics/mle/NaiveBayesTextClassifierPredict.py +0 -799
  216. teradataml/analytics/mle/NamedEntityFinder.py +0 -529
  217. teradataml/analytics/mle/NamedEntityFinderEvaluator.py +0 -414
  218. teradataml/analytics/mle/NamedEntityFinderTrainer.py +0 -396
  219. teradataml/analytics/mle/POSTagger.py +0 -417
  220. teradataml/analytics/mle/Pack.py +0 -411
  221. teradataml/analytics/mle/PageRank.py +0 -535
  222. teradataml/analytics/mle/PathAnalyzer.py +0 -426
  223. teradataml/analytics/mle/PathGenerator.py +0 -367
  224. teradataml/analytics/mle/PathStart.py +0 -464
  225. teradataml/analytics/mle/PathSummarizer.py +0 -470
  226. teradataml/analytics/mle/Pivot.py +0 -471
  227. teradataml/analytics/mle/ROC.py +0 -425
  228. teradataml/analytics/mle/RandomSample.py +0 -637
  229. teradataml/analytics/mle/RandomWalkSample.py +0 -490
  230. teradataml/analytics/mle/SAX.py +0 -779
  231. teradataml/analytics/mle/SVMDense.py +0 -677
  232. teradataml/analytics/mle/SVMDensePredict.py +0 -536
  233. teradataml/analytics/mle/SVMDenseSummary.py +0 -437
  234. teradataml/analytics/mle/SVMSparse.py +0 -557
  235. teradataml/analytics/mle/SVMSparsePredict.py +0 -553
  236. teradataml/analytics/mle/SVMSparseSummary.py +0 -435
  237. teradataml/analytics/mle/Sampling.py +0 -549
  238. teradataml/analytics/mle/Scale.py +0 -565
  239. teradataml/analytics/mle/ScaleByPartition.py +0 -496
  240. teradataml/analytics/mle/ScaleMap.py +0 -378
  241. teradataml/analytics/mle/ScaleSummary.py +0 -320
  242. teradataml/analytics/mle/SentenceExtractor.py +0 -363
  243. teradataml/analytics/mle/SentimentEvaluator.py +0 -432
  244. teradataml/analytics/mle/SentimentExtractor.py +0 -578
  245. teradataml/analytics/mle/SentimentTrainer.py +0 -405
  246. teradataml/analytics/mle/SeriesSplitter.py +0 -641
  247. teradataml/analytics/mle/Sessionize.py +0 -475
  248. teradataml/analytics/mle/SimpleMovAvg.py +0 -397
  249. teradataml/analytics/mle/StringSimilarity.py +0 -425
  250. teradataml/analytics/mle/TF.py +0 -389
  251. teradataml/analytics/mle/TFIDF.py +0 -504
  252. teradataml/analytics/mle/TextChunker.py +0 -414
  253. teradataml/analytics/mle/TextClassifier.py +0 -399
  254. teradataml/analytics/mle/TextClassifierEvaluator.py +0 -413
  255. teradataml/analytics/mle/TextClassifierTrainer.py +0 -565
  256. teradataml/analytics/mle/TextMorph.py +0 -494
  257. teradataml/analytics/mle/TextParser.py +0 -623
  258. teradataml/analytics/mle/TextTagger.py +0 -530
  259. teradataml/analytics/mle/TextTokenizer.py +0 -502
  260. teradataml/analytics/mle/UnivariateStatistics.py +0 -488
  261. teradataml/analytics/mle/Unpack.py +0 -526
  262. teradataml/analytics/mle/Unpivot.py +0 -438
  263. teradataml/analytics/mle/VarMax.py +0 -776
  264. teradataml/analytics/mle/VectorDistance.py +0 -762
  265. teradataml/analytics/mle/WeightedMovAvg.py +0 -400
  266. teradataml/analytics/mle/XGBoost.py +0 -842
  267. teradataml/analytics/mle/XGBoostPredict.py +0 -627
  268. teradataml/analytics/mle/__init__.py +0 -123
  269. teradataml/analytics/mle/json/adaboost_mle.json +0 -135
  270. teradataml/analytics/mle/json/adaboostpredict_mle.json +0 -85
  271. teradataml/analytics/mle/json/antiselect_mle.json +0 -34
  272. teradataml/analytics/mle/json/antiselect_mle_mle.json +0 -34
  273. teradataml/analytics/mle/json/arima_mle.json +0 -172
  274. teradataml/analytics/mle/json/arimapredict_mle.json +0 -52
  275. teradataml/analytics/mle/json/attribution_mle_mle.json +0 -143
  276. teradataml/analytics/mle/json/betweenness_mle.json +0 -97
  277. teradataml/analytics/mle/json/burst_mle.json +0 -140
  278. teradataml/analytics/mle/json/ccm_mle.json +0 -124
  279. teradataml/analytics/mle/json/ccmprepare_mle.json +0 -14
  280. teradataml/analytics/mle/json/cfilter_mle.json +0 -93
  281. teradataml/analytics/mle/json/changepointdetection_mle.json +0 -92
  282. teradataml/analytics/mle/json/changepointdetectionrt_mle.json +0 -78
  283. teradataml/analytics/mle/json/closeness_mle.json +0 -104
  284. teradataml/analytics/mle/json/confusionmatrix_mle.json +0 -79
  285. teradataml/analytics/mle/json/correlation_mle.json +0 -86
  286. teradataml/analytics/mle/json/correlationreduce_mle.json +0 -49
  287. teradataml/analytics/mle/json/coxhazardratio_mle.json +0 -89
  288. teradataml/analytics/mle/json/coxph_mle.json +0 -98
  289. teradataml/analytics/mle/json/coxsurvival_mle.json +0 -79
  290. teradataml/analytics/mle/json/cumulativemovavg_mle.json +0 -34
  291. teradataml/analytics/mle/json/decisionforest_mle.json +0 -167
  292. teradataml/analytics/mle/json/decisionforestevaluator_mle.json +0 -33
  293. teradataml/analytics/mle/json/decisionforestpredict_mle_mle.json +0 -74
  294. teradataml/analytics/mle/json/decisiontree_mle.json +0 -194
  295. teradataml/analytics/mle/json/decisiontreepredict_mle_mle.json +0 -86
  296. teradataml/analytics/mle/json/dtw_mle.json +0 -97
  297. teradataml/analytics/mle/json/dwt2d_mle.json +0 -116
  298. teradataml/analytics/mle/json/dwt_mle.json +0 -101
  299. teradataml/analytics/mle/json/exponentialmovavg_mle.json +0 -55
  300. teradataml/analytics/mle/json/fmeasure_mle.json +0 -58
  301. teradataml/analytics/mle/json/fpgrowth_mle.json +0 -159
  302. teradataml/analytics/mle/json/frequentpaths_mle.json +0 -129
  303. teradataml/analytics/mle/json/glm_mle.json +0 -111
  304. teradataml/analytics/mle/json/glml1l2_mle.json +0 -106
  305. teradataml/analytics/mle/json/glml1l2predict_mle.json +0 -57
  306. teradataml/analytics/mle/json/glmpredict_mle_mle.json +0 -74
  307. teradataml/analytics/mle/json/histogram_mle.json +0 -100
  308. teradataml/analytics/mle/json/hmmdecoder_mle.json +0 -192
  309. teradataml/analytics/mle/json/hmmevaluator_mle.json +0 -206
  310. teradataml/analytics/mle/json/hmmsupervised_mle.json +0 -91
  311. teradataml/analytics/mle/json/hmmunsupervised_mle.json +0 -114
  312. teradataml/analytics/mle/json/identitymatch_mle.json +0 -88
  313. teradataml/analytics/mle/json/idwt2d_mle.json +0 -73
  314. teradataml/analytics/mle/json/idwt_mle.json +0 -66
  315. teradataml/analytics/mle/json/interpolator_mle.json +0 -151
  316. teradataml/analytics/mle/json/kmeans_mle.json +0 -97
  317. teradataml/analytics/mle/json/knn_mle.json +0 -141
  318. teradataml/analytics/mle/json/knnrecommender_mle.json +0 -111
  319. teradataml/analytics/mle/json/knnrecommenderpredict_mle.json +0 -75
  320. teradataml/analytics/mle/json/lar_mle.json +0 -78
  321. teradataml/analytics/mle/json/larpredict_mle.json +0 -69
  322. teradataml/analytics/mle/json/lda_mle.json +0 -130
  323. teradataml/analytics/mle/json/ldainference_mle.json +0 -78
  324. teradataml/analytics/mle/json/ldatopicsummary_mle.json +0 -64
  325. teradataml/analytics/mle/json/levenshteindistance_mle.json +0 -92
  326. teradataml/analytics/mle/json/linreg_mle.json +0 -42
  327. teradataml/analytics/mle/json/linregpredict_mle.json +0 -56
  328. teradataml/analytics/mle/json/minhash_mle.json +0 -113
  329. teradataml/analytics/mle/json/modularity_mle.json +0 -91
  330. teradataml/analytics/mle/json/naivebayespredict_mle_mle.json +0 -85
  331. teradataml/analytics/mle/json/naivebayesreduce_mle.json +0 -52
  332. teradataml/analytics/mle/json/naivebayestextclassifierpredict_mle_mle.json +0 -147
  333. teradataml/analytics/mle/json/naivebayestextclassifiertrainer2_mle.json +0 -108
  334. teradataml/analytics/mle/json/naivebayestextclassifiertrainer_mle.json +0 -102
  335. teradataml/analytics/mle/json/namedentityfinder_mle.json +0 -84
  336. teradataml/analytics/mle/json/namedentityfinderevaluatorreduce_mle.json +0 -43
  337. teradataml/analytics/mle/json/namedentityfindertrainer_mle.json +0 -64
  338. teradataml/analytics/mle/json/nerevaluator_mle.json +0 -54
  339. teradataml/analytics/mle/json/nerextractor_mle.json +0 -87
  340. teradataml/analytics/mle/json/nertrainer_mle.json +0 -89
  341. teradataml/analytics/mle/json/ngrams_mle.json +0 -137
  342. teradataml/analytics/mle/json/ngramsplitter_mle_mle.json +0 -137
  343. teradataml/analytics/mle/json/npath@coprocessor_mle.json +0 -73
  344. teradataml/analytics/mle/json/ntree@coprocessor_mle.json +0 -123
  345. teradataml/analytics/mle/json/pack_mle.json +0 -58
  346. teradataml/analytics/mle/json/pack_mle_mle.json +0 -58
  347. teradataml/analytics/mle/json/pagerank_mle.json +0 -81
  348. teradataml/analytics/mle/json/pathanalyzer_mle.json +0 -63
  349. teradataml/analytics/mle/json/pathgenerator_mle.json +0 -40
  350. teradataml/analytics/mle/json/pathstart_mle.json +0 -62
  351. teradataml/analytics/mle/json/pathsummarizer_mle.json +0 -72
  352. teradataml/analytics/mle/json/pivoting_mle.json +0 -71
  353. teradataml/analytics/mle/json/postagger_mle.json +0 -51
  354. teradataml/analytics/mle/json/randomsample_mle.json +0 -131
  355. teradataml/analytics/mle/json/randomwalksample_mle.json +0 -85
  356. teradataml/analytics/mle/json/roc_mle.json +0 -73
  357. teradataml/analytics/mle/json/sampling_mle.json +0 -75
  358. teradataml/analytics/mle/json/sax_mle.json +0 -154
  359. teradataml/analytics/mle/json/scale_mle.json +0 -93
  360. teradataml/analytics/mle/json/scalebypartition_mle.json +0 -89
  361. teradataml/analytics/mle/json/scalemap_mle.json +0 -44
  362. teradataml/analytics/mle/json/scalesummary_mle.json +0 -14
  363. teradataml/analytics/mle/json/sentenceextractor_mle.json +0 -41
  364. teradataml/analytics/mle/json/sentimentevaluator_mle.json +0 -43
  365. teradataml/analytics/mle/json/sentimentextractor_mle.json +0 -100
  366. teradataml/analytics/mle/json/sentimenttrainer_mle.json +0 -68
  367. teradataml/analytics/mle/json/seriessplitter_mle.json +0 -133
  368. teradataml/analytics/mle/json/sessionize_mle_mle.json +0 -62
  369. teradataml/analytics/mle/json/simplemovavg_mle.json +0 -48
  370. teradataml/analytics/mle/json/stringsimilarity_mle.json +0 -50
  371. teradataml/analytics/mle/json/stringsimilarity_mle_mle.json +0 -50
  372. teradataml/analytics/mle/json/svmdense_mle.json +0 -165
  373. teradataml/analytics/mle/json/svmdensepredict_mle.json +0 -95
  374. teradataml/analytics/mle/json/svmdensesummary_mle.json +0 -58
  375. teradataml/analytics/mle/json/svmsparse_mle.json +0 -148
  376. teradataml/analytics/mle/json/svmsparsepredict_mle_mle.json +0 -103
  377. teradataml/analytics/mle/json/svmsparsesummary_mle.json +0 -57
  378. teradataml/analytics/mle/json/textchunker_mle.json +0 -40
  379. teradataml/analytics/mle/json/textclassifier_mle.json +0 -51
  380. teradataml/analytics/mle/json/textclassifierevaluator_mle.json +0 -43
  381. teradataml/analytics/mle/json/textclassifiertrainer_mle.json +0 -103
  382. teradataml/analytics/mle/json/textmorph_mle.json +0 -63
  383. teradataml/analytics/mle/json/textparser_mle.json +0 -166
  384. teradataml/analytics/mle/json/texttagger_mle.json +0 -81
  385. teradataml/analytics/mle/json/texttokenizer_mle.json +0 -91
  386. teradataml/analytics/mle/json/tf_mle.json +0 -33
  387. teradataml/analytics/mle/json/tfidf_mle.json +0 -34
  388. teradataml/analytics/mle/json/univariatestatistics_mle.json +0 -81
  389. teradataml/analytics/mle/json/unpack_mle.json +0 -91
  390. teradataml/analytics/mle/json/unpack_mle_mle.json +0 -91
  391. teradataml/analytics/mle/json/unpivoting_mle.json +0 -63
  392. teradataml/analytics/mle/json/varmax_mle.json +0 -176
  393. teradataml/analytics/mle/json/vectordistance_mle.json +0 -179
  394. teradataml/analytics/mle/json/weightedmovavg_mle.json +0 -48
  395. teradataml/analytics/mle/json/xgboost_mle.json +0 -178
  396. teradataml/analytics/mle/json/xgboostpredict_mle.json +0 -104
  397. teradataml/analytics/sqle/Antiselect.py +0 -321
  398. teradataml/analytics/sqle/Attribution.py +0 -603
  399. teradataml/analytics/sqle/DecisionForestPredict.py +0 -408
  400. teradataml/analytics/sqle/GLMPredict.py +0 -430
  401. teradataml/analytics/sqle/MovingAverage.py +0 -543
  402. teradataml/analytics/sqle/NGramSplitter.py +0 -548
  403. teradataml/analytics/sqle/NPath.py +0 -632
  404. teradataml/analytics/sqle/NaiveBayesTextClassifierPredict.py +0 -515
  405. teradataml/analytics/sqle/Pack.py +0 -388
  406. teradataml/analytics/sqle/SVMSparsePredict.py +0 -464
  407. teradataml/analytics/sqle/Sessionize.py +0 -390
  408. teradataml/analytics/sqle/StringSimilarity.py +0 -400
  409. teradataml/analytics/sqle/Unpack.py +0 -503
  410. teradataml/analytics/sqle/json/antiselect_sqle.json +0 -21
  411. teradataml/analytics/sqle/json/attribution_sqle.json +0 -92
  412. teradataml/analytics/sqle/json/decisionforestpredict_sqle.json +0 -48
  413. teradataml/analytics/sqle/json/glmpredict_sqle.json +0 -48
  414. teradataml/analytics/sqle/json/h2opredict_sqle.json +0 -63
  415. teradataml/analytics/sqle/json/movingaverage_sqle.json +0 -58
  416. teradataml/analytics/sqle/json/naivebayestextclassifierpredict_sqle.json +0 -76
  417. teradataml/analytics/sqle/json/ngramsplitter_sqle.json +0 -126
  418. teradataml/analytics/sqle/json/npath_sqle.json +0 -67
  419. teradataml/analytics/sqle/json/pack_sqle.json +0 -47
  420. teradataml/analytics/sqle/json/pmmlpredict_sqle.json +0 -55
  421. teradataml/analytics/sqle/json/sessionize_sqle.json +0 -43
  422. teradataml/analytics/sqle/json/stringsimilarity_sqle.json +0 -39
  423. teradataml/analytics/sqle/json/svmsparsepredict_sqle.json +0 -74
  424. teradataml/analytics/sqle/json/unpack_sqle.json +0 -80
  425. teradataml/catalog/model_cataloging.py +0 -980
  426. teradataml/config/mlengine_alias_definitions_v1.0 +0 -118
  427. teradataml/config/mlengine_alias_definitions_v1.1 +0 -127
  428. teradataml/config/mlengine_alias_definitions_v1.3 +0 -129
  429. teradataml/table_operators/sandbox_container_util.py +0 -643
  430. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/WHEEL +0 -0
  431. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/top_level.txt +0 -0
  432. {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/zip-safe +0 -0
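The list above removes the entire analytics/mle wrapper family and adds a new teradataml/opensource/sklearn package. As rough orientation only, a hypothetical sketch of how such a scikit-learn-style interface is typically used; the td_sklearn entry point and call shapes are assumptions based on the new file names and Teradata's public OpenSourceML documentation, not on anything in this diff:

    # Hypothetical sketch of the scikit-learn-style interface added under
    # teradataml/opensource/sklearn; names and signatures are assumptions.
    from teradataml import create_context, DataFrame, td_sklearn

    create_context(host="<host>", username="<user>", password="<password>")

    train = DataFrame("train_regression")        # table already loaded in Vantage
    model = td_sklearn.LinearRegression()        # wrapper around the sklearn estimator
    model.fit(train.select(["X1", "X2"]), train.select(["Y"]))
    print(model.coef_)                           # fitted attributes fetched on access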
teradataml/analytics/mle/Interpolator.py (deleted)
@@ -1,918 +0,0 @@
- #!/usr/bin/python
- # ##################################################################
- #
- # Copyright 2018 Teradata. All rights reserved.
- # TERADATA CONFIDENTIAL AND TRADE SECRET
- #
- # Primary Owner: Rohit Agrawal (rohit.agrawal@teradata.com)
- # Secondary Owner: Pankaj Purandare (pankajvinod.purandare@teradata.com)
- #
- # Version: 1.2
- # Function Version: 1.2
- #
- # ##################################################################
-
- import inspect
- import time
- from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
- from teradataml.common.utils import UtilFuncs
- from teradataml.context.context import *
- from teradataml.dataframe.dataframe import DataFrame
- from teradataml.common.aed_utils import AedUtils
- from teradataml.analytics.analytic_query_generator import AnalyticQueryGenerator
- from teradataml.common.exceptions import TeradataMlException
- from teradataml.common.messages import Messages
- from teradataml.common.messagecodes import MessageCodes
- from teradataml.common.constants import TeradataConstants
- from teradataml.dataframe.dataframe_utils import DataFrameUtils as df_utils
- from teradataml.options.display import display
-
- class Interpolator:
-
-     def __init__(self,
-                  data = None,
-                  time_data = None,
-                  count_rownumber = None,
-                  time_column = None,
-                  value_columns = None,
-                  time_interval = None,
-                  interpolation_type = None,
-                  aggregation_type = None,
-                  time_datatype = None,
-                  value_datatype = None,
-                  start_time = None,
-                  end_time = None,
-                  values_before_first = None,
-                  values_after_last = None,
-                  duplicate_rows_count = None,
-                  accumulate = None,
-                  data_sequence_column = None,
-                  time_data_sequence_column = None,
-                  count_rownumber_sequence_column = None,
-                  data_partition_column = None,
-                  count_rownumber_partition_column = None,
-                  data_order_column = None,
-                  time_data_order_column = None,
-                  count_rownumber_order_column = None):
- """
58
- DESCRIPTION:
59
- The Interpolator function calculates missing values in a time series,
60
- using either interpolation or aggregation. Interpolation estimates
61
- missing values between known values. Aggregation combines known
62
- values to produce an aggregate value.
63
-
64
-
65
- PARAMETERS:
66
- data:
67
- Required Argument.
68
- Specifies the teradataml DataFrame that contains the input data.
69
-
70
- data_partition_column:
71
- Required Argument.
72
- Specifies Partition By columns for data.
73
- Values to this argument can be provided as a list, if multiple
74
- columns are used for partition.
75
- Types: str OR list of Strings (str)
76
-
77
- data_order_column:
78
- Required Argument.
79
- Specifies Order By columns for data.
80
- Values to this argument can be provided as a list, if multiple
81
- columns are used for ordering.
82
- Types: str OR list of Strings (str)
83
-
84
- time_data:
85
- Optional Argument.
86
- Specifies the teradataml DataFrame name which contains time.
87
- If you specify time_data then the function calculates an interpolated
88
- value for each time point.
89
- Note:
90
- If you omit time_data, you must specify the time_interval
91
- argument.
92
-
93
- time_data_order_column:
94
- Optional Argument.
95
- Specifies Order By columns for time_data.
96
- Values to this argument can be provided as a list, if multiple
97
- columns are used for ordering.
98
- Types: str OR list of Strings (str)
99
-
100
- count_rownumber:
101
- Optional Argument.
102
- Specifies the teradataml DataFrame name which contains proportion
103
- of time points.
104
- Note:
105
- It is only used with interpolation_type.
106
- ("loess"(weights ({constant | tricube}), degree ({0 | 1 | 2}), span(m))),
107
- where m is between (x+1)/n and 1.
108
-
109
-             count_rownumber_partition_column:
-                 Optional Argument.
-                 Specifies Partition By columns for count_rownumber.
-                 Values to this argument can be provided as a list, if multiple
-                 columns are used for partition.
-                 Types: str OR list of Strings (str)
-
-             count_rownumber_order_column:
-                 Optional Argument.
-                 Specifies Order By columns for count_rownumber.
-                 Values to this argument can be provided as a list, if multiple
-                 columns are used for ordering.
-                 Types: str OR list of Strings (str)
-
-             time_column:
-                 Required Argument.
-                 Specifies the name of the input teradataml DataFrame data column that
-                 contains the time points of the time series whose missing values are
-                 to be calculated.
-                 Types: str
-
-             value_columns:
-                 Required Argument.
-                 Specifies the names of input teradataml DataFrame data columns to
-                 interpolate to the output teradataml DataFrame.
-                 Types: str OR list of Strings (str)
-
-             time_interval:
-                 Optional Argument. Required when time_data is not provided.
-                 Specifies the length of time, in seconds, between calculated values.
-                 If you specify time_interval then the function calculates an
-                 interpolated value for a time point only if the value is missing
-                 in the original time series; otherwise, the function copies the original value.
-                 Note:
-                     1. If you specify aggregation_type, the function ignores time_data or
-                        time_interval and calculates the aggregated value for each point in the
-                        time series.
-                     2. Specify exactly one of time_data or time_interval.
-                 Types: int or float
-
-             interpolation_type:
-                 Optional Argument.
-                 Specifies interpolation types for the columns that value_columns
-                 specifies. If you specify interpolation_type, then it must be the
-                 same size as value_columns. That is, if value_columns specifies n
-                 columns, then interpolation_type must specify n interpolation types.
-                 For i in [1, n], value_column_i has interpolation_type_i. However,
-                 interpolation_type_i can be empty;
-                 for example:
-                     value_columns (c1, c2, c3)
-                     interpolation_type ("linear", ,"constant")
-                 An empty interpolation_type has the default value.
-                 The function calculates the value for each missing time point using a
-                 low-degree polynomial based on a set of nearest neighbors.
-                 The possible values of interpolation_type are as follows:
-                 * "linear" (default): The value for each missing time point is
-                   determined using linear interpolation between the two nearest points.
-                 * "constant": The value for each missing time point is set
-                   to the nearest value.
-                   You must use this option if value_column has SQL data type CHARACTER,
-                   CHARACTER(n), or VARCHAR.
-                 * "spline[(type(cubic))]": The value for each missing time point is
-                   determined by fitting a cubic spline to the nearest three points.
-                 * "median[(window(n))]": The value for each missing time point is set
-                   to the median value of the nearest n time points.
-                   n must be greater than or equal to 2.
-                   The default value of n is 5.
-                 * "loess[(weights({constant | tricube}), degree({0 | 1 | 2}),
-                   span(m))]":
-                     * weights:
-                         * constant: All time points are equally weighted.
-                         * tricube: Time points closer to the missing data point are more heavily
-                           weighted than those farther away.
-                       The default value is constant.
-                     * degree: Degree of polynomial.
-                       The default value is 1.
-                     * m: Two choices:
-                         * It is either an integer greater than 1 (which specifies the number of
-                           neighboring points)
-                         * Specifies proportion of time points to use in each fit.
-                           You must provide count_rownumber, and m must be between (x+1)/n and 1,
-                           where x is the specified degree and n is the number of rows in the partition.
-                       The default value of m is 5.
-                 Note:
-                     1. Specify only one of interpolation_type or aggregation_type.
-                     2. If you omit both syntax elements, the function uses interpolation_type
-                        with its default value, 'linear'.
-                     3. For SQL data types CHARACTER, CHARACTER(n), and VARCHAR, you cannot use
-                        aggregation_type. You must use interpolation_type, and interpolation_type
-                        must be 'constant'.
-                     4. In interpolation_type syntax, brackets do not indicate optional
-                        elements - you must include them.
-                 Types: str OR list of strs
-
-             aggregation_type:
-                 Optional Argument.
-                 Specifies the aggregation types of the columns that value_columns
-                 specifies. If you specify aggregation_type, then it must be the same
-                 size as value_columns. That is, if value_columns specifies n columns,
-                 then aggregation_type must specify n aggregation types. For i in [1,
-                 n], value_column_i has aggregation_type_i. However, aggregation_type_i
-                 can be empty.
-                 for example:
-                     value_columns (c1, c2, c3)
-                     aggregation_type (min, ,max)
-                 An empty aggregation_type has the default value.
-                 The syntax of aggregation_type is:
-                     { min | max | mean | mode | sum } [(window(n))]
-                 The function calculates the aggregate value as the minimum, maximum,
-                 mean, mode, or sum within a sliding window of length n. n must be
-                 greater than or equal to 2.
-                 The default value of n is 5.
-                 The default aggregation method is min.
-                 The Interpolator function can calculate the aggregates of values of
-                 these SQL data types:
-                 * int
-                 * BIGINT
-                 * SMALLINT
-                 * float
-                 * DECIMAL(n,n)
-                 * DECIMAL
-                 * NUMERIC
-                 * NUMERIC(n,n)
-                 Note:
-                     1. Specify only one of aggregation_type or interpolation_type.
-                     2. If you omit both syntax elements, the function uses interpolation_type
-                        with its default value, 'linear'.
-                     3. Aggregation calculations ignore the values in time_interval or in the
-                        time_data. The function calculates the aggregated value for each value
-                        in the time series.
-                     4. In aggregation_type syntax, brackets do not indicate optional
-                        elements - you must include them.
-                 Types: str OR list of strs
-
-             time_datatype:
-                 Optional Argument.
-                 Specifies the data type of the output column that corresponds to the
-                 input teradataml DataFrame data column that time_column specifies
-                 (time_column).
-                 If you omit this argument, then the function infers the data type of
-                 time_column from the input teradataml DataFrame data and uses the inferred
-                 data type for the corresponding output teradataml DataFrame column.
-                 If you specify this argument, then the function can transform the input
-                 data to the specified output data type only if both the input column
-                 data type and the specified output column data type are in this list:
-                 * int
-                 * BIGINT
-                 * SMALLINT
-                 * float
-                 * DECIMAL(n,n)
-                 * DECIMAL
-                 * NUMERIC
-                 * NUMERIC(n,n)
-                 Types: str
-
-             value_datatype:
-                 Optional Argument.
-                 Specifies the data types of the output columns that correspond to
-                 the input teradataml DataFrame data columns that value_columns specifies.
-                 If you omit this argument, then the function infers the data type of
-                 each value_column from the input teradataml DataFrame data and uses the
-                 inferred data type for the corresponding output teradataml DataFrame
-                 column.
-                 If you specify value_datatype, then it must be the same size as
-                 value_columns. That is, if value_columns specifies n columns, then
-                 value_datatype must specify n data types. For i in [1, n], value_column_i
-                 has value_type_i. However, value_type_i can be empty;
-                 for example:
-                     value_columns (c1, c2, c3)
-                     value_datatype (int, ,VARCHAR)
-                 If you specify this argument, then the function can transform the
-                 input data to the specified output data type only if both the input
-                 column data type and the specified output column data type are
-                 in this list:
-                 * int
-                 * BIGINT
-                 * SMALLINT
-                 * float
-                 * DECIMAL(n,n)
-                 * DECIMAL
-                 * NUMERIC
-                 * NUMERIC(n,n)
-                 Types: str OR list of strs
-
-             start_time:
-                 Optional Argument.
-                 Specifies the start time for the time series.
-                 The default value is the start time of the time series in input
-                 teradataml DataFrame.
-                 Types: str
-
-             end_time:
-                 Optional Argument.
-                 Specifies the end time for the time series.
-                 The default value is the end time of the time series in input
-                 teradataml DataFrame.
-                 Types: str
-
-             values_before_first:
-                 Optional Argument.
-                 Specifies the values to use if start_time is before the start time of
-                 the time series in input teradataml DataFrame. Each of these values
-                 must have the same data type as its corresponding value_column. Values
-                 of data type VARCHAR are case-insensitive.
-                 If value_columns specifies n columns, then values_before_first must
-                 specify n values. For i in [1, n], value_column_i has the value
-                 before_first_value_i. However, before_first_value_i can be empty;
-                 for example:
-                     value_columns (c1, c2, c3)
-                     values_before_first (1, ,"abc")
-                 If before_first_value_i is empty, then value_column_i has the value NULL.
-                 If you do not specify values_before_first, then value_column_i has the
-                 value NULL for i in [1, n].
-                 Types: str OR list of strs
-
-             values_after_last:
-                 Optional Argument.
-                 Specifies the values to use if end_time is after the end time of the
-                 time series in input teradataml DataFrame. Each of these values must
-                 have the same data type as its corresponding value_column. Values of
-                 data type VARCHAR are case-insensitive.
-                 If value_columns specifies n columns, then values_after_last must
-                 specify n values. For i in [1, n], value_column_i has the value
-                 after_last_value_i. However, after_last_value_i can be empty;
-                 for example:
-                     value_columns (c1, c2, c3)
-                     values_after_last (1, ,"abc")
-                 If after_last_value_i is empty, then value_column_i has the value NULL.
-                 If you do not specify values_after_last, then value_column_i has the
-                 value NULL for i in [1, n].
-                 Types: str OR list of strs
-
-             duplicate_rows_count:
-                 Optional Argument.
-                 Specifies the number of rows to duplicate across split boundaries if
-                 you use the SeriesSplitter function.
-                 If you specify only value1, then the function duplicates value1 rows
-                 from the previous partition and value1 rows from the next partition.
-                 If you specify both value1 and value2, then the function duplicates value1
-                 rows from the previous partition and value2 rows from the next partition.
-                 Each argument value must be a non-negative int. Both value1 and value2 must
-                 exceed the number of time points that the function needs for every
-                 specified interpolation or aggregation method. For aggregation, the
-                 number of time points required is determined by the value of n in window(n)
-                 specified by aggregation_type.
-                 The interpolation methods and the number of time points that the function
-                 needs for them are:
-                 * "linear": 1
-                 * "constant": 1
-                 * "spline": 2
-                 * "median [(window(n))]": n/2
-                 * "loess [(weights ({constant | tricube}), degree ({0 | 1 | 2}), span(m))]":
-                     * m > 1: m-1
-                     * m < 1: (m * n)-1
-                       where n is total number of data rows, found in column n of the
-                       count_rownumber DataFrame.
-                 Types: int OR list of ints
-
-             accumulate:
-                 Optional Argument.
-                 Specifies the names of input teradataml DataFrame columns (other than those
-                 specified by time_column and value_columns) to copy to the output table.
-                 By default, the function copies to the output teradataml DataFrame only
-                 the columns specified by time_column and value_columns.
-                 Types: str OR list of Strings (str)
-
-             data_sequence_column:
-                 Optional Argument.
-                 Specifies the list of column(s) that uniquely identifies each row of
-                 the input argument "data". The argument is used to ensure
-                 deterministic results for functions which produce results that vary
-                 from run to run.
-                 Types: str OR list of Strings (str)
-
-             time_data_sequence_column:
-                 Optional Argument.
-                 Specifies the list of column(s) that uniquely identifies each row of
-                 the input argument "time_data". The argument is used to ensure
-                 deterministic results for functions which produce results that vary
-                 from run to run.
-                 Types: str OR list of Strings (str)
-
-             count_rownumber_sequence_column:
-                 Optional Argument.
-                 Specifies the list of column(s) that uniquely identifies each row of
-                 the input argument "count_rownumber". The argument is used to ensure
-                 deterministic results for functions which produce results that vary
-                 from run to run.
-                 Types: str OR list of Strings (str)
-
-         RETURNS:
-             Instance of Interpolator.
-             Output teradataml DataFrames can be accessed using attribute
-             references, such as InterpolatorObj.<attribute_name>.
-             Output teradataml DataFrame attribute name is:
-                 result
-
-
-         RAISES:
-             TeradataMlException
-
-
-         EXAMPLES:
-             # Load the data to run the example.
-             load_example_data("Interpolator", ["ibm_stock1", "time_table1"])
-
-             # Create teradataml DataFrame.
-             ibm_stock1 = DataFrame.from_table("ibm_stock1")
-             time_table1 = DataFrame.from_table("time_table1")
-
-             # Example 1 : Running Interpolator function with aggregation_type min.
-             interpolator_out1 = Interpolator(data=ibm_stock1,
-                                              data_partition_column='id',
-                                              data_order_column='period',
-                                              time_data=time_table1,
-                                              time_data_order_column='period',
-                                              time_column='period',
-                                              value_columns='stockprice',
-                                              accumulate='id',
-                                              aggregation_type='min[(window(2))]',
-                                              values_before_first='0',
-                                              values_after_last='0',
-                                              data_sequence_column='period'
-                                              )
-
-             # Print the result DataFrame.
-             print(interpolator_out1.result)
-
-             # Example 2 : Running Interpolator function with constant interpolation.
-             interpolator_out2 = Interpolator(data=ibm_stock1,
-                                              data_partition_column='id',
-                                              data_order_column='period',
-                                              time_column='period',
-                                              value_columns='stockprice',
-                                              accumulate='id',
-                                              time_interval=86400.0,
-                                              interpolation_type='constant',
-                                              values_before_first='0',
-                                              values_after_last='0'
-                                              )
-
-             # Print the result DataFrame.
-             print(interpolator_out2.result)
-
-             # Example 3 : Running Interpolator function with linear interpolation.
-             interpolator_out3 = Interpolator(data=ibm_stock1,
-                                              data_partition_column='id',
-                                              data_order_column='period',
-                                              time_column='period',
-                                              value_columns='stockprice',
-                                              accumulate='id',
-                                              time_interval=86400.0,
-                                              interpolation_type='linear',
-                                              values_before_first='0',
-                                              values_after_last='0'
-                                              )
-
-             # Print the result DataFrame.
-             print(interpolator_out3.result)
-
-             # Example 4 : Running Interpolator function with median interpolation.
-             interpolator_out4 = Interpolator(data=ibm_stock1,
-                                              data_partition_column='id',
-                                              data_order_column='period',
-                                              time_column='period',
-                                              value_columns='stockprice',
-                                              accumulate='id',
-                                              time_interval=86400.0,
-                                              interpolation_type='median[(window(4))]',
-                                              values_before_first='0',
-                                              values_after_last='0'
-                                              )
-
-             # Print the result DataFrame.
-             print(interpolator_out4.result)
-
-             # Example 5 : Running Interpolator function with spline interpolation.
-             interpolator_out5 = Interpolator(data=ibm_stock1,
-                                              data_partition_column='id',
-                                              data_order_column='period',
-                                              time_column='period',
-                                              value_columns='stockprice',
-                                              accumulate='id',
-                                              time_interval=86400.0,
-                                              interpolation_type='spline[(type(cubic))]',
-                                              values_before_first='0',
-                                              values_after_last='0'
-                                              )
-
-             # Print the result DataFrame.
-             print(interpolator_out5.result)
-
-             # Example 6 : Running Interpolator function with loess interpolation.
-             interpolator_out6 = Interpolator(data=ibm_stock1,
-                                              data_partition_column='id',
-                                              data_order_column='period',
-                                              time_column='period',
-                                              value_columns='stockprice',
-                                              accumulate='id',
-                                              time_interval=86400.0,
-                                              interpolation_type='loess[(weights(constant),degree(2),span(4))]',
-                                              values_before_first='0',
-                                              values_after_last='0'
-                                              )
-
-             # Print the result DataFrame.
-             print(interpolator_out6.result)
-
-         """
-
519
- # Start the timer to get the build time
520
- _start_time = time.time()
521
-
522
- self.data = data
523
- self.time_data = time_data
524
- self.count_rownumber = count_rownumber
525
- self.time_column = time_column
526
- self.value_columns = value_columns
527
- self.time_interval = time_interval
528
- self.interpolation_type = interpolation_type
529
- self.aggregation_type = aggregation_type
530
- self.time_datatype = time_datatype
531
- self.value_datatype = value_datatype
532
- self.start_time = start_time
533
- self.end_time = end_time
534
- self.values_before_first = values_before_first
535
- self.values_after_last = values_after_last
536
- self.duplicate_rows_count = duplicate_rows_count
537
- self.accumulate = accumulate
538
- self.data_sequence_column = data_sequence_column
539
- self.time_data_sequence_column = time_data_sequence_column
540
- self.count_rownumber_sequence_column = count_rownumber_sequence_column
541
- self.data_partition_column = data_partition_column
542
- self.count_rownumber_partition_column = count_rownumber_partition_column
543
- self.data_order_column = data_order_column
544
- self.time_data_order_column = time_data_order_column
545
- self.count_rownumber_order_column = count_rownumber_order_column
546
-
547
- # Create TeradataPyWrapperUtils instance which contains validation functions.
548
- self.__awu = AnalyticsWrapperUtils()
549
- self.__aed_utils = AedUtils()
550
-
551
- # Create argument information matrix to do parameter checking
552
- self.__arg_info_matrix = []
553
- self.__arg_info_matrix.append(["data", self.data, False, (DataFrame)])
554
- self.__arg_info_matrix.append(["data_partition_column", self.data_partition_column, False, (str,list)])
555
- self.__arg_info_matrix.append(["data_order_column", self.data_order_column, False, (str,list)])
556
- self.__arg_info_matrix.append(["time_data", self.time_data, True, (DataFrame)])
557
- self.__arg_info_matrix.append(["time_data_order_column", self.time_data_order_column, self.time_data is None, (str,list)])
558
- self.__arg_info_matrix.append(["count_rownumber", self.count_rownumber, True, (DataFrame)])
559
- self.__arg_info_matrix.append(["count_rownumber_partition_column", self.count_rownumber_partition_column, self.count_rownumber is None, (str,list)])
560
- self.__arg_info_matrix.append(["count_rownumber_order_column", self.count_rownumber_order_column, True, (str,list)])
561
- self.__arg_info_matrix.append(["time_column", self.time_column, False, (str)])
562
- self.__arg_info_matrix.append(["value_columns", self.value_columns, False, (str,list)])
563
- self.__arg_info_matrix.append(["time_interval", self.time_interval, True, (int,float)])
564
- self.__arg_info_matrix.append(["interpolation_type", self.interpolation_type, True, (str,list)])
565
- self.__arg_info_matrix.append(["aggregation_type", self.aggregation_type, True, (str,list)])
566
- self.__arg_info_matrix.append(["time_datatype", self.time_datatype, True, (str)])
567
- self.__arg_info_matrix.append(["value_datatype", self.value_datatype, True, (str,list)])
568
- self.__arg_info_matrix.append(["start_time", self.start_time, True, (str)])
569
- self.__arg_info_matrix.append(["end_time", self.end_time, True, (str)])
570
- self.__arg_info_matrix.append(["values_before_first", self.values_before_first, True, (str,list)])
571
- self.__arg_info_matrix.append(["values_after_last", self.values_after_last, True, (str,list)])
572
- self.__arg_info_matrix.append(["duplicate_rows_count", self.duplicate_rows_count, True, (int,list)])
573
- self.__arg_info_matrix.append(["accumulate", self.accumulate, True, (str,list)])
574
- self.__arg_info_matrix.append(["data_sequence_column", self.data_sequence_column, True, (str,list)])
575
- self.__arg_info_matrix.append(["time_data_sequence_column", self.time_data_sequence_column, True, (str,list)])
576
- self.__arg_info_matrix.append(["count_rownumber_sequence_column", self.count_rownumber_sequence_column, True, (str,list)])
577
-
578
- if inspect.stack()[1][3] != '_from_model_catalog':
579
- # Perform the function validations
580
- self.__validate()
581
- # Generate the ML query
582
- self.__form_tdml_query()
583
- # Execute ML query
584
- self.__execute()
585
- # Get the prediction type
586
- self._prediction_type = self.__awu._get_function_prediction_type(self)
587
-
588
- # End the timer to get the build time
589
- _end_time = time.time()
590
-
591
- # Calculate the build time
592
- self._build_time = (int)(_end_time - _start_time)
593
-
594
- def __validate(self):
595
- """
596
- Function to validate sqlmr function arguments, which verifies missing
597
- arguments, input argument and table types. Also processes the
598
- argument values.
599
- """
600
-
601
- # Make sure that a non-NULL value has been supplied for all mandatory arguments
602
- self.__awu._validate_missing_required_arguments(self.__arg_info_matrix)
603
-
604
- # Make sure that a non-NULL value has been supplied correct type of argument
605
- self.__awu._validate_argument_types(self.__arg_info_matrix)
606
-
607
- # Check to make sure input table types are strings or data frame objects or of valid type.
608
- self.__awu._validate_input_table_datatype(self.data, "data", None)
609
- self.__awu._validate_input_table_datatype(self.time_data, "time_data", None)
610
- self.__awu._validate_input_table_datatype(self.count_rownumber, "count_rownumber", None)
611
-
612
- # Check whether the input columns passed to the argument are not empty.
613
- # Also check whether the input columns passed to the argument valid or not.
614
- self.__awu._validate_input_columns_not_empty(self.time_column, "time_column")
615
- self.__awu._validate_dataframe_has_argument_columns(self.time_column, "time_column", self.data, "data", False)
616
-
617
- self.__awu._validate_input_columns_not_empty(self.value_columns, "value_columns")
618
- self.__awu._validate_dataframe_has_argument_columns(self.value_columns, "value_columns", self.data, "data", False)
619
-
620
- self.__awu._validate_input_columns_not_empty(self.accumulate, "accumulate")
621
- self.__awu._validate_dataframe_has_argument_columns(self.accumulate, "accumulate", self.data, "data", False)
622
-
623
- self.__awu._validate_input_columns_not_empty(self.data_sequence_column, "data_sequence_column")
624
- self.__awu._validate_dataframe_has_argument_columns(self.data_sequence_column, "data_sequence_column", self.data, "data", False)
625
-
626
- self.__awu._validate_input_columns_not_empty(self.time_data_sequence_column, "time_data_sequence_column")
627
- self.__awu._validate_dataframe_has_argument_columns(self.time_data_sequence_column, "time_data_sequence_column", self.time_data, "time_data", False)
628
-
629
- self.__awu._validate_input_columns_not_empty(self.count_rownumber_sequence_column, "count_rownumber_sequence_column")
630
- self.__awu._validate_dataframe_has_argument_columns(self.count_rownumber_sequence_column, "count_rownumber_sequence_column", self.count_rownumber, "count_rownumber", False)
631
-
632
- self.__awu._validate_input_columns_not_empty(self.data_partition_column, "data_partition_column")
633
- self.__awu._validate_dataframe_has_argument_columns(self.data_partition_column, "data_partition_column", self.data, "data", True)
634
-
635
- self.__awu._validate_input_columns_not_empty(self.count_rownumber_partition_column, "count_rownumber_partition_column")
636
- self.__awu._validate_dataframe_has_argument_columns(self.count_rownumber_partition_column, "count_rownumber_partition_column", self.count_rownumber, "count_rownumber", True)
637
-
638
- self.__awu._validate_input_columns_not_empty(self.data_order_column, "data_order_column")
639
- self.__awu._validate_dataframe_has_argument_columns(self.data_order_column, "data_order_column", self.data, "data", False)
640
-
641
- self.__awu._validate_input_columns_not_empty(self.time_data_order_column, "time_data_order_column")
642
- self.__awu._validate_dataframe_has_argument_columns(self.time_data_order_column, "time_data_order_column", self.time_data, "time_data", False)
643
-
644
- self.__awu._validate_input_columns_not_empty(self.count_rownumber_order_column, "count_rownumber_order_column")
645
- self.__awu._validate_dataframe_has_argument_columns(self.count_rownumber_order_column, "count_rownumber_order_column", self.count_rownumber, "count_rownumber", False)
646
-
647
-
648
- def __form_tdml_query(self):
-     """
-     Function to generate the analytical function queries. The function defines
-     variables and lists of arguments required to form the query.
-     """
-
-     # Output table arguments list
-     self.__func_output_args_sql_names = []
-     self.__func_output_args = []
-
-     # Model Cataloging related attributes.
-     self._sql_specific_attributes = {}
-     self._sql_formula_attribute_mapper = {}
-     self._target_column = None
-     self._algorithm_name = None
-
-     # Generate lists for the rest of the function arguments.
-     self.__func_other_arg_sql_names = []
-     self.__func_other_args = []
-     self.__func_other_arg_json_datatypes = []
-
-     self.__func_other_arg_sql_names.append("TimeColumn")
-     self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(UtilFuncs._teradata_quote_arg(self.time_column, "\""), "'"))
-     self.__func_other_arg_json_datatypes.append("COLUMNS")
-
-     self.__func_other_arg_sql_names.append("ValueColumns")
-     self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(UtilFuncs._teradata_quote_arg(self.value_columns, "\""), "'"))
-     self.__func_other_arg_json_datatypes.append("COLUMNS")
-
-     if self.accumulate is not None:
-         self.__func_other_arg_sql_names.append("Accumulate")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(UtilFuncs._teradata_quote_arg(self.accumulate, "\""), "'"))
-         self.__func_other_arg_json_datatypes.append("COLUMNS")
-
-     if self.time_interval is not None:
-         self.__func_other_arg_sql_names.append("TimeInterval")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.time_interval, "'"))
-         self.__func_other_arg_json_datatypes.append("DOUBLE")
-
-     if self.interpolation_type is not None:
-         self.__func_other_arg_sql_names.append("InterpolationType")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.interpolation_type, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.aggregation_type is not None:
-         self.__func_other_arg_sql_names.append("AggregationType")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.aggregation_type, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.time_datatype is not None:
-         self.__func_other_arg_sql_names.append("TimeDataType")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.time_datatype, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.value_datatype is not None:
-         self.__func_other_arg_sql_names.append("ValueDataType")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.value_datatype, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.start_time is not None:
-         self.__func_other_arg_sql_names.append("StartTime")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.start_time, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.end_time is not None:
-         self.__func_other_arg_sql_names.append("EndTime")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.end_time, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.values_before_first is not None:
-         self.__func_other_arg_sql_names.append("ValuesBeforeFirst")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.values_before_first, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.values_after_last is not None:
-         self.__func_other_arg_sql_names.append("ValuesAfterLast")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.values_after_last, "'"))
-         self.__func_other_arg_json_datatypes.append("STRING")
-
-     if self.duplicate_rows_count is not None:
-         self.__func_other_arg_sql_names.append("DuplicateRowsCount")
-         self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.duplicate_rows_count, "'"))
-         self.__func_other_arg_json_datatypes.append("INTEGER")
-
-     # Generate the SequenceInputBy argument from the sequence columns.
-     sequence_input_by_list = []
-     if self.data_sequence_column is not None:
-         sequence_input_by_list.append("input_table:" + UtilFuncs._teradata_collapse_arglist(self.data_sequence_column, ""))
-
-     if self.time_data_sequence_column is not None:
-         sequence_input_by_list.append("time_table:" + UtilFuncs._teradata_collapse_arglist(self.time_data_sequence_column, ""))
-
-     if self.count_rownumber_sequence_column is not None:
-         sequence_input_by_list.append("count_row_number:" + UtilFuncs._teradata_collapse_arglist(self.count_rownumber_sequence_column, ""))
-
-     if len(sequence_input_by_list) > 0:
-         self.__func_other_arg_sql_names.append("SequenceInputBy")
-         sequence_input_by_arg_value = UtilFuncs._teradata_collapse_arglist(sequence_input_by_list, "'")
-         self.__func_other_args.append(sequence_input_by_arg_value)
-         self.__func_other_arg_json_datatypes.append("STRING")
-         self._sql_specific_attributes["SequenceInputBy"] = sequence_input_by_arg_value
-
-
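The quoting helper leaned on throughout this block, UtilFuncs._teradata_collapse_arglist, is internal to teradataml. As a rough illustration of the contract the removed code assumes (a single value or a list in, one comma-separated quoted string out), here is a minimal re-implementation sketch; the name, signature, and exact semantics are approximations, not the library's actual code:

    # Illustrative sketch only: approximates the behavior of
    # UtilFuncs._teradata_collapse_arglist as used above.
    def collapse_arglist(arg, quote):
        # Accept a single value or a list; quote each element and join with commas.
        if isinstance(arg, (list, tuple)):
            return ",".join("{0}{1}{0}".format(quote, a) for a in arg)
        return "{0}{1}{0}".format(quote, arg)

    print(collapse_arglist(["input_table:id", "time_table:id"], "'"))
    # -> 'input_table:id','time_table:id', the SequenceInputBy argument value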
-     # Declare empty lists to hold input table information.
-     self.__func_input_arg_sql_names = []
-     self.__func_input_table_view_query = []
-     self.__func_input_dataframe_type = []
-     self.__func_input_distribution = []
-     self.__func_input_partition_by_cols = []
-     self.__func_input_order_by_cols = []
-
-     # Process data
-     self.data_partition_column = UtilFuncs._teradata_collapse_arglist(self.data_partition_column, "\"")
-     self.__table_ref = self.__awu._teradata_on_clause_from_dataframe(self.data, False)
-     self.__func_input_distribution.append("FACT")
-     self.__func_input_arg_sql_names.append("input_table")
-     self.__func_input_table_view_query.append(self.__table_ref["ref"])
-     self.__func_input_dataframe_type.append(self.__table_ref["ref_type"])
-     self.__func_input_partition_by_cols.append(self.data_partition_column)
-     self.__func_input_order_by_cols.append(UtilFuncs._teradata_collapse_arglist(self.data_order_column, "\""))
-
-     # Process time_data
-     if self.time_data is not None:
-         self.__table_ref = self.__awu._teradata_on_clause_from_dataframe(self.time_data, False)
-         self.__func_input_distribution.append("DIMENSION")
-         self.__func_input_arg_sql_names.append("time_table")
-         self.__func_input_table_view_query.append(self.__table_ref["ref"])
-         self.__func_input_dataframe_type.append(self.__table_ref["ref_type"])
-         self.__func_input_partition_by_cols.append("NA_character_")
-         self.__func_input_order_by_cols.append(UtilFuncs._teradata_collapse_arglist(self.time_data_order_column, "\""))
-
-     # Process count_rownumber
-     self.count_rownumber_partition_column = UtilFuncs._teradata_collapse_arglist(self.count_rownumber_partition_column, "\"")
-     if self.count_rownumber is not None:
-         self.__table_ref = self.__awu._teradata_on_clause_from_dataframe(self.count_rownumber, False)
-         self.__func_input_distribution.append("FACT")
-         self.__func_input_arg_sql_names.append("count_row_number")
-         self.__func_input_table_view_query.append(self.__table_ref["ref"])
-         self.__func_input_dataframe_type.append(self.__table_ref["ref_type"])
-         self.__func_input_partition_by_cols.append(self.count_rownumber_partition_column)
-         self.__func_input_order_by_cols.append(UtilFuncs._teradata_collapse_arglist(self.count_rownumber_order_column, "\""))
-
- function_name = "Interpolator"
791
- # Create instance to generate SQLMR.
792
- self.__aqg_obj = AnalyticQueryGenerator(function_name,
793
- self.__func_input_arg_sql_names,
794
- self.__func_input_table_view_query,
795
- self.__func_input_dataframe_type,
796
- self.__func_input_distribution,
797
- self.__func_input_partition_by_cols,
798
- self.__func_input_order_by_cols,
799
- self.__func_other_arg_sql_names,
800
- self.__func_other_args,
801
- self.__func_other_arg_json_datatypes,
802
- self.__func_output_args_sql_names,
803
- self.__func_output_args,
804
- engine="ENGINE_ML")
805
- # Invoke call to SQL-MR generation.
806
- self.sqlmr_query = self.__aqg_obj._gen_sqlmr_select_stmt_sql()
807
-
808
- # Print SQL-MR query if requested to do so.
809
- if display.print_sqlmr_query:
810
- print(self.sqlmr_query)
811
-
812
- # Set the algorithm name for Model Cataloging.
813
- self._algorithm_name = self.__aqg_obj._get_alias_name_for_function(function_name)
814
-
815
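The display check above is the user-facing hook for inspecting the generated SQL at call time; show_query() below returns the same string after the fact. A short sketch of enabling it (assuming an active teradataml connection; the top-level import of display reflects common teradataml usage and should be verified against the installed version):

    # Sketch: make wrappers print their generated SQL-MR SELECT when invoked.
    from teradataml import display

    display.print_sqlmr_query = True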
- def __execute(self):
-     """
-     Function to execute the SQL-MR query and create DataFrames for the
-     required SQL-MR outputs.
-     """
-     # Generate the STDOUT table name and add it to the output table list.
-     sqlmr_stdout_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_sqlmr_out_", use_default_database=True, gc_on_quit=True, quote=False)
-     try:
-         # Generate the output.
-         UtilFuncs._create_view(sqlmr_stdout_temp_tablename, self.sqlmr_query)
-     except Exception as emsg:
-         raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_EXEC_SQL_FAILED, str(emsg)), MessageCodes.TDMLDF_EXEC_SQL_FAILED)
-
-     # Update output table data frames.
-     self._mlresults = []
-     self.result = self.__awu._create_data_set_object(df_input=UtilFuncs._extract_table_name(sqlmr_stdout_temp_tablename), source_type="table", database_name=UtilFuncs._extract_db_name(sqlmr_stdout_temp_tablename))
-     self._mlresults.append(self.result)
-
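For reference, the final step above has a public-API analogue: once the output view is materialized, it can be wrapped as a teradataml DataFrame. A minimal sketch, where the database and view names are placeholders rather than the garbage-collected temp names the wrapper generates:

    # Sketch: wrap a materialized output view as a teradataml DataFrame.
    # "mydb" and the view name are placeholders, not wrapper-generated values.
    from teradataml import DataFrame, in_schema

    result = DataFrame(in_schema("mydb", "td_sqlmr_out_example"))
    print(result)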
- def show_query(self):
-     """
-     Function to return the underlying SQL query.
-     When the model object is created using retrieve_model(), None is returned.
-     """
-     return self.sqlmr_query
-
- def get_prediction_type(self):
-     """
-     Function to return the prediction type of the algorithm.
-     When the model object is created using retrieve_model(), the value returned
-     is as saved in the Model Catalog.
-     """
-     return self._prediction_type
-
- def get_target_column(self):
-     """
-     Function to return the target column of the algorithm.
-     When the model object is created using retrieve_model(), the value returned
-     is as saved in the Model Catalog.
-     """
-     return self._target_column
-
- def get_build_time(self):
-     """
-     Function to return the build time of the algorithm in seconds.
-     When the model object is created using retrieve_model(), the value returned
-     is as saved in the Model Catalog.
-     """
-     return self._build_time
-
- def _get_algorithm_name(self):
-     """
-     Function to return the name of the algorithm.
-     """
-     return self._algorithm_name
-
- def _get_sql_specific_attributes(self):
-     """
-     Function to return the dictionary containing the SQL-specific attributes
-     of the algorithm.
-     """
-     return self._sql_specific_attributes
-
- @classmethod
- def _from_model_catalog(cls,
-                         result=None,
-                         **kwargs):
-     """
-     Classmethod used by Model Cataloging to instantiate this wrapper class.
-     """
-     kwargs.pop("result", None)
-
-     # Model Cataloging related attributes.
-     target_column = kwargs.pop("__target_column", None)
-     prediction_type = kwargs.pop("__prediction_type", None)
-     algorithm_name = kwargs.pop("__algorithm_name", None)
-     build_time = kwargs.pop("__build_time", None)
-
-     # Let's create an object of this class.
-     obj = cls(**kwargs)
-     obj.result = result
-
-     # Initialize the sqlmr_query class attribute.
-     obj.sqlmr_query = None
-
-     # Initialize the SQL specific Model Cataloging attributes.
-     obj._sql_specific_attributes = None
-     obj._target_column = target_column
-     obj._prediction_type = prediction_type
-     obj._algorithm_name = algorithm_name
-     obj._build_time = build_time
-
-     # Update output table data frames.
-     obj._mlresults = []
-     obj.result = obj.__awu._create_data_set_object(df_input=UtilFuncs._extract_table_name(obj.result), source_type="table", database_name=UtilFuncs._extract_db_name(obj.result))
-     obj._mlresults.append(obj.result)
-     return obj
-
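The getters above refer to retrieve_model(); a hedged sketch of the round trip this classmethod supported under the 17.20-era Model Cataloging API (removed in later releases), with a hypothetical model name:

    # Hedged sketch of the Model Cataloging round trip; the model name is
    # hypothetical and this API no longer exists in current releases.
    from teradataml import retrieve_model

    interp = retrieve_model("my_interpolator_model")
    print(interp.get_prediction_type())
    print(interp.get_build_time())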
- def __repr__(self):
-     """
-     Returns the string representation for an Interpolator class instance.
-     """
-     repr_string = "############ STDOUT Output ############"
-     repr_string = "{}\n\n{}".format(repr_string, self.result)
-     return repr_string
-
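Taken together, the removed class wired argument validation, query generation, and execution into a single constructor call. A usage sketch of how the wrapper was typically invoked in 17.20-era releases, using the ibm_stock example data; the import path, connection details, and argument values are illustrative and should be checked against the installed version:

    # Hedged usage sketch for the removed ML Engine Interpolator wrapper.
    from teradataml import DataFrame, create_context
    from teradataml.analytics.mle import Interpolator  # 17.20-era import path

    create_context(host="vantage.example.com", username="user", password="***")

    ibm_stock = DataFrame("ibm_stock")  # example data: id, name, period, stockprice
    interp = Interpolator(data=ibm_stock,
                          time_column="period",
                          value_columns="stockprice",
                          time_interval=86400.0,
                          interpolation_type="linear",
                          data_partition_column="id",
                          data_order_column="period")

    print(interp.show_query())  # the generated SQL-MR SELECT
    print(interp.result)        # interpolated output as a teradataml DataFrame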