teradataml 17.20.0.6__py3-none-any.whl → 20.0.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of teradataml might be problematic. Click here for more details.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +238 -1
- teradataml/__init__.py +13 -3
- teradataml/_version.py +1 -1
- teradataml/analytics/Transformations.py +4 -4
- teradataml/analytics/__init__.py +0 -2
- teradataml/analytics/analytic_function_executor.py +3 -0
- teradataml/analytics/json_parser/utils.py +13 -12
- teradataml/analytics/sqle/DecisionTreePredict.py +15 -30
- teradataml/analytics/sqle/NaiveBayesPredict.py +11 -20
- teradataml/analytics/sqle/__init__.py +0 -13
- teradataml/analytics/utils.py +1 -0
- teradataml/analytics/valib.py +3 -0
- teradataml/automl/__init__.py +1628 -0
- teradataml/automl/custom_json_utils.py +1270 -0
- teradataml/automl/data_preparation.py +993 -0
- teradataml/automl/data_transformation.py +727 -0
- teradataml/automl/feature_engineering.py +1648 -0
- teradataml/automl/feature_exploration.py +547 -0
- teradataml/automl/model_evaluation.py +163 -0
- teradataml/automl/model_training.py +887 -0
- teradataml/catalog/__init__.py +0 -2
- teradataml/catalog/byom.py +49 -6
- teradataml/catalog/function_argument_mapper.py +0 -2
- teradataml/catalog/model_cataloging_utils.py +2 -1021
- teradataml/common/aed_utils.py +6 -2
- teradataml/common/constants.py +50 -58
- teradataml/common/deprecations.py +160 -0
- teradataml/common/garbagecollector.py +61 -104
- teradataml/common/messagecodes.py +27 -36
- teradataml/common/messages.py +11 -15
- teradataml/common/utils.py +205 -287
- teradataml/common/wrapper_utils.py +1 -110
- teradataml/context/context.py +150 -78
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +3 -3
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +6 -5
- teradataml/data/docs/sqle/docs_17_10/Fit.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +2 -2
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +2 -1
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +1 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/Transform.py +2 -1
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +3 -3
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +6 -5
- teradataml/data/docs/sqle/docs_17_20/Fit.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/GLM.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +9 -10
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +3 -2
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +16 -15
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +2 -2
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +2 -2
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +8 -8
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +21 -20
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +8 -3
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +6 -5
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +6 -6
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +2 -1
- teradataml/data/docs/sqle/docs_17_20/SVM.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +16 -16
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +1 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +3 -2
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +4 -4
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +19 -19
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +5 -4
- teradataml/data/docs/sqle/docs_17_20/Transform.py +2 -2
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +9 -9
- teradataml/data/fish.csv +160 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/insurance.csv +1 -1
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +1 -0
- teradataml/data/load_example_data.py +3 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/openml_example.json +63 -0
- teradataml/data/scripts/deploy_script.py +65 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +175 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +135 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +113 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +158 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +152 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +128 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +179 -0
- teradataml/data/templates/open_source_ml.json +9 -0
- teradataml/data/teradataml_example.json +73 -1
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/dataframe/copy_to.py +79 -13
- teradataml/dataframe/data_transfer.py +8 -0
- teradataml/dataframe/dataframe.py +910 -311
- teradataml/dataframe/dataframe_utils.py +102 -5
- teradataml/dataframe/fastload.py +11 -3
- teradataml/dataframe/setop.py +15 -2
- teradataml/dataframe/sql.py +3735 -77
- teradataml/dataframe/sql_function_parameters.py +56 -5
- teradataml/dataframe/vantage_function_types.py +45 -1
- teradataml/dataframe/window.py +30 -29
- teradataml/dbutils/dbutils.py +18 -1
- teradataml/geospatial/geodataframe.py +18 -7
- teradataml/geospatial/geodataframecolumn.py +5 -0
- teradataml/hyperparameter_tuner/optimizer.py +910 -120
- teradataml/hyperparameter_tuner/utils.py +131 -37
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/libaed_0_1.dylib +0 -0
- teradataml/libaed_0_1.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/sklearn/__init__.py +1 -0
- teradataml/opensource/sklearn/_class.py +255 -0
- teradataml/opensource/sklearn/_sklearn_wrapper.py +1668 -0
- teradataml/opensource/sklearn/_wrapper_utils.py +268 -0
- teradataml/opensource/sklearn/constants.py +54 -0
- teradataml/options/__init__.py +3 -6
- teradataml/options/configure.py +21 -20
- teradataml/scriptmgmt/UserEnv.py +61 -5
- teradataml/scriptmgmt/lls_utils.py +135 -53
- teradataml/table_operators/Apply.py +38 -6
- teradataml/table_operators/Script.py +45 -308
- teradataml/table_operators/TableOperator.py +182 -591
- teradataml/table_operators/__init__.py +0 -1
- teradataml/table_operators/table_operator_util.py +32 -40
- teradataml/utils/validators.py +127 -3
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/METADATA +243 -3
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/RECORD +147 -391
- teradataml/analytics/mle/AdaBoost.py +0 -651
- teradataml/analytics/mle/AdaBoostPredict.py +0 -564
- teradataml/analytics/mle/Antiselect.py +0 -342
- teradataml/analytics/mle/Arima.py +0 -641
- teradataml/analytics/mle/ArimaPredict.py +0 -477
- teradataml/analytics/mle/Attribution.py +0 -1070
- teradataml/analytics/mle/Betweenness.py +0 -658
- teradataml/analytics/mle/Burst.py +0 -711
- teradataml/analytics/mle/CCM.py +0 -600
- teradataml/analytics/mle/CCMPrepare.py +0 -324
- teradataml/analytics/mle/CFilter.py +0 -460
- teradataml/analytics/mle/ChangePointDetection.py +0 -572
- teradataml/analytics/mle/ChangePointDetectionRT.py +0 -477
- teradataml/analytics/mle/Closeness.py +0 -737
- teradataml/analytics/mle/ConfusionMatrix.py +0 -420
- teradataml/analytics/mle/Correlation.py +0 -477
- teradataml/analytics/mle/Correlation2.py +0 -573
- teradataml/analytics/mle/CoxHazardRatio.py +0 -679
- teradataml/analytics/mle/CoxPH.py +0 -556
- teradataml/analytics/mle/CoxSurvival.py +0 -478
- teradataml/analytics/mle/CumulativeMovAvg.py +0 -363
- teradataml/analytics/mle/DTW.py +0 -623
- teradataml/analytics/mle/DWT.py +0 -564
- teradataml/analytics/mle/DWT2D.py +0 -599
- teradataml/analytics/mle/DecisionForest.py +0 -716
- teradataml/analytics/mle/DecisionForestEvaluator.py +0 -363
- teradataml/analytics/mle/DecisionForestPredict.py +0 -561
- teradataml/analytics/mle/DecisionTree.py +0 -830
- teradataml/analytics/mle/DecisionTreePredict.py +0 -528
- teradataml/analytics/mle/ExponentialMovAvg.py +0 -418
- teradataml/analytics/mle/FMeasure.py +0 -402
- teradataml/analytics/mle/FPGrowth.py +0 -734
- teradataml/analytics/mle/FrequentPaths.py +0 -695
- teradataml/analytics/mle/GLM.py +0 -558
- teradataml/analytics/mle/GLML1L2.py +0 -547
- teradataml/analytics/mle/GLML1L2Predict.py +0 -519
- teradataml/analytics/mle/GLMPredict.py +0 -529
- teradataml/analytics/mle/HMMDecoder.py +0 -945
- teradataml/analytics/mle/HMMEvaluator.py +0 -901
- teradataml/analytics/mle/HMMSupervised.py +0 -521
- teradataml/analytics/mle/HMMUnsupervised.py +0 -572
- teradataml/analytics/mle/Histogram.py +0 -561
- teradataml/analytics/mle/IDWT.py +0 -476
- teradataml/analytics/mle/IDWT2D.py +0 -493
- teradataml/analytics/mle/IdentityMatch.py +0 -763
- teradataml/analytics/mle/Interpolator.py +0 -918
- teradataml/analytics/mle/KMeans.py +0 -485
- teradataml/analytics/mle/KNN.py +0 -627
- teradataml/analytics/mle/KNNRecommender.py +0 -488
- teradataml/analytics/mle/KNNRecommenderPredict.py +0 -581
- teradataml/analytics/mle/LAR.py +0 -439
- teradataml/analytics/mle/LARPredict.py +0 -478
- teradataml/analytics/mle/LDA.py +0 -548
- teradataml/analytics/mle/LDAInference.py +0 -492
- teradataml/analytics/mle/LDATopicSummary.py +0 -464
- teradataml/analytics/mle/LevenshteinDistance.py +0 -450
- teradataml/analytics/mle/LinReg.py +0 -433
- teradataml/analytics/mle/LinRegPredict.py +0 -438
- teradataml/analytics/mle/MinHash.py +0 -544
- teradataml/analytics/mle/Modularity.py +0 -587
- teradataml/analytics/mle/NEREvaluator.py +0 -410
- teradataml/analytics/mle/NERExtractor.py +0 -595
- teradataml/analytics/mle/NERTrainer.py +0 -458
- teradataml/analytics/mle/NGrams.py +0 -570
- teradataml/analytics/mle/NPath.py +0 -634
- teradataml/analytics/mle/NTree.py +0 -549
- teradataml/analytics/mle/NaiveBayes.py +0 -462
- teradataml/analytics/mle/NaiveBayesPredict.py +0 -513
- teradataml/analytics/mle/NaiveBayesTextClassifier.py +0 -607
- teradataml/analytics/mle/NaiveBayesTextClassifier2.py +0 -531
- teradataml/analytics/mle/NaiveBayesTextClassifierPredict.py +0 -799
- teradataml/analytics/mle/NamedEntityFinder.py +0 -529
- teradataml/analytics/mle/NamedEntityFinderEvaluator.py +0 -414
- teradataml/analytics/mle/NamedEntityFinderTrainer.py +0 -396
- teradataml/analytics/mle/POSTagger.py +0 -417
- teradataml/analytics/mle/Pack.py +0 -411
- teradataml/analytics/mle/PageRank.py +0 -535
- teradataml/analytics/mle/PathAnalyzer.py +0 -426
- teradataml/analytics/mle/PathGenerator.py +0 -367
- teradataml/analytics/mle/PathStart.py +0 -464
- teradataml/analytics/mle/PathSummarizer.py +0 -470
- teradataml/analytics/mle/Pivot.py +0 -471
- teradataml/analytics/mle/ROC.py +0 -425
- teradataml/analytics/mle/RandomSample.py +0 -637
- teradataml/analytics/mle/RandomWalkSample.py +0 -490
- teradataml/analytics/mle/SAX.py +0 -779
- teradataml/analytics/mle/SVMDense.py +0 -677
- teradataml/analytics/mle/SVMDensePredict.py +0 -536
- teradataml/analytics/mle/SVMDenseSummary.py +0 -437
- teradataml/analytics/mle/SVMSparse.py +0 -557
- teradataml/analytics/mle/SVMSparsePredict.py +0 -553
- teradataml/analytics/mle/SVMSparseSummary.py +0 -435
- teradataml/analytics/mle/Sampling.py +0 -549
- teradataml/analytics/mle/Scale.py +0 -565
- teradataml/analytics/mle/ScaleByPartition.py +0 -496
- teradataml/analytics/mle/ScaleMap.py +0 -378
- teradataml/analytics/mle/ScaleSummary.py +0 -320
- teradataml/analytics/mle/SentenceExtractor.py +0 -363
- teradataml/analytics/mle/SentimentEvaluator.py +0 -432
- teradataml/analytics/mle/SentimentExtractor.py +0 -578
- teradataml/analytics/mle/SentimentTrainer.py +0 -405
- teradataml/analytics/mle/SeriesSplitter.py +0 -641
- teradataml/analytics/mle/Sessionize.py +0 -475
- teradataml/analytics/mle/SimpleMovAvg.py +0 -397
- teradataml/analytics/mle/StringSimilarity.py +0 -425
- teradataml/analytics/mle/TF.py +0 -389
- teradataml/analytics/mle/TFIDF.py +0 -504
- teradataml/analytics/mle/TextChunker.py +0 -414
- teradataml/analytics/mle/TextClassifier.py +0 -399
- teradataml/analytics/mle/TextClassifierEvaluator.py +0 -413
- teradataml/analytics/mle/TextClassifierTrainer.py +0 -565
- teradataml/analytics/mle/TextMorph.py +0 -494
- teradataml/analytics/mle/TextParser.py +0 -623
- teradataml/analytics/mle/TextTagger.py +0 -530
- teradataml/analytics/mle/TextTokenizer.py +0 -502
- teradataml/analytics/mle/UnivariateStatistics.py +0 -488
- teradataml/analytics/mle/Unpack.py +0 -526
- teradataml/analytics/mle/Unpivot.py +0 -438
- teradataml/analytics/mle/VarMax.py +0 -776
- teradataml/analytics/mle/VectorDistance.py +0 -762
- teradataml/analytics/mle/WeightedMovAvg.py +0 -400
- teradataml/analytics/mle/XGBoost.py +0 -842
- teradataml/analytics/mle/XGBoostPredict.py +0 -627
- teradataml/analytics/mle/__init__.py +0 -123
- teradataml/analytics/mle/json/adaboost_mle.json +0 -135
- teradataml/analytics/mle/json/adaboostpredict_mle.json +0 -85
- teradataml/analytics/mle/json/antiselect_mle.json +0 -34
- teradataml/analytics/mle/json/antiselect_mle_mle.json +0 -34
- teradataml/analytics/mle/json/arima_mle.json +0 -172
- teradataml/analytics/mle/json/arimapredict_mle.json +0 -52
- teradataml/analytics/mle/json/attribution_mle_mle.json +0 -143
- teradataml/analytics/mle/json/betweenness_mle.json +0 -97
- teradataml/analytics/mle/json/burst_mle.json +0 -140
- teradataml/analytics/mle/json/ccm_mle.json +0 -124
- teradataml/analytics/mle/json/ccmprepare_mle.json +0 -14
- teradataml/analytics/mle/json/cfilter_mle.json +0 -93
- teradataml/analytics/mle/json/changepointdetection_mle.json +0 -92
- teradataml/analytics/mle/json/changepointdetectionrt_mle.json +0 -78
- teradataml/analytics/mle/json/closeness_mle.json +0 -104
- teradataml/analytics/mle/json/confusionmatrix_mle.json +0 -79
- teradataml/analytics/mle/json/correlation_mle.json +0 -86
- teradataml/analytics/mle/json/correlationreduce_mle.json +0 -49
- teradataml/analytics/mle/json/coxhazardratio_mle.json +0 -89
- teradataml/analytics/mle/json/coxph_mle.json +0 -98
- teradataml/analytics/mle/json/coxsurvival_mle.json +0 -79
- teradataml/analytics/mle/json/cumulativemovavg_mle.json +0 -34
- teradataml/analytics/mle/json/decisionforest_mle.json +0 -167
- teradataml/analytics/mle/json/decisionforestevaluator_mle.json +0 -33
- teradataml/analytics/mle/json/decisionforestpredict_mle_mle.json +0 -74
- teradataml/analytics/mle/json/decisiontree_mle.json +0 -194
- teradataml/analytics/mle/json/decisiontreepredict_mle_mle.json +0 -86
- teradataml/analytics/mle/json/dtw_mle.json +0 -97
- teradataml/analytics/mle/json/dwt2d_mle.json +0 -116
- teradataml/analytics/mle/json/dwt_mle.json +0 -101
- teradataml/analytics/mle/json/exponentialmovavg_mle.json +0 -55
- teradataml/analytics/mle/json/fmeasure_mle.json +0 -58
- teradataml/analytics/mle/json/fpgrowth_mle.json +0 -159
- teradataml/analytics/mle/json/frequentpaths_mle.json +0 -129
- teradataml/analytics/mle/json/glm_mle.json +0 -111
- teradataml/analytics/mle/json/glml1l2_mle.json +0 -106
- teradataml/analytics/mle/json/glml1l2predict_mle.json +0 -57
- teradataml/analytics/mle/json/glmpredict_mle_mle.json +0 -74
- teradataml/analytics/mle/json/histogram_mle.json +0 -100
- teradataml/analytics/mle/json/hmmdecoder_mle.json +0 -192
- teradataml/analytics/mle/json/hmmevaluator_mle.json +0 -206
- teradataml/analytics/mle/json/hmmsupervised_mle.json +0 -91
- teradataml/analytics/mle/json/hmmunsupervised_mle.json +0 -114
- teradataml/analytics/mle/json/identitymatch_mle.json +0 -88
- teradataml/analytics/mle/json/idwt2d_mle.json +0 -73
- teradataml/analytics/mle/json/idwt_mle.json +0 -66
- teradataml/analytics/mle/json/interpolator_mle.json +0 -151
- teradataml/analytics/mle/json/kmeans_mle.json +0 -97
- teradataml/analytics/mle/json/knn_mle.json +0 -141
- teradataml/analytics/mle/json/knnrecommender_mle.json +0 -111
- teradataml/analytics/mle/json/knnrecommenderpredict_mle.json +0 -75
- teradataml/analytics/mle/json/lar_mle.json +0 -78
- teradataml/analytics/mle/json/larpredict_mle.json +0 -69
- teradataml/analytics/mle/json/lda_mle.json +0 -130
- teradataml/analytics/mle/json/ldainference_mle.json +0 -78
- teradataml/analytics/mle/json/ldatopicsummary_mle.json +0 -64
- teradataml/analytics/mle/json/levenshteindistance_mle.json +0 -92
- teradataml/analytics/mle/json/linreg_mle.json +0 -42
- teradataml/analytics/mle/json/linregpredict_mle.json +0 -56
- teradataml/analytics/mle/json/minhash_mle.json +0 -113
- teradataml/analytics/mle/json/modularity_mle.json +0 -91
- teradataml/analytics/mle/json/naivebayespredict_mle_mle.json +0 -85
- teradataml/analytics/mle/json/naivebayesreduce_mle.json +0 -52
- teradataml/analytics/mle/json/naivebayestextclassifierpredict_mle_mle.json +0 -147
- teradataml/analytics/mle/json/naivebayestextclassifiertrainer2_mle.json +0 -108
- teradataml/analytics/mle/json/naivebayestextclassifiertrainer_mle.json +0 -102
- teradataml/analytics/mle/json/namedentityfinder_mle.json +0 -84
- teradataml/analytics/mle/json/namedentityfinderevaluatorreduce_mle.json +0 -43
- teradataml/analytics/mle/json/namedentityfindertrainer_mle.json +0 -64
- teradataml/analytics/mle/json/nerevaluator_mle.json +0 -54
- teradataml/analytics/mle/json/nerextractor_mle.json +0 -87
- teradataml/analytics/mle/json/nertrainer_mle.json +0 -89
- teradataml/analytics/mle/json/ngrams_mle.json +0 -137
- teradataml/analytics/mle/json/ngramsplitter_mle_mle.json +0 -137
- teradataml/analytics/mle/json/npath@coprocessor_mle.json +0 -73
- teradataml/analytics/mle/json/ntree@coprocessor_mle.json +0 -123
- teradataml/analytics/mle/json/pack_mle.json +0 -58
- teradataml/analytics/mle/json/pack_mle_mle.json +0 -58
- teradataml/analytics/mle/json/pagerank_mle.json +0 -81
- teradataml/analytics/mle/json/pathanalyzer_mle.json +0 -63
- teradataml/analytics/mle/json/pathgenerator_mle.json +0 -40
- teradataml/analytics/mle/json/pathstart_mle.json +0 -62
- teradataml/analytics/mle/json/pathsummarizer_mle.json +0 -72
- teradataml/analytics/mle/json/pivoting_mle.json +0 -71
- teradataml/analytics/mle/json/postagger_mle.json +0 -51
- teradataml/analytics/mle/json/randomsample_mle.json +0 -131
- teradataml/analytics/mle/json/randomwalksample_mle.json +0 -85
- teradataml/analytics/mle/json/roc_mle.json +0 -73
- teradataml/analytics/mle/json/sampling_mle.json +0 -75
- teradataml/analytics/mle/json/sax_mle.json +0 -154
- teradataml/analytics/mle/json/scale_mle.json +0 -93
- teradataml/analytics/mle/json/scalebypartition_mle.json +0 -89
- teradataml/analytics/mle/json/scalemap_mle.json +0 -44
- teradataml/analytics/mle/json/scalesummary_mle.json +0 -14
- teradataml/analytics/mle/json/sentenceextractor_mle.json +0 -41
- teradataml/analytics/mle/json/sentimentevaluator_mle.json +0 -43
- teradataml/analytics/mle/json/sentimentextractor_mle.json +0 -100
- teradataml/analytics/mle/json/sentimenttrainer_mle.json +0 -68
- teradataml/analytics/mle/json/seriessplitter_mle.json +0 -133
- teradataml/analytics/mle/json/sessionize_mle_mle.json +0 -62
- teradataml/analytics/mle/json/simplemovavg_mle.json +0 -48
- teradataml/analytics/mle/json/stringsimilarity_mle.json +0 -50
- teradataml/analytics/mle/json/stringsimilarity_mle_mle.json +0 -50
- teradataml/analytics/mle/json/svmdense_mle.json +0 -165
- teradataml/analytics/mle/json/svmdensepredict_mle.json +0 -95
- teradataml/analytics/mle/json/svmdensesummary_mle.json +0 -58
- teradataml/analytics/mle/json/svmsparse_mle.json +0 -148
- teradataml/analytics/mle/json/svmsparsepredict_mle_mle.json +0 -103
- teradataml/analytics/mle/json/svmsparsesummary_mle.json +0 -57
- teradataml/analytics/mle/json/textchunker_mle.json +0 -40
- teradataml/analytics/mle/json/textclassifier_mle.json +0 -51
- teradataml/analytics/mle/json/textclassifierevaluator_mle.json +0 -43
- teradataml/analytics/mle/json/textclassifiertrainer_mle.json +0 -103
- teradataml/analytics/mle/json/textmorph_mle.json +0 -63
- teradataml/analytics/mle/json/textparser_mle.json +0 -166
- teradataml/analytics/mle/json/texttagger_mle.json +0 -81
- teradataml/analytics/mle/json/texttokenizer_mle.json +0 -91
- teradataml/analytics/mle/json/tf_mle.json +0 -33
- teradataml/analytics/mle/json/tfidf_mle.json +0 -34
- teradataml/analytics/mle/json/univariatestatistics_mle.json +0 -81
- teradataml/analytics/mle/json/unpack_mle.json +0 -91
- teradataml/analytics/mle/json/unpack_mle_mle.json +0 -91
- teradataml/analytics/mle/json/unpivoting_mle.json +0 -63
- teradataml/analytics/mle/json/varmax_mle.json +0 -176
- teradataml/analytics/mle/json/vectordistance_mle.json +0 -179
- teradataml/analytics/mle/json/weightedmovavg_mle.json +0 -48
- teradataml/analytics/mle/json/xgboost_mle.json +0 -178
- teradataml/analytics/mle/json/xgboostpredict_mle.json +0 -104
- teradataml/analytics/sqle/Antiselect.py +0 -321
- teradataml/analytics/sqle/Attribution.py +0 -603
- teradataml/analytics/sqle/DecisionForestPredict.py +0 -408
- teradataml/analytics/sqle/GLMPredict.py +0 -430
- teradataml/analytics/sqle/MovingAverage.py +0 -543
- teradataml/analytics/sqle/NGramSplitter.py +0 -548
- teradataml/analytics/sqle/NPath.py +0 -632
- teradataml/analytics/sqle/NaiveBayesTextClassifierPredict.py +0 -515
- teradataml/analytics/sqle/Pack.py +0 -388
- teradataml/analytics/sqle/SVMSparsePredict.py +0 -464
- teradataml/analytics/sqle/Sessionize.py +0 -390
- teradataml/analytics/sqle/StringSimilarity.py +0 -400
- teradataml/analytics/sqle/Unpack.py +0 -503
- teradataml/analytics/sqle/json/antiselect_sqle.json +0 -21
- teradataml/analytics/sqle/json/attribution_sqle.json +0 -92
- teradataml/analytics/sqle/json/decisionforestpredict_sqle.json +0 -48
- teradataml/analytics/sqle/json/glmpredict_sqle.json +0 -48
- teradataml/analytics/sqle/json/h2opredict_sqle.json +0 -63
- teradataml/analytics/sqle/json/movingaverage_sqle.json +0 -58
- teradataml/analytics/sqle/json/naivebayestextclassifierpredict_sqle.json +0 -76
- teradataml/analytics/sqle/json/ngramsplitter_sqle.json +0 -126
- teradataml/analytics/sqle/json/npath_sqle.json +0 -67
- teradataml/analytics/sqle/json/pack_sqle.json +0 -47
- teradataml/analytics/sqle/json/pmmlpredict_sqle.json +0 -55
- teradataml/analytics/sqle/json/sessionize_sqle.json +0 -43
- teradataml/analytics/sqle/json/stringsimilarity_sqle.json +0 -39
- teradataml/analytics/sqle/json/svmsparsepredict_sqle.json +0 -74
- teradataml/analytics/sqle/json/unpack_sqle.json +0 -80
- teradataml/catalog/model_cataloging.py +0 -980
- teradataml/config/mlengine_alias_definitions_v1.0 +0 -118
- teradataml/config/mlengine_alias_definitions_v1.1 +0 -127
- teradataml/config/mlengine_alias_definitions_v1.3 +0 -129
- teradataml/table_operators/sandbox_container_util.py +0 -643
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/WHEEL +0 -0
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/top_level.txt +0 -0
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/zip-safe +0 -0
|
@@ -1,547 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/python
|
|
2
|
-
# ##################################################################
|
|
3
|
-
#
|
|
4
|
-
# Copyright 2018 Teradata. All rights reserved.
|
|
5
|
-
# TERADATA CONFIDENTIAL AND TRADE SECRET
|
|
6
|
-
#
|
|
7
|
-
# Primary Owner: Rohit Agrawal (rohit.agrawal@teradata.com)
|
|
8
|
-
# Secondary Owner: Pankaj Purandare (pankajvinod.purandare@teradata.com)
|
|
9
|
-
#
|
|
10
|
-
# Version: 1.2
|
|
11
|
-
# Function Version: 1.19
|
|
12
|
-
#
|
|
13
|
-
# ##################################################################
|
|
14
|
-
|
|
15
|
-
import inspect
|
|
16
|
-
import time
|
|
17
|
-
from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
|
|
18
|
-
from teradataml.common.utils import UtilFuncs
|
|
19
|
-
from teradataml.context.context import *
|
|
20
|
-
from teradataml.dataframe.dataframe import DataFrame
|
|
21
|
-
from teradataml.common.aed_utils import AedUtils
|
|
22
|
-
from teradataml.analytics.analytic_query_generator import AnalyticQueryGenerator
|
|
23
|
-
from teradataml.common.exceptions import TeradataMlException
|
|
24
|
-
from teradataml.common.messages import Messages
|
|
25
|
-
from teradataml.common.messagecodes import MessageCodes
|
|
26
|
-
from teradataml.common.constants import TeradataConstants
|
|
27
|
-
from teradataml.dataframe.dataframe_utils import DataFrameUtils as df_utils
|
|
28
|
-
from teradataml.options.display import display
|
|
29
|
-
from teradataml.common.formula import Formula
|
|
30
|
-
|
|
31
|
-
class GLML1L2:
|
|
32
|
-
|
|
33
|
-
def __init__(self,
|
|
34
|
-
formula = None,
|
|
35
|
-
data = None,
|
|
36
|
-
alpha = 0.0,
|
|
37
|
-
lambda1 = 0.0,
|
|
38
|
-
max_iter_num = 10000,
|
|
39
|
-
stop_threshold = 1.0E-7,
|
|
40
|
-
family = "Gaussian",
|
|
41
|
-
randomization = False,
|
|
42
|
-
data_sequence_column = None):
|
|
43
|
-
"""
|
|
44
|
-
DESCRIPTION:
|
|
45
|
-
The GLML1L2 function differs from the GLM function in these ways:
|
|
46
|
-
1. GLML1L2 supports the regularization models Ridge, LASSO, and
|
|
47
|
-
Elastic Net.
|
|
48
|
-
2. GLML1L2 outputs a model teradataml DataFrame and optionally,
|
|
49
|
-
a factor teradataml DataFrame (GLM outputs only a model).
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
PARAMETERS:
|
|
53
|
-
formula:
|
|
54
|
-
Required Argument.
|
|
55
|
-
A string consisting of "formula". Specifies the model to be fitted.
|
|
56
|
-
Only basic formula of the "col1 ~ col2 + col3 +..." form are
|
|
57
|
-
supported and all variables must be from the same teradataml
|
|
58
|
-
DataFrame object. The response should be column of type float, int or
|
|
59
|
-
bool.
|
|
60
|
-
|
|
61
|
-
data:
|
|
62
|
-
Required Argument.
|
|
63
|
-
Specifies the name of the teradataml DataFrame that contains the
|
|
64
|
-
input data.
|
|
65
|
-
|
|
66
|
-
alpha:
|
|
67
|
-
Optional Argument.
|
|
68
|
-
Specifies whether to use Lasso, Ridge or Elastic Net. If the value is
|
|
69
|
-
0, Ridge is used. If the value is 1, Lasso is used. For any value
|
|
70
|
-
between 0 and 1, Elastic Net is applied.
|
|
71
|
-
Default Value: 0.0
|
|
72
|
-
Types: float
|
|
73
|
-
|
|
74
|
-
lambda1:
|
|
75
|
-
Optional Argument.
|
|
76
|
-
Specifies the parameter that controls the magnitude of the regularization
|
|
77
|
-
term. The value lambda must be in the range [0.0, 100.0].
|
|
78
|
-
A value of zero disables regularization.
|
|
79
|
-
Default Value: 0.0
|
|
80
|
-
Types: float
|
|
81
|
-
|
|
82
|
-
max_iter_num:
|
|
83
|
-
Optional Argument.
|
|
84
|
-
Specifies the maximum number of iterations over the data.
|
|
85
|
-
The parameter max_iterations must be a positive int value in
|
|
86
|
-
the range [1, 100000].
|
|
87
|
-
Default Value: 10000
|
|
88
|
-
Types: int
|
|
89
|
-
|
|
90
|
-
stop_threshold:
|
|
91
|
-
Optional Argument.
|
|
92
|
-
Specifies the convergence threshold.
|
|
93
|
-
Default Value: 1.0E-7
|
|
94
|
-
Types: float
|
|
95
|
-
|
|
96
|
-
family:
|
|
97
|
-
Optional Argument.
|
|
98
|
-
Specifies the distribution exponential family.
|
|
99
|
-
Default Value: "Gaussian"
|
|
100
|
-
Permitted Values: Binomial, Gaussian
|
|
101
|
-
Types: str
|
|
102
|
-
|
|
103
|
-
randomization:
|
|
104
|
-
Optional Argument.
|
|
105
|
-
Specify whether to randomize the input teradataml DataFrame data.
|
|
106
|
-
Default Value: False
|
|
107
|
-
Types: bool
|
|
108
|
-
|
|
109
|
-
data_sequence_column:
|
|
110
|
-
Optional Argument.
|
|
111
|
-
Specifies the list of column(s) that uniquely identifies each row of
|
|
112
|
-
the input argument "data". The argument is used to ensure
|
|
113
|
-
deterministic results for functions which produce results that vary
|
|
114
|
-
from run to run.
|
|
115
|
-
Types: str OR list of Strings (str)
|
|
116
|
-
|
|
117
|
-
RETURNS:
|
|
118
|
-
Instance of GLML1L2.
|
|
119
|
-
Output teradataml DataFrames can be accessed using attribute
|
|
120
|
-
references, such as GLML1L2Obj.<attribute_name>.
|
|
121
|
-
Output teradataml DataFrame attribute names are:
|
|
122
|
-
1. output
|
|
123
|
-
2. factor_data
|
|
124
|
-
|
|
125
|
-
Note:
|
|
126
|
-
1. When argument randomization is True or if any categorical columns
|
|
127
|
-
are provided in formula argument, then and only then output teradataml DataFrame
|
|
128
|
-
factor_data is created.
|
|
129
|
-
2. factor_data can be used as the input (data) for future GLML1L2
|
|
130
|
-
function calls, thereby saving the function from repeating the
|
|
131
|
-
categorical-to-numerical conversion or randomization.
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
RAISES:
|
|
135
|
-
TeradataMlException
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
EXAMPLES:
|
|
139
|
-
# Load the data to run the example.
|
|
140
|
-
load_example_data("GLML1L2", ["admissions_train", "housing_train"])
|
|
141
|
-
|
|
142
|
-
# Create teradataml DataFrame object.
|
|
143
|
-
admissions_train = DataFrame.from_table("admissions_train")
|
|
144
|
-
housing_train = DataFrame.from_table("housing_train")
|
|
145
|
-
|
|
146
|
-
# Example 1 - The input DataFrame is admission_train, running GLML1L2 function as
|
|
147
|
-
# Ridge Regression Analysis. Alpha (0.0) indicates L2 (ridge regression).
|
|
148
|
-
|
|
149
|
-
glml1l2_out1 = GLML1L2(data=admissions_train,
|
|
150
|
-
formula = "admitted ~ masters + gpa + stats + programming",
|
|
151
|
-
alpha=0.0,
|
|
152
|
-
lambda1=0.02,
|
|
153
|
-
family='Binomial',
|
|
154
|
-
randomization=True
|
|
155
|
-
)
|
|
156
|
-
|
|
157
|
-
# Print the output DataFrames.
|
|
158
|
-
# STDOUT DataFrame.
|
|
159
|
-
print(glml1l2_out1.output)
|
|
160
|
-
|
|
161
|
-
# factor_data dataframe.
|
|
162
|
-
print(glml1l2_out1.factor_data)
|
|
163
|
-
|
|
164
|
-
# Example 2 - The input DataFrame is factor_data DataFrame which is generated by
|
|
165
|
-
# (GLML1L2 Example 1: Ridge Regression, Binomial Family). In factor_data DataFrame
|
|
166
|
-
# categorical predictors were converted to integers.
|
|
167
|
-
|
|
168
|
-
glml1l2_out2 = GLML1L2(data=glml1l2_out1.factor_data,
|
|
169
|
-
formula = "admitted ~ masters_yes + stats_beginner + stats_novice + programming_beginner + programming_novice + gpa",
|
|
170
|
-
alpha=0.0,
|
|
171
|
-
lambda1=0.02,
|
|
172
|
-
family='Binomial'
|
|
173
|
-
)
|
|
174
|
-
|
|
175
|
-
# Print the result.
|
|
176
|
-
print(glml1l2_out2)
|
|
177
|
-
|
|
178
|
-
# Example 3 - The input DataFrame is housing_train, running GLML1L2 function as
|
|
179
|
-
# LASSO Regression (Family Gaussian distribution). Alpha (1.0) indicates
|
|
180
|
-
# L1 (LASSO) regularization.
|
|
181
|
-
|
|
182
|
-
glml1l2_out3 = GLML1L2(data=housing_train ,
|
|
183
|
-
formula = "price ~ lotsize + bedrooms + bathrms + stories + garagepl + driveway + recroom + fullbase + gashw + airco + prefarea + homestyle",
|
|
184
|
-
alpha=1.0,
|
|
185
|
-
lambda1=0.02,
|
|
186
|
-
family='Gaussian'
|
|
187
|
-
)
|
|
188
|
-
|
|
189
|
-
# Print all output dataframes.
|
|
190
|
-
print(glml1l2_out3.output)
|
|
191
|
-
print(glml1l2_out3.factor_data)
|
|
192
|
-
|
|
193
|
-
"""
|
|
194
|
-
|
|
195
|
-
# Start the timer to get the build time
|
|
196
|
-
_start_time = time.time()
|
|
197
|
-
|
|
198
|
-
self.formula = formula
|
|
199
|
-
self.data = data
|
|
200
|
-
self.alpha = alpha
|
|
201
|
-
self.lambda1 = lambda1
|
|
202
|
-
self.max_iter_num = max_iter_num
|
|
203
|
-
self.stop_threshold = stop_threshold
|
|
204
|
-
self.family = family
|
|
205
|
-
self.randomization = randomization
|
|
206
|
-
self.data_sequence_column = data_sequence_column
|
|
207
|
-
|
|
208
|
-
# Create TeradataPyWrapperUtils instance which contains validation functions.
|
|
209
|
-
self.__awu = AnalyticsWrapperUtils()
|
|
210
|
-
self.__aed_utils = AedUtils()
|
|
211
|
-
|
|
212
|
-
# Create argument information matrix to do parameter checking
|
|
213
|
-
self.__arg_info_matrix = []
|
|
214
|
-
self.__arg_info_matrix.append(["formula", self.formula, False, "formula"])
|
|
215
|
-
self.__arg_info_matrix.append(["data", self.data, False, (DataFrame)])
|
|
216
|
-
self.__arg_info_matrix.append(["alpha", self.alpha, True, (float)])
|
|
217
|
-
self.__arg_info_matrix.append(["lambda1", self.lambda1, True, (float)])
|
|
218
|
-
self.__arg_info_matrix.append(["max_iter_num", self.max_iter_num, True, (int)])
|
|
219
|
-
self.__arg_info_matrix.append(["stop_threshold", self.stop_threshold, True, (float)])
|
|
220
|
-
self.__arg_info_matrix.append(["family", self.family, True, (str)])
|
|
221
|
-
self.__arg_info_matrix.append(["randomization", self.randomization, True, (bool)])
|
|
222
|
-
self.__arg_info_matrix.append(["data_sequence_column", self.data_sequence_column, True, (str,list)])
|
|
223
|
-
|
|
224
|
-
# Internal variable to decide whether to use factor output or not.
|
|
225
|
-
self.__factor_output = True
|
|
226
|
-
|
|
227
|
-
if inspect.stack()[1][3] != '_from_model_catalog':
|
|
228
|
-
# Perform the function validations
|
|
229
|
-
self.__validate()
|
|
230
|
-
# Generate the ML query
|
|
231
|
-
self.__form_tdml_query()
|
|
232
|
-
# Execute ML query
|
|
233
|
-
self.__execute()
|
|
234
|
-
# Get the prediction type
|
|
235
|
-
self._prediction_type = self.__awu._get_function_prediction_type(self)
|
|
236
|
-
|
|
237
|
-
# End the timer to get the build time
|
|
238
|
-
_end_time = time.time()
|
|
239
|
-
|
|
240
|
-
# Calculate the build time
|
|
241
|
-
self._build_time = (int)(_end_time - _start_time)
|
|
242
|
-
|
|
243
|
-
def __validate(self):
    """
    Function to validate sqlmr function arguments, which verifies missing
    arguments, input argument and table types. Also processes the
    argument values.

    RAISES:
        TeradataMlException - propagated from the AnalyticsWrapperUtils
        validators when any check below fails.
    """

    # Make sure that a non-NULL value has been supplied for all mandatory
    # arguments (rows flagged as required in __arg_info_matrix).
    self.__awu._validate_missing_required_arguments(self.__arg_info_matrix)

    # Make sure that a non-NULL value has been supplied with the correct
    # type of argument, as declared in __arg_info_matrix.
    self.__awu._validate_argument_types(self.__arg_info_matrix)

    # Check to make sure input table types are strings or data frame
    # objects or of valid type.
    self.__awu._validate_input_table_datatype(self.data, "data", None)

    # Check for permitted values of "family". Values are listed in upper
    # case; case handling is delegated to the validator.
    family_permitted_values = ["BINOMIAL", "GAUSSIAN"]
    self.__awu._validate_permitted_values(self.family, family_permitted_values, "family")

    # Check whether the input columns passed to the argument are not empty.
    # Also check whether the input columns passed to the argument are valid
    # columns of "data" (last flag False: columns are optional).
    self.__awu._validate_input_columns_not_empty(self.data_sequence_column, "data_sequence_column")
    self.__awu._validate_dataframe_has_argument_columns(self.data_sequence_column, "data_sequence_column", self.data, "data", False)
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
def __form_tdml_query(self):
    """
    Function to generate the analytical function queries. The function defines
    variables and list of arguments required to form the query.

    Builds, in order:
      1. SQL clause name/value/type lists for the non-default scalar
         arguments (Alpha, Lambda, MaxIterNum, StopThreshold, Family,
         Randomization) - a clause is emitted only when the value differs
         from the documented default.
      2. The SequenceInputBy clause from "data_sequence_column".
      3. ResponseColumn / FeatureColumns / CategoricalColumns clauses
         derived from the "formula" argument.
      4. The FactorTable output table name (only when randomization is
         requested or categorical columns exist).
      5. The input-table ON-clause lists and, finally, the SQL-MR query
         string via AnalyticQueryGenerator.
    """

    # Model Cataloging related attributes.
    self._sql_specific_attributes = {}
    self._sql_formula_attribute_mapper = {}
    self._target_column = None
    self._algorithm_name = None

    # Generate lists for rest of the function arguments.
    self.__func_other_arg_sql_names = []
    self.__func_other_args = []
    self.__func_other_arg_json_datatypes = []

    # Alpha clause: skipped when alpha is the default 0.0 (pure ridge).
    if self.alpha is not None and self.alpha != 0.0:
        self.__func_other_arg_sql_names.append("Alpha")
        self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.alpha, "'"))
        self.__func_other_arg_json_datatypes.append("DOUBLE")

    # Lambda clause: skipped when lambda1 is 0 (regularization disabled).
    if self.lambda1 is not None and self.lambda1 != 0:
        self.__func_other_arg_sql_names.append("Lambda")
        self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.lambda1, "'"))
        self.__func_other_arg_json_datatypes.append("DOUBLE")

    # MaxIterNum clause: skipped at the documented default of 10000.
    if self.max_iter_num is not None and self.max_iter_num != 10000:
        self.__func_other_arg_sql_names.append("MaxIterNum")
        self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.max_iter_num, "'"))
        self.__func_other_arg_json_datatypes.append("INTEGER")

    # StopThreshold clause: skipped at the documented default of 1.0E-7.
    if self.stop_threshold is not None and self.stop_threshold != 1.0E-7:
        self.__func_other_arg_sql_names.append("StopThreshold")
        self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.stop_threshold, "'"))
        self.__func_other_arg_json_datatypes.append("DOUBLE")

    # Family clause: skipped at the documented default "Gaussian".
    if self.family is not None and self.family != "Gaussian":
        self.__func_other_arg_sql_names.append("Family")
        self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.family, "'"))
        self.__func_other_arg_json_datatypes.append("STRING")

    # Randomization clause: skipped at the documented default False.
    if self.randomization is not None and self.randomization != False:
        self.__func_other_arg_sql_names.append("Randomization")
        self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(self.randomization, "'"))
        self.__func_other_arg_json_datatypes.append("BOOLEAN")

    # Generate lists for rest of the function arguments:
    # SequenceInputBy ties the sequence columns to the "InputTable" alias.
    sequence_input_by_list = []
    if self.data_sequence_column is not None:
        sequence_input_by_list.append("InputTable:" + UtilFuncs._teradata_collapse_arglist(self.data_sequence_column, ""))

    if len(sequence_input_by_list) > 0:
        self.__func_other_arg_sql_names.append("SequenceInputBy")
        sequence_input_by_arg_value = UtilFuncs._teradata_collapse_arglist(sequence_input_by_list, "'")
        self.__func_other_args.append(sequence_input_by_arg_value)
        self.__func_other_arg_json_datatypes.append("STRING")
        self._sql_specific_attributes["SequenceInputBy"] = sequence_input_by_arg_value

    # Let's process formula argument.
    self.formula = self.__awu._validate_formula_notation(self.formula, self.data, "formula")

    # Response variable (left-hand side of the formula) -> ResponseColumn.
    __response_column = self.formula._get_dependent_vars()
    self._target_column = __response_column
    self.__func_other_arg_sql_names.append("ResponseColumn")
    self.__func_other_args.append(UtilFuncs._teradata_collapse_arglist(UtilFuncs._teradata_quote_arg(__response_column, "\""), "'"))
    self.__func_other_arg_json_datatypes.append("COLUMN_NAMES")
    self._sql_specific_attributes["ResponseColumn"] = __response_column
    self._sql_formula_attribute_mapper["ResponseColumn"] = "__response_column"

    # All input columns (response removed) -> FeatureColumns.
    __all_columns = self.__awu._get_columns_by_type(self.formula, self.data, "all")
    __all_columns.remove(__response_column)
    if len(__all_columns) > 0:
        self.__func_other_arg_sql_names.append("FeatureColumns")
        all_columns_list = UtilFuncs._teradata_collapse_arglist(UtilFuncs._teradata_quote_arg(__all_columns, "\""), "'")
        self.__func_other_args.append(all_columns_list)
        self.__func_other_arg_json_datatypes.append("COLUMN_NAMES")
        self._sql_specific_attributes["FeatureColumns"] = all_columns_list
        self._sql_formula_attribute_mapper["FeatureColumns"] = "__all_columns"

    # Categorical input columns -> CategoricalColumns.
    __categorical_columns = self.__awu._get_columns_by_type(self.formula, self.data, "categorical")
    if len(__categorical_columns) > 0:
        self.__func_other_arg_sql_names.append("CategoricalColumns")
        categorical_columns_list = UtilFuncs._teradata_collapse_arglist(UtilFuncs._teradata_quote_arg(__categorical_columns, "\""), "'")
        self.__func_other_args.append(categorical_columns_list)
        self.__func_other_arg_json_datatypes.append("COLUMN_NAMES")
        self._sql_specific_attributes["CategoricalColumns"] = categorical_columns_list
        self._sql_formula_attribute_mapper["CategoricalColumns"] = "__categorical_columns"

    # Generate temp table names for output table parameters if any.
    # The FactorTable output is produced only with randomization or when
    # categorical columns exist (matches the class docstring note).
    if self.randomization is True or len(__categorical_columns) > 0:
        self.__factor_data_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_glml1l20", use_default_database=True, gc_on_quit=True, quote=False, table_type=TeradataConstants.TERADATA_TABLE)
    else:
        self.__factor_data_temp_tablename = None
        self.__factor_output = False

    # Output table arguments list.
    self.__func_output_args_sql_names = ["FactorTable"]
    self.__func_output_args = [self.__factor_data_temp_tablename]

    # Declare empty lists to hold input table information.
    self.__func_input_arg_sql_names = []
    self.__func_input_table_view_query = []
    self.__func_input_dataframe_type = []
    self.__func_input_distribution = []
    self.__func_input_partition_by_cols = []
    self.__func_input_order_by_cols = []

    # Process data: single "InputTable" input with no partitioning/ordering.
    self.__table_ref = self.__awu._teradata_on_clause_from_dataframe(self.data, False)
    self.__func_input_distribution.append("NONE")
    self.__func_input_arg_sql_names.append("InputTable")
    self.__func_input_table_view_query.append(self.__table_ref["ref"])
    self.__func_input_dataframe_type.append(self.__table_ref["ref_type"])
    self.__func_input_partition_by_cols.append("NA_character_")
    self.__func_input_order_by_cols.append("NA_character_")

    function_name = "GLML1L2"
    # Create instance to generate SQLMR.
    self.__aqg_obj = AnalyticQueryGenerator(function_name,
                                            self.__func_input_arg_sql_names,
                                            self.__func_input_table_view_query,
                                            self.__func_input_dataframe_type,
                                            self.__func_input_distribution,
                                            self.__func_input_partition_by_cols,
                                            self.__func_input_order_by_cols,
                                            self.__func_other_arg_sql_names,
                                            self.__func_other_args,
                                            self.__func_other_arg_json_datatypes,
                                            self.__func_output_args_sql_names,
                                            self.__func_output_args,
                                            engine="ENGINE_ML")
    # Invoke call to SQL-MR generation.
    self.sqlmr_query = self.__aqg_obj._gen_sqlmr_select_stmt_sql()

    # Print SQL-MR query if requested to do so.
    if display.print_sqlmr_query:
        print(self.sqlmr_query)

    # Set the algorithm name for Model Cataloging.
    self._algorithm_name = self.__aqg_obj._get_alias_name_for_function(function_name)
|
|
412
|
-
|
|
413
|
-
def __execute(self):
    """
    Function to execute SQL-MR queries.
    Create DataFrames for the required SQL-MR outputs.

    Side effects:
        Sets self.output, self.factor_data and self._mlresults.
        When no factor table was produced (__factor_output is False),
        self.factor_data is set to an informational string instead of a
        DataFrame.

    RAISES:
        TeradataMlException (TDMLDF_EXEC_SQL_FAILED) when the generated
        SQL-MR query fails to execute.
    """
    # Generate STDOUT table name and add it to the output table list.
    sqlmr_stdout_temp_tablename = UtilFuncs._generate_temp_table_name(prefix="td_sqlmr_out_", use_default_database=True, gc_on_quit=True, quote=False, table_type=TeradataConstants.TERADATA_TABLE)
    try:
        # Generate the output: materialize the SQL-MR SELECT into the
        # garbage-collected temp table.
        UtilFuncs._create_table(sqlmr_stdout_temp_tablename, self.sqlmr_query)
    except Exception as emsg:
        # Wrap any driver/DB failure in the library's exception type.
        raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_EXEC_SQL_FAILED, str(emsg)), MessageCodes.TDMLDF_EXEC_SQL_FAILED)

    # Update output table data frames.
    self._mlresults = []
    self.output = self.__awu._create_data_set_object(df_input=UtilFuncs._extract_table_name(sqlmr_stdout_temp_tablename), source_type="table", database_name=UtilFuncs._extract_db_name(sqlmr_stdout_temp_tablename))
    self._mlresults.append(self.output)
    if self.__factor_output:
        # Expose the FactorTable produced by the function as a DataFrame.
        self.factor_data = self.__awu._create_data_set_object(df_input=UtilFuncs._extract_table_name(self.__factor_data_temp_tablename), source_type="table", database_name=UtilFuncs._extract_db_name(self.__factor_data_temp_tablename))
        self._mlresults.append(self.factor_data)
    else:
        # No factor table was requested/produced; leave an explanatory
        # message so attribute access still yields something printable.
        self.factor_data = "INFO: 'factor_data' output DataFrame is not created, the result is based on either CategoricalColumns or Randomization; " \
                           "therefore, you must also specify either CategoricalColumns in formula or Randomization ('true')."
|
|
436
|
-
|
|
437
|
-
def show_query(self):
    """
    Return the underlying SQL-MR query for this GLML1L2 invocation.
    When the model object was created using retrieve_model(), None is
    returned (the query attribute is reset by _from_model_catalog).
    """
    query = self.sqlmr_query
    return query
|
|
443
|
-
|
|
444
|
-
def get_prediction_type(self):
    """
    Return the prediction type of the algorithm.
    For objects created using retrieve_model(), the value returned is the
    one saved in the Model Catalog.
    """
    return getattr(self, "_prediction_type")
|
|
451
|
-
|
|
452
|
-
def get_target_column(self):
    """
    Return the target (response) column of the algorithm.
    For objects created using retrieve_model(), the value returned is the
    one saved in the Model Catalog.
    """
    target = self._target_column
    return target
|
|
459
|
-
|
|
460
|
-
def get_build_time(self):
    """
    Return the build time of the algorithm in seconds.
    For objects created using retrieve_model(), the value returned is the
    one saved in the Model Catalog.
    """
    return getattr(self, "_build_time")
|
|
467
|
-
|
|
468
|
-
def _get_algorithm_name(self):
    """
    Return the name of the algorithm (the SQL alias resolved during
    query generation).
    """
    algorithm = self._algorithm_name
    return algorithm
|
|
473
|
-
|
|
474
|
-
def _get_sql_specific_attributes(self):
    """
    Return the dictionary containing the SQL specific attributes of the
    algorithm (populated while forming the SQL-MR query).
    """
    attributes = self._sql_specific_attributes
    return attributes
|
|
479
|
-
|
|
480
|
-
@classmethod
def _from_model_catalog(cls,
                        factor_data = None,
                        output = None,
                        **kwargs):
    """
    Classmethod is used by Model Cataloging, to instantiate this wrapper class.

    PARAMETERS:
        factor_data - Saved factor_data table reference, or None when the
                      saved model had no factor table.
        output      - Saved output (STDOUT) table reference.
        kwargs      - Constructor arguments plus the dunder-prefixed
                      catalog/formula attributes popped below.

    RETURNS:
        A GLML1L2 instance with its result DataFrames and cataloging
        attributes restored (no SQL is executed; __init__ skips
        validation/execution when invoked from this method).
    """
    # These keys, if present, would collide with the explicit parameters.
    kwargs.pop("factor_data", None)
    kwargs.pop("output", None)

    # Model Cataloging related attributes.
    target_column = kwargs.pop("__target_column", None)
    prediction_type = kwargs.pop("__prediction_type", None)
    algorithm_name = kwargs.pop("__algorithm_name", None)
    build_time = kwargs.pop("__build_time", None)

    # Initialize the formula attributes.
    __response_column = kwargs.pop("__response_column", None)
    __all_columns = kwargs.pop("__all_columns", None)
    __numeric_columns = kwargs.pop("__numeric_columns", None)
    __categorical_columns = kwargs.pop("__categorical_columns", None)

    # Let's create an object of this class.
    obj = cls(**kwargs)
    obj.factor_data = factor_data
    obj.output = output

    # Initialize the sqlmr_query class attribute; no query was run here,
    # so show_query() will return None for catalog-restored models.
    obj.sqlmr_query = None

    # Initialize the SQL specific Model Cataloging attributes.
    obj._sql_specific_attributes = None
    obj._target_column = target_column
    obj._prediction_type = prediction_type
    obj._algorithm_name = algorithm_name
    obj._build_time = build_time

    # Initialize the formula from the saved per-column attribute lists.
    if obj.formula is not None:
        obj.formula = Formula._from_formula_attr(obj.formula,
                                                 __response_column,
                                                 __all_columns,
                                                 __categorical_columns,
                                                 __numeric_columns)

    # Update output table data frames.
    obj._mlresults = []
    obj.output = obj.__awu._create_data_set_object(df_input=UtilFuncs._extract_table_name(obj.output), source_type="table", database_name=UtilFuncs._extract_db_name(obj.output))
    obj._mlresults.append(obj.output)
    if obj.factor_data is not None:
        obj.factor_data = obj.__awu._create_data_set_object(df_input=UtilFuncs._extract_table_name(obj.factor_data), source_type="table", database_name=UtilFuncs._extract_db_name(obj.factor_data))
        obj._mlresults.append(obj.factor_data)
    else:
        # Same informational message __execute() uses when no factor
        # table exists.
        obj.factor_data = "INFO: 'factor_data' output DataFrame is not created, the result is based on either CategoricalColumns or Randomization; " \
                          "therefore, you must also specify either CategoricalColumns in formula or Randomization ('true')."
    return obj
|
|
537
|
-
|
|
538
|
-
def __repr__(self):
    """
    Build the printable representation of a GLML1L2 instance: the STDOUT
    output followed by the factor_data output, each under its own banner.
    """
    template = ("############ STDOUT Output ############"
                "\n\n{0}"
                "\n\n\n############ factor_data Output ############"
                "\n\n{1}")
    return template.format(self.output, self.factor_data)
|
|
547
|
-
|