teradataml-20.0.0.8-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +2762 -0
- teradataml/__init__.py +78 -0
- teradataml/_version.py +11 -0
- teradataml/analytics/Transformations.py +2996 -0
- teradataml/analytics/__init__.py +82 -0
- teradataml/analytics/analytic_function_executor.py +2416 -0
- teradataml/analytics/analytic_query_generator.py +1050 -0
- teradataml/analytics/byom/H2OPredict.py +514 -0
- teradataml/analytics/byom/PMMLPredict.py +437 -0
- teradataml/analytics/byom/__init__.py +16 -0
- teradataml/analytics/json_parser/__init__.py +133 -0
- teradataml/analytics/json_parser/analytic_functions_argument.py +1805 -0
- teradataml/analytics/json_parser/json_store.py +191 -0
- teradataml/analytics/json_parser/metadata.py +1666 -0
- teradataml/analytics/json_parser/utils.py +805 -0
- teradataml/analytics/meta_class.py +236 -0
- teradataml/analytics/sqle/DecisionTreePredict.py +456 -0
- teradataml/analytics/sqle/NaiveBayesPredict.py +420 -0
- teradataml/analytics/sqle/__init__.py +128 -0
- teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -0
- teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -0
- teradataml/analytics/table_operator/__init__.py +11 -0
- teradataml/analytics/uaf/__init__.py +82 -0
- teradataml/analytics/utils.py +828 -0
- teradataml/analytics/valib.py +1617 -0
- teradataml/automl/__init__.py +5835 -0
- teradataml/automl/autodataprep/__init__.py +493 -0
- teradataml/automl/custom_json_utils.py +1625 -0
- teradataml/automl/data_preparation.py +1384 -0
- teradataml/automl/data_transformation.py +1254 -0
- teradataml/automl/feature_engineering.py +2273 -0
- teradataml/automl/feature_exploration.py +1873 -0
- teradataml/automl/model_evaluation.py +488 -0
- teradataml/automl/model_training.py +1407 -0
- teradataml/catalog/__init__.py +2 -0
- teradataml/catalog/byom.py +1759 -0
- teradataml/catalog/function_argument_mapper.py +859 -0
- teradataml/catalog/model_cataloging_utils.py +491 -0
- teradataml/clients/__init__.py +0 -0
- teradataml/clients/auth_client.py +137 -0
- teradataml/clients/keycloak_client.py +165 -0
- teradataml/clients/pkce_client.py +481 -0
- teradataml/common/__init__.py +1 -0
- teradataml/common/aed_utils.py +2078 -0
- teradataml/common/bulk_exposed_utils.py +113 -0
- teradataml/common/constants.py +1669 -0
- teradataml/common/deprecations.py +166 -0
- teradataml/common/exceptions.py +147 -0
- teradataml/common/formula.py +743 -0
- teradataml/common/garbagecollector.py +666 -0
- teradataml/common/logger.py +1261 -0
- teradataml/common/messagecodes.py +518 -0
- teradataml/common/messages.py +262 -0
- teradataml/common/pylogger.py +67 -0
- teradataml/common/sqlbundle.py +764 -0
- teradataml/common/td_coltype_code_to_tdtype.py +48 -0
- teradataml/common/utils.py +3166 -0
- teradataml/common/warnings.py +36 -0
- teradataml/common/wrapper_utils.py +625 -0
- teradataml/config/__init__.py +0 -0
- teradataml/config/dummy_file1.cfg +5 -0
- teradataml/config/dummy_file2.cfg +3 -0
- teradataml/config/sqlengine_alias_definitions_v1.0 +14 -0
- teradataml/config/sqlengine_alias_definitions_v1.1 +20 -0
- teradataml/config/sqlengine_alias_definitions_v1.3 +19 -0
- teradataml/context/__init__.py +0 -0
- teradataml/context/aed_context.py +223 -0
- teradataml/context/context.py +1462 -0
- teradataml/data/A_loan.csv +19 -0
- teradataml/data/BINARY_REALS_LEFT.csv +11 -0
- teradataml/data/BINARY_REALS_RIGHT.csv +11 -0
- teradataml/data/B_loan.csv +49 -0
- teradataml/data/BuoyData2.csv +17 -0
- teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -0
- teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -0
- teradataml/data/Convolve2RealsLeft.csv +5 -0
- teradataml/data/Convolve2RealsRight.csv +5 -0
- teradataml/data/Convolve2ValidLeft.csv +11 -0
- teradataml/data/Convolve2ValidRight.csv +11 -0
- teradataml/data/DFFTConv_Real_8_8.csv +65 -0
- teradataml/data/Employee.csv +5 -0
- teradataml/data/Employee_Address.csv +4 -0
- teradataml/data/Employee_roles.csv +5 -0
- teradataml/data/JulesBelvezeDummyData.csv +100 -0
- teradataml/data/Mall_customer_data.csv +201 -0
- teradataml/data/Orders1_12mf.csv +25 -0
- teradataml/data/Pi_loan.csv +7 -0
- teradataml/data/SMOOTHED_DATA.csv +7 -0
- teradataml/data/TestDFFT8.csv +9 -0
- teradataml/data/TestRiver.csv +109 -0
- teradataml/data/Traindata.csv +28 -0
- teradataml/data/__init__.py +0 -0
- teradataml/data/acf.csv +17 -0
- teradataml/data/adaboost_example.json +34 -0
- teradataml/data/adaboostpredict_example.json +24 -0
- teradataml/data/additional_table.csv +11 -0
- teradataml/data/admissions_test.csv +21 -0
- teradataml/data/admissions_train.csv +41 -0
- teradataml/data/admissions_train_nulls.csv +41 -0
- teradataml/data/advertising.csv +201 -0
- teradataml/data/ageandheight.csv +13 -0
- teradataml/data/ageandpressure.csv +31 -0
- teradataml/data/amazon_reviews_25.csv +26 -0
- teradataml/data/antiselect_example.json +36 -0
- teradataml/data/antiselect_input.csv +8 -0
- teradataml/data/antiselect_input_mixed_case.csv +8 -0
- teradataml/data/applicant_external.csv +7 -0
- teradataml/data/applicant_reference.csv +7 -0
- teradataml/data/apriori_example.json +22 -0
- teradataml/data/arima_example.json +9 -0
- teradataml/data/assortedtext_input.csv +8 -0
- teradataml/data/attribution_example.json +34 -0
- teradataml/data/attribution_sample_table.csv +27 -0
- teradataml/data/attribution_sample_table1.csv +6 -0
- teradataml/data/attribution_sample_table2.csv +11 -0
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bank_marketing.csv +11163 -0
- teradataml/data/bank_web_clicks1.csv +43 -0
- teradataml/data/bank_web_clicks2.csv +91 -0
- teradataml/data/bank_web_url.csv +85 -0
- teradataml/data/barrier.csv +2 -0
- teradataml/data/barrier_new.csv +3 -0
- teradataml/data/betweenness_example.json +14 -0
- teradataml/data/bike_sharing.csv +732 -0
- teradataml/data/bin_breaks.csv +8 -0
- teradataml/data/bin_fit_ip.csv +4 -0
- teradataml/data/binary_complex_left.csv +11 -0
- teradataml/data/binary_complex_right.csv +11 -0
- teradataml/data/binary_matrix_complex_left.csv +21 -0
- teradataml/data/binary_matrix_complex_right.csv +21 -0
- teradataml/data/binary_matrix_real_left.csv +21 -0
- teradataml/data/binary_matrix_real_right.csv +21 -0
- teradataml/data/blood2ageandweight.csv +26 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/boston.csv +507 -0
- teradataml/data/boston2cols.csv +721 -0
- teradataml/data/breast_cancer.csv +570 -0
- teradataml/data/buoydata_mix.csv +11 -0
- teradataml/data/burst_data.csv +5 -0
- teradataml/data/burst_example.json +21 -0
- teradataml/data/byom_example.json +34 -0
- teradataml/data/bytes_table.csv +4 -0
- teradataml/data/cal_housing_ex_raw.csv +70 -0
- teradataml/data/callers.csv +7 -0
- teradataml/data/calls.csv +10 -0
- teradataml/data/cars_hist.csv +33 -0
- teradataml/data/cat_table.csv +25 -0
- teradataml/data/ccm_example.json +32 -0
- teradataml/data/ccm_input.csv +91 -0
- teradataml/data/ccm_input2.csv +13 -0
- teradataml/data/ccmexample.csv +101 -0
- teradataml/data/ccmprepare_example.json +9 -0
- teradataml/data/ccmprepare_input.csv +91 -0
- teradataml/data/cfilter_example.json +12 -0
- teradataml/data/changepointdetection_example.json +18 -0
- teradataml/data/changepointdetectionrt_example.json +8 -0
- teradataml/data/chi_sq.csv +3 -0
- teradataml/data/churn_data.csv +14 -0
- teradataml/data/churn_emission.csv +35 -0
- teradataml/data/churn_initial.csv +3 -0
- teradataml/data/churn_state_transition.csv +5 -0
- teradataml/data/citedges_2.csv +745 -0
- teradataml/data/citvertices_2.csv +1210 -0
- teradataml/data/clicks2.csv +16 -0
- teradataml/data/clickstream.csv +13 -0
- teradataml/data/clickstream1.csv +11 -0
- teradataml/data/closeness_example.json +16 -0
- teradataml/data/complaints.csv +21 -0
- teradataml/data/complaints_mini.csv +3 -0
- teradataml/data/complaints_test_tokenized.csv +353 -0
- teradataml/data/complaints_testtoken.csv +224 -0
- teradataml/data/complaints_tokens_model.csv +348 -0
- teradataml/data/complaints_tokens_test.csv +353 -0
- teradataml/data/complaints_traintoken.csv +472 -0
- teradataml/data/computers_category.csv +1001 -0
- teradataml/data/computers_test1.csv +1252 -0
- teradataml/data/computers_train1.csv +5009 -0
- teradataml/data/computers_train1_clustered.csv +5009 -0
- teradataml/data/confusionmatrix_example.json +9 -0
- teradataml/data/conversion_event_table.csv +3 -0
- teradataml/data/corr_input.csv +17 -0
- teradataml/data/correlation_example.json +11 -0
- teradataml/data/covid_confirm_sd.csv +83 -0
- teradataml/data/coxhazardratio_example.json +39 -0
- teradataml/data/coxph_example.json +15 -0
- teradataml/data/coxsurvival_example.json +28 -0
- teradataml/data/cpt.csv +41 -0
- teradataml/data/credit_ex_merged.csv +45 -0
- teradataml/data/creditcard_data.csv +1001 -0
- teradataml/data/customer_loyalty.csv +301 -0
- teradataml/data/customer_loyalty_newseq.csv +31 -0
- teradataml/data/customer_segmentation_test.csv +2628 -0
- teradataml/data/customer_segmentation_train.csv +8069 -0
- teradataml/data/dataframe_example.json +173 -0
- teradataml/data/decisionforest_example.json +37 -0
- teradataml/data/decisionforestpredict_example.json +38 -0
- teradataml/data/decisiontree_example.json +21 -0
- teradataml/data/decisiontreepredict_example.json +45 -0
- teradataml/data/dfft2_size4_real.csv +17 -0
- teradataml/data/dfft2_test_matrix16.csv +17 -0
- teradataml/data/dfft2conv_real_4_4.csv +65 -0
- teradataml/data/diabetes.csv +443 -0
- teradataml/data/diabetes_test.csv +89 -0
- teradataml/data/dict_table.csv +5 -0
- teradataml/data/docperterm_table.csv +4 -0
- teradataml/data/docs/__init__.py +1 -0
- teradataml/data/docs/byom/__init__.py +0 -0
- teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -0
- teradataml/data/docs/byom/docs/DataikuPredict.py +217 -0
- teradataml/data/docs/byom/docs/H2OPredict.py +325 -0
- teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
- teradataml/data/docs/byom/docs/ONNXPredict.py +283 -0
- teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
- teradataml/data/docs/byom/docs/PMMLPredict.py +278 -0
- teradataml/data/docs/byom/docs/__init__.py +0 -0
- teradataml/data/docs/sqle/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_10/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Attribution.py +200 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -0
- teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +96 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +152 -0
- teradataml/data/docs/sqle/docs_17_10/FTest.py +161 -0
- teradataml/data/docs/sqle/docs_17_10/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +85 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +82 -0
- teradataml/data/docs/sqle/docs_17_10/Histogram.py +165 -0
- teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_10/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -0
- teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +135 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +166 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -0
- teradataml/data/docs/sqle/docs_17_10/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +110 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -0
- teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -0
- teradataml/data/docs/sqle/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -0
- teradataml/data/docs/sqle/docs_17_20/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Apriori.py +138 -0
- teradataml/data/docs/sqle/docs_17_20/Attribution.py +201 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -0
- teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
- teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +246 -0
- teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +280 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +136 -0
- teradataml/data/docs/sqle/docs_17_20/FTest.py +240 -0
- teradataml/data/docs/sqle/docs_17_20/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +415 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -0
- teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/Histogram.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -0
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/KNN.py +215 -0
- teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_20/NERExtractor.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_20/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +231 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +191 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -0
- teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +155 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -0
- teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -0
- teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -0
- teradataml/data/docs/sqle/docs_17_20/SMOTE.py +212 -0
- teradataml/data/docs/sqle/docs_17_20/SVM.py +414 -0
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -0
- teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -0
- teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -0
- teradataml/data/docs/sqle/docs_17_20/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_20/Shap.py +225 -0
- teradataml/data/docs/sqle/docs_17_20/Silhouette.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -0
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -0
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
- teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +267 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -0
- teradataml/data/docs/sqle/docs_17_20/TextMorph.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +160 -0
- teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -0
- teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
- teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +169 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +237 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoost.py +362 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -0
- teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/tableoperator/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_00/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_05/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +429 -0
- teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +440 -0
- teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +387 -0
- teradataml/data/docs/tableoperator/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/uaf/__init__.py +0 -0
- teradataml/data/docs/uaf/docs_17_20/ACF.py +186 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +370 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +161 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
- teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
- teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +248 -0
- teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +178 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve.py +230 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve2.py +218 -0
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT.py +204 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +192 -0
- teradataml/data/docs/uaf/docs_17_20/DIFF.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/DTW.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
- teradataml/data/docs/uaf/docs_17_20/DWT2D.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +142 -0
- teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +184 -0
- teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
- teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +206 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +143 -0
- teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +198 -0
- teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +260 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT.py +165 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
- teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/InputValidator.py +121 -0
- teradataml/data/docs/uaf/docs_17_20/LineSpec.py +156 -0
- teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +215 -0
- teradataml/data/docs/uaf/docs_17_20/MAMean.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/MInfo.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
- teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/PACF.py +157 -0
- teradataml/data/docs/uaf/docs_17_20/Portman.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +203 -0
- teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +155 -0
- teradataml/data/docs/uaf/docs_17_20/Resample.py +237 -0
- teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
- teradataml/data/docs/uaf/docs_17_20/SInfo.py +123 -0
- teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +173 -0
- teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +164 -0
- teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/Smoothma.py +208 -0
- teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +151 -0
- teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +202 -0
- teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
- teradataml/data/docs/uaf/docs_17_20/__init__.py +0 -0
- teradataml/data/dtw_example.json +18 -0
- teradataml/data/dtw_t1.csv +11 -0
- teradataml/data/dtw_t2.csv +4 -0
- teradataml/data/dwt2d_dataTable.csv +65 -0
- teradataml/data/dwt2d_example.json +16 -0
- teradataml/data/dwt_dataTable.csv +8 -0
- teradataml/data/dwt_example.json +15 -0
- teradataml/data/dwt_filterTable.csv +3 -0
- teradataml/data/dwt_filter_dim.csv +5 -0
- teradataml/data/emission.csv +9 -0
- teradataml/data/emp_table_by_dept.csv +19 -0
- teradataml/data/employee_info.csv +4 -0
- teradataml/data/employee_table.csv +6 -0
- teradataml/data/excluding_event_table.csv +2 -0
- teradataml/data/finance_data.csv +6 -0
- teradataml/data/finance_data2.csv +61 -0
- teradataml/data/finance_data3.csv +93 -0
- teradataml/data/finance_data4.csv +13 -0
- teradataml/data/fish.csv +160 -0
- teradataml/data/fm_blood2ageandweight.csv +26 -0
- teradataml/data/fmeasure_example.json +12 -0
- teradataml/data/followers_leaders.csv +10 -0
- teradataml/data/fpgrowth_example.json +12 -0
- teradataml/data/frequentpaths_example.json +29 -0
- teradataml/data/friends.csv +9 -0
- teradataml/data/fs_input.csv +33 -0
- teradataml/data/fs_input1.csv +33 -0
- teradataml/data/genData.csv +513 -0
- teradataml/data/geodataframe_example.json +40 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/glm_admissions_model.csv +12 -0
- teradataml/data/glm_example.json +56 -0
- teradataml/data/glml1l2_example.json +28 -0
- teradataml/data/glml1l2predict_example.json +54 -0
- teradataml/data/glmpredict_example.json +54 -0
- teradataml/data/gq_t1.csv +21 -0
- teradataml/data/grocery_transaction.csv +19 -0
- teradataml/data/hconvolve_complex_right.csv +5 -0
- teradataml/data/hconvolve_complex_rightmulti.csv +5 -0
- teradataml/data/histogram_example.json +12 -0
- teradataml/data/hmmdecoder_example.json +79 -0
- teradataml/data/hmmevaluator_example.json +25 -0
- teradataml/data/hmmsupervised_example.json +10 -0
- teradataml/data/hmmunsupervised_example.json +8 -0
- teradataml/data/hnsw_alter_data.csv +5 -0
- teradataml/data/hnsw_data.csv +10 -0
- teradataml/data/house_values.csv +12 -0
- teradataml/data/house_values2.csv +13 -0
- teradataml/data/housing_cat.csv +7 -0
- teradataml/data/housing_data.csv +9 -0
- teradataml/data/housing_test.csv +47 -0
- teradataml/data/housing_test_binary.csv +47 -0
- teradataml/data/housing_train.csv +493 -0
- teradataml/data/housing_train_attribute.csv +5 -0
- teradataml/data/housing_train_binary.csv +437 -0
- teradataml/data/housing_train_parameter.csv +2 -0
- teradataml/data/housing_train_response.csv +493 -0
- teradataml/data/housing_train_segment.csv +201 -0
- teradataml/data/ibm_stock.csv +370 -0
- teradataml/data/ibm_stock1.csv +370 -0
- teradataml/data/identitymatch_example.json +22 -0
- teradataml/data/idf_table.csv +4 -0
- teradataml/data/idwt2d_dataTable.csv +5 -0
- teradataml/data/idwt_dataTable.csv +8 -0
- teradataml/data/idwt_filterTable.csv +3 -0
- teradataml/data/impressions.csv +101 -0
- teradataml/data/inflation.csv +21 -0
- teradataml/data/initial.csv +3 -0
- teradataml/data/insect2Cols.csv +61 -0
- teradataml/data/insect_sprays.csv +13 -0
- teradataml/data/insurance.csv +1339 -0
- teradataml/data/interpolator_example.json +13 -0
- teradataml/data/interval_data.csv +5 -0
- teradataml/data/iris_altinput.csv +481 -0
- teradataml/data/iris_attribute_output.csv +8 -0
- teradataml/data/iris_attribute_test.csv +121 -0
- teradataml/data/iris_attribute_train.csv +481 -0
- teradataml/data/iris_category_expect_predict.csv +31 -0
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/iris_input.csv +151 -0
- teradataml/data/iris_response_train.csv +121 -0
- teradataml/data/iris_test.csv +31 -0
- teradataml/data/iris_train.csv +121 -0
- teradataml/data/join_table1.csv +4 -0
- teradataml/data/join_table2.csv +4 -0
- teradataml/data/jsons/anly_function_name.json +7 -0
- teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
- teradataml/data/jsons/byom/dataikupredict.json +148 -0
- teradataml/data/jsons/byom/datarobotpredict.json +147 -0
- teradataml/data/jsons/byom/h2opredict.json +195 -0
- teradataml/data/jsons/byom/onnxembeddings.json +267 -0
- teradataml/data/jsons/byom/onnxpredict.json +187 -0
- teradataml/data/jsons/byom/pmmlpredict.json +147 -0
- teradataml/data/jsons/paired_functions.json +450 -0
- teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/16.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/16.20/Pack.json +98 -0
- teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/16.20/Unpack.json +166 -0
- teradataml/data/jsons/sqle/16.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.00/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.00/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.00/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.00/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.05/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.05/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.05/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.05/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.10/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -0
- teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -0
- teradataml/data/jsons/sqle/17.10/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -0
- teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -0
- teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +69 -0
- teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -0
- teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_Histogram.json +133 -0
- teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +183 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +66 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +197 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +112 -0
- teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +157 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +148 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +119 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -0
- teradataml/data/jsons/sqle/17.10/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.10/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -0
- teradataml/data/jsons/sqle/17.20/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -0
- teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +149 -0
- teradataml/data/jsons/sqle/17.20/TD_Apriori.json +181 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
- teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -0
- teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +260 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -0
- teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -0
- teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeans.json +232 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +87 -0
- teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -0
- teradataml/data/jsons/sqle/17.20/TD_NERExtractor.json +145 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +316 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +124 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -0
- teradataml/data/jsons/sqle/17.20/TD_ROC.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +138 -0
- teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_SMOTE.json +267 -0
- teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -0
- teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +310 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +120 -0
- teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +194 -0
- teradataml/data/jsons/sqle/17.20/TD_Shap.json +221 -0
- teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +143 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_TextMorph.json +134 -0
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +297 -0
- teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -0
- teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
- teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +183 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -0
- teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -0
- teradataml/data/jsons/sqle/17.20/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +370 -0
- teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +460 -0
- teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +385 -0
- teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +400 -0
- teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +401 -0
- teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +384 -0
- teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +384 -0
- teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
- teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
- teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
- teradataml/data/jsons/tableoperator/17.00/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.10/read_nos.json +184 -0
- teradataml/data/jsons/tableoperator/17.10/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/tableoperator/17.20/read_nos.json +183 -0
- teradataml/data/jsons/tableoperator/17.20/write_nos.json +224 -0
- teradataml/data/jsons/uaf/17.20/TD_ACF.json +132 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +396 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +77 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +153 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
- teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +107 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +106 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +89 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +104 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +66 -0
- teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +87 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT.json +134 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +144 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_DIFF.json +92 -0
- teradataml/data/jsons/uaf/17.20/TD_DTW.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
- teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +39 -0
- teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +85 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +71 -0
- teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +139 -0
- teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECASTER.json +313 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +81 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
- teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +64 -0
- teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
- teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +182 -0
- teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +103 -0
- teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +181 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_MINFO.json +67 -0
- teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +179 -0
- teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +119 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +98 -0
- teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +194 -0
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
- teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +143 -0
- teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +90 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +80 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -0
- teradataml/data/jsons/uaf/17.20/TD_SINFO.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +163 -0
- teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +112 -0
- teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -0
- teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
- teradataml/data/kmeans_example.json +23 -0
- teradataml/data/kmeans_table.csv +10 -0
- teradataml/data/kmeans_us_arrests_data.csv +51 -0
- teradataml/data/knn_example.json +19 -0
- teradataml/data/knnrecommender_example.json +7 -0
- teradataml/data/knnrecommenderpredict_example.json +12 -0
- teradataml/data/lar_example.json +17 -0
- teradataml/data/larpredict_example.json +30 -0
- teradataml/data/lc_new_predictors.csv +5 -0
- teradataml/data/lc_new_reference.csv +9 -0
- teradataml/data/lda_example.json +9 -0
- teradataml/data/ldainference_example.json +15 -0
- teradataml/data/ldatopicsummary_example.json +9 -0
- teradataml/data/levendist_input.csv +13 -0
- teradataml/data/levenshteindistance_example.json +10 -0
- teradataml/data/linreg_example.json +10 -0
- teradataml/data/load_example_data.py +350 -0
- teradataml/data/loan_prediction.csv +295 -0
- teradataml/data/lungcancer.csv +138 -0
- teradataml/data/mappingdata.csv +12 -0
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/milk_timeseries.csv +157 -0
- teradataml/data/min_max_titanic.csv +4 -0
- teradataml/data/minhash_example.json +6 -0
- teradataml/data/ml_ratings.csv +7547 -0
- teradataml/data/ml_ratings_10.csv +2445 -0
- teradataml/data/mobile_data.csv +13 -0
- teradataml/data/model1_table.csv +5 -0
- teradataml/data/model2_table.csv +5 -0
- teradataml/data/models/License_file.txt +1 -0
- teradataml/data/models/License_file_empty.txt +0 -0
- teradataml/data/models/dataiku_iris_data_ann_thin +0 -0
- teradataml/data/models/dr_iris_rf +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn.onnx +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn_floattensor.onnx +0 -0
- teradataml/data/models/iris_db_glm_model.pmml +57 -0
- teradataml/data/models/iris_db_xgb_model.pmml +4471 -0
- teradataml/data/models/iris_kmeans_model +0 -0
- teradataml/data/models/iris_mojo_glm_h2o_model +0 -0
- teradataml/data/models/iris_mojo_xgb_h2o_model +0 -0
- teradataml/data/modularity_example.json +12 -0
- teradataml/data/movavg_example.json +8 -0
- teradataml/data/mtx1.csv +7 -0
- teradataml/data/mtx2.csv +13 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/mvdfft8.csv +9 -0
- teradataml/data/naivebayes_example.json +10 -0
- teradataml/data/naivebayespredict_example.json +19 -0
- teradataml/data/naivebayestextclassifier2_example.json +7 -0
- teradataml/data/naivebayestextclassifier_example.json +8 -0
- teradataml/data/naivebayestextclassifierpredict_example.json +32 -0
- teradataml/data/name_Find_configure.csv +10 -0
- teradataml/data/namedentityfinder_example.json +14 -0
- teradataml/data/namedentityfinderevaluator_example.json +10 -0
- teradataml/data/namedentityfindertrainer_example.json +6 -0
- teradataml/data/nb_iris_input_test.csv +31 -0
- teradataml/data/nb_iris_input_train.csv +121 -0
- teradataml/data/nbp_iris_model.csv +13 -0
- teradataml/data/ner_dict.csv +8 -0
- teradataml/data/ner_extractor_text.csv +2 -0
- teradataml/data/ner_input_eng.csv +7 -0
- teradataml/data/ner_rule.csv +5 -0
- teradataml/data/ner_sports_test2.csv +29 -0
- teradataml/data/ner_sports_train.csv +501 -0
- teradataml/data/nerevaluator_example.json +6 -0
- teradataml/data/nerextractor_example.json +18 -0
- teradataml/data/nermem_sports_test.csv +18 -0
- teradataml/data/nermem_sports_train.csv +51 -0
- teradataml/data/nertrainer_example.json +7 -0
- teradataml/data/ngrams_example.json +7 -0
- teradataml/data/notebooks/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -0
- teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -0
- teradataml/data/notebooks/sqlalchemy/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -0
- teradataml/data/npath_example.json +23 -0
- teradataml/data/ntree_example.json +14 -0
- teradataml/data/numeric_strings.csv +5 -0
- teradataml/data/numerics.csv +4 -0
- teradataml/data/ocean_buoy.csv +17 -0
- teradataml/data/ocean_buoy2.csv +17 -0
- teradataml/data/ocean_buoys.csv +28 -0
- teradataml/data/ocean_buoys2.csv +10 -0
- teradataml/data/ocean_buoys_nonpti.csv +28 -0
- teradataml/data/ocean_buoys_seq.csv +29 -0
- teradataml/data/onehot_encoder_train.csv +4 -0
- teradataml/data/openml_example.json +92 -0
- teradataml/data/optional_event_table.csv +4 -0
- teradataml/data/orders1.csv +11 -0
- teradataml/data/orders1_12.csv +13 -0
- teradataml/data/orders_ex.csv +4 -0
- teradataml/data/pack_example.json +9 -0
- teradataml/data/package_tracking.csv +19 -0
- teradataml/data/package_tracking_pti.csv +19 -0
- teradataml/data/pagerank_example.json +13 -0
- teradataml/data/paragraphs_input.csv +6 -0
- teradataml/data/pathanalyzer_example.json +8 -0
- teradataml/data/pathgenerator_example.json +8 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/pattern_matching_data.csv +11 -0
- teradataml/data/payment_fraud_dataset.csv +10001 -0
- teradataml/data/peppers.png +0 -0
- teradataml/data/phrases.csv +7 -0
- teradataml/data/pivot_example.json +9 -0
- teradataml/data/pivot_input.csv +22 -0
- teradataml/data/playerRating.csv +31 -0
- teradataml/data/pos_input.csv +40 -0
- teradataml/data/postagger_example.json +7 -0
- teradataml/data/posttagger_output.csv +44 -0
- teradataml/data/production_data.csv +17 -0
- teradataml/data/production_data2.csv +7 -0
- teradataml/data/randomsample_example.json +32 -0
- teradataml/data/randomwalksample_example.json +9 -0
- teradataml/data/rank_table.csv +6 -0
- teradataml/data/real_values.csv +14 -0
- teradataml/data/ref_mobile_data.csv +4 -0
- teradataml/data/ref_mobile_data_dense.csv +2 -0
- teradataml/data/ref_url.csv +17 -0
- teradataml/data/restaurant_reviews.csv +7 -0
- teradataml/data/retail_churn_table.csv +27772 -0
- teradataml/data/river_data.csv +145 -0
- teradataml/data/roc_example.json +8 -0
- teradataml/data/roc_input.csv +101 -0
- teradataml/data/rule_inputs.csv +6 -0
- teradataml/data/rule_table.csv +2 -0
- teradataml/data/sales.csv +7 -0
- teradataml/data/sales_transaction.csv +501 -0
- teradataml/data/salesdata.csv +342 -0
- teradataml/data/sample_cities.csv +3 -0
- teradataml/data/sample_shapes.csv +11 -0
- teradataml/data/sample_streets.csv +3 -0
- teradataml/data/sampling_example.json +16 -0
- teradataml/data/sax_example.json +17 -0
- teradataml/data/scale_attributes.csv +3 -0
- teradataml/data/scale_example.json +74 -0
- teradataml/data/scale_housing.csv +11 -0
- teradataml/data/scale_housing_test.csv +6 -0
- teradataml/data/scale_input_part_sparse.csv +31 -0
- teradataml/data/scale_input_partitioned.csv +16 -0
- teradataml/data/scale_input_sparse.csv +11 -0
- teradataml/data/scale_parameters.csv +3 -0
- teradataml/data/scale_stat.csv +11 -0
- teradataml/data/scalebypartition_example.json +13 -0
- teradataml/data/scalemap_example.json +13 -0
- teradataml/data/scalesummary_example.json +12 -0
- teradataml/data/score_category.csv +101 -0
- teradataml/data/score_summary.csv +4 -0
- teradataml/data/script_example.json +10 -0
- teradataml/data/scripts/deploy_script.py +84 -0
- teradataml/data/scripts/lightgbm/dataset.template +175 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +264 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +234 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +177 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/mapper.py +16 -0
- teradataml/data/scripts/mapper_replace.py +16 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +205 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +148 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +144 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +166 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +161 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +145 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +327 -0
- teradataml/data/sdk/modelops/modelops_spec.json +101737 -0
- teradataml/data/seeds.csv +10 -0
- teradataml/data/sentenceextractor_example.json +7 -0
- teradataml/data/sentiment_extract_input.csv +11 -0
- teradataml/data/sentiment_train.csv +16 -0
- teradataml/data/sentiment_word.csv +20 -0
- teradataml/data/sentiment_word_input.csv +20 -0
- teradataml/data/sentimentextractor_example.json +24 -0
- teradataml/data/sentimenttrainer_example.json +8 -0
- teradataml/data/sequence_table.csv +10 -0
- teradataml/data/seriessplitter_example.json +8 -0
- teradataml/data/sessionize_example.json +17 -0
- teradataml/data/sessionize_table.csv +116 -0
- teradataml/data/setop_test1.csv +24 -0
- teradataml/data/setop_test2.csv +22 -0
- teradataml/data/soc_nw_edges.csv +11 -0
- teradataml/data/soc_nw_vertices.csv +8 -0
- teradataml/data/souvenir_timeseries.csv +168 -0
- teradataml/data/sparse_iris_attribute.csv +5 -0
- teradataml/data/sparse_iris_test.csv +121 -0
- teradataml/data/sparse_iris_train.csv +601 -0
- teradataml/data/star1.csv +6 -0
- teradataml/data/star_pivot.csv +8 -0
- teradataml/data/state_transition.csv +5 -0
- teradataml/data/stock_data.csv +53 -0
- teradataml/data/stock_movement.csv +11 -0
- teradataml/data/stock_vol.csv +76 -0
- teradataml/data/stop_words.csv +8 -0
- teradataml/data/store_sales.csv +37 -0
- teradataml/data/stringsimilarity_example.json +8 -0
- teradataml/data/strsimilarity_input.csv +13 -0
- teradataml/data/students.csv +101 -0
- teradataml/data/svm_iris_input_test.csv +121 -0
- teradataml/data/svm_iris_input_train.csv +481 -0
- teradataml/data/svm_iris_model.csv +7 -0
- teradataml/data/svmdense_example.json +10 -0
- teradataml/data/svmdensepredict_example.json +19 -0
- teradataml/data/svmsparse_example.json +8 -0
- teradataml/data/svmsparsepredict_example.json +14 -0
- teradataml/data/svmsparsesummary_example.json +8 -0
- teradataml/data/target_mobile_data.csv +13 -0
- teradataml/data/target_mobile_data_dense.csv +5 -0
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/tdnerextractor_example.json +14 -0
- teradataml/data/templatedata.csv +1201 -0
- teradataml/data/templates/open_source_ml.json +11 -0
- teradataml/data/teradata_icon.ico +0 -0
- teradataml/data/teradataml_example.json +1473 -0
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_loan_prediction.csv +53 -0
- teradataml/data/test_pacf_12.csv +37 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/test_river2.csv +109 -0
- teradataml/data/text_inputs.csv +6 -0
- teradataml/data/textchunker_example.json +8 -0
- teradataml/data/textclassifier_example.json +7 -0
- teradataml/data/textclassifier_input.csv +7 -0
- teradataml/data/textclassifiertrainer_example.json +7 -0
- teradataml/data/textmorph_example.json +11 -0
- teradataml/data/textparser_example.json +15 -0
- teradataml/data/texttagger_example.json +12 -0
- teradataml/data/texttokenizer_example.json +7 -0
- teradataml/data/texttrainer_input.csv +11 -0
- teradataml/data/tf_example.json +7 -0
- teradataml/data/tfidf_example.json +14 -0
- teradataml/data/tfidf_input1.csv +201 -0
- teradataml/data/tfidf_train.csv +6 -0
- teradataml/data/time_table1.csv +535 -0
- teradataml/data/time_table2.csv +14 -0
- teradataml/data/timeseriesdata.csv +1601 -0
- teradataml/data/timeseriesdatasetsd4.csv +105 -0
- teradataml/data/timestamp_data.csv +4 -0
- teradataml/data/titanic.csv +892 -0
- teradataml/data/titanic_dataset_unpivoted.csv +19 -0
- teradataml/data/to_num_data.csv +4 -0
- teradataml/data/tochar_data.csv +5 -0
- teradataml/data/token_table.csv +696 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/train_tracking.csv +28 -0
- teradataml/data/trans_dense.csv +16 -0
- teradataml/data/trans_sparse.csv +55 -0
- teradataml/data/transformation_table.csv +6 -0
- teradataml/data/transformation_table_new.csv +2 -0
- teradataml/data/tv_spots.csv +16 -0
- teradataml/data/twod_climate_data.csv +117 -0
- teradataml/data/uaf_example.json +529 -0
- teradataml/data/univariatestatistics_example.json +9 -0
- teradataml/data/unpack_example.json +10 -0
- teradataml/data/unpivot_example.json +25 -0
- teradataml/data/unpivot_input.csv +8 -0
- teradataml/data/url_data.csv +10 -0
- teradataml/data/us_air_pass.csv +37 -0
- teradataml/data/us_population.csv +624 -0
- teradataml/data/us_states_shapes.csv +52 -0
- teradataml/data/varmax_example.json +18 -0
- teradataml/data/vectordistance_example.json +30 -0
- teradataml/data/ville_climatedata.csv +121 -0
- teradataml/data/ville_tempdata.csv +12 -0
- teradataml/data/ville_tempdata1.csv +12 -0
- teradataml/data/ville_temperature.csv +11 -0
- teradataml/data/waveletTable.csv +1605 -0
- teradataml/data/waveletTable2.csv +1605 -0
- teradataml/data/weightedmovavg_example.json +9 -0
- teradataml/data/wft_testing.csv +5 -0
- teradataml/data/windowdfft.csv +16 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/data/word_embed_input_table1.csv +6 -0
- teradataml/data/word_embed_input_table2.csv +5 -0
- teradataml/data/word_embed_model.csv +23 -0
- teradataml/data/words_input.csv +13 -0
- teradataml/data/xconvolve_complex_left.csv +6 -0
- teradataml/data/xconvolve_complex_leftmulti.csv +6 -0
- teradataml/data/xgboost_example.json +36 -0
- teradataml/data/xgboostpredict_example.json +32 -0
- teradataml/data/ztest_example.json +16 -0
- teradataml/dataframe/__init__.py +0 -0
- teradataml/dataframe/copy_to.py +2446 -0
- teradataml/dataframe/data_transfer.py +2840 -0
- teradataml/dataframe/dataframe.py +20908 -0
- teradataml/dataframe/dataframe_utils.py +2114 -0
- teradataml/dataframe/fastload.py +794 -0
- teradataml/dataframe/functions.py +2110 -0
- teradataml/dataframe/indexer.py +424 -0
- teradataml/dataframe/row.py +160 -0
- teradataml/dataframe/setop.py +1171 -0
- teradataml/dataframe/sql.py +10904 -0
- teradataml/dataframe/sql_function_parameters.py +440 -0
- teradataml/dataframe/sql_functions.py +652 -0
- teradataml/dataframe/sql_interfaces.py +220 -0
- teradataml/dataframe/vantage_function_types.py +675 -0
- teradataml/dataframe/window.py +694 -0
- teradataml/dbutils/__init__.py +3 -0
- teradataml/dbutils/dbutils.py +2871 -0
- teradataml/dbutils/filemgr.py +318 -0
- teradataml/gen_ai/__init__.py +2 -0
- teradataml/gen_ai/convAI.py +473 -0
- teradataml/geospatial/__init__.py +4 -0
- teradataml/geospatial/geodataframe.py +1105 -0
- teradataml/geospatial/geodataframecolumn.py +392 -0
- teradataml/geospatial/geometry_types.py +926 -0
- teradataml/hyperparameter_tuner/__init__.py +1 -0
- teradataml/hyperparameter_tuner/optimizer.py +4115 -0
- teradataml/hyperparameter_tuner/utils.py +303 -0
- teradataml/lib/__init__.py +0 -0
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/lib/libaed_0_1_aarch64.so +0 -0
- teradataml/lib/libaed_0_1_ppc64le.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/_base.py +1321 -0
- teradataml/opensource/_class.py +464 -0
- teradataml/opensource/_constants.py +61 -0
- teradataml/opensource/_lightgbm.py +949 -0
- teradataml/opensource/_sklearn.py +1008 -0
- teradataml/opensource/_wrapper_utils.py +267 -0
- teradataml/options/__init__.py +148 -0
- teradataml/options/configure.py +489 -0
- teradataml/options/display.py +187 -0
- teradataml/plot/__init__.py +3 -0
- teradataml/plot/axis.py +1427 -0
- teradataml/plot/constants.py +15 -0
- teradataml/plot/figure.py +431 -0
- teradataml/plot/plot.py +810 -0
- teradataml/plot/query_generator.py +83 -0
- teradataml/plot/subplot.py +216 -0
- teradataml/scriptmgmt/UserEnv.py +4273 -0
- teradataml/scriptmgmt/__init__.py +3 -0
- teradataml/scriptmgmt/lls_utils.py +2157 -0
- teradataml/sdk/README.md +79 -0
- teradataml/sdk/__init__.py +4 -0
- teradataml/sdk/_auth_modes.py +422 -0
- teradataml/sdk/_func_params.py +487 -0
- teradataml/sdk/_json_parser.py +453 -0
- teradataml/sdk/_openapi_spec_constants.py +249 -0
- teradataml/sdk/_utils.py +236 -0
- teradataml/sdk/api_client.py +900 -0
- teradataml/sdk/constants.py +62 -0
- teradataml/sdk/modelops/__init__.py +98 -0
- teradataml/sdk/modelops/_client.py +409 -0
- teradataml/sdk/modelops/_constants.py +304 -0
- teradataml/sdk/modelops/models.py +2308 -0
- teradataml/sdk/spinner.py +107 -0
- teradataml/series/__init__.py +0 -0
- teradataml/series/series.py +537 -0
- teradataml/series/series_utils.py +71 -0
- teradataml/store/__init__.py +12 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +658 -0
- teradataml/store/feature_store/feature_store.py +4814 -0
- teradataml/store/feature_store/mind_map.py +639 -0
- teradataml/store/feature_store/models.py +7330 -0
- teradataml/store/feature_store/utils.py +390 -0
- teradataml/table_operators/Apply.py +979 -0
- teradataml/table_operators/Script.py +1739 -0
- teradataml/table_operators/TableOperator.py +1343 -0
- teradataml/table_operators/__init__.py +2 -0
- teradataml/table_operators/apply_query_generator.py +262 -0
- teradataml/table_operators/query_generator.py +493 -0
- teradataml/table_operators/table_operator_query_generator.py +462 -0
- teradataml/table_operators/table_operator_util.py +726 -0
- teradataml/table_operators/templates/dataframe_apply.template +184 -0
- teradataml/table_operators/templates/dataframe_map.template +176 -0
- teradataml/table_operators/templates/dataframe_register.template +73 -0
- teradataml/table_operators/templates/dataframe_udf.template +67 -0
- teradataml/table_operators/templates/script_executor.template +170 -0
- teradataml/telemetry_utils/__init__.py +0 -0
- teradataml/telemetry_utils/queryband.py +53 -0
- teradataml/utils/__init__.py +0 -0
- teradataml/utils/docstring.py +527 -0
- teradataml/utils/dtypes.py +943 -0
- teradataml/utils/internal_buffer.py +122 -0
- teradataml/utils/print_versions.py +206 -0
- teradataml/utils/utils.py +451 -0
- teradataml/utils/validators.py +3305 -0
- teradataml-20.0.0.8.dist-info/METADATA +2804 -0
- teradataml-20.0.0.8.dist-info/RECORD +1208 -0
- teradataml-20.0.0.8.dist-info/WHEEL +5 -0
- teradataml-20.0.0.8.dist-info/top_level.txt +1 -0
- teradataml-20.0.0.8.dist-info/zip-safe +1 -0
@@ -0,0 +1,1617 @@
+"""
+Unpublished work.
+Copyright (c) 2020 by Teradata Corporation. All rights reserved.
+TERADATA CORPORATION CONFIDENTIAL AND TRADE SECRET
+
+Primary Owner: PankajVinod.Purandare@teradata.com
+Secondary Owner: Adithya.Avvaru@teradata.com
+
+This file implements the core framework that allows user to execute any Vantage Analytics
+Library (VALIB) Function.
+"""
+import time
+import uuid
+from math import floor
+
+from teradataml.telemetry_utils.queryband import collect_queryband
+from teradataml.common import messages
+from teradataml.common.constants import TeradataConstants, ValibConstants as VC
+from teradataml.common.exceptions import TeradataMlException
+from teradataml.common.garbagecollector import GarbageCollector
+from teradataml.common.messages import Messages, MessageCodes
+from teradataml.common.utils import UtilFuncs
+from teradataml.context.context import get_context, _get_current_databasename
+from teradataml.options.configure import configure
+from teradataml.dataframe.dataframe import DataFrame, in_schema
+from teradataml.utils.validators import _Validators
+from teradataml.analytics.Transformations import Binning, Derive, OneHotEncoder, FillNa, \
+    LabelEncoder, MinMaxScalar, Retain, Sigmoid, ZScore
+from teradataml.common.constants import TeradataReservedKeywords, TeradataConstants
+
+
+class _VALIB():
+    """ An internal class for executing VALIB analytic functions. """
+
+    def __init__(self, *c, **kwargs):
+        """ Constructor for VALIB function execution. """
+        # Vantage SQL name of the VALIB function.
+        self.__sql_func_name = ""
+        # teradataml name of the VALIB function.
+        self.__tdml_valib_name = ""
+        self.__func_arg_sql_syntax_eles = []
+        self.__func_other_arg_sql_names = []
+        self.__func_other_args = []
+        self.result = None
+        self.__generated_sql = None
+        self.__multioutput_attr_map = {}
+        self.__multioutput_attr_map.update(VC.TERADATAML_VALIB_MULTIOUTPUT_ATTR_MAP.value)
+        self.__output_arg_map = {}
+        self.__output_arg_map.update(VC.VALIB_FUNCTION_OUTPUT_ARGUMENT_MAP.value)
+
+    @property
+    def _tdml_valib_name(self):
+        """
+        DESCRIPTION:
+            Function to return VAL function name.
+
+        RETURNS:
+            str
+
+        RAISES:
+            None
+
+        EXAMPLES:
+            valib.LinReg._tdml_valib_name
+        """
+        return self.__tdml_valib_name
+
+    def __getattr__(self, item):
+        """
+        DESCRIPTION:
+            Returns an attribute of the _VALIB class.
+
+        PARAMETERS:
+            item:
+                Required Argument.
+                Specifes the name of the attribute.
+
+        RETURNS:
+            An object of _VALIB class.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            valib.ValibFunctionName
+        """
+        return self.__get_valib_instance(item)
+
+    def __call__(self, **kwargs):
+        """
+        DESCRIPTION:
+            Function makes the instance of this class callable.
+
+        PARAMETERS:
+            kwargs:
+                Keyword arguments for the callable function.
+
+        RETURNS:
+            Returns a callable of object of _VALIB class.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            valib.ValibFunctionName()
+        """
+        # Input arguments passed to a function.
+        # Use the same as the data members for the dynamic class.
+        self.__dyn_cls_data_members = kwargs
+        return self._execute_valib_function(**kwargs)
+
+    def __get_valib_instance(self, item):
+        """
+        DESCRIPTION:
+            Function creates and returns an instance of valib class for the function
+            name assigning the SQL function name and teradataml function name attributes.
+        PARAMETERS:
+            item:
+                Required Argument.
+                Specifies the name of the attribute/function.
+                Types: str
+
+        RETURNS:
+            An object of _VALIB class.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            valib.__get_valib_instance("<function_name>")
+        """
+        valib_f = _VALIB()
+        valib_f.__tdml_valib_name = item
+
+        # Overwriting the multioutput attribute mapper with evaluator map if tdml function name
+        # is present in the constant TERDATAML_EVALUATOR_OUTPUT_ATTR_MAP.
+        evaluator_map = VC.TERDATAML_EVALUATOR_OUTPUT_ATTR_MAP.value
+        if item in evaluator_map:
+            valib_f.__multioutput_attr_map = {}
+            valib_f.__multioutput_attr_map.update(evaluator_map)
+
+        try:
+            valib_f.__sql_func_name = VC.TERADATAML_VALIB_SQL_FUNCTION_NAME_MAP.value[item].upper()
+        except:
+            valib_f.__sql_func_name = item.upper()
+        return valib_f
+
+    def __create_dynamic_valib_class(self):
+        """
+        DESCRIPTION:
+            Function dynamically creates a class of VALIB function type.
+
+        PARAMETERS:
+            None
+
+        RETURNS:
+            An object of dynamic class of VALIB function name.
+
+        RAISES:
+            None.
+
+        EXAMPLE:
+            self.__create_dynamic_valib_class()
+        """
+
+        # Constructor for the dynamic class.
+        def constructor(self):
+            """ Constructor for dynamic class """
+            # Do Nothing...
+            pass
+        self.__dyn_cls_data_members["__init__"] = constructor
+
+        # __repr__ method for dynamic class.
+        def print_result(self):
+            """ Function to be used for representation of VALIB function type object. """
+            repr_string = ""
+            for key in self._valib_results:
+                repr_string = "{}\n############ {} Output ############".format(repr_string, key)
+                repr_string = "{}\n\n{}\n\n".format(repr_string, getattr(self, key))
+            return repr_string
+        self.__dyn_cls_data_members["__repr__"] = print_result
+
+        query = (self.__query, self.__generated_sql)
+        # Print the underlying SQL stored procedure call or generated SQL.
+        def show_query(self, query_type="sp"):
+            """
+            Function to return the underlying SQL query.
+            """
+            _Validators._validate_permitted_values(arg=query_type,
+                                                   permitted_values=["sp", "sql", "both"],
+                                                   arg_name="query_type")
+            if query_type.lower() == "sp":
+                return query[0]
+            elif query_type.lower() == "sql":
+                return query[1]
+            return query
+
+        self.__dyn_cls_data_members["show_query"] = show_query
+
+        # To list attributes using dict()
+        self.__dyn_cls_data_members["__dict__"] = self.__dyn_cls_data_members
+
+        # Dynamic class creation with VALIB function name.
+        valib_class = type(self.__tdml_valib_name, (object,), self.__dyn_cls_data_members)
+
+        return valib_class()
+
+    def __create_output_dataframes(self, out_var):
+        """
+        DESCRIPTION:
+            Internal function to create output DataFrame, set the index labels to
+            None and add the same to the result list.
+            Function makes sure that all these created variables are added to the
+            dynamic class as data members.
+
+        PARAMETERS:
+            out_var:
+                Required Argument.
+                Specifies the name of the output DataFrame.
+                Types: str
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__create_output_dataframes("result")
+        """
+        self.__dyn_cls_data_members[out_var] = DataFrame(
+            in_schema(self.__db_name, self.__dyn_cls_data_members[out_var]))
+        self.__dyn_cls_data_members[out_var]._index_label = None
+        self.__dyn_cls_data_members[out_var]._index_query_required = False
+        self.__dyn_cls_data_members[VC.OUTPUT_DATAFRAME_RESULTS.value].append(out_var)
+
+    @collect_queryband(attr="_VALIB__sql_func_name")
+    def __generate_execute_sp_query(self):
+        """
+        DESCRIPTION:
+            Function generates a stored procedure call corresponding to the function
+            and execute the same.
+
+        PARAMETERS:
+            None.
+
+        RETURNS:
+            Console output of query, if any, otherwise None.
+
+        RAISES:
+            TeradataMlException
+
+        EXAMPLES:
+            self.__generate_execute_sp_query()
+        """
+        # Generate and execute SQL VALIB SP call.
+        if configure.val_install_location is None:
+            message = Messages.get_message(MessageCodes.UNKNOWN_INSTALL_LOCATION,
+                                           "Vantage analytic functions",
+                                           "option 'configure.val_install_location'")
+            raise TeradataMlException(message, MessageCodes.MISSING_ARGS)
+
+        query_string = "call {0}.td_analyze('{1}', '{2};');"
+        self.__query = query_string.format(configure.val_install_location, self.__sql_func_name,
+                                           ";".join(self.__func_arg_sql_syntax_eles))
+
+        return UtilFuncs._execute_query(self.__query, expect_none_result=True)
+
+    def __generate_valib_sql_argument_syntax(self, arg, arg_name):
+        """
+        DESCRIPTION:
+            Function to generate the VALIB SQL function argument syntax.
+
+        PARAMETERS:
+            arg:
+                Required Argument.
+                Specifies an argument value to be used in VALIB function call.
+                Types: Any object that can be converted to a string.
+
+            arg_name:
+                Required Argument.
+                Specifies a SQL argument name to be used in VALIB function call.
+                Types: String
+
+        RETURNS:
+            None
+
+        RAISES:
+            None
+
+        EXAMPLES:
+            self.__generate_valib_sql_argument_syntax(argument, "argument_name")
+        """
+        arg = UtilFuncs._teradata_collapse_arglist(arg, "")
+        self.__func_arg_sql_syntax_eles.append("{}={}".format(arg_name, arg))
+
+    def __extract_db_tbl_name(self, table_name, arg_name, extract_table=True, remove_quotes=False):
+        """
+        DESCRIPTION:
+            Function processes the table name argument to extract database or table from it.
+
+        PARAMETERS:
+            table_name:
+                Required Argument.
+                Specifies the fully-qualified table name.
+                Types: String
+
+            arg_name:
+                Required Argument.
+                Specifies a SQL argument name to be used in VALIB function call.
+                Types: String
+
+            extract_table:
+                Optional Argument.
+                Specifies whether to extract a table name or database name from
+                "table_name". When set to 'True', table name is extracted otherwise
+                database name is extracted.
+                Default Value: True
+                Types: bool
+
+            remove_quotes:
+                Optional Argument.
+                Specifies whether to remove quotes from the extracted string or not.
+                When set to 'True', double quotes will be removed from the extracted
+                name.
+                Default Value: False
+                Types: bool
+
+        RETURNS:
+            Extracted name.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            # Extract the table name and remove quotes.
+            self.__extract_db_tbl_name(self, table_name, arg_name, remove_quotes=True)
+
+            # Extract the database name.
+            self.__extract_db_tbl_name(self, table_name, arg_name, extract_table=False)
+        """
+        # Extract table name or db name from the 'table_name'
+        if extract_table:
+            name = UtilFuncs._extract_table_name(table_name)
+        else:
+            name = UtilFuncs._extract_db_name(table_name)
+
+        # Remove quotes.
+        if remove_quotes:
+            name = name.replace("\"", "")
+
+        # Generate VALIB function argument call syntax.
+        self.__generate_valib_sql_argument_syntax(name, arg_name)
+
+        return name
+
+    def __get_temp_table_name(self):
+        """
+        DESCRIPTION:
+            Generate and get the table name for the outputs.
+
+        PARAMETERS:
+            None.
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__get_temp_table_name()
+        """
+        prefix = "valib_{}".format(self.__tdml_valib_name.lower())
+        tbl_name = UtilFuncs._generate_temp_table_name(prefix=prefix, use_default_database=True,
+                                                       gc_on_quit=True, quote=False,
+                                                       table_type=TeradataConstants.TERADATA_TABLE)
+        # With VT option, table name is getting generated with 'vt_'.
+        # But its not getting created as Volatile table. Hence
+        # explicitly garbage collecting.
+        if configure.temp_object_type == TeradataConstants.TERADATA_VOLATILE_TABLE:
+            GarbageCollector._add_to_garbagecollector(tbl_name,
+                                                      TeradataConstants.TERADATA_TABLE)
+        return tbl_name
+
+    def __process_dyn_cls_output_member(self, arg_name, out_tablename, out_var=None):
+        """
+        DESCRIPTION:
+            Function to process output table name argument. As part of processing it does:
+            * Generates the SQL clause for argument name.
+            * Adds a data member to the dynamic class dictionary, with the name same as
+              exposed name of the output DataFrame.
+
+        PARAMETERS:
+            arg_name:
+                Required Argument.
+                Specifies the output table SQL argument name.
+                Types: str
+
+            out_tablename:
+                Required Argument.
+                Specifies the output table name.
+                Types: str
+
+            out_var:
+                Optional Argument.
+                Specifies the output DataFrame name to use.
+                If this is None, then value for this is extracted from
+                'TERADATAML_VALIB_MULTIOUTPUT_ATTR_MAP'.
+                Types: str
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__process_dyn_cls_output_member("outputtablename", out_tablename,
+                                                 ValibConstants.DEFAULT_OUTPUT_VAR.value)
+        """
+        if out_var is None:
+            # If output variable name is None, then extract it from the MAP.
+            # This output variable corresponds to the output DataFrame name of the function.
+            func_name = self.__get_output_attr_map_func_name()
+            out_var = self.__multioutput_attr_map[func_name][arg_name]
+
+        # Add the output DataFrame name, to the dictionary of dynamic class.
+        # At start we will just add the corresponding table name as it's value.
+        self.__dyn_cls_data_members[out_var] = self.__extract_db_tbl_name(table_name=out_tablename,
+                                                                          arg_name=arg_name)
+
+    def __get_table_name_with_extension(self, table_name, extension):
+        """
+        DESCRIPTION:
+            Internal function to create a table name using the extension and add it to Garbage
+            Collector.
+
+        PARAMETERS:
+            table_name:
+                Required Argument.
+                Specifies the table name for which extension is to be suffixed.
+                Types: str
+
+            extension:
+                Required Argument.
+                Specifies the suffix string that is to be added at the end of the table name.
+                Types: str
+
+        RETURNS:
+            The new table name.
+
+        EXAMPLE:
+            self.__get_table_name_with_extension(table_name="<table_name>", extension="_rpt")
+        """
+        # Add extension to the table name.
+        generated_table_name = "{}{}".format(table_name, extension)
+
+
+        # Register new output table to the GC.
+        gc_tabname = "\"{}\".\"{}\"".format(self.__db_name, generated_table_name)
+        GarbageCollector._add_to_garbagecollector(gc_tabname, TeradataConstants.TERADATA_TABLE)
+
+        return generated_table_name
+
+    def __get_output_attr_map_func_name(self):
+        """
+        DESCRIPTION:
+            Function to get either teradataml function name or SQL function name from
+            "__multioutput_attr_map" based on whether the function is evaluator function or not.
+
+        PARAMETERS:
+            None.
+
+        RETURNS:
+            Either teradataml function name or SQL function name.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__get_output_attr_map_func_name()
+        """
+        # __multioutput_attr_map can have either SQL function name or tdml function name.
+        # If the function is evaluator function, then __multioutput_attr_map contains the
+        # dictionary of tdml function name to dictionary of output tables. Otherwise, it
+        # contains the dictionary of SQL function name to dictionary of output tables.
+        func_name = self.__sql_func_name
+        if self.__tdml_valib_name in self.__multioutput_attr_map:
+            func_name = self.__tdml_valib_name
+        return func_name
+
+    def __process_func_outputs(self, query_exec_output):
+        """
+        DESCRIPTION:
+            Internal function to process the output tables generated by a stored procedure
+            call. Function creates the required output DataFrames from the tables and a
+            result list.
+
+        PARAMETERS:
+            query_exec_output:
+                Required Argument.
+                Specifies the output captured by the UtilFuncs._execute_query() API.
+                If no output is generated None should be passed.
+                Types: tuple
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            exec_out = self.__generate_execute_sp_query()
+            self.__process_func_outputs(query_exec_output=exec_out)
+        """
+        self.__dyn_cls_data_members[VC.OUTPUT_DATAFRAME_RESULTS.value] = []
+
+        func_name = self.__get_output_attr_map_func_name()
+
+        # Processing gensql/gensqlonly output.
+        # Checking if user has passed gen_sql or gen_sql_only as an argument and is true.
+        # If gen_sql_only is true, don't process the output and return.
+        gen_sql_only = self.__dyn_cls_data_members.get("gen_sql_only", False)
+        if gen_sql_only:
+            self.__generated_sql = query_exec_output[0][0][0]
+            self.__dyn_cls_data_members[VC.DEFAULT_OUTPUT_VAR.value] = None
+            return
+        elif self.__dyn_cls_data_members.get("gen_sql", False):
+            self.__generated_sql = query_exec_output[0][0][0]
+
+        if func_name in self.__multioutput_attr_map:
+            # Process each output and get it ready for dynamic class creation.
+            valib_output_mapper = self.__multioutput_attr_map[func_name]
+            for key in valib_output_mapper:
+                out_var = valib_output_mapper[key]
+                self.__create_output_dataframes(out_var=out_var)
+        elif VC.DEFAULT_OUTPUT_VAR.value in self.__dyn_cls_data_members:
+            # Process functions that generate only one output.
+            self.__create_output_dataframes(out_var=VC.DEFAULT_OUTPUT_VAR.value)
+        else:
+            # Function which will not produce any output table, but will return result set.
+            # "result_set" will contain the actual result data in a list of list format.
+            self.__dyn_cls_data_members["result_set"] = query_exec_output[0]
+            # "result_columns" will contain the list of column names of the result data.
+            self.__dyn_cls_data_members["result_columns"] = query_exec_output[1]
+        # TODO - Add support for EXP's does not producing any output tables. Future Purpose.
+
+    def __process_output_extensions(self, output_table_name, output_extensions):
+        """
+        DESCRIPTION:
+            Function to process extended outputs of the function.
+            Extended outputs are the output tables generated by SQL function, using
+            the existing output table name and adding some extensions to it.
+            For example,
+                Linear function takes one argument for producing the output tables, but
+                it's ends up creating multiple output tables.
+                This is how it created these tables.
+                * Creates a coefficients and statistics table by using the name passed to
+                  "outputtablename" argument.
+                * Creates a statistical measures table using the name passed to
+                  "outputtablename" argument and appending "_rpt" to it.
+                * Creates a XML reports table using the name passed to "outputtablename"
+                  argument and appending "_txt" to it.
+
+        PARAMETERS:
+            output_table_name:
+                Required Argument.
+                Specifies the output table name to use the extensions with to produce new
+                output table names.
+                Types: str
+
+            output_extensions:
+                Required Argument.
+                Specifies a mapper with output table extensions as keys and output dataframe name
+                as value.
+                Types: dict
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__process_output_extensions("output_table_name",
+                                             {"_rpt": "output_df_name1",
+                                              "_txt": "output_df_name1"})
+        """
+
+        # Now let's process the output extensions and respective output DataFrames.
+        for extension in output_extensions:
+            new_table_name = self.__get_table_name_with_extension(table_name=output_table_name,
+                                                                  extension=extension)
+
+            # Get the teradataml output variable name corresponding to the extension.
+            func_name = self.__get_output_attr_map_func_name()
+            out_var = self.__multioutput_attr_map[func_name][extension]
+
+            # Add the table name to the dynamic class as it's data member.
+            self.__dyn_cls_data_members[out_var] = new_table_name
+
+    def __process_output_argument(self):
+        """
+        DESCRIPTION:
+            Function to process output argument(s) of a VALIB function.
+
+        PARAMETERS:
+            None.
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__process_output_argument()
+        """
+
+        #
+        # Note:
+        # So far all the functions we have seen, only one output database argument is present
+        # in SQL functions. In case in future, a function with more output database arguments
+        # are added, we will need to modify this function, especially the below piece and treat
+        # database arguments as we are processing the output table name arguments.
+        #
+        # Default SQL argument name for the output database argument.
+        database_arg_name = "outputdatabase"
+        if self.__sql_func_name in self.__output_arg_map:
+            # Extract output database argument name for the function and use the same.
+            database_arg_name = self.__output_arg_map[self.__sql_func_name]["db"]
+
+        out_tablename = self.__get_temp_table_name()
+        self.__db_name = self.__extract_db_tbl_name(table_name=out_tablename,
+                                                    arg_name=database_arg_name,
+                                                    extract_table=False)
+
+        #
+        # Note:
+        # So far all the functions visited, we observed following characteristics about
+        # processing the output tables by SQL function.
+        # 1. Function produces only one table, with argument name as "outputtablename",
+        #    which is our default case.
+        # 2. Function produces only one table, with argument name different than
+        #    "outputtablename". In such case, we use 'VALIB_FUNCTION_OUTPUT_ARGUMENT_MAP'
+        #    to extract the SQL argument name for specifying the output table.
+        # 3. Function produces multiple output tables with multiple output table arguments.
+        #    In such case, we use 'VALIB_FUNCTION_OUTPUT_ARGUMENT_MAP' to extract the SQL
+        #    argument names for specifying the output tables.
+        # 4. Function produces multiple output tables with just one output table argument.
+        #    In such cases, SQL uses the specified table name to create one of the output
+        #    table and other output tables are created based on the pre-defined extensions
+        #    which are appended to the specified table name and using the same.
+        #
+        # Now that we have processed the output database name argument, we will now process the
+        # output table name argument(s).
+        if self.__sql_func_name in self.__output_arg_map:
+            # Extract the function output argument map.
+            func_output_argument_map = self.__output_arg_map[self.__sql_func_name]
+
+            # Extract output table argument name(s) for the function and use the same.
+            table_arg_names = func_output_argument_map["tbls"]
+
+            if not isinstance(table_arg_names, list):
+                # This is a block to process functions producing multiple outputs with
+                # 1. One output table argument.
+                # 2. Use the same argument to produce other argument with some extension to it.
+                #
+                # Extract the table name from the generated name and add it to SQL syntax.
+                table_name = self.__extract_db_tbl_name(table_name=out_tablename,
+                                                        arg_name=table_arg_names)
+
+                # Process all mandatory output extensions, irrespective of whether the function
+                # is scoring or evaluator or any other function.
+                if "mandatory_output_extensions" in func_output_argument_map:
+                    mandatory_extensions = func_output_argument_map["mandatory_output_extensions"]
+                    self.__process_output_extensions(table_name, mandatory_extensions)
+
+                if "evaluator_output_extensions" in func_output_argument_map:
+                    # We process either the table in "table_arg_names" or
+                    # "evaluator_output_extensions" based on whether the function is evaluator
+                    # function or not.
+                    #
+                    # If the function is:
+                    # 1. evaluator function, process extensions as mentioned in evaluator based
+                    #    output extensions.
+                    # 2. NOT evaluator function (scoring or any other function):
+                    #    a. with an entry in TERADATAML_VALIB_MULTIOUTPUT_ATTR_MAP,
+                    #       process table in the variable "table_arg_names".
+                    #    b. without an entry in TERADATAML_VALIB_MULTIOUTPUT_ATTR_MAP,
+                    #       process table as "result".
+                    if self.__tdml_valib_name in self.__multioutput_attr_map:
+                        evaluator_extensions = \
+                            func_output_argument_map["evaluator_output_extensions"]
+                        self.__process_output_extensions(table_name, evaluator_extensions)
+
+                    elif self.__sql_func_name in self.__multioutput_attr_map:
+                        out_var = \
+                            self.__multioutput_attr_map[self.__sql_func_name][table_arg_names]
+                        self.__dyn_cls_data_members[out_var] = table_name
+
+                    else:
+                        out_var = VC.DEFAULT_OUTPUT_VAR.value
+                        self.__dyn_cls_data_members[out_var] = table_name
+
+                else:
+                    # If function produces only one output table, but uses different argument name.
+                    func_name = self.__get_output_attr_map_func_name()
+                    out_var = self.__multioutput_attr_map[func_name][table_arg_names]
+                    self.__dyn_cls_data_members[out_var] = table_name
+            else:
+                # Function produces multiple outputs.
+                for arg_name in table_arg_names:
+                    # Generate a table name for each output and add the name to the dictionary
+                    # for further processing and dynamic class creation.
+                    out_tablename = self.__get_temp_table_name()
+                    self.__process_dyn_cls_output_member(arg_name, out_tablename)
+        else:
+            # Let's use the default output table name argument "outputtablename".
+            self.__process_dyn_cls_output_member("outputtablename", out_tablename,
+                                                 VC.DEFAULT_OUTPUT_VAR.value)
+
+    def __process_input_argument(self, df, database_arg_name, table_arg_name):
+        """
+        DESCRIPTION:
+            Function to process input argument(s).
+
+        PARAMETERS:
+            df:
+                Required Argument.
+                Specifies the input teradataml DataFrame.
+                Types: teradataml DataFrame
+
+            database_arg_name:
+                Required Argument.
+                Specifies the name of the database argument.
+                Types: String
+
+            table_arg_name:
+                Required Argument.
+                Specifies the name of the table argument.
+                Types: String
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__process_input_argument(df, "db", "table")
+        """
+        # Assuming that df._table_name always contains FQDN.
+        db_name = UtilFuncs()._get_db_name_from_dataframe(df)
+
+        self.__generate_valib_sql_argument_syntax(db_name, database_arg_name)
+        self.__extract_db_tbl_name(df._table_name, table_arg_name, remove_quotes=True)
+
+    def __process_other_arguments(self, **kwargs):
+        """
+        DESCRIPTION:
+            Function to process other arguments.
+
+        PARAMETERS:
+            kwargs:
+                Specifies the keyword arguments passed to a function.
+
+        RETURNS:
+            None.
+
+        RAISES:
+            None.
+
+        EXAMPLES:
+            self.__process_other_arguments(arg1="string", arg2="db", arg3=2)
+        """
+        # Argument name dictionary.
+        function_arguments = VC.TERADATAML_VALIB_FUNCTION_ARGUMENT_MAP.value
+        try:
+            func_arg_mapper = function_arguments[self.__sql_func_name]
+        except:
+            func_arg_mapper = None
+
+        # Input argument name mapper extracted from VALIB_FUNCTION_MULTIINPUT_ARGUMENT_MAP.
+        input_arguments = VC.VALIB_FUNCTION_MULTIINPUT_ARGUMENT_MAP.value
+        try:
+            func_input_arg_mapper = input_arguments[self.__sql_func_name]
+            input_handling_required = True
+        except:
+            func_input_arg_mapper = None
+            input_handling_required = False
+
+        for arg in kwargs:
+            arg_notin_arg_mapper = func_arg_mapper is not None and arg not in func_arg_mapper
+            # Raise error if incorrect argument is passed.
+            error_msg = "{0}() got an unexpected keyword argument '{1}'".\
+                format(self.__tdml_valib_name, arg)
+            if input_handling_required:
+                if arg_notin_arg_mapper and arg not in func_input_arg_mapper:
+                    raise TypeError(error_msg)
+            else:
+                if arg_notin_arg_mapper:
+                    raise TypeError(error_msg)
+
+            # Arguments to ignore and the once which will not be processed.
+            if arg.lower() in VC.IGNORE_ARGUMENTS.value:
+                if arg.lower() == "outputstyle":
+                    # If user has passed an argument "outputstyle", then we will ignore
+                    # user value and then create a table as final outcome.
+                    self.__generate_valib_sql_argument_syntax("table", "outputstyle")
+
+                # Other arguments mentioned in 'ValibConstants.IGNORE_ARGUMENTS' will be ignored.
+                continue
+
+            # Pop each argument from kwargs.
+            arg_value = kwargs.get(arg)
+
+            if input_handling_required and arg in func_input_arg_mapper:
+                # Argument provided is an input argument.
+                # Let's get the names of the database and table arguments for this arg.
+                self.__process_input_argument(df=arg_value,
+                                              database_arg_name=
+                                              func_input_arg_mapper[arg]["database_arg"],
+                                              table_arg_name=
+                                              func_input_arg_mapper[arg]["table_arg"])
+            else:
+                # Get the SQL argument name.
+                arg_name = func_arg_mapper[arg] if isinstance(func_arg_mapper, dict) else arg
+                self.__generate_valib_sql_argument_syntax(arg_value, arg_name)
+
+    def __process_val_transformations(self, transformations, tf_tdml_arg, tf_sql_arg, data,
+                                      data_arg="data"):
+        """
+        DESCRIPTION:
+            Internal function to process the transformation(s) and generate the SQL
+            argument syntax for the argument.
+
+        PARAMETERS:
+            transformations:
+                Required Argument.
+                Specifies the transformation(s) to be used for variable transformation.
+                Types: FillNa
+
+            tf_tdml_arg:
+                Required Argument.
+                Specifies the name of the argument that accepts transformation(s)
+                to be used for variable transformation.
+                Types: str
+
+            tf_sql_arg:
+                Required Argument.
+                Specifies the SQL argument name used for the transformation(s).
+                Types: str
+
+            data:
+                Required Argument.
+                Specifies the input teradataml DataFrame used for Variable Transformation.
+                Types: teradataml DataFrame
+
+            data_arg:
+                Optional Argument.
+                Specifies the name of the input data argument.
+                Default Value: "data"
+                Types: string
+
+        RETURNS:
+            None
+
+        RAISES:
+            ValueError
+
+        EXAMPLES:
+            self.__process_val_transformations(fillna, "fillna", "nullreplacement", data)
+        """
+        # A list to contains SQL syntax of each transformation.
+        tf_syntax_elements = []
+
+        for tf in UtilFuncs._as_list(transformations):
+            # Validates the existence of the columns used for transformation
+            # in the input data.
+            if tf.columns is not None:
+                _Validators._validate_dataframe_has_argument_columns(
+                    UtilFuncs._as_list(tf.columns), "columns in {}".format(tf_tdml_arg), data,
+                    data_arg)
+            tf_syntax_elements.append(tf._val_sql_syntax())
+
+        # Add an entry for transformation in SQL argument syntax.
+        self.__generate_valib_sql_argument_syntax(arg="".join(tf_syntax_elements),
+                                                  arg_name=tf_sql_arg)
+
+    def _execute_valib_function(self,
+                                skip_data_arg_processing=False,
+                                skip_output_arg_processing=False,
+                                skip_other_arg_processing=False,
+                                skip_func_output_processing=False,
+                                skip_dyn_cls_processing=False,
+                                **kwargs):
+        """
+        DESCRIPTION:
+            Function processes arguments and executes the VALIB function.
+
+        PARAMETERS:
+            skip_data_arg_processing:
+                Optional Argument.
+                Specifies whether to skip data argument processing or not.
+                Default is to process the data argument.
+                When set to True, caller should make sure to process "data" argument and
+                pass SQL argument and values as part of kwargs to this function.
+                Default Value: False
+                Types: bool
+
+            skip_output_arg_processing:
+                Optional Argument.
+                Specifies whether to skip output argument processing or not.
+                Default is to process the output arguments.
+                When set to True, caller should make sure to process all output arguments and
+                pass equivalent SQL argument and values as part of kwargs to this function.
+                Default Value: False
+                Types: bool
+
+            skip_other_arg_processing:
+                Optional Argument.
+                Specifies whether to skip other argument processing or not.
+                Default is to process the other arguments, i.e., kwargs.
+                When set to True, caller should make sure to process all other arguments are
+                processed internally by the function.
+                Default Value: False
+                Types: bool
+
+            skip_func_output_processing:
+                Optional Argument.
+                Specifies whether to skip function output processing or not.
+                Default is to process the same.
+                When set to True, caller should make sure to process function output
+                arguments. Generally, when this argument is set to True, one must also
+                set "skip_dyn_cls_processing" to True.
+                Default Value: False
+                Types: bool
+
+            skip_dyn_cls_processing:
+                Optional Argument.
+                Specifies whether to skip dynamic class processing or not.
+                Default is to process the dynamic class, where it creates a dynamic
+                class and an instance of the same and returns the same.
+                When set to True, caller should make sure to process dynamic class and
+                return an instance of the same.
+                arguments.
+                Default Value: False
+                Types: bool
+
+            kwargs:
+                Specifies the keyword arguments passed to a function.
+
+        RETURNS:
+            None.
+
+        RAISES:
+            TeradataMlException, TypeError
+
+        EXAMPLES:
+            self._execute_valib_function(arg1="string", arg2="db", arg3=2)
+        """
+        if not skip_data_arg_processing:
+            # Process data argument.
+            try:
+                data = kwargs.pop("data")
+                if not isinstance(data, DataFrame):
+                    raise TypeError(Messages.get_message(MessageCodes.UNSUPPORTED_DATATYPE,
+                                                         ["data"], ["teradataml DataFrame"]))
+                self.__process_input_argument(data, "database", "tablename")
+            except KeyError:
+                # Raise TeradataMlException.
+                error_msg = Messages.get_message(MessageCodes.MISSING_ARGS, ["data"])
+                raise TeradataMlException(error_msg, MessageCodes.MISSING_ARGS)
+
+        if not skip_output_arg_processing:
+            # Process output arguments.
+            self.__process_output_argument()
+
+        if not skip_other_arg_processing:
+            # Process other arguments.
+            self.__process_other_arguments(**kwargs)
+
+        # If the function is evaluator function, add SQL argument "scoringmethod=evaluate".
+        if self.__tdml_valib_name in self.__multioutput_attr_map:
+            scoring_method_values = VC.SCORING_METHOD_ARG_VALUE.value
+            score_method = "non-default"
+            if kwargs.get("gen_sql_only", False):
+                score_method = "default"
+            self.__generate_valib_sql_argument_syntax(scoring_method_values[score_method],
+                                                      VC.SCORING_METHOD_ARG_NAME.value)
+
+        # Generate the query.
+        exec_out = self.__generate_execute_sp_query()
+
+        if not skip_func_output_processing:
+            # Process the function output DataFrames.
+            self.__process_func_outputs(query_exec_output=exec_out)
+
+        if not skip_dyn_cls_processing:
+            # Generate the dynamic class and create a object of the
+            # same and return the same.
+            return self.__create_dynamic_valib_class()
+
+    def Association(self, data, group_column, item_column, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Add required arguments, i.e., positional arguments to kwargs for
+        # further processing.
+        kwargs["data"] = data
+        kwargs["group_column"] = group_column
+        kwargs["item_column"] = item_column
+
+        # Get a new instance of _VALIB() class for function execution.
+        valib_inst = self.__get_valib_instance("Association")
+
+        # Add all arguments to dynamic class as data members.
+        valib_inst.__dyn_cls_data_members = {}
+        valib_inst.__dyn_cls_data_members.update(kwargs)
+
+        # Get the value of "combinations", "no_support_results", "process_type"
+        # parameters from kwargs.
+        # These three parameters decide the number of output table generated.
+        combinations = kwargs.get("combinations", 11)
+        no_support_results = kwargs.get("no_support_results", True)
+        process_type = kwargs.get("process_type", "all")
+        support_result_prefix = kwargs.pop("support_result_prefix", "ml__valib_association")
+
+        # Support table information based on the combinations.
+        # This dict contains a list of names of the support output tables those will
+        # be generated for a specific combination.
+        combinations_support_tables = {
+            11: ["_0_TO_1_SUPPORT", "_1_TO_1_SUPPORT"],
+            12: ["_0_TO_1_SUPPORT", "_1_TO_1_SUPPORT", "_2_TO_1_SUPPORT"],
+            13: ["_0_TO_1_SUPPORT", "_2_TO_1_SUPPORT", "_3_TO_1_SUPPORT"],
+            14: ["_0_TO_1_SUPPORT", "_3_TO_1_SUPPORT", "_4_TO_1_SUPPORT"],
+            21: ["_0_TO_1_SUPPORT", "_1_TO_1_SUPPORT", "_2_TO_1_SUPPORT"],
+            22: ["_0_TO_1_SUPPORT", "_1_TO_1_SUPPORT", "_2_TO_2_SUPPORT"],
+            23: ["_0_TO_1_SUPPORT", "_1_TO_1_SUPPORT", "_2_TO_1_SUPPORT", "_3_TO_2_SUPPORT"],
+            31: ["_0_TO_1_SUPPORT", "_2_TO_1_SUPPORT", "_3_TO_1_SUPPORT"],
+            32: ["_0_TO_1_SUPPORT", "_1_TO_1_SUPPORT", "_2_TO_1_SUPPORT", "_3_TO_2_SUPPORT"],
+            41: ["_0_TO_1_SUPPORT", "_3_TO_1_SUPPORT", "_4_TO_1_SUPPORT"],
+        }
+
+        # This dict contains name of the support output table mapped to its corresponding
+        # exposed output teradataml DataFrame name.
+        support_result_names = {
+            "_0_TO_1_SUPPORT": "support_result_01",
+            "_1_TO_1_SUPPORT": "support_result_11",
+            "_2_TO_1_SUPPORT": "support_result_21",
+            "_3_TO_1_SUPPORT": "support_result_31",
+            "_4_TO_1_SUPPORT": "support_result_41",
+            "_2_TO_2_SUPPORT": "support_result_22",
+            "_3_TO_2_SUPPORT": "support_result_32",
+        }
+
+        # Association rules produces various outputs. It generates:
+        # 1. Support Tables
+        # 2. Affinity Tables.
+
+        # Support tables are generated when one of the following conditions occur:
+        # 1. When "process_type" is 'support'. Then only two tables are generated as follows:
+        #    a. <support_result_prefix>_1_ITEM_SUPPORT
+        #    b. <support_result_prefix>_group_count
+        # 2. When "no_support_results" is set to False.
+        #    a. Multiple support table are generated based on the values passed
+        #       to "combinations".
+        #    b. A GROUP COUNT support table is also generated.
+
+        # Here are some details on how and what outputs are generated:
+        # 1. When "process_type" is 'support', then:
+        #    a. No affinity tables are generated.
+        #    b. Only two support tables are generated, which are named as:
+        #       i. <support_result_prefix>_1_ITEM_SUPPORT
+        #       ii. <support_result_prefix>_group_count
+        # 2. When "no_support_results" is set to False.
+        #    a. Affinity tables are generated.
+        #    b. Multiple support table are generated, along with GROUP COUNT table.
+        # 3. When "no_support_results" is set to True.
+        #    a. Only affinity tables are generated.
+        #    b. No support tables are generated.
+
+        # Affinity tables are generated based on the values passed to "combinations"
+        # parameter. Number of outputs generated is equal to the number of values passed
+        # to "combinations".
+        # Here are some cases to understand about this processing:
+        # 1. If "combinations" parameter is not passed, i.e., is None, then only
+        #    one output table is generated.
+        # 2. If only one value is passed to "combinations" parameter, then only
+        #    one output table is generated.
+        # 3. If only one value is passed in a list to "combinations" parameter,
+        #    then only one output table is generated.
+        # 4. If list with multiple values is passed to "combinations" parameter,
+        #    then number of output tables generated is equal to length of the list.
+        # 5. If empty list is passed to "combinations" parameter, then SQL will
+        #    take care of throwing appropriate exceptions.
+
+        # Let's add the entry for the function in multi-output attribute mapper
+        # as function produces multiple outputs.
+        valib_inst.__multioutput_attr_map[valib_inst.__sql_func_name] = {}
+
+        # To process output table parameters:
+        # 1. Let's generate the output database name parameter first.
+        # 2. Then generate the output table parameter.
+        # 3. Once the arguments and it's values are generated, call
+        #    _execute_valib_function() and make sure to skip the
+        #    output argument processing only.
+
+        # Let's first get the temp table name to be used for creating output
+        # tables. Extract the database name and table name which will be used
+        # as follows:
+        # 1. Database name will be passed to SQL argument 'outputdatabase'.
+        # 2. Table name extracted will be used to generate the values for
+        #    SQL argument 'outputtablename'.
+        out_tablename = valib_inst.__get_temp_table_name()
+
+        # Add an entry for "outputdatabase" in SQL argument syntax.
+        valib_inst.__db_name = valib_inst.__extract_db_tbl_name(table_name=out_tablename,
+                                                                arg_name="outputdatabase",
+                                                                extract_table=False,
+                                                                remove_quotes=True)
+
+        __table_name = UtilFuncs._extract_table_name(out_tablename).replace("\"", "")
+
+        # Let's start processing the output table argument.
+        # A list containing the output table name argument values.
+        output_table_names = []
+
+        # For Association we will create two new variables to store the output DataFrame
+        # attribute names for support tables and affinity tables.
+        #
+        # This is done specifically for Association function as output attribute names
+        # will vary based on the input values for "combinations" parameter. Thus, it will
+        # help user to know the names of the output DataFrame attributes generated for
+        # a specific function call.
+        sup_table_attrs = "support_outputs"
+        aff_table_attrs = "affinity_outputs"
+        valib_inst.__dyn_cls_data_members[sup_table_attrs] = []
+        valib_inst.__dyn_cls_data_members[aff_table_attrs] = []
+
+        # Before we proceed here is a common function which will be used for
+        # processing support tables.
+        def process_support_tables(out_var, support_table_name):
+            """ Internal function to process support tables. """
+            valib_inst.__dyn_cls_data_members[out_var] = support_table_name
+            valib_inst.__multioutput_attr_map[valib_inst.__sql_func_name][out_var] = out_var
+            if out_var not in valib_inst.__dyn_cls_data_members[sup_table_attrs]:
+                valib_inst.__dyn_cls_data_members[sup_table_attrs].append(out_var)
+            GarbageCollector._add_to_garbagecollector(support_table_name,
+                                                      TeradataConstants.TERADATA_TABLE)
+
+        # GROUP_COUNT support table will be generated, when "process_type" is 'support'
+        # or "no_support_results" is set to False.
+        # Add the entry for the table in the output mappers.
+        if process_type.lower() == "support" or not no_support_results:
+            # Output attribute name of the group count table is "group_count".
+            out_var = "group_count"
+            grp_cnt_table_name = "{}_group_count".format(support_result_prefix)
+            process_support_tables(out_var=out_var, support_table_name=grp_cnt_table_name)
+
+        # Let's process the other support tables and affinity tables.
+        if process_type.lower() == "support":
+            # We are here that means only 1 item support table along with group count
+            # support table is generated. Group count table entry is already added.
|
|
1168
|
+
# Output attribute name of the 1 item support table is "support_1_item".
|
|
1169
|
+
out_var = "support_1_item"
|
|
1170
|
+
sup_tbl_name = "{}_1_ITEM_SUPPORT".format(support_result_prefix)
|
|
1171
|
+
process_support_tables(out_var=out_var, support_table_name=sup_tbl_name)
|
|
1172
|
+
|
|
1173
|
+
# Value for output table does not matter when "process_type" is 'support'.
|
|
1174
|
+
# No affinity tables are generated.
|
|
1175
|
+
output_table_names.append(__table_name)
|
|
1176
|
+
else:
|
|
1177
|
+
# Affinity tables and other support tables are generated only when "process_type"
|
|
1178
|
+
# is not equal to 'support'.
|
|
1179
|
+
|
|
1180
|
+
# Process the affinity tables.
|
|
1181
|
+
for combination in UtilFuncs._as_list(combinations):
|
|
1182
|
+
# Generate the new output table name.
|
|
1183
|
+
extension = "_{}".format(combination)
|
|
1184
|
+
out_var = "{}{}".format(VC.DEFAULT_OUTPUT_VAR.value, extension)
|
|
1185
|
+
new_tbl_name = valib_inst.__get_table_name_with_extension(table_name=__table_name,
|
|
1186
|
+
extension=extension)
|
|
1187
|
+
|
|
1188
|
+
# Add an entry for affinity output in mappers, which will produce the
|
|
1189
|
+
# output DataFrames.
|
|
1190
|
+
valib_inst.__dyn_cls_data_members[out_var] = new_tbl_name
|
|
1191
|
+
valib_inst.__multioutput_attr_map[valib_inst.__sql_func_name][out_var] = out_var
|
|
1192
|
+
valib_inst.__dyn_cls_data_members[aff_table_attrs].append(out_var)
|
|
1193
|
+
|
|
1194
|
+
# Add the name of the output affinity table, which will be used as value
|
|
1195
|
+
# for the "outputtablename" argument.
|
|
1196
|
+
output_table_names.append(new_tbl_name)
|
|
1197
|
+
|
|
1198
|
+
if not no_support_results:
|
|
1199
|
+
# Other support tables are also generated and are not dropped in the end
|
|
1200
|
+
# by Vantage, hence we will create output DataFrames for each one of those.
|
|
1201
|
+
# Let's process all those support tables.
|
|
1202
|
+
# 'combinations_support_tables' contains a name of list of support
|
|
1203
|
+
# output tables those will be generated for a specific combination.
|
|
1204
|
+
for sup_postfix in combinations_support_tables[combination]:
|
|
1205
|
+
sup_out_var = support_result_names[sup_postfix]
|
|
1206
|
+
sup_tbl_name = "{}{}".format(support_result_prefix, sup_postfix)
|
|
1207
|
+
process_support_tables(out_var=sup_out_var, support_table_name=sup_tbl_name)
|
|
1208
|
+
|
|
1209
|
+
# Add an entry for "outputtablename" in SQL argument syntax.
|
|
1210
|
+
valib_inst.__generate_valib_sql_argument_syntax(arg=output_table_names,
|
|
1211
|
+
arg_name="outputtablename")
|
|
1212
|
+
|
|
1213
|
+
# Execute the function, skip output argument and output dataframe processing.
|
|
1214
|
+
return valib_inst._execute_valib_function(skip_output_arg_processing=True,
|
|
1215
|
+
support_result_prefix=support_result_prefix,
|
|
1216
|
+
**kwargs)
|
|
1217
|
+
|
|
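The Association block above exposes its result tables through dynamically created DataFrame attributes: the lists `support_outputs` and `affinity_outputs` record which attributes were created, `group_count` and `support_1_item` cover the support-only path, and one affinity attribute is created per value in `combinations`. A minimal usage sketch follows; only `combinations` and the output attribute names come from the code above, while the table, column and grouping/item parameter names are hypothetical assumptions and an active Vantage connection is assumed.

```python
# Illustrative sketch only; group_column/item_column and the table names are
# assumptions, not confirmed by this diff.
from teradataml import DataFrame, configure, valib

configure.val_install_location = "VAL"      # assumption: VAL installed in database "VAL"

baskets = DataFrame("retail_baskets")       # hypothetical transactions table

assoc = valib.Association(data=baskets,
                          group_column="basket_id",   # assumed argument name
                          item_column="product",      # assumed argument name
                          combinations=[2, 3])        # taken from the code above

# Attribute names exposed by the dynamic result class, per the mappers above.
print(assoc.affinity_outputs)               # e.g. ['result_2', 'result_3'] (prefix assumed)
print(assoc.support_outputs)                # e.g. ['group_count', 'support_result_21', ...]
```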
+    def KMeans(self, data, columns, centers, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Add the required arguments to kwargs for further processing.
+        kwargs["data"] = data
+        kwargs["columns"] = columns
+        kwargs["centers"] = centers
+
+        # Get a new instance of _VALIB() class for function execution.
+        new_valib_obj = self.__get_valib_instance("KMeans")
+
+        # Add all arguments to dynamic class as data members.
+        new_valib_obj.__dyn_cls_data_members = {}
+        new_valib_obj.__dyn_cls_data_members.update(kwargs)
+
+        centroids_data = kwargs.pop("centroids_data", None)
+
+        # If there is no "centroids_data", do normal processing.
+        if centroids_data is None:
+            return new_valib_obj._execute_valib_function(**kwargs)
+
+        # If "centroids_data" is provided, special handling for the output argument is needed.
+        if not isinstance(centroids_data, DataFrame):
+            raise TypeError(Messages.get_message(MessageCodes.UNSUPPORTED_DATATYPE,
+                                                 ["centroids_data"], ["teradataml DataFrame"]))
+
+        # The following things have to be handled:
+        # 1. The table in "centroids_data" is updated with new centroids and the same table
+        #    is the result (new output) table.
+        # Extract the database name and add it to the VALIB SQL argument syntax.
+        new_valib_obj.__db_name = new_valib_obj.__extract_db_tbl_name(
+            table_name=centroids_data._table_name,
+            arg_name="outputdatabase",
+            extract_table=False,
+            remove_quotes=True)
+
+        # Extract the table name and add it to the VALIB SQL argument syntax.
+        table_name = new_valib_obj.__extract_db_tbl_name(table_name=centroids_data._table_name,
+                                                         arg_name="outputtablename",
+                                                         extract_table=True,
+                                                         remove_quotes=True)
+
+        # Since output argument processing will be skipped, the table name is added in the
+        # dynamic class data member "result", which will be replaced with a DataFrame while
+        # processing function outputs in the function _execute_valib_function.
+        new_valib_obj.__dyn_cls_data_members[VC.DEFAULT_OUTPUT_VAR.value] = table_name
+
+        # 2. Execute the VALIB function call based on the arguments along with the newly
+        #    added SQL argument 'continuation=true', and process output and other
+        #    argument-related information.
+        return new_valib_obj._execute_valib_function(skip_output_arg_processing=True,
+                                                     continuation=True,
+                                                     **kwargs)
+
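KMeans above follows the standard wrapper pattern, with one special case: when `centroids_data` is supplied, the output arguments point back at that centroid table and the SQL call is issued with `continuation=true`, so the existing centroids are refined in place. A minimal sketch, assuming an active connection, a configured VAL install location, and hypothetical table and column names; the `result` attribute name is inferred from the "result" data-member comment above.

```python
from teradataml import DataFrame, configure, valib

configure.val_install_location = "VAL"          # assumption: VAL installed in database "VAL"

customers = DataFrame("customer_analysis")      # hypothetical input table

# Initial clustering run; "data", "columns" and "centers" come from the signature above.
km = valib.KMeans(data=customers, columns=["income", "age"], centers=3)
print(km.result)                                # centroid table as a teradataml DataFrame

# Continuation run: passing "centroids_data" takes the 'continuation=true' path above
# and updates the existing centroid table in place.
km2 = valib.KMeans(data=customers, columns=["income", "age"], centers=3,
                   centroids_data=km.result)
```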
+    def DecisionTreePredict(self, data, model, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Add the required arguments to kwargs for further processing.
+        kwargs["data"] = data
+        kwargs["model"] = model
+
+        # Get a new instance of _VALIB() class for function execution.
+        new_valib_obj = self.__get_valib_instance("DecisionTreePredict")
+
+        # Add all arguments to dynamic class as data members.
+        new_valib_obj.__dyn_cls_data_members = {}
+        new_valib_obj.__dyn_cls_data_members.update(kwargs)
+
+        return new_valib_obj._execute_valib_function(profile=True, **kwargs)
+
+    def DecisionTreeEvaluator(self, data, model, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Add the required arguments to kwargs for further processing.
+        kwargs["data"] = data
+        kwargs["model"] = model
+
+        # Get a new instance of _VALIB() class for function execution.
+        new_valib_obj = self.__get_valib_instance("DecisionTreeEvaluator")
+
+        # Add all arguments to dynamic class as data members.
+        new_valib_obj.__dyn_cls_data_members = {}
+        new_valib_obj.__dyn_cls_data_members.update(kwargs)
+
+        return new_valib_obj._execute_valib_function(profile=True, **kwargs)
+
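Both scoring wrappers simply forward `data` and `model` to the shared executor with `profile=True`. A rough sketch of the call pattern, assuming a previously trained VAL decision-tree model is available as a teradataml DataFrame; only the `data` and `model` parameter names are confirmed by the signatures above, and the table names are hypothetical.

```python
from teradataml import DataFrame, valib

test_df = DataFrame("customer_test")      # hypothetical table to score
model_df = DataFrame("dt_model")          # hypothetical table holding a trained VAL decision tree

# Score new rows against the existing model.
scored = valib.DecisionTreePredict(data=test_df, model=model_df)
print(scored.result)                      # attribute name assumed from the module's pattern

# Evaluate prediction quality on labelled data with the same model.
evaluated = valib.DecisionTreeEvaluator(data=test_df, model=model_df)
print(evaluated.result)
```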
+    def __validate_overlap_arguments(self, data_val, data_arg, columns_val, columns_arg,
+                                     is_optional=True):
+        """
+        DESCRIPTION:
+            Internal function to validate a pair of data{i} and columns{i} arguments.
+
+        PARAMETERS:
+            data_val:
+                Required Argument.
+                Specifies the teradataml DataFrame containing input data.
+                Types: teradataml DataFrame
+
+            data_arg:
+                Required Argument.
+                Specifies the argument name for the teradataml DataFrame specified in the
+                argument "data_val".
+                Types: str
+
+            columns_val:
+                Required Argument.
+                Specifies the list of column(s) present in the DataFrame "data_val".
+                Types: str OR list of strings (str)
+
+            columns_arg:
+                Required Argument.
+                Specifies the argument name for the columns specified in the
+                argument "columns_val".
+                Types: str
+
+            is_optional:
+                Optional Argument.
+                Specifies whether the values in arguments "data_val" and "columns_val" are
+                optional in the Overlap() function.
+                If True, the values in these arguments are validated as optional arguments
+                of the Overlap() function. Otherwise, these values are considered required
+                arguments.
+                Default Value: True
+                Types: bool
+
+        RETURNS:
+            None.
+
+        EXAMPLES:
+            valib.__validate_overlap_arguments(data_val=data, data_arg="data",
+                                               columns_val=columns, columns_arg="columns",
+                                               is_optional=False)
+        """
+        # Create argument information matrix to do parameter checking.
+        __arg_info_matrix = []
+        __arg_info_matrix.append([data_arg, data_val, is_optional, (DataFrame)])
+        __arg_info_matrix.append([columns_arg, columns_val, is_optional, (str, list), True])
+
+        _Validators._validate_function_arguments(arg_list=__arg_info_matrix)
+
+        _Validators._validate_dataframe_has_argument_columns(data=data_val,
+                                                             data_arg=data_arg,
+                                                             columns=columns_val,
+                                                             column_arg=columns_arg,
+                                                             is_partition_arg=False)
+
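The validator above relies on the argument-information-matrix convention used throughout this module: each row is `[arg_name, arg_value, is_optional, allowed_types]`, with an optional fifth element that forbids empty values. The snippet below is a simplified, self-contained stand-in written only to illustrate that row layout; it is not the library's `_Validators` implementation.

```python
# Simplified illustration of the arg-info-matrix convention; not the real validator.
def validate_args(arg_info_matrix):
    for row in arg_info_matrix:
        name, value, is_optional, types = row[:4]
        disallow_empty = row[4] if len(row) > 4 else False   # assumed meaning of the 5th element
        if value is None:
            if not is_optional:
                raise ValueError("Argument '{}' is required.".format(name))
            continue
        if not isinstance(value, types):
            raise TypeError("Argument '{}' must be of type {}.".format(name, types))
        if disallow_empty and len(value) == 0:
            raise ValueError("Argument '{}' must not be empty.".format(name))

# Mirrors the matrix built in __validate_overlap_arguments above.
validate_args([["columns1", ["custid"], False, (str, list), True]])
```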
+    # TODO- Delete LogRegPredict function definition if Jira TDAF-7867 is resolved.
+    def LogRegPredict(self, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+
+        # Get a new instance of _VALIB() class for function execution.
+        valib_inst = self.__get_valib_instance("LogRegPredict")
+
+        # Add all arguments to dynamic class as data members.
+        valib_inst.__dyn_cls_data_members = {}
+        valib_inst.__dyn_cls_data_members.update(kwargs)
+
+        # Set scoringmethod to "score" if gen_sql_only is True.
+        gen_sql_only = kwargs.get("gen_sql_only", False)
+        if gen_sql_only:
+            valib_inst.__generate_valib_sql_argument_syntax(arg="score",
+                                                            arg_name="scoringmethod")
+
+        return valib_inst._execute_valib_function(**kwargs)
+
+    def Overlap(self, data1, columns1, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Validate the required arguments - data1 and columns1.
+        # Other arguments are validated as and when they are processed.
+        self.__validate_overlap_arguments(data_val=data1, data_arg="data1",
+                                          columns_val=columns1, columns_arg="columns1",
+                                          is_optional=False)
+
+        kwargs["data1"] = data1
+        kwargs["columns1"] = columns1
+
+        # Each columns argument can take a string or a list of strings.
+        # Ensure all columns-related arguments are lists of one or more strings.
+        columns1 = UtilFuncs._as_list(columns1)
+
+        valib_inst = self.__get_valib_instance("Overlap")
+
+        # Add all arguments to dynamic class as data members.
+        valib_inst.__dyn_cls_data_members = {}
+        valib_inst.__dyn_cls_data_members.update(kwargs)
+
+        parse_kwargs = True
+        ind = 1
+        database_names = []
+        table_names = []
+        column_names_df = []
+
+        """
+        The argument names are data1, data2, ..., dataN and columns1, columns2, ..., columnsN,
+        corresponding to each data argument.
+        Note:
+            1. The number of data arguments should be the same as that of columns-related
+               arguments.
+            2. The number of columns in each of the columns-related arguments (including
+               the "columns1" argument) should be the same.
+        """
+        while parse_kwargs:
+            data_arg_name = "data{}".format(str(ind))
+            data_arg_value = kwargs.pop(data_arg_name, None)
+            if data_arg_value is None:
+                parse_kwargs = False
+            else:
+                columns_arg_name = "columns{}".format(str(ind))
+                columns_arg_value = kwargs.pop(columns_arg_name, None)
+
+                # Raise an error if dataN is present and columnsN is not present.
+                if columns_arg_value is None:
+                    err_ = Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING,
+                                                columns_arg_name, data_arg_name)
+                    raise TeradataMlException(err_, MessageCodes.DEPENDENT_ARG_MISSING)
+
+                self.__validate_overlap_arguments(data_val=data_arg_value,
+                                                  data_arg=data_arg_name,
+                                                  columns_val=columns_arg_value,
+                                                  columns_arg=columns_arg_name)
+
+                # Each columns argument can take a string or a list of strings.
+                # Ensure all columns-related arguments are lists of one or more strings.
+                columns_arg_value = UtilFuncs._as_list(columns_arg_value)
+
+                if len(columns_arg_value) != len(columns1):
+                    err_ = Messages.get_message(MessageCodes.INVALID_LENGTH_ARGS,
+                                                "'columns1', 'columns2', ..., 'columnsN'")
+                    raise TeradataMlException(err_, MessageCodes.INVALID_LENGTH_ARGS)
+
+                # If all the validations are done,
+                # 1. extract database names,
+                # 2. extract table names,
+                # 3. generate SQL syntax for the 'columns' argument.
+                database_names.append(UtilFuncs()._get_db_name_from_dataframe(data_arg_value))
+                __table_name = UtilFuncs._extract_table_name(data_arg_value._table_name).\
+                    replace("\"", "")
+                table_names.append(__table_name)
+                column_names_df.append("{" + ",".join(columns_arg_value) + "}")
+
+                ind = ind + 1
+
+        # gensqlonly implementation.
+        gen_sql_only = kwargs.pop("gen_sql_only", False)
+        if gen_sql_only:
+            valib_inst.__generate_valib_sql_argument_syntax(arg=str(gen_sql_only),
+                                                            arg_name="gensqlonly")
+        charset = kwargs.pop("charset", None)
+        # Raise an error if there are additional arguments.
+        if len(kwargs) != 0:
+            err_ = "The keyword arguments for Overlap() should have data1, data2, ..., dataN " \
+                   "and corresponding columns1, columns2, ..., columnsN. " \
+                   "Found additional arguments {}."
+            raise TypeError(err_.format(list(kwargs.keys())))
+
+        # Generate SQL syntax for SQL arguments database, tablename and columns.
+        valib_inst.__generate_valib_sql_argument_syntax(arg=",".join(database_names),
+                                                        arg_name="database")
+        valib_inst.__generate_valib_sql_argument_syntax(arg=",".join(table_names),
+                                                        arg_name="tablename")
+        valib_inst.__generate_valib_sql_argument_syntax(arg=",".join(column_names_df),
+                                                        arg_name="columns")
+        # Generate the clause for charset.
+        if charset:
+            valib_inst.__generate_valib_sql_argument_syntax(arg=charset,
+                                                            arg_name="charset")
+
+        return valib_inst._execute_valib_function(skip_data_arg_processing=True,
+                                                  skip_other_arg_processing=True)
+
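The Overlap wrapper above accepts any number of `data1`/`columns1`, `data2`/`columns2`, ... pairs, requires every `columnsN` value to have the same number of columns, and assembles the comma-separated `database`, `tablename` and `columns` SQL arguments itself. A minimal sketch under those rules; the table and column names are hypothetical and an active Vantage connection is assumed.

```python
from teradataml import DataFrame, valib

checking = DataFrame("checking_accounts")     # hypothetical tables sharing a customer id
savings = DataFrame("savings_accounts")
credit = DataFrame("credit_accounts")

# Each columnsN value may be a string or a list of strings, but every pair must
# supply the same number of columns (enforced by the length check above).
overlap = valib.Overlap(data1=checking, columns1="cust_id",
                        data2=savings, columns2="cust_id",
                        data3=credit, columns3="cust_id")
print(overlap.result)                         # attribute name assumed from the module's pattern
```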
+    def Transform(self, data, bins=None, derive=None, one_hot_encode=None, fillna=None,
+                  label_encode=None, rescale=None, retain=None, sigmoid=None, zscore=None,
+                  **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Argument Validations
+        # Note:
+        #     Commented code is kept for future purposes. Once all commented code is updated,
+        #     this note will be removed as well.
+        arg_info_matrix = []
+        arg_info_matrix.append(["data", data, False, (DataFrame)])
+        arg_info_matrix.append(["bins", bins, True, (Binning, list)])
+        arg_info_matrix.append(["derive", derive, True, (Derive, list)])
+        arg_info_matrix.append(["one_hot_encode", one_hot_encode, True, (OneHotEncoder, list)])
+        arg_info_matrix.append(["fillna", fillna, True, (FillNa, list)])
+        arg_info_matrix.append(["rescale", rescale, True, (MinMaxScalar, list)])
+        arg_info_matrix.append(["label_encode", label_encode, True, (LabelEncoder, list)])
+        arg_info_matrix.append(["retain", retain, True, (Retain, list)])
+        arg_info_matrix.append(["sigmoid", sigmoid, True, (Sigmoid, list)])
+        arg_info_matrix.append(["zscore", zscore, True, (ZScore, list)])
+
+        # Argument validations.
+        _Validators._validate_function_arguments(arg_info_matrix)
+
+        # Add "data" to kwargs for further processing.
+        kwargs["data"] = data
+
+        # Get a new instance of _VALIB() class for function execution.
+        valib_inst = self.__get_valib_instance("Transform")
+
+        # Add all arguments to dynamic class as data members.
+        valib_inst.__dyn_cls_data_members = {}
+        valib_inst.__dyn_cls_data_members.update(kwargs)
+        valib_inst.__dyn_cls_data_members["bins"] = bins
+        valib_inst.__dyn_cls_data_members["derive"] = derive
+        valib_inst.__dyn_cls_data_members["one_hot_encode"] = one_hot_encode
+        valib_inst.__dyn_cls_data_members["fillna"] = fillna
+        valib_inst.__dyn_cls_data_members["label_encode"] = label_encode
+        valib_inst.__dyn_cls_data_members["rescale"] = rescale
+        valib_inst.__dyn_cls_data_members["retain"] = retain
+        valib_inst.__dyn_cls_data_members["sigmoid"] = sigmoid
+        valib_inst.__dyn_cls_data_members["zscore"] = zscore
+
+        # Add the "outputstyle" argument to generate an output table.
+        valib_inst.__generate_valib_sql_argument_syntax(arg="table", arg_name="outputstyle")
+
+        # Bin Coding Transformation
+        if bins is not None:
+            valib_inst.__process_val_transformations(bins, "bins", "bincode", data)
+
+        # Derive Transformation
+        if derive is not None:
+            valib_inst.__process_val_transformations(derive, "derive", "derive", data)
+
+        # OneHotEncoder Transformation
+        if one_hot_encode is not None:
+            valib_inst.__process_val_transformations(one_hot_encode, "one_hot_encode", "designcode", data)
+
+        # Null Replacement Transformation
+        if fillna is not None:
+            valib_inst.__process_val_transformations(fillna, "fillna", "nullreplacement", data)
+
+        # LabelEncoder Transformation
+        if label_encode is not None:
+            valib_inst.__process_val_transformations(label_encode, "label_encode", "recode", data)
+
+        # MinMaxScalar Transformation
+        if rescale is not None:
+            valib_inst.__process_val_transformations(rescale, "rescale", "rescale", data)
+
+        # Retain Transformation
+        if retain is not None:
+            valib_inst.__process_val_transformations(retain, "retain", "retain", data)
+
+        # Sigmoid Transformation
+        if sigmoid is not None:
+            valib_inst.__process_val_transformations(sigmoid, "sigmoid", "sigmoid", data)
+
+        # ZScore Transformation
+        if zscore is not None:
+            valib_inst.__process_val_transformations(zscore, "zscore", "zscore", data)
+
+        # Execute the function; just do not process the output dataframes
+        # and dynamic class creation for the function.
+        return valib_inst._execute_valib_function(**kwargs)
+
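Transform above validates each transformation argument against its corresponding teradataml transformation class (Binning, Derive, OneHotEncoder, FillNa, LabelEncoder, MinMaxScalar, Retain, Sigmoid, ZScore) and emits one SQL clause per supplied transformation. A small sketch follows, assuming the transformation classes are importable from the teradataml package; the constructor parameter names and the table/column names are assumptions for illustration only.

```python
from teradataml import DataFrame, valib
from teradataml import FillNa, MinMaxScalar, Retain    # assumption: exposed at package level

customers = DataFrame("customer_raw")                  # hypothetical input table

fn = FillNa(style="literal", value=0, columns="income")       # parameter names assumed
rs = MinMaxScalar(columns="age", out_columns="age_scaled")    # parameter names assumed
rt = Retain(columns="cust_id")

# Additional arguments (for example a key/index column) may be required by the
# underlying VAL transform; they are omitted in this sketch.
tf = valib.Transform(data=customers, fillna=fn, rescale=rs, retain=rt)
print(tf.result)
```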
+    def XmlToHtmlReport(self, data, analysis_type, **kwargs):
+        """
+        Please refer to Teradata Python Function Reference guide for Documentation.
+        Reference guide can be found at: https://docs.teradata.com
+        """
+        # Add the required arguments to kwargs for further processing.
+        kwargs["data"] = data
+        kwargs["analysis_type"] = analysis_type
+        # Dict that maps teradataml class name to SQL name.
+        tdml_classname_to_sql_name = {"DecisionTree": "decisiontree",
+                                      "DecisionTreeEvaluator": "decisiontreescore",
+                                      "PCA": "factor",
+                                      "PCAEvaluator": "factorscore",
+                                      "LinReg": "linear",
+                                      "LogReg": "logistic",
+                                      "LogRegEvaluator": "logisticscore"}
+
+        if analysis_type in tdml_classname_to_sql_name:
+            kwargs["analysis_type"] = tdml_classname_to_sql_name[analysis_type]
+
+        # Get a new instance of _VALIB() class for function execution.
+        new_valib_obj = self.__get_valib_instance("XmlToHtmlReport")
+
+        # Add all arguments to dynamic class as data members.
+        new_valib_obj.__dyn_cls_data_members = {}
+        new_valib_obj.__dyn_cls_data_members.update(kwargs)
+
+        return new_valib_obj._execute_valib_function(**kwargs)
+
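XmlToHtmlReport maps a handful of teradataml class names to the analysis names expected by the underlying VAL report function, so the caller may pass either form. A minimal sketch, assuming `dt_report` is a teradataml DataFrame over the XML output of a previously run DecisionTree analysis; only `data` and `analysis_type` are confirmed by the signature above, everything else is an assumption.

```python
from teradataml import DataFrame, valib

dt_report = DataFrame("dt_model_xml")         # hypothetical table holding the XML model report

# "DecisionTree" is translated to the SQL analysis name "decisiontree" by the mapping
# shown above; passing "decisiontree" directly would work as well.
report = valib.XmlToHtmlReport(data=dt_report, analysis_type="DecisionTree")
print(report.result)                          # attribute name assumed from the module's pattern
```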
+# Define an object of type _VALIB that will allow the user to execute any VALIB function.
+valib = _VALIB()
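This module-level `valib` object is the single entry point for all of the wrappers above. A typical setup before calling any of them, assuming a reachable Vantage system; the host, credentials and install-location value are placeholders.

```python
from teradataml import create_context, configure, valib

# Placeholder connection parameters.
create_context(host="<host>", username="<user>", password="<password>")

# Point teradataml at the database where the Vantage Analytic Library is installed
# before invoking any valib.* function.
configure.val_install_location = "VAL"
```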