teradataml 20.0.0.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +2762 -0
- teradataml/__init__.py +78 -0
- teradataml/_version.py +11 -0
- teradataml/analytics/Transformations.py +2996 -0
- teradataml/analytics/__init__.py +82 -0
- teradataml/analytics/analytic_function_executor.py +2416 -0
- teradataml/analytics/analytic_query_generator.py +1050 -0
- teradataml/analytics/byom/H2OPredict.py +514 -0
- teradataml/analytics/byom/PMMLPredict.py +437 -0
- teradataml/analytics/byom/__init__.py +16 -0
- teradataml/analytics/json_parser/__init__.py +133 -0
- teradataml/analytics/json_parser/analytic_functions_argument.py +1805 -0
- teradataml/analytics/json_parser/json_store.py +191 -0
- teradataml/analytics/json_parser/metadata.py +1666 -0
- teradataml/analytics/json_parser/utils.py +805 -0
- teradataml/analytics/meta_class.py +236 -0
- teradataml/analytics/sqle/DecisionTreePredict.py +456 -0
- teradataml/analytics/sqle/NaiveBayesPredict.py +420 -0
- teradataml/analytics/sqle/__init__.py +128 -0
- teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -0
- teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -0
- teradataml/analytics/table_operator/__init__.py +11 -0
- teradataml/analytics/uaf/__init__.py +82 -0
- teradataml/analytics/utils.py +828 -0
- teradataml/analytics/valib.py +1617 -0
- teradataml/automl/__init__.py +5835 -0
- teradataml/automl/autodataprep/__init__.py +493 -0
- teradataml/automl/custom_json_utils.py +1625 -0
- teradataml/automl/data_preparation.py +1384 -0
- teradataml/automl/data_transformation.py +1254 -0
- teradataml/automl/feature_engineering.py +2273 -0
- teradataml/automl/feature_exploration.py +1873 -0
- teradataml/automl/model_evaluation.py +488 -0
- teradataml/automl/model_training.py +1407 -0
- teradataml/catalog/__init__.py +2 -0
- teradataml/catalog/byom.py +1759 -0
- teradataml/catalog/function_argument_mapper.py +859 -0
- teradataml/catalog/model_cataloging_utils.py +491 -0
- teradataml/clients/__init__.py +0 -0
- teradataml/clients/auth_client.py +137 -0
- teradataml/clients/keycloak_client.py +165 -0
- teradataml/clients/pkce_client.py +481 -0
- teradataml/common/__init__.py +1 -0
- teradataml/common/aed_utils.py +2078 -0
- teradataml/common/bulk_exposed_utils.py +113 -0
- teradataml/common/constants.py +1669 -0
- teradataml/common/deprecations.py +166 -0
- teradataml/common/exceptions.py +147 -0
- teradataml/common/formula.py +743 -0
- teradataml/common/garbagecollector.py +666 -0
- teradataml/common/logger.py +1261 -0
- teradataml/common/messagecodes.py +518 -0
- teradataml/common/messages.py +262 -0
- teradataml/common/pylogger.py +67 -0
- teradataml/common/sqlbundle.py +764 -0
- teradataml/common/td_coltype_code_to_tdtype.py +48 -0
- teradataml/common/utils.py +3166 -0
- teradataml/common/warnings.py +36 -0
- teradataml/common/wrapper_utils.py +625 -0
- teradataml/config/__init__.py +0 -0
- teradataml/config/dummy_file1.cfg +5 -0
- teradataml/config/dummy_file2.cfg +3 -0
- teradataml/config/sqlengine_alias_definitions_v1.0 +14 -0
- teradataml/config/sqlengine_alias_definitions_v1.1 +20 -0
- teradataml/config/sqlengine_alias_definitions_v1.3 +19 -0
- teradataml/context/__init__.py +0 -0
- teradataml/context/aed_context.py +223 -0
- teradataml/context/context.py +1462 -0
- teradataml/data/A_loan.csv +19 -0
- teradataml/data/BINARY_REALS_LEFT.csv +11 -0
- teradataml/data/BINARY_REALS_RIGHT.csv +11 -0
- teradataml/data/B_loan.csv +49 -0
- teradataml/data/BuoyData2.csv +17 -0
- teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -0
- teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -0
- teradataml/data/Convolve2RealsLeft.csv +5 -0
- teradataml/data/Convolve2RealsRight.csv +5 -0
- teradataml/data/Convolve2ValidLeft.csv +11 -0
- teradataml/data/Convolve2ValidRight.csv +11 -0
- teradataml/data/DFFTConv_Real_8_8.csv +65 -0
- teradataml/data/Employee.csv +5 -0
- teradataml/data/Employee_Address.csv +4 -0
- teradataml/data/Employee_roles.csv +5 -0
- teradataml/data/JulesBelvezeDummyData.csv +100 -0
- teradataml/data/Mall_customer_data.csv +201 -0
- teradataml/data/Orders1_12mf.csv +25 -0
- teradataml/data/Pi_loan.csv +7 -0
- teradataml/data/SMOOTHED_DATA.csv +7 -0
- teradataml/data/TestDFFT8.csv +9 -0
- teradataml/data/TestRiver.csv +109 -0
- teradataml/data/Traindata.csv +28 -0
- teradataml/data/__init__.py +0 -0
- teradataml/data/acf.csv +17 -0
- teradataml/data/adaboost_example.json +34 -0
- teradataml/data/adaboostpredict_example.json +24 -0
- teradataml/data/additional_table.csv +11 -0
- teradataml/data/admissions_test.csv +21 -0
- teradataml/data/admissions_train.csv +41 -0
- teradataml/data/admissions_train_nulls.csv +41 -0
- teradataml/data/advertising.csv +201 -0
- teradataml/data/ageandheight.csv +13 -0
- teradataml/data/ageandpressure.csv +31 -0
- teradataml/data/amazon_reviews_25.csv +26 -0
- teradataml/data/antiselect_example.json +36 -0
- teradataml/data/antiselect_input.csv +8 -0
- teradataml/data/antiselect_input_mixed_case.csv +8 -0
- teradataml/data/applicant_external.csv +7 -0
- teradataml/data/applicant_reference.csv +7 -0
- teradataml/data/apriori_example.json +22 -0
- teradataml/data/arima_example.json +9 -0
- teradataml/data/assortedtext_input.csv +8 -0
- teradataml/data/attribution_example.json +34 -0
- teradataml/data/attribution_sample_table.csv +27 -0
- teradataml/data/attribution_sample_table1.csv +6 -0
- teradataml/data/attribution_sample_table2.csv +11 -0
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bank_marketing.csv +11163 -0
- teradataml/data/bank_web_clicks1.csv +43 -0
- teradataml/data/bank_web_clicks2.csv +91 -0
- teradataml/data/bank_web_url.csv +85 -0
- teradataml/data/barrier.csv +2 -0
- teradataml/data/barrier_new.csv +3 -0
- teradataml/data/betweenness_example.json +14 -0
- teradataml/data/bike_sharing.csv +732 -0
- teradataml/data/bin_breaks.csv +8 -0
- teradataml/data/bin_fit_ip.csv +4 -0
- teradataml/data/binary_complex_left.csv +11 -0
- teradataml/data/binary_complex_right.csv +11 -0
- teradataml/data/binary_matrix_complex_left.csv +21 -0
- teradataml/data/binary_matrix_complex_right.csv +21 -0
- teradataml/data/binary_matrix_real_left.csv +21 -0
- teradataml/data/binary_matrix_real_right.csv +21 -0
- teradataml/data/blood2ageandweight.csv +26 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/boston.csv +507 -0
- teradataml/data/boston2cols.csv +721 -0
- teradataml/data/breast_cancer.csv +570 -0
- teradataml/data/buoydata_mix.csv +11 -0
- teradataml/data/burst_data.csv +5 -0
- teradataml/data/burst_example.json +21 -0
- teradataml/data/byom_example.json +34 -0
- teradataml/data/bytes_table.csv +4 -0
- teradataml/data/cal_housing_ex_raw.csv +70 -0
- teradataml/data/callers.csv +7 -0
- teradataml/data/calls.csv +10 -0
- teradataml/data/cars_hist.csv +33 -0
- teradataml/data/cat_table.csv +25 -0
- teradataml/data/ccm_example.json +32 -0
- teradataml/data/ccm_input.csv +91 -0
- teradataml/data/ccm_input2.csv +13 -0
- teradataml/data/ccmexample.csv +101 -0
- teradataml/data/ccmprepare_example.json +9 -0
- teradataml/data/ccmprepare_input.csv +91 -0
- teradataml/data/cfilter_example.json +12 -0
- teradataml/data/changepointdetection_example.json +18 -0
- teradataml/data/changepointdetectionrt_example.json +8 -0
- teradataml/data/chi_sq.csv +3 -0
- teradataml/data/churn_data.csv +14 -0
- teradataml/data/churn_emission.csv +35 -0
- teradataml/data/churn_initial.csv +3 -0
- teradataml/data/churn_state_transition.csv +5 -0
- teradataml/data/citedges_2.csv +745 -0
- teradataml/data/citvertices_2.csv +1210 -0
- teradataml/data/clicks2.csv +16 -0
- teradataml/data/clickstream.csv +13 -0
- teradataml/data/clickstream1.csv +11 -0
- teradataml/data/closeness_example.json +16 -0
- teradataml/data/complaints.csv +21 -0
- teradataml/data/complaints_mini.csv +3 -0
- teradataml/data/complaints_test_tokenized.csv +353 -0
- teradataml/data/complaints_testtoken.csv +224 -0
- teradataml/data/complaints_tokens_model.csv +348 -0
- teradataml/data/complaints_tokens_test.csv +353 -0
- teradataml/data/complaints_traintoken.csv +472 -0
- teradataml/data/computers_category.csv +1001 -0
- teradataml/data/computers_test1.csv +1252 -0
- teradataml/data/computers_train1.csv +5009 -0
- teradataml/data/computers_train1_clustered.csv +5009 -0
- teradataml/data/confusionmatrix_example.json +9 -0
- teradataml/data/conversion_event_table.csv +3 -0
- teradataml/data/corr_input.csv +17 -0
- teradataml/data/correlation_example.json +11 -0
- teradataml/data/covid_confirm_sd.csv +83 -0
- teradataml/data/coxhazardratio_example.json +39 -0
- teradataml/data/coxph_example.json +15 -0
- teradataml/data/coxsurvival_example.json +28 -0
- teradataml/data/cpt.csv +41 -0
- teradataml/data/credit_ex_merged.csv +45 -0
- teradataml/data/creditcard_data.csv +1001 -0
- teradataml/data/customer_loyalty.csv +301 -0
- teradataml/data/customer_loyalty_newseq.csv +31 -0
- teradataml/data/customer_segmentation_test.csv +2628 -0
- teradataml/data/customer_segmentation_train.csv +8069 -0
- teradataml/data/dataframe_example.json +173 -0
- teradataml/data/decisionforest_example.json +37 -0
- teradataml/data/decisionforestpredict_example.json +38 -0
- teradataml/data/decisiontree_example.json +21 -0
- teradataml/data/decisiontreepredict_example.json +45 -0
- teradataml/data/dfft2_size4_real.csv +17 -0
- teradataml/data/dfft2_test_matrix16.csv +17 -0
- teradataml/data/dfft2conv_real_4_4.csv +65 -0
- teradataml/data/diabetes.csv +443 -0
- teradataml/data/diabetes_test.csv +89 -0
- teradataml/data/dict_table.csv +5 -0
- teradataml/data/docperterm_table.csv +4 -0
- teradataml/data/docs/__init__.py +1 -0
- teradataml/data/docs/byom/__init__.py +0 -0
- teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -0
- teradataml/data/docs/byom/docs/DataikuPredict.py +217 -0
- teradataml/data/docs/byom/docs/H2OPredict.py +325 -0
- teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
- teradataml/data/docs/byom/docs/ONNXPredict.py +283 -0
- teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
- teradataml/data/docs/byom/docs/PMMLPredict.py +278 -0
- teradataml/data/docs/byom/docs/__init__.py +0 -0
- teradataml/data/docs/sqle/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_10/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Attribution.py +200 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -0
- teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +96 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +152 -0
- teradataml/data/docs/sqle/docs_17_10/FTest.py +161 -0
- teradataml/data/docs/sqle/docs_17_10/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +85 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +82 -0
- teradataml/data/docs/sqle/docs_17_10/Histogram.py +165 -0
- teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_10/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -0
- teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +135 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +166 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -0
- teradataml/data/docs/sqle/docs_17_10/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +110 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -0
- teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -0
- teradataml/data/docs/sqle/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -0
- teradataml/data/docs/sqle/docs_17_20/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Apriori.py +138 -0
- teradataml/data/docs/sqle/docs_17_20/Attribution.py +201 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -0
- teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
- teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +246 -0
- teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +280 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +136 -0
- teradataml/data/docs/sqle/docs_17_20/FTest.py +240 -0
- teradataml/data/docs/sqle/docs_17_20/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +415 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -0
- teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/Histogram.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -0
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/KNN.py +215 -0
- teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_20/NERExtractor.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_20/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +231 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +191 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -0
- teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +155 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -0
- teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -0
- teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -0
- teradataml/data/docs/sqle/docs_17_20/SMOTE.py +212 -0
- teradataml/data/docs/sqle/docs_17_20/SVM.py +414 -0
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -0
- teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -0
- teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -0
- teradataml/data/docs/sqle/docs_17_20/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_20/Shap.py +225 -0
- teradataml/data/docs/sqle/docs_17_20/Silhouette.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -0
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -0
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
- teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +267 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -0
- teradataml/data/docs/sqle/docs_17_20/TextMorph.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +160 -0
- teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -0
- teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
- teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +169 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +237 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoost.py +362 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -0
- teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/tableoperator/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_00/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_05/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +429 -0
- teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +440 -0
- teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +387 -0
- teradataml/data/docs/tableoperator/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/uaf/__init__.py +0 -0
- teradataml/data/docs/uaf/docs_17_20/ACF.py +186 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +370 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +161 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
- teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
- teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +248 -0
- teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +178 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve.py +230 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve2.py +218 -0
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT.py +204 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +192 -0
- teradataml/data/docs/uaf/docs_17_20/DIFF.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/DTW.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
- teradataml/data/docs/uaf/docs_17_20/DWT2D.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +142 -0
- teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +184 -0
- teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
- teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +206 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +143 -0
- teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +198 -0
- teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +260 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT.py +165 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
- teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/InputValidator.py +121 -0
- teradataml/data/docs/uaf/docs_17_20/LineSpec.py +156 -0
- teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +215 -0
- teradataml/data/docs/uaf/docs_17_20/MAMean.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/MInfo.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
- teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/PACF.py +157 -0
- teradataml/data/docs/uaf/docs_17_20/Portman.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +203 -0
- teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +155 -0
- teradataml/data/docs/uaf/docs_17_20/Resample.py +237 -0
- teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
- teradataml/data/docs/uaf/docs_17_20/SInfo.py +123 -0
- teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +173 -0
- teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +164 -0
- teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/Smoothma.py +208 -0
- teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +151 -0
- teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +202 -0
- teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
- teradataml/data/docs/uaf/docs_17_20/__init__.py +0 -0
- teradataml/data/dtw_example.json +18 -0
- teradataml/data/dtw_t1.csv +11 -0
- teradataml/data/dtw_t2.csv +4 -0
- teradataml/data/dwt2d_dataTable.csv +65 -0
- teradataml/data/dwt2d_example.json +16 -0
- teradataml/data/dwt_dataTable.csv +8 -0
- teradataml/data/dwt_example.json +15 -0
- teradataml/data/dwt_filterTable.csv +3 -0
- teradataml/data/dwt_filter_dim.csv +5 -0
- teradataml/data/emission.csv +9 -0
- teradataml/data/emp_table_by_dept.csv +19 -0
- teradataml/data/employee_info.csv +4 -0
- teradataml/data/employee_table.csv +6 -0
- teradataml/data/excluding_event_table.csv +2 -0
- teradataml/data/finance_data.csv +6 -0
- teradataml/data/finance_data2.csv +61 -0
- teradataml/data/finance_data3.csv +93 -0
- teradataml/data/finance_data4.csv +13 -0
- teradataml/data/fish.csv +160 -0
- teradataml/data/fm_blood2ageandweight.csv +26 -0
- teradataml/data/fmeasure_example.json +12 -0
- teradataml/data/followers_leaders.csv +10 -0
- teradataml/data/fpgrowth_example.json +12 -0
- teradataml/data/frequentpaths_example.json +29 -0
- teradataml/data/friends.csv +9 -0
- teradataml/data/fs_input.csv +33 -0
- teradataml/data/fs_input1.csv +33 -0
- teradataml/data/genData.csv +513 -0
- teradataml/data/geodataframe_example.json +40 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/glm_admissions_model.csv +12 -0
- teradataml/data/glm_example.json +56 -0
- teradataml/data/glml1l2_example.json +28 -0
- teradataml/data/glml1l2predict_example.json +54 -0
- teradataml/data/glmpredict_example.json +54 -0
- teradataml/data/gq_t1.csv +21 -0
- teradataml/data/grocery_transaction.csv +19 -0
- teradataml/data/hconvolve_complex_right.csv +5 -0
- teradataml/data/hconvolve_complex_rightmulti.csv +5 -0
- teradataml/data/histogram_example.json +12 -0
- teradataml/data/hmmdecoder_example.json +79 -0
- teradataml/data/hmmevaluator_example.json +25 -0
- teradataml/data/hmmsupervised_example.json +10 -0
- teradataml/data/hmmunsupervised_example.json +8 -0
- teradataml/data/hnsw_alter_data.csv +5 -0
- teradataml/data/hnsw_data.csv +10 -0
- teradataml/data/house_values.csv +12 -0
- teradataml/data/house_values2.csv +13 -0
- teradataml/data/housing_cat.csv +7 -0
- teradataml/data/housing_data.csv +9 -0
- teradataml/data/housing_test.csv +47 -0
- teradataml/data/housing_test_binary.csv +47 -0
- teradataml/data/housing_train.csv +493 -0
- teradataml/data/housing_train_attribute.csv +5 -0
- teradataml/data/housing_train_binary.csv +437 -0
- teradataml/data/housing_train_parameter.csv +2 -0
- teradataml/data/housing_train_response.csv +493 -0
- teradataml/data/housing_train_segment.csv +201 -0
- teradataml/data/ibm_stock.csv +370 -0
- teradataml/data/ibm_stock1.csv +370 -0
- teradataml/data/identitymatch_example.json +22 -0
- teradataml/data/idf_table.csv +4 -0
- teradataml/data/idwt2d_dataTable.csv +5 -0
- teradataml/data/idwt_dataTable.csv +8 -0
- teradataml/data/idwt_filterTable.csv +3 -0
- teradataml/data/impressions.csv +101 -0
- teradataml/data/inflation.csv +21 -0
- teradataml/data/initial.csv +3 -0
- teradataml/data/insect2Cols.csv +61 -0
- teradataml/data/insect_sprays.csv +13 -0
- teradataml/data/insurance.csv +1339 -0
- teradataml/data/interpolator_example.json +13 -0
- teradataml/data/interval_data.csv +5 -0
- teradataml/data/iris_altinput.csv +481 -0
- teradataml/data/iris_attribute_output.csv +8 -0
- teradataml/data/iris_attribute_test.csv +121 -0
- teradataml/data/iris_attribute_train.csv +481 -0
- teradataml/data/iris_category_expect_predict.csv +31 -0
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/iris_input.csv +151 -0
- teradataml/data/iris_response_train.csv +121 -0
- teradataml/data/iris_test.csv +31 -0
- teradataml/data/iris_train.csv +121 -0
- teradataml/data/join_table1.csv +4 -0
- teradataml/data/join_table2.csv +4 -0
- teradataml/data/jsons/anly_function_name.json +7 -0
- teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
- teradataml/data/jsons/byom/dataikupredict.json +148 -0
- teradataml/data/jsons/byom/datarobotpredict.json +147 -0
- teradataml/data/jsons/byom/h2opredict.json +195 -0
- teradataml/data/jsons/byom/onnxembeddings.json +267 -0
- teradataml/data/jsons/byom/onnxpredict.json +187 -0
- teradataml/data/jsons/byom/pmmlpredict.json +147 -0
- teradataml/data/jsons/paired_functions.json +450 -0
- teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/16.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/16.20/Pack.json +98 -0
- teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/16.20/Unpack.json +166 -0
- teradataml/data/jsons/sqle/16.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.00/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.00/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.00/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.00/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.05/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.05/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.05/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.05/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.10/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -0
- teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -0
- teradataml/data/jsons/sqle/17.10/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -0
- teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -0
- teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +69 -0
- teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -0
- teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_Histogram.json +133 -0
- teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +183 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +66 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +197 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +112 -0
- teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +157 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +148 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +119 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -0
- teradataml/data/jsons/sqle/17.10/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.10/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -0
- teradataml/data/jsons/sqle/17.20/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -0
- teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +149 -0
- teradataml/data/jsons/sqle/17.20/TD_Apriori.json +181 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
- teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -0
- teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +260 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -0
- teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -0
- teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeans.json +232 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +87 -0
- teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -0
- teradataml/data/jsons/sqle/17.20/TD_NERExtractor.json +145 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +316 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +124 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -0
- teradataml/data/jsons/sqle/17.20/TD_ROC.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +138 -0
- teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_SMOTE.json +267 -0
- teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -0
- teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +310 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +120 -0
- teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +194 -0
- teradataml/data/jsons/sqle/17.20/TD_Shap.json +221 -0
- teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +143 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_TextMorph.json +134 -0
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +297 -0
- teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -0
- teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
- teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +183 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -0
- teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -0
- teradataml/data/jsons/sqle/17.20/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +370 -0
- teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +460 -0
- teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +385 -0
- teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +400 -0
- teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +401 -0
- teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +384 -0
- teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +384 -0
- teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
- teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
- teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
- teradataml/data/jsons/tableoperator/17.00/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.10/read_nos.json +184 -0
- teradataml/data/jsons/tableoperator/17.10/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/tableoperator/17.20/read_nos.json +183 -0
- teradataml/data/jsons/tableoperator/17.20/write_nos.json +224 -0
- teradataml/data/jsons/uaf/17.20/TD_ACF.json +132 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +396 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +77 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +153 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
- teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +107 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +106 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +89 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +104 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +66 -0
- teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +87 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT.json +134 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +144 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_DIFF.json +92 -0
- teradataml/data/jsons/uaf/17.20/TD_DTW.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
- teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +39 -0
- teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +85 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +71 -0
- teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +139 -0
- teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECASTER.json +313 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +81 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
- teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +64 -0
- teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
- teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +182 -0
- teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +103 -0
- teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +181 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_MINFO.json +67 -0
- teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +179 -0
- teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +119 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +98 -0
- teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +194 -0
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
- teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +143 -0
- teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +90 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +80 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -0
- teradataml/data/jsons/uaf/17.20/TD_SINFO.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +163 -0
- teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +112 -0
- teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -0
- teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
- teradataml/data/kmeans_example.json +23 -0
- teradataml/data/kmeans_table.csv +10 -0
- teradataml/data/kmeans_us_arrests_data.csv +51 -0
- teradataml/data/knn_example.json +19 -0
- teradataml/data/knnrecommender_example.json +7 -0
- teradataml/data/knnrecommenderpredict_example.json +12 -0
- teradataml/data/lar_example.json +17 -0
- teradataml/data/larpredict_example.json +30 -0
- teradataml/data/lc_new_predictors.csv +5 -0
- teradataml/data/lc_new_reference.csv +9 -0
- teradataml/data/lda_example.json +9 -0
- teradataml/data/ldainference_example.json +15 -0
- teradataml/data/ldatopicsummary_example.json +9 -0
- teradataml/data/levendist_input.csv +13 -0
- teradataml/data/levenshteindistance_example.json +10 -0
- teradataml/data/linreg_example.json +10 -0
- teradataml/data/load_example_data.py +350 -0
- teradataml/data/loan_prediction.csv +295 -0
- teradataml/data/lungcancer.csv +138 -0
- teradataml/data/mappingdata.csv +12 -0
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/milk_timeseries.csv +157 -0
- teradataml/data/min_max_titanic.csv +4 -0
- teradataml/data/minhash_example.json +6 -0
- teradataml/data/ml_ratings.csv +7547 -0
- teradataml/data/ml_ratings_10.csv +2445 -0
- teradataml/data/mobile_data.csv +13 -0
- teradataml/data/model1_table.csv +5 -0
- teradataml/data/model2_table.csv +5 -0
- teradataml/data/models/License_file.txt +1 -0
- teradataml/data/models/License_file_empty.txt +0 -0
- teradataml/data/models/dataiku_iris_data_ann_thin +0 -0
- teradataml/data/models/dr_iris_rf +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn.onnx +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn_floattensor.onnx +0 -0
- teradataml/data/models/iris_db_glm_model.pmml +57 -0
- teradataml/data/models/iris_db_xgb_model.pmml +4471 -0
- teradataml/data/models/iris_kmeans_model +0 -0
- teradataml/data/models/iris_mojo_glm_h2o_model +0 -0
- teradataml/data/models/iris_mojo_xgb_h2o_model +0 -0
- teradataml/data/modularity_example.json +12 -0
- teradataml/data/movavg_example.json +8 -0
- teradataml/data/mtx1.csv +7 -0
- teradataml/data/mtx2.csv +13 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/mvdfft8.csv +9 -0
- teradataml/data/naivebayes_example.json +10 -0
- teradataml/data/naivebayespredict_example.json +19 -0
- teradataml/data/naivebayestextclassifier2_example.json +7 -0
- teradataml/data/naivebayestextclassifier_example.json +8 -0
- teradataml/data/naivebayestextclassifierpredict_example.json +32 -0
- teradataml/data/name_Find_configure.csv +10 -0
- teradataml/data/namedentityfinder_example.json +14 -0
- teradataml/data/namedentityfinderevaluator_example.json +10 -0
- teradataml/data/namedentityfindertrainer_example.json +6 -0
- teradataml/data/nb_iris_input_test.csv +31 -0
- teradataml/data/nb_iris_input_train.csv +121 -0
- teradataml/data/nbp_iris_model.csv +13 -0
- teradataml/data/ner_dict.csv +8 -0
- teradataml/data/ner_extractor_text.csv +2 -0
- teradataml/data/ner_input_eng.csv +7 -0
- teradataml/data/ner_rule.csv +5 -0
- teradataml/data/ner_sports_test2.csv +29 -0
- teradataml/data/ner_sports_train.csv +501 -0
- teradataml/data/nerevaluator_example.json +6 -0
- teradataml/data/nerextractor_example.json +18 -0
- teradataml/data/nermem_sports_test.csv +18 -0
- teradataml/data/nermem_sports_train.csv +51 -0
- teradataml/data/nertrainer_example.json +7 -0
- teradataml/data/ngrams_example.json +7 -0
- teradataml/data/notebooks/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -0
- teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -0
- teradataml/data/notebooks/sqlalchemy/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -0
- teradataml/data/npath_example.json +23 -0
- teradataml/data/ntree_example.json +14 -0
- teradataml/data/numeric_strings.csv +5 -0
- teradataml/data/numerics.csv +4 -0
- teradataml/data/ocean_buoy.csv +17 -0
- teradataml/data/ocean_buoy2.csv +17 -0
- teradataml/data/ocean_buoys.csv +28 -0
- teradataml/data/ocean_buoys2.csv +10 -0
- teradataml/data/ocean_buoys_nonpti.csv +28 -0
- teradataml/data/ocean_buoys_seq.csv +29 -0
- teradataml/data/onehot_encoder_train.csv +4 -0
- teradataml/data/openml_example.json +92 -0
- teradataml/data/optional_event_table.csv +4 -0
- teradataml/data/orders1.csv +11 -0
- teradataml/data/orders1_12.csv +13 -0
- teradataml/data/orders_ex.csv +4 -0
- teradataml/data/pack_example.json +9 -0
- teradataml/data/package_tracking.csv +19 -0
- teradataml/data/package_tracking_pti.csv +19 -0
- teradataml/data/pagerank_example.json +13 -0
- teradataml/data/paragraphs_input.csv +6 -0
- teradataml/data/pathanalyzer_example.json +8 -0
- teradataml/data/pathgenerator_example.json +8 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/pattern_matching_data.csv +11 -0
- teradataml/data/payment_fraud_dataset.csv +10001 -0
- teradataml/data/peppers.png +0 -0
- teradataml/data/phrases.csv +7 -0
- teradataml/data/pivot_example.json +9 -0
- teradataml/data/pivot_input.csv +22 -0
- teradataml/data/playerRating.csv +31 -0
- teradataml/data/pos_input.csv +40 -0
- teradataml/data/postagger_example.json +7 -0
- teradataml/data/posttagger_output.csv +44 -0
- teradataml/data/production_data.csv +17 -0
- teradataml/data/production_data2.csv +7 -0
- teradataml/data/randomsample_example.json +32 -0
- teradataml/data/randomwalksample_example.json +9 -0
- teradataml/data/rank_table.csv +6 -0
- teradataml/data/real_values.csv +14 -0
- teradataml/data/ref_mobile_data.csv +4 -0
- teradataml/data/ref_mobile_data_dense.csv +2 -0
- teradataml/data/ref_url.csv +17 -0
- teradataml/data/restaurant_reviews.csv +7 -0
- teradataml/data/retail_churn_table.csv +27772 -0
- teradataml/data/river_data.csv +145 -0
- teradataml/data/roc_example.json +8 -0
- teradataml/data/roc_input.csv +101 -0
- teradataml/data/rule_inputs.csv +6 -0
- teradataml/data/rule_table.csv +2 -0
- teradataml/data/sales.csv +7 -0
- teradataml/data/sales_transaction.csv +501 -0
- teradataml/data/salesdata.csv +342 -0
- teradataml/data/sample_cities.csv +3 -0
- teradataml/data/sample_shapes.csv +11 -0
- teradataml/data/sample_streets.csv +3 -0
- teradataml/data/sampling_example.json +16 -0
- teradataml/data/sax_example.json +17 -0
- teradataml/data/scale_attributes.csv +3 -0
- teradataml/data/scale_example.json +74 -0
- teradataml/data/scale_housing.csv +11 -0
- teradataml/data/scale_housing_test.csv +6 -0
- teradataml/data/scale_input_part_sparse.csv +31 -0
- teradataml/data/scale_input_partitioned.csv +16 -0
- teradataml/data/scale_input_sparse.csv +11 -0
- teradataml/data/scale_parameters.csv +3 -0
- teradataml/data/scale_stat.csv +11 -0
- teradataml/data/scalebypartition_example.json +13 -0
- teradataml/data/scalemap_example.json +13 -0
- teradataml/data/scalesummary_example.json +12 -0
- teradataml/data/score_category.csv +101 -0
- teradataml/data/score_summary.csv +4 -0
- teradataml/data/script_example.json +10 -0
- teradataml/data/scripts/deploy_script.py +84 -0
- teradataml/data/scripts/lightgbm/dataset.template +175 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +264 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +234 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +177 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/mapper.py +16 -0
- teradataml/data/scripts/mapper_replace.py +16 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +205 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +148 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +144 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +166 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +161 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +145 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +327 -0
- teradataml/data/sdk/modelops/modelops_spec.json +101737 -0
- teradataml/data/seeds.csv +10 -0
- teradataml/data/sentenceextractor_example.json +7 -0
- teradataml/data/sentiment_extract_input.csv +11 -0
- teradataml/data/sentiment_train.csv +16 -0
- teradataml/data/sentiment_word.csv +20 -0
- teradataml/data/sentiment_word_input.csv +20 -0
- teradataml/data/sentimentextractor_example.json +24 -0
- teradataml/data/sentimenttrainer_example.json +8 -0
- teradataml/data/sequence_table.csv +10 -0
- teradataml/data/seriessplitter_example.json +8 -0
- teradataml/data/sessionize_example.json +17 -0
- teradataml/data/sessionize_table.csv +116 -0
- teradataml/data/setop_test1.csv +24 -0
- teradataml/data/setop_test2.csv +22 -0
- teradataml/data/soc_nw_edges.csv +11 -0
- teradataml/data/soc_nw_vertices.csv +8 -0
- teradataml/data/souvenir_timeseries.csv +168 -0
- teradataml/data/sparse_iris_attribute.csv +5 -0
- teradataml/data/sparse_iris_test.csv +121 -0
- teradataml/data/sparse_iris_train.csv +601 -0
- teradataml/data/star1.csv +6 -0
- teradataml/data/star_pivot.csv +8 -0
- teradataml/data/state_transition.csv +5 -0
- teradataml/data/stock_data.csv +53 -0
- teradataml/data/stock_movement.csv +11 -0
- teradataml/data/stock_vol.csv +76 -0
- teradataml/data/stop_words.csv +8 -0
- teradataml/data/store_sales.csv +37 -0
- teradataml/data/stringsimilarity_example.json +8 -0
- teradataml/data/strsimilarity_input.csv +13 -0
- teradataml/data/students.csv +101 -0
- teradataml/data/svm_iris_input_test.csv +121 -0
- teradataml/data/svm_iris_input_train.csv +481 -0
- teradataml/data/svm_iris_model.csv +7 -0
- teradataml/data/svmdense_example.json +10 -0
- teradataml/data/svmdensepredict_example.json +19 -0
- teradataml/data/svmsparse_example.json +8 -0
- teradataml/data/svmsparsepredict_example.json +14 -0
- teradataml/data/svmsparsesummary_example.json +8 -0
- teradataml/data/target_mobile_data.csv +13 -0
- teradataml/data/target_mobile_data_dense.csv +5 -0
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/tdnerextractor_example.json +14 -0
- teradataml/data/templatedata.csv +1201 -0
- teradataml/data/templates/open_source_ml.json +11 -0
- teradataml/data/teradata_icon.ico +0 -0
- teradataml/data/teradataml_example.json +1473 -0
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_loan_prediction.csv +53 -0
- teradataml/data/test_pacf_12.csv +37 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/test_river2.csv +109 -0
- teradataml/data/text_inputs.csv +6 -0
- teradataml/data/textchunker_example.json +8 -0
- teradataml/data/textclassifier_example.json +7 -0
- teradataml/data/textclassifier_input.csv +7 -0
- teradataml/data/textclassifiertrainer_example.json +7 -0
- teradataml/data/textmorph_example.json +11 -0
- teradataml/data/textparser_example.json +15 -0
- teradataml/data/texttagger_example.json +12 -0
- teradataml/data/texttokenizer_example.json +7 -0
- teradataml/data/texttrainer_input.csv +11 -0
- teradataml/data/tf_example.json +7 -0
- teradataml/data/tfidf_example.json +14 -0
- teradataml/data/tfidf_input1.csv +201 -0
- teradataml/data/tfidf_train.csv +6 -0
- teradataml/data/time_table1.csv +535 -0
- teradataml/data/time_table2.csv +14 -0
- teradataml/data/timeseriesdata.csv +1601 -0
- teradataml/data/timeseriesdatasetsd4.csv +105 -0
- teradataml/data/timestamp_data.csv +4 -0
- teradataml/data/titanic.csv +892 -0
- teradataml/data/titanic_dataset_unpivoted.csv +19 -0
- teradataml/data/to_num_data.csv +4 -0
- teradataml/data/tochar_data.csv +5 -0
- teradataml/data/token_table.csv +696 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/train_tracking.csv +28 -0
- teradataml/data/trans_dense.csv +16 -0
- teradataml/data/trans_sparse.csv +55 -0
- teradataml/data/transformation_table.csv +6 -0
- teradataml/data/transformation_table_new.csv +2 -0
- teradataml/data/tv_spots.csv +16 -0
- teradataml/data/twod_climate_data.csv +117 -0
- teradataml/data/uaf_example.json +529 -0
- teradataml/data/univariatestatistics_example.json +9 -0
- teradataml/data/unpack_example.json +10 -0
- teradataml/data/unpivot_example.json +25 -0
- teradataml/data/unpivot_input.csv +8 -0
- teradataml/data/url_data.csv +10 -0
- teradataml/data/us_air_pass.csv +37 -0
- teradataml/data/us_population.csv +624 -0
- teradataml/data/us_states_shapes.csv +52 -0
- teradataml/data/varmax_example.json +18 -0
- teradataml/data/vectordistance_example.json +30 -0
- teradataml/data/ville_climatedata.csv +121 -0
- teradataml/data/ville_tempdata.csv +12 -0
- teradataml/data/ville_tempdata1.csv +12 -0
- teradataml/data/ville_temperature.csv +11 -0
- teradataml/data/waveletTable.csv +1605 -0
- teradataml/data/waveletTable2.csv +1605 -0
- teradataml/data/weightedmovavg_example.json +9 -0
- teradataml/data/wft_testing.csv +5 -0
- teradataml/data/windowdfft.csv +16 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/data/word_embed_input_table1.csv +6 -0
- teradataml/data/word_embed_input_table2.csv +5 -0
- teradataml/data/word_embed_model.csv +23 -0
- teradataml/data/words_input.csv +13 -0
- teradataml/data/xconvolve_complex_left.csv +6 -0
- teradataml/data/xconvolve_complex_leftmulti.csv +6 -0
- teradataml/data/xgboost_example.json +36 -0
- teradataml/data/xgboostpredict_example.json +32 -0
- teradataml/data/ztest_example.json +16 -0
- teradataml/dataframe/__init__.py +0 -0
- teradataml/dataframe/copy_to.py +2446 -0
- teradataml/dataframe/data_transfer.py +2840 -0
- teradataml/dataframe/dataframe.py +20908 -0
- teradataml/dataframe/dataframe_utils.py +2114 -0
- teradataml/dataframe/fastload.py +794 -0
- teradataml/dataframe/functions.py +2110 -0
- teradataml/dataframe/indexer.py +424 -0
- teradataml/dataframe/row.py +160 -0
- teradataml/dataframe/setop.py +1171 -0
- teradataml/dataframe/sql.py +10904 -0
- teradataml/dataframe/sql_function_parameters.py +440 -0
- teradataml/dataframe/sql_functions.py +652 -0
- teradataml/dataframe/sql_interfaces.py +220 -0
- teradataml/dataframe/vantage_function_types.py +675 -0
- teradataml/dataframe/window.py +694 -0
- teradataml/dbutils/__init__.py +3 -0
- teradataml/dbutils/dbutils.py +2871 -0
- teradataml/dbutils/filemgr.py +318 -0
- teradataml/gen_ai/__init__.py +2 -0
- teradataml/gen_ai/convAI.py +473 -0
- teradataml/geospatial/__init__.py +4 -0
- teradataml/geospatial/geodataframe.py +1105 -0
- teradataml/geospatial/geodataframecolumn.py +392 -0
- teradataml/geospatial/geometry_types.py +926 -0
- teradataml/hyperparameter_tuner/__init__.py +1 -0
- teradataml/hyperparameter_tuner/optimizer.py +4115 -0
- teradataml/hyperparameter_tuner/utils.py +303 -0
- teradataml/lib/__init__.py +0 -0
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/lib/libaed_0_1_aarch64.so +0 -0
- teradataml/lib/libaed_0_1_ppc64le.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/_base.py +1321 -0
- teradataml/opensource/_class.py +464 -0
- teradataml/opensource/_constants.py +61 -0
- teradataml/opensource/_lightgbm.py +949 -0
- teradataml/opensource/_sklearn.py +1008 -0
- teradataml/opensource/_wrapper_utils.py +267 -0
- teradataml/options/__init__.py +148 -0
- teradataml/options/configure.py +489 -0
- teradataml/options/display.py +187 -0
- teradataml/plot/__init__.py +3 -0
- teradataml/plot/axis.py +1427 -0
- teradataml/plot/constants.py +15 -0
- teradataml/plot/figure.py +431 -0
- teradataml/plot/plot.py +810 -0
- teradataml/plot/query_generator.py +83 -0
- teradataml/plot/subplot.py +216 -0
- teradataml/scriptmgmt/UserEnv.py +4273 -0
- teradataml/scriptmgmt/__init__.py +3 -0
- teradataml/scriptmgmt/lls_utils.py +2157 -0
- teradataml/sdk/README.md +79 -0
- teradataml/sdk/__init__.py +4 -0
- teradataml/sdk/_auth_modes.py +422 -0
- teradataml/sdk/_func_params.py +487 -0
- teradataml/sdk/_json_parser.py +453 -0
- teradataml/sdk/_openapi_spec_constants.py +249 -0
- teradataml/sdk/_utils.py +236 -0
- teradataml/sdk/api_client.py +900 -0
- teradataml/sdk/constants.py +62 -0
- teradataml/sdk/modelops/__init__.py +98 -0
- teradataml/sdk/modelops/_client.py +409 -0
- teradataml/sdk/modelops/_constants.py +304 -0
- teradataml/sdk/modelops/models.py +2308 -0
- teradataml/sdk/spinner.py +107 -0
- teradataml/series/__init__.py +0 -0
- teradataml/series/series.py +537 -0
- teradataml/series/series_utils.py +71 -0
- teradataml/store/__init__.py +12 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +658 -0
- teradataml/store/feature_store/feature_store.py +4814 -0
- teradataml/store/feature_store/mind_map.py +639 -0
- teradataml/store/feature_store/models.py +7330 -0
- teradataml/store/feature_store/utils.py +390 -0
- teradataml/table_operators/Apply.py +979 -0
- teradataml/table_operators/Script.py +1739 -0
- teradataml/table_operators/TableOperator.py +1343 -0
- teradataml/table_operators/__init__.py +2 -0
- teradataml/table_operators/apply_query_generator.py +262 -0
- teradataml/table_operators/query_generator.py +493 -0
- teradataml/table_operators/table_operator_query_generator.py +462 -0
- teradataml/table_operators/table_operator_util.py +726 -0
- teradataml/table_operators/templates/dataframe_apply.template +184 -0
- teradataml/table_operators/templates/dataframe_map.template +176 -0
- teradataml/table_operators/templates/dataframe_register.template +73 -0
- teradataml/table_operators/templates/dataframe_udf.template +67 -0
- teradataml/table_operators/templates/script_executor.template +170 -0
- teradataml/telemetry_utils/__init__.py +0 -0
- teradataml/telemetry_utils/queryband.py +53 -0
- teradataml/utils/__init__.py +0 -0
- teradataml/utils/docstring.py +527 -0
- teradataml/utils/dtypes.py +943 -0
- teradataml/utils/internal_buffer.py +122 -0
- teradataml/utils/print_versions.py +206 -0
- teradataml/utils/utils.py +451 -0
- teradataml/utils/validators.py +3305 -0
- teradataml-20.0.0.8.dist-info/METADATA +2804 -0
- teradataml-20.0.0.8.dist-info/RECORD +1208 -0
- teradataml-20.0.0.8.dist-info/WHEEL +5 -0
- teradataml-20.0.0.8.dist-info/top_level.txt +1 -0
- teradataml-20.0.0.8.dist-info/zip-safe +1 -0
|
@@ -0,0 +1,1759 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Unpublished work.
|
|
3
|
+
Copyright (c) 2021 by Teradata Corporation. All rights reserved.
|
|
4
|
+
TERADATA CORPORATION CONFIDENTIAL AND TRADE SECRET
|
|
5
|
+
|
|
6
|
+
Primary Owner: pradeep.garre@teradata.com
|
|
7
|
+
Secondary Owner: PankajVinod.Purandare@teradata.com
|
|
8
|
+
|
|
9
|
+
This file implements the core framework that allows user to load BYOM to Vantage.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import warnings

from teradatasql import OperationalError as SqlOperationalError
from teradatasqlalchemy.types import *
from teradatasqlalchemy.types import _TDType

from teradataml.catalog.model_cataloging_utils import __get_like_filter_expression_on_col
from teradataml.common.constants import ModelCatalogingConstants as mac
from teradataml.common.exceptions import TeradataMlException
from teradataml.common.messagecodes import MessageCodes
from teradataml.common.messages import Messages
from teradataml.common.utils import UtilFuncs
from teradataml.context.context import _get_current_databasename, get_connection, get_context
from teradataml.dataframe.dataframe import DataFrame, in_schema
from teradataml.dbutils.dbutils import _get_quoted_object_name, _create_table
from teradataml.options.configure import configure
from teradataml.options.display import display
from teradataml.telemetry_utils.queryband import collect_queryband
from teradataml.utils.dtypes import _Dtypes
from teradataml.utils.utils import execute_sql
from teradataml.utils.validators import _Validators
|
|
30
|
+
|
|
31
|
+
validator = _Validators()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def __check_if_model_exists(model_id,
|
|
35
|
+
table_name,
|
|
36
|
+
schema_name=None,
|
|
37
|
+
raise_error_if_model_found=False,
|
|
38
|
+
raise_error_if_model_not_found=False):
|
|
39
|
+
"""
|
|
40
|
+
DESCRIPTION:
|
|
41
|
+
Internal function to check if byom model with given "model_id", exists or not.
|
|
42
|
+
|
|
43
|
+
PARAMETERS:
|
|
44
|
+
model_id:
|
|
45
|
+
Required Argument.
|
|
46
|
+
Specifies the name of the model identifier to check whether it exists or not.
|
|
47
|
+
Types: str
|
|
48
|
+
|
|
49
|
+
table_name:
|
|
50
|
+
Required Argument.
|
|
51
|
+
Specifies the table name that may or may not contain entry for the model.
|
|
52
|
+
Types: str
|
|
53
|
+
|
|
54
|
+
schema_name:
|
|
55
|
+
Optional Argument.
|
|
56
|
+
Specifies the name of the schema, to look out for table specified in
|
|
57
|
+
"table_name". If not specified, then "table_name" is looked over in
|
|
58
|
+
the current database.
|
|
59
|
+
Types: str
|
|
60
|
+
|
|
61
|
+
raise_error_if_model_found:
|
|
62
|
+
Optional Argument.
|
|
63
|
+
Specifies the flag to decide whether to raise error when model exists or not.
|
|
64
|
+
Default Value: False (Do not raise exception)
|
|
65
|
+
Types: bool
|
|
66
|
+
|
|
67
|
+
raise_error_if_model_not_found:
|
|
68
|
+
Optional Argument.
|
|
69
|
+
Specifies the flag to decide whether to raise error when model is found or not.
|
|
70
|
+
Default Value: False (Do not raise exception)
|
|
71
|
+
Types: bool
|
|
72
|
+
|
|
73
|
+
RETURNS:
|
|
74
|
+
bool.
|
|
75
|
+
|
|
76
|
+
RAISES:
|
|
77
|
+
TeradataMlException - MODEL_ALREADY_EXISTS, MODEL_NOT_FOUND
|
|
78
|
+
|
|
79
|
+
EXAMPLES:
|
|
80
|
+
>>> meta_df = __check_if_model_exists("glm_out")
|
|
81
|
+
"""
|
|
82
|
+
# If external model, create DataFrame on table specified in parameters within
|
|
83
|
+
# current schema. Else, create DataFrame on table & schema specified in parameters.
|
|
84
|
+
schema_name = schema_name if schema_name is not None else _get_current_databasename()
|
|
85
|
+
models_meta_df = DataFrame(in_schema(schema_name, table_name))
|
|
86
|
+
models_meta_df = models_meta_df[models_meta_df.model_id == model_id]
|
|
87
|
+
|
|
88
|
+
num_rows = models_meta_df.shape[0]
|
|
89
|
+
|
|
90
|
+
if raise_error_if_model_found:
|
|
91
|
+
if num_rows == 1:
|
|
92
|
+
# If model with name 'name' already exists.
|
|
93
|
+
raise TeradataMlException(Messages.get_message(MessageCodes.MODEL_ALREADY_EXISTS,
|
|
94
|
+
model_id),
|
|
95
|
+
MessageCodes.MODEL_ALREADY_EXISTS)
|
|
96
|
+
|
|
97
|
+
if raise_error_if_model_not_found:
|
|
98
|
+
if num_rows == 0:
|
|
99
|
+
# 'name' MODEL_NOT_FOUND
|
|
100
|
+
raise TeradataMlException(Messages.get_message(MessageCodes.MODEL_NOT_FOUND,
|
|
101
|
+
model_id, ''),
|
|
102
|
+
MessageCodes.MODEL_NOT_FOUND)
|
|
103
|
+
|
|
104
|
+
return True if num_rows == 1 else False
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def __set_validate_catalog_parameters(table_name=None, schema_name=None):
|
|
108
|
+
"""
|
|
109
|
+
DESCRIPTION:
|
|
110
|
+
Internal function to set the table and schema name
|
|
111
|
+
for byom catalog API's according to the model cataloging
|
|
112
|
+
parameters and the user inputs.
|
|
113
|
+
|
|
114
|
+
PARAMETERS:
|
|
115
|
+
table_name:
|
|
116
|
+
Optional Argument.
|
|
117
|
+
Specifies the name of the byom catalog table.
|
|
118
|
+
Notes:
|
|
119
|
+
* One must either specify this argument or set the byom model catalog table
|
|
120
|
+
name using set_byom_catalog().
|
|
121
|
+
* If none of these arguments are set, exception is raised; If both arguments
|
|
122
|
+
are set, the settings in function call take precedence and is used for
|
|
123
|
+
function execution when saving an model.
|
|
124
|
+
Types: str
|
|
125
|
+
|
|
126
|
+
schema_name:
|
|
127
|
+
Optional Argument.
|
|
128
|
+
Specifies the name of the schema/database in which the table specified in
|
|
129
|
+
"table_name" is looked up.
|
|
130
|
+
Notes:
|
|
131
|
+
* One must either specify this argument or set the byom model catalog schema
|
|
132
|
+
name using set_byom_catalog().
|
|
133
|
+
* If none of these arguments are set, exception is raised; If both arguments
|
|
134
|
+
are set, the settings in function call take precedence and is used for
|
|
135
|
+
function execution when saving an model.
|
|
136
|
+
Types: str
|
|
137
|
+
|
|
138
|
+
RETURNS:
|
|
139
|
+
List of "table_name" and "schema_name".
|
|
140
|
+
|
|
141
|
+
RAISES:
|
|
142
|
+
ValueError
|
|
143
|
+
|
|
144
|
+
EXAMPLES:
|
|
145
|
+
>>> __set_validate_catalog_parameters(table_name = "model_catalog_table",
|
|
146
|
+
schema_name="model_catalog_schema")
|
|
147
|
+
"""
|
|
148
|
+
# Raise an error if schema_name is provided and table_name is not provided
|
|
149
|
+
_Validators._validate_dependent_argument("schema_name", schema_name, "table_name", table_name)
|
|
150
|
+
|
|
151
|
+
# Set the schema_name to default schema_name if only table_name is provided.
|
|
152
|
+
# Set the table_name and schema_name to model catalog session level variables if not provided.
|
|
153
|
+
schema_name = schema_name if schema_name is not None else\
|
|
154
|
+
_get_current_databasename() if table_name is not None else configure._byom_model_catalog_database
|
|
155
|
+
table_name = table_name if table_name is not None else configure._byom_model_catalog_table
|
|
156
|
+
|
|
157
|
+
# Check whether table information is present and not None.
|
|
158
|
+
additional_error = Messages.get_message(MessageCodes.EITHER_FUNCTION_OR_ARGS, "catalog", "set_byom_catalog",
|
|
159
|
+
"catalog", "")
|
|
160
|
+
validator._validate_argument_is_not_None(table_name, "table_name", additional_error)
|
|
161
|
+
|
|
162
|
+
return [table_name, schema_name]
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
@collect_queryband(queryband="stByomCtlg")
|
|
166
|
+
def set_byom_catalog(table_name,
|
|
167
|
+
schema_name=None):
|
|
168
|
+
"""
|
|
169
|
+
DESCRIPTION:
|
|
170
|
+
Function to set the BYOM model catalog information to be used by
|
|
171
|
+
BYOM model cataloging APIs such as:
|
|
172
|
+
* delete_byom
|
|
173
|
+
* list_byom
|
|
174
|
+
* retrieve_byom
|
|
175
|
+
* save_byom
|
|
176
|
+
* set_license
|
|
177
|
+
|
|
178
|
+
PARAMETERS:
|
|
179
|
+
table_name:
|
|
180
|
+
Required Argument.
|
|
181
|
+
Specifies the name of the table to be used for BYOM model cataloging.
|
|
182
|
+
This table will be used for saving, retrieving BYOM model information
|
|
183
|
+
by BYOM model cataloging APIs.
|
|
184
|
+
Types: str.
|
|
185
|
+
|
|
186
|
+
schema_name:
|
|
187
|
+
Optional Argument.
|
|
188
|
+
Specifies the name of the schema/database in which the table specified in
|
|
189
|
+
"table_name" is looked up. If not specified, then table is looked
|
|
190
|
+
up in current schema/database.
|
|
191
|
+
Types: str
|
|
192
|
+
|
|
193
|
+
RETURNS:
|
|
194
|
+
None
|
|
195
|
+
|
|
196
|
+
RAISES:
|
|
197
|
+
TeradataMlException
|
|
198
|
+
|
|
199
|
+
EXAMPLES:
|
|
200
|
+
>>> from teradataml import set_byom_catalog
|
|
201
|
+
|
|
202
|
+
# Example 1 - Set global parameters table_name = 'model_table_name' and schema_name = 'model_schema_name';
|
|
203
|
+
>>> set_byom_catalog(table_name='model_table_name', schema_name='model_schema_name')
|
|
204
|
+
The model cataloging parameters are set to table_name='model_table_name' and schema_name='model_schema_name'
|
|
205
|
+
|
|
206
|
+
"""
|
|
207
|
+
|
|
208
|
+
# Let's perform argument validations.
|
|
209
|
+
# Create argument information matrix to do parameter checking.
|
|
210
|
+
__arg_info_matrix = []
|
|
211
|
+
__arg_info_matrix.append(["table_name", table_name, False, str, True])
|
|
212
|
+
__arg_info_matrix.append(["schema_name", schema_name, True, str, True])
|
|
213
|
+
|
|
214
|
+
# Make sure that a correct type of values has been supplied to the arguments.
|
|
215
|
+
validator._validate_function_arguments(__arg_info_matrix)
|
|
216
|
+
|
|
217
|
+
schema_name = schema_name if schema_name is not None else _get_current_databasename()
|
|
218
|
+
|
|
219
|
+
# Perform required validations for the API.
|
|
220
|
+
# Check whether the table given exist or not.
|
|
221
|
+
conn = get_connection()
|
|
222
|
+
validator._check_table_exists(conn, table_name, schema_name)
|
|
223
|
+
|
|
224
|
+
configure._byom_model_catalog_table = table_name
|
|
225
|
+
configure._byom_model_catalog_database = schema_name
|
|
226
|
+
print("The model cataloging parameters are set to table_name='{}' and "
|
|
227
|
+
"schema_name='{}'".format(table_name, schema_name))
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
@collect_queryband(queryband="svByom")
def save_byom(model_id,
              model_file,
              table_name=None,
              schema_name=None,
              additional_columns=None,
              additional_columns_types=None):
    """
    DESCRIPTION:
        Function to save externally trained models in Teradata Vantage in the
        specified table. Function allows user to save various models stored in
        different formats such as PMML, MOJO etc. If the specified model table
        exists in Vantage, model data is saved in the same, otherwise model table
        is created first based on the user parameters and then model data is
        saved. See below 'Note' section for more details.

        Notes:
            If user specified table exists, then
                a. Table must have at least two columns with names and types as
                   specified below:
                    * 'model_id' of type VARCHAR of any length and
                    * 'model' column of type BLOB.
                b. User can choose to have the additional columns as well to store
                   additional information of the model. This information can be passed
                   using "additional_columns" parameter. See "additional_columns"
                   argument description for more details.
            If user specified table does not exist, then
                a. Function creates the table with the name specified in "table_name".
                b. Table is created in the schema specified in "schema_name". If
                   "schema_name" is not specified, then current schema is considered
                   for "schema_name".
                c. Table is created with columns:
                    * 'model_id' with type specified in "additional_columns_types". If
                      not specified, table is created with 'model_id' column as VARCHAR(128).
                    * 'model' with type specified in "additional_columns_types". If
                      not specified, table is created with 'model' column as BLOB.
                    * Columns specified in "additional_columns" parameter. See "additional_columns"
                      argument description for more details.
                    * Datatypes of these additional columns are either taken from
                      the values passed to "additional_columns_types" or inferred
                      from the values passed to the "additional_columns". See
                      "additional_columns_types" argument description for more details.

    PARAMETERS:
        model_id:
            Required Argument.
            Specifies the unique model identifier for model.
            Types: str.

        model_file:
            Required Argument.
            Specifies the absolute path of the file which has model information.
            Types: str

        table_name:
            Optional Argument.
            Specifies the name of the table where model is saved. If "table_name"
            does not exist, this function creates table according to "additional_columns"
            and "additional_columns_types".
            Notes:
                * One must either specify this argument or set the byom model catalog table
                  name using set_byom_catalog().
                * If none of these arguments are set, exception is raised; If both arguments
                  are set, the settings in save_byom() take precedence and is used for
                  function execution when saving an model.
            Types: str

        schema_name:
            Optional Argument.
            Specifies the name of the schema/database in which the table specified in
            "table_name" is looked up.
            Notes:
                * One must either specify this argument and table_name argument
                  or set the byom model catalog schema and table name using set_byom_catalog().
                * If none of these arguments are set, exception is raised; If both arguments
                  are set, the settings in save_byom() take precedence and is used for
                  function execution when saving an model.
                * If user specifies schema_name argument table_name argument has to be specified,
                  else exception is raised.
            Types: str

        additional_columns:
            Optional Argument.
            Specifies the additional information about the model to be saved in the
            model table. Additional information about the model is passed as key value
            pair, where key is the name of the column and value is data to be stored
            in that column for the model being saved.
            Notes:
                1. Following are the allowed types for the values passed in dictionary:
                    * int
                    * float
                    * str
                    * bool
                    * datetime.datetime
                    * datetime.date
                    * datetime.time
                2. "additional_columns" does not accept keys model_id and model.
            Types: dict

        additional_columns_types:
            Optional Argument.
            Specifies the column type of additional columns. These column types are used
            while creating the table using the columns specified in "additional_columns"
            argument. Additional column datatype information is passed as key value pair
            with key being the column name and value as teradatasqlalchemy.types.
            Notes:
                1. If, any of the column type for additional columns are not specified in
                   "additional_columns_types", it then derives the column type according
                   the below table:
                   +---------------------------+-----------------------------------------+
                   |       Python Type         |        teradatasqlalchemy Type          |
                   +---------------------------+-----------------------------------------+
                   | str                       | VARCHAR(1024)                           |
                   +---------------------------+-----------------------------------------+
                   | int                       | INTEGER                                 |
                   +---------------------------+-----------------------------------------+
                   | bool                      | BYTEINT                                 |
                   +---------------------------+-----------------------------------------+
                   | float                     | FLOAT                                   |
                   +---------------------------+-----------------------------------------+
                   | datetime                  | TIMESTAMP                               |
                   +---------------------------+-----------------------------------------+
                   | date                      | DATE                                    |
                   +---------------------------+-----------------------------------------+
                   | time                      | TIME                                    |
                   +---------------------------+-----------------------------------------+
                2. Columns model_id, with column type as VARCHAR and model, with column type
                   as BLOB are mandatory for table. So, for the columns model_id and model,
                   acceptable values for "additional_columns_types" are VARCHAR and BLOB
                   respectively.
                3. This argument is ignored if table exists.
            Types: dict

    Note:
        The following table describes the system behaviour in different scenarios:
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        |         In save_byom()       |  In set_byom_catalog()              |           System Behavior           |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | table_name     | schema_name | table_name            | schema_name |                                     |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Set            | Set         | Set                   | Set         | schema_name and table_name in       |
        |                |             |                       |             | save_byom() are used for            |
        |                |             |                       |             | function execution.                 |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Set            | Set         | Not set               | Not set     | schema_name and table_name in       |
        |                |             |                       |             | save_byom() is used for             |
        |                |             |                       |             | function execution.                 |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Set            | Not set     | Set                   | Set         | table_name from save_byom()         |
        |                |             |                       |             | is used and schema name             |
        |                |             |                       |             | associated with the current         |
        |                |             |                       |             | context is used.                    |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Not set        | Set         | Set                   | Set         | Exception is raised.                |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Not set        | Not set     | Set                   | Set         | table_name and schema_name          |
        |                |             |                       |             | from set_byom_catalog()             |
        |                |             |                       |             | are used for function execution.    |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Not set        | Not set     | Set                   | Not set     | table_name from set_byom_catalog()  |
        |                |             |                       |             | is used and schema name             |
        |                |             |                       |             | associated with the current         |
        |                |             |                       |             | context is used.                    |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+
        | Not set        | Not set     | Not set               | Not set     | Exception is raised                 |
        +----------------+-------------+-----------------------+-------------+-------------------------------------+

    RETURNS:
        None.

    RAISES:
        TeradataMlException, TypeError, ValueError

    EXAMPLES:

        >>> import teradataml, os, datetime
        >>> model_file = os.path.join(os.path.dirname(teradataml.__file__), 'data', 'models', 'iris_kmeans_model')
        >>> from teradataml import save_byom

        # Example 1 - Create table "byom_model" with additional columns by specifying the type
        #             of the columns as below and save the model in it.
        #             +---------------------------+-----------------------------------------+
        #             |       Column name         |        Column Type                      |
        #             +---------------------------+-----------------------------------------+
        #             | model_id                  | VARCHAR(128)                            |
        #             +---------------------------+-----------------------------------------+
        #             | model                     | BLOB                                    |
        #             +---------------------------+-----------------------------------------+
        #             | Description               | VARCHAR(2000)                           |
        #             +---------------------------+-----------------------------------------+
        #             | UserId                    | NUMBER(5)                               |
        #             +---------------------------+-----------------------------------------+
        #             | ProductionReady           | BYTEINT                                 |
        #             +---------------------------+-----------------------------------------+
        #             | ModelEfficiency           | NUMBER(11,10)                           |
        #             +---------------------------+-----------------------------------------+
        #             | ModelSavedTime            | TIMESTAMP                               |
        #             +---------------------------+-----------------------------------------+
        #             | ModelGeneratedDate        | DATE                                    |
        #             +---------------------------+-----------------------------------------+
        #             | ModelGeneratedTime        | TIME                                    |
        #             +---------------------------+-----------------------------------------+
        #
        >>> save_byom('model1',
        ...           model_file,
        ...           'byom_models',
        ...           additional_columns={"Description": "KMeans model",
        ...                               "UserId": "12345",
        ...                               "ProductionReady": False,
        ...                               "ModelEfficiency": 0.67412,
        ...                               "ModelSavedTime": datetime.datetime.now(),
        ...                               "ModelGeneratedDate":datetime.date.today(),
        ...                               "ModelGeneratedTime": datetime.time(hour=0,minute=5,second=45,microsecond=110)
        ...                               },
        ...           additional_columns_types={"Description": VARCHAR(2000),
        ...                                     "UserId": NUMBER(5),
        ...                                     "ProductionReady": BYTEINT,
        ...                                     "ModelEfficiency": NUMBER(11,10),
        ...                                     "ModelSavedTime": TIMESTAMP,
        ...                                     "ModelGeneratedDate": DATE,
        ...                                     "ModelGeneratedTime": TIME}
        ...           )
        Created the model table 'byom_models' as it does not exist.
        Model is saved.
        >>>

        # Example 2 - Create table "byom_model1" in "test" DataBase, with additional columns
        #             by not specifying the type of the columns and once table is created,
        #             save the model in it.
        >>> save_byom('model1',
        ...           model_file,
        ...           'byom_models1',
        ...           additional_columns={"Description": "KMeans model",
        ...                               "UserId": "12346",
        ...                               "ProductionReady": False,
        ...                               "ModelEfficiency": 0.67412,
        ...                               "ModelSavedTime": datetime.datetime.now(),
        ...                               "ModelGeneratedDate":datetime.date.today(),
        ...                               "ModelGeneratedTime": datetime.time(hour=0,minute=5,second=45,microsecond=110)
        ...                               },
        ...           schema_name='test'
        ...           )
        Created the model table 'byom_models1' as it does not exist.
        Model is saved.
        >>>

        # Example 3 - Save the model in the existing table "byom_models".
        >>> save_byom('model2',
        ...           model_file,
        ...           'byom_models',
        ...           additional_columns={"Description": "KMeans model duplicated"}
        ...           )
        Model is saved.
        >>>

        # Example 4 - Set the cataloging parameters and save the model
        #             in the existing table "byom_models".
        >>> set_byom_catalog(table_name='byom_models', schema_name='alice')
        The model cataloging parameters are set to table_name='byom_models' and schema_name='alice'
        >>> save_byom('model3', model_file=model_file)
        Model is saved.

        # Example 5 - Set the cataloging table_name to 'byom_models'
        #             and save the model in table 'byom_licensed_models' other than model catalog table.
        >>> set_byom_catalog(table_name='byom_models', schema_name='alice')
        The model cataloging parameters are set to table_name='byom_models' and schema_name='alice'
        >>> save_byom('licensed_model2', model_file=model_file, table_name='byom_licensed_models',
        ...           additional_columns={"license_data": "A5sUL9KU_kP35Vq"})
        Created the model table 'byom_licensed_models' as it does not exist.
        Model is saved.
        >>>
    """
    try:
        # Let's perform argument validations.
        # Create argument information matrix to do parameter checking.
        __arg_info_matrix = []
        __arg_info_matrix.append(["model_id", model_id, False, str, True])
        __arg_info_matrix.append(["model_file", model_file, False, str, True])
        __arg_info_matrix.append(["table_name", table_name, True, str, True])
        __arg_info_matrix.append(["schema_name", schema_name, True, str, True])
        __arg_info_matrix.append(["additional_columns", additional_columns, True, dict])
        __arg_info_matrix.append(["additional_columns_types", additional_columns_types, True, dict])

        # Make sure that a correct type of values has been supplied to the arguments.
        validator._validate_function_arguments(__arg_info_matrix)

        # Set the table and schema name according to the model cataloging parameters and the user inputs.
        table_name, schema_name = __set_validate_catalog_parameters(table_name, schema_name)

        # Change the additional_columns_types and additional_columns to dictionary if
        # it is None so that retrieval would be easy.
        if additional_columns_types is None:
            additional_columns_types = {}

        if additional_columns is None:
            additional_columns = {}

        # Check if model_id or model in additional columns.
        # These two columns are populated by this function itself and must not
        # be supplied by the caller.
        for column in ["model_id", "model"]:
            if column in additional_columns:
                error_code = MessageCodes.NOT_ALLOWED_VALUES
                error_msg = Messages.get_message(error_code, column, "additional_columns")
                raise TeradataMlException(error_msg, error_code)

        # Add model_id and model columns information to lists
        # which will be used in creating insert query.
        # The model file is read as binary content for the BLOB 'model' column.
        column_names = ["model_id", "model"]
        insert_parameters = [model_id, UtilFuncs._get_file_contents(model_file, True)]

        connection = get_connection()
        # Check if table already exists.
        # If exists, extract required information about table columns types
        # else extract from additional_columns_types.
        # Also validate model_id against allowed length.
        table_exists = connection.dialect.has_table(connection, table_name=table_name,
                                                    schema=schema_name, table_only=True)
        if table_exists:
            # Check if model exists or not. If exists, raise error.
            __check_if_model_exists(
                model_id, table_name, schema_name, raise_error_if_model_found=True)

            # Gather column name and type information from existing table
            existing_table_df = DataFrame(in_schema(schema_name, table_name))
            existing_columns_name_sql_type_dict = existing_table_df._td_column_names_and_sqlalchemy_types

            existing_table_model_id_type = existing_columns_name_sql_type_dict["model_id"]
            # Validate length of model_id argument
            _Validators._validate_column_value_length("model_id", model_id, existing_table_model_id_type.length,
                                                      "save the model")
        else:
            # Validate length of model_id argument
            # 128 matches the default VARCHAR(128) used when the table is created below.
            _Validators._validate_column_value_length("model_id", model_id, 128, "save the model")

        columns_name_type_dict = {"model_id": additional_columns_types.get("model_id", VARCHAR(128)),
                                  "model": additional_columns_types.get("model", BLOB)}

        # List of columns whose type is not provided in additional_columns_types.
        undefined_column_types = []

        # If user passes any additional columns data, extract that also to insert it
        # in table.
        # If table exists, use the information about column types from existing table,
        # ignore additional_columns_types argument.
        if additional_columns:
            for col_name, col_value in additional_columns.items():
                # Before proceeding further, validate the additional column data.
                # One should not pass custom types such as list, dict, user defined
                # objects etc.
                _Validators._validate_py_type_for_td_type_conversion(type(col_value), "additional_columns")

                # If table exists, use same column data type.
                # If table does not exist and column type is not specified
                # in additional column types, derive the appropriate one.
                if table_exists:
                    col_name_lower = col_name.lower()
                    if col_name_lower in existing_columns_name_sql_type_dict:
                        col_type = existing_columns_name_sql_type_dict[col_name_lower]
                    else:
                        # Column does not exist in the target table; the row cannot be inserted.
                        raise TeradataMlException(Messages.get_message(MessageCodes.INSERTION_INCOMPATIBLE),
                                                  MessageCodes.INSERTION_INCOMPATIBLE)
                else:
                    col_type = additional_columns_types.get(
                        col_name, _Dtypes._python_type_to_teradata_type(type(col_value)))
                # Update columns_name_type_dict
                columns_name_type_dict[col_name] = col_type

                # Collect undefined column types to show warning.
                if additional_columns_types.get(col_name) is None:
                    undefined_column_types.append(col_name)

                # Validate the length of input varchar columns against allowed column lengths.
                if isinstance(col_type, VARCHAR):
                    _Validators._validate_column_value_length(col_name, col_value, col_type.length,
                                                              "save the model")

                # Add current column name and corresponding value in respective lists.
                column_names.append(col_name)
                insert_parameters.append(col_value)

        # If table doesn't exist, create one using additional_columns_types
        if not table_exists:
            __mandatory_columns_types = {"model_id": VARCHAR, "model": BLOB}
            # Accepts both the type class (e.g. VARCHAR) and an instance (e.g. VARCHAR(256)).
            is_mandatory_col_type_expected = lambda c_name, c_type: \
                c_type == __mandatory_columns_types[c_name] or type(c_type) == __mandatory_columns_types[c_name]

            # Validate additional_columns_types.
            for c_name, c_type in additional_columns_types.items():
                # Check if model_id & model columns have appropriate types.
                if c_name in __mandatory_columns_types and not is_mandatory_col_type_expected(c_name, c_type):
                    error_code = MessageCodes.INVALID_COLUMN_DATATYPE
                    err_msg = Messages.get_message(error_code,
                                                   c_name,
                                                   "additional_columns_types",
                                                   "Valid",
                                                   "[{}]".format(__mandatory_columns_types[c_name].__name__)
                                                   )
                    raise TeradataMlException(err_msg, error_code)

                # Check if value passed to additional_columns_types is a valid type or not.
                # User can pass a class or an object of a class from teradatasqlalchemy.types .
                # So, Check if c_type is either a subclass of TDType or a TDType.
                # isinstance(c_type, _TDType), checks if c_type is an object of teradatasqlalchemy.types
                # issubclass(c_type, _TDType), checks if c_type is a proper Teradata type or not.
                # However, issubclass accepts only class in its 1st parameter so check if c_type is
                # a class or not, before passing it to issubclass.
                elif not (isinstance(c_type, _TDType) or (isinstance(c_type, type) and issubclass(c_type, _TDType))):
                    error_code = MessageCodes.INVALID_COLUMN_DATATYPE
                    err_msg = Messages.get_message(
                        error_code, c_name, "additional_columns_types", "Valid", "teradatasqlalchemy.types")
                    raise TeradataMlException(err_msg, error_code)

            # NOTE(review): requires the module-level "import warnings" — confirm it is
            # present at the top of this file.
            if len(undefined_column_types) > 0:
                warnings.warn("Specified table does not exist and data types of {0} "\
                              "columns are not provided. Taking default datatypes."\
                              .format(", ".join(undefined_column_types)), stacklevel=2)

            # Create empty vantage table using sqlalchemy object.
            _create_table(
                table_name, columns_name_type_dict, primary_index="model_id", schema_name=schema_name)
            print("Created the model table '{}' as it does not exist.".format(table_name))

        # If schema is specified, then concatenate schema name with table name.
        if schema_name:
            table_name = in_schema(schema_name, table_name)

        # Generate insert query.
        # Parameterized (qmark-style) insert: values are bound, not string-interpolated.
        columns_clause = ", ".join(column_names)
        values_clause = ", ".join(("?" for _ in range(len(column_names))))
        insert_model = f"insert into {table_name} ({columns_clause}) values ({values_clause});"
        # Empty queryband buffer before SQL call.
        UtilFuncs._set_queryband()
        # Single-row parameter batch: a tuple holding one list of column values.
        execute_sql(insert_model, tuple([insert_parameters]))
        print("Model is saved.")

    except (SqlOperationalError, TeradataMlException, TypeError, ValueError):
        # Known/expected error types are propagated unchanged to the caller.
        raise
    except Exception as err:
        # Anything unexpected is wrapped into a catalog-operation failure.
        error_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
        raise TeradataMlException(Messages.get_message(error_code, "save", str(err)), error_code)
|
|
669
|
+
|
|
670
|
+
|
|
671
|
+
@collect_queryband(queryband="dltByom")
def delete_byom(model_id, table_name=None, schema_name=None):
    """
    DESCRIPTION:
        Delete a model from the user specified table in Teradata Vantage.

    PARAMETERS:
        model_id:
            Required Argument.
            Specifies the unique model identifier of the model to be deleted.
            Types: str

        table_name:
            Optional Argument.
            Specifies the name of the table to delete the model from.
            Notes:
                * One must either specify this argument or set the byom model
                  catalog table name using set_byom_catalog().
                * If neither is set, an exception is raised; if both are set,
                  the value passed to delete_byom() takes precedence.
            Types: str

        schema_name:
            Optional Argument.
            Specifies the name of the schema/database in which the table specified
            in "table_name" is looked up.
            Notes:
                * One must either specify this argument together with "table_name",
                  or set the byom model catalog schema and table name using
                  set_byom_catalog().
                * If neither is set, an exception is raised; if both are set,
                  the values passed to delete_byom() take precedence.
                * If "schema_name" is specified, "table_name" must also be
                  specified, else an exception is raised.
            Types: str

    Note:
        Precedence summary: values passed to delete_byom() override the session
        defaults registered via set_byom_catalog(); when only a table name is
        available (from either source) the schema associated with the current
        context is used; specifying "schema_name" without "table_name" raises
        an exception, as does having no table name from either source.

    RETURNS:
        None.

    RAISES:
        TeradataMlException

    EXAMPLES:

        >>> import teradataml, os, datetime
        >>> model_file = os.path.join(os.path.dirname(teradataml.__file__), 'data', 'models', 'iris_kmeans_model')
        >>> from teradataml import save_byom, delete_byom
        >>> save_byom('model3', model_file, 'byom_models')
        Model is saved.
        >>> save_byom('model4', model_file, 'byom_models', schema_name='test')
        Model is saved.

        # Example 1 - Delete a model with id 'model3' from the table byom_models.
        >>> delete_byom(model_id='model3', table_name='byom_models')
        Model is deleted.
        >>>

        # Example 2 - Delete a model with id 'model4' from the table byom_models
        # and the table is in "test" DataBase.
        >>> delete_byom(model_id='model4', table_name='byom_models', schema_name='test')
        Model is deleted.
        >>>

        # Example 3 - Delete a model with id 'model4' from the model cataloging
        # table 'byom_models' set by set_byom_catalog().
        >>> set_byom_catalog(table_name='byom_models', schema_name='alice')
        The model cataloging parameters are set to table_name='byom_models' and schema_name='alice'
        >>> delete_byom(model_id='model4')
        Model is deleted.
    """
    # Validate argument types and values up front.
    arg_matrix = [
        ["model_id", model_id, False, str, True],
        ["table_name", table_name, True, str, True],
        ["schema_name", schema_name, True, str, True],
    ]
    validator._validate_function_arguments(arg_matrix)

    # Resolve the effective table/schema from the arguments and the session-level
    # model catalog settings (arguments take precedence).
    table_name, schema_name = __set_validate_catalog_parameters(table_name, schema_name)

    # The target table must exist before a delete can be attempted.
    connection = get_connection()
    if not connection.dialect.has_table(connection, table_name=table_name,
                                        schema=schema_name, table_only=True):
        err_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
        raise TeradataMlException(
            Messages.get_message(
                err_code, "delete",
                'Table "{}.{}" does not exist.'.format(schema_name, table_name)),
            err_code)

    # Verify the model exists in the table; raises if it does not.
    __check_if_model_exists(model_id, table_name, schema_name,
                            raise_error_if_model_not_found=True)

    # Use the fully qualified, quoted table name in the DELETE statement.
    fqtn = _get_quoted_object_name(schema_name, table_name)

    try:
        delete_stmt = "delete from {} where model_id = (?)".format(fqtn)
        # Empty queryband buffer before SQL call.
        UtilFuncs._set_queryband()
        execute_sql(delete_stmt, (model_id,))
        print("Model is deleted.")

    except (SqlOperationalError, TeradataMlException):
        # Already-meaningful errors propagate unchanged.
        raise
    except Exception as err:
        # Anything else is wrapped in a model-cataloging operation failure.
        err_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
        raise TeradataMlException(
            Messages.get_message(err_code, "delete", str(err)), err_code)
|
|
834
|
+
|
|
835
|
+
|
|
836
|
+
@collect_queryband(queryband="stLcns")
def set_license(license,
                table_name=None,
                schema_name=None,
                source='string'):
    """
    DESCRIPTION:
        The set_license() function allows a user to set the license information
        associated with the externally generated model in a session level variable
        which is required by H2O DAI models. It is used by the retrieve_byom()
        function to retrieve the license information while retrieving the specific model.
        If specified table name does not exist and is not the same as BYOM catalog tables,
        then this function creates the table and stores the license information;
        otherwise, this function just validates and sets the license information.

        The license can be set by passing the license in the following ways:
            * Passing the license as a variable;
            * Passing the column name in the model table itself;
            * Passing the table and the column name containing the license;
            * Passing the license in a file.

    PARAMETERS:
        license:
            Required Argument.
            Specifies the license key information that can be passed as:
                * a variable.
                * in a file.
                * name of the column containing license information in a table
                  specified by "table_name" argument.
            Note:
                Argument "source" must be set accordingly.
            Types: str

        table_name:
            Optional Argument.
            Specifies the table name containing the license information if "source" is 'column',
            otherwise specifies the table to store the license into.
            Note:
                Argument "table_name" and "schema_name"
                both should be specified or both should be None.
            Types: str

        schema_name:
            Optional Argument.
            Specifies the name of the schema in which the table specified in
            "table_name" is looked up.
            Note:
                Argument "table_name" and "schema_name"
                both should be specified or both should be None.
            Types: str

        source:
            Required Argument.
            Specifies whether license key specified in "license" is a string, file
            or column name.
            Default value: string
            Permitted values: string, file, column

    RETURNS:
        None.

    RAISES:
        TeradataMlException

    EXAMPLES:
        >>> import os
        >>> from teradataml import save_byom, retrieve_byom, get_context, set_license, set_byom_catalog

        # Example 1: When license is passed as a string.
        >>> set_license(license='eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI',
        ...             table_name=None, schema_name=None, source='string')
        The license parameters are set.
        The license is : eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI

        # Example 2: When license is stored in a file and file is passed as input to "license".
        # "source" must be set to "file".
        >>> license_file = os.path.join(os.path.dirname(teradataml.__file__),
        ...                             'data', 'models', 'License_file.txt')
        >>> set_license(license=license_file, source='file')
        The license parameters are set.
        The license is: license_string

        # Example 3: When license is present in the byom model catalog table itself.
        # Store a model with license information in the model table.
        >>> model_file = os.path.join(os.path.dirname(teradataml.__file__),
        ...                           'data', 'models', 'iris_kmeans_model')
        >>> save_byom('licensed_model1', model_file, 'byom_licensed_models',
        ...           additional_columns={"license_data": "A5sUL9KU_kP35Vq"})
        Created the model table 'byom_licensed_models' as it does not exist.
        Model is saved.
        >>> set_byom_catalog(table_name='byom_licensed_models', schema_name='alice')
        The model cataloging parameters are set to table_name='byom_licensed_models'
        and schema_name='alice'
        >>> set_license(license='license_data', source='column')
        The license parameters are set.
        The license is present in the table='byom_licensed_models',schema='alice' and
        column='license_data'.

        # Example 4: Set the license information using the license stored in a column
        # 'license_key' of a table 'license_table'.
        # Create a table and insert the license information in the table.
        >>> license = 'eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI'
        >>> lic_table = 'create table license (id integer between 1 and 1,
                         license_key varchar(2500)) unique primary index(id);'
        >>> execute_sql(lic_table)
        <sqlalchemy.engine.cursor.LegacyCursorResult object at 0x000001DC4F2EE9A0>
        >>> execute_sql("insert into license values (1, 'peBVRtjA-ib')")
        <sqlalchemy.engine.cursor.LegacyCursorResult object at 0x000001DC4F2EEF10>
        >>> set_license(license='license_key', table_name='license', schema_name='alice',
        ...             source='column')
        The license parameters are set.
        The license is present in the table='license', schema='alice' and column='license_key'.

        # Example 5: Set License when license is passed as a string, table
        # and schema name are passed. Since table does not exist, table is
        # created and license is stored in the table.
        >>> set_license(license="eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI",
        ...             table_name='license_table', schema_name='alice', source='string')
        The license parameters are set.
        The license is present in the table='license_table', schema='alice' and column='license'.

        # Example 6: Set License when license is passed as a file
        # and table and schema name are passed. Since table does not exist,
        # table is created and license is stored in the table.
        >>> set_license(license=license_file, table_name='license_t1', schema_name= 'alice', source='file')
        The license parameters are set.
        The license is present in the table='license_t1', schema='alice' and column='license'.
    """
    # Create argument information matrix for validations.
    __arg_info_matrix = []
    __arg_info_matrix.append(["license", license, False, str, True])
    __arg_info_matrix.append(["source", source, True, str, True, mac.LICENSE_SOURCE.value])
    __arg_info_matrix.append(["table_name", table_name, True, str, True])
    __arg_info_matrix.append(["schema_name", schema_name, True, str, True])

    # Make sure that a correct type of values has been supplied to the arguments.
    validator._validate_function_arguments(__arg_info_matrix)

    # Make sure if table_name is provided, schema_name is also provided and vice_versa.
    validator._validate_mutually_inclusive_arguments(table_name, "table_name", schema_name,
                                                     "schema_name")

    source = source.lower()
    if source == 'column':
        # "license" names a column holding the license data; locate its table.
        conn = get_connection()
        additional_error = ""
        if table_name is None:
            # Assign the table and schema name to model cataloging table and schema as
            # table_name is not provided.
            table_name = configure._byom_model_catalog_table
            schema_name = configure._byom_model_catalog_database

            # Raise an error if the catalog information is not set or table name is not passed
            additional_error = Messages.get_message(MessageCodes.EITHER_FUNCTION_OR_ARGS, "catalog",
                                                    "set_byom_catalog", "catalog", "")
            validator._validate_argument_is_not_None(table_name, "table_name", additional_error)

        if validator._check_table_exists(conn, table_name, schema_name,
                                         raise_error_if_does_not_exists=True,
                                         additional_error=additional_error):

            # Validate the column name provided in the license argument
            # to check if column exists or not.
            license_table = DataFrame(in_schema(schema_name=schema_name,
                                                table_name=table_name))

            _Validators._validate_column_exists_in_dataframe(license,
                                                             license_table._metaexpr,
                                                             for_table=True)

            # Set the configuration option _byom_model_catalog_license,
            # _byom_model_catalog_license_source, _byom_model_catalog_license_table,
            # _byom_model_catalog_license_database.
            configure._byom_model_catalog_license = license
            configure._byom_model_catalog_license_source = 'column'
            configure._byom_model_catalog_license_table = table_name
            configure._byom_model_catalog_license_database = schema_name

            print("The license parameters are set.")
            print("The license is present in the table='{}', schema='{}' and column='{}'"
                  ".".format(configure._byom_model_catalog_license_table,
                             configure._byom_model_catalog_license_database,
                             configure._byom_model_catalog_license))
    else:
        # Set the configuration option _byom_model_catalog_license.
        # If license is passed in a file, extract the same from the file and then set the option.
        configure._byom_model_catalog_license = license if source == 'string' else \
            UtilFuncs._get_file_contents(license)

        if table_name is None:
            # Set the configuration option _byom_model_catalog_license_source.
            # If table_name is not provided set the value to 'string' and print the information.
            configure._byom_model_catalog_license_source = 'string'
            print("The license parameters are set.")
            print("The license is: {}".format(configure._byom_model_catalog_license))

        else:
            conn = get_connection()
            if not validator._check_table_exists(conn, table_name, schema_name, False):
                # Create the license table with constraints; the check constraint
                # pins "id" to 1 so the table can hold exactly one license row.
                license_table = table_name
                columns_to_create = {"id": NUMBER,
                                     "license": VARCHAR}

                _create_table(license_table, columns_to_create, primary_index="id",
                              schema_name=schema_name, check_constraint='id between 1 and 1')
                # Bind the license value as a parameter instead of splicing it into
                # the SQL text, consistent with save_byom()/delete_byom().
                query = "insert into {}.{} values (1, ?)".format(schema_name, license_table)

                # Empty queryband buffer before SQL call.
                UtilFuncs._set_queryband()
                execute_sql(query, (configure._byom_model_catalog_license,))

                # The license now lives in the 'license' column of the new table.
                configure._byom_model_catalog_license = 'license'
                configure._byom_model_catalog_license_source = 'column'
                configure._byom_model_catalog_license_table = license_table
                configure._byom_model_catalog_license_database = schema_name

                print("The license parameters are set.")
                print("The license is present in the table='{}', schema='{}' and column='{}'"
                      ".".format(configure._byom_model_catalog_license_table,
                                 configure._byom_model_catalog_license_database,
                                 configure._byom_model_catalog_license))
            else:
                raise TeradataMlException(Messages.get_message(MessageCodes.TABLE_ALREADY_EXISTS, table_name),
                                          MessageCodes.TABLE_ALREADY_EXISTS)
|
|
1066
|
+
|
|
1067
|
+
|
|
1068
|
+
@collect_queryband(queryband="gtLcns")
def get_license():
    """
    DESCRIPTION:
        Get the license information set by set_license() function at the session level.

    PARAMETERS:
        None.

    RETURNS:
        None.

    RAISES:
        None.

    EXAMPLES:
        >>> import os, teradataml
        >>> from teradataml import save_byom, get_license, set_license

        # Example 1: When license is passed as a string.
        >>> set_license(license='eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI',
                        source='string')
        The license parameters are set.
        The license is: eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI
        >>> get_license()
        The license is: eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI

        # Example 2: When license is present in the column='license_data' and table='byom_licensed_models'.
        >>> set_license(license='license_data', table_name='byom_licensed_models', schema_name='alice',
                        source='column')
        The license parameters are set.
        The license is present in the table='byom_licensed_models', schema='alice' and
        column='license_data'.
        >>> get_license()
        The license is stored in:
        table = 'byom_licensed_models'
        schema = 'alice'
        column = 'license_data'
        >>>
    """
    # Read the session-level license settings populated by set_license().
    license = configure._byom_model_catalog_license
    source = configure._byom_model_catalog_license_source
    table_name = configure._byom_model_catalog_license_table
    schema_name = configure._byom_model_catalog_license_database

    # Check whether license information is present or not.
    if license is not None:
        # Use equality, not substring membership: "source in 'string'" would
        # wrongly match values like 'str' or 'ring'.
        if source == 'string':
            print("The license is: {}".format(license))
        else:
            # source is 'column': the license value is stored in a table column.
            print("The license is stored in:\ntable = '{}'\nschema = '{}'\ncolumn = '{}'"
                  .format(table_name, schema_name, license))
    else:
        print('Set the license information using set_license() function.')
|
|
1123
|
+
|
|
1124
|
+
|
|
1125
|
+
@collect_queryband(queryband="rtrvByom")
|
|
1126
|
+
def retrieve_byom(model_id,
|
|
1127
|
+
table_name=None,
|
|
1128
|
+
schema_name=None,
|
|
1129
|
+
license=None,
|
|
1130
|
+
is_license_column=False,
|
|
1131
|
+
license_table_name=None,
|
|
1132
|
+
license_schema_name=None,
|
|
1133
|
+
require_license=False,
|
|
1134
|
+
return_addition_columns=False):
|
|
1135
|
+
"""
|
|
1136
|
+
DESCRIPTION:
|
|
1137
|
+
Function to retrieve a saved model. Output of this function can be
|
|
1138
|
+
directly passed as input to the PMMLPredict and H2OPredict functions.
|
|
1139
|
+
Some models generated, such as H2O-DAI has license associated with it.
|
|
1140
|
+
When such models are to be used for scoring, one must retrieve the model
|
|
1141
|
+
by passing relevant license information. Please refer to "license_key"
|
|
1142
|
+
for more details.
|
|
1143
|
+
|
|
1144
|
+
PARAMETERS:
|
|
1145
|
+
model_id:
|
|
1146
|
+
Required Argument.
|
|
1147
|
+
Specifies the unique model identifier of the model to be retrieved.
|
|
1148
|
+
Types: str
|
|
1149
|
+
|
|
1150
|
+
table_name:
|
|
1151
|
+
Optional Argument.
|
|
1152
|
+
Specifies the name of the table to retrieve external model from.
|
|
1153
|
+
Notes:
|
|
1154
|
+
* One must either specify this argument or set the byom model catalog table
|
|
1155
|
+
name using set_byom_catalog().
|
|
1156
|
+
* If none of these arguments are set, exception is raised; If both arguments
|
|
1157
|
+
are set, the settings in retrieve_byom() take precedence and is used for
|
|
1158
|
+
function execution.
|
|
1159
|
+
Types: str
|
|
1160
|
+
|
|
1161
|
+
schema_name:
|
|
1162
|
+
Optional Argument.
|
|
1163
|
+
Specifies the name of the schema/database in which the table specified in
|
|
1164
|
+
"table_name" is looked up.
|
|
1165
|
+
Notes:
|
|
1166
|
+
* One must either specify this argument and table_name argument
|
|
1167
|
+
or set the byom model catalog schema and table name using set_byom_catalog().
|
|
1168
|
+
* If none of these arguments are set, exception is raised; If both arguments
|
|
1169
|
+
are set, the settings in retrieve_byom() take precedence and is used for
|
|
1170
|
+
function execution.
|
|
1171
|
+
* If user specifies schema_name argument table_name argument has to be specified,
|
|
1172
|
+
else exception is raised.
|
|
1173
|
+
Types: str
|
|
1174
|
+
|
|
1175
|
+
license:
|
|
1176
|
+
Optional Argument.
|
|
1177
|
+
Specifies the license key information in different ways specified as below:
|
|
1178
|
+
* If the license key is stored in a variable, user can pass it as string.
|
|
1179
|
+
* If the license key is stored in table, then pass a column name containing
|
|
1180
|
+
the license. Based on the table which has license information stored,
|
|
1181
|
+
* If the information is stored in the same model table as that of the
|
|
1182
|
+
model, one must set "is_license_column" to True.
|
|
1183
|
+
* If the information is stored in the different table from that of the
|
|
1184
|
+
"table_name", one can specify the table name and schema name using
|
|
1185
|
+
"license_table_name" and "license_schema_name" respectively.
|
|
1186
|
+
Types: str
|
|
1187
|
+
|
|
1188
|
+
is_license_column:
|
|
1189
|
+
Optional Argument.
|
|
1190
|
+
Specifies whether license key specified in "license" is a license key
|
|
1191
|
+
or column name. When set to True, "license" contains the column name
|
|
1192
|
+
containing license data, otherwise contains the actual license key.
|
|
1193
|
+
Default Value: False
|
|
1194
|
+
Types: str
|
|
1195
|
+
|
|
1196
|
+
license_table_name:
|
|
1197
|
+
Optional Argument.
|
|
1198
|
+
Specifies the name of the table which holds license key. One can specify this
|
|
1199
|
+
argument if license is stored in a table other than "table_name".
|
|
1200
|
+
Types: str
|
|
1201
|
+
|
|
1202
|
+
license_schema_name:
|
|
1203
|
+
Optional Argument.
|
|
1204
|
+
Specifies the name of the Database associated with the "license_table_name".
|
|
1205
|
+
If not specified, current Database would be considered for "license_table_name".
|
|
1206
|
+
Types: str
|
|
1207
|
+
|
|
1208
|
+
require_license:
|
|
1209
|
+
Optional Argument.
|
|
1210
|
+
Specifies whether the model to be retrieved is associated with a license.
|
|
1211
|
+
If True, license information set by the set_license() is retrieved.
|
|
1212
|
+
Note:
|
|
1213
|
+
If license parameters are passed, then this argument is ignored.
|
|
1214
|
+
Default value: False
|
|
1215
|
+
Types: bool
|
|
1216
|
+
|
|
1217
|
+
return_addition_columns:
|
|
1218
|
+
Optional Argument.
|
|
1219
|
+
Specifies whether to return additional columns saved during save_byom() along with
|
|
1220
|
+
model_id and model columns.
|
|
1221
|
+
Default value: False
|
|
1222
|
+
Types: bool
|
|
1223
|
+
|
|
1224
|
+
Note:
|
|
1225
|
+
The following table describes the system behaviour in different scenarios:
|
|
1226
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1227
|
+
| In retrieve_byom() | In set_byom_catalog() | System Behavior |
|
|
1228
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1229
|
+
| table_name | schema_name | table_name | schema_name | |
|
|
1230
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1231
|
+
| Set | Set | Set | Set | schema_name and table_name in |
|
|
1232
|
+
| | | | | retrieve_byom() are used for |
|
|
1233
|
+
| | | | | function execution. |
|
|
1234
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1235
|
+
| Set | Set | Not set | Not set | schema_name and table_name in |
|
|
1236
|
+
| | | | | retrieve_byom() is used for |
|
|
1237
|
+
| | | | | function execution. |
|
|
1238
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1239
|
+
| Not set | Not set | Set | Set | table_name from retrieve_byom() |
|
|
1240
|
+
| | | | | is used and schema name |
|
|
1241
|
+
| | | | | associated with the current |
|
|
1242
|
+
| | | | | context is used. |
|
|
1243
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1244
|
+
| Not set | Set | Set | Set | Exception is raised. |
|
|
1245
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1246
|
+
| Not set | Not set | Set | Set | table_name and schema_name |
|
|
1247
|
+
| | | | | from set_byom_catalog() |
|
|
1248
|
+
| | | | | are used for function execution. |
|
|
1249
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1250
|
+
| Not set | Not set | Set | Not set | table_name from set_byom_catalog() |
|
|
1251
|
+
| | | | | is used and schema name |
|
|
1252
|
+
| | | | | associated with the current |
|
|
1253
|
+
| | | | | context is used. |
|
|
1254
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1255
|
+
| Not set | Not set | Not set | Not set | Exception is raised |
|
|
1256
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1257
|
+
|
|
1258
|
+
RETURNS:
|
|
1259
|
+
teradataml DataFrame
|
|
1260
|
+
|
|
1261
|
+
RAISES:
|
|
1262
|
+
TeradataMlException, TypeError
|
|
1263
|
+
|
|
1264
|
+
EXAMPLES:
|
|
1265
|
+
>>> import teradataml, os, datetime
|
|
1266
|
+
>>> model_file = os.path.join(os.path.dirname(teradataml.__file__), 'data', 'models', 'iris_kmeans_model')
|
|
1267
|
+
>>> from teradataml import save_byom, retrieve_byom, get_context
|
|
1268
|
+
>>> save_byom('model5', model_file, 'byom_models')
|
|
1269
|
+
Model is saved.
|
|
1270
|
+
>>> save_byom('model6', model_file, 'byom_models', schema_name='test')
|
|
1271
|
+
Model is saved.
|
|
1272
|
+
>>> # Save the license in an addtional column named "license_data" in the model table.
|
|
1273
|
+
>>> save_byom('licensed_model1', model_file, 'byom_licensed_models', additional_columns={"license_data": "A5sUL9KU_kP35Vq"})
|
|
1274
|
+
Created the model table 'byom_licensed_models' as it does not exist.
|
|
1275
|
+
Model is saved.
|
|
1276
|
+
>>> # Store the license in a table.
|
|
1277
|
+
>>> license = 'eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI'
|
|
1278
|
+
>>> lic_table = 'create table license (id integer between 1 and 1,license_key varchar(2500)) unique primary index(id);'
|
|
1279
|
+
>>> execute_sql(lic_table)
|
|
1280
|
+
<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x0000014AAFF27080>
|
|
1281
|
+
>>> execute_sql("insert into license values (1, 'peBVRtjA-ib')")
|
|
1282
|
+
<sqlalchemy.engine.cursor.LegacyCursorResult object at 0x0000014AAFF27278>
|
|
1283
|
+
>>>
|
|
1284
|
+
|
|
1285
|
+
# Example 1 - Retrieve a model with id 'model5' from the table 'byom_models'.
|
|
1286
|
+
>>> df = retrieve_byom('model5', table_name='byom_models')
|
|
1287
|
+
>>> df
|
|
1288
|
+
model
|
|
1289
|
+
model_id
|
|
1290
|
+
model5 b'504B03041400080808...'
|
|
1291
|
+
|
|
1292
|
+
# Example 2 - Retrieve a model with id 'model6' from the table 'byom_models'
|
|
1293
|
+
# and the table is in 'test' DataBase.
|
|
1294
|
+
>>> df = retrieve_byom('model6', table_name='byom_models', schema_name='test')
|
|
1295
|
+
>>> df
|
|
1296
|
+
model
|
|
1297
|
+
model_id
|
|
1298
|
+
model6 b'504B03041400080808...'
|
|
1299
|
+
|
|
1300
|
+
# Example 3 - Retrieve a model with id 'model5' from the table 'byom_models'
|
|
1301
|
+
# with license key stored in a variable 'license'.
|
|
1302
|
+
>>> df = retrieve_byom('model5', table_name='byom_models', license=license)
|
|
1303
|
+
>>> df
|
|
1304
|
+
model license
|
|
1305
|
+
model_id
|
|
1306
|
+
model5 b'504B03041400080808...' eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI
|
|
1307
|
+
>>>
|
|
1308
|
+
|
|
1309
|
+
# Example 4 - Retrieve a model with id 'licensed_model1' and associated license
|
|
1310
|
+
# key stored in table 'byom_licensed_models'. License key is stored
|
|
1311
|
+
# in column 'license_data'.
|
|
1312
|
+
>>> df = retrieve_byom('licensed_model1',
|
|
1313
|
+
... table_name='byom_licensed_models',
|
|
1314
|
+
... license='license_data',
|
|
1315
|
+
... is_license_column=True)
|
|
1316
|
+
>>> df
|
|
1317
|
+
model license
|
|
1318
|
+
model_id
|
|
1319
|
+
licensed_model1 b'504B03041400080808...' A5sUL9KU_kP35Vq
|
|
1320
|
+
>>>
|
|
1321
|
+
|
|
1322
|
+
# Example 5 - Retrieve a model with id 'licensed_model1' from the table
|
|
1323
|
+
# 'byom_licensed_models' and associated license key stored in
|
|
1324
|
+
# column 'license_key' of the table 'license'.
|
|
1325
|
+
>>> df = retrieve_byom('licensed_model1',
|
|
1326
|
+
... table_name='byom_licensed_models',
|
|
1327
|
+
... license='license_key',
|
|
1328
|
+
... is_license_column=True,
|
|
1329
|
+
... license_table_name='license')
|
|
1330
|
+
>>> df
|
|
1331
|
+
model license
|
|
1332
|
+
model_id
|
|
1333
|
+
licensed_model1 b'504B03041400080808...' peBVRtjA-ib
|
|
1334
|
+
>>>
|
|
1335
|
+
|
|
1336
|
+
# Example 6 - Retrieve a model with id 'licensed_model1' from the table
|
|
1337
|
+
# 'byom_licensed_models' and associated license key stored in
|
|
1338
|
+
# column 'license_key' of the table 'license' present in the
|
|
1339
|
+
# schema 'mldb'.
|
|
1340
|
+
>>> df = retrieve_byom('licensed_model1',
|
|
1341
|
+
... table_name='byom_licensed_models',
|
|
1342
|
+
... license='license_key',
|
|
1343
|
+
... is_license_column=True,
|
|
1344
|
+
... license_table_name='license',
|
|
1345
|
+
... license_schema_name='mldb')
|
|
1346
|
+
>>> df
|
|
1347
|
+
model license
|
|
1348
|
+
model_id
|
|
1349
|
+
licensed_model1 b'504B03041400080808...' peBVRtjA-ib
|
|
1350
|
+
>>>
|
|
1351
|
+
|
|
1352
|
+
# Example 7 - Retrieve a model with id 'model5' from the table 'byom_models'
|
|
1353
|
+
# with license key stored by set_license in a variable 'license'.
|
|
1354
|
+
# The catalog information is set using set_byom_catalog()
|
|
1355
|
+
# to table_name='byom_models', schema_name='alice'
|
|
1356
|
+
# and is used to retrieve the model.
|
|
1357
|
+
>>> set_byom_catalog(table_name='byom_models', schema_name='alice')
|
|
1358
|
+
The model cataloging parameters are set to table_name='byom_models' and
|
|
1359
|
+
schema_name='alice'
|
|
1360
|
+
>>> set_license(license=license, source='string')
|
|
1361
|
+
The license parameters are set.
|
|
1362
|
+
The license is: eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI
|
|
1363
|
+
>>> df = retrieve_byom('model5', require_license=True)
|
|
1364
|
+
>>> df
|
|
1365
|
+
model license
|
|
1366
|
+
model_id
|
|
1367
|
+
model5 b'504B03041400080808...' eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI
|
|
1368
|
+
>>>
|
|
1369
|
+
|
|
1370
|
+
# Example 8 - Retrieve a model with id 'model5' from the table 'byom_models'
|
|
1371
|
+
# with license key stored by set_license in a file. Since the
|
|
1372
|
+
# schema name is not provided, default schema is used.
|
|
1373
|
+
>>> license_file = os.path.join(os.path.dirname(teradataml.__file__),
|
|
1374
|
+
... 'data', 'models', 'License_file.txt')
|
|
1375
|
+
>>> set_license(license=license_file, source='file')
|
|
1376
|
+
The license parameters are set.
|
|
1377
|
+
The license is: license_string
|
|
1378
|
+
>>> df = retrieve_byom('model5', table_name='byom_models', require_license=True)
|
|
1379
|
+
>>> df
|
|
1380
|
+
model license
|
|
1381
|
+
model_id
|
|
1382
|
+
model5 b'504B03041400080808...' license_string
|
|
1383
|
+
|
|
1384
|
+
# Example 9 - Retrieve a model with id 'licensed_model1' and associated license
|
|
1385
|
+
# key stored in column 'license_key' of the table 'license' present
|
|
1386
|
+
# in the schema 'alice'. The byom catalog and license information is
|
|
1387
|
+
# set using set_byom_catalog() and set_license() respectively.
|
|
1388
|
+
# Function is executed with license parameters passed,
|
|
1389
|
+
# which overrides the license information set at the session level.
|
|
1390
|
+
>>> set_byom_catalog(table_name='byom_licensed_models', schema_name='alice')
|
|
1391
|
+
The model cataloging parameters are set to table_name='byom_licensed_models'
|
|
1392
|
+
and schema_name='alice'
|
|
1393
|
+
>>> set_license(license=license, source='string')
|
|
1394
|
+
The license parameters are set.
|
|
1395
|
+
The license is: eZSy3peBVRtjA-ibVuvNw5A5sUL9KU_kP35Vq4ZNBQ3iGY6oVSpE6g97sFY2LI
|
|
1396
|
+
>>> df = retrieve_byom('licensed_model1', license='license_key',
|
|
1397
|
+
... is_license_column=True, license_table_name='license')
|
|
1398
|
+
>>> df
|
|
1399
|
+
model license
|
|
1400
|
+
model_id
|
|
1401
|
+
licensed_model1 b'504B03041400080808...' peBVRtjA-ib
|
|
1402
|
+
|
|
1403
|
+
# Example 10 - Retrieve a model with id 'licensed_model1' from the table
|
|
1404
|
+
# 'byom_licensed_models' and associated license key stored in
|
|
1405
|
+
# column 'license_data' of the table 'byom_licensed_models'.
|
|
1406
|
+
# The byom catalog and license information is already set
|
|
1407
|
+
# at the session level, passing the table_name to the
|
|
1408
|
+
# function call overrides the byom catalog information
|
|
1409
|
+
# at the session level.
|
|
1410
|
+
>>> set_byom_catalog(table_name='byom_models', schema_name='alice')
|
|
1411
|
+
The model cataloging parameters are set to table_name='byom_models' and
|
|
1412
|
+
schema_name='alice'
|
|
1413
|
+
>>> set_license(license='license_data', table_name='byom_licensed_models',
|
|
1414
|
+
schema_name='alice', source='column')
|
|
1415
|
+
The license parameters are set.
|
|
1416
|
+
The license is present in the table='byom_licensed_models', schema='alice'
|
|
1417
|
+
and column='license_data'.
|
|
1418
|
+
>>> df = retrieve_byom('licensed_model1', table_name='byom_licensed_models',
|
|
1419
|
+
... require_license=True)
|
|
1420
|
+
>>> df
|
|
1421
|
+
model license
|
|
1422
|
+
model_id
|
|
1423
|
+
licensed_model1 b'504B03041400080808...' A5sUL9KU_kP35Vq
|
|
1424
|
+
|
|
1425
|
+
# Example 11 - If require_license=False, which is the default value for the above example,
|
|
1426
|
+
# the license information is not retrieved.
|
|
1427
|
+
>>> df = retrieve_byom('licensed_model1', table_name='byom_licensed_models')
|
|
1428
|
+
>>> df
|
|
1429
|
+
model
|
|
1430
|
+
model_id
|
|
1431
|
+
licensed_model1 b'504B03041400080808...'
|
|
1432
|
+
|
|
1433
|
+
# Example 12 - Retrieve a model with id 'licensed_model1' from the table along with all
|
|
1434
|
+
# additional columns saved during save_byom().
|
|
1435
|
+
>>> df = retrieve_byom('licensed_model1', table_name='byom_licensed_models',
|
|
1436
|
+
return_addition_columns=True)
|
|
1437
|
+
>>> df
|
|
1438
|
+
model license_data
|
|
1439
|
+
model_id
|
|
1440
|
+
licensed_model1 b'504B03041400080808...' A5sUL9KU_kP35Vq
|
|
1441
|
+
"""
|
|
1442
|
+
|
|
1443
|
+
|
|
1444
|
+
# Let's perform argument validations.
|
|
1445
|
+
# Create argument information matrix to do parameter checking
|
|
1446
|
+
__arg_info_matrix = []
|
|
1447
|
+
__arg_info_matrix.append(["model_id", model_id, False, str, True])
|
|
1448
|
+
__arg_info_matrix.append(["table_name", table_name, True, str, True])
|
|
1449
|
+
__arg_info_matrix.append(["schema_name", schema_name, True, str, True])
|
|
1450
|
+
__arg_info_matrix.append(["license", license, True, str, True])
|
|
1451
|
+
__arg_info_matrix.append(["is_license_column", is_license_column, False, bool])
|
|
1452
|
+
__arg_info_matrix.append(["license_table_name", license_table_name, True, str, True])
|
|
1453
|
+
__arg_info_matrix.append(["license_schema_name", license_schema_name, True, str, True])
|
|
1454
|
+
|
|
1455
|
+
# Make sure that a correct type of values has been supplied to the arguments.
|
|
1456
|
+
validator._validate_function_arguments(__arg_info_matrix)
|
|
1457
|
+
|
|
1458
|
+
# Set the table and schema name according to the model cataloging parameters and the user inputs.
|
|
1459
|
+
table_name, schema_name = __set_validate_catalog_parameters(table_name, schema_name)
|
|
1460
|
+
|
|
1461
|
+
if require_license and license is None:
|
|
1462
|
+
license = configure._byom_model_catalog_license
|
|
1463
|
+
is_license_column = True if configure._byom_model_catalog_license_source == 'column' else False
|
|
1464
|
+
license_table_name = configure._byom_model_catalog_license_table
|
|
1465
|
+
license_schema_name = configure._byom_model_catalog_license_database
|
|
1466
|
+
|
|
1467
|
+
# Check whether license information is present or not.
|
|
1468
|
+
additional_error = Messages.get_message(MessageCodes.EITHER_FUNCTION_OR_ARGS, "license", "set_license",
|
|
1469
|
+
"license", "")
|
|
1470
|
+
validator._validate_argument_is_not_None(license, "license", additional_error)
|
|
1471
|
+
|
|
1472
|
+
# Before proceeding further, check whether table exists or not.
|
|
1473
|
+
conn = get_connection()
|
|
1474
|
+
if not conn.dialect.has_table(conn, table_name=table_name, schema=schema_name, table_only=True):
|
|
1475
|
+
error_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
|
|
1476
|
+
error_msg = Messages.get_message(
|
|
1477
|
+
error_code, "retrieve", 'Table "{}.{}" does not exist.'.format(schema_name, table_name))
|
|
1478
|
+
raise TeradataMlException(error_msg, error_code)
|
|
1479
|
+
|
|
1480
|
+
table_name = in_schema(schema_name=schema_name, table_name=table_name)
|
|
1481
|
+
model_details = DataFrame(table_name)
|
|
1482
|
+
model_details = model_details[model_details.model_id == model_id]
|
|
1483
|
+
|
|
1484
|
+
# __check_if_model_exists does the same check; however, it does not return a DataFrame.
|
|
1485
|
+
# So, doing the model existence check here.
|
|
1486
|
+
if model_details.shape[0] == 0:
|
|
1487
|
+
error_code = MessageCodes.MODEL_NOT_FOUND
|
|
1488
|
+
error_msg = Messages.get_message(error_code, model_id, " in the table '{}'".format(table_name))
|
|
1489
|
+
raise TeradataMlException(error_msg, error_code)
|
|
1490
|
+
|
|
1491
|
+
if model_details.shape[0] > 1:
|
|
1492
|
+
error_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
|
|
1493
|
+
error_msg = Messages.get_message(
|
|
1494
|
+
error_code, "retrieve", "Duplicate model found for model id '{}'".format(model_id))
|
|
1495
|
+
raise TeradataMlException(error_msg, error_code)
|
|
1496
|
+
|
|
1497
|
+
# If license holds the actual license key, assign it to model DataFrame.
|
|
1498
|
+
# If license holds the column name, i.e., license data is stored in a table,
|
|
1499
|
+
# If table which holds license data is same as model table, select the column.
|
|
1500
|
+
# If table which holds license data is different from model table, create a
|
|
1501
|
+
# DataFrame on the table which holds license data and do cross join with
|
|
1502
|
+
# models DataFrame. The cross join creates a new DataFrame which has columns
|
|
1503
|
+
# of both tables.
|
|
1504
|
+
# Note that the license table should hold only one record, so even the cartesian
|
|
1505
|
+
# product should hold only one record in the DataFrame.
|
|
1506
|
+
|
|
1507
|
+
if not license:
|
|
1508
|
+
if return_addition_columns:
|
|
1509
|
+
# Return all columns if return_addition_columns is True.
|
|
1510
|
+
return model_details
|
|
1511
|
+
return model_details.select(["model_id", "model"])
|
|
1512
|
+
|
|
1513
|
+
# Lambda function for attaching the license to model DataFrame.
|
|
1514
|
+
_get_license_model_df = lambda license: model_details.assign(drop_columns=True,
|
|
1515
|
+
model_id=model_details.model_id,
|
|
1516
|
+
model=model_details.model,
|
|
1517
|
+
license=license)
|
|
1518
|
+
|
|
1519
|
+
# If user passed a license as a variable, attach it to the model DataFrame.
|
|
1520
|
+
if not is_license_column:
|
|
1521
|
+
return _get_license_model_df(license)
|
|
1522
|
+
|
|
1523
|
+
# If license exists in the column of the same model table.
|
|
1524
|
+
if is_license_column and not license_table_name:
|
|
1525
|
+
_Validators._validate_column_exists_in_dataframe(license,
|
|
1526
|
+
model_details._metaexpr,
|
|
1527
|
+
for_table=True,
|
|
1528
|
+
column_arg='license',
|
|
1529
|
+
data_arg=table_name)
|
|
1530
|
+
return _get_license_model_df(model_details[license])
|
|
1531
|
+
|
|
1532
|
+
# If license exists in the column of the table different from model table.
|
|
1533
|
+
license_schema_name = license_schema_name if license_schema_name else schema_name
|
|
1534
|
+
license_table = in_schema(license_schema_name, license_table_name)
|
|
1535
|
+
|
|
1536
|
+
# Check whether the license table exists before proceeding further.
|
|
1537
|
+
if not conn.dialect.has_table(conn, table_name=license_table_name, schema=license_schema_name,
|
|
1538
|
+
table_only=True):
|
|
1539
|
+
error_code = MessageCodes.EXECUTION_FAILED
|
|
1540
|
+
error_msg = Messages.get_message(
|
|
1541
|
+
error_code, "retrieve the model", 'Table "{}" does not exist.'.format(license_table))
|
|
1542
|
+
raise TeradataMlException(error_msg, error_code)
|
|
1543
|
+
|
|
1544
|
+
license_df = DataFrame(license_table)
|
|
1545
|
+
# Check that the column exists in the table.
|
|
1546
|
+
_Validators._validate_column_exists_in_dataframe(license,
|
|
1547
|
+
license_df._metaexpr,
|
|
1548
|
+
for_table=True,
|
|
1549
|
+
column_arg='license',
|
|
1550
|
+
data_arg=license_table)
|
|
1551
|
+
|
|
1552
|
+
if license_df.shape[0] > 1:
|
|
1553
|
+
error_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
|
|
1554
|
+
error_msg = Messages.get_message(
|
|
1555
|
+
error_code, "retrieve", "Table which holds license key should have only one row.")
|
|
1556
|
+
raise TeradataMlException(error_msg, error_code)
|
|
1557
|
+
|
|
1558
|
+
if not return_addition_columns:
|
|
1559
|
+
# Return only model_id and model columns if return_addition_columns is False.
|
|
1560
|
+
model_details = model_details.select(["model_id", "model"])
|
|
1561
|
+
|
|
1562
|
+
# Make sure license is the column name for license key.
|
|
1563
|
+
license_df = license_df.assign(drop_columns=True, license=license_df[license])
|
|
1564
|
+
return model_details.join(license_df, how="cross")
|
|
1565
|
+
|
|
1566
|
+
|
|
1567
|
+
@collect_queryband(queryband="lstByom")
|
|
1568
|
+
def list_byom(table_name=None, schema_name=None, model_id=None):
|
|
1569
|
+
"""
|
|
1570
|
+
DESCRIPTION:
|
|
1571
|
+
The list_byom() function allows a user to list saved models, filtering the results based on the optional arguments.
|
|
1572
|
+
|
|
1573
|
+
PARAMETERS:
|
|
1574
|
+
table_name:
|
|
1575
|
+
Optional Argument.
|
|
1576
|
+
Specifies the name of the table to list models from.
|
|
1577
|
+
Notes:
|
|
1578
|
+
* One must either specify this argument or set the byom model catalog table
|
|
1579
|
+
name using set_byom_catalog().
|
|
1580
|
+
* If none of these arguments are set, exception is raised; If both arguments
|
|
1581
|
+
are set, the settings in list_byom() take precedence and is used for
|
|
1582
|
+
function execution.
|
|
1583
|
+
Types: str
|
|
1584
|
+
|
|
1585
|
+
schema_name:
|
|
1586
|
+
Optional Argument.
|
|
1587
|
+
Specifies the name of the schema/database in which the table specified in
|
|
1588
|
+
"table_name" is looked up.
|
|
1589
|
+
Notes:
|
|
1590
|
+
* One must either specify this argument and table_name argument
|
|
1591
|
+
or set the byom model catalog schema and table name using set_byom_catalog().
|
|
1592
|
+
* If none of these arguments are set, exception is raised; If both arguments
|
|
1593
|
+
are set, the settings in list_byom() take precedence and is used for
|
|
1594
|
+
function execution.
|
|
1595
|
+
* If user specifies the schema_name argument, the table_name argument has to be specified,
|
|
1596
|
+
else exception is raised.
|
|
1597
|
+
Types: str
|
|
1598
|
+
|
|
1599
|
+
model_id:
|
|
1600
|
+
Optional Argument.
|
|
1601
|
+
Specifies the unique model identifier of the model(s). If specified,
|
|
1602
|
+
the models with either exact match or a substring match, are listed.
|
|
1603
|
+
Types: str OR list
|
|
1604
|
+
|
|
1605
|
+
Note:
|
|
1606
|
+
The following table describes the system behaviour in different scenarios:
|
|
1607
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1608
|
+
| In list_byom() | In set_byom_catalog() | System Behavior |
|
|
1609
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1610
|
+
| table_name | schema_name | table_name | schema_name | |
|
|
1611
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1612
|
+
| Set | Set | Set | Set | schema_name and table_name in |
|
|
1613
|
+
| | | | | list_byom() are used for |
|
|
1614
|
+
| | | | | function execution. |
|
|
1615
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1616
|
+
| Set | Set | Not set | Not set | schema_name and table_name in |
|
|
1617
|
+
| | | | | list_byom() is used for |
|
|
1618
|
+
| | | | | function execution. |
|
|
1619
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1620
|
+
| Set | Not set | Set | Set | table_name from list_byom() |
|
|
1621
|
+
| | | | | is used and schema name |
|
|
1622
|
+
| | | | | associated with the current |
|
|
1623
|
+
| | | | | context is used. |
|
|
1624
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1625
|
+
| Not set | Set | Set | Set | Exception is raised. |
|
|
1626
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1627
|
+
| Not set | Not set | Set | Set | table_name and schema_name |
|
|
1628
|
+
| | | | | from set_byom_catalog() |
|
|
1629
|
+
| | | | | are used for function execution. |
|
|
1630
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1631
|
+
| Not set | Not set | Set | Not set | table_name from set_byom_catalog() |
|
|
1632
|
+
| | | | | is used and schema name |
|
|
1633
|
+
| | | | | associated with the current |
|
|
1634
|
+
| | | | | context is used. |
|
|
1635
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1636
|
+
| Not set | Not set | Not set | Not set | Exception is raised |
|
|
1637
|
+
+----------------+-------------+-----------------------+-------------+-------------------------------------+
|
|
1638
|
+
RETURNS:
|
|
1639
|
+
None.
|
|
1640
|
+
|
|
1641
|
+
RAISES:
|
|
1642
|
+
TeradataMlException, TypeError
|
|
1643
|
+
|
|
1644
|
+
EXAMPLES:
|
|
1645
|
+
>>> import teradataml, os, datetime
|
|
1646
|
+
>>> model_file = os.path.join(os.path.dirname(teradataml.__file__), 'data', 'models', 'iris_kmeans_model')
|
|
1647
|
+
>>> from teradataml import save_byom, list_byom
|
|
1648
|
+
>>> save_byom('model7', model_file, 'byom_models')
|
|
1649
|
+
Model is saved.
|
|
1650
|
+
>>> save_byom('iris_model1', model_file, 'byom_models')
|
|
1651
|
+
Model is saved.
|
|
1652
|
+
>>> save_byom('model8', model_file, 'byom_models', schema_name='test')
|
|
1653
|
+
Model is saved.
|
|
1654
|
+
>>> save_byom('iris_model1', model_file, 'byom_licensed_models')
|
|
1655
|
+
Model is saved.
|
|
1656
|
+
>>>
|
|
1657
|
+
|
|
1658
|
+
# Example 1 - List all the models from the table byom_models.
|
|
1659
|
+
>>> list_byom(table_name='byom_models')
|
|
1660
|
+
model
|
|
1661
|
+
model_id
|
|
1662
|
+
model7 b'504B03041400080808...'
|
|
1663
|
+
iris_model1 b'504B03041400080808...'
|
|
1664
|
+
>>>
|
|
1665
|
+
|
|
1666
|
+
# Example 2 - List all the models with model_id containing 'iris' string.
|
|
1667
|
+
# List such models from 'byom_models' table.
|
|
1668
|
+
>>> list_byom(table_name='byom_models', model_id='iris')
|
|
1669
|
+
model
|
|
1670
|
+
model_id
|
|
1671
|
+
iris_model1 b'504B03041400080808...'
|
|
1672
|
+
>>>
|
|
1673
|
+
|
|
1674
|
+
# Example 3 - List all the models with model_id containing either 'iris'
|
|
1675
|
+
# or '7'. List such models from 'byom_models' table.
|
|
1676
|
+
>>> list_byom(table_name='byom_models', model_id=['iris', '7'])
|
|
1677
|
+
model
|
|
1678
|
+
model_id
|
|
1679
|
+
model7 b'504B03041400080808...'
|
|
1680
|
+
iris_model1 b'504B03041400080808...'
|
|
1681
|
+
>>>
|
|
1682
|
+
|
|
1683
|
+
# Example 4 - List all the models from the 'byom_models' table and table is
|
|
1684
|
+
# in 'test' DataBase.
|
|
1685
|
+
>>> list_byom(table_name='byom_models', schema_name='test')
|
|
1686
|
+
model
|
|
1687
|
+
model_id
|
|
1688
|
+
model8 b'504B03041400080808...'
|
|
1689
|
+
>>>
|
|
1690
|
+
|
|
1691
|
+
# Example 5 - List all the models from the model cataloging table
|
|
1692
|
+
# set by set_byom_catalog().
|
|
1693
|
+
>>> set_byom_catalog(table_name='byom_models', schema_name='alice')
|
|
1694
|
+
The model cataloging parameters are set to table_name='byom_models' and schema_name='alice'
|
|
1695
|
+
>>> list_byom()
|
|
1696
|
+
model
|
|
1697
|
+
model_id
|
|
1698
|
+
model8 b'504B03041400080808...'
|
|
1699
|
+
|
|
1700
|
+
# Example 6 - List all the models from the table other than model cataloging table
|
|
1701
|
+
# set at the session level.
|
|
1702
|
+
>>> set_byom_catalog(table_name='byom_models', schema_name= 'alice')
|
|
1703
|
+
The model cataloging parameters are set to table_name='byom_models' and schema_name='alice'
|
|
1704
|
+
>>> list_byom(table_name='byom_licensed_models')
|
|
1705
|
+
model
|
|
1706
|
+
model_id
|
|
1707
|
+
iris_model1 b'504B03041400080808...'
|
|
1708
|
+
|
|
1709
|
+
"""
|
|
1710
|
+
|
|
1711
|
+
# Let's perform argument validations.
|
|
1712
|
+
# Create argument information matrix to do parameter checking
|
|
1713
|
+
__arg_info_matrix = []
|
|
1714
|
+
__arg_info_matrix.append(["table_name", table_name, True, str, True])
|
|
1715
|
+
__arg_info_matrix.append(["schema_name", schema_name, True, str, True])
|
|
1716
|
+
__arg_info_matrix.append(["model_id", model_id, True, (str, list), True])
|
|
1717
|
+
|
|
1718
|
+
# Make sure that a correct type of values has been supplied to the arguments.
|
|
1719
|
+
validator._validate_function_arguments(__arg_info_matrix)
|
|
1720
|
+
|
|
1721
|
+
# Set the table and schema name according to the model cataloging parameters and the user inputs.
|
|
1722
|
+
table_name, schema_name = __set_validate_catalog_parameters(table_name, schema_name)
|
|
1723
|
+
|
|
1724
|
+
# Before proceeding further, check whether table exists or not.
|
|
1725
|
+
conn = get_connection()
|
|
1726
|
+
if not conn.dialect.has_table(conn, table_name=table_name, schema=schema_name, table_only=True):
|
|
1727
|
+
error_code = MessageCodes.MODEL_CATALOGING_OPERATION_FAILED
|
|
1728
|
+
error_msg = Messages.get_message(
|
|
1729
|
+
error_code, "list", 'Table "{}.{}" does not exist.'.format(schema_name, table_name))
|
|
1730
|
+
raise TeradataMlException(error_msg, error_code)
|
|
1731
|
+
|
|
1732
|
+
model_details = DataFrame(in_schema(schema_name, table_name))
|
|
1733
|
+
|
|
1734
|
+
filter_condition = None
|
|
1735
|
+
if model_id:
|
|
1736
|
+
model_ids = UtilFuncs._as_list(model_id)
|
|
1737
|
+
for modelid in model_ids:
|
|
1738
|
+
# Filter Expression on model_id column.
|
|
1739
|
+
# We are looking to find all rows with model_id matching with 'modelid' string.
|
|
1740
|
+
# This is case-insensitive look-up.
|
|
1741
|
+
filter_expression = __get_like_filter_expression_on_col(model_details._metaexpr,
|
|
1742
|
+
"model_id", modelid)
|
|
1743
|
+
filter_condition = filter_condition | filter_expression \
|
|
1744
|
+
if filter_condition else filter_expression
|
|
1745
|
+
|
|
1746
|
+
if filter_condition:
|
|
1747
|
+
model_details = model_details[filter_condition]
|
|
1748
|
+
|
|
1749
|
+
if model_details.shape[0] != 0:
|
|
1750
|
+
orig_max_rows_num = display.max_rows
|
|
1751
|
+
try:
|
|
1752
|
+
display.max_rows = 99999
|
|
1753
|
+
print(model_details)
|
|
1754
|
+
except Exception:
|
|
1755
|
+
raise
|
|
1756
|
+
finally:
|
|
1757
|
+
display.max_rows = orig_max_rows_num
|
|
1758
|
+
else:
|
|
1759
|
+
print("No models found.")
|