teradataml-20.0.0.8-py3-none-any.whl
This diff lists the contents of publicly available package versions released to a supported registry. The information is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
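The per-file manifest below can be cross-checked locally, since a wheel is a plain zip archive. A minimal sketch, assuming the wheel has been downloaded (for example with `pip download teradataml==20.0.0.8 --no-deps`); the local filename is illustrative:

```python
# Minimal sketch: reproduce this diff's file manifest from the wheel itself.
# A .whl file is a standard zip archive (PEP 427), so the stdlib zipfile
# module recovers the same per-file view this page summarizes.
import zipfile

WHEEL = "teradataml-20.0.0.8-py3-none-any.whl"  # assumed local path

with zipfile.ZipFile(WHEEL) as whl:
    for info in whl.infolist():
        # Print each archived path with its uncompressed size in bytes.
        print(f"{info.filename} ({info.file_size} bytes)")
```

Note that the `+N -0` counts below are line counts per file relative to an empty baseline (a new release), not byte sizes; binary entries such as PDFs, models, and images show `+0 -0`.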
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +2762 -0
- teradataml/__init__.py +78 -0
- teradataml/_version.py +11 -0
- teradataml/analytics/Transformations.py +2996 -0
- teradataml/analytics/__init__.py +82 -0
- teradataml/analytics/analytic_function_executor.py +2416 -0
- teradataml/analytics/analytic_query_generator.py +1050 -0
- teradataml/analytics/byom/H2OPredict.py +514 -0
- teradataml/analytics/byom/PMMLPredict.py +437 -0
- teradataml/analytics/byom/__init__.py +16 -0
- teradataml/analytics/json_parser/__init__.py +133 -0
- teradataml/analytics/json_parser/analytic_functions_argument.py +1805 -0
- teradataml/analytics/json_parser/json_store.py +191 -0
- teradataml/analytics/json_parser/metadata.py +1666 -0
- teradataml/analytics/json_parser/utils.py +805 -0
- teradataml/analytics/meta_class.py +236 -0
- teradataml/analytics/sqle/DecisionTreePredict.py +456 -0
- teradataml/analytics/sqle/NaiveBayesPredict.py +420 -0
- teradataml/analytics/sqle/__init__.py +128 -0
- teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -0
- teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -0
- teradataml/analytics/table_operator/__init__.py +11 -0
- teradataml/analytics/uaf/__init__.py +82 -0
- teradataml/analytics/utils.py +828 -0
- teradataml/analytics/valib.py +1617 -0
- teradataml/automl/__init__.py +5835 -0
- teradataml/automl/autodataprep/__init__.py +493 -0
- teradataml/automl/custom_json_utils.py +1625 -0
- teradataml/automl/data_preparation.py +1384 -0
- teradataml/automl/data_transformation.py +1254 -0
- teradataml/automl/feature_engineering.py +2273 -0
- teradataml/automl/feature_exploration.py +1873 -0
- teradataml/automl/model_evaluation.py +488 -0
- teradataml/automl/model_training.py +1407 -0
- teradataml/catalog/__init__.py +2 -0
- teradataml/catalog/byom.py +1759 -0
- teradataml/catalog/function_argument_mapper.py +859 -0
- teradataml/catalog/model_cataloging_utils.py +491 -0
- teradataml/clients/__init__.py +0 -0
- teradataml/clients/auth_client.py +137 -0
- teradataml/clients/keycloak_client.py +165 -0
- teradataml/clients/pkce_client.py +481 -0
- teradataml/common/__init__.py +1 -0
- teradataml/common/aed_utils.py +2078 -0
- teradataml/common/bulk_exposed_utils.py +113 -0
- teradataml/common/constants.py +1669 -0
- teradataml/common/deprecations.py +166 -0
- teradataml/common/exceptions.py +147 -0
- teradataml/common/formula.py +743 -0
- teradataml/common/garbagecollector.py +666 -0
- teradataml/common/logger.py +1261 -0
- teradataml/common/messagecodes.py +518 -0
- teradataml/common/messages.py +262 -0
- teradataml/common/pylogger.py +67 -0
- teradataml/common/sqlbundle.py +764 -0
- teradataml/common/td_coltype_code_to_tdtype.py +48 -0
- teradataml/common/utils.py +3166 -0
- teradataml/common/warnings.py +36 -0
- teradataml/common/wrapper_utils.py +625 -0
- teradataml/config/__init__.py +0 -0
- teradataml/config/dummy_file1.cfg +5 -0
- teradataml/config/dummy_file2.cfg +3 -0
- teradataml/config/sqlengine_alias_definitions_v1.0 +14 -0
- teradataml/config/sqlengine_alias_definitions_v1.1 +20 -0
- teradataml/config/sqlengine_alias_definitions_v1.3 +19 -0
- teradataml/context/__init__.py +0 -0
- teradataml/context/aed_context.py +223 -0
- teradataml/context/context.py +1462 -0
- teradataml/data/A_loan.csv +19 -0
- teradataml/data/BINARY_REALS_LEFT.csv +11 -0
- teradataml/data/BINARY_REALS_RIGHT.csv +11 -0
- teradataml/data/B_loan.csv +49 -0
- teradataml/data/BuoyData2.csv +17 -0
- teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -0
- teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -0
- teradataml/data/Convolve2RealsLeft.csv +5 -0
- teradataml/data/Convolve2RealsRight.csv +5 -0
- teradataml/data/Convolve2ValidLeft.csv +11 -0
- teradataml/data/Convolve2ValidRight.csv +11 -0
- teradataml/data/DFFTConv_Real_8_8.csv +65 -0
- teradataml/data/Employee.csv +5 -0
- teradataml/data/Employee_Address.csv +4 -0
- teradataml/data/Employee_roles.csv +5 -0
- teradataml/data/JulesBelvezeDummyData.csv +100 -0
- teradataml/data/Mall_customer_data.csv +201 -0
- teradataml/data/Orders1_12mf.csv +25 -0
- teradataml/data/Pi_loan.csv +7 -0
- teradataml/data/SMOOTHED_DATA.csv +7 -0
- teradataml/data/TestDFFT8.csv +9 -0
- teradataml/data/TestRiver.csv +109 -0
- teradataml/data/Traindata.csv +28 -0
- teradataml/data/__init__.py +0 -0
- teradataml/data/acf.csv +17 -0
- teradataml/data/adaboost_example.json +34 -0
- teradataml/data/adaboostpredict_example.json +24 -0
- teradataml/data/additional_table.csv +11 -0
- teradataml/data/admissions_test.csv +21 -0
- teradataml/data/admissions_train.csv +41 -0
- teradataml/data/admissions_train_nulls.csv +41 -0
- teradataml/data/advertising.csv +201 -0
- teradataml/data/ageandheight.csv +13 -0
- teradataml/data/ageandpressure.csv +31 -0
- teradataml/data/amazon_reviews_25.csv +26 -0
- teradataml/data/antiselect_example.json +36 -0
- teradataml/data/antiselect_input.csv +8 -0
- teradataml/data/antiselect_input_mixed_case.csv +8 -0
- teradataml/data/applicant_external.csv +7 -0
- teradataml/data/applicant_reference.csv +7 -0
- teradataml/data/apriori_example.json +22 -0
- teradataml/data/arima_example.json +9 -0
- teradataml/data/assortedtext_input.csv +8 -0
- teradataml/data/attribution_example.json +34 -0
- teradataml/data/attribution_sample_table.csv +27 -0
- teradataml/data/attribution_sample_table1.csv +6 -0
- teradataml/data/attribution_sample_table2.csv +11 -0
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bank_marketing.csv +11163 -0
- teradataml/data/bank_web_clicks1.csv +43 -0
- teradataml/data/bank_web_clicks2.csv +91 -0
- teradataml/data/bank_web_url.csv +85 -0
- teradataml/data/barrier.csv +2 -0
- teradataml/data/barrier_new.csv +3 -0
- teradataml/data/betweenness_example.json +14 -0
- teradataml/data/bike_sharing.csv +732 -0
- teradataml/data/bin_breaks.csv +8 -0
- teradataml/data/bin_fit_ip.csv +4 -0
- teradataml/data/binary_complex_left.csv +11 -0
- teradataml/data/binary_complex_right.csv +11 -0
- teradataml/data/binary_matrix_complex_left.csv +21 -0
- teradataml/data/binary_matrix_complex_right.csv +21 -0
- teradataml/data/binary_matrix_real_left.csv +21 -0
- teradataml/data/binary_matrix_real_right.csv +21 -0
- teradataml/data/blood2ageandweight.csv +26 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/boston.csv +507 -0
- teradataml/data/boston2cols.csv +721 -0
- teradataml/data/breast_cancer.csv +570 -0
- teradataml/data/buoydata_mix.csv +11 -0
- teradataml/data/burst_data.csv +5 -0
- teradataml/data/burst_example.json +21 -0
- teradataml/data/byom_example.json +34 -0
- teradataml/data/bytes_table.csv +4 -0
- teradataml/data/cal_housing_ex_raw.csv +70 -0
- teradataml/data/callers.csv +7 -0
- teradataml/data/calls.csv +10 -0
- teradataml/data/cars_hist.csv +33 -0
- teradataml/data/cat_table.csv +25 -0
- teradataml/data/ccm_example.json +32 -0
- teradataml/data/ccm_input.csv +91 -0
- teradataml/data/ccm_input2.csv +13 -0
- teradataml/data/ccmexample.csv +101 -0
- teradataml/data/ccmprepare_example.json +9 -0
- teradataml/data/ccmprepare_input.csv +91 -0
- teradataml/data/cfilter_example.json +12 -0
- teradataml/data/changepointdetection_example.json +18 -0
- teradataml/data/changepointdetectionrt_example.json +8 -0
- teradataml/data/chi_sq.csv +3 -0
- teradataml/data/churn_data.csv +14 -0
- teradataml/data/churn_emission.csv +35 -0
- teradataml/data/churn_initial.csv +3 -0
- teradataml/data/churn_state_transition.csv +5 -0
- teradataml/data/citedges_2.csv +745 -0
- teradataml/data/citvertices_2.csv +1210 -0
- teradataml/data/clicks2.csv +16 -0
- teradataml/data/clickstream.csv +13 -0
- teradataml/data/clickstream1.csv +11 -0
- teradataml/data/closeness_example.json +16 -0
- teradataml/data/complaints.csv +21 -0
- teradataml/data/complaints_mini.csv +3 -0
- teradataml/data/complaints_test_tokenized.csv +353 -0
- teradataml/data/complaints_testtoken.csv +224 -0
- teradataml/data/complaints_tokens_model.csv +348 -0
- teradataml/data/complaints_tokens_test.csv +353 -0
- teradataml/data/complaints_traintoken.csv +472 -0
- teradataml/data/computers_category.csv +1001 -0
- teradataml/data/computers_test1.csv +1252 -0
- teradataml/data/computers_train1.csv +5009 -0
- teradataml/data/computers_train1_clustered.csv +5009 -0
- teradataml/data/confusionmatrix_example.json +9 -0
- teradataml/data/conversion_event_table.csv +3 -0
- teradataml/data/corr_input.csv +17 -0
- teradataml/data/correlation_example.json +11 -0
- teradataml/data/covid_confirm_sd.csv +83 -0
- teradataml/data/coxhazardratio_example.json +39 -0
- teradataml/data/coxph_example.json +15 -0
- teradataml/data/coxsurvival_example.json +28 -0
- teradataml/data/cpt.csv +41 -0
- teradataml/data/credit_ex_merged.csv +45 -0
- teradataml/data/creditcard_data.csv +1001 -0
- teradataml/data/customer_loyalty.csv +301 -0
- teradataml/data/customer_loyalty_newseq.csv +31 -0
- teradataml/data/customer_segmentation_test.csv +2628 -0
- teradataml/data/customer_segmentation_train.csv +8069 -0
- teradataml/data/dataframe_example.json +173 -0
- teradataml/data/decisionforest_example.json +37 -0
- teradataml/data/decisionforestpredict_example.json +38 -0
- teradataml/data/decisiontree_example.json +21 -0
- teradataml/data/decisiontreepredict_example.json +45 -0
- teradataml/data/dfft2_size4_real.csv +17 -0
- teradataml/data/dfft2_test_matrix16.csv +17 -0
- teradataml/data/dfft2conv_real_4_4.csv +65 -0
- teradataml/data/diabetes.csv +443 -0
- teradataml/data/diabetes_test.csv +89 -0
- teradataml/data/dict_table.csv +5 -0
- teradataml/data/docperterm_table.csv +4 -0
- teradataml/data/docs/__init__.py +1 -0
- teradataml/data/docs/byom/__init__.py +0 -0
- teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -0
- teradataml/data/docs/byom/docs/DataikuPredict.py +217 -0
- teradataml/data/docs/byom/docs/H2OPredict.py +325 -0
- teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
- teradataml/data/docs/byom/docs/ONNXPredict.py +283 -0
- teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
- teradataml/data/docs/byom/docs/PMMLPredict.py +278 -0
- teradataml/data/docs/byom/docs/__init__.py +0 -0
- teradataml/data/docs/sqle/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_10/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Attribution.py +200 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -0
- teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +96 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +152 -0
- teradataml/data/docs/sqle/docs_17_10/FTest.py +161 -0
- teradataml/data/docs/sqle/docs_17_10/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +85 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +82 -0
- teradataml/data/docs/sqle/docs_17_10/Histogram.py +165 -0
- teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_10/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -0
- teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +135 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +166 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -0
- teradataml/data/docs/sqle/docs_17_10/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +110 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -0
- teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -0
- teradataml/data/docs/sqle/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -0
- teradataml/data/docs/sqle/docs_17_20/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Apriori.py +138 -0
- teradataml/data/docs/sqle/docs_17_20/Attribution.py +201 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -0
- teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
- teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +246 -0
- teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +280 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +136 -0
- teradataml/data/docs/sqle/docs_17_20/FTest.py +240 -0
- teradataml/data/docs/sqle/docs_17_20/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +415 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -0
- teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/Histogram.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -0
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/KNN.py +215 -0
- teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_20/NERExtractor.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_20/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +231 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +191 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -0
- teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +155 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -0
- teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -0
- teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -0
- teradataml/data/docs/sqle/docs_17_20/SMOTE.py +212 -0
- teradataml/data/docs/sqle/docs_17_20/SVM.py +414 -0
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -0
- teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -0
- teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -0
- teradataml/data/docs/sqle/docs_17_20/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_20/Shap.py +225 -0
- teradataml/data/docs/sqle/docs_17_20/Silhouette.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -0
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -0
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
- teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +267 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -0
- teradataml/data/docs/sqle/docs_17_20/TextMorph.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +160 -0
- teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -0
- teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
- teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +169 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +237 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoost.py +362 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -0
- teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/tableoperator/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_00/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_05/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +429 -0
- teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +440 -0
- teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +387 -0
- teradataml/data/docs/tableoperator/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/uaf/__init__.py +0 -0
- teradataml/data/docs/uaf/docs_17_20/ACF.py +186 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +370 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +161 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
- teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
- teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +248 -0
- teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +178 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve.py +230 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve2.py +218 -0
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT.py +204 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +192 -0
- teradataml/data/docs/uaf/docs_17_20/DIFF.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/DTW.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
- teradataml/data/docs/uaf/docs_17_20/DWT2D.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +142 -0
- teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +184 -0
- teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
- teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +206 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +143 -0
- teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +198 -0
- teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +260 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT.py +165 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
- teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/InputValidator.py +121 -0
- teradataml/data/docs/uaf/docs_17_20/LineSpec.py +156 -0
- teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +215 -0
- teradataml/data/docs/uaf/docs_17_20/MAMean.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/MInfo.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
- teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/PACF.py +157 -0
- teradataml/data/docs/uaf/docs_17_20/Portman.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +203 -0
- teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +155 -0
- teradataml/data/docs/uaf/docs_17_20/Resample.py +237 -0
- teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
- teradataml/data/docs/uaf/docs_17_20/SInfo.py +123 -0
- teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +173 -0
- teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +164 -0
- teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/Smoothma.py +208 -0
- teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +151 -0
- teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +202 -0
- teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
- teradataml/data/docs/uaf/docs_17_20/__init__.py +0 -0
- teradataml/data/dtw_example.json +18 -0
- teradataml/data/dtw_t1.csv +11 -0
- teradataml/data/dtw_t2.csv +4 -0
- teradataml/data/dwt2d_dataTable.csv +65 -0
- teradataml/data/dwt2d_example.json +16 -0
- teradataml/data/dwt_dataTable.csv +8 -0
- teradataml/data/dwt_example.json +15 -0
- teradataml/data/dwt_filterTable.csv +3 -0
- teradataml/data/dwt_filter_dim.csv +5 -0
- teradataml/data/emission.csv +9 -0
- teradataml/data/emp_table_by_dept.csv +19 -0
- teradataml/data/employee_info.csv +4 -0
- teradataml/data/employee_table.csv +6 -0
- teradataml/data/excluding_event_table.csv +2 -0
- teradataml/data/finance_data.csv +6 -0
- teradataml/data/finance_data2.csv +61 -0
- teradataml/data/finance_data3.csv +93 -0
- teradataml/data/finance_data4.csv +13 -0
- teradataml/data/fish.csv +160 -0
- teradataml/data/fm_blood2ageandweight.csv +26 -0
- teradataml/data/fmeasure_example.json +12 -0
- teradataml/data/followers_leaders.csv +10 -0
- teradataml/data/fpgrowth_example.json +12 -0
- teradataml/data/frequentpaths_example.json +29 -0
- teradataml/data/friends.csv +9 -0
- teradataml/data/fs_input.csv +33 -0
- teradataml/data/fs_input1.csv +33 -0
- teradataml/data/genData.csv +513 -0
- teradataml/data/geodataframe_example.json +40 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/glm_admissions_model.csv +12 -0
- teradataml/data/glm_example.json +56 -0
- teradataml/data/glml1l2_example.json +28 -0
- teradataml/data/glml1l2predict_example.json +54 -0
- teradataml/data/glmpredict_example.json +54 -0
- teradataml/data/gq_t1.csv +21 -0
- teradataml/data/grocery_transaction.csv +19 -0
- teradataml/data/hconvolve_complex_right.csv +5 -0
- teradataml/data/hconvolve_complex_rightmulti.csv +5 -0
- teradataml/data/histogram_example.json +12 -0
- teradataml/data/hmmdecoder_example.json +79 -0
- teradataml/data/hmmevaluator_example.json +25 -0
- teradataml/data/hmmsupervised_example.json +10 -0
- teradataml/data/hmmunsupervised_example.json +8 -0
- teradataml/data/hnsw_alter_data.csv +5 -0
- teradataml/data/hnsw_data.csv +10 -0
- teradataml/data/house_values.csv +12 -0
- teradataml/data/house_values2.csv +13 -0
- teradataml/data/housing_cat.csv +7 -0
- teradataml/data/housing_data.csv +9 -0
- teradataml/data/housing_test.csv +47 -0
- teradataml/data/housing_test_binary.csv +47 -0
- teradataml/data/housing_train.csv +493 -0
- teradataml/data/housing_train_attribute.csv +5 -0
- teradataml/data/housing_train_binary.csv +437 -0
- teradataml/data/housing_train_parameter.csv +2 -0
- teradataml/data/housing_train_response.csv +493 -0
- teradataml/data/housing_train_segment.csv +201 -0
- teradataml/data/ibm_stock.csv +370 -0
- teradataml/data/ibm_stock1.csv +370 -0
- teradataml/data/identitymatch_example.json +22 -0
- teradataml/data/idf_table.csv +4 -0
- teradataml/data/idwt2d_dataTable.csv +5 -0
- teradataml/data/idwt_dataTable.csv +8 -0
- teradataml/data/idwt_filterTable.csv +3 -0
- teradataml/data/impressions.csv +101 -0
- teradataml/data/inflation.csv +21 -0
- teradataml/data/initial.csv +3 -0
- teradataml/data/insect2Cols.csv +61 -0
- teradataml/data/insect_sprays.csv +13 -0
- teradataml/data/insurance.csv +1339 -0
- teradataml/data/interpolator_example.json +13 -0
- teradataml/data/interval_data.csv +5 -0
- teradataml/data/iris_altinput.csv +481 -0
- teradataml/data/iris_attribute_output.csv +8 -0
- teradataml/data/iris_attribute_test.csv +121 -0
- teradataml/data/iris_attribute_train.csv +481 -0
- teradataml/data/iris_category_expect_predict.csv +31 -0
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/iris_input.csv +151 -0
- teradataml/data/iris_response_train.csv +121 -0
- teradataml/data/iris_test.csv +31 -0
- teradataml/data/iris_train.csv +121 -0
- teradataml/data/join_table1.csv +4 -0
- teradataml/data/join_table2.csv +4 -0
- teradataml/data/jsons/anly_function_name.json +7 -0
- teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
- teradataml/data/jsons/byom/dataikupredict.json +148 -0
- teradataml/data/jsons/byom/datarobotpredict.json +147 -0
- teradataml/data/jsons/byom/h2opredict.json +195 -0
- teradataml/data/jsons/byom/onnxembeddings.json +267 -0
- teradataml/data/jsons/byom/onnxpredict.json +187 -0
- teradataml/data/jsons/byom/pmmlpredict.json +147 -0
- teradataml/data/jsons/paired_functions.json +450 -0
- teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/16.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/16.20/Pack.json +98 -0
- teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/16.20/Unpack.json +166 -0
- teradataml/data/jsons/sqle/16.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.00/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.00/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.00/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.00/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.05/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.05/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.05/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.05/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.10/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -0
- teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -0
- teradataml/data/jsons/sqle/17.10/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -0
- teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -0
- teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +69 -0
- teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -0
- teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_Histogram.json +133 -0
- teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +183 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +66 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +197 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +112 -0
- teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +157 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +148 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +119 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -0
- teradataml/data/jsons/sqle/17.10/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.10/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -0
- teradataml/data/jsons/sqle/17.20/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -0
- teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +149 -0
- teradataml/data/jsons/sqle/17.20/TD_Apriori.json +181 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
- teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -0
- teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +260 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -0
- teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -0
- teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeans.json +232 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +87 -0
- teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -0
- teradataml/data/jsons/sqle/17.20/TD_NERExtractor.json +145 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +316 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +124 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -0
- teradataml/data/jsons/sqle/17.20/TD_ROC.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +138 -0
- teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_SMOTE.json +267 -0
- teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -0
- teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +310 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +120 -0
- teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +194 -0
- teradataml/data/jsons/sqle/17.20/TD_Shap.json +221 -0
- teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +143 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_TextMorph.json +134 -0
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +297 -0
- teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -0
- teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
- teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +183 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -0
- teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -0
- teradataml/data/jsons/sqle/17.20/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +370 -0
- teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +460 -0
- teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +385 -0
- teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +400 -0
- teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +401 -0
- teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +384 -0
- teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +384 -0
- teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
- teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
- teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
- teradataml/data/jsons/tableoperator/17.00/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.10/read_nos.json +184 -0
- teradataml/data/jsons/tableoperator/17.10/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/tableoperator/17.20/read_nos.json +183 -0
- teradataml/data/jsons/tableoperator/17.20/write_nos.json +224 -0
- teradataml/data/jsons/uaf/17.20/TD_ACF.json +132 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +396 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +77 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +153 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
- teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +107 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +106 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +89 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +104 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +66 -0
- teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +87 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT.json +134 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +144 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_DIFF.json +92 -0
- teradataml/data/jsons/uaf/17.20/TD_DTW.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
- teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +39 -0
- teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +85 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +71 -0
- teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +139 -0
- teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECASTER.json +313 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +81 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
- teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +64 -0
- teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
- teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +182 -0
- teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +103 -0
- teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +181 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_MINFO.json +67 -0
- teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +179 -0
- teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +119 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +98 -0
- teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +194 -0
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
- teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +143 -0
- teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +90 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +80 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -0
- teradataml/data/jsons/uaf/17.20/TD_SINFO.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +163 -0
- teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +112 -0
- teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -0
- teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
- teradataml/data/kmeans_example.json +23 -0
- teradataml/data/kmeans_table.csv +10 -0
- teradataml/data/kmeans_us_arrests_data.csv +51 -0
- teradataml/data/knn_example.json +19 -0
- teradataml/data/knnrecommender_example.json +7 -0
- teradataml/data/knnrecommenderpredict_example.json +12 -0
- teradataml/data/lar_example.json +17 -0
- teradataml/data/larpredict_example.json +30 -0
- teradataml/data/lc_new_predictors.csv +5 -0
- teradataml/data/lc_new_reference.csv +9 -0
- teradataml/data/lda_example.json +9 -0
- teradataml/data/ldainference_example.json +15 -0
- teradataml/data/ldatopicsummary_example.json +9 -0
- teradataml/data/levendist_input.csv +13 -0
- teradataml/data/levenshteindistance_example.json +10 -0
- teradataml/data/linreg_example.json +10 -0
- teradataml/data/load_example_data.py +350 -0
- teradataml/data/loan_prediction.csv +295 -0
- teradataml/data/lungcancer.csv +138 -0
- teradataml/data/mappingdata.csv +12 -0
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/milk_timeseries.csv +157 -0
- teradataml/data/min_max_titanic.csv +4 -0
- teradataml/data/minhash_example.json +6 -0
- teradataml/data/ml_ratings.csv +7547 -0
- teradataml/data/ml_ratings_10.csv +2445 -0
- teradataml/data/mobile_data.csv +13 -0
- teradataml/data/model1_table.csv +5 -0
- teradataml/data/model2_table.csv +5 -0
- teradataml/data/models/License_file.txt +1 -0
- teradataml/data/models/License_file_empty.txt +0 -0
- teradataml/data/models/dataiku_iris_data_ann_thin +0 -0
- teradataml/data/models/dr_iris_rf +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn.onnx +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn_floattensor.onnx +0 -0
- teradataml/data/models/iris_db_glm_model.pmml +57 -0
- teradataml/data/models/iris_db_xgb_model.pmml +4471 -0
- teradataml/data/models/iris_kmeans_model +0 -0
- teradataml/data/models/iris_mojo_glm_h2o_model +0 -0
- teradataml/data/models/iris_mojo_xgb_h2o_model +0 -0
- teradataml/data/modularity_example.json +12 -0
- teradataml/data/movavg_example.json +8 -0
- teradataml/data/mtx1.csv +7 -0
- teradataml/data/mtx2.csv +13 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/mvdfft8.csv +9 -0
- teradataml/data/naivebayes_example.json +10 -0
- teradataml/data/naivebayespredict_example.json +19 -0
- teradataml/data/naivebayestextclassifier2_example.json +7 -0
- teradataml/data/naivebayestextclassifier_example.json +8 -0
- teradataml/data/naivebayestextclassifierpredict_example.json +32 -0
- teradataml/data/name_Find_configure.csv +10 -0
- teradataml/data/namedentityfinder_example.json +14 -0
- teradataml/data/namedentityfinderevaluator_example.json +10 -0
- teradataml/data/namedentityfindertrainer_example.json +6 -0
- teradataml/data/nb_iris_input_test.csv +31 -0
- teradataml/data/nb_iris_input_train.csv +121 -0
- teradataml/data/nbp_iris_model.csv +13 -0
- teradataml/data/ner_dict.csv +8 -0
- teradataml/data/ner_extractor_text.csv +2 -0
- teradataml/data/ner_input_eng.csv +7 -0
- teradataml/data/ner_rule.csv +5 -0
- teradataml/data/ner_sports_test2.csv +29 -0
- teradataml/data/ner_sports_train.csv +501 -0
- teradataml/data/nerevaluator_example.json +6 -0
- teradataml/data/nerextractor_example.json +18 -0
- teradataml/data/nermem_sports_test.csv +18 -0
- teradataml/data/nermem_sports_train.csv +51 -0
- teradataml/data/nertrainer_example.json +7 -0
- teradataml/data/ngrams_example.json +7 -0
- teradataml/data/notebooks/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -0
- teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -0
- teradataml/data/notebooks/sqlalchemy/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -0
- teradataml/data/npath_example.json +23 -0
- teradataml/data/ntree_example.json +14 -0
- teradataml/data/numeric_strings.csv +5 -0
- teradataml/data/numerics.csv +4 -0
- teradataml/data/ocean_buoy.csv +17 -0
- teradataml/data/ocean_buoy2.csv +17 -0
- teradataml/data/ocean_buoys.csv +28 -0
- teradataml/data/ocean_buoys2.csv +10 -0
- teradataml/data/ocean_buoys_nonpti.csv +28 -0
- teradataml/data/ocean_buoys_seq.csv +29 -0
- teradataml/data/onehot_encoder_train.csv +4 -0
- teradataml/data/openml_example.json +92 -0
- teradataml/data/optional_event_table.csv +4 -0
- teradataml/data/orders1.csv +11 -0
- teradataml/data/orders1_12.csv +13 -0
- teradataml/data/orders_ex.csv +4 -0
- teradataml/data/pack_example.json +9 -0
- teradataml/data/package_tracking.csv +19 -0
- teradataml/data/package_tracking_pti.csv +19 -0
- teradataml/data/pagerank_example.json +13 -0
- teradataml/data/paragraphs_input.csv +6 -0
- teradataml/data/pathanalyzer_example.json +8 -0
- teradataml/data/pathgenerator_example.json +8 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/pattern_matching_data.csv +11 -0
- teradataml/data/payment_fraud_dataset.csv +10001 -0
- teradataml/data/peppers.png +0 -0
- teradataml/data/phrases.csv +7 -0
- teradataml/data/pivot_example.json +9 -0
- teradataml/data/pivot_input.csv +22 -0
- teradataml/data/playerRating.csv +31 -0
- teradataml/data/pos_input.csv +40 -0
- teradataml/data/postagger_example.json +7 -0
- teradataml/data/posttagger_output.csv +44 -0
- teradataml/data/production_data.csv +17 -0
- teradataml/data/production_data2.csv +7 -0
- teradataml/data/randomsample_example.json +32 -0
- teradataml/data/randomwalksample_example.json +9 -0
- teradataml/data/rank_table.csv +6 -0
- teradataml/data/real_values.csv +14 -0
- teradataml/data/ref_mobile_data.csv +4 -0
- teradataml/data/ref_mobile_data_dense.csv +2 -0
- teradataml/data/ref_url.csv +17 -0
- teradataml/data/restaurant_reviews.csv +7 -0
- teradataml/data/retail_churn_table.csv +27772 -0
- teradataml/data/river_data.csv +145 -0
- teradataml/data/roc_example.json +8 -0
- teradataml/data/roc_input.csv +101 -0
- teradataml/data/rule_inputs.csv +6 -0
- teradataml/data/rule_table.csv +2 -0
- teradataml/data/sales.csv +7 -0
- teradataml/data/sales_transaction.csv +501 -0
- teradataml/data/salesdata.csv +342 -0
- teradataml/data/sample_cities.csv +3 -0
- teradataml/data/sample_shapes.csv +11 -0
- teradataml/data/sample_streets.csv +3 -0
- teradataml/data/sampling_example.json +16 -0
- teradataml/data/sax_example.json +17 -0
- teradataml/data/scale_attributes.csv +3 -0
- teradataml/data/scale_example.json +74 -0
- teradataml/data/scale_housing.csv +11 -0
- teradataml/data/scale_housing_test.csv +6 -0
- teradataml/data/scale_input_part_sparse.csv +31 -0
- teradataml/data/scale_input_partitioned.csv +16 -0
- teradataml/data/scale_input_sparse.csv +11 -0
- teradataml/data/scale_parameters.csv +3 -0
- teradataml/data/scale_stat.csv +11 -0
- teradataml/data/scalebypartition_example.json +13 -0
- teradataml/data/scalemap_example.json +13 -0
- teradataml/data/scalesummary_example.json +12 -0
- teradataml/data/score_category.csv +101 -0
- teradataml/data/score_summary.csv +4 -0
- teradataml/data/script_example.json +10 -0
- teradataml/data/scripts/deploy_script.py +84 -0
- teradataml/data/scripts/lightgbm/dataset.template +175 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +264 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +234 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +177 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/mapper.py +16 -0
- teradataml/data/scripts/mapper_replace.py +16 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +205 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +148 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +144 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +166 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +161 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +145 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +327 -0
- teradataml/data/sdk/modelops/modelops_spec.json +101737 -0
- teradataml/data/seeds.csv +10 -0
- teradataml/data/sentenceextractor_example.json +7 -0
- teradataml/data/sentiment_extract_input.csv +11 -0
- teradataml/data/sentiment_train.csv +16 -0
- teradataml/data/sentiment_word.csv +20 -0
- teradataml/data/sentiment_word_input.csv +20 -0
- teradataml/data/sentimentextractor_example.json +24 -0
- teradataml/data/sentimenttrainer_example.json +8 -0
- teradataml/data/sequence_table.csv +10 -0
- teradataml/data/seriessplitter_example.json +8 -0
- teradataml/data/sessionize_example.json +17 -0
- teradataml/data/sessionize_table.csv +116 -0
- teradataml/data/setop_test1.csv +24 -0
- teradataml/data/setop_test2.csv +22 -0
- teradataml/data/soc_nw_edges.csv +11 -0
- teradataml/data/soc_nw_vertices.csv +8 -0
- teradataml/data/souvenir_timeseries.csv +168 -0
- teradataml/data/sparse_iris_attribute.csv +5 -0
- teradataml/data/sparse_iris_test.csv +121 -0
- teradataml/data/sparse_iris_train.csv +601 -0
- teradataml/data/star1.csv +6 -0
- teradataml/data/star_pivot.csv +8 -0
- teradataml/data/state_transition.csv +5 -0
- teradataml/data/stock_data.csv +53 -0
- teradataml/data/stock_movement.csv +11 -0
- teradataml/data/stock_vol.csv +76 -0
- teradataml/data/stop_words.csv +8 -0
- teradataml/data/store_sales.csv +37 -0
- teradataml/data/stringsimilarity_example.json +8 -0
- teradataml/data/strsimilarity_input.csv +13 -0
- teradataml/data/students.csv +101 -0
- teradataml/data/svm_iris_input_test.csv +121 -0
- teradataml/data/svm_iris_input_train.csv +481 -0
- teradataml/data/svm_iris_model.csv +7 -0
- teradataml/data/svmdense_example.json +10 -0
- teradataml/data/svmdensepredict_example.json +19 -0
- teradataml/data/svmsparse_example.json +8 -0
- teradataml/data/svmsparsepredict_example.json +14 -0
- teradataml/data/svmsparsesummary_example.json +8 -0
- teradataml/data/target_mobile_data.csv +13 -0
- teradataml/data/target_mobile_data_dense.csv +5 -0
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/tdnerextractor_example.json +14 -0
- teradataml/data/templatedata.csv +1201 -0
- teradataml/data/templates/open_source_ml.json +11 -0
- teradataml/data/teradata_icon.ico +0 -0
- teradataml/data/teradataml_example.json +1473 -0
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_loan_prediction.csv +53 -0
- teradataml/data/test_pacf_12.csv +37 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/test_river2.csv +109 -0
- teradataml/data/text_inputs.csv +6 -0
- teradataml/data/textchunker_example.json +8 -0
- teradataml/data/textclassifier_example.json +7 -0
- teradataml/data/textclassifier_input.csv +7 -0
- teradataml/data/textclassifiertrainer_example.json +7 -0
- teradataml/data/textmorph_example.json +11 -0
- teradataml/data/textparser_example.json +15 -0
- teradataml/data/texttagger_example.json +12 -0
- teradataml/data/texttokenizer_example.json +7 -0
- teradataml/data/texttrainer_input.csv +11 -0
- teradataml/data/tf_example.json +7 -0
- teradataml/data/tfidf_example.json +14 -0
- teradataml/data/tfidf_input1.csv +201 -0
- teradataml/data/tfidf_train.csv +6 -0
- teradataml/data/time_table1.csv +535 -0
- teradataml/data/time_table2.csv +14 -0
- teradataml/data/timeseriesdata.csv +1601 -0
- teradataml/data/timeseriesdatasetsd4.csv +105 -0
- teradataml/data/timestamp_data.csv +4 -0
- teradataml/data/titanic.csv +892 -0
- teradataml/data/titanic_dataset_unpivoted.csv +19 -0
- teradataml/data/to_num_data.csv +4 -0
- teradataml/data/tochar_data.csv +5 -0
- teradataml/data/token_table.csv +696 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/train_tracking.csv +28 -0
- teradataml/data/trans_dense.csv +16 -0
- teradataml/data/trans_sparse.csv +55 -0
- teradataml/data/transformation_table.csv +6 -0
- teradataml/data/transformation_table_new.csv +2 -0
- teradataml/data/tv_spots.csv +16 -0
- teradataml/data/twod_climate_data.csv +117 -0
- teradataml/data/uaf_example.json +529 -0
- teradataml/data/univariatestatistics_example.json +9 -0
- teradataml/data/unpack_example.json +10 -0
- teradataml/data/unpivot_example.json +25 -0
- teradataml/data/unpivot_input.csv +8 -0
- teradataml/data/url_data.csv +10 -0
- teradataml/data/us_air_pass.csv +37 -0
- teradataml/data/us_population.csv +624 -0
- teradataml/data/us_states_shapes.csv +52 -0
- teradataml/data/varmax_example.json +18 -0
- teradataml/data/vectordistance_example.json +30 -0
- teradataml/data/ville_climatedata.csv +121 -0
- teradataml/data/ville_tempdata.csv +12 -0
- teradataml/data/ville_tempdata1.csv +12 -0
- teradataml/data/ville_temperature.csv +11 -0
- teradataml/data/waveletTable.csv +1605 -0
- teradataml/data/waveletTable2.csv +1605 -0
- teradataml/data/weightedmovavg_example.json +9 -0
- teradataml/data/wft_testing.csv +5 -0
- teradataml/data/windowdfft.csv +16 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/data/word_embed_input_table1.csv +6 -0
- teradataml/data/word_embed_input_table2.csv +5 -0
- teradataml/data/word_embed_model.csv +23 -0
- teradataml/data/words_input.csv +13 -0
- teradataml/data/xconvolve_complex_left.csv +6 -0
- teradataml/data/xconvolve_complex_leftmulti.csv +6 -0
- teradataml/data/xgboost_example.json +36 -0
- teradataml/data/xgboostpredict_example.json +32 -0
- teradataml/data/ztest_example.json +16 -0
- teradataml/dataframe/__init__.py +0 -0
- teradataml/dataframe/copy_to.py +2446 -0
- teradataml/dataframe/data_transfer.py +2840 -0
- teradataml/dataframe/dataframe.py +20908 -0
- teradataml/dataframe/dataframe_utils.py +2114 -0
- teradataml/dataframe/fastload.py +794 -0
- teradataml/dataframe/functions.py +2110 -0
- teradataml/dataframe/indexer.py +424 -0
- teradataml/dataframe/row.py +160 -0
- teradataml/dataframe/setop.py +1171 -0
- teradataml/dataframe/sql.py +10904 -0
- teradataml/dataframe/sql_function_parameters.py +440 -0
- teradataml/dataframe/sql_functions.py +652 -0
- teradataml/dataframe/sql_interfaces.py +220 -0
- teradataml/dataframe/vantage_function_types.py +675 -0
- teradataml/dataframe/window.py +694 -0
- teradataml/dbutils/__init__.py +3 -0
- teradataml/dbutils/dbutils.py +2871 -0
- teradataml/dbutils/filemgr.py +318 -0
- teradataml/gen_ai/__init__.py +2 -0
- teradataml/gen_ai/convAI.py +473 -0
- teradataml/geospatial/__init__.py +4 -0
- teradataml/geospatial/geodataframe.py +1105 -0
- teradataml/geospatial/geodataframecolumn.py +392 -0
- teradataml/geospatial/geometry_types.py +926 -0
- teradataml/hyperparameter_tuner/__init__.py +1 -0
- teradataml/hyperparameter_tuner/optimizer.py +4115 -0
- teradataml/hyperparameter_tuner/utils.py +303 -0
- teradataml/lib/__init__.py +0 -0
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/lib/libaed_0_1_aarch64.so +0 -0
- teradataml/lib/libaed_0_1_ppc64le.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/_base.py +1321 -0
- teradataml/opensource/_class.py +464 -0
- teradataml/opensource/_constants.py +61 -0
- teradataml/opensource/_lightgbm.py +949 -0
- teradataml/opensource/_sklearn.py +1008 -0
- teradataml/opensource/_wrapper_utils.py +267 -0
- teradataml/options/__init__.py +148 -0
- teradataml/options/configure.py +489 -0
- teradataml/options/display.py +187 -0
- teradataml/plot/__init__.py +3 -0
- teradataml/plot/axis.py +1427 -0
- teradataml/plot/constants.py +15 -0
- teradataml/plot/figure.py +431 -0
- teradataml/plot/plot.py +810 -0
- teradataml/plot/query_generator.py +83 -0
- teradataml/plot/subplot.py +216 -0
- teradataml/scriptmgmt/UserEnv.py +4273 -0
- teradataml/scriptmgmt/__init__.py +3 -0
- teradataml/scriptmgmt/lls_utils.py +2157 -0
- teradataml/sdk/README.md +79 -0
- teradataml/sdk/__init__.py +4 -0
- teradataml/sdk/_auth_modes.py +422 -0
- teradataml/sdk/_func_params.py +487 -0
- teradataml/sdk/_json_parser.py +453 -0
- teradataml/sdk/_openapi_spec_constants.py +249 -0
- teradataml/sdk/_utils.py +236 -0
- teradataml/sdk/api_client.py +900 -0
- teradataml/sdk/constants.py +62 -0
- teradataml/sdk/modelops/__init__.py +98 -0
- teradataml/sdk/modelops/_client.py +409 -0
- teradataml/sdk/modelops/_constants.py +304 -0
- teradataml/sdk/modelops/models.py +2308 -0
- teradataml/sdk/spinner.py +107 -0
- teradataml/series/__init__.py +0 -0
- teradataml/series/series.py +537 -0
- teradataml/series/series_utils.py +71 -0
- teradataml/store/__init__.py +12 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +658 -0
- teradataml/store/feature_store/feature_store.py +4814 -0
- teradataml/store/feature_store/mind_map.py +639 -0
- teradataml/store/feature_store/models.py +7330 -0
- teradataml/store/feature_store/utils.py +390 -0
- teradataml/table_operators/Apply.py +979 -0
- teradataml/table_operators/Script.py +1739 -0
- teradataml/table_operators/TableOperator.py +1343 -0
- teradataml/table_operators/__init__.py +2 -0
- teradataml/table_operators/apply_query_generator.py +262 -0
- teradataml/table_operators/query_generator.py +493 -0
- teradataml/table_operators/table_operator_query_generator.py +462 -0
- teradataml/table_operators/table_operator_util.py +726 -0
- teradataml/table_operators/templates/dataframe_apply.template +184 -0
- teradataml/table_operators/templates/dataframe_map.template +176 -0
- teradataml/table_operators/templates/dataframe_register.template +73 -0
- teradataml/table_operators/templates/dataframe_udf.template +67 -0
- teradataml/table_operators/templates/script_executor.template +170 -0
- teradataml/telemetry_utils/__init__.py +0 -0
- teradataml/telemetry_utils/queryband.py +53 -0
- teradataml/utils/__init__.py +0 -0
- teradataml/utils/docstring.py +527 -0
- teradataml/utils/dtypes.py +943 -0
- teradataml/utils/internal_buffer.py +122 -0
- teradataml/utils/print_versions.py +206 -0
- teradataml/utils/utils.py +451 -0
- teradataml/utils/validators.py +3305 -0
- teradataml-20.0.0.8.dist-info/METADATA +2804 -0
- teradataml-20.0.0.8.dist-info/RECORD +1208 -0
- teradataml-20.0.0.8.dist-info/WHEEL +5 -0
- teradataml-20.0.0.8.dist-info/top_level.txt +1 -0
- teradataml-20.0.0.8.dist-info/zip-safe +1 -0
@@ -0,0 +1,2871 @@
+"""
+Copyright (c) 2018 by Teradata Corporation. All rights reserved.
+TERADATA CORPORATION CONFIDENTIAL AND TRADE SECRET
+
+Primary Owner: rameshchandra.d@teradata.com
+Secondary Owner: sanath.vobilisetty@teradata.com
+
+teradataml db utilities
+----------
+teradataml database utility functions provide an interface to common Teradata Vantage tasks such as drop_table, drop_view, create_table, etc.
+"""
+import concurrent.futures
+import enum
+import json
+import os
+import re
+import shutil
+import tempfile
+from datetime import datetime
+import functools
+
+import pandas as pd
+from sqlalchemy import (CheckConstraint, Column, ForeignKeyConstraint,
+                        MetaData, PrimaryKeyConstraint, Table,
+                        UniqueConstraint)
+from sqlalchemy.sql.functions import Function
+from teradatasql import OperationalError
+from teradatasqlalchemy.dialect import TDCreateTablePost as post
+from teradatasqlalchemy.dialect import dialect as td_dialect
+from teradatasqlalchemy.dialect import preparer
+
+import teradataml.dataframe as tdmldf
+from teradataml.common.constants import (SessionParamsPythonNames,
+                                         SessionParamsSQL, SQLConstants,
+                                         TableOperatorConstants,
+                                         TeradataTableKindConstants)
+from teradataml.common.exceptions import TeradataMlException
+from teradataml.common.messagecodes import MessageCodes
+from teradataml.common.messages import Messages
+from teradataml.common.sqlbundle import SQLBundle
+from teradataml.common.utils import UtilFuncs
+from teradataml.common.logger import get_td_logger
+from teradataml.context import context as tdmlctx
+from teradataml.options.configure import configure
+from teradataml.telemetry_utils.queryband import collect_queryband
+from teradataml.utils.internal_buffer import _InternalBuffer
+from teradataml.utils.utils import execute_sql
+from teradataml.utils.validators import _Validators
+
+
+@collect_queryband(queryband='DrpTbl')
+def db_drop_table(table_name, schema_name=None, suppress_error=False,
+                  datalake_name=None, purge=None):
+    """
+    DESCRIPTION:
+        Drops the table from the given schema.
+
+    PARAMETERS:
+        table_name:
+            Required Argument.
+            Specifies the table name to be dropped.
+            Types: str
+
+        schema_name:
+            Optional Argument.
+            Specifies schema of the table to be dropped. If schema is not specified, function drops table from the
+            current database.
+            Default Value: None
+            Types: str
+
+        suppress_error:
+            Optional Argument.
+            Specifies whether to raise error or not.
+            Default Value: False
+            Types: bool
+
+        datalake_name:
+            Optional Argument.
+            Specifies name of the datalake to drop table from.
+            Note:
+                "schema_name" must be provided while using this argument.
+            Default Value: None
+            Types: str
+
+        purge:
+            Optional Argument.
+            Specifies whether to use the purge clause or not while dropping a datalake table.
+            It is only applicable when the "datalake_name" argument is used. When "datalake_name" is specified,
+            but "purge" is not specified, data is purged by default.
+            Default Value: None
+            Types: bool
+
+    RETURNS:
+        True - if the operation is successful.
+
+    RAISES:
+        TeradataMlException - If the table doesn't exist.
+
+    EXAMPLES:
+        >>> load_example_data("dataframe", "admissions_train")
+
+        # Example 1: Drop table in current database.
+        >>> db_drop_table(table_name = 'admissions_train')
+
+        # Example 2: Drop table from the given schema.
+        >>> db_drop_table(table_name = 'admissions_train', schema_name = 'alice')
+
+        # Example 3: Drop a table from datalake and purge the data.
+        >>> db_drop_table(table_name = 'datalake_table', schema_name = 'datalake_db',
+        ...               datalake_name='datalake', purge=True)
+
+    """
+    # Argument validations
+    awu_matrix = []
+    awu_matrix.append(["schema_name", schema_name, True, (str), True])
+    awu_matrix.append(["table_name", table_name, False, (str), True])
+    awu_matrix.append(["datalake_name", datalake_name, True, (str), True])
+    awu_matrix.append(["purge", purge, True, (bool, type(None)), True])
+    awu_matrix.append(["suppress_error", suppress_error, True, (bool)])
+    # Validate argument types
+    _Validators._validate_function_arguments(awu_matrix)
+
+    # Process datalake related arguments.
+    purge_clause = None
+    if datalake_name is not None:
+        if schema_name is None:
+            err_ = Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING, "schema_name",
+                                        "datalake_name")
+            raise TeradataMlException(err_, MessageCodes.DEPENDENT_ARG_MISSING)
+
+        if purge is False:
+            purge_clause = "NO PURGE"
+        else:
+            purge_clause = "PURGE ALL"
+
+    # Joining table and schema names in the format "schema_name"."table_name".
+    table_name = _get_quoted_object_name(schema_name, table_name, datalake_name)
+
+    try:
+        return UtilFuncs._drop_table(table_name, purge_clause=purge_clause)
+    except (TeradataMlException, OperationalError):
+        if suppress_error:
+            pass
+        else:
+            raise
+    except Exception as err:
+        if suppress_error:
+            pass
+        else:
+            raise TeradataMlException(Messages.get_message(MessageCodes.DROP_FAILED, "table",
+                                                           table_name),
+                                      MessageCodes.DROP_FAILED) from err
+
+
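A small usage sketch building on the semantics above (illustrative only: the table and datalake names are hypothetical, and a connected teradataml session is assumed):

    >>> # suppress_error=True makes cleanup idempotent; nothing is raised
    >>> # if the table was already dropped.
    >>> db_drop_table(table_name='staging_scores', suppress_error=True)
    >>>
    >>> # For datalake tables, omitting "purge" defaults to the PURGE ALL
    >>> # clause; pass purge=False to emit NO PURGE instead.
    >>> db_drop_table(table_name='lake_tbl', schema_name='lake_db',
    ...               datalake_name='my_datalake', purge=False)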
+@collect_queryband(queryband='DrpVw')
+def db_drop_view(view_name, schema_name=None, suppress_error=False):
+    """
+    DESCRIPTION:
+        Drops the view from the given schema.
+
+    PARAMETERS:
+        view_name:
+            Required Argument.
+            Specifies view name to be dropped.
+            Types: str
+
+        schema_name:
+            Optional Argument.
+            Specifies schema of the view to be dropped. If schema is not specified, function drops view from the current
+            database.
+            Default Value: None
+            Types: str
+
+        suppress_error:
+            Optional Argument.
+            Specifies whether to raise error or not.
+            Default Value: False
+            Types: bool
+
+    RETURNS:
+        True - if the operation is successful.
+
+    RAISES:
+        TeradataMlException - If the view doesn't exist.
+
+    EXAMPLES:
+        # Create a view.
+        >>> execute_sql("create view temporary_view as (select 1 as dummy_col1, 2 as dummy_col2);")
+
+        # Drop view in current schema.
+        >>> db_drop_view(view_name = 'temporary_view')
+
+        # Drop view from the given schema.
+        >>> db_drop_view(view_name = 'temporary_view', schema_name = 'alice')
+
+        # Drop view by suppressing errors.
+        >>> db_drop_view(view_name = 'temporary_view', suppress_error = True)
+    """
+    # Argument validations
+    awu_matrix = []
+    awu_matrix.append(["schema_name", schema_name, True, (str), True])
+    awu_matrix.append(["view_name", view_name, False, (str), True])
+    awu_matrix.append(["suppress_error", suppress_error, True, (bool)])
+
+    # Validate argument types
+    _Validators._validate_function_arguments(awu_matrix)
+
+    # Joining view and schema names in the format "schema_name"."view_name".
+    view_name = _get_quoted_object_name(schema_name, view_name)
+
+    try:
+        return UtilFuncs._drop_view(view_name)
+    except (TeradataMlException, OperationalError):
+        if suppress_error:
+            pass
+        else:
+            raise
+    except Exception as err:
+        if suppress_error:
+            pass
+        else:
+            raise TeradataMlException(Messages.get_message(MessageCodes.DROP_FAILED, "view",
+                                                           view_name),
+                                      MessageCodes.DROP_FAILED) from err
+
+
+@collect_queryband(queryband='LstTbls')
+def db_list_tables(schema_name=None, object_name=None, object_type='all', datalake_name=None):
+    """
+    DESCRIPTION:
+        Lists the Vantage object (table/view) names for the specified schema name.
+
+    PARAMETERS:
+        schema_name:
+            Optional Argument.
+            Specifies the name of schema in the database. If schema is not specified, function lists tables/views from
+            the current database.
+            Default Value: None
+            Types: str
+
+        object_name:
+            Optional Argument.
+            Specifies a table/view name or pattern to be used for filtering them from the database.
+            Pattern may contain '%' or '_' as pattern matching characters.
+            - '%' represents any string of zero or more arbitrary characters. Any string of characters is acceptable as
+              a replacement for the percent.
+            - '_' represents exactly one arbitrary character. Any single character is acceptable in the position in
+              which the underscore character appears.
+            Note:
+                * If '%' is specified in 'object_name', then the '_' character is not evaluated for an arbitrary character.
+            Default Value: None
+            Types: str
+            Example:
+                1. '%abc' will return all table/view object names starting with any character and ending with abc.
+                2. 'a_c' will return all table/view object names starting with 'a', ending with 'c' and having a length of 3.
+
+        object_type:
+            Optional Argument.
+            Specifies object type to apply the filter. Valid values for this argument are 'all', 'table', 'view',
+            'volatile', 'temp'.
+                * all - List all the object types.
+                * table - List only tables.
+                * view - List only views.
+                * volatile - List only volatile tables.
+                * temp - List all teradataml temporary objects created in the specified database.
+            Default Value: 'all'
+            Types: str
+
+        datalake_name:
+            Optional Argument.
+            Specifies the name of datalake to list tables from.
+            Note:
+                "schema_name" must be provided while using this argument.
+            Default Value: None
+            Types: str
+
+    RETURNS:
+        pandas DataFrame
+
+    RAISES:
+        TeradataMlException - If the object_type argument is provided with invalid values.
+        OperationalError - If any errors are raised from Vantage.
+
+    EXAMPLES:
+        # Example 1: List all object types in the default schema.
+        >>> load_example_data("dataframe", "admissions_train")
+        >>> db_list_tables()
+
+        # Example 2: List all the views in the default schema.
+        >>> execute_sql("create view temporary_view as (select 1 as dummy_col1, 2 as dummy_col2);")
+        >>> db_list_tables(None, None, 'view')
+
+        # Example 3: List all the object types in the default schema whose names begin with 'abc' followed by any number
+        # of characters in the end.
+        >>> execute_sql("create view abcd123 as (select 1 as dummy_col1, 2 as dummy_col2);")
+        >>> db_list_tables(None, 'abc%', None)
+
+        # Example 4: List all the tables in the default schema whose names begin with 'adm' followed by any number of
+        # characters and end with 'train'.
+        >>> load_example_data("dataframe", "admissions_train")
+        >>> db_list_tables(None, 'adm%train', 'table')
+
+        # Example 5: List all the views in the default schema whose names begin with any character but end with 'abc'.
+        >>> execute_sql("create view view_abc as (select 1 as dummy_col1, 2 as dummy_col2);")
+        >>> db_list_tables(None, '%abc', 'view')
+
+        # Example 6: List all the volatile tables in the default schema whose names begin with 'abc', end with any
+        # arbitrary character and have a length of 4.
+        >>> execute_sql("CREATE volatile TABLE abcd(col0 int, col1 float) NO PRIMARY INDEX;")
+        >>> db_list_tables(None, 'abc_', 'volatile')
+
+        # Example 7: List all the temporary objects created by teradataml in the default schema whose names begin and
+        # end with any number of arbitrary characters but contain 'filter' in between.
+        >>> db_list_tables(None, '%filter%', 'temp')
+
+        # Example 8: List all the tables in datalake's database.
+        >>> db_list_tables(schema_name='datalake_db_name', datalake_name='datalake_name')
+    """
+    if tdmlctx.get_connection() is None:
+        raise TeradataMlException(Messages.get_message(MessageCodes.INVALID_CONTEXT_CONNECTION),
+                                  MessageCodes.INVALID_CONTEXT_CONNECTION)
+
+    # Argument validations
+    awu_matrix = []
+    awu_matrix.append(["schema_name", schema_name, True, (str), True])
+    awu_matrix.append(["object_name", object_name, True, (str), True])
+    permitted_object_types = [TeradataTableKindConstants.ALL.value,
+                              TeradataTableKindConstants.TABLE.value,
+                              TeradataTableKindConstants.VIEW.value,
+                              TeradataTableKindConstants.VOLATILE.value,
+                              TeradataTableKindConstants.TEMP.value]
+    awu_matrix.append(["object_type", object_type, True, (str), True, permitted_object_types])
+    awu_matrix.append(["datalake_name", datalake_name, True, (str), True])
+    # Validate argument types
+    _Validators._validate_function_arguments(awu_matrix)
+
+    # 'schema_name' must be provided while using 'datalake_name'.
+    _Validators._validate_dependent_argument(dependent_arg='datalake_name',
+                                             dependent_arg_value=datalake_name,
+                                             independent_arg='schema_name',
+                                             independent_arg_value=schema_name)
+
+    try:
+        return _get_select_table_kind(schema_name, object_name, object_type, datalake_name)
+    except TeradataMlException:
+        raise
+    except OperationalError:
+        raise
+    except Exception as err:
+        raise TeradataMlException(Messages.get_message(MessageCodes.LIST_DB_TABLES_FAILED),
+                                  MessageCodes.LIST_DB_TABLES_FAILED) from err
+
+
+def _convert_sql_search_string_to_regex(sql_str):
+    """Internal function to convert SQL string matching patterns to python regex."""
+    if sql_str:
+        # sql_str[1:-1] removes the single quotes from sql_str.
+        sql_str = sql_str[1:-1]
+
+        # If '%' is specified in 'sql_str',
+        # then the '_' character is not evaluated for an arbitrary character.
+        if '%' in sql_str:
+            # Replace % with .* if not preceded by a backslash.
+            sql_str = re.sub(r'(?<!\\)%', r'.*', sql_str, flags=re.IGNORECASE)
+            # Remove the escape character for the replacements.
+            sql_str = sql_str.replace(r'\%', '%')
+        else:
+            # Replace _ with . if not preceded by a backslash.
+            sql_str = re.sub(r'(?<!\\)_', r'.', sql_str, flags=re.IGNORECASE)
+            # Remove the escape character for the replacements.
+            sql_str = sql_str.replace(r'\_', '_')
+
+        # Add boundaries if the string doesn't start or end with '.*' i.e. SQL '%'.
+        if not sql_str.startswith('.*'):
+            sql_str = '^' + sql_str  # Anchor to the start of the string.
+        if not sql_str.endswith('.*'):
+            sql_str = sql_str + '$'  # Anchor to the end of the string.
+    return sql_str
+
+
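The conversion rules are easiest to see on concrete inputs. An illustrative trace (inputs carry the surrounding single quotes, exactly as _get_select_table_kind builds them; outputs follow from the code above):

    >>> _convert_sql_search_string_to_regex("'%abc'")  # '%' present, so '_' is untouched
    '.*abc$'
    >>> _convert_sql_search_string_to_regex("'a_c'")   # no '%', so '_' becomes '.'
    '^a.c$'
    >>> _convert_sql_search_string_to_regex("'abc%'")
    '^abc.*'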
+def _get_select_table_kind(schema_name, table_name, table_kind, datalake_name):
+    """
+    Get the list of the table names from the specified schema name and datalake.
+
+    PARAMETERS:
+        schema_name - The name of schema in the database. The default value is the current database name.
+        table_name - The pattern to be used for filtering the table names from the database.
+                     The table name argument can contain '%' as a pattern matching character. For example, '%abc'
+                     will return all table names starting with any characters and ending with abc.
+        table_kind - The table kind to apply the filter. The valid values are 'all', 'table', 'view', 'volatile', 'temp'.
+                     all - list all the table kinds.
+                     table - list only tables.
+                     view - list only views.
+                     volatile - list only volatile tables.
+                     temp - list all teradataml temporary objects created in the specified database.
+        datalake_name - The name of datalake to search schema in.
+    RETURNS:
+        pandas DataFrame - if the operation is successful.
+
+    RAISES:
+        Database error if an error occurred while executing query.
+
+    EXAMPLES:
+        _get_select_table_kind("schema_name", "table_name", "all")
+    """
+    object_name_str = None
+    if table_name is not None:
+        object_name_str = "'{0}'".format(table_name)
+    object_table_kind = None
+
+    # TableKind:
+    # 'O' - stands for a table with no primary index and no partitioning.
+    # 'Q' - stands for a queue table.
+    # 'T' - stands for a table with a primary index or primary AMP index, partitioning, or both,
+    #       or a partitioned table with NoPI.
+    # 'V' - stands for a view.
+    if (table_kind == TeradataTableKindConstants.TABLE.value):
+        object_table_kind = ['O', 'Q', 'T']
+    elif (table_kind == TeradataTableKindConstants.VIEW.value):
+        object_table_kind = ['V']
+    elif (table_kind == TeradataTableKindConstants.TEMP.value):
+        if table_name is None:
+            object_name_str = "'{0}'".format(TeradataTableKindConstants.ML_PATTERN.value)
+        else:
+            object_name_str = "'{0}','{1}'".format(table_name,
+                                                   TeradataTableKindConstants.ML_PATTERN.value)
+    else:
+        object_table_kind = ['O', 'Q', 'T', 'V']
+
+    if datalake_name is None:
+        # Check the schema name.
+        if schema_name is None:
+            schema_name = tdmlctx._get_current_databasename()
+
+        # Create an empty dataframe with desired column name.
+        pddf = pd.DataFrame(columns=[TeradataTableKindConstants.REGULAR_TABLE_NAME.value])
+
+        # Check the table kind.
+        if table_kind != TeradataTableKindConstants.VOLATILE.value:
+            if object_table_kind is not None:
+                object_table_kind = ', '.join([f"'{value}'" for value in object_table_kind])
+            query = SQLBundle._build_select_table_kind(schema_name, object_name_str, object_table_kind)
+            get_td_logger().debug("Executing Query: %s", query)
+            pddf = pd.read_sql(query, tdmlctx.get_connection())
+
+        # Check if all table kind or volatile table kind is requested.
+        # If so, add volatile tables to the pddf.
+        if table_kind == TeradataTableKindConstants.ALL.value or \
+                table_kind == TeradataTableKindConstants.VOLATILE.value:
+            # Create list of volatile tables.
+            try:
+                vtquery = SQLBundle._build_help_volatile_table()
+                get_td_logger().debug("Executing Query: %s", vtquery)
+                vtdf = pd.read_sql(vtquery, tdmlctx.get_connection())
+                if not vtdf.empty:
+                    # Volatile table query returns different column names.
+                    # So, rename its column names to match with normal
+                    # 'SELECT TABLENAME FROM DBC.TABLESV' query results.
+                    columns_dict = {TeradataTableKindConstants.VOLATILE_TABLE_NAME.value:
+                                    TeradataTableKindConstants.REGULAR_TABLE_NAME.value}
+                    vtdf.rename(columns=columns_dict, inplace=True)
+                    # Volatile table names might contain leading whitespaces. Remove those.
+                    vtdf[TeradataTableKindConstants.REGULAR_TABLE_NAME.value] = vtdf[TeradataTableKindConstants.REGULAR_TABLE_NAME.value].str.strip()
+                    # Filter volatile tables using table name pattern.
+                    if object_name_str and (object_name_str := _convert_sql_search_string_to_regex(object_name_str)):
+                        name_filter = vtdf[TeradataTableKindConstants.REGULAR_TABLE_NAME.value].str.strip().str.match(
+                            object_name_str,
+                            na=False,
+                            flags=re.IGNORECASE)
+                        vtdf = vtdf[name_filter]
+                    # Concat existing list with volatile tables list.
+                    frames = [pddf, vtdf[[TeradataTableKindConstants.REGULAR_TABLE_NAME.value]]]
+                    pddf = pd.concat(frames)
+                    pddf.reset_index(drop=True, inplace=True)
+            except Exception as err:
+                # No volatile tables exist.
+                pass
+        else:
+            return pddf
+    else:
+        # TODO: when OTF team enables VSD support for datalake tables
+        # with epic: https://teradata-pe.atlassian.net/browse/OTF-454,
+        # this can be changed to use VSD_tablesV table which is
+        # similar to DBC.TABLESV.
+        # For datalake tables' information we need to use help database and
+        # then apply filter for table kind and table substring.
+        # We can't use select from DBC.TABLESV.
+        sqlbundle = SQLBundle()
+        help_db_sql = sqlbundle._get_sql_query(SQLConstants.SQL_HELP_DATABASE)
+        query = help_db_sql.format(_get_quoted_object_name(schema_name=datalake_name,
+                                                           object_name=schema_name))
+        get_td_logger().debug("Executing Query: %s", query)
+        pddf = pd.read_sql(query, tdmlctx.td_connection.connection)
+
+        if object_name_str:
+            object_name_str = _convert_sql_search_string_to_regex(object_name_str)
+            if object_name_str:
+                name_filter = pddf['Table/View/Macro Name'].str.strip().str.match(object_name_str, na=False,
+                                                                                  flags=re.IGNORECASE)
+                pddf = pddf[name_filter]
+
+        if object_table_kind is not None:
+            object_filter = pddf['Kind'].isin(object_table_kind)
+            pddf = pddf[object_filter]
+
+        columns_dict = {'Table/View/Macro Name':
+                        TeradataTableKindConstants.REGULAR_TABLE_NAME.value}
+        pddf.rename(columns=columns_dict, inplace=True)
+
+    # Return only filtered columns.
+    if not pddf.empty:
+        return pddf[[TeradataTableKindConstants.REGULAR_TABLE_NAME.value]]
+    else:
+        return pd.DataFrame()
+
+
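For reference, the branching above amounts to the following mapping from the requested kind to DBC TableKind codes (an illustrative restatement, not code from the package):

    # 'temp' is special: it adds the teradataml ML_PATTERN name filter
    # instead of a kind filter (object_table_kind stays None).
    KIND_CODES = {
        'table': ['O', 'Q', 'T'],     # NoPI, queue, and PI/partitioned tables
        'view': ['V'],
        'all': ['O', 'Q', 'T', 'V'],  # also the fallback branch
    }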
+def _execute_transaction(queries):
+    """
+    Internal function to execute the query or list of queries passed, as one transaction.
+
+    PARAMETERS:
+        queries:
+            Required argument.
+            Specifies a query or a list of queries to be executed as a single transaction.
+            Types: str or list of str
+
+    RAISES:
+        Exception
+
+    RETURNS:
+        None.
+
+    EXAMPLES:
+        >>> _execute_transaction([query1, query2])
+    """
+    auto_commit_off = "{fn teradata_nativesql}{fn teradata_autocommit_off}"
+    auto_commit_on = "{fn teradata_nativesql}{fn teradata_autocommit_on}"
+    con = None
+    cur = None
+
+    if queries is not None:
+        if isinstance(queries, str):
+            queries = [queries]
+
+        # Check if we have any queries to execute.
+        if len(queries) == 0:
+            return
+
+        try:
+            con = tdmlctx.td_connection
+            if con is None:
+                raise TeradataMlException(Messages.get_message(MessageCodes.CONNECTION_FAILURE),
+                                          MessageCodes.CONNECTION_FAILURE)
+            con = con.connection
+            cur = con.cursor()
+            # Set auto_commit to OFF.
+            cur.execute(auto_commit_off)
+            for query in queries:
+                cur.execute(query)
+
+            # Try committing the transaction.
+            con.commit()
+        except Exception:
+            # Let's first rollback (skipped when the connection was never
+            # established, since "con" is still None in that case).
+            if con is not None:
+                con.rollback()
+            # Now, let's raise the error as is.
+            raise
+        finally:
+            # Finally, we must set auto_commit back to ON. The guard avoids
+            # an AttributeError masking the original error when no cursor
+            # was ever created.
+            if cur is not None:
+                cur.execute(auto_commit_on)
+
+
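A minimal sketch of how this internal helper is driven (table names are hypothetical); both statements commit together or, on any failure, neither does:

    >>> _execute_transaction(["delete from sales_archive",
    ...                       "insert into sales_archive select * from sales"])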
+def db_transaction(func):
+    """
+    DESCRIPTION:
+        Function to execute another function in a transaction.
+
+    PARAMETERS:
+        func:
+            Required Argument.
+            Specifies the function to be executed in a single transaction.
+            Types: function
+
+    RETURNS:
+        The object returned by "func".
+
+    RAISES:
+        TeradataMlException, OperationalError
+
+    EXAMPLES:
+        # Example: Declare a function to delete all the records from two tables
+        # and execute the function in a transaction.
+        >>> @db_transaction
+        ... def delete_data(table1, table2):
+        ...     execute_sql("delete from {}".format(table1))
+        ...     execute_sql("delete from {}".format(table2))
+        ...     return True
+        >>> # Executing the above function in a transaction.
+        >>> delete_data("sales", "admissions_train")
+        True
+        >>>
+    """
+    @functools.wraps(func)
+    def execute_transaction(*args, **kwargs):
+        auto_commit_off = "{fn teradata_nativesql}{fn teradata_autocommit_off}"
+        auto_commit_on = "{fn teradata_nativesql}{fn teradata_autocommit_on}"
+        con = None
+        cur = None
+
+        result = None
+        try:
+            con = tdmlctx.td_connection
+            if con is None:
+                raise TeradataMlException(Messages.get_message(MessageCodes.CONNECTION_FAILURE),
+                                          MessageCodes.CONNECTION_FAILURE)
+            con = con.connection
+            cur = con.cursor()
+            # Set auto_commit to OFF.
+            cur.execute(auto_commit_off)
+
+            # Execute function.
+            result = func(*args, **kwargs)
+
+            # Try committing the transaction.
+            con.commit()
+        except Exception:
+            # Let's first rollback (skipped when the connection was never
+            # established).
+            if con is not None:
+                con.rollback()
+            # Now, let's raise the error as is.
+            raise
+        finally:
+            # Finally, we must set auto_commit to ON; guard against "cur"
+            # being None when no cursor was ever created.
+            if cur is not None:
+                cur.execute(auto_commit_on)
+
+        return result
+
+    return execute_transaction
+
+
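Because db_transaction is an ordinary decorator, it can also wrap an existing function after the fact; a hypothetical sketch:

    >>> def refresh_table(src, dst):
    ...     execute_sql("delete from {}".format(dst))
    ...     execute_sql("insert into {} select * from {}".format(dst, src))
    >>> atomic_refresh = db_transaction(refresh_table)
    >>> atomic_refresh("sales_stage", "sales")  # delete + insert commit as one unit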
+def _execute_stored_procedure(function_call, fetchWarnings=True, expect_none_result=False):
+    """
+    DESCRIPTION:
+        Executes the specified function call of the stored procedure, which contains
+        the function name and the parameters used by the function.
+
+    PARAMETERS:
+        function_call:
+            Required argument.
+            Specifies the Function object for the stored procedure to be executed.
+            This function object contains the stored procedure name along with its arguments.
+            Types: sqlalchemy.sql.functions.Function
+
+        fetchWarnings:
+            Optional Argument.
+            Specifies a flag that decides whether to raise warnings thrown from Vantage or not.
+            This will be the ideal behaviour for most of the stored procedures to fetch the warnings.
+            Default Value: True
+            Types: bool
+
+        expect_none_result:
+            Optional Argument.
+            When set to True, warnings are ignored, and only the result set is returned.
+            Returns None if the query does not produce a result set.
+            This option is ignored when fetchWarnings is set to True.
+            Default Value: False
+            Types: bool
+
+    RETURNS:
+        Results received from Vantage after the execution.
+
+    RAISES:
+        Exception thrown by Vantage.
+
+    EXAMPLES:
+        # No parameter needed by the stored procedure.
+        functioncall = func.SYSUIF.list_base_environments()
+        _execute_stored_procedure(functioncall)
+
+        # Parameters are passed to the stored procedure in the function call.
+        functioncall = func.SYSUIF.install_file('myfile', 'mapper.py', 'cz!/documents/mapper.py')
+        _execute_stored_procedure(functioncall, fetchWarnings=True)
+    """
+    __arg_info_matrix = []
+    __arg_info_matrix.append(["function_call", function_call, False, (Function)])
+    __arg_info_matrix.append(["fetchWarnings", fetchWarnings, True, (bool)])
+    __arg_info_matrix.append(["expect_none_result", expect_none_result, True, (bool)])
+
+    # Validate arguments
+    _Validators._validate_function_arguments(__arg_info_matrix)
+
+    sqlbundle = SQLBundle()
+
+    # Get the query for running the stored procedure.
+    exec_sp_stmt = sqlbundle._get_sql_query(SQLConstants.SQL_EXEC_STORED_PROCEDURE)
+    exec_sp_stmt = exec_sp_stmt.format(_get_function_call_as_string(function_call))
+
+    return UtilFuncs._execute_query(exec_sp_stmt, fetchWarnings, expect_none_result)
+
+
+def _get_function_call_as_string(sqlcFuncObj):
+    """
+    DESCRIPTION:
+        This function returns the string representation for the sqlalchemy.sql.functions.Function object,
+        which will be used to create the query that executes the function.
+
+    PARAMETERS:
+        sqlcFuncObj:
+            Required Argument.
+            Specifies the function object representing the SQL function call to be executed.
+
+    RAISES:
+        None
+
+    RETURNS:
+        String representation of the input Function.
+
+    EXAMPLES:
+        functioncall = func.SYSUIF.install_file("tdml_testfile", "test_script", "/root/test_script.py")
+        _get_function_call_as_string(functioncall)
+
+        Output:
+        "SYSUIF.install_file('tdml_testfile', 'test_script', '/root/test_script.py')"
+    """
+    # This is used by _execute_stored_procedure.
+    from teradatasqlalchemy.dialect import dialect as td_dialect
+    kw = dict({'dialect': td_dialect(),
+               'compile_kwargs':
+                   {
+                       'include_table': False,
+                       'literal_binds': True
+                   }
+               })
+
+    return str(sqlcFuncObj.compile(**kw))
+
+
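For context, the Function objects consumed here come from SQLAlchemy's generic func factory; a short sketch using the stored procedure from the docstring examples:

    from sqlalchemy import func

    # Attribute access on "func" builds a Function object; nothing executes yet.
    functioncall = func.SYSUIF.install_file('myfile', 'mapper.py', 'cz!/documents/mapper.py')

    # Compiling with literal_binds inlines the arguments as SQL literals:
    # "SYSUIF.install_file('myfile', 'mapper.py', 'cz!/documents/mapper.py')"
    print(_get_function_call_as_string(functioncall))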
+def _get_quoted_object_name(schema_name, object_name, datalake=None):
+    """
+    DESCRIPTION:
+        This function quotes and joins the schema name to the object name, which can either be a table or a view.
+
+    PARAMETERS:
+        schema_name:
+            Required Argument.
+            Specifies the schema name.
+            Types: str
+
+        object_name:
+            Required Argument.
+            Specifies the object name, either table or view.
+            Types: str
+
+        datalake:
+            Optional Argument.
+            Specifies the datalake name.
+            Default Value: None
+            Types: str
+
+    RAISES:
+        None
+
+    RETURNS:
+        Quoted and joined string of schema and object name.
+
+    EXAMPLES:
+        _get_quoted_object_name(schema_name = "alice", object_name = "admissions_train")
+
+        OUTPUT:
+        '"alice"."admissions_train"'
+    """
+    tdp = preparer(td_dialect)
+
+    if schema_name is not None:
+        schema_name = tdp.quote(schema_name)
+    else:
+        schema_name = tdp.quote(tdmlctx._get_current_databasename())
+
+    quoted_object_name = "{0}.{1}".format(schema_name, tdp.quote(object_name))
+    if datalake is not None:
+        quoted_object_name = "{}.{}".format(tdp.quote(datalake), quoted_object_name)
+    return quoted_object_name
+
+
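With a datalake supplied, the helper produces a three-part name; an illustrative call (values hypothetical):

    >>> _get_quoted_object_name("datalake_db", "datalake_table", "datalake")
    '"datalake"."datalake_db"."datalake_table"'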
+@collect_queryband(queryband='VwLg')
+def view_log(log_type="script", num_lines=1000, query_id=None, log_dir=None):
+    """
+    DESCRIPTION:
+        Function for viewing script, apply or byom log on Vantage.
+        Logs are pulled from the 'script_log' or 'byom.log' file on the database node.
+        When log_type is "script", logs are pulled from the 'scriptlog' file on the database node.
+        This is useful when Script.execute() is executed to run user scripts in Vantage.
+        When log_type is set to "apply", the function downloads the log files to a folder.
+        Notes:
+            * Log files are downloaded based on "log_dir".
+            * teradataml creates a sub directory with the name as "query_id"
+              and downloads the logs to the sub directory.
+            * Files for a "query_id" require a few seconds to generate;
+              provide "query_id" to view_log() after a few seconds, else an
+              empty sub directory is returned.
+
+    PARAMETERS:
+        log_type:
+            Optional Argument.
+            Specifies which logs to view.
+            If set to 'script', script log is pulled from the database node.
+            If set to 'byom', byom log is pulled from the database node.
+            If set to 'apply', logs are pulled from the kubernetes container.
+            Permitted Values: 'script', 'apply', 'byom'
+            Default Value: 'script'
+            Types: str
+
+        num_lines:
+            Optional Argument.
+            Specifies the number of lines to be read and displayed from the log.
+            Note:
+                This argument is applicable when log_type is 'script', otherwise ignored.
+            Default Value: 1000
+            Types: int
+
+        query_id:
+            Required Argument when log_type is 'apply', otherwise ignored.
+            Specifies the id of the query for which logs are to be retrieved.
+            This query id is part of the error message received when Apply class
+            or DataFrame apply method calls fail to execute the Apply table operator
+            query.
+            Types: str
+
+        log_dir:
+            Optional Argument.
+            Specifies the directory path to store all the log files for "query_id".
+            Notes:
+                * This argument is applicable when log_type is 'apply', otherwise ignored.
+                * When "log_dir" is not provided, the function creates a temporary folder
+                  and stores the log files in the temp folder.
+            Types: str
+
+    RETURNS:
+        When log_type="apply", downloads log files; otherwise returns a teradataml DataFrame.
+
+    RAISES:
+        TeradataMlException.
+
+    EXAMPLES:
+        # Example 1: View script log.
+        >>> view_log(log_type="script", num_lines=200)
+        >>> view_log(log_type="byom", num_lines=200)
+
+        # Example 2: Download the Apply query logs to a default temp folder.
+        # Use query id from the error messages returned by Apply class.
+        >>> view_log(log_type="apply", query_id='307161028465226056')
+        Logs for query_id "307161028465226056" are stored at "C:\\local_repo\\AppData\\Local\\Temp\\tmp00kuxlgu\\307161028465226056"
+
+        # Example 3: Download the Apply query logs to a specific folder.
+        # Use query id from the error messages returned by Apply class.
+        >>> view_log(log_type="apply", query_id='307161028465226056', log_dir='C:\\local_repo\\workspace')
+        Logs for query_id "307161028465226056" are stored at "C:\\local_repo\\workspace\\307161028465226056"
+    """
+    awu_matrix_test = []
+    awu_matrix_test.append((["num_lines", num_lines, True, (int), True]))
+    awu_matrix_test.append(("log_type", log_type, True, (str), True,
+                            [TableOperatorConstants.SCRIPT_LOG.value,
+                             TableOperatorConstants.APPLY_LOG.value,
+                             TableOperatorConstants.BYOM_LOG.value]))
+    # Validate argument type.
+    _Validators._validate_function_arguments(awu_matrix_test)
+
+    # Validate num_lines is a positive integer.
+    _Validators._validate_positive_int(num_lines, "num_lines")
+
+    awu_matrix_test.append(["query_id", query_id, True, (str), True])
+    awu_matrix_test.append(["log_dir", log_dir, True, (str), True])
+
+    # Validate argument type.
+    _Validators._validate_function_arguments(awu_matrix_test)
+
+    # log_type is script or byom.
+    if log_type.upper() in [TableOperatorConstants.SCRIPT_LOG.value, TableOperatorConstants.BYOM_LOG.value]:
+        # Validate num_lines is a positive integer.
+        _Validators._validate_positive_int(num_lines, "num_lines")
+
+        # Query for viewing the last n lines of the log.
+        view_log_query = TableOperatorConstants.SCRIPT_LOG_QUERY.value \
+            .format(num_lines, configure.default_varchar_size)
+
+    # log_type is apply.
+    else:
+        if query_id is None:
+            raise TeradataMlException(Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING,
+                                                           "query_id",
+                                                           "log_type=\"apply\""),
+                                      MessageCodes.DEPENDENT_ARG_MISSING)
+        if log_dir is not None:
+            if not os.path.exists(log_dir):
+                err_msg = 'The path \'{}\' does not exist.'.format(log_dir)
+                raise TeradataMlException(err_msg, MessageCodes.INPUT_FILE_NOT_FOUND)
+            if not os.path.isdir(log_dir):
+                err_msg = 'Please provide a directory path instead of the file path \'{}\'.'.format(log_dir)
+                raise TeradataMlException(err_msg, MessageCodes.INPUT_FILE_NOT_FOUND)
+        from teradataml.scriptmgmt.UserEnv import (_get_auth_token,
+                                                   _get_ues_url,
+                                                   _process_ues_response)
+        ues_url = _get_ues_url(logs=True, query_id=query_id)
+        response = UtilFuncs._http_request(ues_url, headers=_get_auth_token())
+        resp = _process_ues_response(api_name="view_log", response=response)
+        resp = resp.content.decode('utf-8')
+        jsons = json.loads(resp)
+        if log_dir is None:
+            log_dir = tempfile.mkdtemp()
+        log_dir = os.path.join(log_dir, query_id)
+        if os.path.exists(log_dir):
+            shutil.rmtree(log_dir)
+        os.mkdir(log_dir)
+        urls_and_files = [(log['url'], os.path.join(log_dir, log['name'])) for log in jsons['logs']]
+        failed_files = []
+        with concurrent.futures.ThreadPoolExecutor() as executor:
+            results = {executor.submit(_fetch_url_and_save, url, file_path):
+                       (os.path.basename(file_path)) for url, file_path in urls_and_files}
+            for future in concurrent.futures.as_completed(results):
+                try:
+                    file_name = results[future]
+                    future.result()
+                except (TeradataMlException, RuntimeError, Exception) as emsg:
+                    failed_files.append((file_name, emsg))
+        if len(failed_files) > 0:
+            emsg = ""
+            for msg in failed_files:
+                emsg += "\nUnable to download the file - {}. Reason: {}" \
+                    .format(msg[0], msg[1].args[0])
+            msg_code = MessageCodes.FUNC_EXECUTION_FAILED
+            error_msg = Messages.get_message(msg_code, "view_log()", emsg)
+            raise TeradataMlException(error_msg, msg_code)
+        else:
+            print("Logs for query_id \"{}\" are stored at \"{}\"".format(query_id, log_dir))
+    # Return a teradataml dataframe from query.
+    if log_type != 'apply':
+        return tdmldf.dataframe.DataFrame.from_query(view_log_query)
+
+
+def _fetch_url_and_save(url, file_path):
+    """
+    DESCRIPTION:
+        Download the file from the specified url and save it at the specified path.
+
+    PARAMETERS:
+        url:
+            Specifies the url from where the file needs to be downloaded.
+
+        file_path:
+            Specifies the path of the file where the downloaded content needs to be written.
+
+    RETURNS:
+        None
+    """
+    from teradataml.scriptmgmt.UserEnv import _process_ues_response
+    response = UtilFuncs._http_request(url)
+    resp = _process_ues_response(api_name="view_log", response=response)
+    with open(file_path, 'w') as file:
+        file.write(resp.content.decode('utf-8'))
+
+
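The view_log()/_fetch_url_and_save pairing uses a common fan-out idiom: map each future back to a label so failures can be reported by name. A self-contained sketch of the same pattern, with a generic urllib download standing in for the internal UES helpers:

    import concurrent.futures
    import os
    import urllib.request

    def _download(url, path):
        # Stand-in for _fetch_url_and_save.
        urllib.request.urlretrieve(url, path)

    def download_all(urls_and_files):
        failed = []
        with concurrent.futures.ThreadPoolExecutor() as executor:
            # Map each future back to its file name for error reporting.
            futures = {executor.submit(_download, url, path): os.path.basename(path)
                       for url, path in urls_and_files}
            for future in concurrent.futures.as_completed(futures):
                try:
                    future.result()  # re-raises any exception from the worker
                except Exception as exc:
                    failed.append((futures[future], exc))
        return failed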
+def _check_if_python_packages_installed():
+    """
+    DESCRIPTION:
+        Function to set the following global variables based on whether the Vantage node
+        has Python and add-on packages, including pip3, installed.
+            - 'python_packages_installed' to True or False.
+            - 'python_version_vantage' to the version of Python installed on Vantage.
+
+    PARAMETERS:
+        None.
+
+    RETURNS:
+        None.
+
+    RAISES:
+        Exception.
+
+    EXAMPLES:
+        _check_if_python_packages_installed()
+    """
+    if tdmlctx.python_packages_installed:
+        # Skip the check if Python and add-on packages are already installed and checked.
+        return
+
+    # Check if the Python interpreter and add-on packages are installed or not.
+    try:
+        query = TableOperatorConstants.CHECK_PYTHON_INSTALLED.value.format(configure.indb_install_location)
+        opt = UtilFuncs._execute_query(query=query)
+
+        python_version = opt[0][0].split(" -- ")[1].split(" ")[1].strip()
+
+        # If query execution is successful, then Python and add-on packages are
+        # present.
+        tdmlctx.python_packages_installed = True
+        tdmlctx.python_version_vantage = python_version
+    except Exception as err:
+        # Raise the exception if the error message does not contain
+        # "bash: pip3: command not found".
+        # The default value of the global variable "python_packages_installed"
+        # remains the same as was set during create_context/set_context.
+        if "bash: pip3: command not found" not in str(err):
+            raise
+
+
@collect_queryband(queryband='PkgDtls')
def db_python_package_details(names=None):
    """
    DESCRIPTION:
        Function to get the Python packages, installed on Vantage, and their corresponding
        versions.
        Note:
            Using this function is valid only when Python interpreter and add-on packages
            are installed on the Vantage node.

    PARAMETERS:
        names:
            Optional Argument.
            Specifies the name(s)/pattern(s) of the Python package(s) for which version
            information is to be fetched from Vantage. If this argument is not specified
            or None, versions of all installed Python packages are returned.
            Default Value: None
            Types: str OR list of Strings (str)

    RETURNS:
        teradataml DataFrame, if package(s) is/are present on Vantage.

    RAISES:
        TeradataMlException.

    EXAMPLES:
        # Note:
        # These examples will work only when the Python packages are installed on Vantage.

        # Example 1: Get the details of a Python package 'dill' from Vantage.
        >>> db_python_package_details("dill")
          package  version
        0    dill  0.2.8.2

        # Example 2: Get the details of Python packages, having string 'mpy', installed on Vantage.
        >>> db_python_package_details(names="mpy")
                 package  version
        0          simpy   3.0.11
        1          numpy   1.16.1
        2          gmpy2    2.0.8
        3  msgpack-numpy  0.4.3.2
        4          sympy      1.3

        # Example 3: Get the details of Python packages, having strings 'numpy' and 'learn',
        # installed on Vantage.
        >>> db_python_package_details(["numpy", "learn"])
                 package  version
        0   scikit-learn   0.20.3
        1          numpy   1.16.1
        2  msgpack-numpy  0.4.3.2

        # Example 4: Get the details of all Python packages installed on Vantage.
        >>> db_python_package_details()
                  package  version
        0       packaging     18.0
        1          cycler   0.10.0
        2           simpy   3.0.11
        3  more-itertools    4.3.0
        4          mpmath    1.0.0
        5           toolz    0.9.0
        6       wordcloud    1.5.0
        7         mistune    0.8.4
        8  singledispatch  3.4.0.3
        9           attrs   18.2.0

    """
    # Validate arguments.
    __arg_info_matrix = []
    __arg_info_matrix.append(["names", names, True, (str, list), True])

    _Validators._validate_function_arguments(arg_list=__arg_info_matrix)

    # Check if Python interpreter and add-on packages are installed or not.
    _check_if_python_packages_installed()

    # Raise error if Python and add-on packages are not installed.
    if not tdmlctx.python_packages_installed:
        raise TeradataMlException(Messages.get_message(MessageCodes.PYTHON_NOT_INSTALLED),
                                  MessageCodes.PYTHON_NOT_INSTALLED)

    package_str = ""
    # Add "grep ..." only when the argument "names" is mentioned.
    # Otherwise, all the package details are fetched.
    if names is not None:
        names = UtilFuncs._as_list(names)
        package_str = "|".join(names)
        package_str = "grep -E \"{0}\" | ".format(package_str)

    query = TableOperatorConstants.PACKAGE_VERSION_QUERY.value. \
        format(configure.indb_install_location, package_str, configure.default_varchar_size)

    ret_val = tdmldf.dataframe.DataFrame.from_query(query)

    if ret_val.shape[0] == 0:
        msg_str = "No Python package(s) found based on given search criteria : names = {}"
        print(msg_str.format(names))
        ret_val = None

    return ret_val

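# --- Illustrative sketch (not part of teradataml): shows how the optional
# "names" argument above is folded into the shell pipeline that lists the
# packages on the Vantage node. The helper name `_build_grep_filter` is
# hypothetical; it mirrors the string assembly done in the function body.
def _build_grep_filter(names):
    """Return the 'grep -E ... | ' fragment for the given name pattern(s)."""
    if names is None:
        return ""                      # No filter: list every package.
    if isinstance(names, str):
        names = [names]                # Mirrors UtilFuncs._as_list().
    # Alternation so any of the patterns may match, e.g. 'numpy|learn'.
    return 'grep -E "{0}" | '.format("|".join(names))

# Example: _build_grep_filter(["numpy", "learn"]) -> 'grep -E "numpy|learn" | '
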
def _db_python_package_version_diff(packages=None, only_diff=True):
    """
    DESCRIPTION:
        Internal function to get the pandas dataframe containing the difference in the Python
        packages installed on Vantage and the packages mentioned in the argument "packages".
        Notes:
            * Using this function is valid only when Python interpreter and add-on packages
              are installed on the Vantage node.
            * This function also checks for differences in Python package versions given
              part of a package name as string.
            * Returns a pandas dataframe of only the differences when the argument "only_diff"
              is set to True. Otherwise, returns all the packages.

    PARAMETERS:
        packages:
            Optional Argument.
            Specifies the name(s) of the Python package(s) for which the difference
            in the versions is to be fetched from Vantage.
            Notes:
                * If this argument is None, all the packages installed on Vantage are considered.
                * If any package is present in Vantage but not in the current environment, then None
                  is shown as the version of the package in the current environment.
            Types: str or list of str

        only_diff:
            Optional Argument.
            Specifies whether to return only the differences in the versions of the packages
            installed on Vantage and the packages mentioned in the argument "packages".
            Default Value: True
            Types: bool

    RETURNS:
        pandas DataFrame

    RAISES:
        TeradataMlException.

    EXAMPLES:
        # Note:
        # These examples will work only when the Python packages are installed on Vantage.

        # Example 1: Get the difference in the versions of Python packages 'dill' and 'matplotlib'
        # installed on Vantage.
        >>> _db_python_package_version_diff(["dill", "matplotlib"])
          package vantage  local
        0    dill   0.3.6  0.3.7

        # Example 2: Get the difference in the versions of Python packages 'dill' and 'matplotlib'
        # installed on Vantage and 'only_diff' argument set to False.
        >>> _db_python_package_version_diff(["dill", "matplotlib"], only_diff=False)
                     package vantage  local
        0  matplotlib-inline   0.1.6  0.1.6
        1               dill   0.3.6  0.3.7
        2         matplotlib   3.6.2  3.6.2
    """
    # Check if Python interpreter and add-on packages are installed or not.
    _check_if_python_packages_installed()

    # Raise error if Python and add-on packages are not installed.
    if not tdmlctx.python_packages_installed:
        raise TeradataMlException(Messages.get_message(MessageCodes.PYTHON_NOT_INSTALLED),
                                  MessageCodes.PYTHON_NOT_INSTALLED)

    # Installed packages dictionary.
    db_pkg_df = db_python_package_details(packages)
    if db_pkg_df is None:
        return None

    pkgs_dict = {row.package: row.version for row in db_pkg_df.itertuples()}

    from importlib.metadata import PackageNotFoundError, version
    diff_list = []

    for pkg in pkgs_dict.keys():
        vantage_version = pkgs_dict.get(pkg)
        try:
            local_version = version(pkg)
        except PackageNotFoundError:
            # If the package is not found in the current environment, then the local version is set to None.
            local_version = None
        except Exception:
            # Any other exception is raised.
            raise

        if only_diff:
            if vantage_version != local_version:
                # Add to the list only when the versions are different.
                diff_list.append([pkg, vantage_version, local_version])
        else:
            # Add all the packages and versions to the list irrespective of the differences.
            diff_list.append([pkg, vantage_version, local_version])

    return pd.DataFrame(diff_list, columns=["package", "vantage", "local"])

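# --- Illustrative sketch (not part of teradataml): the local-version probe the
# function above relies on. importlib.metadata.version() raises
# PackageNotFoundError for packages absent from the current environment, which
# is mapped to None in the diff rows.
from importlib.metadata import PackageNotFoundError, version

def _local_version_or_none(pkg):
    try:
        return version(pkg)
    except PackageNotFoundError:
        return None

# Example: _local_version_or_none("pip") -> e.g. '23.2.1'
#          _local_version_or_none("no-such-pkg") -> None
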
@collect_queryband(queryband='PythonDiff')
def db_python_version_diff():
    """
    DESCRIPTION:
        Function to get the difference of the Python interpreter major version installed on Vantage
        and the Python version used in the current environment.

        Note:
            * Using this function is valid only when Python interpreter and add-on packages
              are installed on the Vantage node.

    RETURNS:
        Empty dictionary when the Python major version is the same on Vantage and in the current environment.
        Otherwise, returns a dictionary with the following keys:
            - 'vantage_version': Python major version installed on Vantage.
            - 'local_version': Python major version used in the current environment.

    RAISES:
        TeradataMlException.

    EXAMPLES:
        # Note:
        # These examples will work only when the Python packages are installed on Vantage.

        # Example 1: Get the difference in the Python version installed on Vantage and the current environment.
        >>> db_python_version_diff()
        {"vantage_version": "3.7", "local_version": "3.8"}
    """
    # Check if Python interpreter and add-on packages are installed or not.
    _check_if_python_packages_installed()

    # Raise error if Python and add-on packages are not installed.
    if not tdmlctx.python_packages_installed:
        raise TeradataMlException(Messages.get_message(MessageCodes.PYTHON_NOT_INSTALLED),
                                  MessageCodes.PYTHON_NOT_INSTALLED)

    # Get the major version of Python installed on Vantage and in the current environment.
    python_local = tdmlctx.python_version_local.rsplit(".", 1)[0]
    python_vantage = tdmlctx.python_version_vantage.rsplit(".", 1)[0]

    if python_local != python_vantage:
        return {"vantage_version": python_vantage, "local_version": python_local}

    return {}

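# --- Illustrative sketch (not part of teradataml): the rsplit-based trimming
# used above to reduce a full interpreter version to its major.minor part.
def _major_minor(full_version):
    # '3.8.10'.rsplit('.', 1)[0] -> '3.8'
    return full_version.rsplit(".", 1)[0]

# Example: _major_minor('3.8.10') != _major_minor('3.7.4') -> versions differ.
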
@collect_queryband(queryband='PkgDiff')
def db_python_package_version_diff(packages=None):
    """
    DESCRIPTION:
        Function to get the difference in the versions of the Python packages installed
        on Vantage and in the current environment, for the packages mentioned in the
        argument "packages".

        Notes:
            * Using this function is valid only when Python interpreter and add-on packages
              are installed on the Vantage node.
            * This function also checks for differences in Python package versions given
              part of a package name as string.

    PARAMETERS:
        packages:
            Optional Argument.
            Specifies the name(s) of the Python package(s) for which the difference
            in the versions is to be fetched from Vantage.
            Notes:
                * If this argument is None, all the packages installed on Vantage are considered.
                * If any package is present in Vantage but not in the current environment, then None
                  is shown as the version of the package in the current environment.
            Types: str or list of str

    RETURNS:
        pandas DataFrame

    RAISES:
        TeradataMlException.

    EXAMPLES:
        # Note:
        # These examples will work only when the Python packages are installed on Vantage.

        # Example 1: Get the difference in the versions of Python package 'dill' installed on Vantage.
        >>> db_python_package_version_diff("dill")
          package vantage   local
        0    dill  0.10.0  0.11.2

        # Example 2: Get the difference in the versions of all Python packages installed on Vantage.
        >>> db_python_package_version_diff()
                  package vantage   local
        0    scikit-learn   1.3.3  0.24.2
        1            dill  0.10.0  0.11.2
        ...
        532         attrs  18.2.0  17.0.0

    """
    # Validate arguments.
    __arg_info_matrix = []
    __arg_info_matrix.append(["packages", packages, True, (str, list), True])

    _Validators._validate_function_arguments(arg_list=__arg_info_matrix)

    return _db_python_package_version_diff(packages=packages)

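# --- Illustrative sketch (not part of teradataml): a client-side pre-flight
# check built on the two public diff helpers above. Commented out because it
# assumes a connected teradataml context and that both functions are exported
# at the package top level.
# from teradataml import db_python_version_diff, db_python_package_version_diff
#
# if db_python_version_diff():
#     print("Interpreter major versions differ between Vantage and this client.")
# mismatches = db_python_package_version_diff(["numpy", "pandas"])
# if mismatches is not None and not mismatches.empty:
#     print(mismatches)
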
def _create_table(table_name,
                  columns,
                  primary_index=None,
                  unique=True,
                  temporary=False,
                  schema_name=None,
                  set_table=True,
                  **kwargs):
    """
    DESCRIPTION:
        This is an internal function used to construct a SQLAlchemy Table object.
        This function checks appropriate flags and supports creation of Teradata
        specific Table constructs such as Volatile/Primary Index tables and constraints.

    PARAMETERS:
        table_name:
            Required Argument.
            Specifies the name of SQL table.
            Types: str

        columns:
            Required Argument.
            Specifies a Python dictionary with column-name(key) to column-type(value) mapping
            to create table.
            Types: dict

        primary_index:
            Optional Argument.
            Specifies the column name(s) on which primary index needs to be created.
            Default Value: None
            Types: str OR list of Strings (str)

        unique:
            Optional Argument.
            Specifies whether index is unique primary index or not i.e.,
            if True, index column(s) does not accept duplicate values,
            if False, index column(s) accepts duplicate values.
            Default Value: True
            Types: bool

        temporary:
            Optional Argument.
            Specifies whether SQL table to be created is Volatile or not.
            Default Value: False
            Types: bool

        schema_name:
            Optional Argument.
            Specifies the name of the SQL schema in the database to write to.
            If not specified, table is created in default schema.
            Default Value: None
            Types: str

        set_table:
            Optional Argument.
            A flag specifying whether to create a SET table or a MULTISET table.
            When True, an attempt to create a SET table is made.
            When False, an attempt to create a MULTISET table is made.
            Default Value: True
            Types: bool

        **kwargs:
            Optional Argument.
            Specifies table-level constraints as keyword arguments.
            Each constraint argument can accept a string or a list of strings.
            Notes:
                * If the same constraint is to be applied multiple times,
                  conditions or columns should be mentioned as individual
                  elements in the list.
                * If the constraint is to be applied on multiple columns,
                  it should be mentioned in a tuple inside the list.
                * For foreign_key_constraint, value should be a list
                  containing 3 elements: constrained columns,
                  referenced columns and referenced table name.
                * If multiple foreign_key_constraint constraints are
                  to be specified, then a list of tuples containing
                  the 3 elements should be specified.
            Permitted values: check_constraint, primary_key_constraint,
                              foreign_key_constraint, unique_key_constraint.

    RETURNS:
        None

    RAISES:
        TeradataMlException.

    EXAMPLES:
        # Example 1: Create a table with primary key constraint.
        >>> _create_table(table_name=table_name, columns=columns_to_create, schema_name=schema_name,
                          primary_key_constraint='column_name', set_table=False)

        # Example 2: Create a table with multiple check constraints.
        >>> _create_table(table_name=table_name, columns=columns_to_create, schema_name=schema_name,
                          check_constraint=['column_name > value', 'column_name > value2'], set_table=False)

        # Example 3: Create a table with multiple columns as primary key in primary constraint.
        >>> _create_table(table_name=table_name, columns=columns_to_create, schema_name=schema_name,
                          primary_key_constraint=[('column_name','column_name')], set_table=False)

        # Example 4: Create a table with no constraint and no primary key.
        >>> _create_table(table_name=table_name, columns=columns_to_create, schema_name=schema_name,
                          set_table=False)

    """
    try:
        prefix = []
        pti = post(opts={})

        if temporary is True:
            pti = pti.on_commit(option='preserve')
            prefix.append('VOLATILE')

        if set_table:
            prefix.append('set')
        else:
            prefix.append('multiset')

        meta = MetaData()
        meta.bind = tdmlctx.get_context()

        if primary_index is not None:
            if isinstance(primary_index, list):
                pti = pti.primary_index(unique=unique, cols=primary_index)
            elif isinstance(primary_index, str):
                pti = pti.primary_index(unique=unique, cols=[primary_index])
        else:
            pti = pti.no_primary_index()

        con_form = []
        foreign_constraints = []
        for c_name, parameters in kwargs.items():
            _Validators._validate_function_arguments([["constraint_type", c_name, True, str,
                                                       True, SQLConstants.CONSTRAINT.value]])
            if c_name in 'check_constraint':
                parameters = UtilFuncs._as_list(parameters)
                [con_form.append("{}('{}')".format("CheckConstraint", col)) for col in parameters]
            if c_name in 'foreign_key_constraint':
                parameters = parameters if isinstance(parameters[0], tuple) else [tuple(parameters)]
                # Every element in parameters has 3 elements.
                # The 1st and 2nd elements are lists; the 3rd element is the name of the ForeignKey.
                for fk_columns, fk_ref_columns, fk_name in parameters:
                    fk_ref_column_objs = []

                    # fk_ref_columns is in this format - table_name.column_name .
                    # There is no provision for schema name here.
                    # sqlalchemy does not accept the notation schema_name.table_name.column_name here.
                    # So, create a Column object and bind schema name and table name to it.
                    for fk_ref_column in fk_ref_columns:
                        ref_column_table, ref_column = fk_ref_column.split(".")
                        t = Table(ref_column_table, MetaData(), Column(ref_column), schema=schema_name)
                        fk_ref_column_objs.append(getattr(t, "c")[ref_column])
                    foreign_constraints.append(ForeignKeyConstraint(fk_columns, fk_ref_column_objs, fk_name))

            if c_name in ['primary_key_constraint', 'unique_key_constraint']:
                c_name = "UniqueConstraint" if c_name in 'unique_key_constraint' else 'PrimaryKeyConstraint'
                parameters = UtilFuncs._as_list(parameters)
                [con_form.append("{}('{}')".format(c_name, "','".join(col))) if type(col) == tuple else con_form.append(
                    "{}('{}')".format(c_name, col)) for col in parameters]
        con_form.append("")

        # Create default Table construct with parameter dictionary.
        table_str = "Table(table_name, meta,*(Column(c_name, c_type) for c_name,c_type in" \
                    " columns.items()),{} teradatasql_post_create=pti,prefixes=prefix," \
                    "schema=schema_name)".format("" if con_form is None else ",".join(con_form))

        table = eval(table_str)
        for foreign_constraint in foreign_constraints:
            table.append_constraint(foreign_constraint)
        table.create(bind=tdmlctx.get_context())

    except Exception as err:
        msg_code = MessageCodes.EXECUTION_FAILED
        raise TeradataMlException(Messages.get_message(msg_code, "create table", str(err)), msg_code)

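# --- Illustrative sketch (not part of teradataml): how the constraint kwargs
# above are rendered into SQLAlchemy constraint-constructor strings before the
# Table expression is eval'd. Pure string assembly, no database needed; the
# helper name `_render_constraints` is hypothetical.
def _render_constraints(check_constraint=None, primary_key_constraint=None):
    con_form = []
    for cond in (check_constraint or []):
        con_form.append("CheckConstraint('{}')".format(cond))
    for col in (primary_key_constraint or []):
        if isinstance(col, tuple):   # Composite key -> one constraint, many columns.
            con_form.append("PrimaryKeyConstraint('{}')".format("','".join(col)))
        else:
            con_form.append("PrimaryKeyConstraint('{}')".format(col))
    return ", ".join(con_form)

# Example: _render_constraints(check_constraint=["c1 > 0"],
#                              primary_key_constraint=[("c1", "c2")])
# -> "CheckConstraint('c1 > 0'), PrimaryKeyConstraint('c1','c2')"
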
def _create_temporal_table(table_name,
                           columns,
                           validtime_columns,
                           primary_index=None,
                           partition_by_range=None,
                           schema_name=None,
                           skip_if_exists=False):
    """
    DESCRIPTION:
        Internal function used to create a validtime dimension temporal table.

    PARAMETERS:
        table_name:
            Required Argument.
            Specifies the name of SQL table.
            Types: str

        columns:
            Required Argument.
            Specifies a Python dictionary with column-name(key) to column-type(value) mapping
            to create table. Column-type can be of type string or teradatasqlalchemy type.
            Types: dict

        validtime_columns:
            Required Argument.
            Specifies the validtime columns to be created in the table.
            Note:
                The columns specified in "validtime_columns" should be present in
                "columns" argument.
            Types: tuple of str

        primary_index:
            Optional Argument.
            Specifies the column name(s) on which primary index needs to be created.
            Types: str OR list of Strings (str)

        partition_by_range:
            Optional Argument.
            Specifies the column name(s) on which partition by range needs to be created.
            Types: str OR ColumnExpression

        schema_name:
            Optional Argument.
            Specifies the name of the SQL schema in the database to write to.
            If not specified, table is created in default schema.
            Types: str

        skip_if_exists:
            Optional Argument.
            Specifies whether to ignore the "table already exists" error (3803)
            while creating the table.
            Default Value: False
            Types: bool

    RETURNS:
        bool

    RAISES:
        None

    EXAMPLES:
        >>> from teradataml.dbutils.dbutils import _create_temporal_table
        >>> from teradatasqlalchemy.types import *
        # Example: Create a temporal table "Table1" with primary key constraint, partition it by range.
        # Make sure to specify the validtime temporal columns 'start_time'
        # and 'end_time' from "columns".
        >>> _create_temporal_table(table_name="Table1",
        ...                        columns={"column1": "VARCHAR(100)",
        ...                                 "column2": INTEGER,
        ...                                 "start_time": "TIMESTAMP(6)",
        ...                                 "end_time": TIMESTAMP(6)},
        ...                        schema_name="vfs_test",
        ...                        primary_index='column_name',
        ...                        partition_by_range='column_name',
        ...                        validtime_columns=('start_time', 'end_time'))
    """
    # Prepare column clause first.
    columns_clause_ = ['{} {}'.format(k, v if isinstance(v, str)
                                      else v.compile(td_dialect())) for k, v in columns.items()]
    if validtime_columns:
        period_for_clause = ['PERIOD FOR ValidPeriod ({}, {}) AS VALIDTIME'.format(
            validtime_columns[0], validtime_columns[1])
        ]
    else:
        period_for_clause = []
    columns_clause = ",\n ".join(columns_clause_ + period_for_clause)

    # Prepare primary index clause.
    if primary_index:
        primary_index_clause = "PRIMARY INDEX ({})".format(
            ", ".join(UtilFuncs._as_list(primary_index)))
    else:
        primary_index_clause = ""

    # Prepare partition by range clause.
    if partition_by_range:
        partition_by_range_clause = "PARTITION BY RANGE_N({})".format(
            partition_by_range if isinstance(partition_by_range, str) else partition_by_range.compile())
    else:
        partition_by_range_clause = ""

    # Prepare create table statement.
    table_name = UtilFuncs._get_qualified_table_name(schema_name, table_name) if \
        schema_name else table_name
    sql = """
    CREATE MULTISET TABLE {}
    (\n{}\n)\n{}\n{}
    """.format(table_name, columns_clause, primary_index_clause, partition_by_range_clause)

    if skip_if_exists:
        execute_sql(sql, ignore_errors=3803)
    else:
        execute_sql(sql)

    return True

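# --- Illustrative sketch (not part of teradataml): the shape of the DDL the
# function above emits, reproduced with plain string column types only. The
# helper name `_preview_temporal_ddl` is hypothetical.
def _preview_temporal_ddl(table_name, columns, validtime_columns):
    column_defs = ["{} {}".format(k, v) for k, v in columns.items()]
    # The PERIOD FOR clause is what makes the table temporal.
    column_defs.append("PERIOD FOR ValidPeriod ({}, {}) AS VALIDTIME".format(*validtime_columns))
    return "CREATE MULTISET TABLE {}\n(\n {}\n)".format(table_name, ",\n ".join(column_defs))

# Example:
# print(_preview_temporal_ddl("Table1",
#                             {"c1": "VARCHAR(100)", "start_time": "TIMESTAMP(6)",
#                              "end_time": "TIMESTAMP(6)"},
#                             ("start_time", "end_time")))
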
def _create_database(schema_name, size='10e6', spool_size=None,
                     datalake=None, **kwargs):
    """
    DESCRIPTION:
        Internal function to create a database with the specified name and size.

    PARAMETERS:
        schema_name:
            Required Argument.
            Specifies the name of the database to create.
            Types: str

        size:
            Optional Argument.
            Specifies the number of bytes to allocate to the new database.
            Note:
                Exponential notation can also be used.
            Types: str or int

        spool_size:
            Optional Argument.
            Specifies the number of bytes to allocate to the new database
            for spool space.
            Note:
                Exponential notation can also be used.
            Types: str or int

        datalake:
            Optional Argument.
            Specifies the name of the datalake to create the database in.
            Types: str

        kwargs:
            Optional Argument.
            Specifies keyword arguments which are used in the DBPROPERTIES
            clause as key-value pairs while creating a datalake database.

    RETURNS:
        bool

    RAISES:
        TeradataMlException.

    EXAMPLES:
        >>> from teradataml.dbutils.dbutils import _create_database
        # Example 1: Create database.
        >>> _create_database("db_name1", "10e5")

        # Example 2: Create database in datalake.
        >>> _create_database("otf_db_1", datalake="datalake_iceberg_glue")

        # Example 3: Create database in datalake having DBPROPERTIES.
        >>> _create_database("otf_db", datalake="datalake_iceberg_glue",
        ...                  owner='tdml_user', other_property='some_value',
        ...                  other_property2=20, comment='Created by tdml_user')
    """
    if datalake:
        db_properties = []
        for key, val in kwargs.items():
            db_properties.append("'{}'='{}'".format(key, val))

        sql = "CREATE DATABASE {}.{}{};".format(datalake, schema_name,
                                                ' DBPROPERTIES({})'.format(','.join(db_properties))
                                                if db_properties else '')

    else:
        sql = "CREATE DATABASE {} FROM {} AS PERM = {}".format(schema_name, tdmlctx._get_database_username(), size)

        # If the user passes a spool size, create the database with the specified spool space.
        if spool_size:
            sql = "{} , SPOOL = {}".format(sql, spool_size)

    execute_sql(sql)
    return True

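# --- Illustrative sketch (not part of teradataml): the two SQL shapes produced
# above, one for a datalake database and one for a regular Vantage database.
# `owner_user` stands in for tdmlctx._get_database_username(); the helper name
# is hypothetical.
def _preview_create_database_sql(schema_name, owner_user, size='10e6',
                                 spool_size=None, datalake=None, **props):
    if datalake:
        db_properties = ",".join("'{}'='{}'".format(k, v) for k, v in props.items())
        suffix = " DBPROPERTIES({})".format(db_properties) if db_properties else ""
        return "CREATE DATABASE {}.{}{};".format(datalake, schema_name, suffix)
    sql = "CREATE DATABASE {} FROM {} AS PERM = {}".format(schema_name, owner_user, size)
    if spool_size:
        sql = "{} , SPOOL = {}".format(sql, spool_size)
    return sql

# Example: _preview_create_database_sql("db1", "dbadmin", "10e5", spool_size="10e4")
# -> 'CREATE DATABASE db1 FROM dbadmin AS PERM = 10e5 , SPOOL = 10e4'
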
def _update_data(update_columns_values, table_name, schema_name, datalake_name=None, update_conditions=None):
    """
    DESCRIPTION:
        Internal function to update the data in a table.

    PARAMETERS:
        update_columns_values:
            Required Argument.
            Specifies the columns and their values to update.
            Types: dict

        table_name:
            Required Argument.
            Specifies the name of the table to update.
            Types: str

        schema_name:
            Required Argument.
            Specifies the name of the database to update the data in the
            table "table_name".
            Types: str

        datalake_name:
            Optional Argument.
            Specifies the name of the datalake to look for "schema_name".
            Types: str

        update_conditions:
            Optional Argument.
            Specifies the key columns and their values which are used as condition
            for updating the records.
            Types: dict

    RETURNS:
        bool

    RAISES:
        TeradataMlException.

    EXAMPLES:
        >>> from teradataml.dbutils.dbutils import _update_data
        >>> _update_data(update_columns_values={"column1": "new_value"},
        ...              table_name="tbl",
        ...              schema_name="db_name1",
        ...              update_conditions={"column2": "value1"})
    """
    # Prepare the update clause.
    update_clause = ", ".join(("{} = ?".format(col) for col in update_columns_values))
    update_values = tuple((_value for _value in update_columns_values.values()))

    # If "update_conditions" is passed, then prepare the SQL with a where clause.
    # Else, simply update everything.
    qualified_table_name = _get_quoted_object_name(schema_name, table_name, datalake_name)

    get_str_ = lambda val: "'{}'".format(val) if isinstance(val, str) else val
    if update_conditions:

        # Prepare where clause.
        where_ = []
        for column, col_value in update_conditions.items():
            if isinstance(col_value, list):
                col_value = ", ".join(get_str_(val) for val in col_value)
                col_value = "({})".format(col_value)
                where_.append("{} IN {}".format(column, col_value))
            else:
                # Quote string values so the generated WHERE clause is valid SQL.
                where_.append("{} = {}".format(column, get_str_(col_value)))

        where_clause = " AND ".join(where_)

        sql = f"""UPDATE {qualified_table_name} SET {update_clause}
        WHERE {where_clause}
        """

        execute_sql(sql, (*update_values,))

    else:
        sql = f"""UPDATE {qualified_table_name} SET {update_clause}"""

        execute_sql(sql, update_values)
    return True

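# --- Illustrative sketch (not part of teradataml): the parameterized UPDATE
# assembled above. SET values travel as '?' bind parameters, while WHERE
# values are inlined (strings quoted, lists turned into IN-lists). The helper
# name `_preview_update_sql` is hypothetical.
def _preview_update_sql(table, update_columns_values, update_conditions):
    set_clause = ", ".join("{} = ?".format(c) for c in update_columns_values)
    quoted = lambda v: "'{}'".format(v) if isinstance(v, str) else v
    where = []
    for col, val in update_conditions.items():
        if isinstance(val, list):
            where.append("{} IN ({})".format(col, ", ".join(str(quoted(v)) for v in val)))
        else:
            where.append("{} = {}".format(col, quoted(val)))
    return "UPDATE {} SET {} WHERE {}".format(table, set_clause, " AND ".join(where))

# Example: _preview_update_sql("db.tbl", {"c1": 10}, {"k": ["a", "b"]})
# -> "UPDATE db.tbl SET c1 = ? WHERE k IN ('a', 'b')"
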
def _insert_data(table_name,
                 values,
                 columns=None,
                 schema_name=None,
                 datalake_name=None,
                 return_uid=False,
                 ignore_errors=None):
    """
    DESCRIPTION:
        Internal function to insert the data in a table.

    PARAMETERS:
        table_name:
            Required Argument.
            Specifies the name of the table to insert into.
            Types: str

        values:
            Required Argument.
            Specifies the values to insert.
            Types: tuple or list of tuples

        columns:
            Optional Argument.
            Specifies the names of the columns to be involved in the insert.
            Types: list

        schema_name:
            Optional Argument.
            Specifies the name of the database to insert the data in the
            table "table_name".
            Types: str

        datalake_name:
            Optional Argument.
            Specifies the name of the datalake to look for "schema_name".
            Types: str

        return_uid:
            Optional Argument.
            Specifies whether the function should return the unique identifier
            of the inserted row or not. When set to True, function returns the
            unique ID generated by Teradata Vantage for the inserted row. Otherwise,
            it returns True if the insert operation is successful.
            Note:
                This argument is only applicable when the table is created
                in such a way that it generates a unique ID automatically.
            Default Value: False
            Types: bool

        ignore_errors:
            Optional Argument.
            Specifies the error code(s) to ignore while inserting data.
            If this argument is not specified, no errors are ignored.
            Note:
                Error codes are Teradata Vantage error codes and not
                teradataml error codes.
            Default Value: None
            Types: int or list of int

    RETURNS:
        bool or int

    RAISES:
        TeradataMlException.

    EXAMPLES:
        >>> from teradataml.dbutils.dbutils import _insert_data
        >>> _insert_data("tbl", (1, 2, 3))
    """
    # Prepare the qualified table name.
    qualified_table_name = _get_quoted_object_name(schema_name, table_name, datalake_name)

    values = UtilFuncs._as_list(values)

    # Prepare columns clause.
    if columns:
        # Prepare question marks.
        _q_marks = ["?"] * len(columns)
        columns = "({})".format(", ".join(columns))
    else:
        columns = ""
        _q_marks = ["?"] * (len(values[0]))

    if not return_uid:
        sql = "insert into {} {} values ({});".format(qualified_table_name, columns, ", ".join(_q_marks))
        execute_sql(sql, values, ignore_errors)
        return True

    sql = "{{fn teradata_agkr(C)}}insert into {} {} values ({});".format(qualified_table_name, columns, ", ".join(_q_marks))
    c = execute_sql(sql, values, ignore_errors)
    return c.fetchone()[0]

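# --- Illustrative sketch (not part of teradataml): how the '?' placeholders in
# the INSERT above are sized, either from the explicit column list or from the
# width of the first row. The helper name `_preview_insert_sql` is hypothetical.
def _preview_insert_sql(table, rows, columns=None):
    if columns:
        q_marks = ["?"] * len(columns)
        col_clause = "({})".format(", ".join(columns))
    else:
        q_marks = ["?"] * len(rows[0])
        col_clause = ""
    return "insert into {} {} values ({});".format(table, col_clause, ", ".join(q_marks))

# Example: _preview_insert_sql("tbl", [(1, 2, 3)]) -> 'insert into tbl  values (?, ?, ?);'
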
def _upsert_data(update_columns_values,
                 insert_columns_values,
                 upsert_conditions,
                 table_name,
                 schema_name,
                 datalake_name=None):
    """
    DESCRIPTION:
        Internal function to either insert or update the data in a table.

    PARAMETERS:
        update_columns_values:
            Required Argument.
            Specifies the columns and their values to update.
            Types: dict

        insert_columns_values:
            Required Argument.
            Specifies the columns and their values to insert.
            Types: dict

        upsert_conditions:
            Required Argument.
            Specifies the key columns and their values which are used as condition
            for updating the records.
            Types: dict

        table_name:
            Required Argument.
            Specifies the name of the table to upsert.
            Types: str

        schema_name:
            Required Argument.
            Specifies the name of the database to update the data in the
            table "table_name".
            Types: str

        datalake_name:
            Optional Argument.
            Specifies the name of the datalake to look for "schema_name".
            Note:
                "schema_name" must be provided while using this argument.
            Types: str

    RETURNS:
        bool

    RAISES:
        TeradataMlException.

    EXAMPLES:
        >>> from teradataml.dbutils.dbutils import _upsert_data
        >>> _upsert_data(update_columns_values={"column1": "value1"},
        ...              insert_columns_values={"column1": "value2"},
        ...              upsert_conditions={"key1": "val1"},
        ...              table_name="tbl",
        ...              schema_name="db_name1")
    """
    # If the user passes a datalake name, then append the same to the schema name.
    qualified_table_name = _get_quoted_object_name(schema_name, table_name, datalake_name)

    # Prepare the update clause.
    update_clause = ", ".join(("{} = ?".format(col) for col in update_columns_values))
    update_values = tuple((_value for _value in update_columns_values.values()))

    # Prepare the where clause and its values.
    where_clause = " AND ".join(("{} = ?".format(col) for col in upsert_conditions))
    where_values = tuple((_value for _value in upsert_conditions.values()))

    # Prepare the insert clause and its values.
    insert_values_clause = ", ".join(("?" for _ in range(len(insert_columns_values))))
    insert_clause = "({}) values ({})".format(", ".join(insert_columns_values), insert_values_clause)
    insert_values = tuple((_value for _value in insert_columns_values.values()))

    sql = f"""UPDATE {qualified_table_name} SET {update_clause}
    WHERE {where_clause}
    ELSE INSERT {qualified_table_name} {insert_clause}
    """
    execute_sql(sql, (*update_values, *where_values, *insert_values))
    # Return True to mirror the documented bool return on success.
    return True

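# --- Illustrative sketch (not part of teradataml): the Teradata atomic upsert
# (UPDATE ... ELSE INSERT) built above, with every value passed as a '?' bind
# parameter ordered as (update values, where values, insert values). The
# helper name `_preview_upsert_sql` is hypothetical.
def _preview_upsert_sql(table, update_cols, key_cols, insert_cols):
    update_clause = ", ".join("{} = ?".format(c) for c in update_cols)
    where_clause = " AND ".join("{} = ?".format(c) for c in key_cols)
    insert_clause = "({}) values ({})".format(", ".join(insert_cols),
                                              ", ".join("?" for _ in insert_cols))
    return ("UPDATE {t} SET {u} WHERE {w} ELSE INSERT {t} {i}"
            .format(t=table, u=update_clause, w=where_clause, i=insert_clause))

# Example: _preview_upsert_sql("tbl", ["c1"], ["k1"], ["k1", "c1"])
# -> 'UPDATE tbl SET c1 = ? WHERE k1 = ? ELSE INSERT tbl (k1, c1) values (?, ?)'
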
def _merge_data(target_table,
                target_table_alias_name,
                source,
                source_alias_name,
                condition,
                matched_details=None,
                non_matched_clause=None,
                temporal_clause=None,
                target_table_schema=None,
                source_table_schema=None):
    """
    DESCRIPTION:
        Internal function to merge the data in a table.

    PARAMETERS:
        target_table:
            Required Argument.
            Specifies the name of the target table to merge.
            Types: str

        target_table_alias_name:
            Required Argument.
            Specifies the alias name of the target table to merge.
            Types: str

        source:
            Required Argument.
            Specifies the name of the source table to merge.
            Can be a table name or a teradataml DataFrame.
            Note:
                Source can be a SELECT statement also. In this case,
                one should add parentheses around the query. For example,
                value of source should be '(SELECT * FROM TABLE)' if
                source is a query.
            Types: str OR teradataml DataFrame

        source_alias_name:
            Required Argument.
            Specifies the alias name of the source table to merge.
            Types: str

        condition:
            Required Argument.
            Specifies the condition to merge the data.
            Types: str OR ColumnExpression

        matched_details:
            Optional Argument.
            Specifies what to do when the condition is matched.
            Teradata allows either UPDATE or DELETE when the condition is matched.
            Note:
                ColumnExpressions are not allowed for key 'set' since the
                references should carry the alias name and setting the alias
                name is not straightforward. Hence, not allowing it for now.
            Types: dict
            Example: {"action": "UPDATE", "set": {"col1": "src.col1", "col2": "src.col2"}}

        non_matched_clause:
            Optional Argument.
            Specifies what to do when the condition is not matched.
            Teradata allows INSERT when the condition is not matched.
            Note:
                ColumnExpressions are not allowed in 'values' since the
                references should carry the alias name and setting the alias
                name is not straightforward. Hence, not allowing it for now.
            Types: dict
            Example: {"action": "INSERT", "columns": ["col1", "col2"], "values": ["src.col1", "src.col2"]}

        temporal_clause:
            Optional Argument.
            Specifies the temporal clause to be added to the MERGE statement.
            Types: str

        target_table_schema:
            Optional Argument.
            Specifies the schema name of the target table.
            Types: str

        source_table_schema:
            Optional Argument.
            Specifies the schema name of the source table.
            Note:
                If source is a DataFrame, this argument is ignored.
            Types: str

    RETURNS:
        None

    RAISES:
        ValueError: If required parameters are missing or invalid.

    EXAMPLES:
        >>> _merge_data(
        ...     target_table="target_table",
        ...     target_table_alias_name="tgt",
        ...     source="source_table",
        ...     source_alias_name="src",
        ...     condition="tgt.id = src.id",
        ...     matched_details={"action": "UPDATE", "set": {"col1": "src.col1", "col2": "src.col2"}},
        ...     non_matched_clause={"action": "INSERT", "columns": ["id", "col1"], "values": ["src.id", "src.col1"]}
        ... )
    """
    # Note: Table names are not quoted because source can be a query also.
    # To keep the query intact, neither target tables nor source tables are
    # quoted. Hence it is the caller's responsibility to add quotes
    # if either the source table or the target table has special characters or
    # comes from the user.
    quote = UtilFuncs._get_dialect_quoted_name
    if target_table_schema:
        target_table = "{}.{}".format(quote(target_table_schema), target_table)
    else:
        target_table = target_table

    # If source is a DataFrame, extract the query from it.
    if isinstance(source, str):
        source = "{}.{}".format(quote(source_table_schema), source) \
            if source_table_schema else source
    else:
        source = "({})".format(source.show_query())

    # If condition is not a string, then prepare it from the ColumnExpression.
    condition = condition if isinstance(condition, str) else condition.compile()

    # Start building the MERGE statement.
    merge_sql = (f"MERGE INTO {target_table} AS {target_table_alias_name} \n\tUSING "
                 f"{source} AS {source_alias_name} \n\tON {condition}")

    # Handle matched clause.
    if matched_details:
        action = matched_details.get("action", "").upper()
        if action == "UPDATE":
            set_clause = ", ".join([f"{col} = {val}"
                                    for col, val in matched_details.get("set", {}).items()])
            merge_sql += f"\n\tWHEN MATCHED THEN \n\t\tUPDATE \n\t\tSET \n\t\t{set_clause}"
        elif action == "DELETE":
            merge_sql += "\n\tWHEN MATCHED THEN \n\tDELETE\n\t\t"
        else:
            raise ValueError("Invalid action in matched_details. Supported actions are 'UPDATE' and 'DELETE'.")

    # Handle non-matched clause.
    if non_matched_clause:
        action = non_matched_clause.get("action", "").upper()
        if action == "INSERT":
            columns = ", ".join(non_matched_clause.get("columns", []))
            values = ", ".join(non_matched_clause.get("values", []))
            merge_sql += f"\n\tWHEN NOT MATCHED THEN \n\t\tINSERT ({columns}) \n\t\tVALUES \n\t\t({values})"
        else:
            raise ValueError("Invalid action in non_matched_clause. Supported action is 'INSERT'.")

    # Finalize the statement.
    merge_sql += ";"
    if temporal_clause:
        merge_sql = "{} {}".format(temporal_clause, merge_sql)

    execute_sql(merge_sql)

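# --- Illustrative sketch (not part of teradataml): the statement the call in
# the EXAMPLES section above produces, reconstructed literally from the
# string-building steps in the function body.
EXPECTED_MERGE_SQL = (
    "MERGE INTO target_table AS tgt \n\tUSING source_table AS src \n\tON tgt.id = src.id"
    "\n\tWHEN MATCHED THEN \n\t\tUPDATE \n\t\tSET \n\t\tcol1 = src.col1, col2 = src.col2"
    "\n\tWHEN NOT MATCHED THEN \n\t\tINSERT (id, col1) \n\t\tVALUES \n\t\t(src.id, src.col1);"
)
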
def _delete_data(table_name, schema_name=None, datalake_name=None, delete_conditions=None, temporal_clause=None):
    """
    DESCRIPTION:
        Internal function to delete the data in a table.

    PARAMETERS:
        table_name:
            Required Argument.
            Specifies the name of the table to delete from.
            Types: str

        schema_name:
            Optional Argument.
            Specifies the name of the database to delete the data in the
            table "table_name".
            Types: str

        datalake_name:
            Optional Argument.
            Specifies the name of the datalake to look for "schema_name".
            Types: str

        delete_conditions:
            Optional Argument.
            Specifies the ColumnExpression or dictionary containing key-value
            pairs to use for removing the data.
            Types: ColumnExpression, dict

        temporal_clause:
            Optional Argument.
            Specifies the temporal clause to be added to the DELETE statement.
            Types: str

    RETURNS:
        int, specifies the number of records that are deleted.

    RAISES:
        TeradataMlException.

    EXAMPLES:
        >>> from teradataml.dbutils.dbutils import _delete_data
        >>> _delete_data("tbl", "db_name1", delete_conditions={"column1": "value1"})
    """
    qualified_table_name = _get_quoted_object_name(schema_name, table_name, datalake_name)
    sqlbundle = SQLBundle()

    sql = sqlbundle._get_sql_query(SQLConstants.SQL_DELETE_ALL_ROWS).format(qualified_table_name)

    # If conditions exist, then prepare the where clause.
    if delete_conditions:
        from teradataml.dataframe.sql import _SQLColumnExpression
        if isinstance(delete_conditions, _SQLColumnExpression):
            where_clause = delete_conditions.compile()
        elif isinstance(delete_conditions, dict):
            get_str_ = lambda val: "'{}'".format(val) if isinstance(val, str) else val
            where_ = []
            for column, col_value in delete_conditions.items():
                if isinstance(col_value, list):
                    col_value = ", ".join(get_str_(val) for val in col_value)
                    col_value = "({})".format(col_value)
                    where_.append("{} IN {}".format(column, col_value))
                else:
                    # Quote string values so the generated WHERE clause is valid SQL.
                    where_.append("{} = {}".format(column, get_str_(col_value)))
            where_clause = " AND ".join(where_)

        sql = sqlbundle._get_sql_query(SQLConstants.SQL_DELETE_SPECIFIC_ROW).format(qualified_table_name, where_clause)

    if temporal_clause:
        sql = "{} {}".format(temporal_clause, sql)

    res = execute_sql(sql)
    return res.rowcount

@collect_queryband(queryband='LstKwrds')
def list_td_reserved_keywords(key=None, raise_error=False):
    """
    DESCRIPTION:
        Function validates whether the specified string or list of strings is a Teradata reserved keyword or not.
        If "key" is not specified or is an empty list, lists all the Teradata reserved keywords.

    PARAMETERS:
        key:
            Optional Argument.
            Specifies a string or list of strings to validate for Teradata reserved keyword.
            Types: string or list of strings

        raise_error:
            Optional Argument.
            Specifies whether to raise an exception or not.
            When set to True, an exception is raised
            if the specified "key" contains a Teradata reserved keyword, otherwise not.
            Default Value: False
            Types: bool

    RETURNS:
        set, if "key" is None or an empty list.
        True, if "key" contains a Teradata reserved keyword, False otherwise.

    RAISES:
        TeradataMlException.

    EXAMPLES:
        >>> from teradataml import list_td_reserved_keywords
        >>> # Example 1: List all available Teradata reserved keywords.
        >>> list_td_reserved_keywords()
          restricted_word
        0             ABS
        1         ACCOUNT
        2            ACOS
        3           ACOSH
        4      ADD_MONTHS
        5           ADMIN
        6             ADD
        7     ACCESS_LOCK
        8    ABORTSESSION
        9           ABORT
        >>>

        >>> # Example 2: Validate if keyword "account" is a Teradata reserved keyword or not.
        >>> list_td_reserved_keywords("account")
        True
        >>>

        >>> # Example 3: Validate and raise exception if keyword "account" is a Teradata reserved keyword.
        >>> list_td_reserved_keywords("account", raise_error=True)
        TeradataMlException: [Teradata][teradataml](TDML_2121) '['ACCOUNT']' is a Teradata reserved keyword.

        >>> # Example 4: Validate if the list of keywords contains a Teradata reserved keyword or not.
        >>> list_td_reserved_keywords(["account", 'add', 'abc'])
        True

        >>> # Example 5: Validate and raise exception if the list of keywords contains a Teradata reserved keyword.
        >>> list_td_reserved_keywords(["account", 'add', 'abc'], raise_error=True)
        TeradataMlException: [Teradata][teradataml](TDML_2121) '['ADD', 'ACCOUNT']' is a Teradata reserved keyword.
    """
    is_res_key = False
    try:
        from teradataml.dataframe.dataframe import DataFrame, in_schema

        # Store the reserved keywords in the buffer.
        if _InternalBuffer.get("reservered_words") is None:
            # Get the reserved keywords from the table.
            reserved_keys = DataFrame(in_schema("SYSLIB", "SQLRestrictedWords"))
            _InternalBuffer.add(reservered_words={word_[0] for word_ in reserved_keys.itertuples(name=None)})

        reservered_words = _InternalBuffer.get("reservered_words")

        # If key is not passed or is an empty list, return the set of Teradata reserved keywords.
        if key is None or len(key) == 0:
            return reservered_words

        key = [key] if isinstance(key, str) else key

        res_key = (k.upper() for k in key if k.upper() in reservered_words)
        res_key = list(res_key)
        if len(res_key) > 0:
            is_res_key = True
            if raise_error:
                raise TeradataMlException(Messages.get_message(MessageCodes.RESERVED_KEYWORD, res_key),
                                          MessageCodes.RESERVED_KEYWORD)
            return True
        return False
    except TeradataMlException as e:
        if is_res_key:
            raise e
        return False

def _rename_table(old_table_name, new_table_name):
    """
    This function renames an existing table present in the database.

    PARAMETERS:
        old_table_name:
            Required Argument.
            Specifies the name of the existing table in Vantage.
            Types: str

        new_table_name:
            Required Argument.
            Specifies the new name for the existing table.
            Types: str

    RETURNS:
        None

    RAISES:
        None

    EXAMPLES:
        >>> load_example_data("dataframe", "sales")
        >>> _rename_table("sales", "new_sales")
    """
    # Query to rename the existing table.
    query = "RENAME TABLE {} TO {};".format(old_table_name, new_table_name)
    # Execute the rename query.
    UtilFuncs._execute_ddl_statement(query)

def _execute_query_and_generate_pandas_df(query, index=None, **kwargs):
    """
    DESCRIPTION:
        Function executes the provided query and returns a pandas DataFrame.

    PARAMETERS:
        query:
            Required Argument.
            Specifies the query that needs to be executed to form the pandas
            DataFrame.
            Types: str

        index:
            Optional Argument.
            Specifies column(s) to be used as the pandas index.
            Types: str OR list of Strings (str)

    RETURNS:
        pandas DataFrame.

    RAISES:
        TeradataMlException.

    EXAMPLES:
        pdf = _execute_query_and_generate_pandas_df("SELECT * from t1", "col1")
    """
    # Empty queryband buffer before SQL call.
    UtilFuncs._set_queryband()
    cur = execute_sql(query)
    columns = kwargs.pop('columns', [col[0] for col in cur.description])
    rows = cur.fetchall()
    if cur is not None:
        cur.close()

    # Set coerce_float to True for Decimal type columns.
    if 'coerce_float' not in kwargs:
        kwargs['coerce_float'] = True

    try:
        pandas_df = pd.DataFrame.from_records(data=list(tuple(row) for row in rows),
                                              columns=columns,
                                              index=index,
                                              **kwargs)
    except KeyError:
        raise TeradataMlException(
            Messages.get_message(MessageCodes.INVALID_PRIMARY_INDEX),
            MessageCodes.INVALID_PRIMARY_INDEX)
    except:
        raise TeradataMlException(
            Messages.get_message(MessageCodes.TDMLDF_SELECT_DF_FAIL),
            MessageCodes.TDMLDF_SELECT_DF_FAIL)

    return pandas_df

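# --- Illustrative sketch (not part of teradataml): the pandas call the helper
# above funnels rows into; coerce_float=True converts DECIMAL values returned
# by the driver into floats.
import pandas as pd
from decimal import Decimal

rows = [(1, Decimal("2.5")), (2, Decimal("3.5"))]
pdf = pd.DataFrame.from_records(data=rows, columns=["id", "amt"], coerce_float=True)
# pdf["amt"].dtype -> float64
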
def _is_trigger_exist(schema_name, trigger_names):
    """
    DESCRIPTION:
        Checks if all given triggers exist in the specified schema.

    PARAMETERS:
        schema_name:
            Required Argument.
            Specifies the schema/database name.
            Types: str

        trigger_names:
            Required Argument.
            Specifies the trigger name(s) to check.
            Types: str or list of str

    RETURNS:
        tuple - the first element specifies whether all provided triggers exist or not,
                the second element specifies the total number of triggers found from "trigger_names".

    RAISES:
        TeradataMlException

    EXAMPLES:
        >>> _is_trigger_exist("mydb", ["trg1", "trg2"])
        (True, 2)
        >>> _is_trigger_exist("mydb", ["trg1", "missing_trg"])
        (False, 1)
    """
    # Normalize trigger_names to a list.
    triggers = UtilFuncs._as_list(trigger_names)
    if not triggers:
        # Return a tuple, consistent with the documented return type.
        return False, 0

    # Prepare SQL to check all triggers in one call.
    triggers_str = ", ".join("'{}'".format(t) for t in triggers)
    sql = f"""
    SELECT TriggerName
    FROM DBC.TriggersV
    WHERE DatabaseName = '{schema_name}'
    AND TriggerName IN ({triggers_str})
    """

    try:
        result = execute_sql(sql)
        found = {row[0] for row in result.fetchall()}
        num_found = len(found)
        all_exist = all(t in found for t in triggers)
        return all_exist, num_found
    except Exception as e:
        raise TeradataMlException(
            Messages.get_message(MessageCodes.EXECUTION_FAILED, "_is_trigger_exist", str(e)),
            MessageCodes.EXECUTION_FAILED)

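# --- Illustrative sketch (not part of teradataml): the set arithmetic used
# above to decide whether every requested trigger was found.
requested = ["trg1", "missing_trg"]
found = {"trg1"}                       # Rows fetched from DBC.TriggersV.
all_exist = all(t in found for t in requested)
result = (all_exist, len(found))       # -> (False, 1)
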
class _TDSessionParams:
    """
    A successful connection through teradataml establishes a session with Vantage.
    Every session has default parameters. For example, one can set an offset value
    for the parameter 'Session Time Zone'.
    This is an internal utility to store all session related parameters.
    """

    def __init__(self, data):
        """
        Constructor to store columns and rows of session params.

        PARAMETERS:
            data:
                Required Argument.
                Specifies the session parameters.
                Types: dict
        """
        self.__session_params = data

    def __getitem__(self, parameter):
        """
        Return the value of a session parameter.

        PARAMETERS:
            parameter:
                Required Argument.
                Specifies the name of the session parameter.
                Types: str
        """
        if parameter in self.__session_params:
            return self.__session_params[parameter]
        raise AttributeError("'TDSessionParams' object has no attribute '{}'".format(parameter))

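# --- Illustrative sketch (not part of teradataml): _TDSessionParams is a thin
# read-only mapping; indexing with an unknown parameter raises AttributeError.
# Commented usage, assuming a populated instance:
# params = _TDSessionParams({"Session Time Zone": "'00:00'"})
# params["Session Time Zone"]   # -> "'00:00'"
# params["No Such Param"]       # -> AttributeError
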
def set_session_param(name, value):
    """
    DESCRIPTION:
        Function to set a session parameter.
        Note:
            * Look at the Vantage documentation for session parameters.

    PARAMETERS:
        name:
            Required Argument.
            Specifies the name of the parameter to set.
            Permitted Values: timezone, calendar, account, character_set_unicode,
                              collation, constraint, database, dateform, debug_function,
                              dot_notation, isolated_loading, function_trace, json_ignore_errors,
                              searchuifdbpath, transaction_isolation_level, query_band, udfsearchpath
            Types: str

        value:
            Required Argument.
            Specifies the value for the parameter "name" to set.
            Permitted Values:
                1. timezone: timezone strings
                2. calendar: Teradata, ISO, Compatible
                3. character_set_unicode: ON, OFF
                4. account: should be a list in which the first item is the "account string" and
                   the second is either SESSION or REQUEST.
                5. collation: ASCII, CHARSET_COLL, EBCDIC, HOST, JIS_COLL, MULTINATIONAL
                6. constraint: row_level_security_constraint_name {( level_name | category_name [,...] | NULL )}
                   where,
                       row_level_security_constraint_name:
                           Name of an existing constraint.
                           The specified constraint_name must be currently assigned to the user.
                           A user can specify a maximum of 6 hierarchical constraints and 2 non-hierarchical
                           constraints per SET SESSION CONSTRAINT statement.
                       level_name:
                           Name of a hierarchical level, valid for the constraint_name, that is to replace the
                           default level.
                           The specified level_name must be currently assigned to the user. Otherwise, Vantage
                           returns an error to the requestor.
                       category_name:
                           A set of one or more existing non-hierarchical category names valid for the
                           constraint_name.
                           Because all assigned category (non-hierarchical) constraint values assigned to a
                           user are automatically active, "set_session_param" is only useful to specify a
                           subset of the assigned categories for the constraint.
                           For example, assume that User BOB has 3 country codes, and wants to load a table
                           with data that is to be made available to User CARL who only has rights to see data
                           for his own country. User BOB can use "set_session_param" to specify only the
                           country code for User CARL when loading the data so Carl can access the data later.
                7. database: Name of the new default database for the remainder of the current session.
                8. dateform: ANSIDATE, INTEGERDATE
                9. debug_function: should be a list in which the first item is the "function_name" and
                   the second is either ON or OFF.
                10. dot_notation: DEFAULT, LIST, NULL ERROR
                11. isolated_loading: NO, '', CONCURRENT
                12. function_trace: should be a list in which the first item is the "mask_string" and
                    the second is the table name.
                13. json_ignore_errors: ON, OFF
                14. searchuifdbpath: string in the format 'database_name, user_name'
                15. transaction_isolation_level: READ UNCOMMITTED, RU, SERIALIZABLE, SR
                16. query_band: should be a list in which the first item is the "band_specification" and
                    the second is either SESSION or TRANSACTION.
                17. udfsearchpath: should be a list in which the first item is the "database_name" and
                    the second is the "udf_name".
            Types: str or list of strings

    RETURNS:
        True, if the session parameter is set successfully.

    RAISES:
        ValueError, teradatasql.OperationalError

    EXAMPLES:
        # Example 1: Set time zone offset for the session as the system default.
        >>> set_session_param('timezone', 'LOCAL')
        True

        # Example 2: Set time zone to "AMERICA PACIFIC".
|
|
2499
|
+
>>> set_session_param('timezone', "'AMERICA PACIFIC'")
|
|
2500
|
+
True
|
|
2501
|
+
|
|
2502
|
+
# Example 3: Set time zone to "-07:00".
|
|
2503
|
+
>>> set_session_param('timezone', "'-07:00'")
|
|
2504
|
+
True
|
|
2505
|
+
|
|
2506
|
+
# Example 4: Set time zone to 3 hours ahead of 'GMT'.
|
|
2507
|
+
>>> set_session_param('timezone', "3")
|
|
2508
|
+
True
|
|
2509
|
+
|
|
2510
|
+
# Example 6: Set calendar to 'COMPATIBLE'.
|
|
2511
|
+
>>> set_session_param('calendar', "COMPATIBLE")
|
|
2512
|
+
True
|
|
2513
|
+
|
|
2514
|
+
# Example 7: Dynamically changes your account to 'dbc' for the remainder of the session.
|
|
2515
|
+
>>> set_session_param('account', ['dbc', 'SESSION'])
|
|
2516
|
+
True
|
|
2517
|
+
|
|
2518
|
+
# Example 8: Enables Unicode Pass Through processing.
|
|
2519
|
+
>>> set_session_param('character_set_unicode', 'ON')
|
|
2520
|
+
True
|
|
2521
|
+
|
|
2522
|
+
# Example 9: Session set to ASCII collation.
|
|
2523
|
+
>>> set_session_param('collation', 'ASCII')
|
|
2524
|
+
True
|
|
2525
|
+
|
|
2526
|
+
# Example 10: The resulting session has a row-level security label consisting of an unclassified level
|
|
2527
|
+
# and nato category.
|
|
2528
|
+
>>> set_session_param('constraint', 'classification_category (norway)')
|
|
2529
|
+
True
|
|
2530
|
+
|
|
2531
|
+
# Example 11: Changes the default database for the session.
|
|
2532
|
+
>>> set_session_param('database', 'alice')
|
|
2533
|
+
True
|
|
2534
|
+
|
|
2535
|
+
# Example 12: Changes the DATE format to 'INTEGERDATE'.
|
|
2536
|
+
>>> set_session_param('dateform', 'INTEGERDATE')
|
|
2537
|
+
True
|
|
2538
|
+
|
|
2539
|
+
# Example 13: Enable Debugging for the Session.
|
|
2540
|
+
>>> set_session_param('debug_function', ['function_name', 'ON'])
|
|
2541
|
+
True
|
|
2542
|
+
|
|
2543
|
+
# Example 14: Sets the session response for dot notation query result.
|
|
2544
|
+
>>> set_session_param('dot_notation', 'DEFAULT')
|
|
2545
|
+
True
|
|
2546
|
+
|
|
2547
|
+
# Example 15: DML operations are not performed as concurrent load isolated operations.
|
|
2548
|
+
>>> set_session_param('isolated_loading', 'NO')
|
|
2549
|
+
True
|
|
2550
|
+
|
|
2551
|
+
# Example 16: Enables function trace output for debugging external user-defined functions and
|
|
2552
|
+
# external SQL procedures for the current session.
|
|
2553
|
+
>>> set_session_param('function_trace', ["'diag,3'", 'titanic'])
|
|
2554
|
+
True
|
|
2555
|
+
|
|
2556
|
+
# Example 17: Enables the validation of JSON data on INSERT operations.
|
|
2557
|
+
>>> set_session_param('json_ignore_errors', 'ON')
|
|
2558
|
+
True
|
|
2559
|
+
|
|
2560
|
+
# Example 18: Sets the database search path for the SCRIPT execution in the SessionTbl.SearchUIFDBPath column.
|
|
2561
|
+
>>> set_session_param('SEARCHUIFDBPATH', 'dbc, alice')
|
|
2562
|
+
True
|
|
2563
|
+
|
|
2564
|
+
# Example 19: Sets the read-only locking severity for all SELECT requests made against nontemporal tables,
|
|
2565
|
+
# whether they are outer SELECT requests or subqueries, in the current session to READ regardless
|
|
2566
|
+
# of the setting for the DBS Control parameter AccessLockForUncomRead.
|
|
2567
|
+
# Note: SR and SERIALIZABLE are synonyms.
|
|
2568
|
+
>>> set_session_param('TRANSACTION_ISOLATION_LEVEL', 'SR')
|
|
2569
|
+
True
|
|
2570
|
+
|
|
2571
|
+
# Example 20: This example uses the PROXYROLE name:value pair in a query band to set the proxy
|
|
2572
|
+
# role in a trusted session to a specific role.
|
|
2573
|
+
>>> set_session_param('query_band', ["'PROXYUSER=fred;PROXYROLE=administration;'", 'SESSION'])
|
|
2574
|
+
True
|
|
2575
|
+
|
|
2576
|
+
# Example 21: Allows you to specify a custom UDF search path. When you execute a UDF,
|
|
2577
|
+
# Vantage searches this path first, before looking in the default Vantage
|
|
2578
|
+
# search path for the UDF.
|
|
2579
|
+
>>> set_session_param('udfsearchpath', ["alice, SYSLIB, TD_SYSFNLIB", 'bitor'])
|
|
2580
|
+
True
|
|
2581
|
+
"""
|
|
2582
|
+
# Validate argument types
|
|
2583
|
+
function_args = []
|
|
2584
|
+
function_args.append(["name", name, False, str, True])
|
|
2585
|
+
function_args.append(["value", value, False, (int, str, float, list), False])
|
|
2586
|
+
_Validators._validate_function_arguments(function_args)
|
|
2587
|
+
|
|
2588
|
+
# Validate Permitted values for session parameter name.
|
|
2589
|
+
permitted_session_parameters = [key.name for key in SessionParamsSQL]
|
|
2590
|
+
_Validators._validate_permitted_values(arg=name,
|
|
2591
|
+
permitted_values=permitted_session_parameters,
|
|
2592
|
+
arg_name='name',
|
|
2593
|
+
case_insensitive=True,
|
|
2594
|
+
includeNone=False)
|
|
2595
|
+
|
|
2596
|
+
if not isinstance(value, list):
|
|
2597
|
+
value = [value]
|
|
2598
|
+
|
|
2599
|
+
# Before setting the session, first extract the session parameters
|
|
2600
|
+
# and store it in buffer. This helps while unsetting the parameter.
|
|
2601
|
+
result = execute_sql('help session')
|
|
2602
|
+
data = dict(zip(
|
|
2603
|
+
[param[0] for param in result.description],
|
|
2604
|
+
[value for value in next(result)]
|
|
2605
|
+
))
|
|
2606
|
+
_InternalBuffer.add(session_params=_TDSessionParams(data))
|
|
2607
|
+
# Store function name of 'DEBUG_FUNCTION' used.
|
|
2608
|
+
_InternalBuffer.add(function_name=value[0] if name.upper() == 'DEBUG_FUNCTION' else '')
|
|
2609
|
+
|
|
2610
|
+
# Set the session parameter.
|
|
2611
|
+
execute_sql(getattr(SessionParamsSQL, name.upper()).value.format(*value))
|
|
2612
|
+
return True
|
|
2613
|
+
|
|
2614
|
+
|
|
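# set_session_param depends on SessionParamsSQL mapping each parameter name to a SQL
# template that is filled in with "value". A minimal sketch of that shape (the members
# shown here are hypothetical; the real enum is defined elsewhere in teradataml):
#
#     class SessionParamsSQL(enum.Enum):
#         TIMEZONE = "SET TIME ZONE {}"
#         DATABASE = "DATABASE {}"
#         QUERY_BAND = "SET QUERY_BAND = {} FOR {}"
#
# With a mapping like that, set_session_param('timezone', 'LOCAL') would execute
# "SET TIME ZONE LOCAL", and set_session_param('query_band', ["'a=b;'", 'SESSION'])
# would execute "SET QUERY_BAND = 'a=b;' FOR SESSION".
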
def unset_session_param(name):
    """
    DESCRIPTION:
        Function to unset a session parameter.

    PARAMETERS:
        name:
            Required Argument.
            Specifies the parameter to unset for the session.
            Permitted Values: timezone, account, calendar, collation,
                              database, dateform, character_set_unicode,
                              debug_function, isolated_loading, function_trace,
                              json_ignore_errors, query_band
            Types: str

    RETURNS:
        True, if the session parameter is unset successfully.

    RAISES:
        ValueError, teradatasql.OperationalError

    EXAMPLES:
        # Example 1: Reset the session's time zone to the previous time zone.
        >>> set_session_param('timezone', "'GMT+1'")
        True
        >>> unset_session_param("timezone")
        True

    """
    # Validate argument types.
    function_args = []
    function_args.append(["name", name, True, str, True])
    _Validators._validate_function_arguments(function_args)

    # Validate permitted values for session parameter names which can be unset.
    permitted_session_parameters = [key.name for key in SessionParamsPythonNames] +\
                                   ["character_set_unicode", "debug_function",
                                    "isolated_loading", "function_trace",
                                    "json_ignore_errors", "query_band"]
    _Validators._validate_permitted_values(arg=name,
                                           permitted_values=permitted_session_parameters,
                                           arg_name='name',
                                           case_insensitive=True,
                                           includeNone=False)

    # Check whether the session parameter was set in the first place.
    session_params = _InternalBuffer.get('session_params')
    if session_params is None:
        msg_code = MessageCodes.FUNC_EXECUTION_FAILED
        error_msg = Messages.get_message(msg_code, "unset_session_param", "Set the parameter before unsetting it.")
        raise TeradataMlException(error_msg, msg_code)
    if name.upper() == "DEBUG_FUNCTION":
        # If the parameter to unset is debug_function, check whether a function name is available to unset.
        if _InternalBuffer.get('function_name') in ('', None):
            raise TeradataMlException(
                Messages.get_message(MessageCodes.FUNC_EXECUTION_FAILED,
                                     "unset_session_param",
                                     "Set the parameter before unsetting it."),
                MessageCodes.FUNC_EXECUTION_FAILED)
    # unset_values holds the parameters whose previous value is not available in
    # _InternalBuffer; it maps each such parameter to the value (or SQL) that unsets it.
    # TODO: Unsetting ISOLATED_LOADING should revert to the previous behaviour, but we set
    #       it to NO. This is not correct if ISOLATED_LOADING was CONCURRENT before it was set.
    unset_values = {"CHARACTER_SET_UNICODE": "OFF", "DEBUG_FUNCTION": [_InternalBuffer.get('function_name'), "OFF"],
                    "ISOLATED_LOADING": "NO", "FUNCTION_TRACE": "SET SESSION FUNCTION TRACE OFF",
                    "JSON_IGNORE_ERRORS": "OFF", "QUERY_BAND": ["", "SESSION"]}

    # If 'name' is in unset_values, unset the parameter using that mapping.
    if name.upper() in unset_values:
        # For 'FUNCTION_TRACE', unset_values already holds the full query; run it with execute_sql.
        if name.upper() == "FUNCTION_TRACE":
            execute_sql(unset_values[name.upper()])
        # For any other name, reuse set_session_param with the unset value.
        else:
            set_session_param(name, unset_values[name.upper()])
        return True

    previous_value = "{}".format(session_params[getattr(SessionParamsPythonNames, name.upper()).value]) \
        if name.upper() != 'TIMEZONE' else "'{}'".format(
        session_params[getattr(SessionParamsPythonNames, name.upper()).value])

    if name.upper() == "ACCOUNT":
        previous_value = [previous_value, 'SESSION']
    set_session_param(name, previous_value)

    return True

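# Illustrative round trip (assumes an active teradataml connection; 'sales_db' is a
# hypothetical database name):
#
#     set_session_param('database', 'sales_db')   # switch the default database
#     unset_session_param('database')              # restore the previous default database
#                                                  # buffered by set_session_param
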
class _Authorize:
    """ Parent class to either grant or revoke access on database objects. """
    _property = None

    def __init__(self, objects, database=None):
        """
        DESCRIPTION:
            Constructor for creating an _Authorize object.

        PARAMETERS:
            objects:
                Required Argument.
                Specifies the name(s) of the database objects to be authorized.
                Types: str OR list of str OR AccessType Enum

            database:
                Optional Argument.
                Specifies the name of the database to grant or revoke access on.
                Types: str

        RETURNS:
            Object of _Authorize.

        RAISES:
            None

        EXAMPLES:
            >>> auth = _Authorize('vfs_v1')
        """
        # Store the objects here, then use them wherever required.
        # "objects" may be a plain name (str or list of str) or an Enum subclass; guard
        # the issubclass() check so a non-class argument does not raise TypeError.
        self._is_enum = isinstance(objects, type) and issubclass(objects, enum.Enum)
        self._objects = objects
        self._access_method = self.__class__.__name__.upper()
        self.database = database

    def read(self, user):
        """
        DESCRIPTION:
            Authorize read access.
            Note:
                One must have admin access to give read access to another "user".

        PARAMETERS:
            user:
                Required Argument.
                Specifies the name of the user to be given read-only access.
                Types: str

        RETURNS:
            bool.

        RAISES:
            None

        EXAMPLES:
            >>> _Authorize('repo').read('BoB')
        """
        return self._apply_access(user, 'read', 'SELECT')

    def write(self, user):
        """
        DESCRIPTION:
            Authorize write access.
            Note:
                One must have admin access to give write access to another "user".

        PARAMETERS:
            user:
                Required Argument.
                Specifies the name of the user to be given write access.
                Types: str

        RETURNS:
            bool.

        RAISES:
            None

        EXAMPLES:
            >>> _Authorize('repo').write('BoB')
        """
        return self._apply_access(user, 'write', 'INSERT, UPDATE, DELETE')

    def _apply_access(self, user, operation, access_type):
        """
        DESCRIPTION:
            Internal function to grant or revoke access.

        PARAMETERS:
            user:
                Required Argument.
                Specifies the name of the user to be given access.
                Types: str

            operation:
                Required Argument.
                Specifies the operation to perform.
                Permitted Values: 'read', 'write'
                Types: str

            access_type:
                Required Argument.
                Specifies the type of access to grant or revoke.
                Permitted Values:
                    * 'SELECT' for read
                    * 'INSERT, UPDATE, DELETE' for write
                Types: str

        RETURNS:
            bool, True if access is granted or revoked successfully.

        RAISES:
            TeradataMlException, OperationalError

        EXAMPLES:
            >>> _Authorize('repo')._apply_access('BoB', 'read', 'SELECT')
        """
        sql_objects = UtilFuncs._as_list(self._objects) if not self._is_enum else \
            getattr(self._objects, operation).value

        for obj in sql_objects:
            if self._is_enum:
                sql = obj.format(
                    grant_revoke_=self._access_method,
                    database_=self.database,
                    to_from_=self._property,
                    user_=user
                )
            else:
                sql = "{} {} ON {} {} {}".format(
                    self._access_method, access_type, obj, self._property, user
                )
            execute_sql(sql)
        return True

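    # For the non-enum branch, the generated SQL has the shape
    #     <GRANT|REVOKE> <access_type> ON <object> <TO|FROM> <user>
    # where the first and last keywords come from the subclass (its class name and
    # _property). For example, with the Grant subclass defined below,
    # Grant('repo').read('BoB') executes:
    #     GRANT SELECT ON repo TO BoB
    # For the enum branch, each template is expected to carry the named placeholders
    # grant_revoke_, database_, to_from_ and user_.
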
    def read_write(self, user):
        """
        DESCRIPTION:
            Authorize read and write access.
            Note:
                One must have admin access to give read and write access to another "user".

        PARAMETERS:
            user:
                Required Argument.
                Specifies the name of the user to be given read and write access.
                Types: str

        RETURNS:
            bool.

        RAISES:
            None

        EXAMPLES:
            >>> _Authorize('repo').read_write('BoB')
        """
        self.read(user)
        return self.write(user)


class Grant(_Authorize):
    """ Class to grant access to tables. """
    _property = "TO"


class Revoke(_Authorize):
    """ Class to revoke access from tables. """
    _property = "FROM"