teradataml 20.0.0.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +2762 -0
- teradataml/__init__.py +78 -0
- teradataml/_version.py +11 -0
- teradataml/analytics/Transformations.py +2996 -0
- teradataml/analytics/__init__.py +82 -0
- teradataml/analytics/analytic_function_executor.py +2416 -0
- teradataml/analytics/analytic_query_generator.py +1050 -0
- teradataml/analytics/byom/H2OPredict.py +514 -0
- teradataml/analytics/byom/PMMLPredict.py +437 -0
- teradataml/analytics/byom/__init__.py +16 -0
- teradataml/analytics/json_parser/__init__.py +133 -0
- teradataml/analytics/json_parser/analytic_functions_argument.py +1805 -0
- teradataml/analytics/json_parser/json_store.py +191 -0
- teradataml/analytics/json_parser/metadata.py +1666 -0
- teradataml/analytics/json_parser/utils.py +805 -0
- teradataml/analytics/meta_class.py +236 -0
- teradataml/analytics/sqle/DecisionTreePredict.py +456 -0
- teradataml/analytics/sqle/NaiveBayesPredict.py +420 -0
- teradataml/analytics/sqle/__init__.py +128 -0
- teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -0
- teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -0
- teradataml/analytics/table_operator/__init__.py +11 -0
- teradataml/analytics/uaf/__init__.py +82 -0
- teradataml/analytics/utils.py +828 -0
- teradataml/analytics/valib.py +1617 -0
- teradataml/automl/__init__.py +5835 -0
- teradataml/automl/autodataprep/__init__.py +493 -0
- teradataml/automl/custom_json_utils.py +1625 -0
- teradataml/automl/data_preparation.py +1384 -0
- teradataml/automl/data_transformation.py +1254 -0
- teradataml/automl/feature_engineering.py +2273 -0
- teradataml/automl/feature_exploration.py +1873 -0
- teradataml/automl/model_evaluation.py +488 -0
- teradataml/automl/model_training.py +1407 -0
- teradataml/catalog/__init__.py +2 -0
- teradataml/catalog/byom.py +1759 -0
- teradataml/catalog/function_argument_mapper.py +859 -0
- teradataml/catalog/model_cataloging_utils.py +491 -0
- teradataml/clients/__init__.py +0 -0
- teradataml/clients/auth_client.py +137 -0
- teradataml/clients/keycloak_client.py +165 -0
- teradataml/clients/pkce_client.py +481 -0
- teradataml/common/__init__.py +1 -0
- teradataml/common/aed_utils.py +2078 -0
- teradataml/common/bulk_exposed_utils.py +113 -0
- teradataml/common/constants.py +1669 -0
- teradataml/common/deprecations.py +166 -0
- teradataml/common/exceptions.py +147 -0
- teradataml/common/formula.py +743 -0
- teradataml/common/garbagecollector.py +666 -0
- teradataml/common/logger.py +1261 -0
- teradataml/common/messagecodes.py +518 -0
- teradataml/common/messages.py +262 -0
- teradataml/common/pylogger.py +67 -0
- teradataml/common/sqlbundle.py +764 -0
- teradataml/common/td_coltype_code_to_tdtype.py +48 -0
- teradataml/common/utils.py +3166 -0
- teradataml/common/warnings.py +36 -0
- teradataml/common/wrapper_utils.py +625 -0
- teradataml/config/__init__.py +0 -0
- teradataml/config/dummy_file1.cfg +5 -0
- teradataml/config/dummy_file2.cfg +3 -0
- teradataml/config/sqlengine_alias_definitions_v1.0 +14 -0
- teradataml/config/sqlengine_alias_definitions_v1.1 +20 -0
- teradataml/config/sqlengine_alias_definitions_v1.3 +19 -0
- teradataml/context/__init__.py +0 -0
- teradataml/context/aed_context.py +223 -0
- teradataml/context/context.py +1462 -0
- teradataml/data/A_loan.csv +19 -0
- teradataml/data/BINARY_REALS_LEFT.csv +11 -0
- teradataml/data/BINARY_REALS_RIGHT.csv +11 -0
- teradataml/data/B_loan.csv +49 -0
- teradataml/data/BuoyData2.csv +17 -0
- teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -0
- teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -0
- teradataml/data/Convolve2RealsLeft.csv +5 -0
- teradataml/data/Convolve2RealsRight.csv +5 -0
- teradataml/data/Convolve2ValidLeft.csv +11 -0
- teradataml/data/Convolve2ValidRight.csv +11 -0
- teradataml/data/DFFTConv_Real_8_8.csv +65 -0
- teradataml/data/Employee.csv +5 -0
- teradataml/data/Employee_Address.csv +4 -0
- teradataml/data/Employee_roles.csv +5 -0
- teradataml/data/JulesBelvezeDummyData.csv +100 -0
- teradataml/data/Mall_customer_data.csv +201 -0
- teradataml/data/Orders1_12mf.csv +25 -0
- teradataml/data/Pi_loan.csv +7 -0
- teradataml/data/SMOOTHED_DATA.csv +7 -0
- teradataml/data/TestDFFT8.csv +9 -0
- teradataml/data/TestRiver.csv +109 -0
- teradataml/data/Traindata.csv +28 -0
- teradataml/data/__init__.py +0 -0
- teradataml/data/acf.csv +17 -0
- teradataml/data/adaboost_example.json +34 -0
- teradataml/data/adaboostpredict_example.json +24 -0
- teradataml/data/additional_table.csv +11 -0
- teradataml/data/admissions_test.csv +21 -0
- teradataml/data/admissions_train.csv +41 -0
- teradataml/data/admissions_train_nulls.csv +41 -0
- teradataml/data/advertising.csv +201 -0
- teradataml/data/ageandheight.csv +13 -0
- teradataml/data/ageandpressure.csv +31 -0
- teradataml/data/amazon_reviews_25.csv +26 -0
- teradataml/data/antiselect_example.json +36 -0
- teradataml/data/antiselect_input.csv +8 -0
- teradataml/data/antiselect_input_mixed_case.csv +8 -0
- teradataml/data/applicant_external.csv +7 -0
- teradataml/data/applicant_reference.csv +7 -0
- teradataml/data/apriori_example.json +22 -0
- teradataml/data/arima_example.json +9 -0
- teradataml/data/assortedtext_input.csv +8 -0
- teradataml/data/attribution_example.json +34 -0
- teradataml/data/attribution_sample_table.csv +27 -0
- teradataml/data/attribution_sample_table1.csv +6 -0
- teradataml/data/attribution_sample_table2.csv +11 -0
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bank_marketing.csv +11163 -0
- teradataml/data/bank_web_clicks1.csv +43 -0
- teradataml/data/bank_web_clicks2.csv +91 -0
- teradataml/data/bank_web_url.csv +85 -0
- teradataml/data/barrier.csv +2 -0
- teradataml/data/barrier_new.csv +3 -0
- teradataml/data/betweenness_example.json +14 -0
- teradataml/data/bike_sharing.csv +732 -0
- teradataml/data/bin_breaks.csv +8 -0
- teradataml/data/bin_fit_ip.csv +4 -0
- teradataml/data/binary_complex_left.csv +11 -0
- teradataml/data/binary_complex_right.csv +11 -0
- teradataml/data/binary_matrix_complex_left.csv +21 -0
- teradataml/data/binary_matrix_complex_right.csv +21 -0
- teradataml/data/binary_matrix_real_left.csv +21 -0
- teradataml/data/binary_matrix_real_right.csv +21 -0
- teradataml/data/blood2ageandweight.csv +26 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/boston.csv +507 -0
- teradataml/data/boston2cols.csv +721 -0
- teradataml/data/breast_cancer.csv +570 -0
- teradataml/data/buoydata_mix.csv +11 -0
- teradataml/data/burst_data.csv +5 -0
- teradataml/data/burst_example.json +21 -0
- teradataml/data/byom_example.json +34 -0
- teradataml/data/bytes_table.csv +4 -0
- teradataml/data/cal_housing_ex_raw.csv +70 -0
- teradataml/data/callers.csv +7 -0
- teradataml/data/calls.csv +10 -0
- teradataml/data/cars_hist.csv +33 -0
- teradataml/data/cat_table.csv +25 -0
- teradataml/data/ccm_example.json +32 -0
- teradataml/data/ccm_input.csv +91 -0
- teradataml/data/ccm_input2.csv +13 -0
- teradataml/data/ccmexample.csv +101 -0
- teradataml/data/ccmprepare_example.json +9 -0
- teradataml/data/ccmprepare_input.csv +91 -0
- teradataml/data/cfilter_example.json +12 -0
- teradataml/data/changepointdetection_example.json +18 -0
- teradataml/data/changepointdetectionrt_example.json +8 -0
- teradataml/data/chi_sq.csv +3 -0
- teradataml/data/churn_data.csv +14 -0
- teradataml/data/churn_emission.csv +35 -0
- teradataml/data/churn_initial.csv +3 -0
- teradataml/data/churn_state_transition.csv +5 -0
- teradataml/data/citedges_2.csv +745 -0
- teradataml/data/citvertices_2.csv +1210 -0
- teradataml/data/clicks2.csv +16 -0
- teradataml/data/clickstream.csv +13 -0
- teradataml/data/clickstream1.csv +11 -0
- teradataml/data/closeness_example.json +16 -0
- teradataml/data/complaints.csv +21 -0
- teradataml/data/complaints_mini.csv +3 -0
- teradataml/data/complaints_test_tokenized.csv +353 -0
- teradataml/data/complaints_testtoken.csv +224 -0
- teradataml/data/complaints_tokens_model.csv +348 -0
- teradataml/data/complaints_tokens_test.csv +353 -0
- teradataml/data/complaints_traintoken.csv +472 -0
- teradataml/data/computers_category.csv +1001 -0
- teradataml/data/computers_test1.csv +1252 -0
- teradataml/data/computers_train1.csv +5009 -0
- teradataml/data/computers_train1_clustered.csv +5009 -0
- teradataml/data/confusionmatrix_example.json +9 -0
- teradataml/data/conversion_event_table.csv +3 -0
- teradataml/data/corr_input.csv +17 -0
- teradataml/data/correlation_example.json +11 -0
- teradataml/data/covid_confirm_sd.csv +83 -0
- teradataml/data/coxhazardratio_example.json +39 -0
- teradataml/data/coxph_example.json +15 -0
- teradataml/data/coxsurvival_example.json +28 -0
- teradataml/data/cpt.csv +41 -0
- teradataml/data/credit_ex_merged.csv +45 -0
- teradataml/data/creditcard_data.csv +1001 -0
- teradataml/data/customer_loyalty.csv +301 -0
- teradataml/data/customer_loyalty_newseq.csv +31 -0
- teradataml/data/customer_segmentation_test.csv +2628 -0
- teradataml/data/customer_segmentation_train.csv +8069 -0
- teradataml/data/dataframe_example.json +173 -0
- teradataml/data/decisionforest_example.json +37 -0
- teradataml/data/decisionforestpredict_example.json +38 -0
- teradataml/data/decisiontree_example.json +21 -0
- teradataml/data/decisiontreepredict_example.json +45 -0
- teradataml/data/dfft2_size4_real.csv +17 -0
- teradataml/data/dfft2_test_matrix16.csv +17 -0
- teradataml/data/dfft2conv_real_4_4.csv +65 -0
- teradataml/data/diabetes.csv +443 -0
- teradataml/data/diabetes_test.csv +89 -0
- teradataml/data/dict_table.csv +5 -0
- teradataml/data/docperterm_table.csv +4 -0
- teradataml/data/docs/__init__.py +1 -0
- teradataml/data/docs/byom/__init__.py +0 -0
- teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -0
- teradataml/data/docs/byom/docs/DataikuPredict.py +217 -0
- teradataml/data/docs/byom/docs/H2OPredict.py +325 -0
- teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
- teradataml/data/docs/byom/docs/ONNXPredict.py +283 -0
- teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
- teradataml/data/docs/byom/docs/PMMLPredict.py +278 -0
- teradataml/data/docs/byom/docs/__init__.py +0 -0
- teradataml/data/docs/sqle/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_10/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Attribution.py +200 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -0
- teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +96 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +152 -0
- teradataml/data/docs/sqle/docs_17_10/FTest.py +161 -0
- teradataml/data/docs/sqle/docs_17_10/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +85 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +82 -0
- teradataml/data/docs/sqle/docs_17_10/Histogram.py +165 -0
- teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_10/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -0
- teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +135 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +166 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -0
- teradataml/data/docs/sqle/docs_17_10/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +110 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -0
- teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -0
- teradataml/data/docs/sqle/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -0
- teradataml/data/docs/sqle/docs_17_20/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Apriori.py +138 -0
- teradataml/data/docs/sqle/docs_17_20/Attribution.py +201 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -0
- teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
- teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +246 -0
- teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +280 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +136 -0
- teradataml/data/docs/sqle/docs_17_20/FTest.py +240 -0
- teradataml/data/docs/sqle/docs_17_20/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +415 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -0
- teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/Histogram.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -0
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/KNN.py +215 -0
- teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_20/NERExtractor.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_20/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +231 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +191 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -0
- teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +155 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -0
- teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -0
- teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -0
- teradataml/data/docs/sqle/docs_17_20/SMOTE.py +212 -0
- teradataml/data/docs/sqle/docs_17_20/SVM.py +414 -0
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -0
- teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -0
- teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -0
- teradataml/data/docs/sqle/docs_17_20/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_20/Shap.py +225 -0
- teradataml/data/docs/sqle/docs_17_20/Silhouette.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -0
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -0
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
- teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +267 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -0
- teradataml/data/docs/sqle/docs_17_20/TextMorph.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +160 -0
- teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -0
- teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
- teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +169 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +237 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoost.py +362 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -0
- teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/tableoperator/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_00/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_05/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +429 -0
- teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +440 -0
- teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +387 -0
- teradataml/data/docs/tableoperator/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/uaf/__init__.py +0 -0
- teradataml/data/docs/uaf/docs_17_20/ACF.py +186 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +370 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +161 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
- teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
- teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +248 -0
- teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +178 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve.py +230 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve2.py +218 -0
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT.py +204 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +192 -0
- teradataml/data/docs/uaf/docs_17_20/DIFF.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/DTW.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
- teradataml/data/docs/uaf/docs_17_20/DWT2D.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +142 -0
- teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +184 -0
- teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
- teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +206 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +143 -0
- teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +198 -0
- teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +260 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT.py +165 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
- teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/InputValidator.py +121 -0
- teradataml/data/docs/uaf/docs_17_20/LineSpec.py +156 -0
- teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +215 -0
- teradataml/data/docs/uaf/docs_17_20/MAMean.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/MInfo.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
- teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/PACF.py +157 -0
- teradataml/data/docs/uaf/docs_17_20/Portman.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +203 -0
- teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +155 -0
- teradataml/data/docs/uaf/docs_17_20/Resample.py +237 -0
- teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
- teradataml/data/docs/uaf/docs_17_20/SInfo.py +123 -0
- teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +173 -0
- teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +164 -0
- teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/Smoothma.py +208 -0
- teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +151 -0
- teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +202 -0
- teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
- teradataml/data/docs/uaf/docs_17_20/__init__.py +0 -0
- teradataml/data/dtw_example.json +18 -0
- teradataml/data/dtw_t1.csv +11 -0
- teradataml/data/dtw_t2.csv +4 -0
- teradataml/data/dwt2d_dataTable.csv +65 -0
- teradataml/data/dwt2d_example.json +16 -0
- teradataml/data/dwt_dataTable.csv +8 -0
- teradataml/data/dwt_example.json +15 -0
- teradataml/data/dwt_filterTable.csv +3 -0
- teradataml/data/dwt_filter_dim.csv +5 -0
- teradataml/data/emission.csv +9 -0
- teradataml/data/emp_table_by_dept.csv +19 -0
- teradataml/data/employee_info.csv +4 -0
- teradataml/data/employee_table.csv +6 -0
- teradataml/data/excluding_event_table.csv +2 -0
- teradataml/data/finance_data.csv +6 -0
- teradataml/data/finance_data2.csv +61 -0
- teradataml/data/finance_data3.csv +93 -0
- teradataml/data/finance_data4.csv +13 -0
- teradataml/data/fish.csv +160 -0
- teradataml/data/fm_blood2ageandweight.csv +26 -0
- teradataml/data/fmeasure_example.json +12 -0
- teradataml/data/followers_leaders.csv +10 -0
- teradataml/data/fpgrowth_example.json +12 -0
- teradataml/data/frequentpaths_example.json +29 -0
- teradataml/data/friends.csv +9 -0
- teradataml/data/fs_input.csv +33 -0
- teradataml/data/fs_input1.csv +33 -0
- teradataml/data/genData.csv +513 -0
- teradataml/data/geodataframe_example.json +40 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/glm_admissions_model.csv +12 -0
- teradataml/data/glm_example.json +56 -0
- teradataml/data/glml1l2_example.json +28 -0
- teradataml/data/glml1l2predict_example.json +54 -0
- teradataml/data/glmpredict_example.json +54 -0
- teradataml/data/gq_t1.csv +21 -0
- teradataml/data/grocery_transaction.csv +19 -0
- teradataml/data/hconvolve_complex_right.csv +5 -0
- teradataml/data/hconvolve_complex_rightmulti.csv +5 -0
- teradataml/data/histogram_example.json +12 -0
- teradataml/data/hmmdecoder_example.json +79 -0
- teradataml/data/hmmevaluator_example.json +25 -0
- teradataml/data/hmmsupervised_example.json +10 -0
- teradataml/data/hmmunsupervised_example.json +8 -0
- teradataml/data/hnsw_alter_data.csv +5 -0
- teradataml/data/hnsw_data.csv +10 -0
- teradataml/data/house_values.csv +12 -0
- teradataml/data/house_values2.csv +13 -0
- teradataml/data/housing_cat.csv +7 -0
- teradataml/data/housing_data.csv +9 -0
- teradataml/data/housing_test.csv +47 -0
- teradataml/data/housing_test_binary.csv +47 -0
- teradataml/data/housing_train.csv +493 -0
- teradataml/data/housing_train_attribute.csv +5 -0
- teradataml/data/housing_train_binary.csv +437 -0
- teradataml/data/housing_train_parameter.csv +2 -0
- teradataml/data/housing_train_response.csv +493 -0
- teradataml/data/housing_train_segment.csv +201 -0
- teradataml/data/ibm_stock.csv +370 -0
- teradataml/data/ibm_stock1.csv +370 -0
- teradataml/data/identitymatch_example.json +22 -0
- teradataml/data/idf_table.csv +4 -0
- teradataml/data/idwt2d_dataTable.csv +5 -0
- teradataml/data/idwt_dataTable.csv +8 -0
- teradataml/data/idwt_filterTable.csv +3 -0
- teradataml/data/impressions.csv +101 -0
- teradataml/data/inflation.csv +21 -0
- teradataml/data/initial.csv +3 -0
- teradataml/data/insect2Cols.csv +61 -0
- teradataml/data/insect_sprays.csv +13 -0
- teradataml/data/insurance.csv +1339 -0
- teradataml/data/interpolator_example.json +13 -0
- teradataml/data/interval_data.csv +5 -0
- teradataml/data/iris_altinput.csv +481 -0
- teradataml/data/iris_attribute_output.csv +8 -0
- teradataml/data/iris_attribute_test.csv +121 -0
- teradataml/data/iris_attribute_train.csv +481 -0
- teradataml/data/iris_category_expect_predict.csv +31 -0
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/iris_input.csv +151 -0
- teradataml/data/iris_response_train.csv +121 -0
- teradataml/data/iris_test.csv +31 -0
- teradataml/data/iris_train.csv +121 -0
- teradataml/data/join_table1.csv +4 -0
- teradataml/data/join_table2.csv +4 -0
- teradataml/data/jsons/anly_function_name.json +7 -0
- teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
- teradataml/data/jsons/byom/dataikupredict.json +148 -0
- teradataml/data/jsons/byom/datarobotpredict.json +147 -0
- teradataml/data/jsons/byom/h2opredict.json +195 -0
- teradataml/data/jsons/byom/onnxembeddings.json +267 -0
- teradataml/data/jsons/byom/onnxpredict.json +187 -0
- teradataml/data/jsons/byom/pmmlpredict.json +147 -0
- teradataml/data/jsons/paired_functions.json +450 -0
- teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/16.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/16.20/Pack.json +98 -0
- teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/16.20/Unpack.json +166 -0
- teradataml/data/jsons/sqle/16.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.00/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.00/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.00/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.00/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.05/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.05/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.05/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.05/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.10/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -0
- teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -0
- teradataml/data/jsons/sqle/17.10/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -0
- teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -0
- teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +69 -0
- teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -0
- teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_Histogram.json +133 -0
- teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +183 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +66 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +197 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +112 -0
- teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +157 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +148 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +119 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -0
- teradataml/data/jsons/sqle/17.10/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.10/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -0
- teradataml/data/jsons/sqle/17.20/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -0
- teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +149 -0
- teradataml/data/jsons/sqle/17.20/TD_Apriori.json +181 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
- teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -0
- teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +260 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -0
- teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -0
- teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeans.json +232 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +87 -0
- teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -0
- teradataml/data/jsons/sqle/17.20/TD_NERExtractor.json +145 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +316 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +124 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -0
- teradataml/data/jsons/sqle/17.20/TD_ROC.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +138 -0
- teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_SMOTE.json +267 -0
- teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -0
- teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +310 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +120 -0
- teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +194 -0
- teradataml/data/jsons/sqle/17.20/TD_Shap.json +221 -0
- teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +143 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_TextMorph.json +134 -0
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +297 -0
- teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -0
- teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
- teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +183 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -0
- teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -0
- teradataml/data/jsons/sqle/17.20/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +370 -0
- teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +460 -0
- teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +385 -0
- teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +400 -0
- teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +401 -0
- teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +384 -0
- teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +384 -0
- teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
- teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
- teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
- teradataml/data/jsons/tableoperator/17.00/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.10/read_nos.json +184 -0
- teradataml/data/jsons/tableoperator/17.10/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/tableoperator/17.20/read_nos.json +183 -0
- teradataml/data/jsons/tableoperator/17.20/write_nos.json +224 -0
- teradataml/data/jsons/uaf/17.20/TD_ACF.json +132 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +396 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +77 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +153 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
- teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +107 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +106 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +89 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +104 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +66 -0
- teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +87 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT.json +134 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +144 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_DIFF.json +92 -0
- teradataml/data/jsons/uaf/17.20/TD_DTW.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
- teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +39 -0
- teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +85 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +71 -0
- teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +139 -0
- teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECASTER.json +313 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +81 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
- teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +64 -0
- teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
- teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +182 -0
- teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +103 -0
- teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +181 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_MINFO.json +67 -0
- teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +179 -0
- teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +119 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +98 -0
- teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +194 -0
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
- teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +143 -0
- teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +90 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +80 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -0
- teradataml/data/jsons/uaf/17.20/TD_SINFO.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +163 -0
- teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +112 -0
- teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -0
- teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
- teradataml/data/kmeans_example.json +23 -0
- teradataml/data/kmeans_table.csv +10 -0
- teradataml/data/kmeans_us_arrests_data.csv +51 -0
- teradataml/data/knn_example.json +19 -0
- teradataml/data/knnrecommender_example.json +7 -0
- teradataml/data/knnrecommenderpredict_example.json +12 -0
- teradataml/data/lar_example.json +17 -0
- teradataml/data/larpredict_example.json +30 -0
- teradataml/data/lc_new_predictors.csv +5 -0
- teradataml/data/lc_new_reference.csv +9 -0
- teradataml/data/lda_example.json +9 -0
- teradataml/data/ldainference_example.json +15 -0
- teradataml/data/ldatopicsummary_example.json +9 -0
- teradataml/data/levendist_input.csv +13 -0
- teradataml/data/levenshteindistance_example.json +10 -0
- teradataml/data/linreg_example.json +10 -0
- teradataml/data/load_example_data.py +350 -0
- teradataml/data/loan_prediction.csv +295 -0
- teradataml/data/lungcancer.csv +138 -0
- teradataml/data/mappingdata.csv +12 -0
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/milk_timeseries.csv +157 -0
- teradataml/data/min_max_titanic.csv +4 -0
- teradataml/data/minhash_example.json +6 -0
- teradataml/data/ml_ratings.csv +7547 -0
- teradataml/data/ml_ratings_10.csv +2445 -0
- teradataml/data/mobile_data.csv +13 -0
- teradataml/data/model1_table.csv +5 -0
- teradataml/data/model2_table.csv +5 -0
- teradataml/data/models/License_file.txt +1 -0
- teradataml/data/models/License_file_empty.txt +0 -0
- teradataml/data/models/dataiku_iris_data_ann_thin +0 -0
- teradataml/data/models/dr_iris_rf +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn.onnx +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn_floattensor.onnx +0 -0
- teradataml/data/models/iris_db_glm_model.pmml +57 -0
- teradataml/data/models/iris_db_xgb_model.pmml +4471 -0
- teradataml/data/models/iris_kmeans_model +0 -0
- teradataml/data/models/iris_mojo_glm_h2o_model +0 -0
- teradataml/data/models/iris_mojo_xgb_h2o_model +0 -0
- teradataml/data/modularity_example.json +12 -0
- teradataml/data/movavg_example.json +8 -0
- teradataml/data/mtx1.csv +7 -0
- teradataml/data/mtx2.csv +13 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/mvdfft8.csv +9 -0
- teradataml/data/naivebayes_example.json +10 -0
- teradataml/data/naivebayespredict_example.json +19 -0
- teradataml/data/naivebayestextclassifier2_example.json +7 -0
- teradataml/data/naivebayestextclassifier_example.json +8 -0
- teradataml/data/naivebayestextclassifierpredict_example.json +32 -0
- teradataml/data/name_Find_configure.csv +10 -0
- teradataml/data/namedentityfinder_example.json +14 -0
- teradataml/data/namedentityfinderevaluator_example.json +10 -0
- teradataml/data/namedentityfindertrainer_example.json +6 -0
- teradataml/data/nb_iris_input_test.csv +31 -0
- teradataml/data/nb_iris_input_train.csv +121 -0
- teradataml/data/nbp_iris_model.csv +13 -0
- teradataml/data/ner_dict.csv +8 -0
- teradataml/data/ner_extractor_text.csv +2 -0
- teradataml/data/ner_input_eng.csv +7 -0
- teradataml/data/ner_rule.csv +5 -0
- teradataml/data/ner_sports_test2.csv +29 -0
- teradataml/data/ner_sports_train.csv +501 -0
- teradataml/data/nerevaluator_example.json +6 -0
- teradataml/data/nerextractor_example.json +18 -0
- teradataml/data/nermem_sports_test.csv +18 -0
- teradataml/data/nermem_sports_train.csv +51 -0
- teradataml/data/nertrainer_example.json +7 -0
- teradataml/data/ngrams_example.json +7 -0
- teradataml/data/notebooks/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -0
- teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -0
- teradataml/data/notebooks/sqlalchemy/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -0
- teradataml/data/npath_example.json +23 -0
- teradataml/data/ntree_example.json +14 -0
- teradataml/data/numeric_strings.csv +5 -0
- teradataml/data/numerics.csv +4 -0
- teradataml/data/ocean_buoy.csv +17 -0
- teradataml/data/ocean_buoy2.csv +17 -0
- teradataml/data/ocean_buoys.csv +28 -0
- teradataml/data/ocean_buoys2.csv +10 -0
- teradataml/data/ocean_buoys_nonpti.csv +28 -0
- teradataml/data/ocean_buoys_seq.csv +29 -0
- teradataml/data/onehot_encoder_train.csv +4 -0
- teradataml/data/openml_example.json +92 -0
- teradataml/data/optional_event_table.csv +4 -0
- teradataml/data/orders1.csv +11 -0
- teradataml/data/orders1_12.csv +13 -0
- teradataml/data/orders_ex.csv +4 -0
- teradataml/data/pack_example.json +9 -0
- teradataml/data/package_tracking.csv +19 -0
- teradataml/data/package_tracking_pti.csv +19 -0
- teradataml/data/pagerank_example.json +13 -0
- teradataml/data/paragraphs_input.csv +6 -0
- teradataml/data/pathanalyzer_example.json +8 -0
- teradataml/data/pathgenerator_example.json +8 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/pattern_matching_data.csv +11 -0
- teradataml/data/payment_fraud_dataset.csv +10001 -0
- teradataml/data/peppers.png +0 -0
- teradataml/data/phrases.csv +7 -0
- teradataml/data/pivot_example.json +9 -0
- teradataml/data/pivot_input.csv +22 -0
- teradataml/data/playerRating.csv +31 -0
- teradataml/data/pos_input.csv +40 -0
- teradataml/data/postagger_example.json +7 -0
- teradataml/data/posttagger_output.csv +44 -0
- teradataml/data/production_data.csv +17 -0
- teradataml/data/production_data2.csv +7 -0
- teradataml/data/randomsample_example.json +32 -0
- teradataml/data/randomwalksample_example.json +9 -0
- teradataml/data/rank_table.csv +6 -0
- teradataml/data/real_values.csv +14 -0
- teradataml/data/ref_mobile_data.csv +4 -0
- teradataml/data/ref_mobile_data_dense.csv +2 -0
- teradataml/data/ref_url.csv +17 -0
- teradataml/data/restaurant_reviews.csv +7 -0
- teradataml/data/retail_churn_table.csv +27772 -0
- teradataml/data/river_data.csv +145 -0
- teradataml/data/roc_example.json +8 -0
- teradataml/data/roc_input.csv +101 -0
- teradataml/data/rule_inputs.csv +6 -0
- teradataml/data/rule_table.csv +2 -0
- teradataml/data/sales.csv +7 -0
- teradataml/data/sales_transaction.csv +501 -0
- teradataml/data/salesdata.csv +342 -0
- teradataml/data/sample_cities.csv +3 -0
- teradataml/data/sample_shapes.csv +11 -0
- teradataml/data/sample_streets.csv +3 -0
- teradataml/data/sampling_example.json +16 -0
- teradataml/data/sax_example.json +17 -0
- teradataml/data/scale_attributes.csv +3 -0
- teradataml/data/scale_example.json +74 -0
- teradataml/data/scale_housing.csv +11 -0
- teradataml/data/scale_housing_test.csv +6 -0
- teradataml/data/scale_input_part_sparse.csv +31 -0
- teradataml/data/scale_input_partitioned.csv +16 -0
- teradataml/data/scale_input_sparse.csv +11 -0
- teradataml/data/scale_parameters.csv +3 -0
- teradataml/data/scale_stat.csv +11 -0
- teradataml/data/scalebypartition_example.json +13 -0
- teradataml/data/scalemap_example.json +13 -0
- teradataml/data/scalesummary_example.json +12 -0
- teradataml/data/score_category.csv +101 -0
- teradataml/data/score_summary.csv +4 -0
- teradataml/data/script_example.json +10 -0
- teradataml/data/scripts/deploy_script.py +84 -0
- teradataml/data/scripts/lightgbm/dataset.template +175 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +264 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +234 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +177 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/mapper.py +16 -0
- teradataml/data/scripts/mapper_replace.py +16 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +205 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +148 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +144 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +166 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +161 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +145 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +327 -0
- teradataml/data/sdk/modelops/modelops_spec.json +101737 -0
- teradataml/data/seeds.csv +10 -0
- teradataml/data/sentenceextractor_example.json +7 -0
- teradataml/data/sentiment_extract_input.csv +11 -0
- teradataml/data/sentiment_train.csv +16 -0
- teradataml/data/sentiment_word.csv +20 -0
- teradataml/data/sentiment_word_input.csv +20 -0
- teradataml/data/sentimentextractor_example.json +24 -0
- teradataml/data/sentimenttrainer_example.json +8 -0
- teradataml/data/sequence_table.csv +10 -0
- teradataml/data/seriessplitter_example.json +8 -0
- teradataml/data/sessionize_example.json +17 -0
- teradataml/data/sessionize_table.csv +116 -0
- teradataml/data/setop_test1.csv +24 -0
- teradataml/data/setop_test2.csv +22 -0
- teradataml/data/soc_nw_edges.csv +11 -0
- teradataml/data/soc_nw_vertices.csv +8 -0
- teradataml/data/souvenir_timeseries.csv +168 -0
- teradataml/data/sparse_iris_attribute.csv +5 -0
- teradataml/data/sparse_iris_test.csv +121 -0
- teradataml/data/sparse_iris_train.csv +601 -0
- teradataml/data/star1.csv +6 -0
- teradataml/data/star_pivot.csv +8 -0
- teradataml/data/state_transition.csv +5 -0
- teradataml/data/stock_data.csv +53 -0
- teradataml/data/stock_movement.csv +11 -0
- teradataml/data/stock_vol.csv +76 -0
- teradataml/data/stop_words.csv +8 -0
- teradataml/data/store_sales.csv +37 -0
- teradataml/data/stringsimilarity_example.json +8 -0
- teradataml/data/strsimilarity_input.csv +13 -0
- teradataml/data/students.csv +101 -0
- teradataml/data/svm_iris_input_test.csv +121 -0
- teradataml/data/svm_iris_input_train.csv +481 -0
- teradataml/data/svm_iris_model.csv +7 -0
- teradataml/data/svmdense_example.json +10 -0
- teradataml/data/svmdensepredict_example.json +19 -0
- teradataml/data/svmsparse_example.json +8 -0
- teradataml/data/svmsparsepredict_example.json +14 -0
- teradataml/data/svmsparsesummary_example.json +8 -0
- teradataml/data/target_mobile_data.csv +13 -0
- teradataml/data/target_mobile_data_dense.csv +5 -0
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/tdnerextractor_example.json +14 -0
- teradataml/data/templatedata.csv +1201 -0
- teradataml/data/templates/open_source_ml.json +11 -0
- teradataml/data/teradata_icon.ico +0 -0
- teradataml/data/teradataml_example.json +1473 -0
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_loan_prediction.csv +53 -0
- teradataml/data/test_pacf_12.csv +37 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/test_river2.csv +109 -0
- teradataml/data/text_inputs.csv +6 -0
- teradataml/data/textchunker_example.json +8 -0
- teradataml/data/textclassifier_example.json +7 -0
- teradataml/data/textclassifier_input.csv +7 -0
- teradataml/data/textclassifiertrainer_example.json +7 -0
- teradataml/data/textmorph_example.json +11 -0
- teradataml/data/textparser_example.json +15 -0
- teradataml/data/texttagger_example.json +12 -0
- teradataml/data/texttokenizer_example.json +7 -0
- teradataml/data/texttrainer_input.csv +11 -0
- teradataml/data/tf_example.json +7 -0
- teradataml/data/tfidf_example.json +14 -0
- teradataml/data/tfidf_input1.csv +201 -0
- teradataml/data/tfidf_train.csv +6 -0
- teradataml/data/time_table1.csv +535 -0
- teradataml/data/time_table2.csv +14 -0
- teradataml/data/timeseriesdata.csv +1601 -0
- teradataml/data/timeseriesdatasetsd4.csv +105 -0
- teradataml/data/timestamp_data.csv +4 -0
- teradataml/data/titanic.csv +892 -0
- teradataml/data/titanic_dataset_unpivoted.csv +19 -0
- teradataml/data/to_num_data.csv +4 -0
- teradataml/data/tochar_data.csv +5 -0
- teradataml/data/token_table.csv +696 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/train_tracking.csv +28 -0
- teradataml/data/trans_dense.csv +16 -0
- teradataml/data/trans_sparse.csv +55 -0
- teradataml/data/transformation_table.csv +6 -0
- teradataml/data/transformation_table_new.csv +2 -0
- teradataml/data/tv_spots.csv +16 -0
- teradataml/data/twod_climate_data.csv +117 -0
- teradataml/data/uaf_example.json +529 -0
- teradataml/data/univariatestatistics_example.json +9 -0
- teradataml/data/unpack_example.json +10 -0
- teradataml/data/unpivot_example.json +25 -0
- teradataml/data/unpivot_input.csv +8 -0
- teradataml/data/url_data.csv +10 -0
- teradataml/data/us_air_pass.csv +37 -0
- teradataml/data/us_population.csv +624 -0
- teradataml/data/us_states_shapes.csv +52 -0
- teradataml/data/varmax_example.json +18 -0
- teradataml/data/vectordistance_example.json +30 -0
- teradataml/data/ville_climatedata.csv +121 -0
- teradataml/data/ville_tempdata.csv +12 -0
- teradataml/data/ville_tempdata1.csv +12 -0
- teradataml/data/ville_temperature.csv +11 -0
- teradataml/data/waveletTable.csv +1605 -0
- teradataml/data/waveletTable2.csv +1605 -0
- teradataml/data/weightedmovavg_example.json +9 -0
- teradataml/data/wft_testing.csv +5 -0
- teradataml/data/windowdfft.csv +16 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/data/word_embed_input_table1.csv +6 -0
- teradataml/data/word_embed_input_table2.csv +5 -0
- teradataml/data/word_embed_model.csv +23 -0
- teradataml/data/words_input.csv +13 -0
- teradataml/data/xconvolve_complex_left.csv +6 -0
- teradataml/data/xconvolve_complex_leftmulti.csv +6 -0
- teradataml/data/xgboost_example.json +36 -0
- teradataml/data/xgboostpredict_example.json +32 -0
- teradataml/data/ztest_example.json +16 -0
- teradataml/dataframe/__init__.py +0 -0
- teradataml/dataframe/copy_to.py +2446 -0
- teradataml/dataframe/data_transfer.py +2840 -0
- teradataml/dataframe/dataframe.py +20908 -0
- teradataml/dataframe/dataframe_utils.py +2114 -0
- teradataml/dataframe/fastload.py +794 -0
- teradataml/dataframe/functions.py +2110 -0
- teradataml/dataframe/indexer.py +424 -0
- teradataml/dataframe/row.py +160 -0
- teradataml/dataframe/setop.py +1171 -0
- teradataml/dataframe/sql.py +10904 -0
- teradataml/dataframe/sql_function_parameters.py +440 -0
- teradataml/dataframe/sql_functions.py +652 -0
- teradataml/dataframe/sql_interfaces.py +220 -0
- teradataml/dataframe/vantage_function_types.py +675 -0
- teradataml/dataframe/window.py +694 -0
- teradataml/dbutils/__init__.py +3 -0
- teradataml/dbutils/dbutils.py +2871 -0
- teradataml/dbutils/filemgr.py +318 -0
- teradataml/gen_ai/__init__.py +2 -0
- teradataml/gen_ai/convAI.py +473 -0
- teradataml/geospatial/__init__.py +4 -0
- teradataml/geospatial/geodataframe.py +1105 -0
- teradataml/geospatial/geodataframecolumn.py +392 -0
- teradataml/geospatial/geometry_types.py +926 -0
- teradataml/hyperparameter_tuner/__init__.py +1 -0
- teradataml/hyperparameter_tuner/optimizer.py +4115 -0
- teradataml/hyperparameter_tuner/utils.py +303 -0
- teradataml/lib/__init__.py +0 -0
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/lib/libaed_0_1_aarch64.so +0 -0
- teradataml/lib/libaed_0_1_ppc64le.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/_base.py +1321 -0
- teradataml/opensource/_class.py +464 -0
- teradataml/opensource/_constants.py +61 -0
- teradataml/opensource/_lightgbm.py +949 -0
- teradataml/opensource/_sklearn.py +1008 -0
- teradataml/opensource/_wrapper_utils.py +267 -0
- teradataml/options/__init__.py +148 -0
- teradataml/options/configure.py +489 -0
- teradataml/options/display.py +187 -0
- teradataml/plot/__init__.py +3 -0
- teradataml/plot/axis.py +1427 -0
- teradataml/plot/constants.py +15 -0
- teradataml/plot/figure.py +431 -0
- teradataml/plot/plot.py +810 -0
- teradataml/plot/query_generator.py +83 -0
- teradataml/plot/subplot.py +216 -0
- teradataml/scriptmgmt/UserEnv.py +4273 -0
- teradataml/scriptmgmt/__init__.py +3 -0
- teradataml/scriptmgmt/lls_utils.py +2157 -0
- teradataml/sdk/README.md +79 -0
- teradataml/sdk/__init__.py +4 -0
- teradataml/sdk/_auth_modes.py +422 -0
- teradataml/sdk/_func_params.py +487 -0
- teradataml/sdk/_json_parser.py +453 -0
- teradataml/sdk/_openapi_spec_constants.py +249 -0
- teradataml/sdk/_utils.py +236 -0
- teradataml/sdk/api_client.py +900 -0
- teradataml/sdk/constants.py +62 -0
- teradataml/sdk/modelops/__init__.py +98 -0
- teradataml/sdk/modelops/_client.py +409 -0
- teradataml/sdk/modelops/_constants.py +304 -0
- teradataml/sdk/modelops/models.py +2308 -0
- teradataml/sdk/spinner.py +107 -0
- teradataml/series/__init__.py +0 -0
- teradataml/series/series.py +537 -0
- teradataml/series/series_utils.py +71 -0
- teradataml/store/__init__.py +12 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +658 -0
- teradataml/store/feature_store/feature_store.py +4814 -0
- teradataml/store/feature_store/mind_map.py +639 -0
- teradataml/store/feature_store/models.py +7330 -0
- teradataml/store/feature_store/utils.py +390 -0
- teradataml/table_operators/Apply.py +979 -0
- teradataml/table_operators/Script.py +1739 -0
- teradataml/table_operators/TableOperator.py +1343 -0
- teradataml/table_operators/__init__.py +2 -0
- teradataml/table_operators/apply_query_generator.py +262 -0
- teradataml/table_operators/query_generator.py +493 -0
- teradataml/table_operators/table_operator_query_generator.py +462 -0
- teradataml/table_operators/table_operator_util.py +726 -0
- teradataml/table_operators/templates/dataframe_apply.template +184 -0
- teradataml/table_operators/templates/dataframe_map.template +176 -0
- teradataml/table_operators/templates/dataframe_register.template +73 -0
- teradataml/table_operators/templates/dataframe_udf.template +67 -0
- teradataml/table_operators/templates/script_executor.template +170 -0
- teradataml/telemetry_utils/__init__.py +0 -0
- teradataml/telemetry_utils/queryband.py +53 -0
- teradataml/utils/__init__.py +0 -0
- teradataml/utils/docstring.py +527 -0
- teradataml/utils/dtypes.py +943 -0
- teradataml/utils/internal_buffer.py +122 -0
- teradataml/utils/print_versions.py +206 -0
- teradataml/utils/utils.py +451 -0
- teradataml/utils/validators.py +3305 -0
- teradataml-20.0.0.8.dist-info/METADATA +2804 -0
- teradataml-20.0.0.8.dist-info/RECORD +1208 -0
- teradataml-20.0.0.8.dist-info/WHEEL +5 -0
- teradataml-20.0.0.8.dist-info/top_level.txt +1 -0
- teradataml-20.0.0.8.dist-info/zip-safe +1 -0
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
{
|
|
2
|
+
"json_schema_major_version": "1",
|
|
3
|
+
"json_schema_minor_version": "0",
|
|
4
|
+
"json_content_version": "1",
|
|
5
|
+
"function_name": "write_nos",
|
|
6
|
+
"function_version": "1.0",
|
|
7
|
+
"function_type": "table_operator",
|
|
8
|
+
"function_alias_name": "write_nos",
|
|
9
|
+
"function_r_name": "write.nos",
|
|
10
|
+
"supports_view": false,
|
|
11
|
+
"short_description": "This function enables access to write data from Vantage to external storage, like Amazon S3, Azure Blob storage, or Google Cloud Storage.",
|
|
12
|
+
"long_description": "This function enables access to write data from Vantage to external storage, like Amazon S3, Azure Blob storage, or Google Cloud Storage. You must have the EXECUTE FUNCTION privilege on TD_SYSFNLIB.WRITE_NOS.",
|
|
13
|
+
"input_tables": [
|
|
14
|
+
{
|
|
15
|
+
"requiredInputKind": [
|
|
16
|
+
"PartitionByKey"
|
|
17
|
+
],
|
|
18
|
+
"isOrdered": false,
|
|
19
|
+
"isLocalOrdered": true,
|
|
20
|
+
"partitionByOne": false,
|
|
21
|
+
"hashByKey": true,
|
|
22
|
+
"name": "input",
|
|
23
|
+
"alternateNames": [],
|
|
24
|
+
"isRequired": true,
|
|
25
|
+
"rDescription": "Specifies the teradataml DataFrame containing the input data which will be written to external storage.",
|
|
26
|
+
"description": "Specifies the table containing the input data which will be written to external storage.",
|
|
27
|
+
"datatype": "TABLE_ALIAS",
|
|
28
|
+
"allowsLists": false,
|
|
29
|
+
"rName": "data",
|
|
30
|
+
"useInR": true,
|
|
31
|
+
"rOrderNum": 1
|
|
32
|
+
}
|
|
33
|
+
],
|
|
34
|
+
"argument_clauses": [
|
|
35
|
+
{
|
|
36
|
+
"permittedValues": [],
|
|
37
|
+
"isOutputColumn": false,
|
|
38
|
+
"name": "LOCATION",
|
|
39
|
+
"alternateNames": [],
|
|
40
|
+
"isRequired": false,
|
|
41
|
+
"rDescription": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to location in the external object storage system.\nA URI identifying the external storage system in the format:\n /connector/endpoint/bucket_or_container/prefix \nThe LOCATION string cannot exceed 2048 characters.",
|
|
42
|
+
"description": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to location in the external object storage system.\nA URI identifying the external storage system in the format:\n /connector/endpoint/bucket_or_container/prefix \nThe LOCATION string cannot exceed 2048 characters.",
|
|
43
|
+
"datatype": "STRING",
|
|
44
|
+
"allowsLists": false,
|
|
45
|
+
"rName": "location",
|
|
46
|
+
"useInR": true,
|
|
47
|
+
"rOrderNum": 2
|
|
48
|
+
},
|
|
49
|
+
{
|
|
50
|
+
"permittedValues": [],
|
|
51
|
+
"isOutputColumn": false,
|
|
52
|
+
"name": "AUTHORIZATION",
|
|
53
|
+
"alternateNames": [],
|
|
54
|
+
"isRequired": false,
|
|
55
|
+
"rDescription": "Specifies the authorization for accessing the external storage. A set of name:value pairs (NVPs) in JSON format that contain access information for the external storage system. \nFormat:\n { \"Access_ID\":\"your_access_id\",\"Access_Key\":\"your_access_key\" }\nNote: To protect your authorization information, Teradata recommends (TODO: Add teradataml recommendation)",
|
|
56
|
+
"description": "Specifies the authorization for accessing the external storage. A set of name:value pairs (NVPs) in JSON format that contain access information for the external storage system. \nFormat:\n { \"Access_ID\":\"your_access_id\",\"Access_Key\":\"your_access_key\" }\nNote: To protect your authorization information, Teradata recommends (TODO: Add teradataml recommendation)",
|
|
57
|
+
"datatype": ["STRING", "JSON"],
|
|
58
|
+
"allowsLists": false,
|
|
59
|
+
"rName": "authorization",
|
|
60
|
+
"useInR": true,
|
|
61
|
+
"rOrderNum": 3
|
|
62
|
+
},
|
|
63
|
+
{
|
|
64
|
+
"permittedValues": ["PARQUET"],
|
|
65
|
+
"rDefaultValue": "PARQUET",
|
|
66
|
+
"isOutputColumn": false,
|
|
67
|
+
"name": "STOREDAS",
|
|
68
|
+
"alternateNames": [],
|
|
69
|
+
"isRequired": false,
|
|
70
|
+
"rDescription": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. Objects created in external storage by write_nos are written only in Parquet format.",
|
|
71
|
+
"description": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. Objects created in external storage by write_nos are written only in Parquet format.",
|
|
72
|
+
"datatype": "STRING",
|
|
73
|
+
"allowsLists": false,
|
|
74
|
+
"rName": "stored.as",
|
|
75
|
+
"useInR": true,
|
|
76
|
+
"rOrderNum": 4
|
|
77
|
+
},
|
|
78
|
+
{
|
|
79
|
+
"permittedValues": ["DISCRETE", "RANGE"],
|
|
80
|
+
"defaultValue": "RANGE",
|
|
81
|
+
"isOutputColumn": false,
|
|
82
|
+
"name": "NAMING",
|
|
83
|
+
"alternateNames": [],
|
|
84
|
+
"isRequired": false,
|
|
85
|
+
"rDescription": "Specifies the how the objects containing the rows of data are named in the external storage:\nDiscrete naming uses the ordering column values as part of the object names in external storage. For example, if the PARTITION BY clause has ORDER BY dateColumn, intColumn, the discrete form name of the objects written to external storage would include the values for those columns as part of the object name, which would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-03-01/13/object_33_0_1.parquet\n2019-03-01 is the value for dateColumn, the first ordering column, and 13 is the value for the second ordering column, intColumn. All rows stored in this external Parquet-formatted object contain those two values.\nRange naming, the default, includes as part of the object name the range of values included in the partition for each ordering column. For example, using the same ORDER BY as above the object names would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-01-01/2019-03-02/9/10000/object_33_0_1.parquet\nwhere 2019-01-01 is the minimum value in that object for the first ordering column, dateColumn, 2019-03-02 is the maximum value for the rows stored in this external Parquet-formatted object. Value 9 is the minimum value for the second ordering column, intColumn, and 10000 is the maximum value for that column.",
|
|
86
|
+
"description": "Specifies the how the objects containing the rows of data are named in the external storage:\nDiscrete naming uses the ordering column values as part of the object names in external storage. For example, if the PARTITION BY clause has ORDER BY dateColumn, intColumn, the discrete form name of the objects written to external storage would include the values for those columns as part of the object name, which would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-03-01/13/object_33_0_1.parquet\n2019-03-01 is the value for dateColumn, the first ordering column, and 13 is the value for the second ordering column, intColumn. All rows stored in this external Parquet-formatted object contain those two values.\nRange naming, the default, includes as part of the object name the range of values included in the partition for each ordering column. For example, using the same ORDER BY as above the object names would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-01-01/2019-03-02/9/10000/object_33_0_1.parquet\nwhere 2019-01-01 is the minimum value in that object for the first ordering column, dateColumn, 2019-03-02 is the maximum value for the rows stored in this external Parquet-formatted object. Value 9 is the minimum value for the second ordering column, intColumn, and 10000 is the maximum value for that column.",
|
|
87
|
+
"datatype": "STRING",
|
|
88
|
+
"allowsLists": false,
|
|
89
|
+
"rName": "naming",
|
|
90
|
+
"useInR": true,
|
|
91
|
+
"rOrderNum": 5
|
|
92
|
+
},
|
|
93
|
+
{
|
|
94
|
+
"permittedValues": [],
|
|
95
|
+
"isOutputColumn": false,
|
|
96
|
+
"name": "MANIFESTFILE",
|
|
97
|
+
"alternateNames": [],
|
|
98
|
+
"isRequired": false,
|
|
99
|
+
"rDescription": "Specifies the fully qualified path and file name where the manifest file is written. Use the format\n/connector/end point/bucket_or_container/prefix/manifest_file_name\nFor example:\n/S3/ceph-s3.teradata.com/xz186000/manifest/manifest.json\nIf you do not include the manifestfile parameter, no manifest file is written.",
|
|
100
|
+
"description": "Specifies the fully qualified path and file name where the manifest file is written. Use the format\n/connector/end point/bucket_or_container/prefix/manifest_file_name\nFor example:\n/S3/ceph-s3.teradata.com/xz186000/manifest/manifest.json\nIf you do not include the manifestfile parameter, no manifest file is written.",
|
|
101
|
+
"datatype": "STRING",
|
|
102
|
+
"allowsLists": false,
|
|
103
|
+
"rName": "manifest.file",
|
|
104
|
+
"useInR": true,
|
|
105
|
+
"rOrderNum": 6
|
|
106
|
+
},
|
|
107
|
+
{
|
|
108
|
+
"permittedValues": [],
|
|
109
|
+
"defaultValue": false,
|
|
110
|
+
"isOutputColumn": false,
|
|
111
|
+
"name": "MANIFESTONLY",
|
|
112
|
+
"alternateNames": [],
|
|
113
|
+
"isRequired": false,
|
|
114
|
+
"rDescription": "Specifies whether to write only a manifest file in external storage or not. No actual data objects are written to external storage if you use manifestonly = True. You must also use the manifest_file option to create a manifest file in external storage. Use this option to create a new manifest file in the event that a write_nos operation fails due to a database abort or restart, or when network connectivity issues interrupt and stop a write_nos operation before all data has been written to external storage. The manifest is created from the table or query result set that is input to write_nos. The input must be a list of storage object names and sizes, with one row per object.\nNote: The input to write_nos with manifestonly can itself incorporate read_nos, similar to this, which uses function mappings for write_nos and read_nos:\n (TODO: Add manually or update json)\nA query like this can be used if a write_nos operation fails before it can create a manifest file. The new manifest file created using read_nos will reflect all data objects currently in the external storage location, and can aid in determining which data objects resulted from the incomplete write_nos operation. For more information, see Teradata Vantage™ - Native Object Store Getting Started Guide, B035-1214.",
|
|
115
|
+
"description": "Specifies whether to write only a manifest file in external storage or not. No actual data objects are written to external storage if you use manifestonly = True. You must also use the manifest_file option to create a manifest file in external storage. Use this option to create a new manifest file in the event that a write_nos operation fails due to a database abort or restart, or when network connectivity issues interrupt and stop a write_nos operation before all data has been written to external storage. The manifest is created from the table or query result set that is input to write_nos. The input must be a list of storage object names and sizes, with one row per object.\nNote: The input to write_nos with manifestonly can itself incorporate read_nos, similar to this, which uses function mappings for write_nos and read_nos:\n (TODO: Add manually or update json)\nA query like this can be used if a write_nos operation fails before it can create a manifest file. The new manifest file created using read_nos will reflect all data objects currently in the external storage location, and can aid in determining which data objects resulted from the incomplete write_nos operation. For more information, see Teradata Vantage™ - Native Object Store Getting Started Guide, B035-1214.",
|
|
116
|
+
"datatype": "BOOLEAN",
|
|
117
|
+
"allowsLists": false,
|
|
118
|
+
"rName": "manifest.only",
|
|
119
|
+
"useInR": true,
|
|
120
|
+
"rOrderNum": 7
|
|
121
|
+
},
|
|
122
|
+
{
|
|
123
|
+
"permittedValues": [],
|
|
124
|
+
"defaultValue": false,
|
|
125
|
+
"isOutputColumn": false,
|
|
126
|
+
"name": "OVERWRITE",
|
|
127
|
+
"alternateNames": [],
|
|
128
|
+
"isRequired": false,
|
|
129
|
+
"rDescription": "Specifies whether an existing manifest file in external storage will be overwritten with a new manifest file that has the same name. If False, the default, write_nos returns an error if a manifest file exists in external storage that is named identically to the value of manifestfile.\nNote: overwrite must be used with manifest.only = True",
|
|
130
|
+
"description": "Specifies whether an existing manifest file in external storage will be overwritten with a new manifest file that has the same name. If False, the default, write_nos returns an error if a manifest file exists in external storage that is named identically to the value of manifestfile.\nNote: overwrite must be used with MANIFESTONLY = True",
|
|
131
|
+
"datatype": "BOOLEAN",
|
|
132
|
+
"allowsLists": false,
|
|
133
|
+
"rName": "overwrite",
|
|
134
|
+
"useInR": true,
|
|
135
|
+
"rOrderNum": 8
|
|
136
|
+
},
|
|
137
|
+
{
|
|
138
|
+
"permittedValues": [],
|
|
139
|
+
"isOutputColumn": false,
|
|
140
|
+
"name": "INCLUDE_ORDERING",
|
|
141
|
+
"alternateNames": [],
|
|
142
|
+
"isRequired": false,
|
|
143
|
+
"rDescription": "Specifies whether the ORDER BY columns and their values are written to external storage.",
|
|
144
|
+
"description": "Specifies whether the ORDER BY columns and their values are written to external storage.",
|
|
145
|
+
"datatype": "BOOLEAN",
|
|
146
|
+
"allowsLists": false,
|
|
147
|
+
"rName": "include.ordering",
|
|
148
|
+
"useInR": true,
|
|
149
|
+
"rOrderNum": 9
|
|
150
|
+
},
|
|
151
|
+
{
|
|
152
|
+
"permittedValues": [],
|
|
153
|
+
"isOutputColumn": false,
|
|
154
|
+
"name": "INCLUDE_HASHBY",
|
|
155
|
+
"alternateNames": [],
|
|
156
|
+
"isRequired": false,
|
|
157
|
+
"rDescription": "Specifies whether the HASH BY columns and their values are written to external storage.",
|
|
158
|
+
"description": "Specifies whether the HASH BY columns and their values are written to external storage.",
|
|
159
|
+
"datatype": "BOOLEAN",
|
|
160
|
+
"allowsLists": false,
|
|
161
|
+
"rName": "include.hashby",
|
|
162
|
+
"useInR": true,
|
|
163
|
+
"rOrderNum": 10
|
|
164
|
+
},
|
|
165
|
+
{
|
|
166
|
+
"permittedValues": [],
|
|
167
|
+
"defaultValue": "16MB",
|
|
168
|
+
"isOutputColumn": false,
|
|
169
|
+
"name": "MAXOBJECTSIZE",
|
|
170
|
+
"alternateNames": [],
|
|
171
|
+
"isRequired": false,
|
|
172
|
+
"rDescription": "Specifies the maximum output object size in megabytes, where max.object.size is a number between 4 and 16. The default is the value of the DefaultRowGroupSize field in DBS Control. For more information on DBS Control, see Teradata Vantage™ - Database Utilities , B035-1102.",
|
|
173
|
+
"description": "Specifies the maximum output object size in megabytes, where MAXOBJECTSIZE is a number between 4 and 16. The default is the value of the DefaultRowGroupSize field in DBS Control. For more information on DBS Control, see Teradata Vantage™ - Database Utilities , B035-1102.",
|
|
174
|
+
"datatype": "STRING",
|
|
175
|
+
"allowsLists": false,
|
|
176
|
+
"rName": "max.object.size",
|
|
177
|
+
"useInR": true,
|
|
178
|
+
"rOrderNum": 11
|
|
179
|
+
},
|
|
180
|
+
{
|
|
181
|
+
"permittedValues": ["GZIP", "SNAPPY"],
|
|
182
|
+
"isOutputColumn": false,
|
|
183
|
+
"name": "COMPRESSION",
|
|
184
|
+
"alternateNames": [],
|
|
185
|
+
"isRequired": false,
|
|
186
|
+
"rDescription": "Specifies the compression algorithm used to compress the objects written to external storage.\nNote: For Parquet files the compression occurs inside parts of the parquet file instead of for the entire file, so the file extension on external objects remains .parquet.",
|
|
187
|
+
"description": "Specifies the compression algorithm used to compress the objects written to external storage.\nNote: For Parquet files the compression occurs inside parts of the parquet file instead of for the entire file, so the file extension on external objects remains .parquet.",
|
|
188
|
+
"datatype": "STRING",
|
|
189
|
+
"allowsLists": false,
|
|
190
|
+
"rName": "compression",
|
|
191
|
+
"useInR": true,
|
|
192
|
+
"rOrderNum": 12
|
|
193
|
+
}
|
|
194
|
+
]
|
|
195
|
+
}
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
{
|
|
2
|
+
"json_schema_major_version": "1",
|
|
3
|
+
"json_schema_minor_version": "0",
|
|
4
|
+
"json_content_version": "1",
|
|
5
|
+
"function_name": "read_nos",
|
|
6
|
+
"function_version": "1.0",
|
|
7
|
+
"function_type": "table_operator",
|
|
8
|
+
"function_alias_name": "read_nos",
|
|
9
|
+
"function_r_name": "read.nos",
|
|
10
|
+
"short_description": "This function enables access to external files in JSON, CSV, or Parquet format.",
|
|
11
|
+
"long_description": "This function enables access to external files in JSON, CSV, or Parquet format. You must have the EXECUTE FUNCTION privilege on TD_SYSFNLIB.READ_NOS.",
|
|
12
|
+
"input_tables": [
|
|
13
|
+
{
|
|
14
|
+
"requiredInputKind": [
|
|
15
|
+
"PartitionByAny"
|
|
16
|
+
],
|
|
17
|
+
"isOrdered": false,
|
|
18
|
+
"partitionByOne": false,
|
|
19
|
+
"name": "input",
|
|
20
|
+
"alternateNames": [],
|
|
21
|
+
"isRequired": false,
|
|
22
|
+
"rDescription": "Specifies the teradataml DataFrame containing the input data.",
|
|
23
|
+
"description": "Specifies the table containing the input data.",
|
|
24
|
+
"datatype": "TABLE_ALIAS",
|
|
25
|
+
"allowsLists": false,
|
|
26
|
+
"rName": "data",
|
|
27
|
+
"useInR": true,
|
|
28
|
+
"rOrderNum": 1
|
|
29
|
+
}
|
|
30
|
+
],
|
|
31
|
+
"argument_clauses": [
|
|
32
|
+
{
|
|
33
|
+
"permittedValues": [],
|
|
34
|
+
"isOutputColumn": false,
|
|
35
|
+
"name": "LOCATION",
|
|
36
|
+
"alternateNames": [],
|
|
37
|
+
"isRequired": false,
|
|
38
|
+
"rDescription": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to the data in the external object storage system. The location value includes the following components:\nAmazon S3: /connector/bucket.endpoint/[key_prefix].\nAzure Blob storage and Azure Data Lake Storage Gen2: /connector/container.endpoint/[key_prefix].\nGoogle Cloud Storage: /connector/endpoint/bucket/[key_prefix].\nconnector: Identifies the type of external storage system where the data is located. Teradata requires the storage location to start with the following for all external storage locations:\nAmazon S3 storage location must begin with /S3 or /s3\nAzure Blob storage location (including Azure Data Lake Storage Gen2 in Blob Interop Mode) must begin with /AZ or /az\nGoogle Cloud Storage location must begin with /GS or /gs.\nendpoint: A URL that identifies the system-specific entry point for the external object storage system.\nbucket (Amazon S3, Google Cloud Storage) or container (Azure Blob storage and Azure Data Lake Storage Gen2): A container that logically groups stored objects in the external storage system.\nkey_prefix: Identifies one or more objects in the logical organization of the bucket data. Because it is a key prefix, not an actual directory path, the key prefix may match one or more objects in the external storage. For example, the key prefix '/fabrics/cotton/colors/b/' would match objects: /fabrics/cotton/colors/blue, /fabrics/cotton/colors/brown, and /fabrics/cotton/colors/black. 
If there were organization levels below those, such as /fabrics/cotton/colors/blue/shirts, the same key prefix would gather those objects too.\nNote: Vantage validates only the first file it encounters from the location key prefix.\nFor example, this location value might specify all objects on an Amazon cloud storage system for the month of December, 2001:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/\nThis location could specify an individual storage object (or file), Day1.csv:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/Day1.csv\nThis location specifies an entire container in an Azure external object store (Azure Blob storage or Azure Data Lake Storage Gen2). The container may contain multiple file objects:\nlocation = '/AZ/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/nos-csv-data'\nconnector: AZ, bucket: YOUR-STORAGE-ACCOUNT, endpoint: blob.core.windows.net, key_prefix: nos-csv-data\nThis is an example of a Google Cloud Storage location:\nconnector: GS, bucket: YOUR-BUCKET, endpoint: storage.googleapis.com, key_prefix: CSVDATA/RIVERS/rivers.csv",
|
|
39
|
+
"description": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to the data in the external object storage system. The location value includes the following components:\nAmazon S3: /connector/bucket.endpoint/[key_prefix].\nAzure Blob storage and Azure Data Lake Storage Gen2: /connector/container.endpoint/[key_prefix].\nGoogle Cloud Storage: /connector/endpoint/bucket/[key_prefix].\nconnector: Identifies the type of external storage system where the data is located. Teradata requires the storage location to start with the following for all external storage locations:\nAmazon S3 storage location must begin with /S3 or /s3\nAzure Blob storage location (including Azure Data Lake Storage Gen2 in Blob Interop Mode) must begin with /AZ or /az\nGoogle Cloud Storage location must begin with /GS or /gs.\nendpoint: A URL that identifies the system-specific entry point for the external object storage system.\nbucket (Amazon S3, Google Cloud Storage) or container (Azure Blob storage and Azure Data Lake Storage Gen2): A container that logically groups stored objects in the external storage system.\nkey_prefix: Identifies one or more objects in the logical organization of the bucket data. Because it is a key prefix, not an actual directory path, the key prefix may match one or more objects in the external storage. For example, the key prefix '/fabrics/cotton/colors/b/' would match objects: /fabrics/cotton/colors/blue, /fabrics/cotton/colors/brown, and /fabrics/cotton/colors/black. 
If there were organization levels below those, such as /fabrics/cotton/colors/blue/shirts, the same key prefix would gather those objects too.\nNote: Vantage validates only the first file it encounters from the location key prefix.\nFor example, this location value might specify all objects on an Amazon cloud storage system for the month of December, 2001:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/\nThis location could specify an individual storage object (or file), Day1.csv:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/Day1.csv\nThis location specifies an entire container in an Azure external object store (Azure Blob storage or Azure Data Lake Storage Gen2). The container may contain multiple file objects:\nlocation = '/AZ/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/nos-csv-data'\nconnector: AZ, bucket: YOUR-STORAGE-ACCOUNT, endpoint: blob.core.windows.net, key_prefix: nos-csv-data\nThis is an example of a Google Cloud Storage location:\nconnector: GS, bucket: YOUR-BUCKET, endpoint: storage.googleapis.com, key_prefix: CSVDATA/RIVERS/rivers.csv",
|
|
40
|
+
"datatype": "STRING",
|
|
41
|
+
"allowsLists": false,
|
|
42
|
+
"rName": "location",
|
|
43
|
+
"useInR": true,
|
|
44
|
+
"rOrderNum": 2
|
|
45
|
+
},
|
|
46
|
+
{
|
|
47
|
+
"permittedValues": [],
|
|
48
|
+
"isOutputColumn": false,
|
|
49
|
+
"name": "AUTHORIZATION",
|
|
50
|
+
"alternateNames": [],
|
|
51
|
+
"isRequired": false,
|
|
52
|
+
"rDescription": "Specifies the authorization for accessing external storage. On any platform, you can specify an authorization object ([DatabaseName.]AuthorizationObjectName). You must have the EXECUTE privilege on AuthorizationObjectName. On Amazon S3 and Azure Blob storage and Azure Data Lake Storage Gen2, you can specify either an authorization object or a string in JSON format. The string specifies the USER (identification) and PASSWORD (secret_key) for accessing external storage. The following table shows the supported credentials for USER and PASSWORD (used in the CREATE AUTHORIZATION command):\n\nSystem/Scheme |USER |PASSWORD\nAWS |Access Key ID |Access Key Secret\nAzure / Shared Key |Storage Account Name |Storage Account Key\nAzure Shared Access Signature (SAS) |Storage Account Name |Account SAS Token\nGoogle Cloud (S3 interop mode) |Access Key ID |Access Key Secret\nGoogle Cloud (native) |Client Email |Private Key\nOn-premises object stores |Access Key ID |Access Key Secret\nPublic access object stores |<empty string> |<empty string>\n |Enclose the empty string in |Enclose the empty string in\n |single straight quotes: USER ''| single straight quotes: PASSWORD ''\nIf you use a function mapping to define a wrapper for READ_NOS, you can specify the authorization in the function mapping. Note that [ INVOKER | DEFINER ] TRUSTED must be used with function mapping. If you are using AWS IAM credentials, you can omit the AUTHORIZATION clause. When accessing GCS, Advanced SQL Engine uses either the S3-compatible connector or the native Google connector, depending on the user credentials.",
|
|
53
|
+
"description": "Specifies the authorization for accessing external storage. On any platform, you can specify an authorization object ([DatabaseName.]AuthorizationObjectName). You must have the EXECUTE privilege on AuthorizationObjectName. On Amazon S3 and Azure Blob storage and Azure Data Lake Storage Gen2, you can specify either an authorization object or a string in JSON format. The string specifies the USER (identification) and PASSWORD (secret_key) for accessing external storage. The following table shows the supported credentials for USER and PASSWORD (used in the CREATE AUTHORIZATION command):\n\nSystem/Scheme |USER |PASSWORD\nAWS |Access Key ID |Access Key Secret\nAzure / Shared Key |Storage Account Name |Storage Account Key\nAzure Shared Access Signature (SAS) |Storage Account Name |Account SAS Token\nGoogle Cloud (S3 interop mode) |Access Key ID |Access Key Secret\nGoogle Cloud (native) |Client Email |Private Key\nOn-premises object stores |Access Key ID |Access Key Secret\nPublic access object stores |<empty string> |<empty string>\n |Enclose the empty string in |Enclose the empty string in\n |single straight quotes: USER ''| single straight quotes: PASSWORD ''\nIf you use a function mapping to define a wrapper for READ_NOS, you can specify the authorization in the function mapping. Note that [ INVOKER | DEFINER ] TRUSTED must be used with function mapping. If you are using AWS IAM credentials, you can omit the AUTHORIZATION clause. When accessing GCS, Advanced SQL Engine uses either the S3-compatible connector or the native Google connector, depending on the user credentials.",
|
|
54
|
+
"datatype": ["STRING", "JSON"],
|
|
55
|
+
"allowsLists": false,
|
|
56
|
+
"rName": "authorization",
|
|
57
|
+
"useInR": true,
|
|
58
|
+
"rOrderNum": 3
|
|
59
|
+
},
|
|
60
|
+
{
|
|
61
|
+
"permittedValues": [],
|
|
62
|
+
"defaultValue": 16,
|
|
63
|
+
"isOutputColumn": false,
|
|
64
|
+
"name": "BUFFERSIZE",
|
|
65
|
+
"alternateNames": [],
|
|
66
|
+
"isRequired": false,
|
|
67
|
+
"rDescription": "Specifies the size of the network buffer to allocate when retrieving data from the external storage repository. The default value is 16 MB, which is the maximum value",
|
|
68
|
+
"description": "Specifies the size of the network buffer to allocate when retrieving data from the external storage repository. The default value is 16 MB, which is the maximum value",
|
|
69
|
+
"datatype": "INTEGER",
|
|
70
|
+
"allowsLists": false,
|
|
71
|
+
"rName": "buffer.size",
|
|
72
|
+
"useInR": true,
|
|
73
|
+
"rOrderNum": 4
|
|
74
|
+
},
|
|
75
|
+
{
|
|
76
|
+
"permittedValues": ["NOSREAD_RECORD", "NOSREAD_KEYS", "NOSREAD_SCHEMA", "NOSREAD_PARQUET_SCHEMA"],
|
|
77
|
+
"defaultValue": "NOSREAD_RECORD",
|
|
78
|
+
"isOutputColumn": false,
|
|
79
|
+
"name": "RETURNTYPE",
|
|
80
|
+
"alternateNames": [],
|
|
81
|
+
"isRequired": false,
|
|
82
|
+
"rDescription": "Specifies the format in which data is returned.\nNOSREAD_RECORD: Returns one row for each external record along with its metadata. This is the default. Access external records by specifying one of the following:\n* Input table and location and an empty table. For CSV, you can include a schema definition.\n* Input table with a row for each external file. For CSV, this method does not support a schema definition.\nFor an empty single-column input table, do the following:\n* Define an input table with a single column, Payload, with the appropriate data type: JSON and DATASET with a Storage Format of CSV. This column determines the output Payload column return type.\n* For location, specify the filepath.\nFor a multiple-column input table, define an input table with the following columns:\n* Location VARCHAR(2048) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* OffsetIntoObject BIGINT\n* ObjectLength BIGINT\n* Payload JSON or VARCHAR for CSV\nThis table can be populated using the output of the NOSREAD_KEYS return type.\nNOSREAD_KEYS: Retrieve the list of files from the path specified in the LOCATION USING clause. A schema definition is not necessary. Returns: Location, ObjectVersionID, ObjectTimeStamp, ObjectLength, size of external file.\nNOSREAD_RAW: Retrieves file data from the external storage services, not specific records. Retrieved data is returned as CLOB/BLOB. You can retrieve a complete file from external storage and save in Teradata CLOB/BLOB format. The maximum amount of data that can be retrieved from the external storage and saved in the Teradata column is 2GB, the Vantage limit for LOBs. The ObjectLength corresponds to the length of CLOB/BLOB column read from the external storage. This information is provided in the form of a table returned to the READ_NOS table operator. 
The Payload column in the input table is only used to determine the datatype of the column in which the returned data is stored.\nDefine the input table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), ObjectTimeStamp TIMESTAMP(6), ObjectTimeStamp TIMESTAMP(6), Payload CLOB/BLOB.\nREAD_NOS returns a table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, OffsetIntoObject BIGINT, Payload CLOB/BLOB, based on input table CLOB/BLOB Column.\nNOSREAD_PARQUET_SCHEMA: Returns information about the Parquet data schema. For information about the mapping between Parquet data types and Teradata data types, see Parquet External Files in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
|
|
83
|
+
"description": "Specifies the format in which data is returned.\nNOSREAD_RECORD: Returns one row for each external record along with its metadata. This is the default. Access external records by specifying one of the following:\n* Input table and location and an empty table. For CSV, you can include a schema definition.\n* Input table with a row for each external file. For CSV, this method does not support a schema definition.\nFor an empty single-column input table, do the following:\n* Define an input table with a single column, Payload, with the appropriate data type: JSON and DATASET with a Storage Format of CSV. This column determines the output Payload column return type.\n* For location, specify the filepath.\nFor a multiple-column input table, define an input table with the following columns:\n* Location VARCHAR(2048) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* OffsetIntoObject BIGINT\n* ObjectLength BIGINT\n* Payload JSON or VARCHAR for CSV\nThis table can be populated using the output of the NOSREAD_KEYS return type.\nNOSREAD_KEYS: Retrieve the list of files from the path specified in the LOCATION USING clause. A schema definition is not necessary. Returns: Location, ObjectVersionID, ObjectTimeStamp, ObjectLength, size of external file.\nNOSREAD_RAW: Retrieves file data from the external storage services, not specific records. Retrieved data is returned as CLOB/BLOB. You can retrieve a complete file from external storage and save in Teradata CLOB/BLOB format. The maximum amount of data that can be retrieved from the external storage and saved in the Teradata column is 2GB, the Vantage limit for LOBs. The ObjectLength corresponds to the length of CLOB/BLOB column read from the external storage. This information is provided in the form of a table returned to the READ_NOS table operator. 
The Payload column in the input table is only used to determine the datatype of the column in which the returned data is stored.\nDefine the input table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, ObjectLength BIGINT, Payload CLOB/BLOB.\nREAD_NOS returns a table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, ObjectLength BIGINT, Payload CLOB/BLOB, based on input table CLOB/BLOB Column.\nNOSREAD_PARQUET_SCHEMA: Returns information about the Parquet data schema. For information about the mapping between Parquet data types and Teradata data types, see Parquet External Files in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
|
|
84
|
+
"datatype": "STRING",
|
|
85
|
+
"allowsLists": false,
|
|
86
|
+
"rName": "return.type",
|
|
87
|
+
"useInR": true,
|
|
88
|
+
"rOrderNum": 5
|
|
89
|
+
},
|
|
90
|
+
{
|
|
91
|
+
"permittedValues": [],
|
|
92
|
+
"defaultValue": 1.0,
|
|
93
|
+
"lowerBound": 0.0,
|
|
94
|
+
"upperBound": 1.0,
|
|
95
|
+
"lowerBoundType": "INCLUSIVE",
|
|
96
|
+
"upperBoundType": "INCLUSIVE",
|
|
97
|
+
"isOutputColumn": false,
|
|
98
|
+
"name": "SAMPLE_PERC",
|
|
99
|
+
"alternateNames": [],
|
|
100
|
+
"isRequired": false,
|
|
101
|
+
"rDescription": "Specifies the percentage of rows to retrieve from the external storage repository when return.type is NOSREAD_RECORD. The valid range of values is from '0.0' to '1.0', where '1.0' represents 100% of the rows. The default value is 1.0.",
|
|
102
|
+
"description": "Specifies the percentage of rows to retrieve from the external storage repository when return.type is NOSREAD_RECORD. The valid range of values is from '0.0' to '1.0', where '1.0' represents 100% of the rows. The default value is 1.0.",
|
|
103
|
+
"datatype": "DOUBLE PRECISION",
|
|
104
|
+
"allowsLists": false,
|
|
105
|
+
"rName": "sample.perc",
|
|
106
|
+
"useInR": true,
|
|
107
|
+
"rOrderNum": 6
|
|
108
|
+
},
|
|
109
|
+
{
|
|
110
|
+
"permittedValues": ["PARQUET", "TEXTFILE"],
|
|
111
|
+
"defaultValue": "TEXTFILE",
|
|
112
|
+
"isOutputColumn": false,
|
|
113
|
+
"name": "STOREDAS",
|
|
114
|
+
"alternateNames": [],
|
|
115
|
+
"isRequired": false,
|
|
116
|
+
"rDescription": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. This is a required parameter for Parquet data.\nTEXTFILE means the external data uses a text-based format, such as CSV or JSON.\nThe default is TEXTFILE.",
|
|
117
|
+
"description": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. This is a required parameter for Parquet data.\nTEXTFILE means the external data uses a text-based format, such as CSV or JSON.\nThe default is TEXTFILE.",
|
|
118
|
+
"datatype": "STRING",
|
|
119
|
+
"allowsLists": false,
|
|
120
|
+
"rName": "stored.as",
|
|
121
|
+
"useInR": true,
|
|
122
|
+
"rOrderNum": 7
|
|
123
|
+
},
|
|
124
|
+
{
|
|
125
|
+
"permittedValues": [],
|
|
126
|
+
"defaultValue": false,
|
|
127
|
+
"isOutputColumn": false,
|
|
128
|
+
"name": "FULLSCAN",
|
|
129
|
+
"alternateNames": [],
|
|
130
|
+
"isRequired": false,
|
|
131
|
+
"rDescription": "Specifies whether read.nos scans columns of variable length types (CHAR, VARCHAR, BYTE, VARBYTE, JSON, and BSON) to discover the maximum length.\nWhen set to True, the sizes of variable length data is determined from the Parquet data.\nNote: Choosing this value can impact performance because all variable length data type columns in each Parquet file at the location must be scanned to assess the value having the greatest length.\nWhen set to False, variable length field sizes are assigned the Vantage maximum value for the particular data type. The default is False.",
|
|
132
|
+
"description": "Determines whether READ_NOS scans columns of variable length types (CHAR, VARCHAR, BYTE, VARBYTE, JSON, and BSON) to discover the maximum length.\nWhen set to True, the sizes of variable length data is determined from the Parquet data.\nNote: Choosing this value can impact performance because all variable length data type columns in each Parquet file at the location must be scanned to assess the value having the greatest length.\nWhen set to False, variable length field sizes are assigned the Vantage maximum value for the particular data type. The default is False.",
|
|
133
|
+
"datatype": "BOOLEAN",
|
|
134
|
+
"allowsLists": false,
|
|
135
|
+
"rName": "full.scan",
|
|
136
|
+
"useInR": true,
|
|
137
|
+
"rOrderNum": 8
|
|
138
|
+
},
|
|
139
|
+
{
|
|
140
|
+
"permittedValues": [],
|
|
141
|
+
"defaultValue": false,
|
|
142
|
+
"isOutputColumn": false,
|
|
143
|
+
"name": "MANIFEST",
|
|
144
|
+
"alternateNames": [],
|
|
145
|
+
"isRequired": false,
|
|
146
|
+
"rDescription": "Specifies whether the location value points to a manifest file (a file containing a list of files to read) or object name. The object name can include the full path or a partial path. It must identify a single file containing the manifest. Note: The individual entries within the manifest file must show complete paths. Below is an example of a manifest file that contains a list of entries to locations in JSON format\n{\n \"entries\": [\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-10.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-101.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-102.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-103.json\"}\n ]\n}",
|
|
147
|
+
"description": "Specifies whether the LOCATION value points to a manifest file (a file containing a list of files to read) or object name. The object name can include the full path or a partial path. It must identify a single file containing the manifest. Note: The individual entries within the manifest file must show complete paths. Below is an example of a manifest file that contains a list of entries to locations in JSON format\n{\n \"entries\": [\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-10.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-101.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-102.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-103.json\"}\n ]\n}",
|
|
148
|
+
"datatype": "BOOLEAN",
|
|
149
|
+
"allowsLists": false,
|
|
150
|
+
"rName": "manifest",
|
|
151
|
+
"useInR": true,
|
|
152
|
+
"rOrderNum": 9
|
|
153
|
+
},
|
|
154
|
+
{
|
|
155
|
+
"permittedValues": [],
|
|
156
|
+
"isOutputColumn": false,
|
|
157
|
+
"name": "ROWFORMAT",
|
|
158
|
+
"alternateNames": [],
|
|
159
|
+
"isRequired": false,
|
|
160
|
+
"rDescription": "Specifies the encoding format of the external row, for example:\nrow.format = '{\"field_delimiter\":\",\", \"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}'.\nSpecify row_format using JSON format. It can include only the three keys shown above. Key names and values are case-specific, except for the value for \"character_set\", which can use any combination of letter cases.\nThe row.format character set specification must be compatible with character set of the Payload column. Do not specify row.format for Parquet format data. For a JSON column, these are the default values:\nUNICODE: row.format = '{\"record_delimiter\":\"\n\", \"character_set\":\"UTF8\"}'\nLATIN: row.format = '{\"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}'\nFor a CSV column, these are the default values:\nUNICODE: row.format = '{\"character_set\":\"UTF8\"}'\nThis is the default if you do not specify an input table for read.nos.\nLATIN: row.format = '{\"character_set\":\"LATIN\"}'\nYou can specify the following options:\nfield_delimiter-> The default is ',' (comma). You can also specify a custom field delimiter, such as tab '\t'.\nrecord_delimiter-> New line feed character: '\n'. A line feed (\n) is the only acceptable record delimiter.\ncharacter_set -> 'UTF8' or 'LATIN'. If you do not specify a row.format or payload column, Vantage assumes UTF8 Unicode.",
|
|
161
|
+
"description": "Specifies the encoding format of the external row, for example:\nROWFORMAT('{\"field_delimiter\":\",\", \"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}').\nSpecify ROWFORMAT using JSON format. It can include only the three keys shown above. Key names and values are case-specific, except for the value for \"character_set\", which can use any combination of letter cases.\nThe ROWFORMAT character set specification must be compatible with character set of the Payload column. Do not specify ROWFORMAT for Parquet format data. For a JSON column, these are the default values:\nUNICODE: ROWFORMAT('{\"record_delimiter\":\"\n\", \"character_set\":\"UTF8\"}')\nLATIN: ROWFORMAT('{\"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}')\nFor a CSV column, these are the default values:\nUNICODE: ROWFORMAT('{\"character_set\":\"UTF8\"}')\nThis is the default if you do not specify an input table for READ_NOS.\nLATIN: ROWFORMAT('{\"character_set\":\"LATIN\"}')\nYou can specify the following options:\nfield_delimiter-> The default is ',' (comma). You can also specify a custom field delimiter, such as tab '\t'.\nrecord_delimiter-> New line feed character: '\n'. A line feed (\n) is the only acceptable record delimiter.\ncharacter_set -> 'UTF8' or 'LATIN'. If you do not specify a ROWFORMAT or payload column, Vantage assumes UTF8 Unicode.",
|
|
162
|
+
"datatype": ["STRING", "JSON"],
|
|
163
|
+
"allowsLists": false,
|
|
164
|
+
"rName": "row.format",
|
|
165
|
+
"useInR": true,
|
|
166
|
+
"rOrderNum": 10
|
|
167
|
+
},
|
|
168
|
+
{
|
|
169
|
+
"permittedValues": [],
|
|
170
|
+
"defaultValue": true,
|
|
171
|
+
"isOutputColumn": false,
|
|
172
|
+
"name": "HEADER",
|
|
173
|
+
"alternateNames": [],
|
|
174
|
+
"isRequired": false,
|
|
175
|
+
"rDescription": "Specifies whether the first row of data in an input CSV file is interpreted as column headings for the subsequent rows of data. Use this parameter only when a CSV input file is not associated with a separate schema object that defines columns for the CSV data. The value for header can be 'True' or 'False'. The default is 'True'.",
|
|
176
|
+
"description": "Specifies whether the first row of data in an input CSV file is interpreted as column headings for the subsequent rows of data. Use this parameter only when a CSV input file is not associated with a separate schema object that defines columns for the CSV data. The value for HEADER can be 'TRUE' or 'FALSE'. The default is 'TRUE'.",
|
|
177
|
+
"datatype": "BOOLEAN",
|
|
178
|
+
"allowsLists": false,
|
|
179
|
+
"rName": "header",
|
|
180
|
+
"useInR": true,
|
|
181
|
+
"rOrderNum": 11
|
|
182
|
+
}
|
|
183
|
+
]
|
|
184
|
+
}
|
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
{
|
|
2
|
+
"json_schema_major_version": "1",
|
|
3
|
+
"json_schema_minor_version": "0",
|
|
4
|
+
"json_content_version": "1",
|
|
5
|
+
"function_name": "write_nos",
|
|
6
|
+
"function_version": "1.0",
|
|
7
|
+
"function_type": "table_operator",
|
|
8
|
+
"function_alias_name": "write_nos",
|
|
9
|
+
"function_r_name": "write.nos",
|
|
10
|
+
"supports_view": false,
|
|
11
|
+
"short_description": "This function enables access to write data from Vantage to external storage, like Amazon S3, Azure Blob storage, or Google Cloud Storage.",
|
|
12
|
+
"long_description": "This function enables access to write data from Vantage to external storage, like Amazon S3, Azure Blob storage, or Google Cloud Storage. You must have the EXECUTE FUNCTION privilege on TD_SYSFNLIB.WRITE_NOS.",
|
|
13
|
+
"input_tables": [
|
|
14
|
+
{
|
|
15
|
+
"requiredInputKind": [
|
|
16
|
+
"PartitionByKey"
|
|
17
|
+
],
|
|
18
|
+
"isOrdered": false,
|
|
19
|
+
"isLocalOrdered": true,
|
|
20
|
+
"partitionByOne": false,
|
|
21
|
+
"hashByKey": true,
|
|
22
|
+
"name": "input",
|
|
23
|
+
"alternateNames": [],
|
|
24
|
+
"isRequired": true,
|
|
25
|
+
"rDescription": "Specifies the teradataml DataFrame containing the input data which will be written to external storage.",
|
|
26
|
+
"description": "Specifies the table containing the input data which will be written to external storage.",
|
|
27
|
+
"datatype": "TABLE_ALIAS",
|
|
28
|
+
"allowsLists": false,
|
|
29
|
+
"rName": "data",
|
|
30
|
+
"useInR": true,
|
|
31
|
+
"rOrderNum": 1
|
|
32
|
+
}
|
|
33
|
+
],
|
|
34
|
+
"argument_clauses": [
|
|
35
|
+
{
|
|
36
|
+
"permittedValues": [],
|
|
37
|
+
"isOutputColumn": false,
|
|
38
|
+
"name": "LOCATION",
|
|
39
|
+
"alternateNames": [],
|
|
40
|
+
"isRequired": false,
|
|
41
|
+
"rDescription": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to location in the external object storage system.\nA URI identifying the external storage system in the format:\n /connector/endpoint/bucket_or_container/prefix \nThe LOCATION string cannot exceed 2048 characters.",
|
|
42
|
+
"description": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to location in the external object storage system.\nA URI identifying the external storage system in the format:\n /connector/endpoint/bucket_or_container/prefix \nThe LOCATION string cannot exceed 2048 characters.",
|
|
43
|
+
"datatype": "STRING",
|
|
44
|
+
"allowsLists": false,
|
|
45
|
+
"rName": "location",
|
|
46
|
+
"useInR": true,
|
|
47
|
+
"rOrderNum": 2
|
|
48
|
+
},
|
|
49
|
+
{
|
|
50
|
+
"permittedValues": [],
|
|
51
|
+
"isOutputColumn": false,
|
|
52
|
+
"name": "AUTHORIZATION",
|
|
53
|
+
"alternateNames": [],
|
|
54
|
+
"isRequired": false,
|
|
55
|
+
"rDescription": "Specifies the authorization for accessing the external storage. On any platform, you can specify an authorization object ([DatabaseName.]AuthorizationObjectName). You must have the EXECUTE privilege on AuthorizationObjectName.\nOn Amazon S3 and Azure Blob storage and Azure Data Lake Storage Gen2, you can specify either an authorization object or a string in JSON format. The string specifies the USER (identification) and PASSWORD (secret_key) for accessing external storage. The following table shows the supported credentials for USER and PASSWORD (used in the CREATE AUTHORIZATION command):\nSystem/Scheme |USER |PASSWORD\nAWS |Access Key ID |Access Key Secret\nAzure / Shared Key |Storage Account Name |Storage Account Key\nAzure Shared Access Signature (SAS) |Storage Account Name |Account SAS Token\nGoogle Cloud (S3 interop mode) |Access Key ID |Access Key Secret\nGoogle Cloud (native) |Client Email |Private Key\nOn-premises object stores |Access Key ID |Access Key Secret\nPublic access object stores |<empty string> |<empty string>\n |Enclose the empty string in |Enclose the empty string in\n |single straight quotes: USER ''| single straight quotes: PASSWORD ''\nIf you use a function mapping to define a wrapper for READ_NOS, you can specify the authorization in the function mapping. With function mappings, you can use only [ INVOKER | DEFINER ] TRUSTED, not system-wide authorization.\nIf an AWS IAM credential provides access, you can omit the AUTHORIZATION clause.",
|
|
56
|
+
"description": "Specifies the authorization for accessing the external storage. On any platform, you can specify an authorization object ([DatabaseName.]AuthorizationObjectName). You must have the EXECUTE privilege on AuthorizationObjectName.\nOn Amazon S3 and Azure Blob storage and Azure Data Lake Storage Gen2, you can specify either an authorization object or a string in JSON format. The string specifies the USER (identification) and PASSWORD (secret_key) for accessing external storage. The following table shows the supported credentials for USER and PASSWORD (used in the CREATE AUTHORIZATION command):\nSystem/Scheme |USER |PASSWORD\nAWS |Access Key ID |Access Key Secret\nAzure / Shared Key |Storage Account Name |Storage Account Key\nAzure Shared Access Signature (SAS) |Storage Account Name |Account SAS Token\nGoogle Cloud (S3 interop mode) |Access Key ID |Access Key Secret\nGoogle Cloud (native) |Client Email |Private Key\nOn-premises object stores |Access Key ID |Access Key Secret\nPublic access object stores |<empty string> |<empty string>\n |Enclose the empty string in |Enclose the empty string in\n |single straight quotes: USER ''| single straight quotes: PASSWORD ''\nIf you use a function mapping to define a wrapper for READ_NOS, you can specify the authorization in the function mapping. With function mappings, you can use only [ INVOKER | DEFINER ] TRUSTED, not system-wide authorization.\nIf an AWS IAM credential provides access, you can omit the AUTHORIZATION clause.",
|
|
57
|
+
"datatype": ["STRING", "JSON"],
|
|
58
|
+
"allowsLists": false,
|
|
59
|
+
"rName": "authorization",
|
|
60
|
+
"useInR": true,
|
|
61
|
+
"rOrderNum": 3
|
|
62
|
+
},
|
|
63
|
+
{
|
|
64
|
+
"permittedValues": ["PARQUET"],
|
|
65
|
+
"rDefaultValue": "PARQUET",
|
|
66
|
+
"isOutputColumn": false,
|
|
67
|
+
"name": "STOREDAS",
|
|
68
|
+
"alternateNames": [],
|
|
69
|
+
"isRequired": false,
|
|
70
|
+
"rDescription": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. Objects created in external storage by write_nos are written only in Parquet format.",
|
|
71
|
+
"description": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. Objects created in external storage by write_nos are written only in Parquet format.",
|
|
72
|
+
"datatype": "STRING",
|
|
73
|
+
"allowsLists": false,
|
|
74
|
+
"rName": "stored.as",
|
|
75
|
+
"useInR": true,
|
|
76
|
+
"rOrderNum": 4
|
|
77
|
+
},
|
|
78
|
+
{
|
|
79
|
+
"permittedValues": ["DISCRETE", "RANGE"],
|
|
80
|
+
"defaultValue": "RANGE",
|
|
81
|
+
"isOutputColumn": false,
|
|
82
|
+
"name": "NAMING",
|
|
83
|
+
"alternateNames": [],
|
|
84
|
+
"isRequired": false,
|
|
85
|
+
"rDescription": "Specifies how the objects containing the rows of data are named in the external storage:\nDiscrete naming uses the ordering column values as part of the object names in external storage. For example, if the PARTITION BY clause has ORDER BY dateColumn, intColumn, the discrete form name of the objects written to external storage would include the values for those columns as part of the object name, which would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-03-01/13/object_33_0_1.parquet\n2019-03-01 is the value for dateColumn, the first ordering column, and 13 is the value for the second ordering column, intColumn. All rows stored in this external Parquet-formatted object contain those two values.\nRange naming, the default, includes as part of the object name the range of values included in the partition for each ordering column. For example, using the same ORDER BY as above the object names would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-01-01/2019-03-02/9/10000/object_33_0_1.parquet\nwhere 2019-01-01 is the minimum value in that object for the first ordering column, dateColumn, 2019-03-02 is the maximum value for the rows stored in this external Parquet-formatted object. Value 9 is the minimum value for the second ordering column, intColumn, and 10000 is the maximum value for that column.",
|
|
86
|
+
"description": "Specifies how the objects containing the rows of data are named in the external storage:\nDiscrete naming uses the ordering column values as part of the object names in external storage. For example, if the PARTITION BY clause has ORDER BY dateColumn, intColumn, the discrete form name of the objects written to external storage would include the values for those columns as part of the object name, which would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-03-01/13/object_33_0_1.parquet\n2019-03-01 is the value for dateColumn, the first ordering column, and 13 is the value for the second ordering column, intColumn. All rows stored in this external Parquet-formatted object contain those two values.\nRange naming, the default, includes as part of the object name the range of values included in the partition for each ordering column. For example, using the same ORDER BY as above the object names would look similar to this:\nS3/ceph-s3.teradata.com/xz186000/2019-01-01/2019-03-02/9/10000/object_33_0_1.parquet\nwhere 2019-01-01 is the minimum value in that object for the first ordering column, dateColumn, 2019-03-02 is the maximum value for the rows stored in this external Parquet-formatted object. Value 9 is the minimum value for the second ordering column, intColumn, and 10000 is the maximum value for that column.",
|
|
87
|
+
"datatype": "STRING",
|
|
88
|
+
"allowsLists": false,
|
|
89
|
+
"rName": "naming",
|
|
90
|
+
"useInR": true,
|
|
91
|
+
"rOrderNum": 5
|
|
92
|
+
},
|
|
93
|
+
{
|
|
94
|
+
"permittedValues": [],
|
|
95
|
+
"isOutputColumn": false,
|
|
96
|
+
"name": "MANIFESTFILE",
|
|
97
|
+
"alternateNames": [],
|
|
98
|
+
"isRequired": false,
|
|
99
|
+
"rDescription": "Specifies the fully qualified path and file name where the manifest file is written. Use the format\n/connector/end point/bucket_or_container/prefix/manifest_file_name\nFor example:\n/S3/ceph-s3.teradata.com/xz186000/manifest/manifest.json\nIf you do not include the manifestfile parameter, no manifest file is written.",
|
|
100
|
+
"description": "Specifies the fully qualified path and file name where the manifest file is written. Use the format\n/connector/end point/bucket_or_container/prefix/manifest_file_name\nFor example:\n/S3/ceph-s3.teradata.com/xz186000/manifest/manifest.json\nIf you do not include the manifestfile parameter, no manifest file is written.",
|
|
101
|
+
"datatype": "STRING",
|
|
102
|
+
"allowsLists": false,
|
|
103
|
+
"rName": "manifest.file",
|
|
104
|
+
"useInR": true,
|
|
105
|
+
"rOrderNum": 6
|
|
106
|
+
},
|
|
107
|
+
{
|
|
108
|
+
"permittedValues": [],
|
|
109
|
+
"defaultValue": false,
|
|
110
|
+
"isOutputColumn": false,
|
|
111
|
+
"name": "MANIFESTONLY",
|
|
112
|
+
"alternateNames": [],
|
|
113
|
+
"isRequired": false,
|
|
114
|
+
"rDescription": "Specifies whether to write only a manifest file in external storage or not. No actual data objects are written to external storage if you use manifestonly = True. You must also use the manifest_file option to create a manifest file in external storage. Use this option to create a new manifest file in the event that a write_nos operation fails due to a database abort or restart, or when network connectivity issues interrupt and stop a write_nos operation before all data has been written to external storage. The manifest is created from the table or query result set that is input to write_nos. The input must be a list of storage object names and sizes, with one row per object.\nNote: The input to write_nos with manifestonly can itself incorporate read_nos, similar to this, which uses function mappings for write_nos and read_nos:\n (TODO: Add manually or update json)\nA query like this can be used if a write_nos operation fails before it can create a manifest file. The new manifest file created using read_nos will reflect all data objects currently in the external storage location, and can aid in determining which data objects resulted from the incomplete write_nos operation. For more information, see Teradata Vantage™ - Native Object Store Getting Started Guide, B035-1214.",
|
|
115
|
+
"description": "Specifies whether to write only a manifest file in external storage or not. No actual data objects are written to external storage if you use manifestonly = True. You must also use the manifest_file option to create a manifest file in external storage. Use this option to create a new manifest file in the event that a write_nos operation fails due to a database abort or restart, or when network connectivity issues interrupt and stop a write_nos operation before all data has been written to external storage. The manifest is created from the table or query result set that is input to write_nos. The input must be a list of storage object names and sizes, with one row per object.\nNote: The input to write_nos with manifestonly can itself incorporate read_nos, similar to this, which uses function mappings for write_nos and read_nos:\n (TODO: Add manually or update json)\nA query like this can be used if a write_nos operation fails before it can create a manifest file. The new manifest file created using read_nos will reflect all data objects currently in the external storage location, and can aid in determining which data objects resulted from the incomplete write_nos operation. For more information, see Teradata Vantage™ - Native Object Store Getting Started Guide, B035-1214.",
|
|
116
|
+
"datatype": "BOOLEAN",
|
|
117
|
+
"allowsLists": false,
|
|
118
|
+
"rName": "manifest.only",
|
|
119
|
+
"useInR": true,
|
|
120
|
+
"rOrderNum": 7
|
|
121
|
+
},
|
|
122
|
+
{
|
|
123
|
+
"permittedValues": [],
|
|
124
|
+
"defaultValue": false,
|
|
125
|
+
"isOutputColumn": false,
|
|
126
|
+
"name": "OVERWRITE",
|
|
127
|
+
"alternateNames": [],
|
|
128
|
+
"isRequired": false,
|
|
129
|
+
"rDescription": "Specifies whether an existing manifest file in external storage will be overwritten with a new manifest file that has the same name. If False, the default, write_nos returns an error if a manifest file exists in external storage that is named identically to the value of manifestfile.\nNote: overwrite must be used with manifest.only = True",
|
|
130
|
+
"description": "Specifies whether an existing manifest file in external storage will be overwritten with a new manifest file that has the same name. If False, the default, write_nos returns an error if a manifest file exists in external storage that is named identically to the value of manifestfile.\nNote: overwrite must be used with MANIFESTONLY = True",
|
|
131
|
+
"datatype": "BOOLEAN",
|
|
132
|
+
"allowsLists": false,
|
|
133
|
+
"rName": "overwrite",
|
|
134
|
+
"useInR": true,
|
|
135
|
+
"rOrderNum": 8
|
|
136
|
+
},
|
|
137
|
+
{
|
|
138
|
+
"permittedValues": [],
|
|
139
|
+
"isOutputColumn": false,
|
|
140
|
+
"name": "INCLUDE_ORDERING",
|
|
141
|
+
"alternateNames": [],
|
|
142
|
+
"isRequired": false,
|
|
143
|
+
"rDescription": "Specifies whether the ORDER BY columns and their values are written to external storage.",
|
|
144
|
+
"description": "Specifies whether the ORDER BY columns and their values are written to external storage.",
|
|
145
|
+
"datatype": "BOOLEAN",
|
|
146
|
+
"allowsLists": false,
|
|
147
|
+
"rName": "include.ordering",
|
|
148
|
+
"useInR": true,
|
|
149
|
+
"rOrderNum": 9
|
|
150
|
+
},
|
|
151
|
+
{
|
|
152
|
+
"permittedValues": [],
|
|
153
|
+
"isOutputColumn": false,
|
|
154
|
+
"name": "INCLUDE_HASHBY",
|
|
155
|
+
"alternateNames": [],
|
|
156
|
+
"isRequired": false,
|
|
157
|
+
"rDescription": "Specifies whether the HASH BY columns and their values are written to external storage.",
|
|
158
|
+
"description": "Specifies whether the HASH BY columns and their values are written to external storage.",
|
|
159
|
+
"datatype": "BOOLEAN",
|
|
160
|
+
"allowsLists": false,
|
|
161
|
+
"rName": "include.hashby",
|
|
162
|
+
"useInR": true,
|
|
163
|
+
"rOrderNum": 10
|
|
164
|
+
},
|
|
165
|
+
{
|
|
166
|
+
"permittedValues": [],
|
|
167
|
+
"defaultValue": "16MB",
|
|
168
|
+
"isOutputColumn": false,
|
|
169
|
+
"name": "MAXOBJECTSIZE",
|
|
170
|
+
"alternateNames": [],
|
|
171
|
+
"isRequired": false,
|
|
172
|
+
"rDescription": "Specifies the maximum output object size in megabytes, where max.object.size is a number between 4 and 16. The default is the value of the DefaultRowGroupSize field in DBS Control. For more information on DBS Control, see Teradata Vantage™ - Database Utilities , B035-1102.",
|
|
173
|
+
"description": "Specifies the maximum output object size in megabytes, where MAXOBJECTSIZE is a number between 4 and 16. The default is the value of the DefaultRowGroupSize field in DBS Control. For more information on DBS Control, see Teradata Vantage™ - Database Utilities , B035-1102.",
|
|
174
|
+
"datatype": "STRING",
|
|
175
|
+
"allowsLists": false,
|
|
176
|
+
"rName": "max.object.size",
|
|
177
|
+
"useInR": true,
|
|
178
|
+
"rOrderNum": 11
|
|
179
|
+
},
|
|
180
|
+
{
|
|
181
|
+
"permittedValues": ["GZIP", "SNAPPY"],
|
|
182
|
+
"isOutputColumn": false,
|
|
183
|
+
"name": "COMPRESSION",
|
|
184
|
+
"alternateNames": [],
|
|
185
|
+
"isRequired": false,
|
|
186
|
+
"rDescription": "Specifies the compression algorithm used to compress the objects written to external storage.\nNote: For Parquet files the compression occurs inside parts of the parquet file instead of for the entire file, so the file extension on external objects remains .parquet.",
|
|
187
|
+
"description": "Specifies the compression algorithm used to compress the objects written to external storage.\nNote: For Parquet files the compression occurs inside parts of the parquet file instead of for the entire file, so the file extension on external objects remains .parquet.",
|
|
188
|
+
"datatype": "STRING",
|
|
189
|
+
"allowsLists": false,
|
|
190
|
+
"rName": "compression",
|
|
191
|
+
"useInR": true,
|
|
192
|
+
"rOrderNum": 12
|
|
193
|
+
}
|
|
194
|
+
]
|
|
195
|
+
}
|