teradataml 20.0.0.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +2762 -0
- teradataml/__init__.py +78 -0
- teradataml/_version.py +11 -0
- teradataml/analytics/Transformations.py +2996 -0
- teradataml/analytics/__init__.py +82 -0
- teradataml/analytics/analytic_function_executor.py +2416 -0
- teradataml/analytics/analytic_query_generator.py +1050 -0
- teradataml/analytics/byom/H2OPredict.py +514 -0
- teradataml/analytics/byom/PMMLPredict.py +437 -0
- teradataml/analytics/byom/__init__.py +16 -0
- teradataml/analytics/json_parser/__init__.py +133 -0
- teradataml/analytics/json_parser/analytic_functions_argument.py +1805 -0
- teradataml/analytics/json_parser/json_store.py +191 -0
- teradataml/analytics/json_parser/metadata.py +1666 -0
- teradataml/analytics/json_parser/utils.py +805 -0
- teradataml/analytics/meta_class.py +236 -0
- teradataml/analytics/sqle/DecisionTreePredict.py +456 -0
- teradataml/analytics/sqle/NaiveBayesPredict.py +420 -0
- teradataml/analytics/sqle/__init__.py +128 -0
- teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -0
- teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -0
- teradataml/analytics/table_operator/__init__.py +11 -0
- teradataml/analytics/uaf/__init__.py +82 -0
- teradataml/analytics/utils.py +828 -0
- teradataml/analytics/valib.py +1617 -0
- teradataml/automl/__init__.py +5835 -0
- teradataml/automl/autodataprep/__init__.py +493 -0
- teradataml/automl/custom_json_utils.py +1625 -0
- teradataml/automl/data_preparation.py +1384 -0
- teradataml/automl/data_transformation.py +1254 -0
- teradataml/automl/feature_engineering.py +2273 -0
- teradataml/automl/feature_exploration.py +1873 -0
- teradataml/automl/model_evaluation.py +488 -0
- teradataml/automl/model_training.py +1407 -0
- teradataml/catalog/__init__.py +2 -0
- teradataml/catalog/byom.py +1759 -0
- teradataml/catalog/function_argument_mapper.py +859 -0
- teradataml/catalog/model_cataloging_utils.py +491 -0
- teradataml/clients/__init__.py +0 -0
- teradataml/clients/auth_client.py +137 -0
- teradataml/clients/keycloak_client.py +165 -0
- teradataml/clients/pkce_client.py +481 -0
- teradataml/common/__init__.py +1 -0
- teradataml/common/aed_utils.py +2078 -0
- teradataml/common/bulk_exposed_utils.py +113 -0
- teradataml/common/constants.py +1669 -0
- teradataml/common/deprecations.py +166 -0
- teradataml/common/exceptions.py +147 -0
- teradataml/common/formula.py +743 -0
- teradataml/common/garbagecollector.py +666 -0
- teradataml/common/logger.py +1261 -0
- teradataml/common/messagecodes.py +518 -0
- teradataml/common/messages.py +262 -0
- teradataml/common/pylogger.py +67 -0
- teradataml/common/sqlbundle.py +764 -0
- teradataml/common/td_coltype_code_to_tdtype.py +48 -0
- teradataml/common/utils.py +3166 -0
- teradataml/common/warnings.py +36 -0
- teradataml/common/wrapper_utils.py +625 -0
- teradataml/config/__init__.py +0 -0
- teradataml/config/dummy_file1.cfg +5 -0
- teradataml/config/dummy_file2.cfg +3 -0
- teradataml/config/sqlengine_alias_definitions_v1.0 +14 -0
- teradataml/config/sqlengine_alias_definitions_v1.1 +20 -0
- teradataml/config/sqlengine_alias_definitions_v1.3 +19 -0
- teradataml/context/__init__.py +0 -0
- teradataml/context/aed_context.py +223 -0
- teradataml/context/context.py +1462 -0
- teradataml/data/A_loan.csv +19 -0
- teradataml/data/BINARY_REALS_LEFT.csv +11 -0
- teradataml/data/BINARY_REALS_RIGHT.csv +11 -0
- teradataml/data/B_loan.csv +49 -0
- teradataml/data/BuoyData2.csv +17 -0
- teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -0
- teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -0
- teradataml/data/Convolve2RealsLeft.csv +5 -0
- teradataml/data/Convolve2RealsRight.csv +5 -0
- teradataml/data/Convolve2ValidLeft.csv +11 -0
- teradataml/data/Convolve2ValidRight.csv +11 -0
- teradataml/data/DFFTConv_Real_8_8.csv +65 -0
- teradataml/data/Employee.csv +5 -0
- teradataml/data/Employee_Address.csv +4 -0
- teradataml/data/Employee_roles.csv +5 -0
- teradataml/data/JulesBelvezeDummyData.csv +100 -0
- teradataml/data/Mall_customer_data.csv +201 -0
- teradataml/data/Orders1_12mf.csv +25 -0
- teradataml/data/Pi_loan.csv +7 -0
- teradataml/data/SMOOTHED_DATA.csv +7 -0
- teradataml/data/TestDFFT8.csv +9 -0
- teradataml/data/TestRiver.csv +109 -0
- teradataml/data/Traindata.csv +28 -0
- teradataml/data/__init__.py +0 -0
- teradataml/data/acf.csv +17 -0
- teradataml/data/adaboost_example.json +34 -0
- teradataml/data/adaboostpredict_example.json +24 -0
- teradataml/data/additional_table.csv +11 -0
- teradataml/data/admissions_test.csv +21 -0
- teradataml/data/admissions_train.csv +41 -0
- teradataml/data/admissions_train_nulls.csv +41 -0
- teradataml/data/advertising.csv +201 -0
- teradataml/data/ageandheight.csv +13 -0
- teradataml/data/ageandpressure.csv +31 -0
- teradataml/data/amazon_reviews_25.csv +26 -0
- teradataml/data/antiselect_example.json +36 -0
- teradataml/data/antiselect_input.csv +8 -0
- teradataml/data/antiselect_input_mixed_case.csv +8 -0
- teradataml/data/applicant_external.csv +7 -0
- teradataml/data/applicant_reference.csv +7 -0
- teradataml/data/apriori_example.json +22 -0
- teradataml/data/arima_example.json +9 -0
- teradataml/data/assortedtext_input.csv +8 -0
- teradataml/data/attribution_example.json +34 -0
- teradataml/data/attribution_sample_table.csv +27 -0
- teradataml/data/attribution_sample_table1.csv +6 -0
- teradataml/data/attribution_sample_table2.csv +11 -0
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bank_marketing.csv +11163 -0
- teradataml/data/bank_web_clicks1.csv +43 -0
- teradataml/data/bank_web_clicks2.csv +91 -0
- teradataml/data/bank_web_url.csv +85 -0
- teradataml/data/barrier.csv +2 -0
- teradataml/data/barrier_new.csv +3 -0
- teradataml/data/betweenness_example.json +14 -0
- teradataml/data/bike_sharing.csv +732 -0
- teradataml/data/bin_breaks.csv +8 -0
- teradataml/data/bin_fit_ip.csv +4 -0
- teradataml/data/binary_complex_left.csv +11 -0
- teradataml/data/binary_complex_right.csv +11 -0
- teradataml/data/binary_matrix_complex_left.csv +21 -0
- teradataml/data/binary_matrix_complex_right.csv +21 -0
- teradataml/data/binary_matrix_real_left.csv +21 -0
- teradataml/data/binary_matrix_real_right.csv +21 -0
- teradataml/data/blood2ageandweight.csv +26 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/boston.csv +507 -0
- teradataml/data/boston2cols.csv +721 -0
- teradataml/data/breast_cancer.csv +570 -0
- teradataml/data/buoydata_mix.csv +11 -0
- teradataml/data/burst_data.csv +5 -0
- teradataml/data/burst_example.json +21 -0
- teradataml/data/byom_example.json +34 -0
- teradataml/data/bytes_table.csv +4 -0
- teradataml/data/cal_housing_ex_raw.csv +70 -0
- teradataml/data/callers.csv +7 -0
- teradataml/data/calls.csv +10 -0
- teradataml/data/cars_hist.csv +33 -0
- teradataml/data/cat_table.csv +25 -0
- teradataml/data/ccm_example.json +32 -0
- teradataml/data/ccm_input.csv +91 -0
- teradataml/data/ccm_input2.csv +13 -0
- teradataml/data/ccmexample.csv +101 -0
- teradataml/data/ccmprepare_example.json +9 -0
- teradataml/data/ccmprepare_input.csv +91 -0
- teradataml/data/cfilter_example.json +12 -0
- teradataml/data/changepointdetection_example.json +18 -0
- teradataml/data/changepointdetectionrt_example.json +8 -0
- teradataml/data/chi_sq.csv +3 -0
- teradataml/data/churn_data.csv +14 -0
- teradataml/data/churn_emission.csv +35 -0
- teradataml/data/churn_initial.csv +3 -0
- teradataml/data/churn_state_transition.csv +5 -0
- teradataml/data/citedges_2.csv +745 -0
- teradataml/data/citvertices_2.csv +1210 -0
- teradataml/data/clicks2.csv +16 -0
- teradataml/data/clickstream.csv +13 -0
- teradataml/data/clickstream1.csv +11 -0
- teradataml/data/closeness_example.json +16 -0
- teradataml/data/complaints.csv +21 -0
- teradataml/data/complaints_mini.csv +3 -0
- teradataml/data/complaints_test_tokenized.csv +353 -0
- teradataml/data/complaints_testtoken.csv +224 -0
- teradataml/data/complaints_tokens_model.csv +348 -0
- teradataml/data/complaints_tokens_test.csv +353 -0
- teradataml/data/complaints_traintoken.csv +472 -0
- teradataml/data/computers_category.csv +1001 -0
- teradataml/data/computers_test1.csv +1252 -0
- teradataml/data/computers_train1.csv +5009 -0
- teradataml/data/computers_train1_clustered.csv +5009 -0
- teradataml/data/confusionmatrix_example.json +9 -0
- teradataml/data/conversion_event_table.csv +3 -0
- teradataml/data/corr_input.csv +17 -0
- teradataml/data/correlation_example.json +11 -0
- teradataml/data/covid_confirm_sd.csv +83 -0
- teradataml/data/coxhazardratio_example.json +39 -0
- teradataml/data/coxph_example.json +15 -0
- teradataml/data/coxsurvival_example.json +28 -0
- teradataml/data/cpt.csv +41 -0
- teradataml/data/credit_ex_merged.csv +45 -0
- teradataml/data/creditcard_data.csv +1001 -0
- teradataml/data/customer_loyalty.csv +301 -0
- teradataml/data/customer_loyalty_newseq.csv +31 -0
- teradataml/data/customer_segmentation_test.csv +2628 -0
- teradataml/data/customer_segmentation_train.csv +8069 -0
- teradataml/data/dataframe_example.json +173 -0
- teradataml/data/decisionforest_example.json +37 -0
- teradataml/data/decisionforestpredict_example.json +38 -0
- teradataml/data/decisiontree_example.json +21 -0
- teradataml/data/decisiontreepredict_example.json +45 -0
- teradataml/data/dfft2_size4_real.csv +17 -0
- teradataml/data/dfft2_test_matrix16.csv +17 -0
- teradataml/data/dfft2conv_real_4_4.csv +65 -0
- teradataml/data/diabetes.csv +443 -0
- teradataml/data/diabetes_test.csv +89 -0
- teradataml/data/dict_table.csv +5 -0
- teradataml/data/docperterm_table.csv +4 -0
- teradataml/data/docs/__init__.py +1 -0
- teradataml/data/docs/byom/__init__.py +0 -0
- teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -0
- teradataml/data/docs/byom/docs/DataikuPredict.py +217 -0
- teradataml/data/docs/byom/docs/H2OPredict.py +325 -0
- teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
- teradataml/data/docs/byom/docs/ONNXPredict.py +283 -0
- teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
- teradataml/data/docs/byom/docs/PMMLPredict.py +278 -0
- teradataml/data/docs/byom/docs/__init__.py +0 -0
- teradataml/data/docs/sqle/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_10/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Attribution.py +200 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -0
- teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +96 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -0
- teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +152 -0
- teradataml/data/docs/sqle/docs_17_10/FTest.py +161 -0
- teradataml/data/docs/sqle/docs_17_10/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +85 -0
- teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +82 -0
- teradataml/data/docs/sqle/docs_17_10/Histogram.py +165 -0
- teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_10/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -0
- teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +135 -0
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +166 -0
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -0
- teradataml/data/docs/sqle/docs_17_10/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +110 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -0
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -0
- teradataml/data/docs/sqle/docs_17_10/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -0
- teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -0
- teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -0
- teradataml/data/docs/sqle/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -0
- teradataml/data/docs/sqle/docs_17_20/Antiselect.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Apriori.py +138 -0
- teradataml/data/docs/sqle/docs_17_20/Attribution.py +201 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +172 -0
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -0
- teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
- teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -0
- teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +86 -0
- teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +246 -0
- teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +280 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +136 -0
- teradataml/data/docs/sqle/docs_17_20/FTest.py +240 -0
- teradataml/data/docs/sqle/docs_17_20/FillRowId.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/Fit.py +88 -0
- teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +415 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -0
- teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/Histogram.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -0
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -0
- teradataml/data/docs/sqle/docs_17_20/KNN.py +215 -0
- teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -0
- teradataml/data/docs/sqle/docs_17_20/NERExtractor.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +209 -0
- teradataml/data/docs/sqle/docs_17_20/NPath.py +266 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -0
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +231 -0
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +191 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -0
- teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -0
- teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -0
- teradataml/data/docs/sqle/docs_17_20/QQNorm.py +105 -0
- teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +155 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -0
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -0
- teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -0
- teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +118 -0
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -0
- teradataml/data/docs/sqle/docs_17_20/SMOTE.py +212 -0
- teradataml/data/docs/sqle/docs_17_20/SVM.py +414 -0
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -0
- teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -0
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -0
- teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -0
- teradataml/data/docs/sqle/docs_17_20/Sessionize.py +114 -0
- teradataml/data/docs/sqle/docs_17_20/Shap.py +225 -0
- teradataml/data/docs/sqle/docs_17_20/Silhouette.py +153 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -0
- teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -0
- teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +146 -0
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -0
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -0
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
- teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +267 -0
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -0
- teradataml/data/docs/sqle/docs_17_20/TextMorph.py +119 -0
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +224 -0
- teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +160 -0
- teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -0
- teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -0
- teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
- teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +169 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -0
- teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +237 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoost.py +362 -0
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -0
- teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -0
- teradataml/data/docs/sqle/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/tableoperator/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_00/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +430 -0
- teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_05/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +429 -0
- teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +348 -0
- teradataml/data/docs/tableoperator/docs_17_10/__init__.py +0 -0
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +440 -0
- teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +387 -0
- teradataml/data/docs/tableoperator/docs_17_20/__init__.py +0 -0
- teradataml/data/docs/uaf/__init__.py +0 -0
- teradataml/data/docs/uaf/docs_17_20/ACF.py +186 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +370 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +161 -0
- teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
- teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
- teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +248 -0
- teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +178 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve.py +230 -0
- teradataml/data/docs/uaf/docs_17_20/Convolve2.py +218 -0
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT.py +204 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +216 -0
- teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +192 -0
- teradataml/data/docs/uaf/docs_17_20/DIFF.py +175 -0
- teradataml/data/docs/uaf/docs_17_20/DTW.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
- teradataml/data/docs/uaf/docs_17_20/DWT2D.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +142 -0
- teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +184 -0
- teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +185 -0
- teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
- teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +206 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +143 -0
- teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +198 -0
- teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +260 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT.py +165 -0
- teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
- teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/InputValidator.py +121 -0
- teradataml/data/docs/uaf/docs_17_20/LineSpec.py +156 -0
- teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +215 -0
- teradataml/data/docs/uaf/docs_17_20/MAMean.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/MInfo.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
- teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +191 -0
- teradataml/data/docs/uaf/docs_17_20/PACF.py +157 -0
- teradataml/data/docs/uaf/docs_17_20/Portman.py +217 -0
- teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +203 -0
- teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +155 -0
- teradataml/data/docs/uaf/docs_17_20/Resample.py +237 -0
- teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
- teradataml/data/docs/uaf/docs_17_20/SInfo.py +123 -0
- teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +173 -0
- teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +174 -0
- teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +164 -0
- teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +180 -0
- teradataml/data/docs/uaf/docs_17_20/Smoothma.py +208 -0
- teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +151 -0
- teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +202 -0
- teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +171 -0
- teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
- teradataml/data/docs/uaf/docs_17_20/__init__.py +0 -0
- teradataml/data/dtw_example.json +18 -0
- teradataml/data/dtw_t1.csv +11 -0
- teradataml/data/dtw_t2.csv +4 -0
- teradataml/data/dwt2d_dataTable.csv +65 -0
- teradataml/data/dwt2d_example.json +16 -0
- teradataml/data/dwt_dataTable.csv +8 -0
- teradataml/data/dwt_example.json +15 -0
- teradataml/data/dwt_filterTable.csv +3 -0
- teradataml/data/dwt_filter_dim.csv +5 -0
- teradataml/data/emission.csv +9 -0
- teradataml/data/emp_table_by_dept.csv +19 -0
- teradataml/data/employee_info.csv +4 -0
- teradataml/data/employee_table.csv +6 -0
- teradataml/data/excluding_event_table.csv +2 -0
- teradataml/data/finance_data.csv +6 -0
- teradataml/data/finance_data2.csv +61 -0
- teradataml/data/finance_data3.csv +93 -0
- teradataml/data/finance_data4.csv +13 -0
- teradataml/data/fish.csv +160 -0
- teradataml/data/fm_blood2ageandweight.csv +26 -0
- teradataml/data/fmeasure_example.json +12 -0
- teradataml/data/followers_leaders.csv +10 -0
- teradataml/data/fpgrowth_example.json +12 -0
- teradataml/data/frequentpaths_example.json +29 -0
- teradataml/data/friends.csv +9 -0
- teradataml/data/fs_input.csv +33 -0
- teradataml/data/fs_input1.csv +33 -0
- teradataml/data/genData.csv +513 -0
- teradataml/data/geodataframe_example.json +40 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/glm_admissions_model.csv +12 -0
- teradataml/data/glm_example.json +56 -0
- teradataml/data/glml1l2_example.json +28 -0
- teradataml/data/glml1l2predict_example.json +54 -0
- teradataml/data/glmpredict_example.json +54 -0
- teradataml/data/gq_t1.csv +21 -0
- teradataml/data/grocery_transaction.csv +19 -0
- teradataml/data/hconvolve_complex_right.csv +5 -0
- teradataml/data/hconvolve_complex_rightmulti.csv +5 -0
- teradataml/data/histogram_example.json +12 -0
- teradataml/data/hmmdecoder_example.json +79 -0
- teradataml/data/hmmevaluator_example.json +25 -0
- teradataml/data/hmmsupervised_example.json +10 -0
- teradataml/data/hmmunsupervised_example.json +8 -0
- teradataml/data/hnsw_alter_data.csv +5 -0
- teradataml/data/hnsw_data.csv +10 -0
- teradataml/data/house_values.csv +12 -0
- teradataml/data/house_values2.csv +13 -0
- teradataml/data/housing_cat.csv +7 -0
- teradataml/data/housing_data.csv +9 -0
- teradataml/data/housing_test.csv +47 -0
- teradataml/data/housing_test_binary.csv +47 -0
- teradataml/data/housing_train.csv +493 -0
- teradataml/data/housing_train_attribute.csv +5 -0
- teradataml/data/housing_train_binary.csv +437 -0
- teradataml/data/housing_train_parameter.csv +2 -0
- teradataml/data/housing_train_response.csv +493 -0
- teradataml/data/housing_train_segment.csv +201 -0
- teradataml/data/ibm_stock.csv +370 -0
- teradataml/data/ibm_stock1.csv +370 -0
- teradataml/data/identitymatch_example.json +22 -0
- teradataml/data/idf_table.csv +4 -0
- teradataml/data/idwt2d_dataTable.csv +5 -0
- teradataml/data/idwt_dataTable.csv +8 -0
- teradataml/data/idwt_filterTable.csv +3 -0
- teradataml/data/impressions.csv +101 -0
- teradataml/data/inflation.csv +21 -0
- teradataml/data/initial.csv +3 -0
- teradataml/data/insect2Cols.csv +61 -0
- teradataml/data/insect_sprays.csv +13 -0
- teradataml/data/insurance.csv +1339 -0
- teradataml/data/interpolator_example.json +13 -0
- teradataml/data/interval_data.csv +5 -0
- teradataml/data/iris_altinput.csv +481 -0
- teradataml/data/iris_attribute_output.csv +8 -0
- teradataml/data/iris_attribute_test.csv +121 -0
- teradataml/data/iris_attribute_train.csv +481 -0
- teradataml/data/iris_category_expect_predict.csv +31 -0
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/iris_input.csv +151 -0
- teradataml/data/iris_response_train.csv +121 -0
- teradataml/data/iris_test.csv +31 -0
- teradataml/data/iris_train.csv +121 -0
- teradataml/data/join_table1.csv +4 -0
- teradataml/data/join_table2.csv +4 -0
- teradataml/data/jsons/anly_function_name.json +7 -0
- teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
- teradataml/data/jsons/byom/dataikupredict.json +148 -0
- teradataml/data/jsons/byom/datarobotpredict.json +147 -0
- teradataml/data/jsons/byom/h2opredict.json +195 -0
- teradataml/data/jsons/byom/onnxembeddings.json +267 -0
- teradataml/data/jsons/byom/onnxpredict.json +187 -0
- teradataml/data/jsons/byom/pmmlpredict.json +147 -0
- teradataml/data/jsons/paired_functions.json +450 -0
- teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/16.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/16.20/Pack.json +98 -0
- teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/16.20/Unpack.json +166 -0
- teradataml/data/jsons/sqle/16.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.00/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.00/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.00/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.00/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.05/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -0
- teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -0
- teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -0
- teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -0
- teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -0
- teradataml/data/jsons/sqle/17.05/Pack.json +98 -0
- teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -0
- teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.05/Unpack.json +166 -0
- teradataml/data/jsons/sqle/17.05/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.10/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -0
- teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -0
- teradataml/data/jsons/sqle/17.10/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -0
- teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -0
- teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +54 -0
- teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +69 -0
- teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -0
- teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_Histogram.json +133 -0
- teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +183 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +66 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +197 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +112 -0
- teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +157 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +71 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +148 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +119 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -0
- teradataml/data/jsons/sqle/17.10/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.10/nPath.json +269 -0
- teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -0
- teradataml/data/jsons/sqle/17.20/Attribution.json +249 -0
- teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -0
- teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -0
- teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -0
- teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -0
- teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -0
- teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -0
- teradataml/data/jsons/sqle/17.20/Pack.json +133 -0
- teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -0
- teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -0
- teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -0
- teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +149 -0
- teradataml/data/jsons/sqle/17.20/TD_Apriori.json +181 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -0
- teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
- teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_Chisq.json +68 -0
- teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -0
- teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +260 -0
- teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -0
- teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -0
- teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -0
- teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -0
- teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -0
- teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -0
- teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeans.json +232 -0
- teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +87 -0
- teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -0
- teradataml/data/jsons/sqle/17.20/TD_NERExtractor.json +145 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -0
- teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +316 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +124 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -0
- teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -0
- teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -0
- teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -0
- teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -0
- teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -0
- teradataml/data/jsons/sqle/17.20/TD_ROC.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +179 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -0
- teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +138 -0
- teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +128 -0
- teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +71 -0
- teradataml/data/jsons/sqle/17.20/TD_SMOTE.json +267 -0
- teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -0
- teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +310 -0
- teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +120 -0
- teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +194 -0
- teradataml/data/jsons/sqle/17.20/TD_Shap.json +221 -0
- teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +143 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -0
- teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -0
- teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -0
- teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -0
- teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -0
- teradataml/data/jsons/sqle/17.20/TD_TextMorph.json +134 -0
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +297 -0
- teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -0
- teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -0
- teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
- teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +183 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +53 -0
- teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -0
- teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -0
- teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -0
- teradataml/data/jsons/sqle/17.20/Unpack.json +188 -0
- teradataml/data/jsons/sqle/17.20/nPath.json +269 -0
- teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +370 -0
- teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +460 -0
- teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +385 -0
- teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +369 -0
- teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +400 -0
- teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +401 -0
- teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +384 -0
- teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +384 -0
- teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
- teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
- teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
- teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
- teradataml/data/jsons/tableoperator/17.00/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/read_nos.json +198 -0
- teradataml/data/jsons/tableoperator/17.05/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.10/read_nos.json +184 -0
- teradataml/data/jsons/tableoperator/17.10/write_nos.json +195 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/tableoperator/17.20/read_nos.json +183 -0
- teradataml/data/jsons/tableoperator/17.20/write_nos.json +224 -0
- teradataml/data/jsons/uaf/17.20/TD_ACF.json +132 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +396 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +77 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +153 -0
- teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
- teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +107 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +106 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +89 -0
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +104 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +66 -0
- teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +87 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT.json +134 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +144 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +108 -0
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_DIFF.json +92 -0
- teradataml/data/jsons/uaf/17.20/TD_DTW.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
- teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +39 -0
- teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +85 -0
- teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +71 -0
- teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +139 -0
- teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECASTER.json +313 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +81 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
- teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +64 -0
- teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
- teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +182 -0
- teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +103 -0
- teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +181 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
- teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_MINFO.json +67 -0
- teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +179 -0
- teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -0
- teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +119 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -0
- teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +98 -0
- teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +194 -0
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
- teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +143 -0
- teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +90 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +80 -0
- teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +68 -0
- teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -0
- teradataml/data/jsons/uaf/17.20/TD_SINFO.json +58 -0
- teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +163 -0
- teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +101 -0
- teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +112 -0
- teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -0
- teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +78 -0
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
- teradataml/data/kmeans_example.json +23 -0
- teradataml/data/kmeans_table.csv +10 -0
- teradataml/data/kmeans_us_arrests_data.csv +51 -0
- teradataml/data/knn_example.json +19 -0
- teradataml/data/knnrecommender_example.json +7 -0
- teradataml/data/knnrecommenderpredict_example.json +12 -0
- teradataml/data/lar_example.json +17 -0
- teradataml/data/larpredict_example.json +30 -0
- teradataml/data/lc_new_predictors.csv +5 -0
- teradataml/data/lc_new_reference.csv +9 -0
- teradataml/data/lda_example.json +9 -0
- teradataml/data/ldainference_example.json +15 -0
- teradataml/data/ldatopicsummary_example.json +9 -0
- teradataml/data/levendist_input.csv +13 -0
- teradataml/data/levenshteindistance_example.json +10 -0
- teradataml/data/linreg_example.json +10 -0
- teradataml/data/load_example_data.py +350 -0
- teradataml/data/loan_prediction.csv +295 -0
- teradataml/data/lungcancer.csv +138 -0
- teradataml/data/mappingdata.csv +12 -0
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/milk_timeseries.csv +157 -0
- teradataml/data/min_max_titanic.csv +4 -0
- teradataml/data/minhash_example.json +6 -0
- teradataml/data/ml_ratings.csv +7547 -0
- teradataml/data/ml_ratings_10.csv +2445 -0
- teradataml/data/mobile_data.csv +13 -0
- teradataml/data/model1_table.csv +5 -0
- teradataml/data/model2_table.csv +5 -0
- teradataml/data/models/License_file.txt +1 -0
- teradataml/data/models/License_file_empty.txt +0 -0
- teradataml/data/models/dataiku_iris_data_ann_thin +0 -0
- teradataml/data/models/dr_iris_rf +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn.onnx +0 -0
- teradataml/data/models/iris_db_dt_model_sklearn_floattensor.onnx +0 -0
- teradataml/data/models/iris_db_glm_model.pmml +57 -0
- teradataml/data/models/iris_db_xgb_model.pmml +4471 -0
- teradataml/data/models/iris_kmeans_model +0 -0
- teradataml/data/models/iris_mojo_glm_h2o_model +0 -0
- teradataml/data/models/iris_mojo_xgb_h2o_model +0 -0
- teradataml/data/modularity_example.json +12 -0
- teradataml/data/movavg_example.json +8 -0
- teradataml/data/mtx1.csv +7 -0
- teradataml/data/mtx2.csv +13 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/mvdfft8.csv +9 -0
- teradataml/data/naivebayes_example.json +10 -0
- teradataml/data/naivebayespredict_example.json +19 -0
- teradataml/data/naivebayestextclassifier2_example.json +7 -0
- teradataml/data/naivebayestextclassifier_example.json +8 -0
- teradataml/data/naivebayestextclassifierpredict_example.json +32 -0
- teradataml/data/name_Find_configure.csv +10 -0
- teradataml/data/namedentityfinder_example.json +14 -0
- teradataml/data/namedentityfinderevaluator_example.json +10 -0
- teradataml/data/namedentityfindertrainer_example.json +6 -0
- teradataml/data/nb_iris_input_test.csv +31 -0
- teradataml/data/nb_iris_input_train.csv +121 -0
- teradataml/data/nbp_iris_model.csv +13 -0
- teradataml/data/ner_dict.csv +8 -0
- teradataml/data/ner_extractor_text.csv +2 -0
- teradataml/data/ner_input_eng.csv +7 -0
- teradataml/data/ner_rule.csv +5 -0
- teradataml/data/ner_sports_test2.csv +29 -0
- teradataml/data/ner_sports_train.csv +501 -0
- teradataml/data/nerevaluator_example.json +6 -0
- teradataml/data/nerextractor_example.json +18 -0
- teradataml/data/nermem_sports_test.csv +18 -0
- teradataml/data/nermem_sports_train.csv +51 -0
- teradataml/data/nertrainer_example.json +7 -0
- teradataml/data/ngrams_example.json +7 -0
- teradataml/data/notebooks/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -0
- teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -0
- teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -0
- teradataml/data/notebooks/sqlalchemy/__init__.py +0 -0
- teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -0
- teradataml/data/npath_example.json +23 -0
- teradataml/data/ntree_example.json +14 -0
- teradataml/data/numeric_strings.csv +5 -0
- teradataml/data/numerics.csv +4 -0
- teradataml/data/ocean_buoy.csv +17 -0
- teradataml/data/ocean_buoy2.csv +17 -0
- teradataml/data/ocean_buoys.csv +28 -0
- teradataml/data/ocean_buoys2.csv +10 -0
- teradataml/data/ocean_buoys_nonpti.csv +28 -0
- teradataml/data/ocean_buoys_seq.csv +29 -0
- teradataml/data/onehot_encoder_train.csv +4 -0
- teradataml/data/openml_example.json +92 -0
- teradataml/data/optional_event_table.csv +4 -0
- teradataml/data/orders1.csv +11 -0
- teradataml/data/orders1_12.csv +13 -0
- teradataml/data/orders_ex.csv +4 -0
- teradataml/data/pack_example.json +9 -0
- teradataml/data/package_tracking.csv +19 -0
- teradataml/data/package_tracking_pti.csv +19 -0
- teradataml/data/pagerank_example.json +13 -0
- teradataml/data/paragraphs_input.csv +6 -0
- teradataml/data/pathanalyzer_example.json +8 -0
- teradataml/data/pathgenerator_example.json +8 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/pattern_matching_data.csv +11 -0
- teradataml/data/payment_fraud_dataset.csv +10001 -0
- teradataml/data/peppers.png +0 -0
- teradataml/data/phrases.csv +7 -0
- teradataml/data/pivot_example.json +9 -0
- teradataml/data/pivot_input.csv +22 -0
- teradataml/data/playerRating.csv +31 -0
- teradataml/data/pos_input.csv +40 -0
- teradataml/data/postagger_example.json +7 -0
- teradataml/data/posttagger_output.csv +44 -0
- teradataml/data/production_data.csv +17 -0
- teradataml/data/production_data2.csv +7 -0
- teradataml/data/randomsample_example.json +32 -0
- teradataml/data/randomwalksample_example.json +9 -0
- teradataml/data/rank_table.csv +6 -0
- teradataml/data/real_values.csv +14 -0
- teradataml/data/ref_mobile_data.csv +4 -0
- teradataml/data/ref_mobile_data_dense.csv +2 -0
- teradataml/data/ref_url.csv +17 -0
- teradataml/data/restaurant_reviews.csv +7 -0
- teradataml/data/retail_churn_table.csv +27772 -0
- teradataml/data/river_data.csv +145 -0
- teradataml/data/roc_example.json +8 -0
- teradataml/data/roc_input.csv +101 -0
- teradataml/data/rule_inputs.csv +6 -0
- teradataml/data/rule_table.csv +2 -0
- teradataml/data/sales.csv +7 -0
- teradataml/data/sales_transaction.csv +501 -0
- teradataml/data/salesdata.csv +342 -0
- teradataml/data/sample_cities.csv +3 -0
- teradataml/data/sample_shapes.csv +11 -0
- teradataml/data/sample_streets.csv +3 -0
- teradataml/data/sampling_example.json +16 -0
- teradataml/data/sax_example.json +17 -0
- teradataml/data/scale_attributes.csv +3 -0
- teradataml/data/scale_example.json +74 -0
- teradataml/data/scale_housing.csv +11 -0
- teradataml/data/scale_housing_test.csv +6 -0
- teradataml/data/scale_input_part_sparse.csv +31 -0
- teradataml/data/scale_input_partitioned.csv +16 -0
- teradataml/data/scale_input_sparse.csv +11 -0
- teradataml/data/scale_parameters.csv +3 -0
- teradataml/data/scale_stat.csv +11 -0
- teradataml/data/scalebypartition_example.json +13 -0
- teradataml/data/scalemap_example.json +13 -0
- teradataml/data/scalesummary_example.json +12 -0
- teradataml/data/score_category.csv +101 -0
- teradataml/data/score_summary.csv +4 -0
- teradataml/data/script_example.json +10 -0
- teradataml/data/scripts/deploy_script.py +84 -0
- teradataml/data/scripts/lightgbm/dataset.template +175 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +264 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +234 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +177 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/mapper.py +16 -0
- teradataml/data/scripts/mapper_replace.py +16 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +205 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +148 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +144 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +166 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +161 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +145 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +327 -0
- teradataml/data/sdk/modelops/modelops_spec.json +101737 -0
- teradataml/data/seeds.csv +10 -0
- teradataml/data/sentenceextractor_example.json +7 -0
- teradataml/data/sentiment_extract_input.csv +11 -0
- teradataml/data/sentiment_train.csv +16 -0
- teradataml/data/sentiment_word.csv +20 -0
- teradataml/data/sentiment_word_input.csv +20 -0
- teradataml/data/sentimentextractor_example.json +24 -0
- teradataml/data/sentimenttrainer_example.json +8 -0
- teradataml/data/sequence_table.csv +10 -0
- teradataml/data/seriessplitter_example.json +8 -0
- teradataml/data/sessionize_example.json +17 -0
- teradataml/data/sessionize_table.csv +116 -0
- teradataml/data/setop_test1.csv +24 -0
- teradataml/data/setop_test2.csv +22 -0
- teradataml/data/soc_nw_edges.csv +11 -0
- teradataml/data/soc_nw_vertices.csv +8 -0
- teradataml/data/souvenir_timeseries.csv +168 -0
- teradataml/data/sparse_iris_attribute.csv +5 -0
- teradataml/data/sparse_iris_test.csv +121 -0
- teradataml/data/sparse_iris_train.csv +601 -0
- teradataml/data/star1.csv +6 -0
- teradataml/data/star_pivot.csv +8 -0
- teradataml/data/state_transition.csv +5 -0
- teradataml/data/stock_data.csv +53 -0
- teradataml/data/stock_movement.csv +11 -0
- teradataml/data/stock_vol.csv +76 -0
- teradataml/data/stop_words.csv +8 -0
- teradataml/data/store_sales.csv +37 -0
- teradataml/data/stringsimilarity_example.json +8 -0
- teradataml/data/strsimilarity_input.csv +13 -0
- teradataml/data/students.csv +101 -0
- teradataml/data/svm_iris_input_test.csv +121 -0
- teradataml/data/svm_iris_input_train.csv +481 -0
- teradataml/data/svm_iris_model.csv +7 -0
- teradataml/data/svmdense_example.json +10 -0
- teradataml/data/svmdensepredict_example.json +19 -0
- teradataml/data/svmsparse_example.json +8 -0
- teradataml/data/svmsparsepredict_example.json +14 -0
- teradataml/data/svmsparsesummary_example.json +8 -0
- teradataml/data/target_mobile_data.csv +13 -0
- teradataml/data/target_mobile_data_dense.csv +5 -0
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/tdnerextractor_example.json +14 -0
- teradataml/data/templatedata.csv +1201 -0
- teradataml/data/templates/open_source_ml.json +11 -0
- teradataml/data/teradata_icon.ico +0 -0
- teradataml/data/teradataml_example.json +1473 -0
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_loan_prediction.csv +53 -0
- teradataml/data/test_pacf_12.csv +37 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/test_river2.csv +109 -0
- teradataml/data/text_inputs.csv +6 -0
- teradataml/data/textchunker_example.json +8 -0
- teradataml/data/textclassifier_example.json +7 -0
- teradataml/data/textclassifier_input.csv +7 -0
- teradataml/data/textclassifiertrainer_example.json +7 -0
- teradataml/data/textmorph_example.json +11 -0
- teradataml/data/textparser_example.json +15 -0
- teradataml/data/texttagger_example.json +12 -0
- teradataml/data/texttokenizer_example.json +7 -0
- teradataml/data/texttrainer_input.csv +11 -0
- teradataml/data/tf_example.json +7 -0
- teradataml/data/tfidf_example.json +14 -0
- teradataml/data/tfidf_input1.csv +201 -0
- teradataml/data/tfidf_train.csv +6 -0
- teradataml/data/time_table1.csv +535 -0
- teradataml/data/time_table2.csv +14 -0
- teradataml/data/timeseriesdata.csv +1601 -0
- teradataml/data/timeseriesdatasetsd4.csv +105 -0
- teradataml/data/timestamp_data.csv +4 -0
- teradataml/data/titanic.csv +892 -0
- teradataml/data/titanic_dataset_unpivoted.csv +19 -0
- teradataml/data/to_num_data.csv +4 -0
- teradataml/data/tochar_data.csv +5 -0
- teradataml/data/token_table.csv +696 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/train_tracking.csv +28 -0
- teradataml/data/trans_dense.csv +16 -0
- teradataml/data/trans_sparse.csv +55 -0
- teradataml/data/transformation_table.csv +6 -0
- teradataml/data/transformation_table_new.csv +2 -0
- teradataml/data/tv_spots.csv +16 -0
- teradataml/data/twod_climate_data.csv +117 -0
- teradataml/data/uaf_example.json +529 -0
- teradataml/data/univariatestatistics_example.json +9 -0
- teradataml/data/unpack_example.json +10 -0
- teradataml/data/unpivot_example.json +25 -0
- teradataml/data/unpivot_input.csv +8 -0
- teradataml/data/url_data.csv +10 -0
- teradataml/data/us_air_pass.csv +37 -0
- teradataml/data/us_population.csv +624 -0
- teradataml/data/us_states_shapes.csv +52 -0
- teradataml/data/varmax_example.json +18 -0
- teradataml/data/vectordistance_example.json +30 -0
- teradataml/data/ville_climatedata.csv +121 -0
- teradataml/data/ville_tempdata.csv +12 -0
- teradataml/data/ville_tempdata1.csv +12 -0
- teradataml/data/ville_temperature.csv +11 -0
- teradataml/data/waveletTable.csv +1605 -0
- teradataml/data/waveletTable2.csv +1605 -0
- teradataml/data/weightedmovavg_example.json +9 -0
- teradataml/data/wft_testing.csv +5 -0
- teradataml/data/windowdfft.csv +16 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/data/word_embed_input_table1.csv +6 -0
- teradataml/data/word_embed_input_table2.csv +5 -0
- teradataml/data/word_embed_model.csv +23 -0
- teradataml/data/words_input.csv +13 -0
- teradataml/data/xconvolve_complex_left.csv +6 -0
- teradataml/data/xconvolve_complex_leftmulti.csv +6 -0
- teradataml/data/xgboost_example.json +36 -0
- teradataml/data/xgboostpredict_example.json +32 -0
- teradataml/data/ztest_example.json +16 -0
- teradataml/dataframe/__init__.py +0 -0
- teradataml/dataframe/copy_to.py +2446 -0
- teradataml/dataframe/data_transfer.py +2840 -0
- teradataml/dataframe/dataframe.py +20908 -0
- teradataml/dataframe/dataframe_utils.py +2114 -0
- teradataml/dataframe/fastload.py +794 -0
- teradataml/dataframe/functions.py +2110 -0
- teradataml/dataframe/indexer.py +424 -0
- teradataml/dataframe/row.py +160 -0
- teradataml/dataframe/setop.py +1171 -0
- teradataml/dataframe/sql.py +10904 -0
- teradataml/dataframe/sql_function_parameters.py +440 -0
- teradataml/dataframe/sql_functions.py +652 -0
- teradataml/dataframe/sql_interfaces.py +220 -0
- teradataml/dataframe/vantage_function_types.py +675 -0
- teradataml/dataframe/window.py +694 -0
- teradataml/dbutils/__init__.py +3 -0
- teradataml/dbutils/dbutils.py +2871 -0
- teradataml/dbutils/filemgr.py +318 -0
- teradataml/gen_ai/__init__.py +2 -0
- teradataml/gen_ai/convAI.py +473 -0
- teradataml/geospatial/__init__.py +4 -0
- teradataml/geospatial/geodataframe.py +1105 -0
- teradataml/geospatial/geodataframecolumn.py +392 -0
- teradataml/geospatial/geometry_types.py +926 -0
- teradataml/hyperparameter_tuner/__init__.py +1 -0
- teradataml/hyperparameter_tuner/optimizer.py +4115 -0
- teradataml/hyperparameter_tuner/utils.py +303 -0
- teradataml/lib/__init__.py +0 -0
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/lib/libaed_0_1_aarch64.so +0 -0
- teradataml/lib/libaed_0_1_ppc64le.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/_base.py +1321 -0
- teradataml/opensource/_class.py +464 -0
- teradataml/opensource/_constants.py +61 -0
- teradataml/opensource/_lightgbm.py +949 -0
- teradataml/opensource/_sklearn.py +1008 -0
- teradataml/opensource/_wrapper_utils.py +267 -0
- teradataml/options/__init__.py +148 -0
- teradataml/options/configure.py +489 -0
- teradataml/options/display.py +187 -0
- teradataml/plot/__init__.py +3 -0
- teradataml/plot/axis.py +1427 -0
- teradataml/plot/constants.py +15 -0
- teradataml/plot/figure.py +431 -0
- teradataml/plot/plot.py +810 -0
- teradataml/plot/query_generator.py +83 -0
- teradataml/plot/subplot.py +216 -0
- teradataml/scriptmgmt/UserEnv.py +4273 -0
- teradataml/scriptmgmt/__init__.py +3 -0
- teradataml/scriptmgmt/lls_utils.py +2157 -0
- teradataml/sdk/README.md +79 -0
- teradataml/sdk/__init__.py +4 -0
- teradataml/sdk/_auth_modes.py +422 -0
- teradataml/sdk/_func_params.py +487 -0
- teradataml/sdk/_json_parser.py +453 -0
- teradataml/sdk/_openapi_spec_constants.py +249 -0
- teradataml/sdk/_utils.py +236 -0
- teradataml/sdk/api_client.py +900 -0
- teradataml/sdk/constants.py +62 -0
- teradataml/sdk/modelops/__init__.py +98 -0
- teradataml/sdk/modelops/_client.py +409 -0
- teradataml/sdk/modelops/_constants.py +304 -0
- teradataml/sdk/modelops/models.py +2308 -0
- teradataml/sdk/spinner.py +107 -0
- teradataml/series/__init__.py +0 -0
- teradataml/series/series.py +537 -0
- teradataml/series/series_utils.py +71 -0
- teradataml/store/__init__.py +12 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +658 -0
- teradataml/store/feature_store/feature_store.py +4814 -0
- teradataml/store/feature_store/mind_map.py +639 -0
- teradataml/store/feature_store/models.py +7330 -0
- teradataml/store/feature_store/utils.py +390 -0
- teradataml/table_operators/Apply.py +979 -0
- teradataml/table_operators/Script.py +1739 -0
- teradataml/table_operators/TableOperator.py +1343 -0
- teradataml/table_operators/__init__.py +2 -0
- teradataml/table_operators/apply_query_generator.py +262 -0
- teradataml/table_operators/query_generator.py +493 -0
- teradataml/table_operators/table_operator_query_generator.py +462 -0
- teradataml/table_operators/table_operator_util.py +726 -0
- teradataml/table_operators/templates/dataframe_apply.template +184 -0
- teradataml/table_operators/templates/dataframe_map.template +176 -0
- teradataml/table_operators/templates/dataframe_register.template +73 -0
- teradataml/table_operators/templates/dataframe_udf.template +67 -0
- teradataml/table_operators/templates/script_executor.template +170 -0
- teradataml/telemetry_utils/__init__.py +0 -0
- teradataml/telemetry_utils/queryband.py +53 -0
- teradataml/utils/__init__.py +0 -0
- teradataml/utils/docstring.py +527 -0
- teradataml/utils/dtypes.py +943 -0
- teradataml/utils/internal_buffer.py +122 -0
- teradataml/utils/print_versions.py +206 -0
- teradataml/utils/utils.py +451 -0
- teradataml/utils/validators.py +3305 -0
- teradataml-20.0.0.8.dist-info/METADATA +2804 -0
- teradataml-20.0.0.8.dist-info/RECORD +1208 -0
- teradataml-20.0.0.8.dist-info/WHEEL +5 -0
- teradataml-20.0.0.8.dist-info/top_level.txt +1 -0
- teradataml-20.0.0.8.dist-info/zip-safe +1 -0
|
@@ -0,0 +1,1492 @@
|
|
|
1
|
+
{
|
|
2
|
+
"cells": [
|
|
3
|
+
{
|
|
4
|
+
"cell_type": "markdown",
|
|
5
|
+
"metadata": {},
|
|
6
|
+
"source": [
|
|
7
|
+
"### Disclaimer\n",
|
|
8
|
+
"Please note, the Vantage Functions via SQLAlchemy feature is a preview/beta code release with limited functionality (the “Code”). As such, you acknowledge that the Code is experimental in nature and that the Code is provided “AS IS” and may not be functional on any machine or in any environment. TERADATA DISCLAIMS ALL WARRANTIES RELATING TO THE CODE, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES AGAINST INFRINGEMENT OF THIRD-PARTY RIGHTS, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.\n",
|
|
9
|
+
"\n",
|
|
10
|
+
"TERADATA SHALL NOT BE RESPONSIBLE OR LIABLE WITH RESPECT TO ANY SUBJECT MATTER OF THE CODE UNDER ANY CONTRACT, NEGLIGENCE, STRICT LIABILITY OR OTHER THEORY \n",
|
|
11
|
+
" (A) FOR LOSS OR INACCURACY OF DATA OR COST OF PROCUREMENT OF SUBSTITUTE GOODS, SERVICES OR TECHNOLOGY, OR \n",
|
|
12
|
+
" (B) FOR ANY INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES INCLUDING, BUT NOT LIMITED TO LOSS OF REVENUES AND LOSS OF PROFITS. TERADATA SHALL NOT BE RESPONSIBLE FOR ANY MATTER BEYOND ITS REASONABLE CONTROL.\n",
|
|
13
|
+
"\n",
|
|
14
|
+
"Notwithstanding anything to the contrary: \n",
|
|
15
|
+
" (a) Teradata will have no obligation of any kind with respect to any Code-related comments, suggestions, design changes or improvements that you elect to provide to Teradata in either verbal or written form (collectively, “Feedback”), and \n",
|
|
16
|
+
" (b) Teradata and its affiliates are hereby free to use any ideas, concepts, know-how or techniques, in whole or in part, contained in Feedback: \n",
|
|
17
|
+
" (i) for any purpose whatsoever, including developing, manufacturing, and/or marketing products and/or services incorporating Feedback in whole or in part, and \n",
|
|
18
|
+
" (ii) without any restrictions or limitations, including requiring the payment of any license fees, royalties, or other consideration. "
|
|
19
|
+
]
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"cell_type": "code",
|
|
23
|
+
"execution_count": 1,
|
|
24
|
+
"metadata": {},
|
|
25
|
+
"outputs": [],
|
|
26
|
+
"source": [
|
|
27
|
+
"# In this notebook, we will be covering examples for following Regular Aggregate Functions\n",
|
|
28
|
+
"# SQL Documentation: https://docs.teradata.com/reader/756LNiPSFdY~4JcCCcR5Cw/ynL7ziGBufCzuUooZX8VQg\n",
|
|
29
|
+
" # 1. BITAND\n",
|
|
30
|
+
" # 2. BITNOT\n",
|
|
31
|
+
" # 3. BITOR\n",
|
|
32
|
+
" # 4. BITXOR\n",
|
|
33
|
+
" # 5. COUNTSET\n",
|
|
34
|
+
" # 6. GETBIT\n",
|
|
35
|
+
" # 7. ROTATELEFT\n",
|
|
36
|
+
" # 8. ROTATERIGHT\n",
|
|
37
|
+
" # 9. SETBIT\n",
|
|
38
|
+
" # 10. SHIFTLEFT\n",
|
|
39
|
+
" # 11. SHIFTRIGHT\n",
|
|
40
|
+
" # 12. SUBBITSTR\n",
|
|
41
|
+
" # 13. TO_BYTE"
|
|
42
|
+
]
|
|
43
|
+
},
|
|
44
|
+
{
|
|
45
|
+
"cell_type": "code",
|
|
46
|
+
"execution_count": 2,
|
|
47
|
+
"metadata": {},
|
|
48
|
+
"outputs": [
|
|
49
|
+
{
|
|
50
|
+
"name": "stdout",
|
|
51
|
+
"output_type": "stream",
|
|
52
|
+
"text": [
|
|
53
|
+
"Hostname: ········\n",
|
|
54
|
+
"Username: ········\n",
|
|
55
|
+
"Password: ········\n"
|
|
56
|
+
]
|
|
57
|
+
}
|
|
58
|
+
],
|
|
59
|
+
"source": [
|
|
60
|
+
"# Get the connection to the Vantage using create_context()\n",
|
|
61
|
+
"from teradataml import *\n",
|
|
62
|
+
"import getpass\n",
|
|
63
|
+
"td_context = create_context(host=getpass.getpass(\"Hostname: \"), username=getpass.getpass(\"Username: \"), password=getpass.getpass(\"Password: \"))"
|
|
64
|
+
]
|
|
65
|
+
},
|
|
66
|
+
{
|
|
67
|
+
"cell_type": "code",
|
|
68
|
+
"execution_count": 3,
|
|
69
|
+
"metadata": {},
|
|
70
|
+
"outputs": [
|
|
71
|
+
{
|
|
72
|
+
"name": "stdout",
|
|
73
|
+
"output_type": "stream",
|
|
74
|
+
"text": [
|
|
75
|
+
"WARNING: Skipped loading table bytes_table since it already exists in the database.\n"
|
|
76
|
+
]
|
|
77
|
+
},
|
|
78
|
+
{
|
|
79
|
+
"data": {
|
|
80
|
+
"text/plain": [
|
|
81
|
+
" byte_col varbyte_col blob_col\n",
|
|
82
|
+
"id_col \n",
|
|
83
|
+
"0 b'63' b'627A7863' b'3330363136323633'\n",
|
|
84
|
+
"2 b'61' b'616263643132' b'6162636431323233'\n",
|
|
85
|
+
"1 b'62' b'62717765' b'3331363136323633'"
|
|
86
|
+
]
|
|
87
|
+
},
|
|
88
|
+
"execution_count": 3,
|
|
89
|
+
"metadata": {},
|
|
90
|
+
"output_type": "execute_result"
|
|
91
|
+
}
|
|
92
|
+
],
|
|
93
|
+
"source": [
|
|
94
|
+
"# Load the example dataset.\n",
|
|
95
|
+
"load_example_data(\"dataframe\", [\"bytes_table\"])\n",
|
|
96
|
+
"# Create the DataFrame on 'bytes_table' table\n",
|
|
97
|
+
"bytes_table = DataFrame(\"bytes_table\")\n",
|
|
98
|
+
"bytes_table"
|
|
99
|
+
]
|
|
100
|
+
},
|
|
101
|
+
{
|
|
102
|
+
"cell_type": "code",
|
|
103
|
+
"execution_count": 4,
|
|
104
|
+
"metadata": {},
|
|
105
|
+
"outputs": [],
|
|
106
|
+
"source": [
|
|
107
|
+
"def print_variables(df, columns):\n",
|
|
108
|
+
" print(\"Equivalent SQL: {}\".format(df.show_query()))\n",
|
|
109
|
+
" print(\"\\n\")\n",
|
|
110
|
+
" print(\" ************************* DataFrame ********************* \")\n",
|
|
111
|
+
" print(df)\n",
|
|
112
|
+
" print(\"\\n\\n\")\n",
|
|
113
|
+
" print(\" ************************* DataFrame.dtypes ********************* \")\n",
|
|
114
|
+
" print(df.dtypes)\n",
|
|
115
|
+
" print(\"\\n\\n\")\n",
|
|
116
|
+
" if isinstance(columns, str):\n",
|
|
117
|
+
" columns = [columns]\n",
|
|
118
|
+
" for col in columns:\n",
|
|
119
|
+
" coltype = df.__getattr__(col).type\n",
|
|
120
|
+
" if isinstance(coltype, sqlalchemy.sql.sqltypes.NullType):\n",
|
|
121
|
+
" coltype = \"NullType\"\n",
|
|
122
|
+
" print(\" '{}' Column Type: {}\".format(col, coltype))"
|
|
123
|
+
]
|
|
124
|
+
},
|
|
125
|
+
{
|
|
126
|
+
"cell_type": "markdown",
|
|
127
|
+
"metadata": {},
|
|
128
|
+
"source": [
|
|
129
|
+
"# Using Bit-Byte Manipulation Functions from Teradata Vantage with SQLAlchemy"
|
|
130
|
+
]
|
|
131
|
+
},
|
|
132
|
+
{
|
|
133
|
+
"cell_type": "code",
|
|
134
|
+
"execution_count": 5,
|
|
135
|
+
"metadata": {},
|
|
136
|
+
"outputs": [],
|
|
137
|
+
"source": [
|
|
138
|
+
"# Import func from SQLAlchemy to use the same for executing bit/byte manipulation functions\n",
|
|
139
|
+
"from sqlalchemy import func"
|
|
140
|
+
]
|
|
141
|
+
},
|
|
142
|
+
{
|
|
143
|
+
"cell_type": "code",
|
|
144
|
+
"execution_count": 6,
|
|
145
|
+
"metadata": {},
|
|
146
|
+
"outputs": [],
|
|
147
|
+
"source": [
|
|
148
|
+
"# Before we move on with examples, one should read below just to understand how teradataml DataFrame and \n",
|
|
149
|
+
"# its columns are used to create a SQLAlchemy ClauseElement/Expression.\n",
|
|
150
|
+
"\n",
|
|
151
|
+
"# Often in below examples one would see something like this: 'admissions_train.admitted.expression'\n",
|
|
152
|
+
"# Here in the above expression,\n",
|
|
153
|
+
"# 'admissions_train' is 'teradataml DataFrame'\n",
|
|
154
|
+
"# 'admitted' is 'column name' in teradataml DataFrame 'admissions_train'\n",
|
|
155
|
+
"# Thus, \n",
|
|
156
|
+
"# 'admissions_train.admitted' together forms a ColumnExpression.\n",
|
|
157
|
+
"# 'expression' allows us to use teradata ColumnExpression to be treated as SQLAlchemy Expression.\n",
|
|
158
|
+
"# Thus,\n",
|
|
159
|
+
"# 'admissions_train.admitted.expression' gives us an expression that can be used with SQLAlchemy clauseElements."
|
|
160
|
+
]
|
|
161
|
+
},
|
|
162
|
+
{
|
|
163
|
+
"cell_type": "markdown",
|
|
164
|
+
"metadata": {},
|
|
165
|
+
"source": [
|
|
166
|
+
"## BITAND Function"
|
|
167
|
+
]
|
|
168
|
+
},
|
|
169
|
+
{
|
|
170
|
+
"cell_type": "code",
|
|
171
|
+
"execution_count": 7,
|
|
172
|
+
"metadata": {},
|
|
173
|
+
"outputs": [],
|
|
174
|
+
"source": [
|
|
175
|
+
"# Function performs the logical AND operation on the corresponding bits from the two input arguments.\n",
|
|
176
|
+
"# Syntax:\n",
|
|
177
|
+
"# =======\n",
|
|
178
|
+
"# BITAND(target_arg, bit_mask_arg)\n",
|
|
179
|
+
"\n",
|
|
180
|
+
"# Argument Description:\n",
|
|
181
|
+
"# =====================\n",
|
|
182
|
+
"# target_arg\n",
|
|
183
|
+
"# A numeric or variable byte expression.\n",
|
|
184
|
+
"# Supported types for 'target_arg' are as follows:\n",
|
|
185
|
+
"# BYTEINT\n",
|
|
186
|
+
"# SMALLINT\n",
|
|
187
|
+
"# INTEGER\n",
|
|
188
|
+
"# BIGINT\n",
|
|
189
|
+
"# DECIMAL\n",
|
|
190
|
+
"# NUMBER\n",
|
|
191
|
+
"# VARBYTE(n)\n",
|
|
192
|
+
"# bit_mask_arg\n",
|
|
193
|
+
"# A fixed byte value, a variable byte value, or a numeric expression.\n",
|
|
194
|
+
"\n",
|
|
195
|
+
"# Function Description:\n",
|
|
196
|
+
"# =====================\n",
|
|
197
|
+
"# This function takes two bit patterns of equal length and performs the logical AND operation on each pair \n",
|
|
198
|
+
"# of corresponding bits as follows:\n",
|
|
199
|
+
"# 1. If the bits at the same position are both 1, then the result is 1; otherwise, \n",
|
|
200
|
+
"# the result is 0. \n",
|
|
201
|
+
"# 2. If either input argument is NULL, the function returns NULL.\n",
|
|
202
|
+
"#\n",
|
|
203
|
+
"# If the target_arg and bit_mask_arg arguments differ in length, the arguments are processed as follows:\n",
|
|
204
|
+
"# 1. The target_arg and bit_mask_arg arguments are aligned on their least significant byte/bit.\n",
|
|
205
|
+
"# 2. The smaller argument is padded with zeros to the left until it becomes the same size as the larger argument."
|
|
206
|
+
]
|
|
207
|
+
},
|
|
208
|
+
{
|
|
209
|
+
"cell_type": "markdown",
|
|
210
|
+
"metadata": {},
|
|
211
|
+
"source": [
|
|
212
|
+
"The data type of the bit_mask_arg parameter varies depending upon the data type of the target_arg parameter. \n",
|
|
213
|
+
"The following (target_arg, bit_mask_arg) input combinations are permitted:\n",
|
|
214
|
+
"\n",
|
|
215
|
+
"| target_arg type | bit_mask_arg type |\n",
|
|
216
|
+
"|------|------|\n",
|
|
217
|
+
"| BYTEINT\t| BYTE(1) |\n",
|
|
218
|
+
"| BYTEINT\t| BYTEINT |\n",
|
|
219
|
+
"| SMALLINT\t| BYTE(2) |\n",
|
|
220
|
+
"| SMALLINT\t| SMALLINT |\n",
|
|
221
|
+
"| INTEGER\t| BYTE(4) |\n",
|
|
222
|
+
"| INTEGER\t| INTEGER |\n",
|
|
223
|
+
"| BIGINT\t| BYTE(8) |\n",
|
|
224
|
+
"| BIGINT\t| BIGINT |\n",
|
|
225
|
+
"| NUMBER(38,0)\t| VARBYTE(16) |\n",
|
|
226
|
+
"| NUMBER(38,0)\t| NUMBER(38,0) |\n",
|
|
227
|
+
"| VARBYTE(n)\t| VARBYTE(n) |"
|
|
228
|
+
]
|
|
229
|
+
},
|
|
230
|
+
{
|
|
231
|
+
"cell_type": "code",
|
|
232
|
+
"execution_count": 8,
|
|
233
|
+
"metadata": {},
|
|
234
|
+
"outputs": [
|
|
235
|
+
{
|
|
236
|
+
"data": {
|
|
237
|
+
"text/plain": [
|
|
238
|
+
"sqlalchemy.sql.functions.Function"
|
|
239
|
+
]
|
|
240
|
+
},
|
|
241
|
+
"execution_count": 8,
|
|
242
|
+
"metadata": {},
|
|
243
|
+
"output_type": "execute_result"
|
|
244
|
+
}
|
|
245
|
+
],
|
|
246
|
+
"source": [
|
|
247
|
+
"bit_byte_func_ = func.BITAND(bytes_table.id_col.expression, bytes_table.byte_col.expression)\n",
|
|
248
|
+
"type(bit_byte_func_)"
|
|
249
|
+
]
|
|
250
|
+
},
|
|
251
|
+
{
|
|
252
|
+
"cell_type": "code",
|
|
253
|
+
"execution_count": 9,
|
|
254
|
+
"metadata": {},
|
|
255
|
+
"outputs": [
|
|
256
|
+
{
|
|
257
|
+
"name": "stdout",
|
|
258
|
+
"output_type": "stream",
|
|
259
|
+
"text": [
|
|
260
|
+
"Equivalent SQL: select bitand(varbyte_col, byte_col) AS bitand_byte_, BITAND(id_col, byte_col) AS bitand_id_ from \"bytes_table\"\n",
|
|
261
|
+
"\n",
|
|
262
|
+
"\n",
|
|
263
|
+
" ************************* DataFrame ********************* \n",
|
|
264
|
+
" bitand_byte_ bitand_id_\n",
|
|
265
|
+
"0 b'63' 0\n",
|
|
266
|
+
"1 b'20' 0\n",
|
|
267
|
+
"2 b'60' 0\n",
|
|
268
|
+
"\n",
|
|
269
|
+
"\n",
|
|
270
|
+
"\n",
|
|
271
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
272
|
+
"bitand_byte_ bytes\n",
|
|
273
|
+
"bitand_id_ int\n",
|
|
274
|
+
"\n",
|
|
275
|
+
"\n",
|
|
276
|
+
"\n",
|
|
277
|
+
" 'bitand_id_' Column Type: INTEGER\n",
|
|
278
|
+
" 'bitand_byte_' Column Type: VARBYTE\n"
|
|
279
|
+
]
|
|
280
|
+
}
|
|
281
|
+
],
|
|
282
|
+
"source": [
|
|
283
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
284
|
+
"df = bytes_table.assign(True, bitand_id_=bit_byte_func_, \n",
|
|
285
|
+
" bitand_byte_=func.bitand(bytes_table.varbyte_col.expression, bytes_table.byte_col.expression)\n",
|
|
286
|
+
" )\n",
|
|
287
|
+
"print_variables(df, [\"bitand_id_\", \"bitand_byte_\"])"
|
|
288
|
+
]
|
|
289
|
+
},
|
|
290
|
+
{
|
|
291
|
+
"cell_type": "markdown",
|
|
292
|
+
"metadata": {},
|
|
293
|
+
"source": [
|
|
294
|
+
"## BITNOT Function"
|
|
295
|
+
]
|
|
296
|
+
},
|
|
297
|
+
{
|
|
298
|
+
"cell_type": "code",
|
|
299
|
+
"execution_count": 10,
|
|
300
|
+
"metadata": {},
|
|
301
|
+
"outputs": [],
|
|
302
|
+
"source": [
|
|
303
|
+
"# Function performs a bitwise complement on the binary representation of the input argument.\n",
|
|
304
|
+
"# Syntax:\n",
|
|
305
|
+
"# =======\n",
|
|
306
|
+
"# BITNOT(target_arg)\n",
|
|
307
|
+
"\n",
|
|
308
|
+
"# Argument Description:\n",
|
|
309
|
+
"# =====================\n",
|
|
310
|
+
"# target_arg\n",
|
|
311
|
+
"# A numeric or variable byte expression.\n",
|
|
312
|
+
"# Supported types for 'target_arg' are as follows:\n",
|
|
313
|
+
"# BYTEINT\n",
|
|
314
|
+
"# SMALLINT\n",
|
|
315
|
+
"# INTEGER\n",
|
|
316
|
+
"# BIGINT\n",
|
|
317
|
+
"# VARBYTE(n)"
|
|
318
|
+
]
|
|
319
|
+
},
|
|
320
|
+
{
|
|
321
|
+
"cell_type": "code",
|
|
322
|
+
"execution_count": 11,
|
|
323
|
+
"metadata": {},
|
|
324
|
+
"outputs": [
|
|
325
|
+
{
|
|
326
|
+
"data": {
|
|
327
|
+
"text/plain": [
|
|
328
|
+
"sqlalchemy.sql.functions.Function"
|
|
329
|
+
]
|
|
330
|
+
},
|
|
331
|
+
"execution_count": 11,
|
|
332
|
+
"metadata": {},
|
|
333
|
+
"output_type": "execute_result"
|
|
334
|
+
}
|
|
335
|
+
],
|
|
336
|
+
"source": [
|
|
337
|
+
"bit_byte_func_ = func.BITNOT(bytes_table.varbyte_col.expression)\n",
|
|
338
|
+
"type(bit_byte_func_)"
|
|
339
|
+
]
|
|
340
|
+
},
|
|
341
|
+
{
|
|
342
|
+
"cell_type": "code",
|
|
343
|
+
"execution_count": 12,
|
|
344
|
+
"metadata": {},
|
|
345
|
+
"outputs": [
|
|
346
|
+
{
|
|
347
|
+
"name": "stdout",
|
|
348
|
+
"output_type": "stream",
|
|
349
|
+
"text": [
|
|
350
|
+
"Equivalent SQL: select BITNOT(varbyte_col) AS bitnot_byte_ from \"bytes_table\"\n",
|
|
351
|
+
"\n",
|
|
352
|
+
"\n",
|
|
353
|
+
" ************************* DataFrame ********************* \n",
|
|
354
|
+
" bitnot_byte_\n",
|
|
355
|
+
"0 b'-616263643133'\n",
|
|
356
|
+
"1 b'-62717766'\n",
|
|
357
|
+
"2 b'-627A7864'\n",
|
|
358
|
+
"\n",
|
|
359
|
+
"\n",
|
|
360
|
+
"\n",
|
|
361
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
362
|
+
"bitnot_byte_ bytes\n",
|
|
363
|
+
"\n",
|
|
364
|
+
"\n",
|
|
365
|
+
"\n",
|
|
366
|
+
" 'bitnot_byte_' Column Type: VARBYTE\n"
|
|
367
|
+
]
|
|
368
|
+
}
|
|
369
|
+
],
|
|
370
|
+
"source": [
|
|
371
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
372
|
+
"df = bytes_table.assign(True, bitnot_byte_=bit_byte_func_)\n",
|
|
373
|
+
"print_variables(df, [\"bitnot_byte_\"])"
|
|
374
|
+
]
|
|
375
|
+
},
|
|
376
|
+
{
|
|
377
|
+
"cell_type": "markdown",
|
|
378
|
+
"metadata": {},
|
|
379
|
+
"source": [
|
|
380
|
+
"## BITOR Function"
|
|
381
|
+
]
|
|
382
|
+
},
|
|
383
|
+
{
|
|
384
|
+
"cell_type": "code",
|
|
385
|
+
"execution_count": 13,
|
|
386
|
+
"metadata": {},
|
|
387
|
+
"outputs": [],
|
|
388
|
+
"source": [
|
|
389
|
+
"# Function performs the logical OR operation on the corresponding bits from the two input arguments.\n",
|
|
390
|
+
"# Syntax:\n",
|
|
391
|
+
"# =======\n",
|
|
392
|
+
"# BITOR(target_arg, bit_mask_arg)\n",
|
|
393
|
+
"\n",
|
|
394
|
+
"# Argument Description:\n",
|
|
395
|
+
"# =====================\n",
|
|
396
|
+
"# target_arg\n",
|
|
397
|
+
"# A numeric or variable byte expression.\n",
|
|
398
|
+
"# Supported types for 'target_arg' are as follows:\n",
|
|
399
|
+
"# BYTEINT\n",
|
|
400
|
+
"# SMALLINT\n",
|
|
401
|
+
"# INTEGER\n",
|
|
402
|
+
"# BIGINT\n",
|
|
403
|
+
"# VARBYTE(n)\n",
|
|
404
|
+
"# bit_mask_arg\n",
|
|
405
|
+
"# A fixed byte value, a variable byte value, or a numeric expression.\n",
|
|
406
|
+
"\n",
|
|
407
|
+
"# Function Description:\n",
|
|
408
|
+
"# =====================\n",
|
|
409
|
+
"# This function takes two bit patterns of equal length and performs the logical OR operation on each pair \n",
|
|
410
|
+
"# of corresponding bits as follows:\n",
|
|
411
|
+
"# 1. If either of the bits from the input arguments is 1, then the result is 1.\n",
|
|
412
|
+
"# 2. If both of the bits from the input arguments are 0, then the result is 0.\n",
|
|
413
|
+
"# 3. If any of the input arguments is NULL, then the result is NULL.\n",
|
|
414
|
+
"#\n",
|
|
415
|
+
"# If the target_arg and bit_mask_arg arguments differ in length, the arguments are processed as follows:\n",
|
|
416
|
+
"# 1. The target_arg and bit_mask_arg arguments are aligned on their least significant byte/bit.\n",
|
|
417
|
+
"# 2. The smaller argument is padded with zeros to the left until it becomes the same size as the larger argument."
|
|
418
|
+
]
|
|
419
|
+
},
|
|
420
|
+
{
|
|
421
|
+
"cell_type": "markdown",
|
|
422
|
+
"metadata": {},
|
|
423
|
+
"source": [
|
|
424
|
+
"The data type of the bit_mask_arg parameter varies depending upon the data type of the target_arg parameter. \n",
|
|
425
|
+
"The following (target_arg, bit_mask_arg) input combinations are permitted:\n",
|
|
426
|
+
"\n",
|
|
427
|
+
"| target_arg type | bit_mask_arg type |\n",
|
|
428
|
+
"|------|------|\n",
|
|
429
|
+
"| BYTEINT\t| BYTE(1) |\n",
|
|
430
|
+
"| BYTEINT\t| BYTEINT |\n",
|
|
431
|
+
"| SMALLINT\t| BYTE(2) |\n",
|
|
432
|
+
"| SMALLINT\t| SMALLINT |\n",
|
|
433
|
+
"| INTEGER\t| BYTE(4) |\n",
|
|
434
|
+
"| INTEGER\t| INTEGER |\n",
|
|
435
|
+
"| BIGINT\t| BYTE(8) |\n",
|
|
436
|
+
"| BIGINT\t| BIGINT |\n",
|
|
437
|
+
"| VARBYTE(n)\t| VARBYTE(n) |"
|
|
438
|
+
]
|
|
439
|
+
},
|
|
440
|
+
{
|
|
441
|
+
"cell_type": "code",
|
|
442
|
+
"execution_count": 14,
|
|
443
|
+
"metadata": {},
|
|
444
|
+
"outputs": [
|
|
445
|
+
{
|
|
446
|
+
"data": {
|
|
447
|
+
"text/plain": [
|
|
448
|
+
"sqlalchemy.sql.functions.Function"
|
|
449
|
+
]
|
|
450
|
+
},
|
|
451
|
+
"execution_count": 14,
|
|
452
|
+
"metadata": {},
|
|
453
|
+
"output_type": "execute_result"
|
|
454
|
+
}
|
|
455
|
+
],
|
|
456
|
+
"source": [
|
|
457
|
+
"bit_byte_func_ = func.Bitor(bytes_table.id_col.expression, bytes_table.byte_col.expression)\n",
|
|
458
|
+
"type(bit_byte_func_)"
|
|
459
|
+
]
|
|
460
|
+
},
|
|
461
|
+
{
|
|
462
|
+
"cell_type": "code",
|
|
463
|
+
"execution_count": 15,
|
|
464
|
+
"metadata": {},
|
|
465
|
+
"outputs": [
|
|
466
|
+
{
|
|
467
|
+
"name": "stdout",
|
|
468
|
+
"output_type": "stream",
|
|
469
|
+
"text": [
|
|
470
|
+
"Equivalent SQL: select Bitor(id_col, byte_col) AS bitor_byte_ from \"bytes_table\"\n",
|
|
471
|
+
"\n",
|
|
472
|
+
"\n",
|
|
473
|
+
" ************************* DataFrame ********************* \n",
|
|
474
|
+
" bitor_byte_\n",
|
|
475
|
+
"0 1627389954\n",
|
|
476
|
+
"1 1644167169\n",
|
|
477
|
+
"2 1660944384\n",
|
|
478
|
+
"\n",
|
|
479
|
+
"\n",
|
|
480
|
+
"\n",
|
|
481
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
482
|
+
"bitor_byte_ int\n",
|
|
483
|
+
"\n",
|
|
484
|
+
"\n",
|
|
485
|
+
"\n",
|
|
486
|
+
" 'bitor_byte_' Column Type: INTEGER\n"
|
|
487
|
+
]
|
|
488
|
+
}
|
|
489
|
+
],
|
|
490
|
+
"source": [
|
|
491
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
492
|
+
"df = bytes_table.assign(True, bitor_byte_=bit_byte_func_)\n",
|
|
493
|
+
"print_variables(df, [\"bitor_byte_\"])"
|
|
494
|
+
]
|
|
495
|
+
},
|
|
496
|
+
{
|
|
497
|
+
"cell_type": "markdown",
|
|
498
|
+
"metadata": {},
|
|
499
|
+
"source": [
|
|
500
|
+
"## BITXOR Function"
|
|
501
|
+
]
|
|
502
|
+
},
|
|
503
|
+
{
|
|
504
|
+
"cell_type": "code",
|
|
505
|
+
"execution_count": 16,
|
|
506
|
+
"metadata": {},
|
|
507
|
+
"outputs": [],
|
|
508
|
+
"source": [
|
|
509
|
+
"# Function performs a bitwise XOR operation on the binary representation of the two input arguments.\n",
|
|
510
|
+
"# Syntax:\n",
|
|
511
|
+
"# =======\n",
|
|
512
|
+
"# BITXOR(target_arg, bit_mask_arg)\n",
|
|
513
|
+
"\n",
|
|
514
|
+
"# Argument Description:\n",
|
|
515
|
+
"# =====================\n",
|
|
516
|
+
"# target_arg\n",
|
|
517
|
+
"# A numeric or variable byte expression.\n",
|
|
518
|
+
"# Supported types for 'target_arg' are as follows:\n",
|
|
519
|
+
"# BYTEINT\n",
|
|
520
|
+
"# SMALLINT\n",
|
|
521
|
+
"# INTEGER\n",
|
|
522
|
+
"# BIGINT\n",
|
|
523
|
+
"# VARBYTE(n)\n",
|
|
524
|
+
"# bit_mask_arg\n",
|
|
525
|
+
"# A fixed byte value, a variable byte value, or a numeric expression.\n",
|
|
526
|
+
"\n",
|
|
527
|
+
"# Function Description:\n",
|
|
528
|
+
"# =====================\n",
|
|
529
|
+
"# This function takes two bit patterns of equal length and performs the logical XOR operation on each pair \n",
|
|
530
|
+
"# of corresponding bits as follows:\n",
|
|
531
|
+
"# 1. The result in each position is 1 if the two bits are different.\n",
|
|
532
|
+
"# 2. The result in each position is 0 if the two bits are same.\n",
|
|
533
|
+
"# 3. If any of the input arguments is NULL, then the result is NULL.\n",
|
|
534
|
+
"#\n",
|
|
535
|
+
"# If the target_arg and bit_mask_arg arguments differ in length, the arguments are processed as follows:\n",
|
|
536
|
+
"# 1. The target_arg and bit_mask_arg arguments are aligned on their least significant byte/bit.\n",
|
|
537
|
+
"# 2. The smaller argument is padded with zeros to the left until it becomes the same size as the larger argument."
|
|
538
|
+
]
|
|
539
|
+
},
|
|
540
|
+
{
|
|
541
|
+
"cell_type": "markdown",
|
|
542
|
+
"metadata": {},
|
|
543
|
+
"source": [
|
|
544
|
+
"The data type of the bit_mask_arg parameter varies depending upon the data type of the target_arg parameter. \n",
|
|
545
|
+
"The following (target_arg, bit_mask_arg) input combinations are permitted:\n",
|
|
546
|
+
"\n",
|
|
547
|
+
"| target_arg type | bit_mask_arg type |\n",
|
|
548
|
+
"|------|------|\n",
|
|
549
|
+
"| BYTEINT| BYTE(1) |\n",
|
|
550
|
+
"| BYTEINT| BYTEINT |\n",
|
|
551
|
+
"| SMALLINT| BYTE(2) |\n",
|
|
552
|
+
"| SMALLINT| SMALLINT |\n",
|
|
553
|
+
"| INTEGER| BYTE(4) |\n",
|
|
554
|
+
"| INTEGER| INTEGER |\n",
|
|
555
|
+
"| BIGINT| BYTE(8) |\n",
|
|
556
|
+
"| BIGINT| BIGINT |\n",
|
|
557
|
+
"| VARBYTE(n)| VARBYTE(n) |"
|
|
558
|
+
]
|
|
559
|
+
},
|
|
560
|
+
{
|
|
561
|
+
"cell_type": "code",
|
|
562
|
+
"execution_count": 17,
|
|
563
|
+
"metadata": {},
|
|
564
|
+
"outputs": [
|
|
565
|
+
{
|
|
566
|
+
"name": "stdout",
|
|
567
|
+
"output_type": "stream",
|
|
568
|
+
"text": [
|
|
569
|
+
"Equivalent SQL: select BITXOR(id_col, byte_col) AS bitxor_int_byte_ from \"bytes_table\"\n",
|
|
570
|
+
"\n",
|
|
571
|
+
"\n",
|
|
572
|
+
" ************************* DataFrame ********************* \n",
|
|
573
|
+
" bitxor_int_byte_\n",
|
|
574
|
+
"0 1660944384\n",
|
|
575
|
+
"1 1627389954\n",
|
|
576
|
+
"2 1644167169\n",
|
|
577
|
+
"\n",
|
|
578
|
+
"\n",
|
|
579
|
+
"\n",
|
|
580
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
581
|
+
"bitxor_int_byte_ int\n",
|
|
582
|
+
"\n",
|
|
583
|
+
"\n",
|
|
584
|
+
"\n",
|
|
585
|
+
" 'bitxor_int_byte_' Column Type: INTEGER\n"
|
|
586
|
+
]
|
|
587
|
+
}
|
|
588
|
+
],
|
|
589
|
+
"source": [
|
|
590
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
591
|
+
"df = bytes_table.assign(True, bitxor_int_byte_= func.BITXOR(bytes_table.id_col.expression, bytes_table.byte_col.expression))\n",
|
|
592
|
+
"print_variables(df, [\"bitxor_int_byte_\"])"
|
|
593
|
+
]
|
|
594
|
+
},
|
|
595
|
+
{
|
|
596
|
+
"cell_type": "markdown",
|
|
597
|
+
"metadata": {},
|
|
598
|
+
"source": [
|
|
599
|
+
"## COUNTSET Function"
|
|
600
|
+
]
|
|
601
|
+
},
|
|
602
|
+
{
|
|
603
|
+
"cell_type": "code",
|
|
604
|
+
"execution_count": 18,
|
|
605
|
+
"metadata": {},
|
|
606
|
+
"outputs": [],
|
|
607
|
+
"source": [
|
|
608
|
+
"# Function returns the count of the binary bits within the target_arg expression \n",
|
|
609
|
+
"# that are either set to 1 or set to 0 depending on the target_value_arg value.\n",
|
|
610
|
+
"# Syntax:\n",
|
|
611
|
+
"# =======\n",
|
|
612
|
+
"# COUNTSET(target_arg, target_value_arg)\n",
|
|
613
|
+
"\n",
|
|
614
|
+
"# Argument Description:\n",
|
|
615
|
+
"# =====================\n",
|
|
616
|
+
"# target_arg\n",
|
|
617
|
+
"# A numeric or variable byte expression.\n",
|
|
618
|
+
"# target_value_arg\n",
|
|
619
|
+
"# An integer value. Only a value of 0 or 1 is allowed. If target_value_arg is not specified, the default is 1.\n",
|
|
620
|
+
"\n",
|
|
621
|
+
"# Function Description:\n",
|
|
622
|
+
"# =====================\n",
|
|
623
|
+
"# COUNTSET takes the target_arg input expression and counts the number of bits within the expression \n",
|
|
624
|
+
"# that are either set to 1 or set to 0, depending on the value of target_value_arg.\n",
|
|
625
|
+
"#\n",
|
|
626
|
+
"# The target_value_arg parameter only accepts a value of 0 or 1. If a value for target_value_arg is \n",
|
|
627
|
+
"# not specified, the default value of 1 is used, and COUNTSET counts the bit values that are set to 1.\n",
|
|
628
|
+
"#\n",
|
|
629
|
+
"# If any of the input arguments is NULL, the function returns NULL."
|
|
630
|
+
]
|
|
631
|
+
},
|
|
632
|
+
{
|
|
633
|
+
"cell_type": "markdown",
|
|
634
|
+
"metadata": {},
|
|
635
|
+
"source": [
|
|
636
|
+
"It is defined with the following parameter data types for the following (target_arg [,target_value_arg]) input combinations:\n",
|
|
637
|
+
"\n",
|
|
638
|
+
"| target_arg type\t| target_value_arg type (optional) |\n",
|
|
639
|
+
"| ------- | ------ |\n",
|
|
640
|
+
"| BYTEINT |\tINTEGER |\n",
|
|
641
|
+
"| SMALLINT\t| INTEGER |\n",
|
|
642
|
+
"| INTEGER\t| INTEGER |\n",
|
|
643
|
+
"| BIGINT\t| INTEGER |\n",
|
|
644
|
+
"| VARBYTE(n) |\tINTEGER |"
|
|
645
|
+
]
|
|
646
|
+
},
|
|
647
|
+
{
|
|
648
|
+
"cell_type": "code",
|
|
649
|
+
"execution_count": 19,
|
|
650
|
+
"metadata": {},
|
|
651
|
+
"outputs": [
|
|
652
|
+
{
|
|
653
|
+
"name": "stdout",
|
|
654
|
+
"output_type": "stream",
|
|
655
|
+
"text": [
|
|
656
|
+
"Equivalent SQL: select COUNTSET(varbyte_col, 0) AS count_0s_varbyte_, COUNTSET(varbyte_col) AS count_1s_varbyte_ from \"bytes_table\"\n",
|
|
657
|
+
"\n",
|
|
658
|
+
"\n",
|
|
659
|
+
" ************************* DataFrame ********************* \n",
|
|
660
|
+
" count_0s_varbyte_ count_1s_varbyte_\n",
|
|
661
|
+
"0 16 16\n",
|
|
662
|
+
"1 29 19\n",
|
|
663
|
+
"2 15 17\n",
|
|
664
|
+
"\n",
|
|
665
|
+
"\n",
|
|
666
|
+
"\n",
|
|
667
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
668
|
+
"count_0s_varbyte_ int\n",
|
|
669
|
+
"count_1s_varbyte_ int\n",
|
|
670
|
+
"\n",
|
|
671
|
+
"\n",
|
|
672
|
+
"\n",
|
|
673
|
+
" 'count_1s_varbyte_' Column Type: INTEGER\n",
|
|
674
|
+
" 'count_0s_varbyte_' Column Type: INTEGER\n"
|
|
675
|
+
]
|
|
676
|
+
}
|
|
677
|
+
],
|
|
678
|
+
"source": [
|
|
679
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
680
|
+
"df = bytes_table.assign(True, \n",
|
|
681
|
+
" count_1s_varbyte_= func.COUNTSET(bytes_table.varbyte_col.expression),\n",
|
|
682
|
+
" count_0s_varbyte_= func.COUNTSET(bytes_table.varbyte_col.expression, 0)\n",
|
|
683
|
+
" )\n",
|
|
684
|
+
"print_variables(df, [\"count_1s_varbyte_\", \"count_0s_varbyte_\"])"
|
|
685
|
+
]
|
|
686
|
+
},
|
|
687
|
+
{
|
|
688
|
+
"cell_type": "markdown",
|
|
689
|
+
"metadata": {},
|
|
690
|
+
"source": [
|
|
691
|
+
"## GETBIT Function"
|
|
692
|
+
]
|
|
693
|
+
},
|
|
694
|
+
{
|
|
695
|
+
"cell_type": "code",
|
|
696
|
+
"execution_count": 20,
|
|
697
|
+
"metadata": {},
|
|
698
|
+
"outputs": [],
|
|
699
|
+
"source": [
|
|
700
|
+
"# Function returns the value of the bit specified by target_bit_arg from the target_arg byte expression.\n",
|
|
701
|
+
"# Syntax:\n",
|
|
702
|
+
"# =======\n",
|
|
703
|
+
"# GETBIT(target_arg, target_bit_arg)\n",
|
|
704
|
+
"\n",
|
|
705
|
+
"# Argument Description:\n",
|
|
706
|
+
"# =====================\n",
|
|
707
|
+
"# target_arg\n",
|
|
708
|
+
"# A numeric or variable byte expression.\n",
|
|
709
|
+
"# target_bit_arg\n",
|
|
710
|
+
"# An integer expression.\n",
|
|
711
|
+
"\n",
|
|
712
|
+
"# Function Description:\n",
|
|
713
|
+
"# =====================\n",
|
|
714
|
+
"# GETBIT gets the bit specified by target_bit_arg from the target_arg byte expression \n",
|
|
715
|
+
"# and returns either 0 or 1 to indicate the value of that bit.\n",
|
|
716
|
+
"#\n",
|
|
717
|
+
"# The range of input values for target_bit_arg can vary from 0 (bit 0 is the least significant bit) \n",
|
|
718
|
+
"# to the (sizeof(target_arg) - 1).\n",
|
|
719
|
+
"#\n",
|
|
720
|
+
"# If target_bit_arg is negative or out-of-range (meaning that it exceeds the size of target_arg), \n",
|
|
721
|
+
"# an error is returned.\n",
|
|
722
|
+
"#\n",
|
|
723
|
+
"# If either input argument is NULL, the function returns NULL."
|
|
724
|
+
]
|
|
725
|
+
},
|
|
726
|
+
{
|
|
727
|
+
"cell_type": "markdown",
|
|
728
|
+
"metadata": {},
|
|
729
|
+
"source": [
|
|
730
|
+
"It is defined with the following parameter data types for the following (target_arg, target_bit_arg) input combinations:\n",
|
|
731
|
+
"\n",
|
|
732
|
+
"| target_arg type\t| target_bit_arg type (optional) |\n",
|
|
733
|
+
"| ------- | ------ |\n",
|
|
734
|
+
"| BYTEINT |\tINTEGER |\n",
|
|
735
|
+
"| SMALLINT\t| INTEGER |\n",
|
|
736
|
+
"| INTEGER\t| INTEGER |\n",
|
|
737
|
+
"| BIGINT\t| INTEGER |\n",
|
|
738
|
+
"| VARBYTE(n) |\tINTEGER |"
|
|
739
|
+
]
|
|
740
|
+
},
|
|
741
|
+
{
|
|
742
|
+
"cell_type": "code",
|
|
743
|
+
"execution_count": 21,
|
|
744
|
+
"metadata": {},
|
|
745
|
+
"outputs": [
|
|
746
|
+
{
|
|
747
|
+
"name": "stdout",
|
|
748
|
+
"output_type": "stream",
|
|
749
|
+
"text": [
|
|
750
|
+
"Equivalent SQL: select GETBIT(id_col, 1) AS getbit_int_, GETBIT(varbyte_col, 3) AS getbit_varbyte_ from \"bytes_table\"\n",
|
|
751
|
+
"\n",
|
|
752
|
+
"\n",
|
|
753
|
+
" ************************* DataFrame ********************* \n",
|
|
754
|
+
" getbit_int_ getbit_varbyte_\n",
|
|
755
|
+
"0 0 0\n",
|
|
756
|
+
"1 1 0\n",
|
|
757
|
+
"2 0 0\n",
|
|
758
|
+
"\n",
|
|
759
|
+
"\n",
|
|
760
|
+
"\n",
|
|
761
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
762
|
+
"getbit_int_ int\n",
|
|
763
|
+
"getbit_varbyte_ int\n",
|
|
764
|
+
"\n",
|
|
765
|
+
"\n",
|
|
766
|
+
"\n",
|
|
767
|
+
" 'getbit_varbyte_' Column Type: BYTEINT\n",
|
|
768
|
+
" 'getbit_int_' Column Type: BYTEINT\n"
|
|
769
|
+
]
|
|
770
|
+
}
|
|
771
|
+
],
|
|
772
|
+
"source": [
|
|
773
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
774
|
+
"df = bytes_table.assign(True, \n",
|
|
775
|
+
" getbit_varbyte_= func.GETBIT(bytes_table.varbyte_col.expression, 3),\n",
|
|
776
|
+
" getbit_int_= func.GETBIT(bytes_table.id_col.expression, 1)\n",
|
|
777
|
+
" )\n",
|
|
778
|
+
"print_variables(df, [\"getbit_varbyte_\", \"getbit_int_\"])"
|
|
779
|
+
]
|
|
780
|
+
},
|
|
781
|
+
{
|
|
782
|
+
"cell_type": "markdown",
|
|
783
|
+
"metadata": {},
|
|
784
|
+
"source": [
|
|
785
|
+
"## ROTATELEFT Function"
|
|
786
|
+
]
|
|
787
|
+
},
|
|
788
|
+
{
|
|
789
|
+
"cell_type": "code",
|
|
790
|
+
"execution_count": 22,
|
|
791
|
+
"metadata": {},
|
|
792
|
+
"outputs": [],
|
|
793
|
+
"source": [
|
|
794
|
+
"# Function returns an expression rotated to the left by the number of bits you specify, \n",
|
|
795
|
+
"# with the most significant bits wrapping around to the right.\n",
|
|
796
|
+
"# Syntax:\n",
|
|
797
|
+
"# =======\n",
|
|
798
|
+
"# ROTATELEFT(target_arg, num_bits_arg)\n",
|
|
799
|
+
"\n",
|
|
800
|
+
"# Argument Description:\n",
|
|
801
|
+
"# =====================\n",
|
|
802
|
+
"# target_arg\n",
|
|
803
|
+
"# A numeric or variable byte expression.\n",
|
|
804
|
+
"# num_bits_arg\n",
|
|
805
|
+
"# An integer expression indicating the number of bit positions to rotate.\n",
|
|
806
|
+
"\n",
|
|
807
|
+
"# Function Description:\n",
|
|
808
|
+
"# =====================\n",
|
|
809
|
+
"# 1. If num_bits_arg is equal to zero then the function returns target_arg unchanged.\n",
|
|
810
|
+
"# 2. If num_bits_arg is negative then the function rotates the bits to the right instead of the left.\n",
|
|
811
|
+
"# 3. If target_arg and/or num_bits_arg are NULL then the function returns NULL.\n",
|
|
812
|
+
"# 4. If num_bits_arg is larger than the size of target_arg then the function rotates \n",
|
|
813
|
+
"# (num_bits_arg MOD sizeof(target_arg)) bits. The scope of the rotation operation is bounded by the size\n",
|
|
814
|
+
"# of the target_arg expression.\n",
|
|
815
|
+
"# NOTE:\n",
|
|
816
|
+
"# When operating against an integer value (BYTEINT, SMALLINT, INTEGER, or BIGINT), rotating a bit into the \n",
|
|
817
|
+
"# most significant position will result in the integer becoming negative. This is because all integers in \n",
|
|
818
|
+
"# Teradata Database are signed integers."
|
|
819
|
+
]
|
|
820
|
+
},
|
|
821
|
+
{
|
|
822
|
+
"cell_type": "markdown",
|
|
823
|
+
"metadata": {},
|
|
824
|
+
"source": [
|
|
825
|
+
"It is defined with the following parameter data types for the following (target_arg, num_bits_arg) input combinations:\n",
|
|
826
|
+
"\n",
|
|
827
|
+
"| target_arg type\t| num_bits_arg type (optional) |\n",
|
|
828
|
+
"| ------- | ------ |\n",
|
|
829
|
+
"| BYTEINT |\tINTEGER |\n",
|
|
830
|
+
"| SMALLINT\t| INTEGER |\n",
|
|
831
|
+
"| INTEGER\t| INTEGER |\n",
|
|
832
|
+
"| BIGINT\t| INTEGER |\n",
|
|
833
|
+
"| VARBYTE(n) |\tINTEGER |"
|
|
834
|
+
]
|
|
835
|
+
},
|
|
836
|
+
{
|
|
837
|
+
"cell_type": "code",
|
|
838
|
+
"execution_count": 23,
|
|
839
|
+
"metadata": {},
|
|
840
|
+
"outputs": [
|
|
841
|
+
{
|
|
842
|
+
"name": "stdout",
|
|
843
|
+
"output_type": "stream",
|
|
844
|
+
"text": [
|
|
845
|
+
"Equivalent SQL: select ROTATELEFT(id_col, 5) AS rotateleft_int_, ROTATELEFT(varbyte_col, 1) AS rotateleft_varbyte_ from \"bytes_table\"\n",
|
|
846
|
+
"\n",
|
|
847
|
+
"\n",
|
|
848
|
+
" ************************* DataFrame ********************* \n",
|
|
849
|
+
" rotateleft_int_ rotateleft_varbyte_\n",
|
|
850
|
+
"0 64 b'-3D3B39379D9C'\n",
|
|
851
|
+
"1 32 b'-3B1D1136'\n",
|
|
852
|
+
"2 0 b'-3B0B0F3A'\n",
|
|
853
|
+
"\n",
|
|
854
|
+
"\n",
|
|
855
|
+
"\n",
|
|
856
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
857
|
+
"rotateleft_int_ int\n",
|
|
858
|
+
"rotateleft_varbyte_ bytes\n",
|
|
859
|
+
"\n",
|
|
860
|
+
"\n",
|
|
861
|
+
"\n",
|
|
862
|
+
" 'rotateleft_varbyte_' Column Type: VARBYTE\n",
|
|
863
|
+
" 'rotateleft_int_' Column Type: INTEGER\n"
|
|
864
|
+
]
|
|
865
|
+
}
|
|
866
|
+
],
|
|
867
|
+
"source": [
|
|
868
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
869
|
+
"df = bytes_table.assign(True, \n",
|
|
870
|
+
" rotateleft_varbyte_= func.ROTATELEFT(bytes_table.varbyte_col.expression, 1),\n",
|
|
871
|
+
" rotateleft_int_= func.ROTATELEFT(bytes_table.id_col.expression, 5)\n",
|
|
872
|
+
" )\n",
|
|
873
|
+
"print_variables(df, [\"rotateleft_varbyte_\", \"rotateleft_int_\"])"
|
|
874
|
+
]
|
|
875
|
+
},
|
|
876
|
+
{
|
|
877
|
+
"cell_type": "markdown",
|
|
878
|
+
"metadata": {},
|
|
879
|
+
"source": [
|
|
880
|
+
"## ROTATERIGHT Function"
|
|
881
|
+
]
|
|
882
|
+
},
|
|
883
|
+
{
|
|
884
|
+
"cell_type": "code",
|
|
885
|
+
"execution_count": 24,
|
|
886
|
+
"metadata": {},
|
|
887
|
+
"outputs": [],
|
|
888
|
+
"source": [
|
|
889
|
+
"# Function returns an expression rotated to the right by the number of bits you specify, \n",
|
|
890
|
+
"# with the least significant bits wrapping around to the left.\n",
|
|
891
|
+
"# Syntax:\n",
|
|
892
|
+
"# =======\n",
|
|
893
|
+
"# ROTATERIGHT(target_arg, num_bits_arg)\n",
|
|
894
|
+
"\n",
|
|
895
|
+
"# Argument Description:\n",
|
|
896
|
+
"# =====================\n",
|
|
897
|
+
"# target_arg\n",
|
|
898
|
+
"# A numeric or variable byte expression.\n",
|
|
899
|
+
"# num_bits_arg\n",
|
|
900
|
+
"# An integer expression indicating the number of bit positions to rotate.\n",
|
|
901
|
+
"\n",
|
|
902
|
+
"# Function Description:\n",
|
|
903
|
+
"# =====================\n",
|
|
904
|
+
"# 1. If num_bits_arg is equal to zero then the function returns target_arg unchanged.\n",
|
|
905
|
+
"# 2. If num_bits_arg is negative then the function rotates the bits to the left instead of the right.\n",
|
|
906
|
+
"# 3. If target_arg and/or num_bits_arg are NULL then the function returns NULL.\n",
|
|
907
|
+
"# 4. If num_bits_arg is larger than the size of target_arg then the function rotates \n",
|
|
908
|
+
"# (num_bits_arg MOD sizeof(target_arg)) bits. The scope of the rotation operation is bounded by the size\n",
|
|
909
|
+
"# of the target_arg expression.\n",
|
|
910
|
+
"# NOTE:\n",
|
|
911
|
+
"# When operating against an integer value (BYTEINT, SMALLINT, INTEGER, or BIGINT), rotating a bit into the \n",
|
|
912
|
+
"# most significant position will result in the integer becoming negative. This is because all integers in \n",
|
|
913
|
+
"# Teradata Database are signed integers."
|
|
914
|
+
]
|
|
915
|
+
},
|
|
916
|
+
{
|
|
917
|
+
"cell_type": "markdown",
|
|
918
|
+
"metadata": {},
|
|
919
|
+
"source": [
|
|
920
|
+
"It is defined with the following parameter data types for the following (target_arg, num_bits_arg) input combinations:\n",
|
|
921
|
+
"\n",
|
|
922
|
+
"| target_arg type\t| num_bits_arg type (optional) |\n",
|
|
923
|
+
"| ------- | ------ |\n",
|
|
924
|
+
"| BYTEINT |\tINTEGER |\n",
|
|
925
|
+
"| SMALLINT\t| INTEGER |\n",
|
|
926
|
+
"| INTEGER\t| INTEGER |\n",
|
|
927
|
+
"| BIGINT\t| INTEGER |\n",
|
|
928
|
+
"| VARBYTE(n) |\tINTEGER |"
|
|
929
|
+
]
|
|
930
|
+
},
|
|
931
|
+
{
|
|
932
|
+
"cell_type": "code",
|
|
933
|
+
"execution_count": 25,
|
|
934
|
+
"metadata": {},
|
|
935
|
+
"outputs": [
|
|
936
|
+
{
|
|
937
|
+
"name": "stdout",
|
|
938
|
+
"output_type": "stream",
|
|
939
|
+
"text": [
|
|
940
|
+
"Equivalent SQL: select ROTATERIGHT(id_col, 2) AS rotateright_int_, rotateright(varbyte_col, 1) AS rotateright_varbyte_ from \"bytes_table\"\n",
|
|
941
|
+
"\n",
|
|
942
|
+
"\n",
|
|
943
|
+
" ************************* DataFrame ********************* \n",
|
|
944
|
+
" rotateright_int_ rotateright_varbyte_\n",
|
|
945
|
+
"0 -2147483648 b'30B131B21899'\n",
|
|
946
|
+
"1 1073741824 b'-4EC7444E'\n",
|
|
947
|
+
"2 0 b'-4EC2C3CF'\n",
|
|
948
|
+
"\n",
|
|
949
|
+
"\n",
|
|
950
|
+
"\n",
|
|
951
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
952
|
+
"rotateright_int_ int\n",
|
|
953
|
+
"rotateright_varbyte_ bytes\n",
|
|
954
|
+
"\n",
|
|
955
|
+
"\n",
|
|
956
|
+
"\n",
|
|
957
|
+
" 'rotateright_varbyte_' Column Type: VARBYTE\n",
|
|
958
|
+
" 'rotateright_int_' Column Type: INTEGER\n"
|
|
959
|
+
]
|
|
960
|
+
}
|
|
961
|
+
],
|
|
962
|
+
"source": [
|
|
963
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
964
|
+
"df = bytes_table.assign(True, \n",
|
|
965
|
+
" rotateright_varbyte_= func.rotateright(bytes_table.varbyte_col.expression, 1),\n",
|
|
966
|
+
" rotateright_int_= func.ROTATERIGHT(bytes_table.id_col.expression, 2)\n",
|
|
967
|
+
" )\n",
|
|
968
|
+
"print_variables(df, [\"rotateright_varbyte_\", \"rotateright_int_\"])"
|
|
969
|
+
]
|
|
970
|
+
},
|
|
971
|
+
{
|
|
972
|
+
"cell_type": "markdown",
|
|
973
|
+
"metadata": {},
|
|
974
|
+
"source": [
|
|
975
|
+
"## SETBIT Function"
|
|
976
|
+
]
|
|
977
|
+
},
|
|
978
|
+
{
|
|
979
|
+
"cell_type": "code",
|
|
980
|
+
"execution_count": 26,
|
|
981
|
+
"metadata": {},
|
|
982
|
+
"outputs": [],
|
|
983
|
+
"source": [
|
|
984
|
+
"# Function sets the value of the bit specified by target_bit_arg to the value of target_value_arg\n",
|
|
985
|
+
"# in the target_arg byte expression.\n",
|
|
986
|
+
"# Syntax:\n",
|
|
987
|
+
"# =======\n",
|
|
988
|
+
"# SETBIT(target_arg, target_bit_arg, target_value_arg)\n",
|
|
989
|
+
"\n",
|
|
990
|
+
"# Argument Description:\n",
|
|
991
|
+
"# =====================\n",
|
|
992
|
+
"# target_arg\n",
|
|
993
|
+
"# A numeric or variable byte expression.\n",
|
|
994
|
+
"# target_bit_arg\n",
|
|
995
|
+
"# An integer expression.\n",
|
|
996
|
+
"# target_value_arg\n",
|
|
997
|
+
"# An integer value. Only a value of 0 or 1 is allowed. If target_value_arg is not specified, the default is 1.\n",
|
|
998
|
+
" \n",
|
|
999
|
+
"# Function Description:\n",
|
|
1000
|
+
"# =====================\n",
|
|
1001
|
+
"# SETBIT takes the target_arg input and sets the bit specified by target_bit_arg to the value, 0 or 1, \n",
|
|
1002
|
+
"# as provided by the target_value_arg argument.\n",
|
|
1003
|
+
"#\n",
|
|
1004
|
+
"# The target_value_arg parameter only accepts a value of 0 or 1. If a value for target_value_arg is not specified, \n",
|
|
1005
|
+
"# the default value of 1 is used.\n",
|
|
1006
|
+
"#\n",
|
|
1007
|
+
"# The range of input values for target_bit_arg can vary from 0 (bit 0 is the least significant bit) \n",
|
|
1008
|
+
"# to the (sizeof(target_arg) - 1).\n",
|
|
1009
|
+
"#\n",
|
|
1010
|
+
"# If target_bit_arg is negative or out-of-range (meaning that it exceeds the size of target_arg), an error is returned.\n",
|
|
1011
|
+
"#\n",
|
|
1012
|
+
"# If any of the input arguments is NULL, the function returns NULL."
|
|
1013
|
+
]
|
|
1014
|
+
},
|
|
1015
|
+
{
|
|
1016
|
+
"cell_type": "markdown",
|
|
1017
|
+
"metadata": {},
|
|
1018
|
+
"source": [
|
|
1019
|
+
"It is defined with the following parameter data types for the following (target_arg, target_bit_arg, target_value_arg) input combinations:\n",
|
|
1020
|
+
"\n",
|
|
1021
|
+
"| target_arg type\t| target_bit_arg type | target_value_arg type(optional) |\n",
|
|
1022
|
+
"| ------- | ------ | ------ |\n",
|
|
1023
|
+
"| BYTEINT |\tINTEGER |\tINTEGER |\n",
|
|
1024
|
+
"| SMALLINT\t| INTEGER |\tINTEGER |\n",
|
|
1025
|
+
"| INTEGER\t| INTEGER |\tINTEGER |\n",
|
|
1026
|
+
"| BIGINT\t| INTEGER |\tINTEGER |\n",
|
|
1027
|
+
"| VARBYTE(n) |\tINTEGER |\tINTEGER |"
|
|
1028
|
+
]
|
|
1029
|
+
},
|
|
1030
|
+
{
|
|
1031
|
+
"cell_type": "code",
|
|
1032
|
+
"execution_count": 27,
|
|
1033
|
+
"metadata": {},
|
|
1034
|
+
"outputs": [
|
|
1035
|
+
{
|
|
1036
|
+
"name": "stdout",
|
|
1037
|
+
"output_type": "stream",
|
|
1038
|
+
"text": [
|
|
1039
|
+
"Equivalent SQL: select setbit(id_col, 1) AS setbit_int_1_1, SETBIT(id_col, 2, 0) AS setbit_int_2_0 from \"bytes_table\"\n",
|
|
1040
|
+
"\n",
|
|
1041
|
+
"\n",
|
|
1042
|
+
" ************************* DataFrame ********************* \n",
|
|
1043
|
+
" setbit_int_1_1 setbit_int_2_0\n",
|
|
1044
|
+
"0 2 0\n",
|
|
1045
|
+
"1 2 2\n",
|
|
1046
|
+
"2 3 1\n",
|
|
1047
|
+
"\n",
|
|
1048
|
+
"\n",
|
|
1049
|
+
"\n",
|
|
1050
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
1051
|
+
"setbit_int_1_1 int\n",
|
|
1052
|
+
"setbit_int_2_0 int\n",
|
|
1053
|
+
"\n",
|
|
1054
|
+
"\n",
|
|
1055
|
+
"\n",
|
|
1056
|
+
" 'setbit_int_1_1' Column Type: INTEGER\n",
|
|
1057
|
+
" 'setbit_int_2_0' Column Type: INTEGER\n"
|
|
1058
|
+
]
|
|
1059
|
+
}
|
|
1060
|
+
],
|
|
1061
|
+
"source": [
|
|
1062
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
1063
|
+
"df = bytes_table.assign(True, \n",
|
|
1064
|
+
" setbit_int_1_1= func.setbit(bytes_table.id_col.expression, 1),\n",
|
|
1065
|
+
" setbit_int_2_0= func.SETBIT(bytes_table.id_col.expression, 2, 0)\n",
|
|
1066
|
+
" )\n",
|
|
1067
|
+
"print_variables(df, [\"setbit_int_1_1\", \"setbit_int_2_0\"])"
|
|
1068
|
+
]
|
|
1069
|
+
},
|
|
1070
|
+
{
|
|
1071
|
+
"cell_type": "markdown",
|
|
1072
|
+
"metadata": {},
|
|
1073
|
+
"source": [
|
|
1074
|
+
"## SHIFTLEFT Function"
|
|
1075
|
+
]
|
|
1076
|
+
},
|
|
1077
|
+
{
|
|
1078
|
+
"cell_type": "code",
|
|
1079
|
+
"execution_count": 28,
|
|
1080
|
+
"metadata": {},
|
|
1081
|
+
"outputs": [],
|
|
1082
|
+
"source": [
|
|
1083
|
+
"# Function returns the expression target_arg shifted by the specified number of bits (num_bits_arg) to the left. \n",
|
|
1084
|
+
"# The bits in the most significant positions are lost, and the bits in the least significant positions are \n",
|
|
1085
|
+
"# filled with zeros.\n",
|
|
1086
|
+
"#\n",
|
|
1087
|
+
"# Syntax:\n",
|
|
1088
|
+
"# =======\n",
|
|
1089
|
+
"# SHIFTLEFT(target_arg, num_bits_arg)\n",
|
|
1090
|
+
"\n",
|
|
1091
|
+
"# Argument Description:\n",
|
|
1092
|
+
"# =====================\n",
|
|
1093
|
+
"# target_arg\n",
|
|
1094
|
+
"# A numeric or variable byte expression.\n",
|
|
1095
|
+
"# num_bits_arg\n",
|
|
1096
|
+
"# An integer expression indicating the number of bit positions to shift.\n",
|
|
1097
|
+
"\n",
|
|
1098
|
+
"# Function Description:\n",
|
|
1099
|
+
"# =====================\n",
|
|
1100
|
+
"# 1. If num_bits_arg is equal to zero then the function returns target_arg unchanged.\n",
|
|
1101
|
+
"# 2. If num_bits_arg is negative then the function shifts the bits to the right instead of the left.\n",
|
|
1102
|
+
"# 3. If target_arg and/or num_bits_arg are NULL then the function returns NULL.\n",
|
|
1103
|
+
"# 4. If num_bits_arg is larger than the size of target_arg then the function returns an error.\n",
|
|
1104
|
+
"# The scope of the shift operation is bounded by the size of the target_arg expression. \n",
|
|
1105
|
+
"# Specifying a shift that is outside the range of target_arg results in an SQL error.\n",
|
|
1106
|
+
"# NOTE:\n",
|
|
1107
|
+
"# When operating against an integer value (BYTEINT, SMALLINT, INTEGER, or BIGINT), shifting a bit \n",
|
|
1108
|
+
"# into the most significant position will result in the integer becoming negative. This is because \n",
|
|
1109
|
+
"# all integers in Teradata Database are signed integers."
|
|
1110
|
+
]
|
|
1111
|
+
},
|
|
1112
|
+
{
|
|
1113
|
+
"cell_type": "markdown",
|
|
1114
|
+
"metadata": {},
|
|
1115
|
+
"source": [
|
|
1116
|
+
"It is defined with the following parameter data types for the following (target_arg, num_bits_arg) input combinations:\n",
|
|
1117
|
+
"\n",
|
|
1118
|
+
"| target_arg type\t| num_bits_arg type (optional) |\n",
|
|
1119
|
+
"| ------- | ------ |\n",
|
|
1120
|
+
"| BYTEINT |\tINTEGER |\n",
|
|
1121
|
+
"| SMALLINT\t| INTEGER |\n",
|
|
1122
|
+
"| INTEGER\t| INTEGER |\n",
|
|
1123
|
+
"| BIGINT\t| INTEGER |\n",
|
|
1124
|
+
"| VARBYTE(n) |\tINTEGER |"
|
|
1125
|
+
]
|
|
1126
|
+
},
|
|
1127
|
+
{
|
|
1128
|
+
"cell_type": "code",
|
|
1129
|
+
"execution_count": 29,
|
|
1130
|
+
"metadata": {},
|
|
1131
|
+
"outputs": [
|
|
1132
|
+
{
|
|
1133
|
+
"name": "stdout",
|
|
1134
|
+
"output_type": "stream",
|
|
1135
|
+
"text": [
|
|
1136
|
+
"Equivalent SQL: select ShiftLeft(id_col, 2) AS shiftleft_int_, shiftleft(varbyte_col, 1) AS shiftleft_varbyte_ from \"bytes_table\"\n",
|
|
1137
|
+
"\n",
|
|
1138
|
+
"\n",
|
|
1139
|
+
" ************************* DataFrame ********************* \n",
|
|
1140
|
+
" shiftleft_int_ shiftleft_varbyte_\n",
|
|
1141
|
+
"0 8 b'-3D3B39379D9C'\n",
|
|
1142
|
+
"1 4 b'-3B1D1136'\n",
|
|
1143
|
+
"2 0 b'-3B0B0F3A'\n",
|
|
1144
|
+
"\n",
|
|
1145
|
+
"\n",
|
|
1146
|
+
"\n",
|
|
1147
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
1148
|
+
"shiftleft_int_ int\n",
|
|
1149
|
+
"shiftleft_varbyte_ bytes\n",
|
|
1150
|
+
"\n",
|
|
1151
|
+
"\n",
|
|
1152
|
+
"\n",
|
|
1153
|
+
" 'shiftleft_varbyte_' Column Type: VARBYTE\n",
|
|
1154
|
+
" 'shiftleft_int_' Column Type: INTEGER\n"
|
|
1155
|
+
]
|
|
1156
|
+
}
|
|
1157
|
+
],
|
|
1158
|
+
"source": [
|
|
1159
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
1160
|
+
"df = bytes_table.assign(True, \n",
|
|
1161
|
+
" shiftleft_varbyte_= func.shiftleft(bytes_table.varbyte_col.expression, 1),\n",
|
|
1162
|
+
" shiftleft_int_= func.ShiftLeft(bytes_table.id_col.expression, 2)\n",
|
|
1163
|
+
" )\n",
|
|
1164
|
+
"print_variables(df, [\"shiftleft_varbyte_\", \"shiftleft_int_\"])"
|
|
1165
|
+
]
|
|
1166
|
+
},
|
|
1167
|
+
{
|
|
1168
|
+
"cell_type": "markdown",
|
|
1169
|
+
"metadata": {},
|
|
1170
|
+
"source": [
|
|
1171
|
+
"## SHIFTRIGHT Function"
|
|
1172
|
+
]
|
|
1173
|
+
},
|
|
1174
|
+
{
|
|
1175
|
+
"cell_type": "code",
|
|
1176
|
+
"execution_count": 30,
|
|
1177
|
+
"metadata": {},
|
|
1178
|
+
"outputs": [],
|
|
1179
|
+
"source": [
|
|
1180
|
+
"# Function returns the expression target_arg shifted by the specified number of bits (num_bits_arg) to the right. \n",
|
|
1181
|
+
"# The bits in the least significant positions are lost, and the bits in the most significant positions are filled with zeros.\n",
|
|
1182
|
+
"# Syntax:\n",
|
|
1183
|
+
"# =======\n",
|
|
1184
|
+
"# SHIFTRIGHT(target_arg, num_bits_arg)\n",
|
|
1185
|
+
"\n",
|
|
1186
|
+
"# Argument Description:\n",
|
|
1187
|
+
"# =====================\n",
|
|
1188
|
+
"# target_arg\n",
|
|
1189
|
+
"# A numeric or variable byte expression.\n",
|
|
1190
|
+
"# num_bits_arg\n",
|
|
1191
|
+
"# An integer expression indicating the number of bit positions to shift.\n",
|
|
1192
|
+
"\n",
|
|
1193
|
+
"# Function Description:\n",
|
|
1194
|
+
"# =====================\n",
|
|
1195
|
+
"# 1. If num_bits_arg is equal to zero then the function returns target_arg unchanged.\n",
|
|
1196
|
+
"# 2. If num_bits_arg is negative then the function shifts the bits to the left instead of the right.\n",
|
|
1197
|
+
"# 3. If target_arg and/or num_bits_arg are NULL then the function returns NULL.\n",
|
|
1198
|
+
"# 4. If num_bits_arg is larger than the size of target_arg then the function returns an error.\n",
|
|
1199
|
+
"# The scope of the shift operation is bounded by the size of the target_arg expression. \n",
|
|
1200
|
+
"# Specifying a shift that is outside the range of target_arg results in an SQL error.\n",
|
|
1201
|
+
"# NOTE:\n",
|
|
1202
|
+
"# When operating against an integer value (BYTEINT, SMALLINT, INTEGER, or BIGINT), shifting a bit \n",
|
|
1203
|
+
"# into the most significant position will result in the integer becoming negative. This is because \n",
|
|
1204
|
+
"# all integers in Teradata Database are signed integers."
|
|
1205
|
+
]
|
|
1206
|
+
},
|
|
1207
|
+
{
|
|
1208
|
+
"cell_type": "markdown",
|
|
1209
|
+
"metadata": {},
|
|
1210
|
+
"source": [
|
|
1211
|
+
"It is defined with the following parameter data types for the following (target_arg, num_bits_arg) input combinations:\n",
|
|
1212
|
+
"\n",
|
|
1213
|
+
"| target_arg type\t| num_bits_arg type (optional) |\n",
|
|
1214
|
+
"| ------- | ------ |\n",
|
|
1215
|
+
"| BYTEINT |\tINTEGER |\n",
|
|
1216
|
+
"| SMALLINT\t| INTEGER |\n",
|
|
1217
|
+
"| INTEGER\t| INTEGER |\n",
|
|
1218
|
+
"| BIGINT\t| INTEGER |\n",
|
|
1219
|
+
"| VARBYTE(n) |\tINTEGER |"
|
|
1220
|
+
]
|
|
1221
|
+
},
|
|
1222
|
+
{
|
|
1223
|
+
"cell_type": "code",
|
|
1224
|
+
"execution_count": 31,
|
|
1225
|
+
"metadata": {},
|
|
1226
|
+
"outputs": [
|
|
1227
|
+
{
|
|
1228
|
+
"name": "stdout",
|
|
1229
|
+
"output_type": "stream",
|
|
1230
|
+
"text": [
|
|
1231
|
+
"Equivalent SQL: select ShiftRight(id_col, 2) AS shiftright_int_, shiftright(varbyte_col, 1) AS shiftright_varbyte_ from \"bytes_table\"\n",
|
|
1232
|
+
"\n",
|
|
1233
|
+
"\n",
|
|
1234
|
+
" ************************* DataFrame ********************* \n",
|
|
1235
|
+
" shiftright_int_ shiftright_varbyte_\n",
|
|
1236
|
+
"0 0 b'30B131B21899'\n",
|
|
1237
|
+
"1 0 b'3138BBB2'\n",
|
|
1238
|
+
"2 0 b'313D3C31'\n",
|
|
1239
|
+
"\n",
|
|
1240
|
+
"\n",
|
|
1241
|
+
"\n",
|
|
1242
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
1243
|
+
"shiftright_int_ int\n",
|
|
1244
|
+
"shiftright_varbyte_ bytes\n",
|
|
1245
|
+
"\n",
|
|
1246
|
+
"\n",
|
|
1247
|
+
"\n",
|
|
1248
|
+
" 'shiftright_varbyte_' Column Type: VARBYTE\n",
|
|
1249
|
+
" 'shiftright_int_' Column Type: INTEGER\n"
|
|
1250
|
+
]
|
|
1251
|
+
}
|
|
1252
|
+
],
|
|
1253
|
+
"source": [
|
|
1254
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
1255
|
+
"df = bytes_table.assign(True, \n",
|
|
1256
|
+
" shiftright_varbyte_= func.shiftright(bytes_table.varbyte_col.expression, 1),\n",
|
|
1257
|
+
" shiftright_int_= func.ShiftRight(bytes_table.id_col.expression, 2)\n",
|
|
1258
|
+
" )\n",
|
|
1259
|
+
"print_variables(df, [\"shiftright_varbyte_\", \"shiftright_int_\"])"
|
|
1260
|
+
]
|
|
1261
|
+
},
|
|
1262
|
+
{
|
|
1263
|
+
"cell_type": "markdown",
|
|
1264
|
+
"metadata": {},
|
|
1265
|
+
"source": [
|
|
1266
|
+
"## SUBBITSTR Function"
|
|
1267
|
+
]
|
|
1268
|
+
},
|
|
1269
|
+
{
|
|
1270
|
+
"cell_type": "code",
|
|
1271
|
+
"execution_count": 32,
|
|
1272
|
+
"metadata": {},
|
|
1273
|
+
"outputs": [],
|
|
1274
|
+
"source": [
|
|
1275
|
+
"# Function extracts a bit substring from the target_arg input expression based on the specified bit position.\n",
|
|
1276
|
+
"#\n",
|
|
1277
|
+
"# Syntax:\n",
|
|
1278
|
+
"# =======\n",
|
|
1279
|
+
"# SUBBITSTR(target_arg, position_arg, num_bits_arg)\n",
|
|
1280
|
+
"\n",
|
|
1281
|
+
"# Argument Description:\n",
|
|
1282
|
+
"# =====================\n",
|
|
1283
|
+
"# target_arg\n",
|
|
1284
|
+
"# A numeric or variable byte expression.\n",
|
|
1285
|
+
"# position_arg\n",
|
|
1286
|
+
"# An integer expression indicating the starting position of the bit substring to be extracted.\n",
|
|
1287
|
+
"# num_bits_arg\n",
|
|
1288
|
+
"# An integer expression indicating the length of the bit substring to be extracted. \n",
|
|
1289
|
+
"# This specifies the number of bits for the function to return.\n",
|
|
1290
|
+
"\n",
|
|
1291
|
+
"# Function Description:\n",
|
|
1292
|
+
"# =====================\n",
|
|
1293
|
+
"# SUBBITSTR extracts a bit substring from the target_arg string expression starting at the bit position \n",
|
|
1294
|
+
"# specified by position_arg. For the range of bit positions for each data type, see Bit and Byte Numbering Model.\n",
|
|
1295
|
+
"#\n",
|
|
1296
|
+
"# The num_bits_arg value specifies the length of the bit substring to be extracted and indicates the number\n",
|
|
1297
|
+
"# of bits that the function should return. Because the return value of the function is a VARBYTE string, \n",
|
|
1298
|
+
"# the number of bits returned is rounded to the byte boundary greater than the number of bits requested.\n",
|
|
1299
|
+
"#\n",
|
|
1300
|
+
"# The bits returned are right-justified, and the excess bits (those exceeding the requested number of bits) \n",
|
|
1301
|
+
"# are filled with zeros.\n",
|
|
1302
|
+
"#\n",
|
|
1303
|
+
"# If position_arg is negative or out-of-range (meaning that it exceeds the size of target_arg), an error is returned.\n",
|
|
1304
|
+
"#\n",
|
|
1305
|
+
"# If num_bits_arg is negative, or is greater than the number of bits remaining after the starting \n",
|
|
1306
|
+
"# position_arg is taken into account, an error is returned.\n",
|
|
1307
|
+
"#\n",
|
|
1308
|
+
"# If any of the input arguments is NULL, the function returns NULL."
|
|
1309
|
+
]
|
|
1310
|
+
},
|
|
1311
|
+
{
|
|
1312
|
+
"cell_type": "markdown",
|
|
1313
|
+
"metadata": {},
|
|
1314
|
+
"source": [
|
|
1315
|
+
"It is defined with the following parameter data types for the following (target_arg, position_arg, num_bits_arg) input combinations:\n",
|
|
1316
|
+
"\n",
|
|
1317
|
+
"| target_arg type\t| position_arg type | num_bits_arg type |\n",
|
|
1318
|
+
"| ------- | ------ | ------ |\n",
|
|
1319
|
+
"| BYTEINT |\tINTEGER |\tINTEGER |\n",
|
|
1320
|
+
"| SMALLINT\t| INTEGER |\tINTEGER |\n",
|
|
1321
|
+
"| INTEGER\t| INTEGER |\tINTEGER |\n",
|
|
1322
|
+
"| BIGINT\t| INTEGER |\tINTEGER |\n",
|
|
1323
|
+
"| VARBYTE(n) |\tINTEGER |\tINTEGER |"
|
|
1324
|
+
]
|
|
1325
|
+
},
|
|
1326
|
+
{
|
|
1327
|
+
"cell_type": "code",
|
|
1328
|
+
"execution_count": 33,
|
|
1329
|
+
"metadata": {},
|
|
1330
|
+
"outputs": [
|
|
1331
|
+
{
|
|
1332
|
+
"name": "stdout",
|
|
1333
|
+
"output_type": "stream",
|
|
1334
|
+
"text": [
|
|
1335
|
+
"Equivalent SQL: select SUBBITSTR(varbyte_col, 3, 4) AS subbitstr_varbyte_ from \"bytes_table\"\n",
|
|
1336
|
+
"\n",
|
|
1337
|
+
"\n",
|
|
1338
|
+
" ************************* DataFrame ********************* \n",
|
|
1339
|
+
" subbitstr_varbyte_\n",
|
|
1340
|
+
"0 b'C'\n",
|
|
1341
|
+
"1 b'6'\n",
|
|
1342
|
+
"2 b'C'\n",
|
|
1343
|
+
"\n",
|
|
1344
|
+
"\n",
|
|
1345
|
+
"\n",
|
|
1346
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
1347
|
+
"subbitstr_varbyte_ bytes\n",
|
|
1348
|
+
"\n",
|
|
1349
|
+
"\n",
|
|
1350
|
+
"\n",
|
|
1351
|
+
" 'subbitstr_varbyte_' Column Type: VARBYTE\n"
|
|
1352
|
+
]
|
|
1353
|
+
}
|
|
1354
|
+
],
|
|
1355
|
+
"source": [
|
|
1356
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
1357
|
+
"df = bytes_table.assign(True, \n",
|
|
1358
|
+
" subbitstr_varbyte_= func.SUBBITSTR(bytes_table.varbyte_col.expression, 3, 4)\n",
|
|
1359
|
+
" )\n",
|
|
1360
|
+
"print_variables(df, [\"subbitstr_varbyte_\"])"
|
|
1361
|
+
]
|
|
1362
|
+
},
|
|
1363
|
+
{
|
|
1364
|
+
"cell_type": "markdown",
|
|
1365
|
+
"metadata": {},
|
|
1366
|
+
"source": [
|
|
1367
|
+
"## TO_BYTE Function"
|
|
1368
|
+
]
|
|
1369
|
+
},
|
|
1370
|
+
{
|
|
1371
|
+
"cell_type": "code",
|
|
1372
|
+
"execution_count": 34,
|
|
1373
|
+
"metadata": {},
|
|
1374
|
+
"outputs": [],
|
|
1375
|
+
"source": [
|
|
1376
|
+
"# Function converts a numeric data type to the Teradata Database server byte representation (byte value) of the input value.\n",
|
|
1377
|
+
"# Syntax:\n",
|
|
1378
|
+
"# =======\n",
|
|
1379
|
+
"# TO_BYTE(target_arg)\n",
|
|
1380
|
+
"\n",
|
|
1381
|
+
"# Argument Description:\n",
|
|
1382
|
+
"# =====================\n",
|
|
1383
|
+
"# target_arg\n",
|
|
1384
|
+
"# A numeric or variable byte expression.\n",
|
|
1385
|
+
"# It is defined with the following parameter data types:\n",
|
|
1386
|
+
"# BYTEINT\n",
|
|
1387
|
+
"# SMALLINT\n",
|
|
1388
|
+
"# INTEGER\n",
|
|
1389
|
+
"# BIGINT\n",
|
|
1390
|
+
"\n",
|
|
1391
|
+
"# Function Description:\n",
|
|
1392
|
+
"# =====================\n",
|
|
1393
|
+
"# The number of bytes returned by the function varies according to the data type of the target_arg value.\n",
|
|
1394
|
+
"#\n",
|
|
1395
|
+
"# If target_arg is NULL, the function returns NULL."
|
|
1396
|
+
]
|
|
1397
|
+
},
|
|
1398
|
+
{
|
|
1399
|
+
"cell_type": "code",
|
|
1400
|
+
"execution_count": 35,
|
|
1401
|
+
"metadata": {},
|
|
1402
|
+
"outputs": [
|
|
1403
|
+
{
|
|
1404
|
+
"name": "stdout",
|
|
1405
|
+
"output_type": "stream",
|
|
1406
|
+
"text": [
|
|
1407
|
+
"Equivalent SQL: select to_byte(id_col) AS int_to_byte_ from \"bytes_table\"\n",
|
|
1408
|
+
"\n",
|
|
1409
|
+
"\n",
|
|
1410
|
+
" ************************* DataFrame ********************* \n",
|
|
1411
|
+
" int_to_byte_\n",
|
|
1412
|
+
"0 b'2'\n",
|
|
1413
|
+
"1 b'1'\n",
|
|
1414
|
+
"2 b'0'\n",
|
|
1415
|
+
"\n",
|
|
1416
|
+
"\n",
|
|
1417
|
+
"\n",
|
|
1418
|
+
" ************************* DataFrame.dtypes ********************* \n",
|
|
1419
|
+
"int_to_byte_ bytes\n",
|
|
1420
|
+
"\n",
|
|
1421
|
+
"\n",
|
|
1422
|
+
"\n",
|
|
1423
|
+
" 'int_to_byte_' Column Type: BYTE\n"
|
|
1424
|
+
]
|
|
1425
|
+
}
|
|
1426
|
+
],
|
|
1427
|
+
"source": [
|
|
1428
|
+
"# Note: Function name case does not matter. We can use the function name in lower case, upper case or mixed case.\n",
|
|
1429
|
+
"df = bytes_table.assign(True, \n",
|
|
1430
|
+
" int_to_byte_= func.to_byte(bytes_table.id_col.expression)\n",
|
|
1431
|
+
" )\n",
|
|
1432
|
+
"print_variables(df, [\"int_to_byte_\"])"
|
|
1433
|
+
]
|
|
1434
|
+
},
|
|
1435
|
+
{
|
|
1436
|
+
"cell_type": "markdown",
|
|
1437
|
+
"metadata": {},
|
|
1438
|
+
"source": [
|
|
1439
|
+
"## NOTE: If an incorrect type of input is passed to any of these functions, one may see the following error:\n",
|
|
1440
|
+
" [Error 9881] Function 'to_byte' called with an invalid number or type of parameters"
|
|
1441
|
+
]
|
|
1442
|
+
},
|
|
1443
|
+
{
|
|
1444
|
+
"cell_type": "code",
|
|
1445
|
+
"execution_count": 36,
|
|
1446
|
+
"metadata": {},
|
|
1447
|
+
"outputs": [
|
|
1448
|
+
{
|
|
1449
|
+
"data": {
|
|
1450
|
+
"text/plain": [
|
|
1451
|
+
"True"
|
|
1452
|
+
]
|
|
1453
|
+
},
|
|
1454
|
+
"execution_count": 36,
|
|
1455
|
+
"metadata": {},
|
|
1456
|
+
"output_type": "execute_result"
|
|
1457
|
+
}
|
|
1458
|
+
],
|
|
1459
|
+
"source": [
|
|
1460
|
+
"remove_context()"
|
|
1461
|
+
]
|
|
1462
|
+
},
|
|
1463
|
+
{
|
|
1464
|
+
"cell_type": "code",
|
|
1465
|
+
"execution_count": null,
|
|
1466
|
+
"metadata": {},
|
|
1467
|
+
"outputs": [],
|
|
1468
|
+
"source": []
|
|
1469
|
+
}
|
|
1470
|
+
],
|
|
1471
|
+
"metadata": {
|
|
1472
|
+
"kernelspec": {
|
|
1473
|
+
"display_name": "Python 3",
|
|
1474
|
+
"language": "python",
|
|
1475
|
+
"name": "python3"
|
|
1476
|
+
},
|
|
1477
|
+
"language_info": {
|
|
1478
|
+
"codemirror_mode": {
|
|
1479
|
+
"name": "ipython",
|
|
1480
|
+
"version": 3
|
|
1481
|
+
},
|
|
1482
|
+
"file_extension": ".py",
|
|
1483
|
+
"mimetype": "text/x-python",
|
|
1484
|
+
"name": "python",
|
|
1485
|
+
"nbconvert_exporter": "python",
|
|
1486
|
+
"pygments_lexer": "ipython3",
|
|
1487
|
+
"version": "3.7.1"
|
|
1488
|
+
}
|
|
1489
|
+
},
|
|
1490
|
+
"nbformat": 4,
|
|
1491
|
+
"nbformat_minor": 2
|
|
1492
|
+
}
|