teradataml 17.20.0.6__py3-none-any.whl → 20.0.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +238 -1
- teradataml/__init__.py +13 -3
- teradataml/_version.py +1 -1
- teradataml/analytics/Transformations.py +4 -4
- teradataml/analytics/__init__.py +0 -2
- teradataml/analytics/analytic_function_executor.py +3 -0
- teradataml/analytics/json_parser/utils.py +13 -12
- teradataml/analytics/sqle/DecisionTreePredict.py +15 -30
- teradataml/analytics/sqle/NaiveBayesPredict.py +11 -20
- teradataml/analytics/sqle/__init__.py +0 -13
- teradataml/analytics/utils.py +1 -0
- teradataml/analytics/valib.py +3 -0
- teradataml/automl/__init__.py +1628 -0
- teradataml/automl/custom_json_utils.py +1270 -0
- teradataml/automl/data_preparation.py +993 -0
- teradataml/automl/data_transformation.py +727 -0
- teradataml/automl/feature_engineering.py +1648 -0
- teradataml/automl/feature_exploration.py +547 -0
- teradataml/automl/model_evaluation.py +163 -0
- teradataml/automl/model_training.py +887 -0
- teradataml/catalog/__init__.py +0 -2
- teradataml/catalog/byom.py +49 -6
- teradataml/catalog/function_argument_mapper.py +0 -2
- teradataml/catalog/model_cataloging_utils.py +2 -1021
- teradataml/common/aed_utils.py +6 -2
- teradataml/common/constants.py +50 -58
- teradataml/common/deprecations.py +160 -0
- teradataml/common/garbagecollector.py +61 -104
- teradataml/common/messagecodes.py +27 -36
- teradataml/common/messages.py +11 -15
- teradataml/common/utils.py +205 -287
- teradataml/common/wrapper_utils.py +1 -110
- teradataml/context/context.py +150 -78
- teradataml/data/bank_churn.csv +10001 -0
- teradataml/data/bmi.csv +501 -0
- teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +3 -3
- teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +6 -5
- teradataml/data/docs/sqle/docs_17_10/Fit.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +2 -2
- teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +2 -1
- teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +1 -0
- teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_10/Transform.py +2 -1
- teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +3 -3
- teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +6 -5
- teradataml/data/docs/sqle/docs_17_20/Fit.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/GLM.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +9 -10
- teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +3 -2
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +16 -15
- teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +2 -2
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +2 -2
- teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +8 -8
- teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +21 -20
- teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +8 -3
- teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +6 -5
- teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +6 -6
- teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +2 -1
- teradataml/data/docs/sqle/docs_17_20/SVM.py +1 -1
- teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +16 -16
- teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +1 -0
- teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +3 -2
- teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +4 -4
- teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +19 -19
- teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +5 -4
- teradataml/data/docs/sqle/docs_17_20/Transform.py +2 -2
- teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +9 -9
- teradataml/data/fish.csv +160 -0
- teradataml/data/glass_types.csv +215 -0
- teradataml/data/insurance.csv +1 -1
- teradataml/data/iris_data.csv +151 -0
- teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +1 -0
- teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +1 -0
- teradataml/data/load_example_data.py +3 -0
- teradataml/data/multi_model_classification.csv +401 -0
- teradataml/data/multi_model_regression.csv +401 -0
- teradataml/data/openml_example.json +63 -0
- teradataml/data/scripts/deploy_script.py +65 -0
- teradataml/data/scripts/mapper.R +20 -0
- teradataml/data/scripts/sklearn/__init__.py +0 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +175 -0
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +135 -0
- teradataml/data/scripts/sklearn/sklearn_function.template +113 -0
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +158 -0
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +152 -0
- teradataml/data/scripts/sklearn/sklearn_score.py +128 -0
- teradataml/data/scripts/sklearn/sklearn_transform.py +179 -0
- teradataml/data/templates/open_source_ml.json +9 -0
- teradataml/data/teradataml_example.json +73 -1
- teradataml/data/test_classification.csv +101 -0
- teradataml/data/test_prediction.csv +101 -0
- teradataml/data/test_regression.csv +101 -0
- teradataml/data/train_multiclass.csv +101 -0
- teradataml/data/train_regression.csv +101 -0
- teradataml/data/train_regression_multiple_labels.csv +101 -0
- teradataml/data/wine_data.csv +1600 -0
- teradataml/dataframe/copy_to.py +79 -13
- teradataml/dataframe/data_transfer.py +8 -0
- teradataml/dataframe/dataframe.py +910 -311
- teradataml/dataframe/dataframe_utils.py +102 -5
- teradataml/dataframe/fastload.py +11 -3
- teradataml/dataframe/setop.py +15 -2
- teradataml/dataframe/sql.py +3735 -77
- teradataml/dataframe/sql_function_parameters.py +56 -5
- teradataml/dataframe/vantage_function_types.py +45 -1
- teradataml/dataframe/window.py +30 -29
- teradataml/dbutils/dbutils.py +18 -1
- teradataml/geospatial/geodataframe.py +18 -7
- teradataml/geospatial/geodataframecolumn.py +5 -0
- teradataml/hyperparameter_tuner/optimizer.py +910 -120
- teradataml/hyperparameter_tuner/utils.py +131 -37
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/lib/libaed_0_1.dylib +0 -0
- teradataml/lib/libaed_0_1.so +0 -0
- teradataml/libaed_0_1.dylib +0 -0
- teradataml/libaed_0_1.so +0 -0
- teradataml/opensource/__init__.py +1 -0
- teradataml/opensource/sklearn/__init__.py +1 -0
- teradataml/opensource/sklearn/_class.py +255 -0
- teradataml/opensource/sklearn/_sklearn_wrapper.py +1668 -0
- teradataml/opensource/sklearn/_wrapper_utils.py +268 -0
- teradataml/opensource/sklearn/constants.py +54 -0
- teradataml/options/__init__.py +3 -6
- teradataml/options/configure.py +21 -20
- teradataml/scriptmgmt/UserEnv.py +61 -5
- teradataml/scriptmgmt/lls_utils.py +135 -53
- teradataml/table_operators/Apply.py +38 -6
- teradataml/table_operators/Script.py +45 -308
- teradataml/table_operators/TableOperator.py +182 -591
- teradataml/table_operators/__init__.py +0 -1
- teradataml/table_operators/table_operator_util.py +32 -40
- teradataml/utils/validators.py +127 -3
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/METADATA +243 -3
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/RECORD +147 -391
- teradataml/analytics/mle/AdaBoost.py +0 -651
- teradataml/analytics/mle/AdaBoostPredict.py +0 -564
- teradataml/analytics/mle/Antiselect.py +0 -342
- teradataml/analytics/mle/Arima.py +0 -641
- teradataml/analytics/mle/ArimaPredict.py +0 -477
- teradataml/analytics/mle/Attribution.py +0 -1070
- teradataml/analytics/mle/Betweenness.py +0 -658
- teradataml/analytics/mle/Burst.py +0 -711
- teradataml/analytics/mle/CCM.py +0 -600
- teradataml/analytics/mle/CCMPrepare.py +0 -324
- teradataml/analytics/mle/CFilter.py +0 -460
- teradataml/analytics/mle/ChangePointDetection.py +0 -572
- teradataml/analytics/mle/ChangePointDetectionRT.py +0 -477
- teradataml/analytics/mle/Closeness.py +0 -737
- teradataml/analytics/mle/ConfusionMatrix.py +0 -420
- teradataml/analytics/mle/Correlation.py +0 -477
- teradataml/analytics/mle/Correlation2.py +0 -573
- teradataml/analytics/mle/CoxHazardRatio.py +0 -679
- teradataml/analytics/mle/CoxPH.py +0 -556
- teradataml/analytics/mle/CoxSurvival.py +0 -478
- teradataml/analytics/mle/CumulativeMovAvg.py +0 -363
- teradataml/analytics/mle/DTW.py +0 -623
- teradataml/analytics/mle/DWT.py +0 -564
- teradataml/analytics/mle/DWT2D.py +0 -599
- teradataml/analytics/mle/DecisionForest.py +0 -716
- teradataml/analytics/mle/DecisionForestEvaluator.py +0 -363
- teradataml/analytics/mle/DecisionForestPredict.py +0 -561
- teradataml/analytics/mle/DecisionTree.py +0 -830
- teradataml/analytics/mle/DecisionTreePredict.py +0 -528
- teradataml/analytics/mle/ExponentialMovAvg.py +0 -418
- teradataml/analytics/mle/FMeasure.py +0 -402
- teradataml/analytics/mle/FPGrowth.py +0 -734
- teradataml/analytics/mle/FrequentPaths.py +0 -695
- teradataml/analytics/mle/GLM.py +0 -558
- teradataml/analytics/mle/GLML1L2.py +0 -547
- teradataml/analytics/mle/GLML1L2Predict.py +0 -519
- teradataml/analytics/mle/GLMPredict.py +0 -529
- teradataml/analytics/mle/HMMDecoder.py +0 -945
- teradataml/analytics/mle/HMMEvaluator.py +0 -901
- teradataml/analytics/mle/HMMSupervised.py +0 -521
- teradataml/analytics/mle/HMMUnsupervised.py +0 -572
- teradataml/analytics/mle/Histogram.py +0 -561
- teradataml/analytics/mle/IDWT.py +0 -476
- teradataml/analytics/mle/IDWT2D.py +0 -493
- teradataml/analytics/mle/IdentityMatch.py +0 -763
- teradataml/analytics/mle/Interpolator.py +0 -918
- teradataml/analytics/mle/KMeans.py +0 -485
- teradataml/analytics/mle/KNN.py +0 -627
- teradataml/analytics/mle/KNNRecommender.py +0 -488
- teradataml/analytics/mle/KNNRecommenderPredict.py +0 -581
- teradataml/analytics/mle/LAR.py +0 -439
- teradataml/analytics/mle/LARPredict.py +0 -478
- teradataml/analytics/mle/LDA.py +0 -548
- teradataml/analytics/mle/LDAInference.py +0 -492
- teradataml/analytics/mle/LDATopicSummary.py +0 -464
- teradataml/analytics/mle/LevenshteinDistance.py +0 -450
- teradataml/analytics/mle/LinReg.py +0 -433
- teradataml/analytics/mle/LinRegPredict.py +0 -438
- teradataml/analytics/mle/MinHash.py +0 -544
- teradataml/analytics/mle/Modularity.py +0 -587
- teradataml/analytics/mle/NEREvaluator.py +0 -410
- teradataml/analytics/mle/NERExtractor.py +0 -595
- teradataml/analytics/mle/NERTrainer.py +0 -458
- teradataml/analytics/mle/NGrams.py +0 -570
- teradataml/analytics/mle/NPath.py +0 -634
- teradataml/analytics/mle/NTree.py +0 -549
- teradataml/analytics/mle/NaiveBayes.py +0 -462
- teradataml/analytics/mle/NaiveBayesPredict.py +0 -513
- teradataml/analytics/mle/NaiveBayesTextClassifier.py +0 -607
- teradataml/analytics/mle/NaiveBayesTextClassifier2.py +0 -531
- teradataml/analytics/mle/NaiveBayesTextClassifierPredict.py +0 -799
- teradataml/analytics/mle/NamedEntityFinder.py +0 -529
- teradataml/analytics/mle/NamedEntityFinderEvaluator.py +0 -414
- teradataml/analytics/mle/NamedEntityFinderTrainer.py +0 -396
- teradataml/analytics/mle/POSTagger.py +0 -417
- teradataml/analytics/mle/Pack.py +0 -411
- teradataml/analytics/mle/PageRank.py +0 -535
- teradataml/analytics/mle/PathAnalyzer.py +0 -426
- teradataml/analytics/mle/PathGenerator.py +0 -367
- teradataml/analytics/mle/PathStart.py +0 -464
- teradataml/analytics/mle/PathSummarizer.py +0 -470
- teradataml/analytics/mle/Pivot.py +0 -471
- teradataml/analytics/mle/ROC.py +0 -425
- teradataml/analytics/mle/RandomSample.py +0 -637
- teradataml/analytics/mle/RandomWalkSample.py +0 -490
- teradataml/analytics/mle/SAX.py +0 -779
- teradataml/analytics/mle/SVMDense.py +0 -677
- teradataml/analytics/mle/SVMDensePredict.py +0 -536
- teradataml/analytics/mle/SVMDenseSummary.py +0 -437
- teradataml/analytics/mle/SVMSparse.py +0 -557
- teradataml/analytics/mle/SVMSparsePredict.py +0 -553
- teradataml/analytics/mle/SVMSparseSummary.py +0 -435
- teradataml/analytics/mle/Sampling.py +0 -549
- teradataml/analytics/mle/Scale.py +0 -565
- teradataml/analytics/mle/ScaleByPartition.py +0 -496
- teradataml/analytics/mle/ScaleMap.py +0 -378
- teradataml/analytics/mle/ScaleSummary.py +0 -320
- teradataml/analytics/mle/SentenceExtractor.py +0 -363
- teradataml/analytics/mle/SentimentEvaluator.py +0 -432
- teradataml/analytics/mle/SentimentExtractor.py +0 -578
- teradataml/analytics/mle/SentimentTrainer.py +0 -405
- teradataml/analytics/mle/SeriesSplitter.py +0 -641
- teradataml/analytics/mle/Sessionize.py +0 -475
- teradataml/analytics/mle/SimpleMovAvg.py +0 -397
- teradataml/analytics/mle/StringSimilarity.py +0 -425
- teradataml/analytics/mle/TF.py +0 -389
- teradataml/analytics/mle/TFIDF.py +0 -504
- teradataml/analytics/mle/TextChunker.py +0 -414
- teradataml/analytics/mle/TextClassifier.py +0 -399
- teradataml/analytics/mle/TextClassifierEvaluator.py +0 -413
- teradataml/analytics/mle/TextClassifierTrainer.py +0 -565
- teradataml/analytics/mle/TextMorph.py +0 -494
- teradataml/analytics/mle/TextParser.py +0 -623
- teradataml/analytics/mle/TextTagger.py +0 -530
- teradataml/analytics/mle/TextTokenizer.py +0 -502
- teradataml/analytics/mle/UnivariateStatistics.py +0 -488
- teradataml/analytics/mle/Unpack.py +0 -526
- teradataml/analytics/mle/Unpivot.py +0 -438
- teradataml/analytics/mle/VarMax.py +0 -776
- teradataml/analytics/mle/VectorDistance.py +0 -762
- teradataml/analytics/mle/WeightedMovAvg.py +0 -400
- teradataml/analytics/mle/XGBoost.py +0 -842
- teradataml/analytics/mle/XGBoostPredict.py +0 -627
- teradataml/analytics/mle/__init__.py +0 -123
- teradataml/analytics/mle/json/adaboost_mle.json +0 -135
- teradataml/analytics/mle/json/adaboostpredict_mle.json +0 -85
- teradataml/analytics/mle/json/antiselect_mle.json +0 -34
- teradataml/analytics/mle/json/antiselect_mle_mle.json +0 -34
- teradataml/analytics/mle/json/arima_mle.json +0 -172
- teradataml/analytics/mle/json/arimapredict_mle.json +0 -52
- teradataml/analytics/mle/json/attribution_mle_mle.json +0 -143
- teradataml/analytics/mle/json/betweenness_mle.json +0 -97
- teradataml/analytics/mle/json/burst_mle.json +0 -140
- teradataml/analytics/mle/json/ccm_mle.json +0 -124
- teradataml/analytics/mle/json/ccmprepare_mle.json +0 -14
- teradataml/analytics/mle/json/cfilter_mle.json +0 -93
- teradataml/analytics/mle/json/changepointdetection_mle.json +0 -92
- teradataml/analytics/mle/json/changepointdetectionrt_mle.json +0 -78
- teradataml/analytics/mle/json/closeness_mle.json +0 -104
- teradataml/analytics/mle/json/confusionmatrix_mle.json +0 -79
- teradataml/analytics/mle/json/correlation_mle.json +0 -86
- teradataml/analytics/mle/json/correlationreduce_mle.json +0 -49
- teradataml/analytics/mle/json/coxhazardratio_mle.json +0 -89
- teradataml/analytics/mle/json/coxph_mle.json +0 -98
- teradataml/analytics/mle/json/coxsurvival_mle.json +0 -79
- teradataml/analytics/mle/json/cumulativemovavg_mle.json +0 -34
- teradataml/analytics/mle/json/decisionforest_mle.json +0 -167
- teradataml/analytics/mle/json/decisionforestevaluator_mle.json +0 -33
- teradataml/analytics/mle/json/decisionforestpredict_mle_mle.json +0 -74
- teradataml/analytics/mle/json/decisiontree_mle.json +0 -194
- teradataml/analytics/mle/json/decisiontreepredict_mle_mle.json +0 -86
- teradataml/analytics/mle/json/dtw_mle.json +0 -97
- teradataml/analytics/mle/json/dwt2d_mle.json +0 -116
- teradataml/analytics/mle/json/dwt_mle.json +0 -101
- teradataml/analytics/mle/json/exponentialmovavg_mle.json +0 -55
- teradataml/analytics/mle/json/fmeasure_mle.json +0 -58
- teradataml/analytics/mle/json/fpgrowth_mle.json +0 -159
- teradataml/analytics/mle/json/frequentpaths_mle.json +0 -129
- teradataml/analytics/mle/json/glm_mle.json +0 -111
- teradataml/analytics/mle/json/glml1l2_mle.json +0 -106
- teradataml/analytics/mle/json/glml1l2predict_mle.json +0 -57
- teradataml/analytics/mle/json/glmpredict_mle_mle.json +0 -74
- teradataml/analytics/mle/json/histogram_mle.json +0 -100
- teradataml/analytics/mle/json/hmmdecoder_mle.json +0 -192
- teradataml/analytics/mle/json/hmmevaluator_mle.json +0 -206
- teradataml/analytics/mle/json/hmmsupervised_mle.json +0 -91
- teradataml/analytics/mle/json/hmmunsupervised_mle.json +0 -114
- teradataml/analytics/mle/json/identitymatch_mle.json +0 -88
- teradataml/analytics/mle/json/idwt2d_mle.json +0 -73
- teradataml/analytics/mle/json/idwt_mle.json +0 -66
- teradataml/analytics/mle/json/interpolator_mle.json +0 -151
- teradataml/analytics/mle/json/kmeans_mle.json +0 -97
- teradataml/analytics/mle/json/knn_mle.json +0 -141
- teradataml/analytics/mle/json/knnrecommender_mle.json +0 -111
- teradataml/analytics/mle/json/knnrecommenderpredict_mle.json +0 -75
- teradataml/analytics/mle/json/lar_mle.json +0 -78
- teradataml/analytics/mle/json/larpredict_mle.json +0 -69
- teradataml/analytics/mle/json/lda_mle.json +0 -130
- teradataml/analytics/mle/json/ldainference_mle.json +0 -78
- teradataml/analytics/mle/json/ldatopicsummary_mle.json +0 -64
- teradataml/analytics/mle/json/levenshteindistance_mle.json +0 -92
- teradataml/analytics/mle/json/linreg_mle.json +0 -42
- teradataml/analytics/mle/json/linregpredict_mle.json +0 -56
- teradataml/analytics/mle/json/minhash_mle.json +0 -113
- teradataml/analytics/mle/json/modularity_mle.json +0 -91
- teradataml/analytics/mle/json/naivebayespredict_mle_mle.json +0 -85
- teradataml/analytics/mle/json/naivebayesreduce_mle.json +0 -52
- teradataml/analytics/mle/json/naivebayestextclassifierpredict_mle_mle.json +0 -147
- teradataml/analytics/mle/json/naivebayestextclassifiertrainer2_mle.json +0 -108
- teradataml/analytics/mle/json/naivebayestextclassifiertrainer_mle.json +0 -102
- teradataml/analytics/mle/json/namedentityfinder_mle.json +0 -84
- teradataml/analytics/mle/json/namedentityfinderevaluatorreduce_mle.json +0 -43
- teradataml/analytics/mle/json/namedentityfindertrainer_mle.json +0 -64
- teradataml/analytics/mle/json/nerevaluator_mle.json +0 -54
- teradataml/analytics/mle/json/nerextractor_mle.json +0 -87
- teradataml/analytics/mle/json/nertrainer_mle.json +0 -89
- teradataml/analytics/mle/json/ngrams_mle.json +0 -137
- teradataml/analytics/mle/json/ngramsplitter_mle_mle.json +0 -137
- teradataml/analytics/mle/json/npath@coprocessor_mle.json +0 -73
- teradataml/analytics/mle/json/ntree@coprocessor_mle.json +0 -123
- teradataml/analytics/mle/json/pack_mle.json +0 -58
- teradataml/analytics/mle/json/pack_mle_mle.json +0 -58
- teradataml/analytics/mle/json/pagerank_mle.json +0 -81
- teradataml/analytics/mle/json/pathanalyzer_mle.json +0 -63
- teradataml/analytics/mle/json/pathgenerator_mle.json +0 -40
- teradataml/analytics/mle/json/pathstart_mle.json +0 -62
- teradataml/analytics/mle/json/pathsummarizer_mle.json +0 -72
- teradataml/analytics/mle/json/pivoting_mle.json +0 -71
- teradataml/analytics/mle/json/postagger_mle.json +0 -51
- teradataml/analytics/mle/json/randomsample_mle.json +0 -131
- teradataml/analytics/mle/json/randomwalksample_mle.json +0 -85
- teradataml/analytics/mle/json/roc_mle.json +0 -73
- teradataml/analytics/mle/json/sampling_mle.json +0 -75
- teradataml/analytics/mle/json/sax_mle.json +0 -154
- teradataml/analytics/mle/json/scale_mle.json +0 -93
- teradataml/analytics/mle/json/scalebypartition_mle.json +0 -89
- teradataml/analytics/mle/json/scalemap_mle.json +0 -44
- teradataml/analytics/mle/json/scalesummary_mle.json +0 -14
- teradataml/analytics/mle/json/sentenceextractor_mle.json +0 -41
- teradataml/analytics/mle/json/sentimentevaluator_mle.json +0 -43
- teradataml/analytics/mle/json/sentimentextractor_mle.json +0 -100
- teradataml/analytics/mle/json/sentimenttrainer_mle.json +0 -68
- teradataml/analytics/mle/json/seriessplitter_mle.json +0 -133
- teradataml/analytics/mle/json/sessionize_mle_mle.json +0 -62
- teradataml/analytics/mle/json/simplemovavg_mle.json +0 -48
- teradataml/analytics/mle/json/stringsimilarity_mle.json +0 -50
- teradataml/analytics/mle/json/stringsimilarity_mle_mle.json +0 -50
- teradataml/analytics/mle/json/svmdense_mle.json +0 -165
- teradataml/analytics/mle/json/svmdensepredict_mle.json +0 -95
- teradataml/analytics/mle/json/svmdensesummary_mle.json +0 -58
- teradataml/analytics/mle/json/svmsparse_mle.json +0 -148
- teradataml/analytics/mle/json/svmsparsepredict_mle_mle.json +0 -103
- teradataml/analytics/mle/json/svmsparsesummary_mle.json +0 -57
- teradataml/analytics/mle/json/textchunker_mle.json +0 -40
- teradataml/analytics/mle/json/textclassifier_mle.json +0 -51
- teradataml/analytics/mle/json/textclassifierevaluator_mle.json +0 -43
- teradataml/analytics/mle/json/textclassifiertrainer_mle.json +0 -103
- teradataml/analytics/mle/json/textmorph_mle.json +0 -63
- teradataml/analytics/mle/json/textparser_mle.json +0 -166
- teradataml/analytics/mle/json/texttagger_mle.json +0 -81
- teradataml/analytics/mle/json/texttokenizer_mle.json +0 -91
- teradataml/analytics/mle/json/tf_mle.json +0 -33
- teradataml/analytics/mle/json/tfidf_mle.json +0 -34
- teradataml/analytics/mle/json/univariatestatistics_mle.json +0 -81
- teradataml/analytics/mle/json/unpack_mle.json +0 -91
- teradataml/analytics/mle/json/unpack_mle_mle.json +0 -91
- teradataml/analytics/mle/json/unpivoting_mle.json +0 -63
- teradataml/analytics/mle/json/varmax_mle.json +0 -176
- teradataml/analytics/mle/json/vectordistance_mle.json +0 -179
- teradataml/analytics/mle/json/weightedmovavg_mle.json +0 -48
- teradataml/analytics/mle/json/xgboost_mle.json +0 -178
- teradataml/analytics/mle/json/xgboostpredict_mle.json +0 -104
- teradataml/analytics/sqle/Antiselect.py +0 -321
- teradataml/analytics/sqle/Attribution.py +0 -603
- teradataml/analytics/sqle/DecisionForestPredict.py +0 -408
- teradataml/analytics/sqle/GLMPredict.py +0 -430
- teradataml/analytics/sqle/MovingAverage.py +0 -543
- teradataml/analytics/sqle/NGramSplitter.py +0 -548
- teradataml/analytics/sqle/NPath.py +0 -632
- teradataml/analytics/sqle/NaiveBayesTextClassifierPredict.py +0 -515
- teradataml/analytics/sqle/Pack.py +0 -388
- teradataml/analytics/sqle/SVMSparsePredict.py +0 -464
- teradataml/analytics/sqle/Sessionize.py +0 -390
- teradataml/analytics/sqle/StringSimilarity.py +0 -400
- teradataml/analytics/sqle/Unpack.py +0 -503
- teradataml/analytics/sqle/json/antiselect_sqle.json +0 -21
- teradataml/analytics/sqle/json/attribution_sqle.json +0 -92
- teradataml/analytics/sqle/json/decisionforestpredict_sqle.json +0 -48
- teradataml/analytics/sqle/json/glmpredict_sqle.json +0 -48
- teradataml/analytics/sqle/json/h2opredict_sqle.json +0 -63
- teradataml/analytics/sqle/json/movingaverage_sqle.json +0 -58
- teradataml/analytics/sqle/json/naivebayestextclassifierpredict_sqle.json +0 -76
- teradataml/analytics/sqle/json/ngramsplitter_sqle.json +0 -126
- teradataml/analytics/sqle/json/npath_sqle.json +0 -67
- teradataml/analytics/sqle/json/pack_sqle.json +0 -47
- teradataml/analytics/sqle/json/pmmlpredict_sqle.json +0 -55
- teradataml/analytics/sqle/json/sessionize_sqle.json +0 -43
- teradataml/analytics/sqle/json/stringsimilarity_sqle.json +0 -39
- teradataml/analytics/sqle/json/svmsparsepredict_sqle.json +0 -74
- teradataml/analytics/sqle/json/unpack_sqle.json +0 -80
- teradataml/catalog/model_cataloging.py +0 -980
- teradataml/config/mlengine_alias_definitions_v1.0 +0 -118
- teradataml/config/mlengine_alias_definitions_v1.1 +0 -127
- teradataml/config/mlengine_alias_definitions_v1.3 +0 -129
- teradataml/table_operators/sandbox_container_util.py +0 -643
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/WHEEL +0 -0
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/top_level.txt +0 -0
- {teradataml-17.20.0.6.dist-info → teradataml-20.0.0.0.dist-info}/zip-safe +0 -0
@@ -0,0 +1,152 @@
+import pickle
+import math
+import sys
+import numpy as np
+
+# The below import is needed to convert sparse matrix to dense array as sparse matrices are NOT
+# supported in Vantage.
+# This is in scipy 1.6.x. Might vary based on scipy version.
+from scipy.sparse.csr import csr_matrix
+
+
+DELIMITER = '\t'
+
+
+def get_value(value):
+    ret_val = value
+    try:
+        ret_val = float(value.replace(' ', ''))
+    except Exception as ex:
+        # If the value can't be converted to float, then it is a string.
+        pass
+    return ret_val
+
+
+def get_values_list(values, ignore_none=True):
+    ret_vals = []
+    for val in values:
+        if val == "" and ignore_none:
+            # Empty cell value in the database table.
+            continue
+        ret_vals.append(get_value(val))
+
+    return ret_vals
+
+def convert_to_type(val, typee):
+    if typee == 'int':
+        return int(val)
+    if typee == 'float':
+        return float(val)
+    if typee == 'bool':
+        return eval(val)
+    return str(val)
+
+def splitter(strr, delim=",", convert_to="str"):
+    """
+    Split the string based on delimiter and convert to the type specified.
+    """
+    if strr == "None":
+        return []
+    return [convert_to_type(i, convert_to) for i in strr.split(delim)]
+
+
+# Arguments to the Script
+if len(sys.argv) < 7:
+    # At least 7 command line arguments should be passed to this file.
+    # 1. File to be run.
+    # 2. Function name.
+    # 3. No of feature columns.
+    # 4. Comma separated indices of partition columns.
+    # 5. Comma separated types of the partition columns.
+    # 6. Model file prefix to generate model file using partition columns.
+    # 7. Flag to check the system type. True means Lake, Enterprise otherwise.
+    # 8. OPTIONAL - Arguments in string format like "return_distance True-bool",
+    #    "n_neighbors 3-int", "radius 3.4-float" etc.
+    sys.exit("At least 7 arguments should be passed to this file - file to be run, function name, "\
+             "no of feature columns, comma separated indices and types of partition columns, "\
+             "model file prefix to generate model file using partition columns, flag to check "\
+             "lake or enterprise and optional arguments in string format.")
+
+convert_to_int = lambda x: int(x) if x != "None" else None
+
+is_lake_system = eval(sys.argv[6])
+if not is_lake_system:
+    db = sys.argv[0].split("/")[1]
+func_name = sys.argv[1]
+n_f_cols = convert_to_int(sys.argv[2])
+data_partition_column_types = splitter(sys.argv[4])
+data_partition_column_indices = splitter(sys.argv[3], convert_to="int")  # indices are integers.
+model_file_prefix = sys.argv[5]
+# Extract arguments from string.
+arguments = {}
+for i in range(7, len(sys.argv), 2):
+    value = sys.argv[i + 1].split("-", 1)
+    arguments[sys.argv[i]] = convert_to_type(value[0], value[1])
+
+model = None
+data_partition_column_values = []
+
+# Data Format:
+# feature1, feature2, ..., featuren, label1, label2, ... labelk, data_partition_column1, ...,
+# data_partition_columnn.
+# label is optional (it is present when label_exists is not "None")
+
+# `return_distance` is needed as the result is a tuple of two arrays when it is True.
+return_distance = arguments.get("return_distance", True)  # Default value is True.
+
+while 1:
+    try:
+        line = input()
+        if line == '':  # Exit if user provides blank line
+            break
+        else:
+            values = line.split(DELIMITER)
+            if not data_partition_column_values:
+                # Partition column values are the same for all rows. Hence, only read once.
+                for i, val in enumerate(data_partition_column_indices):
+                    data_partition_column_values.append(
+                        convert_to_type(values[val], typee=data_partition_column_types[i])
+                    )
+
+                # Prepare the corresponding model file name and extract model.
+                partition_join = "_".join([str(x) for x in data_partition_column_values])
+                # Replace '-' with '_' because partition column values can be negative.
+                partition_join = partition_join.replace("-", "_")
+
+                model_file_path = f"{model_file_prefix}_{partition_join}" \
+                    if is_lake_system else \
+                    f"./{db}/{model_file_prefix}_{partition_join}"
+
+                with open(model_file_path, "rb") as fp:
+                    model = pickle.loads(fp.read())
+
+                if not model:
+                    sys.exit("Model file is not installed in Vantage.")
+
+            f_ = get_values_list(values[:n_f_cols])
+            if f_:
+                output = getattr(model, func_name)(np.array([f_]), **arguments)
+            else:
+                output = getattr(model, func_name)(**arguments)
+            result_list = f_
+
+            if func_name in ['kneighbors', 'radius_neighbors']:
+                if return_distance:
+                    result_list += [str(output[0][0].tolist()), str(output[1][0].tolist())]
+                else:
+                    result_list += [str(output[0].tolist())]
+            else:
+                # Cases like 'kneighbors_graph', 'radius_neighbors_graph' and other functions.
+                if isinstance(output, csr_matrix):
+                    # 'kneighbors_graph' and 'radius_neighbors_graph' return a sparse matrix.
+                    output = output.toarray()
+                result_list += [str(output[0].tolist())]
+
+            print(*(data_partition_column_values +
+                    ['' if (val is None or (not isinstance(val, str) and
+                                            (math.isnan(val) or math.isinf(val))))
+                     else val
+                     for val in result_list]), sep=DELIMITER)
+
+    except EOFError:  # Exit if reached EOF or CTRL-D
+        break
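The 152-line script above matches the line count of the new teradataml/data/scripts/sklearn/sklearn_neighbors.py in the file listing, so it is presumably that file: it reads tab-delimited rows from stdin, loads a pickled model whose file name is built from the model-file prefix and the partition-column values, calls the requested method (kneighbors, radius_neighbors, the *_graph variants, and so on), and prints the partition values followed by the results. Below is a minimal local smoke test of that protocol; it is not part of the diff, and the file name, model prefix, partition value, and column layout are all illustrative assumptions (it also assumes a scipy old enough to provide scipy.sparse.csr, per the script's own comment).

# Hypothetical harness, not from the package: pickle a toy NearestNeighbors
# model under the "<prefix>_<partition>" name the script constructs, then pipe
# one tab-delimited row (two features plus the partition column) through it.
import pickle
import subprocess
import sys

from sklearn.neighbors import NearestNeighbors

# Toy model stored as "nn_model_1": prefix "nn_model", partition value 1.
model = NearestNeighbors(n_neighbors=2).fit([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]])
with open("nn_model_1", "wb") as fp:
    fp.write(pickle.dumps(model))

# One stdin row: feature1 <TAB> feature2 <TAB> partition value.
row = "0.1\t0.2\t1\n"

# Positional arguments mirror the script: function name, number of feature
# columns, partition column indices, partition column types, model file prefix,
# lake flag ("True" keeps the plain file path), then optional "name value-type" pairs.
proc = subprocess.run(
    [sys.executable, "sklearn_neighbors.py", "kneighbors", "2", "2", "int",
     "nn_model", "True", "return_distance", "True-bool"],
    input=row, capture_output=True, text=True)

# Expect one tab-delimited line: partition value, features, neighbor distances, neighbor indices.
print(proc.stdout)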
@@ -0,0 +1,128 @@
+import pickle
+import math
+import sys
+import numpy as np
+
+DELIMITER = '\t'
+
+
+def get_value(value):
+    ret_val = value
+    try:
+        ret_val = float("".join(value.split()))
+    except Exception as ex:
+        # If the value can't be converted to float, then it is a string.
+        pass
+    return ret_val
+
+
+def get_values_list(values, ignore_none=True):
+    ret_vals = []
+    for val in values:
+        if val == "" and ignore_none:
+            # Empty cell value in the database table.
+            continue
+        ret_vals.append(get_value(val))
+
+    return ret_vals
+
+def convert_to_type(val, typee):
+    if typee == 'int':
+        return int(val)
+    if typee == 'float':
+        return float(val)
+    if typee == 'bool':
+        return bool(val)
+    return str(val)
+
+def splitter(strr, delim=",", convert_to="str"):
+    """
+    Split the string based on delimiter and convert to the type specified.
+    """
+    if strr == "None":
+        return []
+    return [convert_to_type(i, convert_to) for i in strr.split(delim)]
+
+# Arguments to the Script
+if len(sys.argv) != 8:
+    # 8 command line arguments should be passed to this file.
+    # 1. File to be run.
+    # 2. Function name (e.g. score, aic etc).
+    # 3. No of feature columns.
+    # 4. No of class labels.
+    # 5. Comma separated indices of partition columns.
+    # 6. Comma separated types of the partition columns.
+    # 7. Model file prefix to generate model file using partition columns.
+    # 8. Flag to check the system type. True means Lake, Enterprise otherwise.
+    sys.exit("8 arguments should be passed to this file - file to be run, function name, "\
+             "no of feature columns, no of class labels, comma separated indices and types of "\
+             "partition columns, model file prefix to generate model file using partition "\
+             "columns and flag to check lake or enterprise.")
+
+is_lake_system = eval(sys.argv[7])
+if not is_lake_system:
+    db = sys.argv[0].split("/")[1]
+func_name = sys.argv[1]
+n_f_cols = int(sys.argv[2])
+n_c_labels = int(sys.argv[3])
+data_partition_column_types = splitter(sys.argv[5])
+data_partition_column_indices = splitter(sys.argv[4], convert_to="int")  # indices are integers.
+model_file_prefix = sys.argv[6]
+
+model = None
+
+# Data Format (n_features, k_labels, one data_partition_column):
+# feature1, feature2, ..., featuren, label1, label2, ... labelk, data_partition_column1, ...,
+# data_partition_columnn.
+# labels are optional.
+
+data_partition_column_values = []
+
+features = []
+labels = []
+while 1:
+    try:
+        line = input()
+        if line == '':  # Exit if user provides blank line
+            break
+        else:
+            values = line.split(DELIMITER)
+            features.append(get_values_list(values[:n_f_cols]))
+            if n_c_labels > 0:
+                labels.append(get_values_list(values[n_f_cols:(n_f_cols+n_c_labels)]))
+
+            if not data_partition_column_values:
+                # Partition column values are the same for all rows. Hence, only read once.
+                for i, val in enumerate(data_partition_column_indices):
+                    data_partition_column_values.append(
+                        convert_to_type(values[val], typee=data_partition_column_types[i])
+                    )
+
+                # Prepare the corresponding model file name and extract model.
+                partition_join = "_".join([str(x) for x in data_partition_column_values])
+                # Replace '-' with '_' because partition column values can be negative.
+                partition_join = partition_join.replace("-", "_")
+
+                model_file_path = f"{model_file_prefix}_{partition_join}" \
+                    if is_lake_system else \
+                    f"./{db}/{model_file_prefix}_{partition_join}"
+
+                with open(model_file_path, "rb") as fp:
+                    model = pickle.loads(fp.read())
+
+                if not model:
+                    sys.exit("Model file is not installed in Vantage.")
+
+    except EOFError:  # Exit if reached EOF or CTRL-D
+        break
+
+if len(features) == 0:
+    sys.exit(0)
+
+if labels:
+    val = getattr(model, func_name)(np.array(features), np.array(labels))
+else:
+    val = getattr(model, func_name)(np.array(features))
+
+result_val = ['' if (val is None or math.isnan(val) or math.isinf(val)) else val]
+print(*(data_partition_column_values + result_val), sep=DELIMITER)
@@ -0,0 +1,179 @@
+import pickle
+import math
+import os
+import sys
+import numpy as np
+
+# The below import is needed to convert sparse matrix to dense array as sparse matrices are NOT
+# supported in Vantage.
+# This is in scipy 1.10.0. Might vary based on scipy version.
+from scipy.sparse import csr_matrix
+
+DELIMITER = '\t'
+
+def get_value(value):
+    ret_val = value
+    try:
+        ret_val = float(value.replace(' ', ''))
+    except Exception as ex:
+        # If the value can't be converted to float, then it is a string.
+        pass
+    return ret_val
+
+
+def get_values_list(values, ignore_none=True):
+    ret_vals = []
+    for val in values:
+        if val == "" and ignore_none:
+            # Empty cell value in the database table.
+            continue
+        ret_vals.append(get_value(val))
+
+    return ret_vals
+
+def convert_to_type(val, typee):
+    if typee == 'int':
+        return int(val)
+    if typee == 'float':
+        return float(val)
+    if typee == 'bool':
+        return eval(val)
+    return str(val)
+
+def splitter(strr, delim=",", convert_to="str"):
+    """
+    Split the string based on delimiter and convert to the type specified.
+    """
+    if strr == "None":
+        return []
+    return [convert_to_type(i, convert_to) for i in strr.split(delim)]
+
+# Process output returned by sklearn function.
+def get_output_data(trans_values, func_name, model_obj, n_c_labels):
+    # Converting sparse matrix to dense array as sparse matrices are NOT
+    # supported in Vantage.
+    module_name = model_obj.__module__.split("._")[0]
+
+    if isinstance(trans_values, csr_matrix):
+        trans_values = trans_values.toarray()
+
+    if module_name == "sklearn.cross_decomposition" and n_c_labels > 0 and func_name == "transform":
+        # For cross_decomposition, output is a tuple of arrays when label columns are provided
+        # along with feature columns for transform function. In this case, concatenate the
+        # arrays and return the combined values.
+        if isinstance(trans_values, tuple):
+            return np.concatenate(trans_values, axis=1).tolist()[0]
+
+    if isinstance(trans_values[0], np.ndarray) \
+            or isinstance(trans_values[0], list) \
+            or isinstance(trans_values[0], tuple):
+        # Here, the value returned by sklearn function is list type.
+        opt_list = list(trans_values[0])
+        if func_name == "inverse_transform" and type(model_obj).__name__ == "MultiLabelBinarizer":
+            # Output array "trans_values[0]" may not be of the same size. It should be of
+            # maximum size of `model.classes_`.
+            # Append empty strings as the last elements.
+            if len(opt_list) < len(model_obj.classes_):
+                opt_list += [""] * (len(model_obj.classes_) - len(opt_list))
+        return opt_list
+    return [trans_values[0]]
+
+# Arguments to the Script
+if len(sys.argv) != 8:
+    # 8 command line arguments should be passed to this file.
+    # 1. File to be run.
+    # 2. Function name (e.g. predict, fit etc).
+    # 3. No of feature columns.
+    # 4. No of class labels.
+    # 5. Comma separated indices of partition columns.
+    # 6. Comma separated types of the partition columns.
+    # 7. Model file prefix to generate model file using partition columns.
+    # 8. Flag to check the system type. True means Lake, Enterprise otherwise.
+    sys.exit("8 arguments should be passed to this file - file to be run, function name, "\
+             "no of feature columns, no of class labels, comma separated indices and types of "\
+             "partition columns, model file prefix to generate model file using partition "\
+             "columns and flag to check lake or enterprise.")
+
+is_lake_system = eval(sys.argv[7])
+if not is_lake_system:
+    db = sys.argv[0].split("/")[1]
+func_name = sys.argv[1]
+n_f_cols = int(sys.argv[2])
+n_c_labels = int(sys.argv[3])
+data_partition_column_types = splitter(sys.argv[5])
+data_partition_column_indices = splitter(sys.argv[4], convert_to="int")  # indices are integers.
+model_file_prefix = sys.argv[6]
+
+model = None
+data_partition_column_values = []
+
+# Data Format:
+# feature1, feature2, ..., featuren, label1, label2, ... labelk, data_partition_column1, ...,
+# data_partition_columnn.
+# label is optional (it is present when label_exists is not "None")
+
+while 1:
+    try:
+        line = input()
+        if line == '':  # Exit if user provides blank line
+            break
+        else:
+            values = line.split(DELIMITER)
+            if not data_partition_column_values:
+                # Partition column values are the same for all rows. Hence, only read once.
+                for i, val in enumerate(data_partition_column_indices):
+                    data_partition_column_values.append(
+                        convert_to_type(values[val], typee=data_partition_column_types[i])
+                    )
+
+                # Prepare the corresponding model file name and extract model.
+                partition_join = "_".join([str(x) for x in data_partition_column_values])
+                # Replace '-' with '_' because partition column values can be negative.
+                partition_join = partition_join.replace("-", "_")
+
+                model_file_path = f"{model_file_prefix}_{partition_join}" \
+                    if is_lake_system else \
+                    f"./{db}/{model_file_prefix}_{partition_join}"
+
+                with open(model_file_path, "rb") as fp:
+                    model = pickle.loads(fp.read())
+
+                if not model:
+                    sys.exit("Model file is not installed in Vantage.")
+
+            f_ = get_values_list(values[:n_f_cols])
+            if n_c_labels > 0:
+                # Labels are present in the last columns.
+                l_ = get_values_list(values[n_f_cols:n_f_cols+n_c_labels])
+                # predict() now takes 'y' also for it to return the labels from the script.
+                # Generally, 'y' is passed to return y along with the actual output.
+                try:
+                    # cross_decomposition functions use Y for labels.
+                    # Used 'in' in the if condition, as model.__module__ gives
+                    # 'sklearn.cross_decomposition._pls'.
+                    if "cross_decomposition" in model.__module__:
+                        trans_values = getattr(model, func_name)(X=np.array([f_]), Y=np.array([l_]))
+                    else:
+                        trans_values = getattr(model, func_name)(X=np.array([f_]), y=np.array([l_]))
+
+                except TypeError as ex:
+                    # Functions which do not accept 'y', like predict_proba(), raise an error like
+                    # "TypeError: predict_proba() takes 2 positional arguments but 3 were given".
+                    trans_values = getattr(model, func_name)(np.array([f_]))
+            else:
+                # If class labels do not exist in data, don't read labels, read just features.
+                trans_values = getattr(model, func_name)(np.array([f_]))
+
+            result_list = f_
+            if n_c_labels > 0 and func_name in ["predict", "decision_function"]:
+                result_list += l_
+            result_list += get_output_data(trans_values=trans_values, func_name=func_name,
+                                           model_obj=model, n_c_labels=n_c_labels)
+
+            print(*(data_partition_column_values +
+                    ['' if (val is None or math.isnan(val) or math.isinf(val))
+                     else val for val in result_list]),
+                  sep=DELIMITER)
+
+    except EOFError:  # Exit if reached EOF or CTRL-D
+        break
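One detail worth calling out in get_output_data() above: several scikit-learn estimators return scipy csr_matrix objects by default, and the script densifies them because, as its comments note, sparse matrices are not supported in Vantage. The snippet below is a standalone illustration of that behaviour, not code from the package; OneHotEncoder is simply a convenient example of a transformer with sparse output.

# Standalone illustration (assumed example, not from the diff) of the
# csr_matrix -> dense conversion the script performs before printing.
import numpy as np
from scipy.sparse import csr_matrix
from sklearn.preprocessing import OneHotEncoder

enc = OneHotEncoder().fit(np.array([["a"], ["b"], ["c"]]))
out = enc.transform(np.array([["b"]]))

print(isinstance(out, csr_matrix))  # True: sparse output by default
print(out.toarray()[0].tolist())    # [0.0, 1.0, 0.0] - the dense row a script would emit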
@@ -1200,5 +1200,77 @@
         "column_name": "VARCHAR(20)",
         "category": "VARCHAR(50)",
         "ordinal_value": "INTEGER"
+    },
+    "insurance":{
+        "age":"INTEGER",
+        "sex":"VARCHAR(20)",
+        "bmi":"FLOAT",
+        "children":"INTEGER",
+        "smoker":"VARCHAR(20)",
+        "region":"VARCHAR(20)",
+        "charges":"FLOAT"
+    },
+    "bank_churn":{
+        "customer_id":"BIGINT",
+        "credit_score":"BIGINT",
+        "country":"VARCHAR(256)",
+        "gender":"varchar(20)",
+        "age":"INTEGER",
+        "tenure":"BIGINT",
+        "balance":"FLOAT",
+        "products_number":"BIGINT",
+        "credit_card":"BIGINT",
+        "active_member":"BIGINT",
+        "estimated_salary":"FLOAT",
+        "churn":"BIGINT"
+    },
+    "wine_data":{
+        "fixed_acidity":"FLOAT",
+        "volatile_acidity":"FLOAT",
+        "citric_acid":"FLOAT",
+        "residual_sugar":"FLOAT",
+        "chlorides":"FLOAT",
+        "free_sulfur_dioxide":"FLOAT",
+        "total_sulfur_dioxide":"FLOAT",
+        "density":"FLOAT",
+        "pH":"FLOAT",
+        "sulphates":"FLOAT",
+        "alcohol":"FLOAT",
+        "quality":"VARCHAR(20)"
+    },
+    "fish":{
+        "Species":"VARCHAR(20)",
+        "Weight":"INTEGER",
+        "Length1":"FLOAT",
+        "Length2":"FLOAT",
+        "Length3":"FLOAT",
+        "Height":"FLOAT",
+        "Width":"FLOAT"
+    },
+    "iris_data":{
+        "sepal_length":"FLOAT",
+        "sepal_width":"FLOAT",
+        "petal_length":"FLOAT",
+        "petal_width":"FLOAT",
+        "species":"VARCHAR(20)"
+    },
+    "glass_types":{
+        "ri":"FLOAT",
+        "na":"FLOAT",
+        "mg":"FLOAT",
+        "ai":"FLOAT",
+        "si":"FLOAT",
+        "k":"FLOAT",
+        "ca":"FLOAT",
+        "ba":"FLOAT",
+        "fe":"FLOAT",
+        "gtype":"INTEGER"
+    },
+    "bmi":{
+        "gender":"VARCHAR(20)",
+        "height":"INTEGER",
+        "weight":"INTEGER",
+        "bmi":"INTEGER"
     }
-
+
+}
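The hunk above extends teradataml_example.json with schemas for the new sample datasets (insurance, bank_churn, wine_data, fish, iris_data, glass_types, bmi) whose CSV files also appear in the file listing. Assuming these entries are consumed by load_example_data() in the same way as the existing ones, loading one of the new datasets would look roughly like the sketch below; the connection details are placeholders.

# Hedged usage sketch, not taken from the diff.
from teradataml import DataFrame, create_context, load_example_data

create_context(host="<host>", username="<user>", password="<password>")  # placeholder credentials

# "teradataml" is assumed to select teradataml_example.json; "insurance" is one
# of the table names registered in the hunk above.
load_example_data("teradataml", "insurance")

df = DataFrame("insurance")
print(df.head())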
@@ -0,0 +1,101 @@
+col1,col2,col3,col4,label
+-7.156025619387396e-02,2.295539000122874e-01,2.1654344712218576e-01,6.527397921673574e-02,1
+-4.032571038523639e-01,2.0061840569850093e+00,2.027512477119932e+00,8.508919440196765e-01,1
+-1.1305820619922704e+00,-2.0295925141421645e-02,-7.102336334648424e-01,-1.440991082992062e+00,0
+1.8332468205821462e-01,-7.74610353732039e-01,-7.66054694735782e-01,-2.9366863291253276e-01,0
+-2.8692000017174224e-01,-7.169529842687833e-01,-9.865850877151031e-01,-8.48214734984639e-01,0
+-2.5604297516143286e+00,4.0223233672431147e-01,-1.1007419820939432e+00,-2.9595882598466674e+00,0
+4.223414406917685e-01,-2.039114403027563e+00,-2.053215806414584e+00,-8.491230457662061e-01,0
+-5.097927128625588e-01,4.926589443964751e-01,2.482067293662461e-01,-3.095907315896897e-01,1
+7.216694959200303e-01,-1.1215566442946217e+00,-8.318398647044646e-01,1.5074209659533433e-01,0
+-9.861325665504175e-01,1.7105310292848412e+00,1.3382818041204743e+00,-8.534109029742931e-02,1
+-7.231680381760497e-02,-7.736683335839201e-01,-9.203832529446819e-01,-6.157487035381035e-01,0
+4.559464978500706e-01,-4.578687619407589e-01,-2.4152710035941571e-01,2.6519962031057953e-01,1
+3.9390607557342605e-01,3.902473455403299e-01,6.815206771027396e-01,7.618043278846556e-01,1
+2.031968254396686e+00,8.403986546299471e-01,2.1871813975599217e+00,3.1348238328210307e+00,1
+4.5893100825733946e-02,-2.614033916761356e-01,-2.682252643663885e-01,-1.1926611814335958e-01,1
+3.734361597739779e-01,-1.0738263398394476e+00,-9.894492879424683e-01,-2.565077828985802e-01,0
+-1.2011443492922544e+00,1.1724106094128572e-01,-5.973218447748447e-01,-1.436834012027966e+00,0
+-1.308191714615154e+00,-4.3265955878063e-01,-1.2853288297891097e+00,-1.9447377443527114e+00,0
+5.5626974301926e-01,-5.842642261304858e-01,-3.2372692196790664e-01,3.0616506646092834e-01,1
+-1.7950634462096733e+00,3.929047275333263e+00,3.3597325348675415e+00,3.9736974686402515e-01,1
+-1.3081345089927474e+00,1.5169724178877273e+00,9.232814514163201e-01,-6.230425160474049e-01,1
+-2.380763938123808e+00,1.1371277773786237e+00,-1.5902752431047806e-01,-2.2346069917229e+00,0
+-4.86612461597536e-01,-1.9216172048359466e+00,-2.4726349400207184e+00,-1.916925819699851e+00,0
+4.329945316598389e-01,2.2034637129059273e-01,5.128145618207097e-01,6.959760736131354e-01,1
+1.2678043992138572e+00,-1.8017079299088052e+00,-1.2703498629717205e+00,3.791128277285917e-01,0
+1.1050264748849479e+00,-1.9498938738833806e+00,-1.5371644824583068e+00,7.317120819146172e-02,1
+1.3405762424958136e+00,-8.145933633680679e-01,-1.0789456676958853e-01,1.1401276101958635e+00,1
+-6.795980112953546e-01,1.0394390740067625e+00,7.643895292440813e-01,-1.5329770651272379e-01,1
+-1.8289652217246832e-01,-1.0129018665883924e+00,-1.258608594846326e+00,-9.175155960168716e-01,0
+1.1069863727547826e+00,-1.067015728836049e+00,-5.358340907741215e-01,6.741337341703041e-01,1
+5.448188127966272e-01,-1.3333047607577708e+00,-1.1792131172184188e+00,-2.1605253663187984e-01,1
+1.555250600398339e+00,2.562532716572834e-01,1.2356714816573484e+00,2.1370389455234107e+00,1
+-3.4553805148756467e-01,-2.2967233366922146e+00,-2.8118071037996795e+00,-1.9931134219738014e+00,0
+1.120734838779243e+00,-3.247441907424656e-01,3.133806659361569e-01,1.1946631281496634e+00,1
+-9.071974278788116e-01,3.8781319921728025e-01,-1.1213297416928425e-01,-8.82342711984023e-01,1
+1.878348872411074e+00,5.772885191549086e-01,1.7957459076734437e+00,2.7625386504818605e+00,1
+1.4337012132131193e+00,-1.7542398362245135e+00,-1.1157342322226846e+00,6.207167434763821e-01,0
+3.802515663893694e-01,2.0295669683967534e+00,2.530269084921403e+00,1.855834485876084e+00,1
+-1.2573206891835977e+00,-2.1486101200899306e+00,-3.198263394150648e+00,-3.043733068054326e+00,0
+6.876617604053404e-01,-1.3659201799604759e+00,-1.1293310758797774e+00,-5.78388149501351e-02,0
+-1.1045395160596065e+00,2.1014117205877856e+00,1.7091024231467271e+00,3.0154944776507597e-02,1
+-7.934701915546589e-01,1.2666241997891345e+00,9.52529621622912e-01,-1.4304352954382915e-01,0
+-1.2437812638415866e+00,2.808211117074704e-01,-4.3793316291108564e-01,-1.3797701364848058e+00,0
+4.171803637611743e-01,-1.1678611541651076e+00,-1.069382886775273e+00,-2.650302736268299e-01,0
+-4.2276158128364316e-01,-7.692135037774855e-01,-1.1283601124999758e+00,-1.0551260019803728e+00,0
+-2.475534022965409e-03,3.9979927136949633e-01,4.513944673876453e-01,2.6789217705377905e-01,1
+4.866811876964725e-01,-1.547405674112523e+00,-1.4570900594036427e+00,-4.3458019526275143e-01,0
+-6.054157973815384e-01,-2.959705989254669e-01,-7.032919195935738e-01,-9.649029503155023e-01,0
+-1.6613005248968182e+00,2.3920366515222007e+00,1.6998912473108954e+00,-4.756834722241752e-01,1
+-1.4024688627878925e+00,-1.1285650300388772e-01,-9.80358458750089e-01,-1.8469628905866908e+00,0
+1.7762447906304968e+00,8.014786986933157e-01,1.9876466987502077e+00,2.7856185053131712e+00,1
+-3.5875462239471156e+00,2.9181935785016044e-01,-1.850168527344012e+00,-4.331054510250071e+00,0
+-8.6069961887752e-01,-3.0560569985919417e-01,-8.693850044520386e-01,-1.2937015638901872e+00,0
+-2.5215955002082238e+00,2.478225544122823e+00,1.2745836381384683e+00,-1.503283196868372e+00,1
+-2.680512096708636e-01,-2.510822715809212e-01,-4.4736931037076955e-01,-5.085890180828015e-01,0
+1.4704989233220236e+00,1.277797620936113e+00,2.341376260856668e+00,2.722537359608725e+00,1
+-1.6207337470423404e+00,-1.0921935526093054e+00,-2.2224434857026436e+00,-2.7863740396742367e+00,0
+9.299503177551005e-01,-1.2885146301716626e+00,-8.94365875666622e-01,3.0049519527933966e-01,0
+-2.0572167152672716e-01,1.7589532053530663e+00,1.867520275756581e+00,9.326645584872928e-01,1
+2.028990234337061e+00,-1.6696358577770876e-01,1.0442144720383355e+00,2.448190349139855e+00,1
+4.2434508106371127e-01,-1.278059889065437e+00,-1.1898626592786472e+00,-3.3068750825746973e-01,0
+1.2319505503720647e+00,-1.5394952592671587e+00,-9.951053168689495e-01,5.11600970144431e-01,0
+1.9769890149980527e+00,-2.1326410553140835e+00,-1.21414739515473e+00,1.0500446681804023e+00,0
+7.092165932574367e-01,-1.481739570005534e+00,-1.2474307620913612e+00,-1.091402940812165e-01,0
+-9.615415551232644e-01,1.6014346799820025e+00,1.2296436790616394e+00,-1.2825229790994352e-01,1
+2.6332591353317136e+00,1.9968582753260276e+00,3.8627421878639634e+00,4.677830701373434e+00,1
+-1.2326739561548532e+00,1.1655623753837756e+00,5.710681973205789e-01,-7.659970564179892e-01,0
+2.115647338564104e+00,-1.142415827390722e+00,-8.11815766831625e-03,1.8963423940631976e+00,1
+-1.6218421243563732e+00,1.7133806672783916e+00,9.550838772751624e-01,-8.8592125248134e-01,1
+1.7944511285596305e+00,-1.7603283965924263e+00,-9.033439498079894e-01,1.0719956418304486e+00,0
+-4.3784329530910715e-02,-1.1011002600500417e+00,-1.273959668620149e+00,-8.016903048376662e-01,0
+-1.0486202319833615e-01,2.771360265873167e-01,2.502022685945151e-01,5.548937069062941e-02,1
+1.7803747427888172e+00,-1.7499486432213605e+00,-9.001421449524415e-01,1.061262048267582e+00,0
+3.5117341307021976e-01,-9.789691163230871e-01,-8.955262089440742e-01,-2.203098686670774e-01,0
+5.593131963144458e-01,2.9407023239132535e-01,6.731149622919457e-01,9.054153700658307e-01,1
+-2.8978824993367214e-01,-9.071101176908198e-01,-1.2037417597331295e+00,-9.807399284527443e-01,0
+-8.271824743359713e-01,1.5208867942038558e+00,1.2200699738201646e+00,-1.3241124747140454e-02,1
+-3.6974359976891638e+00,1.5768875645658011e+00,-4.6122012950176094e-01,-3.5986516890499547e+00,0
+-8.640081290830173e-01,-7.952928105950496e-02,-6.15293009552834e-01,-1.1446247511104104e+00,0
+-2.08311803057051e-01,8.624849330033639e-01,8.504116645498019e-01,3.2169391260614677e-01,1
+-1.001711293894727e-01,2.1778421202365075e+00,2.406205156499104e+00,1.3498680692557434e+00,1
+1.8508239150610382e+00,-1.6824992225408963e+00,-7.80910683743241e-01,1.1959190498938077e+00,0
+7.338999623284566e-01,-5.143119923571249e-01,-1.3650854882454633e-01,5.77822711680621e-01,1
+-8.695369519005374e-01,1.9989687710081492e+00,1.7359033485741289e+00,2.573749080243791e-01,1
+1.3532962753060973e+00,-1.2874410336483981e+00,-6.358121950079192e-01,8.356496699669813e-01,0
+-8.002210588472312e-01,-1.7356949681552813e-01,-6.830493560168442e-01,-1.1278490368861847e+00,1
+2.4254808486506256e+00,-5.498915480430562e-01,8.5144036205131e-01,2.689134716018992e+00,1
+1.2666139424327671e+00,-1.6660989176826315e+00,-1.1174533578348345e+00,4.6953709831874846e-01,0
+-3.211053667726502e-01,4.1328802958184946e-01,2.729906061625348e-01,-1.2519837916582902e-01,1
+8.658254600723946e-01,-1.4784947067277336e+00,-1.1485577664230358e+00,9.07604413995754e-02,0
+-7.628844164635872e-01,-8.717458260209808e-01,-1.4512594413079423e+00,-1.5539978148072322e+00,0
+-4.4358418688521395e-01,9.942821241511963e-01,8.566996977399313e-01,1.1403208330863496e-01,1
+-2.2011901612056937e+00,2.867239546533001e+00,1.9100285859432384e+00,-8.351013526293585e-01,1
+1.8324086110597337e+00,-1.9101544353699098e+00,-1.049996324589264e+00,1.018348124556209e+00,0
+-3.545288721030091e-01,1.212189150478433e-01,-7.818714416774197e-02,-3.653805823167333e-01,1
+-1.972427560720223e+00,1.5208760827120622e+00,5.23902617748198e-01,-1.4589916202641877e+00,1
+-1.0528659778077893e+00,-6.415151124325393e-01,-1.366720111082726e+00,-1.7639973894652614e+00,0
+-4.718241674174253e-01,4.4496832895711996e-01,2.1726178451322842e-01,-2.939883824642023e-01,1
+-1.9464385931107386e-01,1.296884427177927e+00,1.3508149060562555e+00,6.334203305402286e-01,1
+-3.5996730357206175e-01,-1.0547372143709348e+00,-1.413635629086208e+00,-1.1694070454905325e+00,0