teradataml 20.0.0.1__py3-none-any.whl → 20.0.0.3__py3-none-any.whl
This diff compares the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Potentially problematic release.
- teradataml/LICENSE-3RD-PARTY.pdf +0 -0
- teradataml/LICENSE.pdf +0 -0
- teradataml/README.md +306 -0
- teradataml/__init__.py +10 -3
- teradataml/_version.py +1 -1
- teradataml/analytics/__init__.py +3 -2
- teradataml/analytics/analytic_function_executor.py +299 -16
- teradataml/analytics/analytic_query_generator.py +92 -0
- teradataml/analytics/byom/__init__.py +3 -2
- teradataml/analytics/json_parser/metadata.py +13 -3
- teradataml/analytics/json_parser/utils.py +13 -6
- teradataml/analytics/meta_class.py +40 -1
- teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
- teradataml/analytics/sqle/__init__.py +11 -2
- teradataml/analytics/table_operator/__init__.py +4 -3
- teradataml/analytics/uaf/__init__.py +21 -2
- teradataml/analytics/utils.py +66 -1
- teradataml/analytics/valib.py +1 -1
- teradataml/automl/__init__.py +1502 -323
- teradataml/automl/custom_json_utils.py +139 -61
- teradataml/automl/data_preparation.py +247 -307
- teradataml/automl/data_transformation.py +32 -12
- teradataml/automl/feature_engineering.py +325 -86
- teradataml/automl/model_evaluation.py +44 -35
- teradataml/automl/model_training.py +122 -153
- teradataml/catalog/byom.py +8 -8
- teradataml/clients/pkce_client.py +1 -1
- teradataml/common/__init__.py +2 -1
- teradataml/common/constants.py +72 -0
- teradataml/common/deprecations.py +13 -7
- teradataml/common/garbagecollector.py +152 -120
- teradataml/common/messagecodes.py +11 -2
- teradataml/common/messages.py +4 -1
- teradataml/common/sqlbundle.py +26 -4
- teradataml/common/utils.py +225 -14
- teradataml/common/wrapper_utils.py +1 -1
- teradataml/context/context.py +82 -2
- teradataml/data/SQL_Fundamentals.pdf +0 -0
- teradataml/data/complaints_test_tokenized.csv +353 -0
- teradataml/data/complaints_tokens_model.csv +348 -0
- teradataml/data/covid_confirm_sd.csv +83 -0
- teradataml/data/dataframe_example.json +27 -1
- teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
- teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
- teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
- teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
- teradataml/data/docs/sqle/docs_17_20/Shap.py +203 -0
- teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
- teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
- teradataml/data/docs/sqle/docs_17_20/TextParser.py +3 -3
- teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
- teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
- teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
- teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
- teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
- teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
- teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
- teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
- teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
- teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
- teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
- teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
- teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
- teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
- teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
- teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +18 -21
- teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
- teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
- teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
- teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
- teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
- teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
- teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
- teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
- teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
- teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
- teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
- teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
- teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
- teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
- teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
- teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
- teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
- teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
- teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
- teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
- teradataml/data/dwt2d_dataTable.csv +65 -0
- teradataml/data/dwt_dataTable.csv +8 -0
- teradataml/data/dwt_filterTable.csv +3 -0
- teradataml/data/finance_data4.csv +13 -0
- teradataml/data/grocery_transaction.csv +19 -0
- teradataml/data/idwt2d_dataTable.csv +5 -0
- teradataml/data/idwt_dataTable.csv +8 -0
- teradataml/data/idwt_filterTable.csv +3 -0
- teradataml/data/interval_data.csv +5 -0
- teradataml/data/jsons/paired_functions.json +14 -0
- teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
- teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
- teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
- teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
- teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
- teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
- teradataml/data/jsons/sqle/17.20/TD_TextParser.json +1 -1
- teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
- teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
- teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
- teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
- teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
- teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
- teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
- teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
- teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
- teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
- teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
- teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
- teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
- teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
- teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
- teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
- teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
- teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
- teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
- teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
- teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
- teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +10 -19
- teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
- teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
- teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
- teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
- teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
- teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
- teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
- teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
- teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
- teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
- teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
- teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
- teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
- teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
- teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
- teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
- teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
- teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
- teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
- teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
- teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
- teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
- teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
- teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
- teradataml/data/load_example_data.py +8 -2
- teradataml/data/medical_readings.csv +101 -0
- teradataml/data/naivebayestextclassifier_example.json +1 -1
- teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
- teradataml/data/patient_profile.csv +101 -0
- teradataml/data/peppers.png +0 -0
- teradataml/data/real_values.csv +14 -0
- teradataml/data/sax_example.json +8 -0
- teradataml/data/scripts/deploy_script.py +1 -1
- teradataml/data/scripts/lightgbm/dataset.template +157 -0
- teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +247 -0
- teradataml/data/scripts/lightgbm/lightgbm_function.template +216 -0
- teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +159 -0
- teradataml/data/scripts/sklearn/sklearn_fit.py +194 -160
- teradataml/data/scripts/sklearn/sklearn_fit_predict.py +136 -115
- teradataml/data/scripts/sklearn/sklearn_function.template +34 -16
- teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +155 -137
- teradataml/data/scripts/sklearn/sklearn_neighbors.py +1 -1
- teradataml/data/scripts/sklearn/sklearn_score.py +12 -3
- teradataml/data/scripts/sklearn/sklearn_transform.py +162 -24
- teradataml/data/star_pivot.csv +8 -0
- teradataml/data/target_udt_data.csv +8 -0
- teradataml/data/templates/open_source_ml.json +3 -1
- teradataml/data/teradataml_example.json +20 -1
- teradataml/data/timestamp_data.csv +4 -0
- teradataml/data/titanic_dataset_unpivoted.csv +19 -0
- teradataml/data/uaf_example.json +55 -1
- teradataml/data/unpivot_example.json +15 -0
- teradataml/data/url_data.csv +9 -0
- teradataml/data/vectordistance_example.json +4 -0
- teradataml/data/windowdfft.csv +16 -0
- teradataml/dataframe/copy_to.py +1 -1
- teradataml/dataframe/data_transfer.py +5 -3
- teradataml/dataframe/dataframe.py +1002 -201
- teradataml/dataframe/fastload.py +3 -3
- teradataml/dataframe/functions.py +867 -0
- teradataml/dataframe/row.py +160 -0
- teradataml/dataframe/setop.py +2 -2
- teradataml/dataframe/sql.py +840 -33
- teradataml/dataframe/window.py +1 -1
- teradataml/dbutils/dbutils.py +878 -34
- teradataml/dbutils/filemgr.py +48 -1
- teradataml/geospatial/geodataframe.py +1 -1
- teradataml/geospatial/geodataframecolumn.py +1 -1
- teradataml/hyperparameter_tuner/optimizer.py +13 -13
- teradataml/lib/aed_0_1.dll +0 -0
- teradataml/opensource/__init__.py +1 -1
- teradataml/opensource/{sklearn/_class.py → _class.py} +102 -17
- teradataml/opensource/_lightgbm.py +950 -0
- teradataml/opensource/{sklearn/_wrapper_utils.py → _wrapper_utils.py} +1 -2
- teradataml/opensource/{sklearn/constants.py → constants.py} +13 -10
- teradataml/opensource/sklearn/__init__.py +0 -1
- teradataml/opensource/sklearn/_sklearn_wrapper.py +1019 -574
- teradataml/options/__init__.py +9 -23
- teradataml/options/configure.py +42 -4
- teradataml/options/display.py +2 -2
- teradataml/plot/axis.py +4 -4
- teradataml/scriptmgmt/UserEnv.py +13 -9
- teradataml/scriptmgmt/lls_utils.py +77 -23
- teradataml/store/__init__.py +13 -0
- teradataml/store/feature_store/__init__.py +0 -0
- teradataml/store/feature_store/constants.py +291 -0
- teradataml/store/feature_store/feature_store.py +2223 -0
- teradataml/store/feature_store/models.py +1505 -0
- teradataml/store/vector_store/__init__.py +1586 -0
- teradataml/table_operators/Script.py +2 -2
- teradataml/table_operators/TableOperator.py +106 -20
- teradataml/table_operators/query_generator.py +3 -0
- teradataml/table_operators/table_operator_query_generator.py +3 -1
- teradataml/table_operators/table_operator_util.py +102 -56
- teradataml/table_operators/templates/dataframe_register.template +69 -0
- teradataml/table_operators/templates/dataframe_udf.template +63 -0
- teradataml/telemetry_utils/__init__.py +0 -0
- teradataml/telemetry_utils/queryband.py +52 -0
- teradataml/utils/dtypes.py +4 -2
- teradataml/utils/validators.py +34 -2
- {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/METADATA +311 -3
- {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/RECORD +240 -157
- {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/WHEEL +0 -0
- {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/top_level.txt +0 -0
- {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/zip-safe +0 -0
teradataml/data/docs/sqle/docs_17_20/TFIDF.py
@@ -0,0 +1,142 @@
+def TFIDF(data = None, doc_id_column = None, token_column = None,
+          tf_normalization = "NORMAL", idf_normalization = "LOG",
+          regularization = "NONE", accumulate = None,
+          **generic_arguments):
+
+    """
+    DESCRIPTION:
+        Function takes any document set and computes the Term Frequency (TF),
+        Inverse Document Frequency (IDF), and Term Frequency Inverse Document
+        Frequency (TF-IDF) scores for each term.
+
+    PARAMETERS:
+        data:
+            Required Argument.
+            Specifies the input teradataml DataFrame that contains
+            the document id and the term.
+            Types: teradataml DataFrame
+
+        doc_id_column:
+            Required Argument.
+            Specifies the name of the column in "data" that contains the
+            document identifier.
+            Types: str
+
+        token_column:
+            Required Argument.
+            Specifies the name of the column in "data" that contains the tokens.
+            Types: str
+
+        tf_normalization:
+            Optional Argument.
+            Specifies the normalization method for calculating the term frequency (TF).
+            Default Value: "NORMAL"
+            Permitted Values: BOOL, COUNT, NORMAL, LOG, AUGMENT
+            Types: str
+
+        idf_normalization:
+            Optional Argument.
+            Specifies the normalization method for calculating the inverse
+            document frequency (IDF).
+            Default Value: "LOG"
+            Permitted Values: UNARY, LOG, LOGNORM, SMOOTH
+            Types: str
+
+        regularization:
+            Optional Argument.
+            Specifies the regularization method for calculating the TF-IDF score.
+            Default Value: "NONE"
+            Permitted Values: L2, L1, NONE
+            Types: str
+
+        accumulate:
+            Optional Argument.
+            Specifies the name(s) of input teradataml DataFrame column(s) to copy to the
+            output.
+            Types: str OR list of Strings (str)
+
+        **generic_arguments:
+            Specifies the generic keyword arguments SQLE functions accept. Below
+            are the generic keyword arguments:
+            persist:
+                Optional Argument.
+                Specifies whether to persist the results of the
+                function in a table or not. When set to True,
+                results are persisted in a table; otherwise,
+                results are garbage collected at the end of the
+                session.
+                Default Value: False
+                Types: bool
+
+            volatile:
+                Optional Argument.
+                Specifies whether to put the results of the
+                function in a volatile table or not. When set to
+                True, results are stored in a volatile table,
+                otherwise not.
+                Default Value: False
+                Types: bool
+
+            Function allows the user to partition, hash, order or local
+            order the input data. These generic arguments are available
+            for each argument that accepts teradataml DataFrame as
+            input and can be accessed as:
+                * "<input_data_arg_name>_partition_column" accepts str or
+                  list of str (Strings)
+                * "<input_data_arg_name>_hash_column" accepts str or list
+                  of str (Strings)
+                * "<input_data_arg_name>_order_column" accepts str or list
+                  of str (Strings)
+                * "local_order_<input_data_arg_name>" accepts boolean
+            Note:
+                These generic arguments are supported by teradataml if
+                the underlying SQL Engine function supports, else an
+                exception is raised.
+
+    RETURNS:
+        Instance of TFIDF.
+        Output teradataml DataFrames can be accessed using attribute
+        references, such as TFIDFObj.<attribute_name>.
+        Output teradataml DataFrame attribute name is:
+            result
+
+
+    RAISES:
+        TeradataMlException, TypeError, ValueError
+
+
+    EXAMPLES:
+        # Notes:
+        #     1. Get the connection to Vantage, before importing the
+        #        function in user space.
+        #     2. User can import the function, if it is available on
+        #        Vantage user is connected to.
+        #     3. To check the list of analytic functions available on
+        #        Vantage user connected to, use
+        #        "display_analytic_functions()".
+
+        # Load the example data.
+        load_example_data('naivebayestextclassifier',"token_table")
+
+        # Create teradataml DataFrame objects.
+        inp = DataFrame.from_table('token_table')
+
+        # Check the list of available analytic functions.
+        display_analytic_functions()
+
+        # Import function TFIDF.
+        from teradataml import TFIDF
+
+        # Example 1 : Compute the TF, IDF and TF-IDF scores
+        #             for each term in the input data.
+        TFIDF_out = TFIDF(data=inp,
+                          doc_id_column='doc_id',
+                          token_column='token',
+                          tf_normalization = "LOG",
+                          idf_normalization = "SMOOTH",
+                          regularization = "L2",
+                          accumulate=['category'])
+
+        # Print the result DataFrame.
+        print(TFIDF_out.result)
+    """
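For readers skimming the diff, the option names above follow the usual TF-IDF conventions. The snippet below is a plain-Python illustration of one common reading of the "LOG" term frequency, "SMOOTH" inverse document frequency, and "L2" regularization choices used in Example 1; it is only a local sketch with a made-up `docs` dictionary, not the TD_TFIDF implementation, whose exact formulas are defined by the Vantage documentation.

```python
# Local, illustrative TF-IDF with "LOG" TF, "SMOOTH" IDF and "L2" regularization.
# The in-database TD_TFIDF function may define these options differently.
import math
from collections import Counter

docs = {1: ["credit", "card", "late", "fee"],   # toy documents, invented for this sketch
        2: ["late", "payment", "charge"]}

n_docs = len(docs)
df = Counter(tok for toks in docs.values() for tok in set(toks))  # document frequency

def tfidf(doc_id):
    counts = Counter(docs[doc_id])
    tf = {t: 1.0 + math.log(c) for t, c in counts.items()}             # "LOG" TF
    idf = {t: math.log(n_docs / (1.0 + df[t])) + 1.0 for t in counts}  # "SMOOTH" IDF
    raw = {t: tf[t] * idf[t] for t in counts}
    norm = math.sqrt(sum(v * v for v in raw.values())) or 1.0          # "L2" regularization
    return {t: v / norm for t, v in raw.items()}

print(tfidf(1))
```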
teradataml/data/docs/sqle/docs_17_20/TextParser.py
@@ -1,4 +1,4 @@
-def TextParser(data=None, object=None, text_column=None,
+def TextParser(data=None, object=None, text_column=None, convert_to_lowercase=True, stem_tokens=False,
                remove_stopwords=False, accumulate=None, delimiter=" \t\n\f\r",
                punctuation="!#$%&()*+,-./:;?@\^_`{|}~", token_col_name=None, **generic_arguments):
     """
@@ -38,7 +38,7 @@ def TextParser(data=None, object=None, text_column=None, covert_to_lowercase=Tru
             Specifies the name of the input data column whose contents are to be tokenized.
             Types: str
 
-        covert_to_lowercase:
+        convert_to_lowercase:
             Optional Argument.
             Specifies whether to convert the text in "text_column" to lowercase.
             Default Value: True
@@ -165,7 +165,7 @@ def TextParser(data=None, object=None, text_column=None, covert_to_lowercase=Tru
         # Example 2 : Convert words in "text_data" column into their root forms.
         TextParser_out = TextParser(data=complaints,
                                     text_column="text_data",
-                                    covert_to_lowercase=True,
+                                    convert_to_lowercase=True,
                                     stem_tokens=True)
 
        # Print the result DataFrame.
teradataml/data/docs/sqle/docs_17_20/Unpivoting.py
@@ -0,0 +1,216 @@
+def Unpivoting(data = None, id_column = None, target_columns = None,
+               alias_names = None, attribute_column = "AttributeName", value_column = "AttributeValue",
+               accumulate = None, include_nulls = False, input_types = False, output_varchar = False,
+               indexed_attribute = False, include_datatypes = False,
+               **generic_arguments):
+
+    """
+    DESCRIPTION:
+        Function unpivots the data, that is, changes the data from
+        dense format to sparse format.
+
+    PARAMETERS:
+        data:
+            Required Argument.
+            Specifies the input teradataml DataFrame.
+            Types: teradataml DataFrame
+
+        id_column:
+            Required Argument.
+            Specifies the name of the column in "data" which contains the input data identifier.
+            Types: str
+
+        target_columns:
+            Required Argument.
+            Specifies the name(s) of input teradataml DataFrame column(s) which contains the data for
+            unpivoting.
+            Types: str OR list of Strings (str)
+
+            Optional Argument.
+            Specifies alternate names for the values in the 'attribute_column'.
+            Types: str OR list of strs
+
+        alias_names:
+            Optional Argument.
+            Specifies alternate names for the values in the 'attribute_column'.
+            column.
+            Types: str OR list of strs
+
+        attribute_column:
+            Optional Argument.
+            Specifies the name of the column in the output DataFrame, which holds the names of pivoted columns.
+            Default Value: "AttributeName"
+            Types: str
+
+        value_column:
+            Optional Argument.
+            Specifies the name of the column in the output DataFrame, which holds the values of pivoted columns.
+            Default Value: "AttributeValue"
+            Types: str
+
+        accumulate:
+            Optional Argument.
+            Specifies the name(s) of input teradataml DataFrame column(s) to copy to the output.
+            By default, the function copies no input teradataml DataFrame columns to the output.
+            Types: str OR list of Strings (str)
+
+        include_nulls:
+            Optional Argument.
+            Specifies whether or not to include nulls in the transformation.
+            Default Value: False
+            Types: bool
+
+        input_types:
+            Optional Argument.
+            Specifies whether attribute values should be organized into multiple columns based on data type groups.
+            Note:
+                * 'input_types' argument cannot be used when output_varchar is set to True.
+            Default Value: False
+            Types: bool
+
+        output_varchar:
+            Optional Argument.
+            Specifies whether to output the 'value_column' in varchar format regardless of its data type.
+            Note:
+                * 'output_varchar' argument cannot be used when input_types is set to True.
+            Default Value: False
+            Types: bool
+
+        indexed_attribute:
+            Optional Argument.
+            Specifies whether to output the column indexes instead of column names in AttributeName column.
+            When set to True, outputs the column indexes instead of column names.
+            Default Value: False
+            Types: bool
+
+        include_datatypes:
+            Optional Argument.
+            Specifies whether to output the original datatype name. When set to True,
+            outputs the original datatype name.
+            Default Value: False
+            Types: bool
+
+        **generic_arguments:
+            Specifies the generic keyword arguments SQLE functions accept. Below
+            are the generic keyword arguments:
+            persist:
+                Optional Argument.
+                Specifies whether to persist the results of the
+                function in a table or not. When set to True,
+                results are persisted in a table; otherwise,
+                results are garbage collected at the end of the
+                session.
+                Default Value: False
+                Types: bool
+
+            volatile:
+                Optional Argument.
+                Specifies whether to put the results of the
+                function in a volatile table or not. When set to
+                True, results are stored in a volatile table,
+                otherwise not.
+                Default Value: False
+                Types: bool
+
+            Function allows the user to partition, hash, order or local
+            order the input data. These generic arguments are available
+            for each argument that accepts teradataml DataFrame as
+            input and can be accessed as:
+                * "<input_data_arg_name>_partition_column" accepts str or
+                  list of str (Strings)
+                * "<input_data_arg_name>_hash_column" accepts str or list
+                  of str (Strings)
+                * "<input_data_arg_name>_order_column" accepts str or list
+                  of str (Strings)
+                * "local_order_<input_data_arg_name>" accepts boolean
+            Note:
+                These generic arguments are supported by teradataml if
+                the underlying SQL Engine function supports, else an
+                exception is raised.
+
+    RETURNS:
+        Instance of Unpivoting.
+        Output teradataml DataFrames can be accessed using attribute
+        references, such as UnpivotingObj.<attribute_name>.
+        Output teradataml DataFrame attribute name is:
+            result
+
+
+    RAISES:
+        TeradataMlException, TypeError, ValueError
+
+
+    EXAMPLES:
+        # Notes:
+        #     1. Get the connection to Vantage, before importing the
+        #        function in user space.
+        #     2. User can import the function, if it is available on
+        #        Vantage user is connected to.
+        #     3. To check the list of analytic functions available on
+        #        Vantage user connected to, use
+        #        "display_analytic_functions()".
+
+        # Load the example data.
+        load_example_data('unpivot', 'unpivot_input')
+
+        # Create teradataml DataFrame objects.
+        upvt_inp = DataFrame('unpivot_input')
+
+        # Check the list of available analytic functions.
+        display_analytic_functions()
+
+        # Import function Unpivoting.
+        from teradataml import Unpivoting
+
+        # Example 1 : Unpivot the data.
+        upvt1 = Unpivoting(data = upvt_inp,
+                           id_column = 'sn',
+                           target_columns = 'city',
+                           accumulate = 'week',
+                           include_nulls = True)
+
+        # Print the result DataFrame.
+        print( upvt1.result)
+
+        # Example 2 : Unpivot the data with alternate names for the values in
+        #             the AttributeName output column.
+        upvt2= Unpivoting(data = upvt_inp,
+                          id_column = 'sn',
+                          target_columns = 'city',
+                          alias_names = 'city_us',
+                          attribute_column = "Attribute",
+                          value_column = "value",
+                          accumulate = 'week',
+                          include_nulls = True)
+
+        # Print the result DataFrame.
+        print( upvt2.result)
+
+        # Example 3 : Unpivot the data with multiple target columns and output
+        #             data types.
+        upvt3 = Unpivoting(data = upvt_inp,
+                           id_column = 'sn',
+                           target_columns = ['city','pressure'],
+                           attribute_column = "Attribute",
+                           value_column = "value",
+                           accumulate = 'week',
+                           include_nulls = True,
+                           indexed_attribute = True,
+                           include_datatypes = True)
+
+        # Print the result DataFrame.
+        print( upvt3.result)
+
+        # Example 4 : Unpivot the data with multiple target columns and output
+        #             the input types.
+        upvt4 = Unpivoting(data = upvt_inp,
+                           id_column = 'sn',
+                           target_columns = ['city','temp'],
+                           accumulate = 'week',
+                           include_nulls = True,
+                           input_types = True)
+
+        # Print the result DataFrame.
+        print( upvt4.result)
+
+    """
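As a mental model for the "dense format to sparse format" reshape described above, a local pandas melt produces the same shape of output: an id column plus AttributeName/AttributeValue pairs. This is only a conceptual analogue with invented toy data, not the TD_Unpivoting implementation, and it ignores the Vantage-specific options (include_nulls, input_types, output_varchar, and so on).

```python
# Conceptual local analogue of unpivoting: wide ("dense") rows become
# (id, AttributeName, AttributeValue) triples. Illustration only.
import pandas as pd

wide = pd.DataFrame({"sn": [1, 2],
                     "week": [1, 1],
                     "city": ["sj", "ny"],
                     "temp": [45, 30]})

sparse = wide.melt(id_vars=["sn", "week"],
                   value_vars=["city", "temp"],
                   var_name="AttributeName",
                   value_name="AttributeValue")
print(sparse)
```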
teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py
@@ -0,0 +1,118 @@
+def Image2Matrix(data=None,
+                 output='gray',
+                 **generic_arguments):
+    """
+    DESCRIPTION:
+        Image2Matrix() function converts an image to a matrix.
+        It converts JPEG or PNG images to matrixes with payload values being the pixel values.
+        Note:
+            * The image size cannot be greater than 16 MB.
+            * The image should not exceed 4,000,000 pixels.
+
+    PARAMETERS:
+        data:
+            Required Argument.
+            Specifies the teradataml DataFrame which has image details.
+            Types: Teradataml DataFrame
+
+        output:
+            Optional Argument.
+            Specifies the type of output matrix.
+            Default: 'gray'
+            Permitted Values:
+                'gray': Converts the image to a grayscale matrix.
+                'rgb': Converts the image to a RGB matrix.
+            Types: str
+
+        **generic_arguments:
+            Specifies the generic keyword arguments SQLE functions accept.
+            Below are the generic keyword arguments:
+            persist:
+                Optional Argument.
+                Specifies whether to persist the results of the function in table or not.
+                When set to True, results are persisted in table; otherwise, results
+                are garbage collected at the end of the session.
+                Default Value: False
+                Types: boolean
+
+            volatile:
+                Optional Argument.
+                Specifies whether to put the results of the function in volatile table or not.
+                When set to True, results are stored in volatile table, otherwise not.
+                Default Value: False
+                Types: boolean
+
+            Function allows the user to partition, hash, order or local order the input
+            data. These generic arguments are available for each argument that accepts
+            teradataml DataFrame as input and can be accessed as:
+                * "<input_data_arg_name>_partition_column" accepts str or list of str (Strings)
+                * "<input_data_arg_name>_hash_column" accepts str or list of str (Strings)
+                * "<input_data_arg_name>_order_column" accepts str or list of str (Strings)
+                * "local_order_<input_data_arg_name>" accepts boolean
+            Note:
+                These generic arguments are supported by teradataml if the underlying Analytic Database
+                function supports, else an exception is raised.
+
+    RETURNS:
+        Instance of Image2Matrix.
+        Output teradataml DataFrames can be accessed using attribute
+        references, such as Image2Matrix.<attribute_name>.
+        Output teradataml DataFrame attribute name is:
+            result
+
+    RAISES:
+        TeradataMlException, TypeError, ValueError
+
+    EXAMPLES:
+        # Notes:
+        #     1. Get the connection to Vantage, before importing the
+        #        function in user space.
+        #     2. User can import the function, if it is available on
+        #        Vantage user is connected to.
+        #     3. To check the list of UAF analytic functions available
+        #        on Vantage user connected to, use
+        #        "display_analytic_functions()".
+
+        # Check the list of available analytic functions.
+        display_analytic_functions()
+
+        # Import function Image2Matrix.
+        from teradataml import Image2Matrix
+        import teradataml
+
+        # Drop the image table if it is present.
+        try:
+            db_drop_table('imageTable')
+        except:
+            pass
+
+        # Create a table to store the image data.
+        execute_sql('CREATE TABLE imageTable(id INTEGER, image BLOB);')
+
+        # Load the image data into the fileContent variable.
+        file_dir = os.path.join(os.path.dirname(teradataml.__file__), "data")
+        with open(os.path.join(file_dir,'peppers.png'), mode='rb') as file:
+            fileContent = file.read()
+
+        # Insert the image data into the table.
+        sql = 'INSERT INTO imageTable VALUES(?, ?);'
+        parameters = (1, fileContent)
+        execute_sql(sql, parameters)
+
+        # Create a DataFrame for the image table.
+        imageTable = DataFrame('imageTable')
+
+        # Example 1: Convert the image to matrix with gray values.
+        image2matrix = Image2Matrix(data=imageTable.select(['id', 'image']),
+                                    output='gray')
+
+        # Print the result DataFrame.
+        print(image2matrix.result)
+
+        # Example 2: Convert the image to matrix with rgb values.
+        image2matrix2 = Image2Matrix(data=imageTable.select(['id', 'image']),
+                                     output='rgb')
+
+        # Print the result DataFrame.
+        print(image2matrix2.result)
+    """
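The notes above cap the input image at 16 MB and 4,000,000 pixels. A quick client-side check before inserting the BLOB can catch oversized files early; the sketch below uses Pillow, which is assumed here as an extra dependency and is not something the Image2Matrix docstring itself requires.

```python
# Optional client-side sanity check against the documented limits
# (<= 16 MB file size, <= 4,000,000 pixels) before inserting the BLOB.
# Pillow is an assumed extra dependency for this sketch only.
import os
import teradataml
from PIL import Image

path = os.path.join(os.path.dirname(teradataml.__file__), "data", "peppers.png")

size_mb = os.path.getsize(path) / (1024 * 1024)
with Image.open(path) as img:
    n_pixels = img.width * img.height

assert size_mb <= 16, f"image is {size_mb:.1f} MB, over the 16 MB limit"
assert n_pixels <= 4_000_000, f"image has {n_pixels} pixels, over the 4,000,000 limit"
```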
teradataml/data/docs/uaf/docs_17_20/ACF.py
@@ -1,6 +1,6 @@
 def ACF(data=None, data_filter_expr=None, max_lags=None,
         func_type=False, unbiased=False, demean=True,
-        qstat=False, alpha=None,
+        qstat=False, alpha=None,
         **generic_arguments):
     """
     DESCRIPTION:
@@ -96,15 +96,6 @@ def ACF(data=None, data_filter_expr=None, max_lags=None,
                 * The function does not return confidence intervals.
             Types: float
 
-        round_results:
-            Optional Argument.
-            Specifies whether rounding should be done or not.
-            When set to True, results in the output row are
-            rounded before inserting the rows into dataframe,
-            otherwise not.
-            Default Value: False
-            Types: bool
-
         **generic_arguments:
             Specifies the generic keyword arguments of UAF functions.
             Below are the generic keyword arguments:
teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py
@@ -312,7 +312,7 @@ def ArimaEstimate(data1=None, data1_filter_expr=None, data2=None,
         references, such as ArimaEstimate_obj.<attribute_name>.
         Output teradataml DataFrame attribute names are:
             1. result
-            2. fitmetadata - Available when "
+            2. fitmetadata - Available when "fit_metrics" is set to True, otherwise not.
             3. fitresiduals - Available when "residuals" is set to True, otherwise not.
             4. model
             5. valdata
teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py
@@ -118,6 +118,9 @@ def ArimaForecast(data=None, data_filter_expr=None, forecast_periods=None,
                                   payload_field="magnitude",
                                   payload_content="REAL")
 
+        # Example 1: Forecast 2 periods based on the model fitted by ArimaEstimate.
+        #            As the fit_percentage is greater than or equal to 100,
+        #            output of ArimaEstimate is used for ArimaForecast.
         # Execute ArimaEstimate function.
         arima_estimate_op = ArimaEstimate(data1=data_series_df,
                                           nonseasonal_model_order=[2,0,0],
@@ -128,15 +131,42 @@ def ArimaForecast(data=None, data_filter_expr=None, forecast_periods=None,
                                           residuals=True,
                                           fit_percentage=100)
 
-        # Example 1: Forecast 2 periods based on the model fitted by ArimaEstimate.
-        #            As the fit_percentage is greater than or equal to 100,
-        #            output of ArimaEstimate is used for ArimaForecast.
-
         # Create teradataml TDAnalyticResult object over the result attribute of 'arima_estimate_op'
         data_art_df = TDAnalyticResult(data=arima_estimate_op.result)
 
-        uaf_out = ArimaForecast(data=data_art_df,
+        uaf_out = ArimaForecast(data=data_art_df,
+                                forecast_periods=2)
 
         # Print the result DataFrame.
         print(uaf_out.result)
+
+        # Example 2: Forecast 2 periods based on the model fitted by ArimaValidate.
+        #            As the fit_percentage is less than 100,
+        #            output of ArimaEstimate is used for ArimaValidate and
+        #            output of ArimaValidate is used for ArimaForecast.
+        # Execute ArimaEstimate function.
+        arima_estimate_op = ArimaEstimate(data1=data_series_df,
+                                          nonseasonal_model_order=[2,0,0],
+                                          constant=False,
+                                          algorithm="MLE",
+                                          coeff_stats=True,
+                                          fit_metrics=True,
+                                          residuals=True,
+                                          fit_percentage=80)
+
+        # Create TDAnalyticResult object over the result attribute of 'arima_estimate_op'.
+        data_art_df = TDAnalyticResult(data=arima_estimate_op.result)
+
+        # Execute ArimaValidate function.
+        arima_validate_op = ArimaValidate(data=data_art_df,
+                                          fit_metrics=TRUE,
+                                          residuals=TRUE)
+
+        data_art_df1 = TDAnalyticResult(data=arima_validate_op.result)
+
+        uaf_out = ArimaForecast(data=data_art_df1,
+                                forecast_periods=2)
+
+        # Print the result DataFrames.
+        print(uaf_out.result)
     """
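The two examples added above encode a small decision: when "fit_percentage" is 100 the ArimaEstimate result feeds ArimaForecast directly, otherwise it goes through ArimaValidate first. Below is a minimal sketch of that branching, assuming an active Vantage connection and reusing the function names and arguments exactly as they appear in the docstring examples; the `series`, `order` and `fit_pct` names are placeholders introduced only for this illustration.

```python
# Sketch only: wraps the ArimaEstimate -> (ArimaValidate) -> ArimaForecast flow shown
# in the updated docstring examples. Requires a connected teradataml session.
from teradataml import ArimaEstimate, ArimaValidate, ArimaForecast, TDAnalyticResult

def forecast_with_arima(series, order, fit_pct, periods=2):
    est = ArimaEstimate(data1=series, nonseasonal_model_order=order,
                        constant=False, algorithm="MLE",
                        fit_metrics=True, residuals=True,
                        fit_percentage=fit_pct)
    art = TDAnalyticResult(data=est.result)
    if fit_pct < 100:
        # Model was fit on part of the data, so validate before forecasting.
        val = ArimaValidate(data=art, fit_metrics=True, residuals=True)
        art = TDAnalyticResult(data=val.result)
    return ArimaForecast(data=art, forecast_periods=periods)

# e.g. forecast_with_arima(data_series_df, [2, 0, 0], fit_pct=80).result
```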
teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py
@@ -149,7 +149,9 @@ def ArimaValidate(data=None, data_filter_expr=None, fit_metrics=False,
         # Create teradataml TDAnalyticResult object over the result attribute of 'arima_estimate_op'.
         data_art_df = TDAnalyticResult(data=arima_estimate_op.result)
 
-        uaf_out = ArimaValidate(data=data_art_df,
+        uaf_out = ArimaValidate(data=data_art_df,
+                                fit_metrics=True,
+                                residuals=True)
 
         # Print the result DataFrames.
         print(uaf_out.result)