teradataml 20.0.0.1__py3-none-any.whl → 20.0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of teradataml might be problematic.

Files changed (200)
  1. teradataml/LICENSE.pdf +0 -0
  2. teradataml/README.md +112 -0
  3. teradataml/__init__.py +6 -3
  4. teradataml/_version.py +1 -1
  5. teradataml/analytics/__init__.py +3 -2
  6. teradataml/analytics/analytic_function_executor.py +224 -16
  7. teradataml/analytics/analytic_query_generator.py +92 -0
  8. teradataml/analytics/byom/__init__.py +3 -2
  9. teradataml/analytics/json_parser/metadata.py +1 -0
  10. teradataml/analytics/json_parser/utils.py +6 -4
  11. teradataml/analytics/meta_class.py +40 -1
  12. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  13. teradataml/analytics/sqle/__init__.py +10 -2
  14. teradataml/analytics/table_operator/__init__.py +3 -2
  15. teradataml/analytics/uaf/__init__.py +21 -2
  16. teradataml/analytics/utils.py +62 -1
  17. teradataml/analytics/valib.py +1 -1
  18. teradataml/automl/__init__.py +1502 -323
  19. teradataml/automl/custom_json_utils.py +139 -61
  20. teradataml/automl/data_preparation.py +245 -306
  21. teradataml/automl/data_transformation.py +32 -12
  22. teradataml/automl/feature_engineering.py +313 -82
  23. teradataml/automl/model_evaluation.py +44 -35
  24. teradataml/automl/model_training.py +109 -146
  25. teradataml/catalog/byom.py +8 -8
  26. teradataml/clients/pkce_client.py +1 -1
  27. teradataml/common/constants.py +37 -0
  28. teradataml/common/deprecations.py +13 -7
  29. teradataml/common/garbagecollector.py +151 -120
  30. teradataml/common/messagecodes.py +4 -1
  31. teradataml/common/messages.py +2 -1
  32. teradataml/common/sqlbundle.py +1 -1
  33. teradataml/common/utils.py +97 -11
  34. teradataml/common/wrapper_utils.py +1 -1
  35. teradataml/context/context.py +72 -2
  36. teradataml/data/complaints_test_tokenized.csv +353 -0
  37. teradataml/data/complaints_tokens_model.csv +348 -0
  38. teradataml/data/covid_confirm_sd.csv +83 -0
  39. teradataml/data/dataframe_example.json +10 -0
  40. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  41. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  42. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  43. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  44. teradataml/data/docs/sqle/docs_17_20/Shap.py +197 -0
  45. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  46. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  47. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  48. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  49. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  50. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  51. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  52. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  53. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  54. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  55. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  56. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  57. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  58. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  59. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  60. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  61. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  62. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  63. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  64. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  65. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  66. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  67. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  68. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  69. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  70. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  71. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  72. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  73. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  74. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  75. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  76. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  77. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  78. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  79. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  80. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  81. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  82. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  83. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  84. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  85. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  86. teradataml/data/dwt2d_dataTable.csv +65 -0
  87. teradataml/data/dwt_dataTable.csv +8 -0
  88. teradataml/data/dwt_filterTable.csv +3 -0
  89. teradataml/data/finance_data4.csv +13 -0
  90. teradataml/data/grocery_transaction.csv +19 -0
  91. teradataml/data/idwt2d_dataTable.csv +5 -0
  92. teradataml/data/idwt_dataTable.csv +8 -0
  93. teradataml/data/idwt_filterTable.csv +3 -0
  94. teradataml/data/interval_data.csv +5 -0
  95. teradataml/data/jsons/paired_functions.json +14 -0
  96. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  97. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  98. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  99. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  100. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  101. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  102. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  103. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  104. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  105. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  106. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  107. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  108. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  109. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  110. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  111. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  112. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  113. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  114. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  115. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  116. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  117. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  118. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  119. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  120. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  121. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  122. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  123. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  124. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  125. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  126. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  127. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  128. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  129. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  130. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  131. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  132. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  133. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  134. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  135. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  136. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  137. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  138. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  139. teradataml/data/jsons/uaf/17.20/TD_SAX.json +208 -0
  140. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  141. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  142. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  143. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  144. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  145. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +400 -0
  146. teradataml/data/load_example_data.py +8 -2
  147. teradataml/data/naivebayestextclassifier_example.json +1 -1
  148. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  149. teradataml/data/peppers.png +0 -0
  150. teradataml/data/real_values.csv +14 -0
  151. teradataml/data/sax_example.json +8 -0
  152. teradataml/data/scripts/deploy_script.py +1 -1
  153. teradataml/data/scripts/sklearn/sklearn_fit.py +17 -10
  154. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +2 -2
  155. teradataml/data/scripts/sklearn/sklearn_function.template +30 -7
  156. teradataml/data/scripts/sklearn/sklearn_neighbors.py +1 -1
  157. teradataml/data/scripts/sklearn/sklearn_score.py +12 -3
  158. teradataml/data/scripts/sklearn/sklearn_transform.py +55 -4
  159. teradataml/data/star_pivot.csv +8 -0
  160. teradataml/data/templates/open_source_ml.json +2 -1
  161. teradataml/data/teradataml_example.json +20 -1
  162. teradataml/data/timestamp_data.csv +4 -0
  163. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  164. teradataml/data/uaf_example.json +55 -1
  165. teradataml/data/unpivot_example.json +15 -0
  166. teradataml/data/url_data.csv +9 -0
  167. teradataml/data/windowdfft.csv +16 -0
  168. teradataml/dataframe/copy_to.py +1 -1
  169. teradataml/dataframe/data_transfer.py +5 -3
  170. teradataml/dataframe/dataframe.py +474 -41
  171. teradataml/dataframe/fastload.py +3 -3
  172. teradataml/dataframe/functions.py +339 -0
  173. teradataml/dataframe/row.py +160 -0
  174. teradataml/dataframe/setop.py +2 -2
  175. teradataml/dataframe/sql.py +658 -20
  176. teradataml/dataframe/window.py +1 -1
  177. teradataml/dbutils/dbutils.py +322 -16
  178. teradataml/geospatial/geodataframe.py +1 -1
  179. teradataml/geospatial/geodataframecolumn.py +1 -1
  180. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  181. teradataml/lib/aed_0_1.dll +0 -0
  182. teradataml/opensource/sklearn/_sklearn_wrapper.py +154 -69
  183. teradataml/options/__init__.py +3 -1
  184. teradataml/options/configure.py +14 -2
  185. teradataml/options/display.py +2 -2
  186. teradataml/plot/axis.py +4 -4
  187. teradataml/scriptmgmt/UserEnv.py +10 -6
  188. teradataml/scriptmgmt/lls_utils.py +3 -2
  189. teradataml/table_operators/Script.py +2 -2
  190. teradataml/table_operators/TableOperator.py +106 -20
  191. teradataml/table_operators/table_operator_util.py +88 -41
  192. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  193. teradataml/telemetry_utils/__init__.py +0 -0
  194. teradataml/telemetry_utils/queryband.py +52 -0
  195. teradataml/utils/validators.py +1 -1
  196. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.2.dist-info}/METADATA +115 -2
  197. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.2.dist-info}/RECORD +200 -140
  198. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.2.dist-info}/WHEEL +0 -0
  199. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.2.dist-info}/top_level.txt +0 -0
  200. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.2.dist-info}/zip-safe +0 -0
@@ -41,7 +41,7 @@ def Convolve(data1=None, data1_filter_expr=None, data2=None,
  data2:
  Required Argument.
  Specifies the actual filter kernel.
- Two time series have the following TDSeries characteristics.
+ The time series have the following TDSeries characteristics.
  1. "payload_content" must have one of these values:
  * REAL
  * COMPLEX
@@ -64,18 +64,21 @@ def Convolve(data1=None, data1_filter_expr=None, data2=None,

  algorithm:
  Optional Argument.
- Specifies the options to use for convolving. Options
- are 'CONV_SUMMATION' and 'CONV_DFFT'. If the
- 'CONV_SUMMATION' approach is used when one of the two
- series has greater than 64 entries, then an error
- is returned. When this parameter is not present,
- the function selects the option based on the number
- of entries in the source input series.
+ Specifies the options to use for convolving.
+ By default, the function selects the best option based
+ on the number of entries present in the two inputs,
+ and their types ( REAL, COMPLEX, and so on.)
+ CONV_SUMMATION only supports:
+ * REAL, REAL
+ * REAL, MULTIVAR_REAL
+ * MULTIVAR_REAL, REAL
+ * MULTIVAR_REAL, MULTIVAR_REAL
  Note:
  * This parameter is usually used for testing.
  If this parameter is not included, the internal
- planning logic selects the best option based
- on the number of entries in the source input series.
+ planning logic selects the best option based on
+ the number of entries present in the two inputs,
+ and their types ( REAL, COMPLEX, and so on.)
  Permitted Values: CONV_SUMMATION, CONV_DFFT
  Types: str

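A minimal sketch of how the revised "algorithm" argument might be used, based only on the parameter description above; the table names ("conv_data1", "conv_data2") and the TDSeries column names are assumptions, not taken from this diff:

    # Hypothetical input tables; only Convolve() and its "algorithm" argument
    # come from the docstring text above.
    from teradataml import DataFrame, TDSeries, Convolve

    data1 = DataFrame.from_table("conv_data1")
    data2 = DataFrame.from_table("conv_data2")

    data1_series_df = TDSeries(data=data1, id="id", row_index="seq",
                               row_index_style="SEQUENCE",
                               payload_field="v", payload_content="REAL")
    data2_series_df = TDSeries(data=data2, id="id", row_index="seq",
                               row_index_style="SEQUENCE",
                               payload_field="v", payload_content="REAL")

    # Both payloads are REAL, one of the combinations CONV_SUMMATION supports.
    uaf_out = Convolve(data1=data1_series_df,
                       data2=data2_series_df,
                       algorithm="CONV_SUMMATION")
    print(uaf_out.result)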
@@ -154,7 +154,6 @@ def Convolve2(data1=None, data1_filter_expr=None, data2=None,
  data3 = DataFrame.from_table("Convolve2RealsLeft")
  data4 = DataFrame.from_table("Convolve2RealsLeft")

-
  # Example 1: Apply the Convolve2() function when payload fields of two matrices
  # are the different to convolve two matrices into a new source
  # image matrix.
@@ -168,6 +167,7 @@ def Convolve2(data1=None, data1_filter_expr=None, data2=None,
  column_index='column_i',
  payload_field=["B"],
  payload_content="REAL")
+
  data2_matrix_df = TDMatrix(data=data2,
  id='id',
  row_index_style="sequence",
@@ -176,6 +176,7 @@ def Convolve2(data1=None, data1_filter_expr=None, data2=None,
  column_index='column_i',
  payload_field=["A"],
  payload_content="REAL")
+
  # Convolve the "data1_matrix_df" and "data2_matrix_df" matrices using the Convolve2() function.
  uaf_out1 = Convolve2(data1=data1_matrix_df,
  data2=data2_matrix_df,
@@ -196,6 +197,7 @@ def Convolve2(data1=None, data1_filter_expr=None, data2=None,
  column_index='col_seq',
  payload_field=["A"],
  payload_content="REAL")
+
  data4_matrix_df = TDMatrix(data=data4,
  id='id',
  row_index_style="sequence",
@@ -204,6 +206,7 @@ def Convolve2(data1=None, data1_filter_expr=None, data2=None,
  column_index='col_seq',
  payload_field=["A"],
  payload_content="REAL")
+
  # Convolve the "data3_matrix_df" and "data4_matrix_df" matrices using the Convolve2() function.
  uaf_out2 = Convolve2(data1=data3_matrix_df,
  data2=data4_matrix_df,
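The Convolve2() calls above are split across hunks, so their closing arguments are not visible here. A minimal end-to-end sketch of the first example, with any arguments beyond data1/data2 omitted:

    # Convolve the two matrices built above; keyword arguments that the full
    # example passes after data2 are cut off in this hunk and omitted here.
    uaf_out1 = Convolve2(data1=data1_matrix_df,
                         data2=data2_matrix_df)
    print(uaf_out1.result)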
@@ -21,7 +21,7 @@ def CumulPeriodogram(data=None, data_filter_expr=None,
  2. Use ArimaValidate() to validate spectral candidates.
  4. Execute CumulPeriodogram() using the residuals.
  5. See the null hypothesis result from CumulPeriodogram().
- 6. Use Plot() to plot the results.
+ 6. Use DataFrame.plot() to plot the results.

  PARAMETERS:
  data:
@@ -143,7 +143,6 @@ def CumulPeriodogram(data=None, data_filter_expr=None,
  fit_metrics=True,
  residuals=True)

-
  # Example 1: Perform statistical test using CumulPeriodogram()
  # with input as TDSeries object created over the 'fitresiduals'
  # attribute of arima_validate generated by running ArimaValidate() and
@@ -158,7 +157,8 @@ def CumulPeriodogram(data=None, data_filter_expr=None,
  payload_field="RESIDUAL",
  payload_content="REAL")

- uaf_out = CumulPeriodogram(data=data_series_df, significance_level=0.05)
+ uaf_out = CumulPeriodogram(data=data_series_df,
+                            significance_level=0.05)

  # Print the result DataFrames.
  print(uaf_out.result)
@@ -174,7 +174,8 @@ def CumulPeriodogram(data=None, data_filter_expr=None,
  # generated by ArimaValidate() function with layer as 'ARTFITRESIDUALS'.
  art_df = TDAnalyticResult(data=arima_validate.result, layer="ARTFITRESIDUALS")

- uaf_out = CumulPeriodogram(data=art_df, significance_level=0.05)
+ uaf_out = CumulPeriodogram(data=art_df,
+                            significance_level=0.05)

  # Print the result DataFrames.
  print(uaf_out.result)
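Step 6 of the workflow above now points to DataFrame.plot() instead of Plot(). A rough sketch of that final step, reusing uaf_out from the examples; the result column names, the plot() keyword arguments, and the show() call here are assumptions rather than part of this diff:

    # Hypothetical column names on the CumulPeriodogram result DataFrame.
    result = uaf_out.result
    fig = result.plot(x=result.ROW_I, y=result.CUMUL_FREQ, kind="line")
    fig.show()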
@@ -174,10 +174,10 @@ def DFFT2Conv(data=None, data_filter_expr=None, conv=None,
  # input matrix with real numbers only for the matrix id 33.
  filter_expr = td_matrix.id==33
  dfft2_out = DFFT2(data=td_matrix,
- data_filter_expr=filter_expr,
- freq_style="K_INTEGRAL",
- human_readable=False,
- output_fmt_content="COMPLEX")
+                   data_filter_expr=filter_expr,
+                   freq_style="K_INTEGRAL",
+                   human_readable=False,
+                   output_fmt_content="COMPLEX")

  # Example 1: Convert the complex(REAL,IMAGINARY) output of DFFT2() to
  # polar(AMPLITUDE,PHASE) in RADIAN format using TDMatrix
@@ -0,0 +1,235 @@
+ def DWT(data1=None, data1_filter_expr=None, data2=None,
+ data2_filter_expr=None, wavelet=None, mode="symmetric",
+ level=1, part=None, input_fmt_input_mode=None,
+ output_fmt_index_style="NUMERICAL_SEQUENCE",
+ **generic_arguments):
+ """
+ DESCRIPTION:
+ DWT() is a function that performs discrete wavelet
+ transform (DWT).
+
+ PARAMETERS:
+ data1:
+ Required Argument.
+ Specifies the series to be used as an input.
+ Multiple payloads are supported, and each payload column is
+ transformed independently. Only REAL or MULTIVAR_REAL
+ payload content types are supported.
+ Types: TDSeries
+
+ data1_filter_expr:
+ Optional Argument.
+ Specifies the filter expression for "data1".
+ Types: ColumnExpression
+
+ data2:
+ Optional Argument.
+ Specifies the series to be used as an input. The
+ series specifies the filter. It should have two payload
+ columns corresponding to low and high pass
+ filters. Only MULTIVAR_REAL payload content type is
+ supported.
+ Types: TDSeries
+
+ data2_filter_expr:
+ Optional Argument.
+ Specifies the filter expression for "data2".
+ Types: ColumnExpression
+
+ wavelet:
+ Optional Argument.
+ Specifies the name of the wavelet.
+ Option families and names are:
+ * Daubechies: 'db1' or 'haar', 'db2', 'db3', .... ,'db38'
+ * Coiflets: 'coif1', 'coif2', ... , 'coif17'
+ * Symlets: 'sym2', 'sym3', ... ,' sym20'
+ * Discrete Meyer: 'dmey'
+ * Biorthogonal: 'bior1.1', 'bior1.3', 'bior1.5',
+ 'bior2.2', 'bior2.4', 'bior2.6',
+ 'bior2.8', 'bior3.1', 'bior3.3',
+ 'bior3.5', 'bior3.7', 'bior3.9',
+ 'bior4.4', 'bior5.5', 'bior6.8'
+ * Reverse Biorthogonal: 'rbio1.1', 'rbio1.3',
+ 'rbio1.5' 'rbio2.2',
+ 'rbio2.4', 'rbio2.6',
+ 'rbio2.8', 'rbio3.1',
+ 'rbio3.3', 'rbio3.5',
+ 'rbio3.7','rbio3.9',
+ 'rbio4.4', 'rbio5.5',
+ 'rbio6.8'
+ Note:
+ * If 'wavelet' is specified, do not include a second
+ input series for the function. Otherwise, include
+ a second input series to provide the filter.
+ * Data type is case-sensitive.
+ Types: str
+
+ mode:
+ Optional Argument.
+ Specifies the signal extension mode. Data type is
+ case-insensitive.
+ Permitted Values:
+ * symmetric, sym, symh
+ * reflect, symw
+ * smooth, spd, sp1
+ * constant, sp0
+ * zero, zpd
+ * periodic, ppd
+ * periodization, per
+ * antisymmetric, asym, asymh
+ * antireflect, asymw
+ Default Value: symmetric
+ Types: str
+
+ level:
+ Optional Argument.
+ Specifies the level of decomposition.
+ Valid values are [1,15].
+ Default Value: 1
+ Types: int
+
+ part:
+ Optional Argument.
+ Specifies the indicator that the input is partial decomposition
+ result.
+ Note:
+ Data type is case-insensitive.
+ Permitted Values:
+ * a - the approximation
+ * d - the detail of decomposition of result.
+ Types: str
+
+ input_fmt_input_mode:
+ Optional Argument.
+ Specifies the input mode supported by the function.
+ When there are two input series, then the input_fmt_input_mode
+ specification is mandatory.
+ Permitted Values:
+ The input_fmt_input_mode parameter has the following options:
+ * ONE2ONE: Both the primary and secondary series
+ specifications contain a series name which
+ identifies the two series in the function.
+ * MANY2ONE: The MANY specification is the primary series
+ declaration. The secondary series specification
+ contains a series name that identifies the single
+ secondary series.
+ * MATCH: Both series are defined by their respective series
+ specification instance name declarations.
+ Types: str
+
+ output_fmt_index_style:
+ Optional Argument.
+ Specifies the index style of the output format.
+ Permitted Values: NUMERICAL_SEQUENCE
+ Default Value: NUMERICAL_SEQUENCE
+ Types: str
+
+ **generic_arguments:
+ Specifies the generic keyword arguments of UAF functions.
+ Below are the generic keyword arguments:
+ persist:
+ Optional Argument.
+ Specifies whether to persist the results of the
+ function in a table or not. When set to True,
+ results are persisted in a table; otherwise,
+ results are garbage collected at the end of the
+ session.
+ Note that, when UAF function is executed, an
+ analytic result table (ART) is created.
+ Default Value: False
+ Types: bool
+
+ volatile:
+ Optional Argument.
+ Specifies whether to put the results of the
+ function in a volatile ART or not. When set to
+ True, results are stored in a volatile ART,
+ otherwise not.
+ Default Value: False
+ Types: bool
+
+ output_table_name:
+ Optional Argument.
+ Specifies the name of the table to store results.
+ If not specified, a unique table name is internally
+ generated.
+ Types: str
+
+ output_db_name:
+ Optional Argument.
+ Specifies the name of the database to create output
+ table into. If not specified, table is created into
+ database specified by the user at the time of context
+ creation or configuration parameter. Argument is ignored,
+ if "output_table_name" is not specified.
+ Types: str
+
+
+ RETURNS:
+ Instance of DWT.
+ Output teradataml DataFrames can be accessed using attribute
+ references, such as DWT_obj.<attribute_name>.
+ Output teradataml DataFrame attribute name is:
+ 1. result
+
+
+ RAISES:
+ TeradataMlException, TypeError, ValueError
+
+
+ EXAMPLES:
+ # Notes:
+ # 1. Get the connection to Vantage, before importing the
+ # function in user space.
+ # 2. User can import the function, if it is available on
+ # Vantage user is connected to.
+ # 3. To check the list of UAF analytic functions available
+ # on Vantage user connected to, use
+ # "display_analytic_functions()".
+
+ # Check the list of available UAF analytic functions.
+ display_analytic_functions(type="UAF")
+
+ # Import function DWT.
+ from teradataml import DWT
+
+ # Load the example data.
+ load_example_data("uaf", ["dwt_dataTable", "dwt_filterTable"])
+
+ # Create teradataml DataFrame objects.
+ data1 = DataFrame.from_table("dwt_dataTable")
+ data2 = DataFrame.from_table("dwt_filterTable")
+
+ # Create teradataml TDSeries objects.
+ data1_series_df = TDSeries(data=data1,
+ id="id",
+ row_index="rowi",
+ row_index_style="SEQUENCE",
+ payload_field="v",
+ payload_content="REAL")
+
+ data2_series_df = TDSeries(data=data2,
+ id="id",
+ row_index="seq",
+ row_index_style="SEQUENCE",
+ payload_field=["lo", "hi"],
+ payload_content="MULTIVAR_REAL")
+
+ # Example 1: Perform discrete wavelet transform using two series as input.
+ uaf_out = DWT(data1=data1_series_df,
+ data2=data2_series_df,
+ data2_filter_expr=data2_series_df.id==1,
+ input_fmt_input_mode='MANY2ONE')
+
+ # Print the result DataFrame.
+ print(uaf_out.result)
+
+ # Example 2: Perform discrete wavelet transform using single series as input and the wavelet parameter.
+ uaf_out = DWT(data1=data1_series_df,
+ wavelet='haar')
+
+ # Print the result DataFrame.
+ print(uaf_out.result)
+
+ """
+
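A short follow-up sketch of the generic arguments documented in the DWT docstring above, reusing data1_series_df from its examples; "dwt_results" and "mydb" are hypothetical names:

    # Persist the ART instead of letting it be garbage collected at session end.
    uaf_out = DWT(data1=data1_series_df,
                  wavelet='haar',
                  persist=True,
                  output_table_name="dwt_results",  # hypothetical table name
                  output_db_name="mydb")            # hypothetical database name
    print(uaf_out.result)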
@@ -0,0 +1,214 @@
+ def DWT2D(data1=None, data1_filter_expr=None, data2=None,
+ data2_filter_expr=None, wavelet=None, mode="symmetric",
+ level=1, input_fmt_input_mode=None,
+ output_fmt_index_style="NUMERICAL_SEQUENCE",
+ **generic_arguments):
+ """
+ DESCRIPTION:
+ DWT2D() function performs discrete wavelet transform (DWT) for
+ two-dimensional data. The algorithm is applied first
+ vertically by column axis, then horizontally by row axis.
+
+
+ PARAMETERS:
+ data1:
+ Required Argument.
+ Specifies the input matrix. Multiple payloads are supported,
+ and each payload column is transformed independently.
+ Only REAL or MULTIVAR_REAL payload content types are supported.
+ Types: TDMatrix
+
+ data1_filter_expr:
+ Optional Argument.
+ Specifies the filter expression for "data1".
+ Types: ColumnExpression
+
+ data2:
+ Optional Argument.
+ Specifies the input series. The series specifies the filter.
+ It should have two payload columns corresponding to low and high
+ pass filters. Only MULTIVAR_REAL payload content type is supported.
+ Types: TDSeries
+
+ data2_filter_expr:
+ Optional Argument.
+ Specifies the filter expression for "data2".
+ Types: ColumnExpression
+
+ wavelet:
+ Optional Argument.
+ Specifies the name of the wavelet.
+ Permitted families and names are:
+ * Daubechies: 'db1' or 'haar', 'db2', 'db3', .... ,'db38'
+ * Coiflets: 'coif1', 'coif2', ... , 'coif17'
+ * Symlets: 'sym2', 'sym3', ... ,' sym20'
+ * Discrete Meyer: 'dmey'
+ * Biorthogonal: 'bior1.1', 'bior1.3', 'bior1.5', 'bior2.2',
+ 'bior2.4', 'bior2.6', 'bior2.8', 'bior3.1',
+ 'bior3.3', 'bior3.5', 'bior3.7', 'bior3.9',
+ 'bior4.4', 'bior5.5', 'bior6.8'
+ * Reverse Biorthogonal: 'rbio1.1', 'rbio1.3', 'rbio1.5'
+ 'rbio2.2', 'rbio2.4', 'rbio2.6',
+ 'rbio2.8', 'rbio3.1', 'rbio3.3',
+ 'rbio3.5', 'rbio3.7','rbio3.9',
+ 'rbio4.4', 'rbio5.5', 'rbio6.8'
+ Note:
+ * If 'wavelet' is specified, do not include a second
+ input series for the function. Otherwise, include
+ a second input series to provide the filter.
+ * Data type is case-sensitive.
+ Types: str
+
+ mode:
+ Optional Argument.
+ Specifies the signal extension mode. Data type is case-insensitive.
+ Permitted Values:
+ * symmetric, sym, symh
+ * reflect, symw
+ * smooth, spd, sp1
+ * constant, sp0
+ * zero, zpd
+ * periodic, ppd
+ * periodization, per
+ * antisymmetric, asym, asymh
+ * antireflect, asymw
+ Default Value: symmetric
+ Types: str
+
+ level:
+ Optional Argument.
+ Specifies the level of decomposition. Valid values are [1,15].
+ Default Value: 1
+ Types: int
+
+ input_fmt_input_mode:
+ Optional Argument.
+ Specifies the input mode supported by the function.
+ When there are two input series, then the "input_fmt_input_mode"
+ specification is mandatory.
+ Permitted Values:
+ * ONE2ONE: Both the primary and secondary series specifications
+ contain a series name which identifies the two series
+ in the function.
+ * MANY2ONE: The MANY specification is the primary series
+ declaration. The secondary series specification
+ contains a series name that identifies the single
+ secondary series.
+ * MATCH: Both series are defined by their respective series
+ specification instance name declarations.
+ Types: str
+
+ output_fmt_index_style:
+ Optional Argument.
+ Specifies the index style of the output format.
+ Permitted Values: NUMERICAL_SEQUENCE
+ Default Value: NUMERICAL_SEQUENCE
+ Types: str
+
+ **generic_arguments:
+ Specifies the generic keyword arguments of UAF functions.
+ Below are the generic keyword arguments:
+ persist:
+ Optional Argument.
+ Specifies whether to persist the results of the
+ function in a table or not. When set to True,
+ results are persisted in a table; otherwise,
+ results are garbage collected at the end of the
+ session.
+ Note that, when UAF function is executed, an
+ analytic result table (ART) is created.
+ Default Value: False
+ Types: bool
+
+ volatile:
+ Optional Argument.
+ Specifies whether to put the results of the
+ function in a volatile ART or not. When set to
+ True, results are stored in a volatile ART,
+ otherwise not.
+ Default Value: False
+ Types: bool
+
+ output_table_name:
+ Optional Argument.
+ Specifies the name of the table to store results.
+ If not specified, a unique table name is internally
+ generated.
+ Types: str
+
+ output_db_name:
+ Optional Argument.
+ Specifies the name of the database to create output
+ table into. If not specified, table is created into
+ database specified by the user at the time of context
+ creation or configuration parameter. Argument is ignored,
+ if "output_table_name" is not specified.
+ Types: str
+
+
+ RETURNS:
+ Instance of DWT2D.
+ Output teradataml DataFrames can be accessed using attribute
+ references, such as DWT2D_obj.<attribute_name>.
+ Output teradataml DataFrame attribute name is:
+ 1. result
+
+
+ RAISES:
+ TeradataMlException, TypeError, ValueError
+
+
+ EXAMPLES:
+ # Notes:
+ # 1. Get the connection to Vantage, before importing the
+ # function in user space.
+ # 2. User can import the function, if it is available on
+ # Vantage user is connected to.
+ # 3. To check the list of UAF analytic functions available
+ # on Vantage user connected to, use
+ # "display_analytic_functions()".
+
+ # Check the list of available UAF analytic functions.
+ display_analytic_functions(type="UAF")
+
+ # Load the example data.
+ load_example_data("uaf", ["dwt2d_dataTable", "dwt_filterTable"])
+
+ # Create teradataml DataFrame objects.
+ data1 = DataFrame.from_table("dwt2d_dataTable")
+ data2 = DataFrame.from_table("dwt_filterTable")
+
+ # Create teradataml TDSeries object.
+ data2_series_df = TDSeries(data=data2,
+ id="id",
+ row_index="seq",
+ row_index_style="SEQUENCE",
+ payload_field=["lo", "hi"],
+ payload_content="MULTIVAR_REAL")
+
+ # Create teradataml TDMatrix object.
+ data1_matrix_df = TDMatrix(data=data1,
+ id="id",
+ row_index="y",
+ row_index_style="SEQUENCE",
+ column_index="x",
+ column_index_style="SEQUENCE",
+ payload_field="v",
+ payload_content="REAL")
+
+ # Example 1: Perform discrete wavelet transform (DWT) for two-dimensional data using both inputs.
+ uaf_out = DWT2D(data1=data1_matrix_df,
+ data2=data2_series_df,
+ data2_filter_expr=data2.id==1,
+ input_fmt_input_mode="MANY2ONE")
+
+ # Example 1: Perform discrete wavelet transform (DWT) for two-dimensional data
+ # using only one matrix as input and wavelet as 'haar'.
+ uaf_out = DWT2D(data1=data1_matrix_df,
+ wavelet='haar')
+
+ # Print the result DataFrame.
+ print(uaf_out.result)
+
+ """
+
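A small follow-up sketch exercising the "mode" and "level" arguments documented in the DWT2D docstring above, with the same data1_matrix_df; the specific wavelet, mode, and level values are illustrative only:

    # Two-level decomposition with a Daubechies wavelet and periodic extension.
    uaf_out = DWT2D(data1=data1_matrix_df,
                    wavelet='db2',
                    mode='periodic',
                    level=2)
    print(uaf_out.result)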
@@ -28,7 +28,7 @@ def DurbinWatson(data=None, data_filter_expr=None, explanatory_count=None,
  explanatory_count:
  Required Argument.
  Specifies the number of explanatory variables in the original regression.
- The number of explanatory variables along with the "include_contant"
+ The number of explanatory variables along with the "include_constant"
  information is needed to perform the lookup in the Durbin-Watson data.
  Types: int

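A minimal sketch of a DurbinWatson() call consistent with the corrected wording above; the residuals series and the include_constant usage are assumptions based on the parameter description, not shown in this hunk:

    # "residuals_series" is a hypothetical TDSeries built over the residuals
    # of the original regression.
    uaf_out = DurbinWatson(data=residuals_series,
                           explanatory_count=2,    # regressors in the original model
                           include_constant=True)  # the regression included an intercept
    print(uaf_out.result)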
@@ -10,7 +10,7 @@ def ExtractResults(data=None, data_filter_expr=None, **generic_arguments):

  The functions that have multiple layers are shown in the table.
  Layers of each function can be extracted from the function output,
- i.e. "result" attribute, using the layer name specified below:
+ i.e., "result" attribute, using the layer name specified below:

  ------------------------------------------------------------------
  | Function | Layers |