teradataml-20.0.0.0-py3-none-any.whl → teradataml-20.0.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (263)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +183 -0
  4. teradataml/__init__.py +6 -3
  5. teradataml/_version.py +2 -2
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +275 -40
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +1 -0
  11. teradataml/analytics/json_parser/utils.py +17 -21
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +10 -2
  15. teradataml/analytics/table_operator/__init__.py +3 -2
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +62 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1553 -319
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +276 -319
  22. teradataml/automl/data_transformation.py +163 -81
  23. teradataml/automl/feature_engineering.py +402 -239
  24. teradataml/automl/feature_exploration.py +9 -2
  25. teradataml/automl/model_evaluation.py +48 -51
  26. teradataml/automl/model_training.py +291 -189
  27. teradataml/catalog/byom.py +8 -8
  28. teradataml/catalog/model_cataloging_utils.py +1 -1
  29. teradataml/clients/auth_client.py +133 -0
  30. teradataml/clients/pkce_client.py +1 -1
  31. teradataml/common/aed_utils.py +3 -2
  32. teradataml/common/constants.py +48 -6
  33. teradataml/common/deprecations.py +13 -7
  34. teradataml/common/garbagecollector.py +156 -120
  35. teradataml/common/messagecodes.py +6 -1
  36. teradataml/common/messages.py +3 -1
  37. teradataml/common/sqlbundle.py +1 -1
  38. teradataml/common/utils.py +103 -11
  39. teradataml/common/wrapper_utils.py +1 -1
  40. teradataml/context/context.py +121 -31
  41. teradataml/data/advertising.csv +201 -0
  42. teradataml/data/bank_marketing.csv +11163 -0
  43. teradataml/data/bike_sharing.csv +732 -0
  44. teradataml/data/boston2cols.csv +721 -0
  45. teradataml/data/breast_cancer.csv +570 -0
  46. teradataml/data/complaints_test_tokenized.csv +353 -0
  47. teradataml/data/complaints_tokens_model.csv +348 -0
  48. teradataml/data/covid_confirm_sd.csv +83 -0
  49. teradataml/data/customer_segmentation_test.csv +2628 -0
  50. teradataml/data/customer_segmentation_train.csv +8069 -0
  51. teradataml/data/dataframe_example.json +10 -0
  52. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +3 -1
  53. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +6 -0
  54. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +5 -1
  55. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +61 -1
  56. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  57. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +2 -0
  58. teradataml/data/docs/sqle/docs_17_20/FTest.py +105 -26
  59. teradataml/data/docs/sqle/docs_17_20/GLM.py +162 -1
  60. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +5 -3
  61. teradataml/data/docs/sqle/docs_17_20/KMeans.py +48 -1
  62. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  63. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +3 -2
  64. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +5 -0
  65. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +6 -0
  66. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  67. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  68. teradataml/data/docs/sqle/docs_17_20/ROC.py +3 -2
  69. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +13 -2
  70. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +119 -1
  71. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +93 -1
  72. teradataml/data/docs/sqle/docs_17_20/Shap.py +197 -0
  73. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +163 -1
  74. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  75. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  76. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  77. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +12 -4
  78. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +7 -1
  79. teradataml/data/docs/sqle/docs_17_20/ZTest.py +72 -7
  80. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  81. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  82. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  83. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  84. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  85. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  86. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  87. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  88. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  89. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  90. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  91. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  92. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  93. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  94. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  95. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  96. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  97. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  98. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  99. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  100. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  101. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  102. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  103. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  104. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  105. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  106. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  107. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  108. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  109. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  110. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  111. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  112. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  113. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  114. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  115. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  116. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  117. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  118. teradataml/data/dwt2d_dataTable.csv +65 -0
  119. teradataml/data/dwt_dataTable.csv +8 -0
  120. teradataml/data/dwt_filterTable.csv +3 -0
  121. teradataml/data/finance_data4.csv +13 -0
  122. teradataml/data/glm_example.json +28 -1
  123. teradataml/data/grocery_transaction.csv +19 -0
  124. teradataml/data/housing_train_segment.csv +201 -0
  125. teradataml/data/idwt2d_dataTable.csv +5 -0
  126. teradataml/data/idwt_dataTable.csv +8 -0
  127. teradataml/data/idwt_filterTable.csv +3 -0
  128. teradataml/data/insect2Cols.csv +61 -0
  129. teradataml/data/interval_data.csv +5 -0
  130. teradataml/data/jsons/paired_functions.json +14 -0
  131. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +99 -27
  132. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  133. teradataml/data/jsons/sqle/17.20/TD_FTest.json +166 -83
  134. teradataml/data/jsons/sqle/17.20/TD_GLM.json +90 -14
  135. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +48 -5
  136. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +5 -3
  137. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +31 -11
  138. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  139. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  140. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +3 -2
  141. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  142. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  143. teradataml/data/jsons/sqle/17.20/TD_ROC.json +2 -1
  144. teradataml/data/jsons/sqle/17.20/TD_SVM.json +16 -16
  145. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +19 -1
  146. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +168 -15
  147. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +50 -1
  148. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  149. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  150. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  151. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +25 -7
  152. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +17 -4
  153. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +157 -80
  154. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  155. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  156. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  157. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  158. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  159. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  160. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  161. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  162. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  163. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  164. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  165. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  166. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  167. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  168. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  169. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  170. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  171. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  172. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  173. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  174. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  175. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  176. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  177. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  178. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  179. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  180. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  181. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  182. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  183. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  184. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  185. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  186. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  187. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  188. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  189. teradataml/data/jsons/uaf/17.20/TD_SAX.json +208 -0
  190. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  191. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  192. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  193. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  194. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  195. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +400 -0
  196. teradataml/data/kmeans_example.json +5 -0
  197. teradataml/data/kmeans_table.csv +10 -0
  198. teradataml/data/load_example_data.py +8 -2
  199. teradataml/data/naivebayestextclassifier_example.json +1 -1
  200. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  201. teradataml/data/onehot_encoder_train.csv +4 -0
  202. teradataml/data/openml_example.json +29 -0
  203. teradataml/data/peppers.png +0 -0
  204. teradataml/data/real_values.csv +14 -0
  205. teradataml/data/sax_example.json +8 -0
  206. teradataml/data/scale_attributes.csv +3 -0
  207. teradataml/data/scale_example.json +52 -1
  208. teradataml/data/scale_input_part_sparse.csv +31 -0
  209. teradataml/data/scale_input_partitioned.csv +16 -0
  210. teradataml/data/scale_input_sparse.csv +11 -0
  211. teradataml/data/scale_parameters.csv +3 -0
  212. teradataml/data/scripts/deploy_script.py +21 -2
  213. teradataml/data/scripts/sklearn/sklearn_fit.py +40 -37
  214. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +22 -30
  215. teradataml/data/scripts/sklearn/sklearn_function.template +42 -24
  216. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +23 -33
  217. teradataml/data/scripts/sklearn/sklearn_neighbors.py +19 -28
  218. teradataml/data/scripts/sklearn/sklearn_score.py +32 -32
  219. teradataml/data/scripts/sklearn/sklearn_transform.py +85 -42
  220. teradataml/data/star_pivot.csv +8 -0
  221. teradataml/data/templates/open_source_ml.json +2 -1
  222. teradataml/data/teradataml_example.json +97 -1
  223. teradataml/data/timestamp_data.csv +4 -0
  224. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  225. teradataml/data/uaf_example.json +55 -1
  226. teradataml/data/unpivot_example.json +15 -0
  227. teradataml/data/url_data.csv +9 -0
  228. teradataml/data/windowdfft.csv +16 -0
  229. teradataml/data/ztest_example.json +16 -0
  230. teradataml/dataframe/copy_to.py +9 -4
  231. teradataml/dataframe/data_transfer.py +125 -64
  232. teradataml/dataframe/dataframe.py +575 -57
  233. teradataml/dataframe/dataframe_utils.py +47 -9
  234. teradataml/dataframe/fastload.py +273 -90
  235. teradataml/dataframe/functions.py +339 -0
  236. teradataml/dataframe/row.py +160 -0
  237. teradataml/dataframe/setop.py +2 -2
  238. teradataml/dataframe/sql.py +740 -18
  239. teradataml/dataframe/window.py +1 -1
  240. teradataml/dbutils/dbutils.py +324 -18
  241. teradataml/geospatial/geodataframe.py +1 -1
  242. teradataml/geospatial/geodataframecolumn.py +1 -1
  243. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  244. teradataml/lib/aed_0_1.dll +0 -0
  245. teradataml/opensource/sklearn/_sklearn_wrapper.py +254 -122
  246. teradataml/options/__init__.py +16 -5
  247. teradataml/options/configure.py +39 -6
  248. teradataml/options/display.py +2 -2
  249. teradataml/plot/axis.py +4 -4
  250. teradataml/scriptmgmt/UserEnv.py +26 -19
  251. teradataml/scriptmgmt/lls_utils.py +120 -16
  252. teradataml/table_operators/Script.py +4 -5
  253. teradataml/table_operators/TableOperator.py +160 -26
  254. teradataml/table_operators/table_operator_util.py +88 -41
  255. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  256. teradataml/telemetry_utils/__init__.py +0 -0
  257. teradataml/telemetry_utils/queryband.py +52 -0
  258. teradataml/utils/validators.py +41 -3
  259. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/METADATA +191 -6
  260. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/RECORD +263 -185
  261. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/WHEEL +0 -0
  262. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/top_level.txt +0 -0
  263. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/zip-safe +0 -0

teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json
@@ -0,0 +1,209 @@
+ {
+ "FuncName": "TD_MATRIX2IMAGE",
+ "FuncDescriptionShort": "TD_MATRIX2IMAGE converts a matrix to an image.",
+ "FuncDescriptionLong": [
+ "The conversion produces an image using color maps.",
+ "The color image produced by TD_MATRIX2IMAGE is limited to 8-bit color depth.",
+ "In previous versions, TD_PLOT with MESH option was used to convert a matrix to an image. TD_PLOT is limited to a single payload.",
+ "TD_MATRIX2IMAGE can combine three payloads to create RGB color images."
+ ],
+ "FunctionVersion": "...",
+ "FunctionCategory": "General Utility",
+ "JSONVersion": "1",
+ "FuncRName": "td_Matrix_2_Image",
+ "MaxInputFiles": 1,
+ "Input": [
+ {
+ "Type": "MATRIX",
+ "Description": [
+ "The TD_MATRIX2IMAGE function takes matrixes or ART as input.",
+ "These series may have real numbers or multivariate reals (vector of real numbers) as their individual elements.",
+ "These logical series can be regular or irregular.",
+ "Their indexing can be based on integer or float data types."
+ ],
+ "LangName": "data or object or newdata or ... --> Langauges team can work with UAF team to come up with this field",
+ "Optional": false
+ }
+ ],
+ "Output": [
+ {
+ "Type": "ART",
+ "PrimaryLayer": true,
+ "LayerName": "ARTPRIMARY",
+ "ResultTableColumnTypes": [
+ "big_integer",
+ "big_integer",
+ "blob"
+ ],
+ "Description": [
+ "The TD_MATRIX2IMAGE function returns a primary result set without additional layers."
+ ],
+ "LangName": "data or object or newdata or ... --> Langauges team can work with UAF team to come up with this field"
+ }
+ ],
+ "IsPlottable": true,
+ "Params": [
+ {
+ "Name": "IMAGE",
+ "Type": "string",
+ "Optional": true,
+ "PermittedValues": [
+ "PNG",
+ "JPG"
+ ],
+ "DefaultValue": "PNG",
+ "Description": [
+ "[Optional] The image output format. It can be PNG or JPG. The default is PNG."
+ ]
+ },
+ {
+ "Name": "TYPE",
+ "Type": "string",
+ "Optional": true,
+ "PermittedValues": [
+ "GRAY",
+ "RGB",
+ "COLORMAP"
+ ],
+ "Description": [
+ "[Optional] The type of the image.",
+ "It can be GRAY, RGB or COLORMAP.",
+ "GRAY has a single payload, the output image is a gray scale image.",
+ "RGB has three payloads corresponding to RED, GREEN and BLUE channels, the output image is a RGB color image.",
+ "COLORMAP has a single payload. The output image is a RGB color image.",
+ "If there is a single payload, then the default type is GRAY.",
+ "If there are three payloads, then the default type is RGB."
+ ]
+ },
+ {
+ "Name": "COLORMAP",
+ "Type": "string",
+ "Optional": true,
+ "DefaultValue": "viridis",
+ "Description": [
+ "[Optional] The colormap to use when the TYPE is COLORMAP.",
+ "The values corresponds to the COLORMAP of TD_PLOT.",
+ "If not specified, then the default colormap is viridis.",
+ "The value is case-sensitive."
+ ]
+ },
+ {
+ "Name": "RANGE",
+ "Type": "list",
+ "ListType": "double",
+ "ListSize": 2,
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "[Optional] The range of the single payload value to be scaled.",
+ "By default, the MIN and MAX values of the payload are used as the range.",
+ "Used when TYPE is GRAY or COLORMAP."
+ ]
+ },
+ {
+ "Name": "RED",
+ "Type": "list",
+ "ListType": "double",
+ "ListSize": 2,
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "[Optional] The range of the first payload value.",
+ "By default, the MIN and MAX values of the payload are used as the range.",
+ "It is only used when TYPE is RGB."
+ ]
+ },
+ {
+ "Name": "GREEN",
+ "Type": "list",
+ "ListType": "double",
+ "ListSize": 2,
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "[Optional] The range of the second payload value.",
+ "By default, the MIN and MAX values of the payload are used as the range.",
+ "It is only used when TYPE is RGB."
+ ]
+ },
+ {
+ "Name": "BLUE",
+ "Type": "list",
+ "ListType": "double",
+ "ListSize": 2,
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "[Optional] The range of the third payload value.",
+ "By default, the MIN and MAX values of the payload are used as the range.",
+ "It is only used when TYPE is RGB."
+ ]
+ },
+ {
+ "Name": "FLIPX",
+ "Type": "integer",
+ "Optional": true,
+ "PermittedValues": [
+ 0,
+ 1
+ ],
+ "Description": [
+ "[Optional] Indicator to flip the image horizontally.",
+ "A value of 1 means flip the image.",
+ "A value of 0 means do not flip the image."
+ ],
+ "LangName": "flip_x"
+ },
+ {
+ "Name": "FLIPY",
+ "Type": "integer",
+ "Optional": true,
+ "PermittedValues": [
+ 0,
+ 1
+ ],
+ "Description": [
+ "[Optional] Indicator to flip the image vertically.",
+ "A value of 1 means flip the image.",
+ "A value of 0 means do not flip the image."
+ ],
+ "LangName": "flip_y"
+ }
+ ],
+ "InputFmt": [
+ {
+ "Name": "INPUT_MODE",
+ "Type": "string",
+ "Optional": true,
+ "PermittedValues": [
+ "MANY2ONE",
+ "ONE2ONE",
+ "MATCH"
+ ],
+ "Description": [
+ "When there are two input series, then the INPUT_FMT specification is mandatory.",
+ "[Optional] The INPUT_MODE parameter has the following options:",
+ "ONE2ONE: Both the primary and secondary series specifications contain a series name which identifies the two series in the function.",
+ "MANY2ONE: The MANY specification is the primary series declaration. The secondary series specification contains a series name that identifies the single secondary series.",
+ "MATCH: Both series are defined by their respective SERIES_SPEC(INSTANCE_NAME()) declarations."
+ ],
+ "LangName": "input_fmt_input_mode"
+ }
+ ],
+ "OutputFmt": [
+ {
+ "Name": "INDEX_STYLE",
+ "Type": "string",
+ "Optional": true,
+ "DefaultValue": "NUMERICAL_SEQUENCE",
+ "PermittedValues": [
+ "NUMERICAL_SEQUENCE"
+ ],
+ "Description": [
+ "[Optional] Specify the INDEX_STYLE of the output format.",
+ "Option is NUMERICAL_SEQUENCE."
+ ],
+ "LangName": "output_fmt_index_style"
+ }
+ ]
+ }
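
The RANGE, RED, GREEN, and BLUE parameters above describe a linear rescaling of payload values into 8-bit channel intensities, defaulting to each payload's own MIN and MAX. A minimal NumPy sketch of that kind of scaling, independent of the Teradata implementation; the helper name scale_to_uint8 and the random matrices are illustrative only:

import numpy as np

def scale_to_uint8(values, value_range=None):
    # Rescale a payload (matrix of reals) into 0..255; by default the payload's
    # own MIN and MAX serve as the range, mirroring the RANGE/RED/GREEN/BLUE defaults.
    values = np.asarray(values, dtype=float)
    lo, hi = value_range if value_range is not None else (values.min(), values.max())
    if hi <= lo:
        return np.zeros(values.shape, dtype=np.uint8)
    scaled = np.clip((values - lo) / (hi - lo), 0.0, 1.0)
    return (scaled * 255).astype(np.uint8)

# TYPE GRAY uses one payload; TYPE RGB stacks three scaled payloads into channels.
red, green, blue = (scale_to_uint8(np.random.rand(64, 64)) for _ in range(3))
rgb_image = np.dstack([red, green, blue])   # shape (64, 64, 3), 8-bit color depth
flipped = np.fliplr(rgb_image)              # roughly what FLIPX(1) describes
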

teradataml/data/jsons/uaf/17.20/TD_PACF.json
@@ -2,7 +2,7 @@
  "FuncName": "TD_PACF",
  "FuncDescriptionShort": "Computes the Partial Auto Correlation function of a series",
  "FuncDescriptionLong": [
- "the TD_PACF, Partial Auto Correlation function, can be passed either a discrete series - time or spatial series (INPUT_TYPE(DATA_SERIES)) - as an input; or, alternatively, can be passed a series containing previously computed Auto Correlation coefficients - lag and magnitude (INPUT_TYPE(ACF)). The passed in logical-runtime series is permitted to have elements which are either univariate or multivariate reals. Each input series produces a result series, which is indexed on 'LAG', and containing univariate or multivariate real number magnitudes as it result series elements. This function provides the ability to generate 'on-demand' plots. The generated plots can be either univariate, multivariate, or composite in nature. The chosen implementation’s output and style specification closely match Matplotlib ( https://matplotlib.org/ ), an extremely popular Python plotting library. The primary purpose for this decision is to jump start existing Matplotlib users into TD_PLOT usage. However, the Teradata implementation is a completely independent and separate one based on C/C++."
+ "the TD_PACF, Partial Auto Correlation function, can be passed either a discrete series - time or spatial series (INPUT_TYPE(DATA_SERIES)) - as an input; or, alternatively, can be passed a series containing previously computed Auto Correlation coefficients - lag and magnitude (INPUT_TYPE(ACF)). The passed in logical-runtime series is permitted to have elements which are either univariate or multivariate reals. Each input series produces a result series, which is indexed on 'LAG', and containing univariate or multivariate real number magnitudes as it result series elements."
  ],
  "FunctionVersion": "...",
  "FunctionCategory": "Model Preparation and Parameter Estimation",
@@ -111,4 +111,4 @@
  ],
  "InputFmt": false,
  "OutputFmt": false
- }
+ }
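
For readers unfamiliar with the quantity described here, the partial autocorrelation of a series indexed by lag can be previewed locally with statsmodels as a rough stand-in for TD_PACF; this is not the Teradata implementation, and the sample series is made up:

import numpy as np
from statsmodels.tsa.stattools import pacf

rng = np.random.default_rng(0)
series = np.cumsum(rng.normal(size=200))   # toy random-walk series
coeffs = pacf(series, nlags=10)            # one coefficient per lag, lags 0..10
for lag, value in enumerate(coeffs):
    print(lag, round(float(value), 3))
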

teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json
@@ -59,7 +59,7 @@
  "Type": "float",
  "Optional": true,
  "LowerBound": 0,
- "LowerBoundType": "INCLUSIVE",
+ "LowerBoundType": "EXCLUSIVE",
  "Description": [
  "Floating point constant representing the sample rate, in hertz. A value of 10000.0 means that the sample points were obtained by sampling at a rate of 10,000 hertz."
  ]
@@ -87,13 +87,13 @@
  "FOURIER",
  "INCRFOURIER"
  ],
- "Description": [
+ "Description": [
  "ALGORITHM('AUTOCOV') - Use the Fourier Cosine of the Auto covariance approach to calculate power spectrum",
  "ALGORITHM('AUTOCORR') - Use the Fourier Cosine of the Auto correlation approach to calculate power spectrum",
  "ALGORITHM('FOURIER') - Use the Fourier Transform approach to calculate power spectrum",
  "ALGORITHM('INCRFOURIER') - Use the Incremental Fourier Transform approach to calculate power spectrum"
  ]
- },
+ },
  {
  "Name": "INCRFOURIER_PARAM",
  "Type": "record",
@@ -146,7 +146,7 @@
  "PARZEN",
  "WELCH"
  ],
- "Description": [
+ "Description": [
  "WINDOW('NONE') - Do not apply a smoothing window. Theoretically this translates into the application of a square wave window, which has a magnitude of '1.0' for the whole duration of the window.",
  "WINDOW_NAME('TUKEY') - Apply a Tukey smoothing window with the supplied alpha value",
  "WINDOW_NAME ('BARTLETT') - Apply a Bartlett smoothing window",
@@ -172,4 +172,4 @@
  ],
  "InputFmt": false,
  "OutputFmt": false
- }
+ }
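
The ALGORITHM('FOURIER') option described above derives the power spectrum from the Fourier transform of the series, and the EXCLUSIVE lower bound introduced in this release means the sample rate must now be strictly greater than zero. A rough NumPy stand-in for that approach, not the Teradata implementation, and the normalization here is only one common convention:

import numpy as np

def power_spectrum_fourier(x, freq_hz):
    # ALGORITHM('FOURIER')-style estimate: squared magnitude of the DFT.
    if freq_hz <= 0:
        raise ValueError("sample rate must be > 0 (exclusive lower bound)")
    x = np.asarray(x, dtype=float)
    spectrum = np.abs(np.fft.rfft(x)) ** 2 / len(x)
    freqs = np.fft.rfftfreq(len(x), d=1.0 / freq_hz)
    return freqs, spectrum
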

teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json
@@ -36,60 +36,64 @@
  ],
  "IsPlottable": false,
  "Params": [
- {
- "Name": "TIMECODE",
+ {
+ "Name": "SEQUENCE",
  "Type": "record",
- "Optional": false,
+ "Optional": true,
  "CheckDuplicates": true,
- "LangName": "...",
  "Description": [
- "Describes the result series that this function is to produce. At least one of TIMECODE or SEQUENCE must be present, and must correspond correctly to the index data type of the input series. They are mutually exclusive."
+ "Describes the result series that this function is to produce.",
+ "At least one of TIMECODE or SEQUENCE must be present.",
+ "Input series must contain an index data type corresponding to SEQUENCE (real, integer, etc.).",
+ "If SEQUENCE parameter exists then TIMECODE will not work, the two are mutually exclusive parameters."
  ],
- "NestedParams":
- [
+ "LangName": "...",
+ "NestedParams": [
  {
  "Name": "START_VALUE",
- "Type": "timestampwz",
+ "Type": "float",
  "Optional": true,
  "Description": [
- "Specifies the first sampling index to interpolate."
+ "Starting value - this is the first sampling index that the resample function is to interpolate"
  ]
  },
  {
  "Name": "DURATION",
- "Type": "string",
+ "Type": "float",
  "Optional": true,
  "Description": [
- "Specifies the sampling interval associated with the result series."
+ "Sampling duration - sampling interval associated with the result series."
  ]
  }
  ]
  },
  {
- "Name": "SEQUENCE",
+ "Name": "TIMECODE",
  "Type": "record",
- "Optional": false,
+ "Optional": true,
  "CheckDuplicates": true,
- "LangName": "...",
  "Description": [
- "Describes the result series that this function is to produce. At least one of TIMECODE or SEQUENCE must be present, and must correspond correctly to the index data type of the input series. They are mutually exclusive."
+ "Describes the result series that this function is to produce.",
+ "At least one of TIMECODE or SEQUENCE must be present.",
+ "Input series must contain an index data type corresponding to TIMECODE (timstampwz, time-duration, etc.).",
+ "If TIMECODE parameter exists then SEQUENCE will not work, the two are mutually exclusive parameters."
  ],
- "NestedParams" :
- [
+ "LangName": "...",
+ "NestedParams": [
  {
- "Name" : "START_VALUE",
- "Type" : "float",
- "Optional" : true,
+ "Name": "START_VALUE",
+ "Type": "timestampwz",
+ "Optional": true,
  "Description": [
- "Specifies the first sampling index to interpolate."
+ "Starting value - this is the first sampling index that the resample function is to interpolate"
  ]
  },
  {
- "Name" : "DURATION",
- "Type" : "float",
- "Optional" : true,
+ "Name": "DURATION",
+ "Type": "string",
+ "Optional": true,
  "Description": [
- "Specifies the sampling interval associated with the result series."
+ "Sampling duration - sampling interval associated with the result series."
  ]
  }
  ]
@@ -106,7 +110,7 @@
  "SPLINE"
  ],
  "Description": [
- "enum( NONE, LINEAR, LAG, LEAD, WEIGHTED, SPLINE ) : enumerated value selecting one of the valid supported interpolation strategies."
+ "enum( LINEAR, LAG, LEAD, WEIGHTED, SPLINE ) : enumerated value selecting one of the valid supported interpolation strategies."
  ]
  },
  {
@@ -119,7 +123,8 @@
  "UpperBoundType": "INCLUSIVE",
  "AllowNaN": false,
  "Description": [
- "Only valid when INTERPOLATE(WEIGHTED) has been selected. The interpolated value is calculated as: Y_t = Y_{t_LEFT} * (1 - WEIGHT) + (Y-{t_RIGHT} * WEIGHT)"
+ "Only required when INTERPOLATE(WEIGHTED) has been selected, otherwise invalid parameter.",
+ "The interpolated value is calculated as: Y_t = Y_{t_LEFT} * (1 - WEIGHT) + (Y-{t_RIGHT} * WEIGHT)"
  ]
  },
  {
@@ -170,5 +175,20 @@
  }
  ],
  "InputFmt": false,
- "OutputFmt": false
+ "OutputFmt": [
+ {
+ "Name": "INDEX_STYLE",
+ "Type": "string",
+ "Optional": true,
+ "DefaultValue": "FLOW_THROUGH",
+ "PermittedValues": [
+ "NUMERICAL_SEQUENCE",
+ "FLOW_THROUGH"
+ ],
+ "Description": [
+ "Specifies the INDEX_STYLE of the output format."
+ ],
+ "LangName": "output_fmt_index_style"
+ }
+ ]
  }
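
The WEIGHT description above spells out the interpolation rule Y_t = Y_{t_LEFT} * (1 - WEIGHT) + Y_{t_RIGHT} * WEIGHT. A one-line worked sketch of that arithmetic in plain Python; the function name is illustrative:

def weighted_interpolate(y_left, y_right, weight):
    # INTERPOLATE(WEIGHTED): weight 0 reproduces the left sample, weight 1 the right.
    return y_left * (1.0 - weight) + y_right * weight

print(weighted_interpolate(10.0, 20.0, 0.25))   # 12.5
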

teradataml/data/jsons/uaf/17.20/TD_SAX.json
@@ -0,0 +1,208 @@
+ {
+ "FuncName": "TD_SAX",
+ "FuncDescriptionShort": "TD_SAX transform a time series into sequence of symbols.",
+ "FuncDescriptionLong": [
+ "TD_SAX uses Piecewise Aggregate Approximation (PAA) and transform a timeseries into sequence of symbols.",
+ "The symbols can be characters, string, and bitmap."
+ ],
+ "FunctionVersion": "...",
+ "JSONVersion": "1",
+ "FunctionCategory": "Digital Signal Processing",
+ "FuncRName": "td_SAX",
+ "MaxInputFiles": 1,
+ "Input": [
+ {
+ "Type": "SERIES",
+ "Description": [
+ "Time series whose value can be REAL or MULTIVAR_REAL."
+ ],
+ "LangName": "data or object or newdata or ... --> Langauges team can work with UAF team to come up with this field",
+ "Optional": false
+ }
+ ],
+ "Output": [
+ {
+ "Type": "ART",
+ "PrimaryLayer": true,
+ "LayerName": "ARTPRIMARY",
+ "ResultTableColumnTypes": [
+ "<varies>",
+ "float",
+ "float"
+ ],
+ "Description": [
+ "The primary analytical result set contains the SAX representation of the input time series.",
+ "The primary result set is accessed using a SELECT statement"
+ ],
+ "LangName": "data or object or newdata or ... --> Langauges team can work with UAF team to come up with this field"
+ }
+ ],
+ "IsPlottable": true,
+ "Params": [
+ {
+ "Name": "WINDOW_TYPE",
+ "Type": "string",
+ "Optional": true,
+ "PermittedValues": [
+ "GLOBAL",
+ "SLIDING"
+ ],
+ "Description": [
+ "[Optional] If not specified, the GLOBAL type is the default.",
+ "Specifies the window type used in the SAX transformation."
+ ]
+ },
+ {
+ "Name": "OUTPUT_TYPE",
+ "Type": "string",
+ "Optional": true,
+ "PermittedValues": [
+ "STRING",
+ "BITMAP",
+ "O_CHARS"
+ ],
+ "Description": [
+ "[Optional] If not specified, the STRING type is the default.",
+ "The output format of the result can be string, char or bitmap."
+ ]
+ },
+ {
+ "Name": "MEAN",
+ "Type": "list",
+ "ListType": "float",
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "The global mean values that the function uses to calculate the SAX code for every partition",
+ "If not specified, the function calculates the mean values for every partition.",
+ "If the mean specifies one value and multiple payloads, then the specified value applies to every payload.",
+ "If the mean specifies multiple values, then the specified values apply to the corresponding payloads."
+ ]
+ },
+ {
+ "Name": "STD_DEV",
+ "Type": "list",
+ "ListType": "float",
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "The global mean values that the function uses to calculate the SAX code for every partition",
+ "If not specified, the function calculates the standard deviation values for every partition.",
+ "If the standard deviation specifies one value and multiple payloads, then the specified value applies to every payload.",
+ "If the standard deviation specifies multiple values, then the specified values apply to the corresponding payloads."
+ ]
+ },
+ {
+ "Name": "WINDOW_SIZE",
+ "Type": "integer",
+ "Optional": true,
+ "LowerBound": 1,
+ "LowerBoundType": "INCLUSIVE",
+ "Description": [
+ "The size of sliding window used in the SAX transformation and maximum value is 64000.",
+ "The window size is required for SLIDING window type."
+ ]
+ },
+ {
+ "Name": "OUTPUT_FREQUENCY",
+ "Type": "integer",
+ "Optional": true,
+ "LowerBound": 1,
+ "DefaultValue": 1,
+ "LowerBoundType": "INCLUSIVE",
+ "Description": [
+ "The number of data points that the window slides between successive outputs.",
+ "This is valid for SLIDING window type."
+ ]
+ },
+ {
+ "Name": "POINTS_PER_SYMBOL",
+ "Type": "integer",
+ "Optional": true,
+ "LowerBound": 1,
+ "DefaultValue": 1,
+ "LowerBoundType": "INCLUSIVE",
+ "Description": [
+ "The number of data points to be converted to one SAX symbol.",
+ "This is valid for GLOBAL window type."
+ ]
+ },
+ {
+ "Name": "SYMBOLS_PER_WINDOW ",
+ "Type": "integer",
+ "Optional": true,
+ "LowerBound": 1,
+ "DefaultValue": 1,
+ "LowerBoundType": "INCLUSIVE",
+ "Description": [
+ "The number of SAX symbols to be generated for each window.",
+ "This is valid for SLIDING window type."
+ ]
+ },
+ {
+ "Name": "ALPHABET_SIZE",
+ "Type": "integer",
+ "Optional": true,
+ "LowerBound": 2,
+ "DefaultValue": 4,
+ "LowerBoundType": "INCLUSIVE",
+ "Description": [
+ "The number of symbols in the SAX alphabet.",
+ "The alphabet consists letters a through t.",
+ "The alphabet size must be [2,20]."
+ ]
+ },
+ {
+ "Name": "BITMAP_LEVEL",
+ "Type": "integer",
+ "Optional": true,
+ "LowerBound": 1,
+ "DefaultValue": 2,
+ "LowerBoundType": "INCLUSIVE",
+ "Description": [
+ "The number of consecutive symbols to be converted to one symbol on a bitmap.",
+ "For bitmap level 1, the bitmap contains the symbols 'a', 'b', 'c', and so on.",
+ "For bitmap level 2, the bitmap contains the symbols 'aa', 'ab', 'ac', and so on.",
+ "The range must be [1,4]."
+ ]
+ },
+ {
+ "Name": "CODE_STATS",
+ "Type": "integer",
+ "Optional": true,
+ "DefaultValue": 0,
+ "PermittedValues": [
+ 0, 1
+ ],
+ "Description": [
+ "Indicator that the function prints the mean and standard deviation."
+ ]
+ },
+ {
+ "Name": "BREAKPOINTS ",
+ "Type": "list",
+ "ListType": "float",
+ "Optional": true,
+ "CheckDuplicates": false,
+ "Description": [
+ "The breakpoints to form the SAX code based on input data."
+ ]
+ }
+ ],
+ "InputFmt": false,
+ "OutputFmt": [
+ {
+ "Name": "INDEX_STYLE",
+ "Type": "string",
+ "Optional": true,
+ "DefaultValue": "NUMERICAL_SEQUENCE",
+ "PermittedValues": [
+ "NUMERICAL_SEQUENCE"
+ ],
+ "Description": [
+ "Specifies the INDEX_STYLE of the output format."
+ ],
+ "LangName": "output_fmt_index_style"
+ }
+ ]
+ }
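
The TD_SAX description above (PAA followed by symbol assignment, with MEAN and STD_DEV defaulting to per-partition statistics) can be illustrated with a small self-contained sketch. This is not the Teradata implementation: the breakpoints are the usual equiprobable Gaussian quantiles and the sample series is made up.

from statistics import NormalDist, mean, pstdev

def sax_transform(series, points_per_symbol=4, alphabet_size=4):
    # Z-normalize with the series' own mean and standard deviation (the role the
    # MEAN and STD_DEV parameters play when they are not supplied).
    mu, sigma = mean(series), pstdev(series) or 1.0
    z = [(x - mu) / sigma for x in series]
    # Piecewise Aggregate Approximation: average each block of POINTS_PER_SYMBOL points.
    paa = [mean(z[i:i + points_per_symbol]) for i in range(0, len(z), points_per_symbol)]
    # Equiprobable Gaussian breakpoints; symbols run 'a', 'b', 'c', ...
    cuts = [NormalDist().inv_cdf(i / alphabet_size) for i in range(1, alphabet_size)]
    return "".join(chr(ord("a") + sum(v > c for c in cuts)) for v in paa)

print(sax_transform([0, 0, 0, 0, 10, 10, 10, 10, 5, 5, 5, 5]))   # 'adb'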