teradataml 20.0.0.0__py3-none-any.whl → 20.0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (263)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +183 -0
  4. teradataml/__init__.py +6 -3
  5. teradataml/_version.py +2 -2
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +275 -40
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +1 -0
  11. teradataml/analytics/json_parser/utils.py +17 -21
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +10 -2
  15. teradataml/analytics/table_operator/__init__.py +3 -2
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +62 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1553 -319
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +276 -319
  22. teradataml/automl/data_transformation.py +163 -81
  23. teradataml/automl/feature_engineering.py +402 -239
  24. teradataml/automl/feature_exploration.py +9 -2
  25. teradataml/automl/model_evaluation.py +48 -51
  26. teradataml/automl/model_training.py +291 -189
  27. teradataml/catalog/byom.py +8 -8
  28. teradataml/catalog/model_cataloging_utils.py +1 -1
  29. teradataml/clients/auth_client.py +133 -0
  30. teradataml/clients/pkce_client.py +1 -1
  31. teradataml/common/aed_utils.py +3 -2
  32. teradataml/common/constants.py +48 -6
  33. teradataml/common/deprecations.py +13 -7
  34. teradataml/common/garbagecollector.py +156 -120
  35. teradataml/common/messagecodes.py +6 -1
  36. teradataml/common/messages.py +3 -1
  37. teradataml/common/sqlbundle.py +1 -1
  38. teradataml/common/utils.py +103 -11
  39. teradataml/common/wrapper_utils.py +1 -1
  40. teradataml/context/context.py +121 -31
  41. teradataml/data/advertising.csv +201 -0
  42. teradataml/data/bank_marketing.csv +11163 -0
  43. teradataml/data/bike_sharing.csv +732 -0
  44. teradataml/data/boston2cols.csv +721 -0
  45. teradataml/data/breast_cancer.csv +570 -0
  46. teradataml/data/complaints_test_tokenized.csv +353 -0
  47. teradataml/data/complaints_tokens_model.csv +348 -0
  48. teradataml/data/covid_confirm_sd.csv +83 -0
  49. teradataml/data/customer_segmentation_test.csv +2628 -0
  50. teradataml/data/customer_segmentation_train.csv +8069 -0
  51. teradataml/data/dataframe_example.json +10 -0
  52. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +3 -1
  53. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +6 -0
  54. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +5 -1
  55. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +61 -1
  56. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  57. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +2 -0
  58. teradataml/data/docs/sqle/docs_17_20/FTest.py +105 -26
  59. teradataml/data/docs/sqle/docs_17_20/GLM.py +162 -1
  60. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +5 -3
  61. teradataml/data/docs/sqle/docs_17_20/KMeans.py +48 -1
  62. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  63. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +3 -2
  64. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +5 -0
  65. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +6 -0
  66. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  67. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  68. teradataml/data/docs/sqle/docs_17_20/ROC.py +3 -2
  69. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +13 -2
  70. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +119 -1
  71. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +93 -1
  72. teradataml/data/docs/sqle/docs_17_20/Shap.py +197 -0
  73. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +163 -1
  74. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  75. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  76. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  77. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +12 -4
  78. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +7 -1
  79. teradataml/data/docs/sqle/docs_17_20/ZTest.py +72 -7
  80. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  81. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  82. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  83. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  84. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  85. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  86. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  87. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  88. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  89. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  90. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  91. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  92. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  93. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  94. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  95. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  96. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  97. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  98. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  99. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  100. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  101. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  102. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  103. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  104. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  105. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  106. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  107. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  108. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  109. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  110. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  111. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  112. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  113. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  114. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  115. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  116. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  117. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  118. teradataml/data/dwt2d_dataTable.csv +65 -0
  119. teradataml/data/dwt_dataTable.csv +8 -0
  120. teradataml/data/dwt_filterTable.csv +3 -0
  121. teradataml/data/finance_data4.csv +13 -0
  122. teradataml/data/glm_example.json +28 -1
  123. teradataml/data/grocery_transaction.csv +19 -0
  124. teradataml/data/housing_train_segment.csv +201 -0
  125. teradataml/data/idwt2d_dataTable.csv +5 -0
  126. teradataml/data/idwt_dataTable.csv +8 -0
  127. teradataml/data/idwt_filterTable.csv +3 -0
  128. teradataml/data/insect2Cols.csv +61 -0
  129. teradataml/data/interval_data.csv +5 -0
  130. teradataml/data/jsons/paired_functions.json +14 -0
  131. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +99 -27
  132. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  133. teradataml/data/jsons/sqle/17.20/TD_FTest.json +166 -83
  134. teradataml/data/jsons/sqle/17.20/TD_GLM.json +90 -14
  135. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +48 -5
  136. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +5 -3
  137. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +31 -11
  138. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  139. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  140. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +3 -2
  141. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  142. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  143. teradataml/data/jsons/sqle/17.20/TD_ROC.json +2 -1
  144. teradataml/data/jsons/sqle/17.20/TD_SVM.json +16 -16
  145. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +19 -1
  146. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +168 -15
  147. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +50 -1
  148. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  149. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  150. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  151. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +25 -7
  152. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +17 -4
  153. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +157 -80
  154. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  155. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  156. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  157. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  158. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  159. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  160. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  161. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  162. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  163. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  164. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  165. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  166. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  167. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  168. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  169. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  170. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  171. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  172. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  173. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  174. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  175. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  176. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  177. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  178. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  179. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  180. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  181. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  182. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  183. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  184. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  185. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  186. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  187. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  188. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  189. teradataml/data/jsons/uaf/17.20/TD_SAX.json +208 -0
  190. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  191. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  192. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  193. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  194. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  195. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +400 -0
  196. teradataml/data/kmeans_example.json +5 -0
  197. teradataml/data/kmeans_table.csv +10 -0
  198. teradataml/data/load_example_data.py +8 -2
  199. teradataml/data/naivebayestextclassifier_example.json +1 -1
  200. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  201. teradataml/data/onehot_encoder_train.csv +4 -0
  202. teradataml/data/openml_example.json +29 -0
  203. teradataml/data/peppers.png +0 -0
  204. teradataml/data/real_values.csv +14 -0
  205. teradataml/data/sax_example.json +8 -0
  206. teradataml/data/scale_attributes.csv +3 -0
  207. teradataml/data/scale_example.json +52 -1
  208. teradataml/data/scale_input_part_sparse.csv +31 -0
  209. teradataml/data/scale_input_partitioned.csv +16 -0
  210. teradataml/data/scale_input_sparse.csv +11 -0
  211. teradataml/data/scale_parameters.csv +3 -0
  212. teradataml/data/scripts/deploy_script.py +21 -2
  213. teradataml/data/scripts/sklearn/sklearn_fit.py +40 -37
  214. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +22 -30
  215. teradataml/data/scripts/sklearn/sklearn_function.template +42 -24
  216. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +23 -33
  217. teradataml/data/scripts/sklearn/sklearn_neighbors.py +19 -28
  218. teradataml/data/scripts/sklearn/sklearn_score.py +32 -32
  219. teradataml/data/scripts/sklearn/sklearn_transform.py +85 -42
  220. teradataml/data/star_pivot.csv +8 -0
  221. teradataml/data/templates/open_source_ml.json +2 -1
  222. teradataml/data/teradataml_example.json +97 -1
  223. teradataml/data/timestamp_data.csv +4 -0
  224. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  225. teradataml/data/uaf_example.json +55 -1
  226. teradataml/data/unpivot_example.json +15 -0
  227. teradataml/data/url_data.csv +9 -0
  228. teradataml/data/windowdfft.csv +16 -0
  229. teradataml/data/ztest_example.json +16 -0
  230. teradataml/dataframe/copy_to.py +9 -4
  231. teradataml/dataframe/data_transfer.py +125 -64
  232. teradataml/dataframe/dataframe.py +575 -57
  233. teradataml/dataframe/dataframe_utils.py +47 -9
  234. teradataml/dataframe/fastload.py +273 -90
  235. teradataml/dataframe/functions.py +339 -0
  236. teradataml/dataframe/row.py +160 -0
  237. teradataml/dataframe/setop.py +2 -2
  238. teradataml/dataframe/sql.py +740 -18
  239. teradataml/dataframe/window.py +1 -1
  240. teradataml/dbutils/dbutils.py +324 -18
  241. teradataml/geospatial/geodataframe.py +1 -1
  242. teradataml/geospatial/geodataframecolumn.py +1 -1
  243. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  244. teradataml/lib/aed_0_1.dll +0 -0
  245. teradataml/opensource/sklearn/_sklearn_wrapper.py +254 -122
  246. teradataml/options/__init__.py +16 -5
  247. teradataml/options/configure.py +39 -6
  248. teradataml/options/display.py +2 -2
  249. teradataml/plot/axis.py +4 -4
  250. teradataml/scriptmgmt/UserEnv.py +26 -19
  251. teradataml/scriptmgmt/lls_utils.py +120 -16
  252. teradataml/table_operators/Script.py +4 -5
  253. teradataml/table_operators/TableOperator.py +160 -26
  254. teradataml/table_operators/table_operator_util.py +88 -41
  255. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  256. teradataml/telemetry_utils/__init__.py +0 -0
  257. teradataml/telemetry_utils/queryband.py +52 -0
  258. teradataml/utils/validators.py +41 -3
  259. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/METADATA +191 -6
  260. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/RECORD +263 -185
  261. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/WHEEL +0 -0
  262. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/top_level.txt +0 -0
  263. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/zip-safe +0 -0
@@ -116,7 +116,7 @@
  "isRequired": false,
  "rDescription": "Specify the amount of regularization to be added. The higher the value, the stronger the regularization. It is also used to compute the learning rate when the learning rate is set to ‘optimal’. Must be a non-negative float value. A value of 0 means no regularization.",
  "description": "Specify the amount of regularization to be added. The higher the value, the stronger the regularization. It is also used to compute the learning rate when the learning rate is set to ‘optimal’. Must be a non-negative float value. A value of 0 means no regularization.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "lambda1",
  "useInR": true,
@@ -134,7 +134,7 @@
  "isRequired": false,
  "rDescription": "Specify the Elasticnet parameter for penalty computation. It only becomes effective if RegularizationLambda > 0. The value represents the contribution ratio of L1 in the penalty. A value of 1.0 indicates L1 (LASSO) only, a value of 0 indicates L2 (Ridge) only, and a value in between is a combination of L1 and L2. Default: 0.15 (15% L1, 85% L2). Must be a float value between 0 and 1.",
  "description": "Specify the Elasticnet parameter for penalty computation. It only becomes effective if RegularizationLambda > 0. The value represents the contribution ratio of L1 in the penalty. A value of 1.0 indicates L1 (LASSO) only, a value of 0 indicates L2 (Ridge) only, and a value in between is a combination of L1 and L2. Default: 0.15 (15% L1, 85% L2). Must be a float value between 0 and 1.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "alpha",
  "useInR": true,
@@ -170,7 +170,7 @@
  "isRequired": false,
  "rDescription": "Specify the stopping criteria in terms of loss function improvement. Applicable when IterNumNoChange is greater than 0. Value is a positive integer.",
  "description": "Specify the stopping criteria in terms of loss function improvement. Applicable when IterNumNoChange is greater than 0. Value is a positive integer.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "tolerance",
  "useInR": true,
@@ -191,9 +191,9 @@
  },
  {
  "permittedValues": [
- "CONSTANT",
- "OPTIMAL",
- "INVTIME",
+ "CONSTANT",
+ "OPTIMAL",
+ "INVTIME",
  "ADAPTIVE"
  ],
  "defaultValue": "OPTIMAL",
@@ -221,7 +221,7 @@
  "isRequired": false,
  "rDescription": "Specify the initial value of eta for the learning rate. For ‘constant’, this value is the learning rate for all iterations. ",
  "description": "Specify the initial value of eta for the learning rate. For ‘constant’, this value is the learning rate for all iterations. ",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "initial.eta",
  "useInR": true,
@@ -239,7 +239,7 @@
  "isRequired": false,
  "rDescription": "Specify the decay rate for the learning rate (invtime and adaptive).",
  "description": "Specify the decay rate for the learning rate (invtime and adaptive).",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "decay.rate",
  "useInR": true,
@@ -275,7 +275,7 @@
  "isRequired": false,
  "rDescription": "Specify the value to use for the momentum learning rate optimizer. Must be a non-negative float value between 0 and 1. A larger value indicates a higher momentum contribution. A value of 0 means the momentum optimizer is disabled. For a good momentum contribution, a value between 0.6-0.95 is recommended.",
  "description": "Specify the value to use for the momentum learning rate optimizer. Must be a non-negative float value between 0 and 1. A larger value indicates a higher momentum contribution. A value of 0 means the momentum optimizer is disabled. For a good momentum contribution, a value between 0.6-0.95 is recommended.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "momentum",
  "useInR": true,
@@ -0,0 +1,280 @@
+ {
+ "json_schema_major_version": "1",
+ "json_schema_minor_version": "1",
+ "json_content_version": "1",
+ "function_name": "TD_Pivoting",
+ "function_version": "1.0",
+ "function_alias_name": "TD_Pivoting",
+ "function_type": "fastpath",
+ "function_category": "Feature Engineering Transform",
+ "function_r_name": "aa.td.pivoting",
+ "short_description": "This function is used to pivot the data i.e. change the data from sparse format to dense format.",
+ "long_description": "This function is used to pivot the data i.e. change the data from sparse format to dense format.",
+ "input_tables": [
+ {
+ "requiredInputKind": [
+ "PartitionByKey"
+ ],
+ "isOrdered": false,
+ "partitionByOne": false,
+ "name": "InputTable",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the table containing the input data to be pivoted.",
+ "description": "Specifies the table containing the input data to be pivoted.",
+ "datatype": "TABLE_ALIAS",
+ "allowsLists": false,
+ "rName": "data",
+ "useInR": true,
+ "rOrderNum": 1
+ }
+ ],
+ "argument_clauses": [
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "PartitionColumns",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specify the name of the input table columns on which to partition the input.",
+ "description": "Specify the name of the input table columns on which to partition the input.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "partition.columns",
+ "useInR": true,
+ "rOrderNum": 2
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "TargetColumns",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specify the columns from the input table which contains the data for pivoting.",
+ "description": "Specify the columns from the input table which contains the data for pivoting.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "target.columns",
+ "useInR": true,
+ "rOrderNum": 3
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "Accumulate",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the input table columns to copy to the output table. By default, the function copies no input table columns to the output table.",
+ "description": "Specifies the input table columns to copy to the output table. By default, the function copies no input table columns to the output table.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "accumulate",
+ "useInR": true,
+ "rOrderNum": 4
+ },
+ {
+ "lowerBound": 1,
+ "upperBound": 2147483647,
+ "lowerBoundType": "INCLUSIVE",
+ "upperBoundType": "INCLUSIVE",
+ "allowNaN": false,
+ "name": "RowsPerPartition",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the integer value which will specify the maximum number of rows in the partition.",
+ "description": "Specify the integer value which will specify the maximum number of rows in the partition.",
+ "datatype": "INTEGER",
+ "allowsLists": false,
+ "rName": "rows.per.partition",
+ "useInR": true,
+ "rOrderNum": 5
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "PivotColumn",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the name of the input table column that contains the pivot keys.",
+ "description": "Specify the name of the input table column that contains the pivot keys.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "pivot.column",
+ "useInR": true,
+ "rOrderNum": 6
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "checkDuplicate": true,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "PivotKeys",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "If you specify the PivotColumn argument, then this argument specifies the names of the pivot keys.",
+ "description": "If you specify the PivotColumn argument, then this argument specifies the names of the pivot keys.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "pivot.keys",
+ "useInR": true,
+ "rOrderNum": 7
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "checkDuplicate": true,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "PivotKeysAlias",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "If you specify the PivotKeys argument, then this argument specifies the alias names of the pivot keys.",
+ "description": "If you specify the PivotKeys argument, then this argument specifies the alias names of the pivot keys.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "pivot.keys.alias",
+ "useInR": true,
+ "rOrderNum": 8
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "checkDuplicate": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "DefaultPivotValues",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify exactly one default value for each pivot_key . The nth default_pivot_value applies to the nth pivot_key.",
+ "description": "Specify exactly one default value for each pivot_key . The nth default_pivot_value applies to the nth pivot_key.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "default.pivot.values",
+ "useInR": true,
+ "rOrderNum": 9
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "checkDuplicate": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "Aggregation",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the aggregation for the target columns. Aggregation can be provided as one of the single value {CONCAT | UNIQUE_CONCAT | SUM | MIN | MAX | AVG} which will be applicable to all target columns or you can specify multiple values for multiple target columns in following format: ColumnName:{CONCAT|UNIQUE_CONCAT|SUM|MIN|MAX|AVG} [,...].",
+ "description": "Specify the aggregation for the target columns. Aggregation can be provided as one of the single value {CONCAT | UNIQUE_CONCAT | SUM | MIN | MAX | AVG} which will be applicable to all target columns or you can specify multiple values for multiple target columns in following format: ColumnName:{CONCAT|UNIQUE_CONCAT|SUM|MIN|MAX|AVG} [,...].",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "aggregation",
+ "useInR": true,
+ "rOrderNum": 10
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "checkDuplicate": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "Delimiters",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the delimiter to be used for concatenating the values of a target column. A delimiter is a single character string. You can specify a single delimiter values which will be applicable to all target columns or you can specify multiple delimiter values for multiple target columns in following format: ColumnName:single_char [,...].",
+ "description": "Specify the delimiter to be used for concatenating the values of a target column. A delimiter is a single character string. You can specify a single delimiter values which will be applicable to all target columns or you can specify multiple delimiter values for multiple target columns in following format: ColumnName:single_char [,...].",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "delimiters",
+ "useInR": true,
+ "rOrderNum": 11
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "checkDuplicate": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "CombinedColumnSizes",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify maximum size of the concatenated string. You can specify a single values which will be applicable to all target columns or you can specify multiple size value for multiple target columns in following format: ColumnName:size_value [,...].",
+ "description": "Specify maximum size of the concatenated string. You can specify a single values which will be applicable to all target columns or you can specify multiple size value for multiple target columns in following format: ColumnName:size_value [,...].",
+ "datatype": ["STRING", "INTEGER"],
+ "allowsLists": true,
+ "rName": "combined.column.sizes",
+ "useInR": true,
+ "rOrderNum": 12
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "TruncateColumns",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify columns from the target columns for which you want to truncate the concatenated string, if it is longer than the specified size. ",
+ "description": "Specify columns from the target columns for which you want to truncate the concatenated string, if it is longer than the specified size. ",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "truncate.columns",
+ "useInR": true,
+ "rOrderNum": 13
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": true,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+
+ "name": "OutputColumnNames",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the column name to be used for the output column. The nth column name value applies to the nth output column.",
+ "description": "Specify the column name to be used for the output column. The nth column name value applies to the nth output column.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "output.column.names",
+ "useInR": true,
+ "rOrderNum": 14
+ }
+ ]
+ }
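
TD_Pivoting.json is new in this release and pairs with the Pivoting.py documentation file added above, so a short usage sketch may help orient readers. This is a minimal sketch only: the Python argument names are assumed to mirror the JSON's rName fields (partition.columns becoming partition_columns, and so on), the column names are invented for illustration, and the bundled star_pivot.csv sample is assumed to already be loaded as a table; consult help(Pivoting) in 20.0.0.2 for the authoritative signature.

```python
# Minimal sketch of the new Pivoting function (sparse to dense layout), with
# argument names assumed from the JSON rName fields and illustrative columns.
from teradataml import create_context, DataFrame, Pivoting

create_context(host="<host>", username="<user>", password="<pass>")  # placeholder credentials

star = DataFrame("star_pivot")  # assumed to hold the bundled star_pivot.csv sample in key/value form
dense = Pivoting(data=star,
                 partition_columns="sn",            # assumed column names, illustration only
                 target_columns="value",
                 pivot_column="attribute",
                 pivot_keys=["price", "lotsize"],
                 aggregation="SUM")
print(dense.result)  # pivoted, dense-format output (result attribute assumed)
```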
@@ -118,10 +118,11 @@
  },
  {
  "permittedValues": [],
+ "defaultValue": "1",
  "isOutputColumn": false,
  "name": "PositiveLabel",
  "alternateNames": [],
- "isRequired": true,
+ "isRequired": false,
  "rDescription": "Specifies the label of the positive class.",
  "description": "Specify the label of the positive class.",
  "datatype": "STRING",
@@ -144,7 +144,7 @@
  "allowsLists": false,
  "rName": "epsilon",
  "useInR": true,
- "rOrderNum": 6
+ "rOrderNum": 5
  },
  {
  "defaultValue": 10,
@@ -162,7 +162,7 @@
  "allowsLists": false,
  "rName": "batch.size",
  "useInR": true,
- "rOrderNum": 5
+ "rOrderNum": 6
  },
  {
  "defaultValue": 0.02,
@@ -180,7 +180,7 @@
  "allowsLists": false,
  "rName": "lambda1",
  "useInR": true,
- "rOrderNum": 6
+ "rOrderNum": 7
  },
  {
  "defaultValue": 0.15,
@@ -198,7 +198,7 @@
  "allowsLists": false,
  "rName": "alpha",
  "useInR": true,
- "rOrderNum": 7
+ "rOrderNum": 8
  },
  {
  "defaultValue": 50,
@@ -216,7 +216,7 @@
  "allowsLists": false,
  "rName": "iter.num.no.change",
  "useInR": true,
- "rOrderNum": 8
+ "rOrderNum": 9
  },
  {
  "defaultValue": 0.001,
@@ -234,7 +234,7 @@
  "allowsLists": false,
  "rName": "tolerance",
  "useInR": true,
- "rOrderNum": 9
+ "rOrderNum": 10
  },
  {
  "defaultValue": true,
@@ -247,7 +247,7 @@
  "allowsLists": false,
  "rName": "intercept",
  "useInR": true,
- "rOrderNum": 10
+ "rOrderNum": 11
  },
  {
  "defaultValue": "0:1.0, 1:1.0",
@@ -261,7 +261,7 @@
  "allowsLists": false,
  "rName": "class.weights",
  "useInR": true,
- "rOrderNum": 11
+ "rOrderNum": 12
  },
  {
  "permittedValues": [
@@ -280,7 +280,7 @@
  "allowsLists": false,
  "rName": "learning.rate",
  "useInR": true,
- "rOrderNum": 12
+ "rOrderNum": 13
  },
  {
  "defaultValue": 0.05,
@@ -298,7 +298,7 @@
  "allowsLists": false,
  "rName": "initial.eta",
  "useInR": true,
- "rOrderNum": 13
+ "rOrderNum": 14
  },
  {
  "defaultValue": 0.25,
@@ -316,7 +316,7 @@
  "allowsLists": false,
  "rName": "decay.rate",
  "useInR": true,
- "rOrderNum": 14
+ "rOrderNum": 15
  },
  {
  "defaultValue": 5,
@@ -334,7 +334,7 @@
  "allowsLists": false,
  "rName": "decay.steps",
  "useInR": true,
- "rOrderNum": 15
+ "rOrderNum": 16
  },
  {
  "defaultValue": 0.0,
@@ -352,7 +352,7 @@
  "allowsLists": false,
  "rName": "momentum",
  "useInR": true,
- "rOrderNum": 16
+ "rOrderNum": 17
  },
  {
  "defaultValue": false,
@@ -365,7 +365,7 @@
  "allowsLists": false,
  "rName": "nesterov",
  "useInR": true,
- "rOrderNum": 17
+ "rOrderNum": 18
  },
  {
  "defaultValue": 0,
@@ -383,7 +383,7 @@
  "allowsLists": false,
  "rName": "local.sgd.iterations",
  "useInR": true,
- "rOrderNum": 18
+ "rOrderNum": 19
  }
  ]
- }
+ }
@@ -119,6 +119,24 @@
  "rName": "output.responses",
  "useInR": true,
  "rOrderNum": 6
+ },
+ {
+ "permittedValues": [
+ "Regression",
+ "Classification"
+ ],
+ "defaultValue": "Classification",
+ "isOutputColumn": false,
+ "name": "ModelType",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the type of the analysis. Acceptable values are Regression, Classification.",
+ "description": "Specify the type of the analysis. Acceptable values are Regression, Classification.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "model.type",
+ "useInR": true,
+ "rOrderNum": 7
  }
  ]
- }
+ }
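
The final hunk adds an optional ModelType argument (permitted values Regression and Classification, default Classification) to one of the predict-function JSONs. The viewer omits the file name, although the added-line count matches TD_SVMPredict.json (+19 -1) in the file list above, so the sketch below assumes SVMPredict is the consumer; the argument names are likewise assumptions taken from the JSON.

```python
# Hedged sketch: SVMPredict is assumed to be the function behind this JSON based
# only on the matching line counts above; verify against the 20.0.0.2 docs.
from teradataml import create_context, DataFrame, SVM, SVMPredict

create_context(host="<host>", username="<user>", password="<pass>")  # placeholder credentials

train = DataFrame("svm_train")  # illustrative table names and columns only
test = DataFrame("svm_test")

model = SVM(data=train, input_columns=["x1", "x2"], response_column="y")
scores = SVMPredict(object=model.result, newdata=test, id_column="id",
                    model_type="Regression")  # new optional switch; omitted, it defaults to "Classification"
print(scores.result)
```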