teradataml 20.0.0.1__py3-none-any.whl → 20.0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of teradataml might be problematic.

Files changed (240)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +306 -0
  4. teradataml/__init__.py +10 -3
  5. teradataml/_version.py +1 -1
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +299 -16
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +13 -3
  11. teradataml/analytics/json_parser/utils.py +13 -6
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +11 -2
  15. teradataml/analytics/table_operator/__init__.py +4 -3
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +66 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1502 -323
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +247 -307
  22. teradataml/automl/data_transformation.py +32 -12
  23. teradataml/automl/feature_engineering.py +325 -86
  24. teradataml/automl/model_evaluation.py +44 -35
  25. teradataml/automl/model_training.py +122 -153
  26. teradataml/catalog/byom.py +8 -8
  27. teradataml/clients/pkce_client.py +1 -1
  28. teradataml/common/__init__.py +2 -1
  29. teradataml/common/constants.py +72 -0
  30. teradataml/common/deprecations.py +13 -7
  31. teradataml/common/garbagecollector.py +152 -120
  32. teradataml/common/messagecodes.py +11 -2
  33. teradataml/common/messages.py +4 -1
  34. teradataml/common/sqlbundle.py +26 -4
  35. teradataml/common/utils.py +225 -14
  36. teradataml/common/wrapper_utils.py +1 -1
  37. teradataml/context/context.py +82 -2
  38. teradataml/data/SQL_Fundamentals.pdf +0 -0
  39. teradataml/data/complaints_test_tokenized.csv +353 -0
  40. teradataml/data/complaints_tokens_model.csv +348 -0
  41. teradataml/data/covid_confirm_sd.csv +83 -0
  42. teradataml/data/dataframe_example.json +27 -1
  43. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  44. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  45. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  46. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  47. teradataml/data/docs/sqle/docs_17_20/Shap.py +203 -0
  48. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  49. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  50. teradataml/data/docs/sqle/docs_17_20/TextParser.py +3 -3
  51. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  52. teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
  53. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  54. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  55. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  56. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  57. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  58. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  59. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  60. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  61. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  62. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  63. teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
  64. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  65. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  66. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  67. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  68. teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +18 -21
  69. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  70. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  71. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  72. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  73. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  74. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  75. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  76. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  77. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  78. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  79. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  80. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  81. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  82. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  83. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  84. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  85. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  86. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  87. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  88. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  89. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  90. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  91. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  92. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  93. teradataml/data/dwt2d_dataTable.csv +65 -0
  94. teradataml/data/dwt_dataTable.csv +8 -0
  95. teradataml/data/dwt_filterTable.csv +3 -0
  96. teradataml/data/finance_data4.csv +13 -0
  97. teradataml/data/grocery_transaction.csv +19 -0
  98. teradataml/data/idwt2d_dataTable.csv +5 -0
  99. teradataml/data/idwt_dataTable.csv +8 -0
  100. teradataml/data/idwt_filterTable.csv +3 -0
  101. teradataml/data/interval_data.csv +5 -0
  102. teradataml/data/jsons/paired_functions.json +14 -0
  103. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  104. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  105. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  106. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  107. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  108. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  109. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  110. teradataml/data/jsons/sqle/17.20/TD_TextParser.json +1 -1
  111. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  112. teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
  113. teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
  114. teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
  115. teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
  116. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  117. teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
  118. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  119. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  120. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  121. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  122. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  123. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  124. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  125. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  126. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  127. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  128. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  129. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  130. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  131. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  132. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  133. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  134. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  135. teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +10 -19
  136. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  137. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  138. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  139. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  140. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  141. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  142. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  143. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  144. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  145. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  146. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  147. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  148. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  149. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  150. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  151. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  152. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  153. teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
  154. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  155. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  156. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  157. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  158. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  159. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
  160. teradataml/data/load_example_data.py +8 -2
  161. teradataml/data/medical_readings.csv +101 -0
  162. teradataml/data/naivebayestextclassifier_example.json +1 -1
  163. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  164. teradataml/data/patient_profile.csv +101 -0
  165. teradataml/data/peppers.png +0 -0
  166. teradataml/data/real_values.csv +14 -0
  167. teradataml/data/sax_example.json +8 -0
  168. teradataml/data/scripts/deploy_script.py +1 -1
  169. teradataml/data/scripts/lightgbm/dataset.template +157 -0
  170. teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +247 -0
  171. teradataml/data/scripts/lightgbm/lightgbm_function.template +216 -0
  172. teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +159 -0
  173. teradataml/data/scripts/sklearn/sklearn_fit.py +194 -160
  174. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +136 -115
  175. teradataml/data/scripts/sklearn/sklearn_function.template +34 -16
  176. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +155 -137
  177. teradataml/data/scripts/sklearn/sklearn_neighbors.py +1 -1
  178. teradataml/data/scripts/sklearn/sklearn_score.py +12 -3
  179. teradataml/data/scripts/sklearn/sklearn_transform.py +162 -24
  180. teradataml/data/star_pivot.csv +8 -0
  181. teradataml/data/target_udt_data.csv +8 -0
  182. teradataml/data/templates/open_source_ml.json +3 -1
  183. teradataml/data/teradataml_example.json +20 -1
  184. teradataml/data/timestamp_data.csv +4 -0
  185. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  186. teradataml/data/uaf_example.json +55 -1
  187. teradataml/data/unpivot_example.json +15 -0
  188. teradataml/data/url_data.csv +9 -0
  189. teradataml/data/vectordistance_example.json +4 -0
  190. teradataml/data/windowdfft.csv +16 -0
  191. teradataml/dataframe/copy_to.py +1 -1
  192. teradataml/dataframe/data_transfer.py +5 -3
  193. teradataml/dataframe/dataframe.py +1002 -201
  194. teradataml/dataframe/fastload.py +3 -3
  195. teradataml/dataframe/functions.py +867 -0
  196. teradataml/dataframe/row.py +160 -0
  197. teradataml/dataframe/setop.py +2 -2
  198. teradataml/dataframe/sql.py +840 -33
  199. teradataml/dataframe/window.py +1 -1
  200. teradataml/dbutils/dbutils.py +878 -34
  201. teradataml/dbutils/filemgr.py +48 -1
  202. teradataml/geospatial/geodataframe.py +1 -1
  203. teradataml/geospatial/geodataframecolumn.py +1 -1
  204. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  205. teradataml/lib/aed_0_1.dll +0 -0
  206. teradataml/opensource/__init__.py +1 -1
  207. teradataml/opensource/{sklearn/_class.py → _class.py} +102 -17
  208. teradataml/opensource/_lightgbm.py +950 -0
  209. teradataml/opensource/{sklearn/_wrapper_utils.py → _wrapper_utils.py} +1 -2
  210. teradataml/opensource/{sklearn/constants.py → constants.py} +13 -10
  211. teradataml/opensource/sklearn/__init__.py +0 -1
  212. teradataml/opensource/sklearn/_sklearn_wrapper.py +1019 -574
  213. teradataml/options/__init__.py +9 -23
  214. teradataml/options/configure.py +42 -4
  215. teradataml/options/display.py +2 -2
  216. teradataml/plot/axis.py +4 -4
  217. teradataml/scriptmgmt/UserEnv.py +13 -9
  218. teradataml/scriptmgmt/lls_utils.py +77 -23
  219. teradataml/store/__init__.py +13 -0
  220. teradataml/store/feature_store/__init__.py +0 -0
  221. teradataml/store/feature_store/constants.py +291 -0
  222. teradataml/store/feature_store/feature_store.py +2223 -0
  223. teradataml/store/feature_store/models.py +1505 -0
  224. teradataml/store/vector_store/__init__.py +1586 -0
  225. teradataml/table_operators/Script.py +2 -2
  226. teradataml/table_operators/TableOperator.py +106 -20
  227. teradataml/table_operators/query_generator.py +3 -0
  228. teradataml/table_operators/table_operator_query_generator.py +3 -1
  229. teradataml/table_operators/table_operator_util.py +102 -56
  230. teradataml/table_operators/templates/dataframe_register.template +69 -0
  231. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  232. teradataml/telemetry_utils/__init__.py +0 -0
  233. teradataml/telemetry_utils/queryband.py +52 -0
  234. teradataml/utils/dtypes.py +4 -2
  235. teradataml/utils/validators.py +34 -2
  236. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/METADATA +311 -3
  237. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/RECORD +240 -157
  238. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/WHEEL +0 -0
  239. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/top_level.txt +0 -0
  240. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/zip-safe +0 -0
teradataml/data/jsons/sqle/17.20/TD_CFilter.json
@@ -0,0 +1,118 @@
+ {
+ "json_schema_major_version": "1",
+ "json_schema_minor_version": "1",
+ "json_content_version": "1",
+ "function_name": "TD_CFilter",
+ "function_version": "1.0",
+ "function_alias_name": "TD_CFilter",
+ "function_type": "fastpath",
+ "function_category": "Association Analysis",
+ "function_r_name": "aa.td.cfilter",
+ "short_description": "This function calculates several statistical measures of how likely each pair of items is to be purchased together.",
+ "long_description": "This function calculates several statistical measures of how likely each pair of items is to be purchased together.",
+ "input_tables": [
+ {
+ "requiredInputKind": [
+ "PartitionByAny"
+ ],
+ "isOrdered": false,
+ "partitionByOne": false,
+ "name": "InputTable",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the table containing the input data to filter.",
+ "description": "Specifies the table containing the input data to filter.",
+ "datatype": "TABLE_ALIAS",
+ "allowsLists": false,
+ "rName": "data",
+ "useInR": true,
+ "rOrderNum": 1
+ }
+ ],
+ "argument_clauses": [
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "TargetColumn",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specify the column from the input table which contains the data for filter.",
+ "description": "Specify the column from the input table which contains the data for filter.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "target.column",
+ "useInR": true,
+ "rOrderNum": 2
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "TransactionIDColumns",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the transactionID column to define groups of items listed in the input columns that are purchased together.",
+ "description": "Specifies the transactionID column to define groups of items listed in the input columns that are purchased together.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "transaction.id.columns",
+ "useInR": true,
+ "rOrderNum": 3
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "PartitionColumns",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the name of the input table columns on which to partition the input.",
+ "description": "Specify the name of the input table columns on which to partition the input.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "partition.columns",
+ "useInR": true,
+ "rOrderNum": 4
+ },
+ {
+ "defaultValue": 100,
+ "lowerBound": 0,
+ "upperBound": 2147483647,
+ "lowerBoundType": "INCLUSIVE",
+ "upperBoundType": "INCLUSIVE",
+ "allowNaN": false,
+ "name": "MaxDistinctItems",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the maximum size of the item set. The default value is 100.",
+ "description": "Specifies the maximum size of the item set. The default value is 100.",
+ "datatype": "INTEGER",
+ "allowsLists": false,
+ "rName": "max.distinct.items",
+ "useInR": true,
+ "rOrderNum": 5
+ }
+ ]
+ }
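
The TD_CFilter metadata above drives the new CFilter function whose generated help ships in teradataml/data/docs/sqle/docs_17_20/CFilter.py (file 43). A minimal usage sketch follows; it assumes teradataml's usual convention that each rName maps to a snake_case keyword argument (target.column → target_column), and the table and column names are illustrative rather than taken from this release:

```python
# Hedged sketch: keyword names are derived from the rName entries above;
# the table name and its columns are placeholders, not verified example data.
from teradataml import DataFrame, CFilter

grocery = DataFrame("grocery_transaction")   # InputTable (TABLE_ALIAS)
cf_out = CFilter(
    data=grocery,                            # required input table
    target_column="item_name",               # TargetColumn: STRING column to filter on
    transaction_id_columns="tran_id",        # TransactionIDColumns: groups items bought together
    max_distinct_items=100,                  # MaxDistinctItems: optional, default 100
)
cf_out.result.head()                         # pairwise co-occurrence measures
```
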
teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json
@@ -0,0 +1,193 @@
+ {
+ "json_schema_major_version": "1",
+ "json_schema_minor_version": "1",
+ "json_content_version": "1",
+ "function_name": "TD_NaiveBayes",
+ "function_version": "1.0",
+ "function_alias_name": "TD_NaiveBayes",
+ "function_type": "fastpath",
+ "function_category": "Model Training",
+ "function_r_name": "aa.td_naivebayes",
+ "short_description": "This function generates classification model using NaiveBayes algorithm.",
+ "long_description": "This function generates classification model using NaiveBayes algorithm.",
+ "input_tables": [
+ {
+ "requiredInputKind": [
+ "PartitionByAny"
+ ],
+ "isOrdered": false,
+ "partitionByOne": false,
+ "name": "InputTable",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the table containing the input training data.",
+ "description": "Specifies the table containing the input training data.",
+ "datatype": "TABLE_ALIAS",
+ "allowsLists": false,
+ "rName": "data",
+ "useInR": true,
+ "rOrderNum": 1
+ }
+ ],
+ "argument_clauses": [
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "INTEGER","STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "ResponseColumn",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the name of the input table column that contains the response values.",
+ "description": "Specifies the name of the input table column that contains the response values.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "response.column",
+ "useInR": true,
+ "rOrderNum": 2
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "NUMERIC"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "NumericInputs",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table columns that contains the numeric attributes values.",
+ "description": "Specifies the name of the input table columns that contains the numeric attributes values.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "numeric.inputs",
+ "useInR": true,
+ "rOrderNum": 3
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "CategoricalInputs",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table columns that contains the categorical attributes values.",
+ "description": "Specifies the name of the input table columns that contains the categorical attributes values.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "categorical.inputs",
+ "useInR": true,
+ "rOrderNum": 4
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "AttributeNameColumn",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table column that contains the attributes names.",
+ "description": "Specifies the name of the input table columns that contains the attributes names.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "attribute.name.column",
+ "useInR": true,
+ "rOrderNum": 5
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "NUMERIC","STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "AttributeValueColumn",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table column that contains the attributes values.",
+ "description": "Specifies the name of the input table columns that contains the attributes values.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "attribute.value.column",
+ "useInR": true,
+ "rOrderNum": 6
+ },
+ {
+ "permittedValues": [
+ "ALLNUMERIC",
+ "ALLCATEGORICAL"
+ ],
+ "isOutputColumn": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "AttributeType",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the attribute type as ALLNUMERIC or ALLCATEGORICAL if all the attributes are of numeric type or categorical type respectively.",
+ "description": "Specifies the attribute type as ALLNUMERIC or ALLCATEGORICAL if all the attributes are of numeric type or categorical type respectively.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "attribute.type",
+ "useInR": true,
+ "rOrderNum": 7
+ },
+ {
+ "isOutputColumn": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "NumericAttributes",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the numeric attributes names.",
+ "description": "Specifies the numeric attributes names.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "numeric.attributes",
+ "useInR": true,
+ "rOrderNum": 8
+ },
+ {
+ "isOutputColumn": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "CategoricalAttributes",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the categorical attributes names.",
+ "description": "Specifies the categorical attributes names.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "categorical.attributes",
+ "useInR": true,
+ "rOrderNum": 9
+ }
+ ]
+ }
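
TD_NaiveBayes is the paired training function (its generated help lands in NaiveBayes.py, file 44). A hedged sketch of how the arguments above would surface in Python, again assuming the rName → snake_case mapping and using illustrative table and column names:

```python
# Hedged sketch: only the argument names are grounded in the JSON above;
# the training table and its columns are placeholders.
from teradataml import DataFrame, NaiveBayes

train = DataFrame("nb_train")                   # InputTable with training rows
nb_fit = NaiveBayes(
    data=train,
    response_column="species",                  # ResponseColumn: INTEGER or STRING
    numeric_inputs=["sepal_len", "petal_len"],  # NumericInputs: NUMERIC columns
    categorical_inputs=["color"],               # CategoricalInputs: STRING columns
)
nb_fit.result.head()                            # model table consumed by the predict function
```
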
teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json
@@ -0,0 +1,212 @@
+ {
+ "json_schema_major_version": "1",
+ "json_schema_minor_version": "1",
+ "json_content_version": "1",
+ "function_name": "TD_NaiveBayesPredict",
+ "function_version": "1.0",
+ "function_alias_name": "TDNaiveBayesPredict",
+ "function_type": "fastpath",
+ "function_category": "Model Scoring",
+ "function_r_name": "aa.td_naivebayespredict",
+ "ref_function_r_name": "aa.td_naivebayes",
+ "short_description": "This function predicts classification label using NaiveBayes model generated by TD_NaiveBayes.",
+ "long_description": "This function predicts classification label using NaiveBayes model generated by TD_NaiveBayes.",
+ "input_tables": [
+ {
+ "requiredInputKind": [
+ "PartitionByAny"
+ ],
+ "isOrdered": false,
+ "partitionByOne": false,
+ "name": "InputTable",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the table containing the input test data.",
+ "description": "Specifies the table containing the input test data.",
+ "datatype": "TABLE_ALIAS",
+ "allowsLists": false,
+ "rName": "data",
+ "useInR": true,
+ "rOrderNum": 1
+ },
+ {
+ "requiredInputKind": [
+ "Dimension"
+ ],
+ "isOrdered": false,
+ "partitionByOne": false,
+ "name": "modeltable",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the table containing the model data.",
+ "description": "Specifies the table containing the model data.",
+ "datatype": "TABLE_ALIAS",
+ "allowsLists": false,
+ "rName": "object",
+ "useInR": true,
+ "rOrderNum": 2
+ }
+ ],
+ "argument_clauses": [
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "IDColumn",
+ "alternateNames": [],
+ "isRequired": true,
+ "rDescription": "Specifies the name of the column that uniquely identifies an observation in the test table.",
+ "description": "Specifies the name of the column that uniquely identifies an observation in the test table..",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "id.column",
+ "useInR": true,
+ "rOrderNum": 3
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "NUMERIC"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "NumericInputs",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table columns that contains the numeric attributes values.",
+ "description": "Specifies the name of the input table columns that contains the numeric attributes values.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "numeric.inputs",
+ "useInR": true,
+ "rOrderNum": 4
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "CategoricalInputs",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table columns that contains the categorical attributes values.",
+ "description": "Specifies the name of the input table columns that contains the categorical attributes values.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "categorical.inputs",
+ "useInR": true,
+ "rOrderNum": 5
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "AttributeNameColumn",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table column that contains the attributes names.",
+ "description": "Specifies the name of the input table columns that contains the attributes names.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "attribute.name.column",
+ "useInR": true,
+ "rOrderNum": 6
+ },
+ {
+ "targetTable": [
+ "InputTable"
+ ],
+ "checkDuplicate": true,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "NUMERIC","STRING"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "AttributeValueColumn",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the name of the input table column that contains the attributes values.",
+ "description": "Specifies the name of the input table columns that contains the attributes values.",
+ "datatype": "COLUMNS",
+ "allowsLists": false,
+ "rName": "attribute.value.column",
+ "useInR": true,
+ "rOrderNum": 7
+ },
+ {
+ "isOutputColumn": false,
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "Responses",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies a list of Responses to output.",
+ "description": "Specifies a list of Responses to output.",
+ "datatype": "STRING",
+ "allowsLists": true,
+ "rName": "responses",
+ "useInR": true,
+ "rOrderNum": 8
+ },
+ {
+ "defaultValue": false,
+ "name": "OutputProb",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify whether the function should output the probability for each response.",
+ "description": "Specify whether the function should output the probability for each response. ",
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "output.prob",
+ "useInR": true,
+ "rOrderNum": 9
+ },
+ {
+ "targetTable": [
+ "inputtable"
+ ],
+ "checkDuplicate": false,
+ "allowedTypes": [],
+ "allowedTypeGroups": [
+ "ALL"
+ ],
+ "matchLengthOfArgument": "",
+ "allowPadding": false,
+ "name": "Accumulate",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specify the names of the input table columns that need to be copied from the input test table to output.",
+ "description": "Specify the names of the input table columns that need to be copied from the input test table to output.",
+ "datatype": "COLUMNS",
+ "allowsLists": true,
+ "rName": "accumulate",
+ "useInR": true,
+ "rOrderNum": 10
+ }
+ ]
+ }
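
TD_NaiveBayesPredict (exposed under the alias TDNaiveBayesPredict, per function_alias_name and the new TDNaiveBayesPredict.py docs, file 48) scores the model produced above. The sketch below assumes a direct rName-derived keyword mapping for the two input tables; the generated docs file is the authoritative signature, so treat these parameter names as unverified:

```python
# Hedged sketch: "data"/"object" follow the rName entries for InputTable and
# modeltable above; the test table and its columns are placeholders.
from teradataml import DataFrame, TDNaiveBayesPredict

test = DataFrame("nb_test")
nb_pred = TDNaiveBayesPredict(
    data=test,                                  # InputTable: rows to score
    object=nb_fit.result,                       # modeltable: Dimension input from NaiveBayes
    id_column="row_id",                         # IDColumn: unique observation id
    numeric_inputs=["sepal_len", "petal_len"],
    output_prob=True,                           # OutputProb: default False
    accumulate=["species"],                     # copy columns through to the output
)
nb_pred.result.head()
```
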
teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json
@@ -116,7 +116,7 @@
  "isRequired": false,
  "rDescription": "Specify the amount of regularization to be added. The higher the value, the stronger the regularization. It is also used to compute the learning rate when the learning rate is set to ‘optimal’. Must be a non-negative float value. A value of 0 means no regularization.",
  "description": "Specify the amount of regularization to be added. The higher the value, the stronger the regularization. It is also used to compute the learning rate when the learning rate is set to ‘optimal’. Must be a non-negative float value. A value of 0 means no regularization.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "lambda1",
  "useInR": true,
@@ -134,7 +134,7 @@
  "isRequired": false,
  "rDescription": "Specify the Elasticnet parameter for penalty computation. It only becomes effective if RegularizationLambda > 0. The value represents the contribution ratio of L1 in the penalty. A value of 1.0 indicates L1 (LASSO) only, a value of 0 indicates L2 (Ridge) only, and a value in between is a combination of L1 and L2. Default: 0.15 (15% L1, 85% L2). Must be a float value between 0 and 1.",
  "description": "Specify the Elasticnet parameter for penalty computation. It only becomes effective if RegularizationLambda > 0. The value represents the contribution ratio of L1 in the penalty. A value of 1.0 indicates L1 (LASSO) only, a value of 0 indicates L2 (Ridge) only, and a value in between is a combination of L1 and L2. Default: 0.15 (15% L1, 85% L2). Must be a float value between 0 and 1.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "alpha",
  "useInR": true,
@@ -170,7 +170,7 @@
  "isRequired": false,
  "rDescription": "Specify the stopping criteria in terms of loss function improvement. Applicable when IterNumNoChange is greater than 0. Value is a positive integer.",
  "description": "Specify the stopping criteria in terms of loss function improvement. Applicable when IterNumNoChange is greater than 0. Value is a positive integer.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "tolerance",
  "useInR": true,
@@ -191,9 +191,9 @@
  },
  {
  "permittedValues": [
- "CONSTANT",
- "OPTIMAL",
- "INVTIME",
+ "CONSTANT",
+ "OPTIMAL",
+ "INVTIME",
  "ADAPTIVE"
  ],
  "defaultValue": "OPTIMAL",
@@ -221,7 +221,7 @@
  "isRequired": false,
  "rDescription": "Specify the initial value of eta for the learning rate. For ‘constant’, this value is the learning rate for all iterations. ",
  "description": "Specify the initial value of eta for the learning rate. For ‘constant’, this value is the learning rate for all iterations. ",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "initial.eta",
  "useInR": true,
@@ -239,7 +239,7 @@
  "isRequired": false,
  "rDescription": "Specify the decay rate for the learning rate (invtime and adaptive).",
  "description": "Specify the decay rate for the learning rate (invtime and adaptive).",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "decay.rate",
  "useInR": true,
@@ -275,7 +275,7 @@
  "isRequired": false,
  "rDescription": "Specify the value to use for the momentum learning rate optimizer. Must be a non-negative float value between 0 and 1. A larger value indicates a higher momentum contribution. A value of 0 means the momentum optimizer is disabled. For a good momentum contribution, a value between 0.6-0.95 is recommended.",
  "description": "Specify the value to use for the momentum learning rate optimizer. Must be a non-negative float value between 0 and 1. A larger value indicates a higher momentum contribution. A value of 0 means the momentum optimizer is disabled. For a good momentum contribution, a value between 0.6-0.95 is recommended.",
- "datatype": "DOUBLE",
+ "datatype": "NUMERIC",
  "allowsLists": false,
  "rName": "momentum",
  "useInR": true,