teradataml-20.0.0.8-py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
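Because a .whl file is an ordinary zip archive, the file listing below can be reproduced locally. The following is a minimal sketch, assuming the wheel has already been downloaded to the working directory (the local path is illustrative, not part of the diff):

    import zipfile

    # A wheel is a zip archive, so the standard library is enough to
    # enumerate its contents, mirroring the "Files changed" listing below.
    WHEEL = "teradataml-20.0.0.8-py3-none-any.whl"  # assumed local download

    with zipfile.ZipFile(WHEEL) as whl:
        for info in whl.infolist():
            # file sizes are in bytes; the diff instead reports line counts
            print(f"{info.filename}  ({info.file_size} bytes)")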
Files changed (1208)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +2762 -0
  4. teradataml/__init__.py +78 -0
  5. teradataml/_version.py +11 -0
  6. teradataml/analytics/Transformations.py +2996 -0
  7. teradataml/analytics/__init__.py +82 -0
  8. teradataml/analytics/analytic_function_executor.py +2416 -0
  9. teradataml/analytics/analytic_query_generator.py +1050 -0
  10. teradataml/analytics/byom/H2OPredict.py +514 -0
  11. teradataml/analytics/byom/PMMLPredict.py +437 -0
  12. teradataml/analytics/byom/__init__.py +16 -0
  13. teradataml/analytics/json_parser/__init__.py +133 -0
  14. teradataml/analytics/json_parser/analytic_functions_argument.py +1805 -0
  15. teradataml/analytics/json_parser/json_store.py +191 -0
  16. teradataml/analytics/json_parser/metadata.py +1666 -0
  17. teradataml/analytics/json_parser/utils.py +805 -0
  18. teradataml/analytics/meta_class.py +236 -0
  19. teradataml/analytics/sqle/DecisionTreePredict.py +456 -0
  20. teradataml/analytics/sqle/NaiveBayesPredict.py +420 -0
  21. teradataml/analytics/sqle/__init__.py +128 -0
  22. teradataml/analytics/sqle/json/decisiontreepredict_sqle.json +78 -0
  23. teradataml/analytics/sqle/json/naivebayespredict_sqle.json +62 -0
  24. teradataml/analytics/table_operator/__init__.py +11 -0
  25. teradataml/analytics/uaf/__init__.py +82 -0
  26. teradataml/analytics/utils.py +828 -0
  27. teradataml/analytics/valib.py +1617 -0
  28. teradataml/automl/__init__.py +5835 -0
  29. teradataml/automl/autodataprep/__init__.py +493 -0
  30. teradataml/automl/custom_json_utils.py +1625 -0
  31. teradataml/automl/data_preparation.py +1384 -0
  32. teradataml/automl/data_transformation.py +1254 -0
  33. teradataml/automl/feature_engineering.py +2273 -0
  34. teradataml/automl/feature_exploration.py +1873 -0
  35. teradataml/automl/model_evaluation.py +488 -0
  36. teradataml/automl/model_training.py +1407 -0
  37. teradataml/catalog/__init__.py +2 -0
  38. teradataml/catalog/byom.py +1759 -0
  39. teradataml/catalog/function_argument_mapper.py +859 -0
  40. teradataml/catalog/model_cataloging_utils.py +491 -0
  41. teradataml/clients/__init__.py +0 -0
  42. teradataml/clients/auth_client.py +137 -0
  43. teradataml/clients/keycloak_client.py +165 -0
  44. teradataml/clients/pkce_client.py +481 -0
  45. teradataml/common/__init__.py +1 -0
  46. teradataml/common/aed_utils.py +2078 -0
  47. teradataml/common/bulk_exposed_utils.py +113 -0
  48. teradataml/common/constants.py +1669 -0
  49. teradataml/common/deprecations.py +166 -0
  50. teradataml/common/exceptions.py +147 -0
  51. teradataml/common/formula.py +743 -0
  52. teradataml/common/garbagecollector.py +666 -0
  53. teradataml/common/logger.py +1261 -0
  54. teradataml/common/messagecodes.py +518 -0
  55. teradataml/common/messages.py +262 -0
  56. teradataml/common/pylogger.py +67 -0
  57. teradataml/common/sqlbundle.py +764 -0
  58. teradataml/common/td_coltype_code_to_tdtype.py +48 -0
  59. teradataml/common/utils.py +3166 -0
  60. teradataml/common/warnings.py +36 -0
  61. teradataml/common/wrapper_utils.py +625 -0
  62. teradataml/config/__init__.py +0 -0
  63. teradataml/config/dummy_file1.cfg +5 -0
  64. teradataml/config/dummy_file2.cfg +3 -0
  65. teradataml/config/sqlengine_alias_definitions_v1.0 +14 -0
  66. teradataml/config/sqlengine_alias_definitions_v1.1 +20 -0
  67. teradataml/config/sqlengine_alias_definitions_v1.3 +19 -0
  68. teradataml/context/__init__.py +0 -0
  69. teradataml/context/aed_context.py +223 -0
  70. teradataml/context/context.py +1462 -0
  71. teradataml/data/A_loan.csv +19 -0
  72. teradataml/data/BINARY_REALS_LEFT.csv +11 -0
  73. teradataml/data/BINARY_REALS_RIGHT.csv +11 -0
  74. teradataml/data/B_loan.csv +49 -0
  75. teradataml/data/BuoyData2.csv +17 -0
  76. teradataml/data/CONVOLVE2_COMPLEX_LEFT.csv +5 -0
  77. teradataml/data/CONVOLVE2_COMPLEX_RIGHT.csv +5 -0
  78. teradataml/data/Convolve2RealsLeft.csv +5 -0
  79. teradataml/data/Convolve2RealsRight.csv +5 -0
  80. teradataml/data/Convolve2ValidLeft.csv +11 -0
  81. teradataml/data/Convolve2ValidRight.csv +11 -0
  82. teradataml/data/DFFTConv_Real_8_8.csv +65 -0
  83. teradataml/data/Employee.csv +5 -0
  84. teradataml/data/Employee_Address.csv +4 -0
  85. teradataml/data/Employee_roles.csv +5 -0
  86. teradataml/data/JulesBelvezeDummyData.csv +100 -0
  87. teradataml/data/Mall_customer_data.csv +201 -0
  88. teradataml/data/Orders1_12mf.csv +25 -0
  89. teradataml/data/Pi_loan.csv +7 -0
  90. teradataml/data/SMOOTHED_DATA.csv +7 -0
  91. teradataml/data/TestDFFT8.csv +9 -0
  92. teradataml/data/TestRiver.csv +109 -0
  93. teradataml/data/Traindata.csv +28 -0
  94. teradataml/data/__init__.py +0 -0
  95. teradataml/data/acf.csv +17 -0
  96. teradataml/data/adaboost_example.json +34 -0
  97. teradataml/data/adaboostpredict_example.json +24 -0
  98. teradataml/data/additional_table.csv +11 -0
  99. teradataml/data/admissions_test.csv +21 -0
  100. teradataml/data/admissions_train.csv +41 -0
  101. teradataml/data/admissions_train_nulls.csv +41 -0
  102. teradataml/data/advertising.csv +201 -0
  103. teradataml/data/ageandheight.csv +13 -0
  104. teradataml/data/ageandpressure.csv +31 -0
  105. teradataml/data/amazon_reviews_25.csv +26 -0
  106. teradataml/data/antiselect_example.json +36 -0
  107. teradataml/data/antiselect_input.csv +8 -0
  108. teradataml/data/antiselect_input_mixed_case.csv +8 -0
  109. teradataml/data/applicant_external.csv +7 -0
  110. teradataml/data/applicant_reference.csv +7 -0
  111. teradataml/data/apriori_example.json +22 -0
  112. teradataml/data/arima_example.json +9 -0
  113. teradataml/data/assortedtext_input.csv +8 -0
  114. teradataml/data/attribution_example.json +34 -0
  115. teradataml/data/attribution_sample_table.csv +27 -0
  116. teradataml/data/attribution_sample_table1.csv +6 -0
  117. teradataml/data/attribution_sample_table2.csv +11 -0
  118. teradataml/data/bank_churn.csv +10001 -0
  119. teradataml/data/bank_marketing.csv +11163 -0
  120. teradataml/data/bank_web_clicks1.csv +43 -0
  121. teradataml/data/bank_web_clicks2.csv +91 -0
  122. teradataml/data/bank_web_url.csv +85 -0
  123. teradataml/data/barrier.csv +2 -0
  124. teradataml/data/barrier_new.csv +3 -0
  125. teradataml/data/betweenness_example.json +14 -0
  126. teradataml/data/bike_sharing.csv +732 -0
  127. teradataml/data/bin_breaks.csv +8 -0
  128. teradataml/data/bin_fit_ip.csv +4 -0
  129. teradataml/data/binary_complex_left.csv +11 -0
  130. teradataml/data/binary_complex_right.csv +11 -0
  131. teradataml/data/binary_matrix_complex_left.csv +21 -0
  132. teradataml/data/binary_matrix_complex_right.csv +21 -0
  133. teradataml/data/binary_matrix_real_left.csv +21 -0
  134. teradataml/data/binary_matrix_real_right.csv +21 -0
  135. teradataml/data/blood2ageandweight.csv +26 -0
  136. teradataml/data/bmi.csv +501 -0
  137. teradataml/data/boston.csv +507 -0
  138. teradataml/data/boston2cols.csv +721 -0
  139. teradataml/data/breast_cancer.csv +570 -0
  140. teradataml/data/buoydata_mix.csv +11 -0
  141. teradataml/data/burst_data.csv +5 -0
  142. teradataml/data/burst_example.json +21 -0
  143. teradataml/data/byom_example.json +34 -0
  144. teradataml/data/bytes_table.csv +4 -0
  145. teradataml/data/cal_housing_ex_raw.csv +70 -0
  146. teradataml/data/callers.csv +7 -0
  147. teradataml/data/calls.csv +10 -0
  148. teradataml/data/cars_hist.csv +33 -0
  149. teradataml/data/cat_table.csv +25 -0
  150. teradataml/data/ccm_example.json +32 -0
  151. teradataml/data/ccm_input.csv +91 -0
  152. teradataml/data/ccm_input2.csv +13 -0
  153. teradataml/data/ccmexample.csv +101 -0
  154. teradataml/data/ccmprepare_example.json +9 -0
  155. teradataml/data/ccmprepare_input.csv +91 -0
  156. teradataml/data/cfilter_example.json +12 -0
  157. teradataml/data/changepointdetection_example.json +18 -0
  158. teradataml/data/changepointdetectionrt_example.json +8 -0
  159. teradataml/data/chi_sq.csv +3 -0
  160. teradataml/data/churn_data.csv +14 -0
  161. teradataml/data/churn_emission.csv +35 -0
  162. teradataml/data/churn_initial.csv +3 -0
  163. teradataml/data/churn_state_transition.csv +5 -0
  164. teradataml/data/citedges_2.csv +745 -0
  165. teradataml/data/citvertices_2.csv +1210 -0
  166. teradataml/data/clicks2.csv +16 -0
  167. teradataml/data/clickstream.csv +13 -0
  168. teradataml/data/clickstream1.csv +11 -0
  169. teradataml/data/closeness_example.json +16 -0
  170. teradataml/data/complaints.csv +21 -0
  171. teradataml/data/complaints_mini.csv +3 -0
  172. teradataml/data/complaints_test_tokenized.csv +353 -0
  173. teradataml/data/complaints_testtoken.csv +224 -0
  174. teradataml/data/complaints_tokens_model.csv +348 -0
  175. teradataml/data/complaints_tokens_test.csv +353 -0
  176. teradataml/data/complaints_traintoken.csv +472 -0
  177. teradataml/data/computers_category.csv +1001 -0
  178. teradataml/data/computers_test1.csv +1252 -0
  179. teradataml/data/computers_train1.csv +5009 -0
  180. teradataml/data/computers_train1_clustered.csv +5009 -0
  181. teradataml/data/confusionmatrix_example.json +9 -0
  182. teradataml/data/conversion_event_table.csv +3 -0
  183. teradataml/data/corr_input.csv +17 -0
  184. teradataml/data/correlation_example.json +11 -0
  185. teradataml/data/covid_confirm_sd.csv +83 -0
  186. teradataml/data/coxhazardratio_example.json +39 -0
  187. teradataml/data/coxph_example.json +15 -0
  188. teradataml/data/coxsurvival_example.json +28 -0
  189. teradataml/data/cpt.csv +41 -0
  190. teradataml/data/credit_ex_merged.csv +45 -0
  191. teradataml/data/creditcard_data.csv +1001 -0
  192. teradataml/data/customer_loyalty.csv +301 -0
  193. teradataml/data/customer_loyalty_newseq.csv +31 -0
  194. teradataml/data/customer_segmentation_test.csv +2628 -0
  195. teradataml/data/customer_segmentation_train.csv +8069 -0
  196. teradataml/data/dataframe_example.json +173 -0
  197. teradataml/data/decisionforest_example.json +37 -0
  198. teradataml/data/decisionforestpredict_example.json +38 -0
  199. teradataml/data/decisiontree_example.json +21 -0
  200. teradataml/data/decisiontreepredict_example.json +45 -0
  201. teradataml/data/dfft2_size4_real.csv +17 -0
  202. teradataml/data/dfft2_test_matrix16.csv +17 -0
  203. teradataml/data/dfft2conv_real_4_4.csv +65 -0
  204. teradataml/data/diabetes.csv +443 -0
  205. teradataml/data/diabetes_test.csv +89 -0
  206. teradataml/data/dict_table.csv +5 -0
  207. teradataml/data/docperterm_table.csv +4 -0
  208. teradataml/data/docs/__init__.py +1 -0
  209. teradataml/data/docs/byom/__init__.py +0 -0
  210. teradataml/data/docs/byom/docs/DataRobotPredict.py +180 -0
  211. teradataml/data/docs/byom/docs/DataikuPredict.py +217 -0
  212. teradataml/data/docs/byom/docs/H2OPredict.py +325 -0
  213. teradataml/data/docs/byom/docs/ONNXEmbeddings.py +242 -0
  214. teradataml/data/docs/byom/docs/ONNXPredict.py +283 -0
  215. teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
  216. teradataml/data/docs/byom/docs/PMMLPredict.py +278 -0
  217. teradataml/data/docs/byom/docs/__init__.py +0 -0
  218. teradataml/data/docs/sqle/__init__.py +0 -0
  219. teradataml/data/docs/sqle/docs_17_10/Antiselect.py +83 -0
  220. teradataml/data/docs/sqle/docs_17_10/Attribution.py +200 -0
  221. teradataml/data/docs/sqle/docs_17_10/BincodeFit.py +172 -0
  222. teradataml/data/docs/sqle/docs_17_10/BincodeTransform.py +131 -0
  223. teradataml/data/docs/sqle/docs_17_10/CategoricalSummary.py +86 -0
  224. teradataml/data/docs/sqle/docs_17_10/ChiSq.py +90 -0
  225. teradataml/data/docs/sqle/docs_17_10/ColumnSummary.py +86 -0
  226. teradataml/data/docs/sqle/docs_17_10/ConvertTo.py +96 -0
  227. teradataml/data/docs/sqle/docs_17_10/DecisionForestPredict.py +139 -0
  228. teradataml/data/docs/sqle/docs_17_10/DecisionTreePredict.py +152 -0
  229. teradataml/data/docs/sqle/docs_17_10/FTest.py +161 -0
  230. teradataml/data/docs/sqle/docs_17_10/FillRowId.py +83 -0
  231. teradataml/data/docs/sqle/docs_17_10/Fit.py +88 -0
  232. teradataml/data/docs/sqle/docs_17_10/GLMPredict.py +144 -0
  233. teradataml/data/docs/sqle/docs_17_10/GetRowsWithMissingValues.py +85 -0
  234. teradataml/data/docs/sqle/docs_17_10/GetRowsWithoutMissingValues.py +82 -0
  235. teradataml/data/docs/sqle/docs_17_10/Histogram.py +165 -0
  236. teradataml/data/docs/sqle/docs_17_10/MovingAverage.py +134 -0
  237. teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +209 -0
  238. teradataml/data/docs/sqle/docs_17_10/NPath.py +266 -0
  239. teradataml/data/docs/sqle/docs_17_10/NaiveBayesPredict.py +116 -0
  240. teradataml/data/docs/sqle/docs_17_10/NaiveBayesTextClassifierPredict.py +176 -0
  241. teradataml/data/docs/sqle/docs_17_10/NumApply.py +147 -0
  242. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +135 -0
  243. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +109 -0
  244. teradataml/data/docs/sqle/docs_17_10/OutlierFilterFit.py +166 -0
  245. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +105 -0
  246. teradataml/data/docs/sqle/docs_17_10/Pack.py +128 -0
  247. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesFit.py +112 -0
  248. teradataml/data/docs/sqle/docs_17_10/PolynomialFeaturesTransform.py +102 -0
  249. teradataml/data/docs/sqle/docs_17_10/QQNorm.py +105 -0
  250. teradataml/data/docs/sqle/docs_17_10/RoundColumns.py +110 -0
  251. teradataml/data/docs/sqle/docs_17_10/RowNormalizeFit.py +118 -0
  252. teradataml/data/docs/sqle/docs_17_10/RowNormalizeTransform.py +99 -0
  253. teradataml/data/docs/sqle/docs_17_10/SVMSparsePredict.py +153 -0
  254. teradataml/data/docs/sqle/docs_17_10/ScaleFit.py +197 -0
  255. teradataml/data/docs/sqle/docs_17_10/ScaleTransform.py +99 -0
  256. teradataml/data/docs/sqle/docs_17_10/Sessionize.py +114 -0
  257. teradataml/data/docs/sqle/docs_17_10/SimpleImputeFit.py +116 -0
  258. teradataml/data/docs/sqle/docs_17_10/SimpleImputeTransform.py +98 -0
  259. teradataml/data/docs/sqle/docs_17_10/StrApply.py +187 -0
  260. teradataml/data/docs/sqle/docs_17_10/StringSimilarity.py +146 -0
  261. teradataml/data/docs/sqle/docs_17_10/Transform.py +105 -0
  262. teradataml/data/docs/sqle/docs_17_10/UnivariateStatistics.py +142 -0
  263. teradataml/data/docs/sqle/docs_17_10/Unpack.py +214 -0
  264. teradataml/data/docs/sqle/docs_17_10/WhichMax.py +83 -0
  265. teradataml/data/docs/sqle/docs_17_10/WhichMin.py +83 -0
  266. teradataml/data/docs/sqle/docs_17_10/ZTest.py +155 -0
  267. teradataml/data/docs/sqle/docs_17_10/__init__.py +0 -0
  268. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +186 -0
  269. teradataml/data/docs/sqle/docs_17_20/Antiselect.py +83 -0
  270. teradataml/data/docs/sqle/docs_17_20/Apriori.py +138 -0
  271. teradataml/data/docs/sqle/docs_17_20/Attribution.py +201 -0
  272. teradataml/data/docs/sqle/docs_17_20/BincodeFit.py +172 -0
  273. teradataml/data/docs/sqle/docs_17_20/BincodeTransform.py +139 -0
  274. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  275. teradataml/data/docs/sqle/docs_17_20/CategoricalSummary.py +86 -0
  276. teradataml/data/docs/sqle/docs_17_20/ChiSq.py +90 -0
  277. teradataml/data/docs/sqle/docs_17_20/ClassificationEvaluator.py +166 -0
  278. teradataml/data/docs/sqle/docs_17_20/ColumnSummary.py +86 -0
  279. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +246 -0
  280. teradataml/data/docs/sqle/docs_17_20/ConvertTo.py +113 -0
  281. teradataml/data/docs/sqle/docs_17_20/DecisionForest.py +280 -0
  282. teradataml/data/docs/sqle/docs_17_20/DecisionForestPredict.py +144 -0
  283. teradataml/data/docs/sqle/docs_17_20/DecisionTreePredict.py +136 -0
  284. teradataml/data/docs/sqle/docs_17_20/FTest.py +240 -0
  285. teradataml/data/docs/sqle/docs_17_20/FillRowId.py +83 -0
  286. teradataml/data/docs/sqle/docs_17_20/Fit.py +88 -0
  287. teradataml/data/docs/sqle/docs_17_20/GLM.py +541 -0
  288. teradataml/data/docs/sqle/docs_17_20/GLMPerSegment.py +415 -0
  289. teradataml/data/docs/sqle/docs_17_20/GLMPredict.py +144 -0
  290. teradataml/data/docs/sqle/docs_17_20/GLMPredictPerSegment.py +233 -0
  291. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +125 -0
  292. teradataml/data/docs/sqle/docs_17_20/GetRowsWithMissingValues.py +109 -0
  293. teradataml/data/docs/sqle/docs_17_20/GetRowsWithoutMissingValues.py +106 -0
  294. teradataml/data/docs/sqle/docs_17_20/Histogram.py +224 -0
  295. teradataml/data/docs/sqle/docs_17_20/KMeans.py +251 -0
  296. teradataml/data/docs/sqle/docs_17_20/KMeansPredict.py +144 -0
  297. teradataml/data/docs/sqle/docs_17_20/KNN.py +215 -0
  298. teradataml/data/docs/sqle/docs_17_20/MovingAverage.py +134 -0
  299. teradataml/data/docs/sqle/docs_17_20/NERExtractor.py +121 -0
  300. teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +209 -0
  301. teradataml/data/docs/sqle/docs_17_20/NPath.py +266 -0
  302. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  303. teradataml/data/docs/sqle/docs_17_20/NaiveBayesPredict.py +116 -0
  304. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierPredict.py +177 -0
  305. teradataml/data/docs/sqle/docs_17_20/NaiveBayesTextClassifierTrainer.py +127 -0
  306. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +119 -0
  307. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineTransform.py +112 -0
  308. teradataml/data/docs/sqle/docs_17_20/NumApply.py +147 -0
  309. teradataml/data/docs/sqle/docs_17_20/OneClassSVM.py +307 -0
  310. teradataml/data/docs/sqle/docs_17_20/OneClassSVMPredict.py +185 -0
  311. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +231 -0
  312. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +121 -0
  313. teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingFit.py +220 -0
  314. teradataml/data/docs/sqle/docs_17_20/OrdinalEncodingTransform.py +127 -0
  315. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +191 -0
  316. teradataml/data/docs/sqle/docs_17_20/OutlierFilterTransform.py +117 -0
  317. teradataml/data/docs/sqle/docs_17_20/Pack.py +128 -0
  318. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  319. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesFit.py +112 -0
  320. teradataml/data/docs/sqle/docs_17_20/PolynomialFeaturesTransform.py +112 -0
  321. teradataml/data/docs/sqle/docs_17_20/QQNorm.py +105 -0
  322. teradataml/data/docs/sqle/docs_17_20/ROC.py +164 -0
  323. teradataml/data/docs/sqle/docs_17_20/RandomProjectionFit.py +155 -0
  324. teradataml/data/docs/sqle/docs_17_20/RandomProjectionMinComponents.py +106 -0
  325. teradataml/data/docs/sqle/docs_17_20/RandomProjectionTransform.py +120 -0
  326. teradataml/data/docs/sqle/docs_17_20/RegressionEvaluator.py +211 -0
  327. teradataml/data/docs/sqle/docs_17_20/RoundColumns.py +109 -0
  328. teradataml/data/docs/sqle/docs_17_20/RowNormalizeFit.py +118 -0
  329. teradataml/data/docs/sqle/docs_17_20/RowNormalizeTransform.py +111 -0
  330. teradataml/data/docs/sqle/docs_17_20/SMOTE.py +212 -0
  331. teradataml/data/docs/sqle/docs_17_20/SVM.py +414 -0
  332. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +213 -0
  333. teradataml/data/docs/sqle/docs_17_20/SVMSparsePredict.py +153 -0
  334. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +315 -0
  335. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +202 -0
  336. teradataml/data/docs/sqle/docs_17_20/SentimentExtractor.py +206 -0
  337. teradataml/data/docs/sqle/docs_17_20/Sessionize.py +114 -0
  338. teradataml/data/docs/sqle/docs_17_20/Shap.py +225 -0
  339. teradataml/data/docs/sqle/docs_17_20/Silhouette.py +153 -0
  340. teradataml/data/docs/sqle/docs_17_20/SimpleImputeFit.py +116 -0
  341. teradataml/data/docs/sqle/docs_17_20/SimpleImputeTransform.py +109 -0
  342. teradataml/data/docs/sqle/docs_17_20/StrApply.py +187 -0
  343. teradataml/data/docs/sqle/docs_17_20/StringSimilarity.py +146 -0
  344. teradataml/data/docs/sqle/docs_17_20/TDDecisionForestPredict.py +207 -0
  345. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +333 -0
  346. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  347. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  348. teradataml/data/docs/sqle/docs_17_20/TargetEncodingFit.py +267 -0
  349. teradataml/data/docs/sqle/docs_17_20/TargetEncodingTransform.py +141 -0
  350. teradataml/data/docs/sqle/docs_17_20/TextMorph.py +119 -0
  351. teradataml/data/docs/sqle/docs_17_20/TextParser.py +224 -0
  352. teradataml/data/docs/sqle/docs_17_20/TrainTestSplit.py +160 -0
  353. teradataml/data/docs/sqle/docs_17_20/Transform.py +123 -0
  354. teradataml/data/docs/sqle/docs_17_20/UnivariateStatistics.py +142 -0
  355. teradataml/data/docs/sqle/docs_17_20/Unpack.py +214 -0
  356. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  357. teradataml/data/docs/sqle/docs_17_20/VectorDistance.py +169 -0
  358. teradataml/data/docs/sqle/docs_17_20/WhichMax.py +83 -0
  359. teradataml/data/docs/sqle/docs_17_20/WhichMin.py +83 -0
  360. teradataml/data/docs/sqle/docs_17_20/WordEmbeddings.py +237 -0
  361. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +362 -0
  362. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +281 -0
  363. teradataml/data/docs/sqle/docs_17_20/ZTest.py +220 -0
  364. teradataml/data/docs/sqle/docs_17_20/__init__.py +0 -0
  365. teradataml/data/docs/tableoperator/__init__.py +0 -0
  366. teradataml/data/docs/tableoperator/docs_17_00/ReadNOS.py +430 -0
  367. teradataml/data/docs/tableoperator/docs_17_00/__init__.py +0 -0
  368. teradataml/data/docs/tableoperator/docs_17_05/ReadNOS.py +430 -0
  369. teradataml/data/docs/tableoperator/docs_17_05/WriteNOS.py +348 -0
  370. teradataml/data/docs/tableoperator/docs_17_05/__init__.py +0 -0
  371. teradataml/data/docs/tableoperator/docs_17_10/ReadNOS.py +429 -0
  372. teradataml/data/docs/tableoperator/docs_17_10/WriteNOS.py +348 -0
  373. teradataml/data/docs/tableoperator/docs_17_10/__init__.py +0 -0
  374. teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
  375. teradataml/data/docs/tableoperator/docs_17_20/ReadNOS.py +440 -0
  376. teradataml/data/docs/tableoperator/docs_17_20/WriteNOS.py +387 -0
  377. teradataml/data/docs/tableoperator/docs_17_20/__init__.py +0 -0
  378. teradataml/data/docs/uaf/__init__.py +0 -0
  379. teradataml/data/docs/uaf/docs_17_20/ACF.py +186 -0
  380. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +370 -0
  381. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +172 -0
  382. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +161 -0
  383. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  384. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  385. teradataml/data/docs/uaf/docs_17_20/BinaryMatrixOp.py +248 -0
  386. teradataml/data/docs/uaf/docs_17_20/BinarySeriesOp.py +252 -0
  387. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +178 -0
  388. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +175 -0
  389. teradataml/data/docs/uaf/docs_17_20/Convolve.py +230 -0
  390. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +218 -0
  391. teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
  392. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +185 -0
  393. teradataml/data/docs/uaf/docs_17_20/DFFT.py +204 -0
  394. teradataml/data/docs/uaf/docs_17_20/DFFT2.py +216 -0
  395. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +216 -0
  396. teradataml/data/docs/uaf/docs_17_20/DFFTConv.py +192 -0
  397. teradataml/data/docs/uaf/docs_17_20/DIFF.py +175 -0
  398. teradataml/data/docs/uaf/docs_17_20/DTW.py +180 -0
  399. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  400. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +217 -0
  401. teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +142 -0
  402. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +184 -0
  403. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +185 -0
  404. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  405. teradataml/data/docs/uaf/docs_17_20/FitMetrics.py +172 -0
  406. teradataml/data/docs/uaf/docs_17_20/GenseriesFormula.py +206 -0
  407. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +143 -0
  408. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +198 -0
  409. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +260 -0
  410. teradataml/data/docs/uaf/docs_17_20/IDFFT.py +165 -0
  411. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +191 -0
  412. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  413. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  414. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  415. teradataml/data/docs/uaf/docs_17_20/InputValidator.py +121 -0
  416. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +156 -0
  417. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +215 -0
  418. teradataml/data/docs/uaf/docs_17_20/MAMean.py +174 -0
  419. teradataml/data/docs/uaf/docs_17_20/MInfo.py +134 -0
  420. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  421. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +145 -0
  422. teradataml/data/docs/uaf/docs_17_20/MultivarRegr.py +191 -0
  423. teradataml/data/docs/uaf/docs_17_20/PACF.py +157 -0
  424. teradataml/data/docs/uaf/docs_17_20/Portman.py +217 -0
  425. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +203 -0
  426. teradataml/data/docs/uaf/docs_17_20/PowerTransform.py +155 -0
  427. teradataml/data/docs/uaf/docs_17_20/Resample.py +237 -0
  428. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  429. teradataml/data/docs/uaf/docs_17_20/SInfo.py +123 -0
  430. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +173 -0
  431. teradataml/data/docs/uaf/docs_17_20/SelectionCriteria.py +174 -0
  432. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +171 -0
  433. teradataml/data/docs/uaf/docs_17_20/SignifResidmean.py +164 -0
  434. teradataml/data/docs/uaf/docs_17_20/SimpleExp.py +180 -0
  435. teradataml/data/docs/uaf/docs_17_20/Smoothma.py +208 -0
  436. teradataml/data/docs/uaf/docs_17_20/TrackingOp.py +151 -0
  437. teradataml/data/docs/uaf/docs_17_20/UNDIFF.py +171 -0
  438. teradataml/data/docs/uaf/docs_17_20/Unnormalize.py +202 -0
  439. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +171 -0
  440. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  441. teradataml/data/docs/uaf/docs_17_20/__init__.py +0 -0
  442. teradataml/data/dtw_example.json +18 -0
  443. teradataml/data/dtw_t1.csv +11 -0
  444. teradataml/data/dtw_t2.csv +4 -0
  445. teradataml/data/dwt2d_dataTable.csv +65 -0
  446. teradataml/data/dwt2d_example.json +16 -0
  447. teradataml/data/dwt_dataTable.csv +8 -0
  448. teradataml/data/dwt_example.json +15 -0
  449. teradataml/data/dwt_filterTable.csv +3 -0
  450. teradataml/data/dwt_filter_dim.csv +5 -0
  451. teradataml/data/emission.csv +9 -0
  452. teradataml/data/emp_table_by_dept.csv +19 -0
  453. teradataml/data/employee_info.csv +4 -0
  454. teradataml/data/employee_table.csv +6 -0
  455. teradataml/data/excluding_event_table.csv +2 -0
  456. teradataml/data/finance_data.csv +6 -0
  457. teradataml/data/finance_data2.csv +61 -0
  458. teradataml/data/finance_data3.csv +93 -0
  459. teradataml/data/finance_data4.csv +13 -0
  460. teradataml/data/fish.csv +160 -0
  461. teradataml/data/fm_blood2ageandweight.csv +26 -0
  462. teradataml/data/fmeasure_example.json +12 -0
  463. teradataml/data/followers_leaders.csv +10 -0
  464. teradataml/data/fpgrowth_example.json +12 -0
  465. teradataml/data/frequentpaths_example.json +29 -0
  466. teradataml/data/friends.csv +9 -0
  467. teradataml/data/fs_input.csv +33 -0
  468. teradataml/data/fs_input1.csv +33 -0
  469. teradataml/data/genData.csv +513 -0
  470. teradataml/data/geodataframe_example.json +40 -0
  471. teradataml/data/glass_types.csv +215 -0
  472. teradataml/data/glm_admissions_model.csv +12 -0
  473. teradataml/data/glm_example.json +56 -0
  474. teradataml/data/glml1l2_example.json +28 -0
  475. teradataml/data/glml1l2predict_example.json +54 -0
  476. teradataml/data/glmpredict_example.json +54 -0
  477. teradataml/data/gq_t1.csv +21 -0
  478. teradataml/data/grocery_transaction.csv +19 -0
  479. teradataml/data/hconvolve_complex_right.csv +5 -0
  480. teradataml/data/hconvolve_complex_rightmulti.csv +5 -0
  481. teradataml/data/histogram_example.json +12 -0
  482. teradataml/data/hmmdecoder_example.json +79 -0
  483. teradataml/data/hmmevaluator_example.json +25 -0
  484. teradataml/data/hmmsupervised_example.json +10 -0
  485. teradataml/data/hmmunsupervised_example.json +8 -0
  486. teradataml/data/hnsw_alter_data.csv +5 -0
  487. teradataml/data/hnsw_data.csv +10 -0
  488. teradataml/data/house_values.csv +12 -0
  489. teradataml/data/house_values2.csv +13 -0
  490. teradataml/data/housing_cat.csv +7 -0
  491. teradataml/data/housing_data.csv +9 -0
  492. teradataml/data/housing_test.csv +47 -0
  493. teradataml/data/housing_test_binary.csv +47 -0
  494. teradataml/data/housing_train.csv +493 -0
  495. teradataml/data/housing_train_attribute.csv +5 -0
  496. teradataml/data/housing_train_binary.csv +437 -0
  497. teradataml/data/housing_train_parameter.csv +2 -0
  498. teradataml/data/housing_train_response.csv +493 -0
  499. teradataml/data/housing_train_segment.csv +201 -0
  500. teradataml/data/ibm_stock.csv +370 -0
  501. teradataml/data/ibm_stock1.csv +370 -0
  502. teradataml/data/identitymatch_example.json +22 -0
  503. teradataml/data/idf_table.csv +4 -0
  504. teradataml/data/idwt2d_dataTable.csv +5 -0
  505. teradataml/data/idwt_dataTable.csv +8 -0
  506. teradataml/data/idwt_filterTable.csv +3 -0
  507. teradataml/data/impressions.csv +101 -0
  508. teradataml/data/inflation.csv +21 -0
  509. teradataml/data/initial.csv +3 -0
  510. teradataml/data/insect2Cols.csv +61 -0
  511. teradataml/data/insect_sprays.csv +13 -0
  512. teradataml/data/insurance.csv +1339 -0
  513. teradataml/data/interpolator_example.json +13 -0
  514. teradataml/data/interval_data.csv +5 -0
  515. teradataml/data/iris_altinput.csv +481 -0
  516. teradataml/data/iris_attribute_output.csv +8 -0
  517. teradataml/data/iris_attribute_test.csv +121 -0
  518. teradataml/data/iris_attribute_train.csv +481 -0
  519. teradataml/data/iris_category_expect_predict.csv +31 -0
  520. teradataml/data/iris_data.csv +151 -0
  521. teradataml/data/iris_input.csv +151 -0
  522. teradataml/data/iris_response_train.csv +121 -0
  523. teradataml/data/iris_test.csv +31 -0
  524. teradataml/data/iris_train.csv +121 -0
  525. teradataml/data/join_table1.csv +4 -0
  526. teradataml/data/join_table2.csv +4 -0
  527. teradataml/data/jsons/anly_function_name.json +7 -0
  528. teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
  529. teradataml/data/jsons/byom/dataikupredict.json +148 -0
  530. teradataml/data/jsons/byom/datarobotpredict.json +147 -0
  531. teradataml/data/jsons/byom/h2opredict.json +195 -0
  532. teradataml/data/jsons/byom/onnxembeddings.json +267 -0
  533. teradataml/data/jsons/byom/onnxpredict.json +187 -0
  534. teradataml/data/jsons/byom/pmmlpredict.json +147 -0
  535. teradataml/data/jsons/paired_functions.json +450 -0
  536. teradataml/data/jsons/sqle/16.20/Antiselect.json +56 -0
  537. teradataml/data/jsons/sqle/16.20/Attribution.json +249 -0
  538. teradataml/data/jsons/sqle/16.20/DecisionForestPredict.json +156 -0
  539. teradataml/data/jsons/sqle/16.20/DecisionTreePredict.json +170 -0
  540. teradataml/data/jsons/sqle/16.20/GLMPredict.json +122 -0
  541. teradataml/data/jsons/sqle/16.20/MovingAverage.json +367 -0
  542. teradataml/data/jsons/sqle/16.20/NGramSplitter.json +239 -0
  543. teradataml/data/jsons/sqle/16.20/NaiveBayesPredict.json +136 -0
  544. teradataml/data/jsons/sqle/16.20/NaiveBayesTextClassifierPredict.json +235 -0
  545. teradataml/data/jsons/sqle/16.20/Pack.json +98 -0
  546. teradataml/data/jsons/sqle/16.20/SVMSparsePredict.json +162 -0
  547. teradataml/data/jsons/sqle/16.20/Sessionize.json +105 -0
  548. teradataml/data/jsons/sqle/16.20/StringSimilarity.json +86 -0
  549. teradataml/data/jsons/sqle/16.20/Unpack.json +166 -0
  550. teradataml/data/jsons/sqle/16.20/nPath.json +269 -0
  551. teradataml/data/jsons/sqle/17.00/Antiselect.json +56 -0
  552. teradataml/data/jsons/sqle/17.00/Attribution.json +249 -0
  553. teradataml/data/jsons/sqle/17.00/DecisionForestPredict.json +156 -0
  554. teradataml/data/jsons/sqle/17.00/DecisionTreePredict.json +170 -0
  555. teradataml/data/jsons/sqle/17.00/GLMPredict.json +122 -0
  556. teradataml/data/jsons/sqle/17.00/MovingAverage.json +367 -0
  557. teradataml/data/jsons/sqle/17.00/NGramSplitter.json +239 -0
  558. teradataml/data/jsons/sqle/17.00/NaiveBayesPredict.json +136 -0
  559. teradataml/data/jsons/sqle/17.00/NaiveBayesTextClassifierPredict.json +235 -0
  560. teradataml/data/jsons/sqle/17.00/Pack.json +98 -0
  561. teradataml/data/jsons/sqle/17.00/SVMSparsePredict.json +162 -0
  562. teradataml/data/jsons/sqle/17.00/Sessionize.json +105 -0
  563. teradataml/data/jsons/sqle/17.00/StringSimilarity.json +86 -0
  564. teradataml/data/jsons/sqle/17.00/Unpack.json +166 -0
  565. teradataml/data/jsons/sqle/17.00/nPath.json +269 -0
  566. teradataml/data/jsons/sqle/17.05/Antiselect.json +56 -0
  567. teradataml/data/jsons/sqle/17.05/Attribution.json +249 -0
  568. teradataml/data/jsons/sqle/17.05/DecisionForestPredict.json +156 -0
  569. teradataml/data/jsons/sqle/17.05/DecisionTreePredict.json +170 -0
  570. teradataml/data/jsons/sqle/17.05/GLMPredict.json +122 -0
  571. teradataml/data/jsons/sqle/17.05/MovingAverage.json +367 -0
  572. teradataml/data/jsons/sqle/17.05/NGramSplitter.json +239 -0
  573. teradataml/data/jsons/sqle/17.05/NaiveBayesPredict.json +136 -0
  574. teradataml/data/jsons/sqle/17.05/NaiveBayesTextClassifierPredict.json +235 -0
  575. teradataml/data/jsons/sqle/17.05/Pack.json +98 -0
  576. teradataml/data/jsons/sqle/17.05/SVMSparsePredict.json +162 -0
  577. teradataml/data/jsons/sqle/17.05/Sessionize.json +105 -0
  578. teradataml/data/jsons/sqle/17.05/StringSimilarity.json +86 -0
  579. teradataml/data/jsons/sqle/17.05/Unpack.json +166 -0
  580. teradataml/data/jsons/sqle/17.05/nPath.json +269 -0
  581. teradataml/data/jsons/sqle/17.10/Antiselect.json +56 -0
  582. teradataml/data/jsons/sqle/17.10/Attribution.json +249 -0
  583. teradataml/data/jsons/sqle/17.10/DecisionForestPredict.json +185 -0
  584. teradataml/data/jsons/sqle/17.10/DecisionTreePredict.json +172 -0
  585. teradataml/data/jsons/sqle/17.10/GLMPredict.json +151 -0
  586. teradataml/data/jsons/sqle/17.10/MovingAverage.json +368 -0
  587. teradataml/data/jsons/sqle/17.10/NGramSplitter.json +239 -0
  588. teradataml/data/jsons/sqle/17.10/NaiveBayesPredict.json +149 -0
  589. teradataml/data/jsons/sqle/17.10/NaiveBayesTextClassifierPredict.json +288 -0
  590. teradataml/data/jsons/sqle/17.10/Pack.json +133 -0
  591. teradataml/data/jsons/sqle/17.10/SVMSparsePredict.json +193 -0
  592. teradataml/data/jsons/sqle/17.10/Sessionize.json +105 -0
  593. teradataml/data/jsons/sqle/17.10/StringSimilarity.json +86 -0
  594. teradataml/data/jsons/sqle/17.10/TD_BinCodeFit.json +239 -0
  595. teradataml/data/jsons/sqle/17.10/TD_BinCodeTransform.json +70 -0
  596. teradataml/data/jsons/sqle/17.10/TD_CategoricalSummary.json +54 -0
  597. teradataml/data/jsons/sqle/17.10/TD_Chisq.json +68 -0
  598. teradataml/data/jsons/sqle/17.10/TD_ColumnSummary.json +54 -0
  599. teradataml/data/jsons/sqle/17.10/TD_ConvertTo.json +69 -0
  600. teradataml/data/jsons/sqle/17.10/TD_FTest.json +187 -0
  601. teradataml/data/jsons/sqle/17.10/TD_FillRowID.json +52 -0
  602. teradataml/data/jsons/sqle/17.10/TD_FunctionFit.json +46 -0
  603. teradataml/data/jsons/sqle/17.10/TD_FunctionTransform.json +72 -0
  604. teradataml/data/jsons/sqle/17.10/TD_GetRowsWithMissingValues.json +53 -0
  605. teradataml/data/jsons/sqle/17.10/TD_GetRowsWithoutMissingValues.json +53 -0
  606. teradataml/data/jsons/sqle/17.10/TD_Histogram.json +133 -0
  607. teradataml/data/jsons/sqle/17.10/TD_NumApply.json +147 -0
  608. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingFit.json +183 -0
  609. teradataml/data/jsons/sqle/17.10/TD_OneHotEncodingTransform.json +66 -0
  610. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterFit.json +197 -0
  611. teradataml/data/jsons/sqle/17.10/TD_OutlierFilterTransform.json +48 -0
  612. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesFit.json +114 -0
  613. teradataml/data/jsons/sqle/17.10/TD_PolynomialFeaturesTransform.json +72 -0
  614. teradataml/data/jsons/sqle/17.10/TD_QQNorm.json +112 -0
  615. teradataml/data/jsons/sqle/17.10/TD_RoundColumns.json +93 -0
  616. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeFit.json +128 -0
  617. teradataml/data/jsons/sqle/17.10/TD_RowNormalizeTransform.json +71 -0
  618. teradataml/data/jsons/sqle/17.10/TD_ScaleFit.json +157 -0
  619. teradataml/data/jsons/sqle/17.10/TD_ScaleTransform.json +71 -0
  620. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeFit.json +148 -0
  621. teradataml/data/jsons/sqle/17.10/TD_SimpleImputeTransform.json +48 -0
  622. teradataml/data/jsons/sqle/17.10/TD_StrApply.json +240 -0
  623. teradataml/data/jsons/sqle/17.10/TD_UnivariateStatistics.json +119 -0
  624. teradataml/data/jsons/sqle/17.10/TD_WhichMax.json +53 -0
  625. teradataml/data/jsons/sqle/17.10/TD_WhichMin.json +53 -0
  626. teradataml/data/jsons/sqle/17.10/TD_ZTest.json +171 -0
  627. teradataml/data/jsons/sqle/17.10/Unpack.json +188 -0
  628. teradataml/data/jsons/sqle/17.10/nPath.json +269 -0
  629. teradataml/data/jsons/sqle/17.20/Antiselect.json +56 -0
  630. teradataml/data/jsons/sqle/17.20/Attribution.json +249 -0
  631. teradataml/data/jsons/sqle/17.20/DecisionForestPredict.json +185 -0
  632. teradataml/data/jsons/sqle/17.20/DecisionTreePredict.json +172 -0
  633. teradataml/data/jsons/sqle/17.20/GLMPredict.json +151 -0
  634. teradataml/data/jsons/sqle/17.20/MovingAverage.json +367 -0
  635. teradataml/data/jsons/sqle/17.20/NGramSplitter.json +239 -0
  636. teradataml/data/jsons/sqle/17.20/NaiveBayesPredict.json +149 -0
  637. teradataml/data/jsons/sqle/17.20/NaiveBayesTextClassifierPredict.json +287 -0
  638. teradataml/data/jsons/sqle/17.20/Pack.json +133 -0
  639. teradataml/data/jsons/sqle/17.20/SVMSparsePredict.json +192 -0
  640. teradataml/data/jsons/sqle/17.20/Sessionize.json +105 -0
  641. teradataml/data/jsons/sqle/17.20/StringSimilarity.json +86 -0
  642. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +149 -0
  643. teradataml/data/jsons/sqle/17.20/TD_Apriori.json +181 -0
  644. teradataml/data/jsons/sqle/17.20/TD_BinCodeFit.json +239 -0
  645. teradataml/data/jsons/sqle/17.20/TD_BinCodeTransform.json +71 -0
  646. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  647. teradataml/data/jsons/sqle/17.20/TD_CategoricalSummary.json +53 -0
  648. teradataml/data/jsons/sqle/17.20/TD_Chisq.json +68 -0
  649. teradataml/data/jsons/sqle/17.20/TD_ClassificationEvaluator.json +146 -0
  650. teradataml/data/jsons/sqle/17.20/TD_ColumnSummary.json +53 -0
  651. teradataml/data/jsons/sqle/17.20/TD_ColumnTransformer.json +218 -0
  652. teradataml/data/jsons/sqle/17.20/TD_ConvertTo.json +92 -0
  653. teradataml/data/jsons/sqle/17.20/TD_DecisionForest.json +260 -0
  654. teradataml/data/jsons/sqle/17.20/TD_DecisionForestPredict.json +139 -0
  655. teradataml/data/jsons/sqle/17.20/TD_FTest.json +269 -0
  656. teradataml/data/jsons/sqle/17.20/TD_FillRowID.json +52 -0
  657. teradataml/data/jsons/sqle/17.20/TD_FunctionFit.json +46 -0
  658. teradataml/data/jsons/sqle/17.20/TD_FunctionTransform.json +72 -0
  659. teradataml/data/jsons/sqle/17.20/TD_GLM.json +507 -0
  660. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +168 -0
  661. teradataml/data/jsons/sqle/17.20/TD_GLMPerSegment.json +411 -0
  662. teradataml/data/jsons/sqle/17.20/TD_GLMPredictPerSegment.json +146 -0
  663. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +93 -0
  664. teradataml/data/jsons/sqle/17.20/TD_GetRowsWithMissingValues.json +76 -0
  665. teradataml/data/jsons/sqle/17.20/TD_GetRowsWithoutMissingValues.json +76 -0
  666. teradataml/data/jsons/sqle/17.20/TD_Histogram.json +152 -0
  667. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +232 -0
  668. teradataml/data/jsons/sqle/17.20/TD_KMeansPredict.json +87 -0
  669. teradataml/data/jsons/sqle/17.20/TD_KNN.json +262 -0
  670. teradataml/data/jsons/sqle/17.20/TD_NERExtractor.json +145 -0
  671. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  672. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  673. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesTextClassifierTrainer.json +137 -0
  674. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +102 -0
  675. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineTransform.json +71 -0
  676. teradataml/data/jsons/sqle/17.20/TD_NumApply.json +147 -0
  677. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +316 -0
  678. teradataml/data/jsons/sqle/17.20/TD_OneClassSVMPredict.json +124 -0
  679. teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingFit.json +271 -0
  680. teradataml/data/jsons/sqle/17.20/TD_OneHotEncodingTransform.json +65 -0
  681. teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingFit.json +229 -0
  682. teradataml/data/jsons/sqle/17.20/TD_OrdinalEncodingTransform.json +75 -0
  683. teradataml/data/jsons/sqle/17.20/TD_OutlierFilterFit.json +217 -0
  684. teradataml/data/jsons/sqle/17.20/TD_OutlierFilterTransform.json +48 -0
  685. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  686. teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesFit.json +114 -0
  687. teradataml/data/jsons/sqle/17.20/TD_PolynomialFeaturesTransform.json +72 -0
  688. teradataml/data/jsons/sqle/17.20/TD_QQNorm.json +111 -0
  689. teradataml/data/jsons/sqle/17.20/TD_ROC.json +179 -0
  690. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionFit.json +179 -0
  691. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionMinComponents.json +74 -0
  692. teradataml/data/jsons/sqle/17.20/TD_RandomProjectionTransform.json +74 -0
  693. teradataml/data/jsons/sqle/17.20/TD_RegressionEvaluator.json +138 -0
  694. teradataml/data/jsons/sqle/17.20/TD_RoundColumns.json +93 -0
  695. teradataml/data/jsons/sqle/17.20/TD_RowNormalizeFit.json +128 -0
  696. teradataml/data/jsons/sqle/17.20/TD_RowNormalizeTransform.json +71 -0
  697. teradataml/data/jsons/sqle/17.20/TD_SMOTE.json +267 -0
  698. teradataml/data/jsons/sqle/17.20/TD_SVM.json +389 -0
  699. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +142 -0
  700. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +310 -0
  701. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +120 -0
  702. teradataml/data/jsons/sqle/17.20/TD_SentimentExtractor.json +194 -0
  703. teradataml/data/jsons/sqle/17.20/TD_Shap.json +221 -0
  704. teradataml/data/jsons/sqle/17.20/TD_Silhouette.json +143 -0
  705. teradataml/data/jsons/sqle/17.20/TD_SimpleImputeFit.json +147 -0
  706. teradataml/data/jsons/sqle/17.20/TD_SimpleImputeTransform.json +48 -0
  707. teradataml/data/jsons/sqle/17.20/TD_StrApply.json +240 -0
  708. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  709. teradataml/data/jsons/sqle/17.20/TD_TargetEncodingFit.json +248 -0
  710. teradataml/data/jsons/sqle/17.20/TD_TargetEncodingTransform.json +75 -0
  711. teradataml/data/jsons/sqle/17.20/TD_TextMorph.json +134 -0
  712. teradataml/data/jsons/sqle/17.20/TD_TextParser.json +297 -0
  713. teradataml/data/jsons/sqle/17.20/TD_TrainTestSplit.json +142 -0
  714. teradataml/data/jsons/sqle/17.20/TD_UnivariateStatistics.json +117 -0
  715. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  716. teradataml/data/jsons/sqle/17.20/TD_VectorDistance.json +183 -0
  717. teradataml/data/jsons/sqle/17.20/TD_WhichMax.json +53 -0
  718. teradataml/data/jsons/sqle/17.20/TD_WhichMin.json +53 -0
  719. teradataml/data/jsons/sqle/17.20/TD_WordEmbeddings.json +241 -0
  720. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +330 -0
  721. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +195 -0
  722. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +247 -0
  723. teradataml/data/jsons/sqle/17.20/Unpack.json +188 -0
  724. teradataml/data/jsons/sqle/17.20/nPath.json +269 -0
  725. teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +370 -0
  726. teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +460 -0
  727. teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +385 -0
  728. teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +369 -0
  729. teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +369 -0
  730. teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +369 -0
  731. teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +369 -0
  732. teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +400 -0
  733. teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +401 -0
  734. teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +384 -0
  735. teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +384 -0
  736. teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
  737. teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
  738. teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
  739. teradataml/data/jsons/sqle/20.00/TD_HNSW.json +296 -0
  740. teradataml/data/jsons/sqle/20.00/TD_HNSWPredict.json +206 -0
  741. teradataml/data/jsons/sqle/20.00/TD_HNSWSummary.json +32 -0
  742. teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
  743. teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
  744. teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
  745. teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
  746. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  747. teradataml/data/jsons/tableoperator/17.00/read_nos.json +198 -0
  748. teradataml/data/jsons/tableoperator/17.05/read_nos.json +198 -0
  749. teradataml/data/jsons/tableoperator/17.05/write_nos.json +195 -0
  750. teradataml/data/jsons/tableoperator/17.10/read_nos.json +184 -0
  751. teradataml/data/jsons/tableoperator/17.10/write_nos.json +195 -0
  752. teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
  753. teradataml/data/jsons/tableoperator/17.20/read_nos.json +183 -0
  754. teradataml/data/jsons/tableoperator/17.20/write_nos.json +224 -0
  755. teradataml/data/jsons/uaf/17.20/TD_ACF.json +132 -0
  756. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +396 -0
  757. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +77 -0
  758. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +153 -0
  759. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  760. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  761. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +107 -0
  762. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +106 -0
  763. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +89 -0
  764. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +104 -0
  765. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +78 -0
  766. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +66 -0
  767. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +87 -0
  768. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +134 -0
  769. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +144 -0
  770. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +108 -0
  771. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +108 -0
  772. teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +78 -0
  773. teradataml/data/jsons/uaf/17.20/TD_DIFF.json +92 -0
  774. teradataml/data/jsons/uaf/17.20/TD_DTW.json +114 -0
  775. teradataml/data/jsons/uaf/17.20/TD_DURBIN_WATSON.json +101 -0
  776. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  777. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  778. teradataml/data/jsons/uaf/17.20/TD_EXTRACT_RESULTS.json +39 -0
  779. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +101 -0
  780. teradataml/data/jsons/uaf/17.20/TD_GENSERIES4FORMULA.json +85 -0
  781. teradataml/data/jsons/uaf/17.20/TD_GENSERIES4SINUSOIDS.json +71 -0
  782. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +139 -0
  783. teradataml/data/jsons/uaf/17.20/TD_HOLT_WINTERS_FORECASTER.json +313 -0
  784. teradataml/data/jsons/uaf/17.20/TD_IDFFT.json +58 -0
  785. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +81 -0
  786. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  787. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  788. teradataml/data/jsons/uaf/17.20/TD_INPUTVALIDATOR.json +64 -0
  789. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  790. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +182 -0
  791. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +103 -0
  792. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +181 -0
  793. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  794. teradataml/data/jsons/uaf/17.20/TD_MATRIXMULTIPLY.json +68 -0
  795. teradataml/data/jsons/uaf/17.20/TD_MINFO.json +67 -0
  796. teradataml/data/jsons/uaf/17.20/TD_MULTIVAR_REGR.json +179 -0
  797. teradataml/data/jsons/uaf/17.20/TD_PACF.json +114 -0
  798. teradataml/data/jsons/uaf/17.20/TD_PORTMAN.json +119 -0
  799. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +175 -0
  800. teradataml/data/jsons/uaf/17.20/TD_POWERTRANSFORM.json +98 -0
  801. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +194 -0
  802. teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
  803. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +143 -0
  804. teradataml/data/jsons/uaf/17.20/TD_SELECTION_CRITERIA.json +90 -0
  805. teradataml/data/jsons/uaf/17.20/TD_SIGNIF_PERIODICITIES.json +80 -0
  806. teradataml/data/jsons/uaf/17.20/TD_SIGNIF_RESIDMEAN.json +68 -0
  807. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +184 -0
  808. teradataml/data/jsons/uaf/17.20/TD_SINFO.json +58 -0
  809. teradataml/data/jsons/uaf/17.20/TD_SMOOTHMA.json +163 -0
  810. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +101 -0
  811. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +112 -0
  812. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +95 -0
  813. teradataml/data/jsons/uaf/17.20/TD_WHITES_GENERAL.json +78 -0
  814. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
  815. teradataml/data/kmeans_example.json +23 -0
  816. teradataml/data/kmeans_table.csv +10 -0
  817. teradataml/data/kmeans_us_arrests_data.csv +51 -0
  818. teradataml/data/knn_example.json +19 -0
  819. teradataml/data/knnrecommender_example.json +7 -0
  820. teradataml/data/knnrecommenderpredict_example.json +12 -0
  821. teradataml/data/lar_example.json +17 -0
  822. teradataml/data/larpredict_example.json +30 -0
  823. teradataml/data/lc_new_predictors.csv +5 -0
  824. teradataml/data/lc_new_reference.csv +9 -0
  825. teradataml/data/lda_example.json +9 -0
  826. teradataml/data/ldainference_example.json +15 -0
  827. teradataml/data/ldatopicsummary_example.json +9 -0
  828. teradataml/data/levendist_input.csv +13 -0
  829. teradataml/data/levenshteindistance_example.json +10 -0
  830. teradataml/data/linreg_example.json +10 -0
  831. teradataml/data/load_example_data.py +350 -0
  832. teradataml/data/loan_prediction.csv +295 -0
  833. teradataml/data/lungcancer.csv +138 -0
  834. teradataml/data/mappingdata.csv +12 -0
  835. teradataml/data/medical_readings.csv +101 -0
  836. teradataml/data/milk_timeseries.csv +157 -0
  837. teradataml/data/min_max_titanic.csv +4 -0
  838. teradataml/data/minhash_example.json +6 -0
  839. teradataml/data/ml_ratings.csv +7547 -0
  840. teradataml/data/ml_ratings_10.csv +2445 -0
  841. teradataml/data/mobile_data.csv +13 -0
  842. teradataml/data/model1_table.csv +5 -0
  843. teradataml/data/model2_table.csv +5 -0
  844. teradataml/data/models/License_file.txt +1 -0
  845. teradataml/data/models/License_file_empty.txt +0 -0
  846. teradataml/data/models/dataiku_iris_data_ann_thin +0 -0
  847. teradataml/data/models/dr_iris_rf +0 -0
  848. teradataml/data/models/iris_db_dt_model_sklearn.onnx +0 -0
  849. teradataml/data/models/iris_db_dt_model_sklearn_floattensor.onnx +0 -0
  850. teradataml/data/models/iris_db_glm_model.pmml +57 -0
  851. teradataml/data/models/iris_db_xgb_model.pmml +4471 -0
  852. teradataml/data/models/iris_kmeans_model +0 -0
  853. teradataml/data/models/iris_mojo_glm_h2o_model +0 -0
  854. teradataml/data/models/iris_mojo_xgb_h2o_model +0 -0
  855. teradataml/data/modularity_example.json +12 -0
  856. teradataml/data/movavg_example.json +8 -0
  857. teradataml/data/mtx1.csv +7 -0
  858. teradataml/data/mtx2.csv +13 -0
  859. teradataml/data/multi_model_classification.csv +401 -0
  860. teradataml/data/multi_model_regression.csv +401 -0
  861. teradataml/data/mvdfft8.csv +9 -0
  862. teradataml/data/naivebayes_example.json +10 -0
  863. teradataml/data/naivebayespredict_example.json +19 -0
  864. teradataml/data/naivebayestextclassifier2_example.json +7 -0
  865. teradataml/data/naivebayestextclassifier_example.json +8 -0
  866. teradataml/data/naivebayestextclassifierpredict_example.json +32 -0
  867. teradataml/data/name_Find_configure.csv +10 -0
  868. teradataml/data/namedentityfinder_example.json +14 -0
  869. teradataml/data/namedentityfinderevaluator_example.json +10 -0
  870. teradataml/data/namedentityfindertrainer_example.json +6 -0
  871. teradataml/data/nb_iris_input_test.csv +31 -0
  872. teradataml/data/nb_iris_input_train.csv +121 -0
  873. teradataml/data/nbp_iris_model.csv +13 -0
  874. teradataml/data/ner_dict.csv +8 -0
  875. teradataml/data/ner_extractor_text.csv +2 -0
  876. teradataml/data/ner_input_eng.csv +7 -0
  877. teradataml/data/ner_rule.csv +5 -0
  878. teradataml/data/ner_sports_test2.csv +29 -0
  879. teradataml/data/ner_sports_train.csv +501 -0
  880. teradataml/data/nerevaluator_example.json +6 -0
  881. teradataml/data/nerextractor_example.json +18 -0
  882. teradataml/data/nermem_sports_test.csv +18 -0
  883. teradataml/data/nermem_sports_train.csv +51 -0
  884. teradataml/data/nertrainer_example.json +7 -0
  885. teradataml/data/ngrams_example.json +7 -0
  886. teradataml/data/notebooks/__init__.py +0 -0
  887. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Aggregate Functions using SQLAlchemy.ipynb +1455 -0
  888. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Arithmetic Functions Using SQLAlchemy.ipynb +1993 -0
  889. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Bit-Byte Manipulation Functions using SQLAlchemy.ipynb +1492 -0
  890. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Built-in functions using SQLAlchemy.ipynb +536 -0
  891. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Regular Expressions Using SQLAlchemy.ipynb +570 -0
  892. teradataml/data/notebooks/sqlalchemy/Teradata Vantage String Functions Using SQLAlchemy.ipynb +2559 -0
  893. teradataml/data/notebooks/sqlalchemy/Teradata Vantage Window Aggregate Functions using SQLAlchemy.ipynb +2911 -0
  894. teradataml/data/notebooks/sqlalchemy/Using Generic SQLAlchemy ClauseElements teradataml DataFrame assign method.ipynb +698 -0
  895. teradataml/data/notebooks/sqlalchemy/__init__.py +0 -0
  896. teradataml/data/notebooks/sqlalchemy/teradataml filtering using SQLAlchemy ClauseElements.ipynb +784 -0
  897. teradataml/data/npath_example.json +23 -0
  898. teradataml/data/ntree_example.json +14 -0
  899. teradataml/data/numeric_strings.csv +5 -0
  900. teradataml/data/numerics.csv +4 -0
  901. teradataml/data/ocean_buoy.csv +17 -0
  902. teradataml/data/ocean_buoy2.csv +17 -0
  903. teradataml/data/ocean_buoys.csv +28 -0
  904. teradataml/data/ocean_buoys2.csv +10 -0
  905. teradataml/data/ocean_buoys_nonpti.csv +28 -0
  906. teradataml/data/ocean_buoys_seq.csv +29 -0
  907. teradataml/data/onehot_encoder_train.csv +4 -0
  908. teradataml/data/openml_example.json +92 -0
  909. teradataml/data/optional_event_table.csv +4 -0
  910. teradataml/data/orders1.csv +11 -0
  911. teradataml/data/orders1_12.csv +13 -0
  912. teradataml/data/orders_ex.csv +4 -0
  913. teradataml/data/pack_example.json +9 -0
  914. teradataml/data/package_tracking.csv +19 -0
  915. teradataml/data/package_tracking_pti.csv +19 -0
  916. teradataml/data/pagerank_example.json +13 -0
  917. teradataml/data/paragraphs_input.csv +6 -0
  918. teradataml/data/pathanalyzer_example.json +8 -0
  919. teradataml/data/pathgenerator_example.json +8 -0
  920. teradataml/data/patient_profile.csv +101 -0
  921. teradataml/data/pattern_matching_data.csv +11 -0
  922. teradataml/data/payment_fraud_dataset.csv +10001 -0
  923. teradataml/data/peppers.png +0 -0
  924. teradataml/data/phrases.csv +7 -0
  925. teradataml/data/pivot_example.json +9 -0
  926. teradataml/data/pivot_input.csv +22 -0
  927. teradataml/data/playerRating.csv +31 -0
  928. teradataml/data/pos_input.csv +40 -0
  929. teradataml/data/postagger_example.json +7 -0
  930. teradataml/data/posttagger_output.csv +44 -0
  931. teradataml/data/production_data.csv +17 -0
  932. teradataml/data/production_data2.csv +7 -0
  933. teradataml/data/randomsample_example.json +32 -0
  934. teradataml/data/randomwalksample_example.json +9 -0
  935. teradataml/data/rank_table.csv +6 -0
  936. teradataml/data/real_values.csv +14 -0
  937. teradataml/data/ref_mobile_data.csv +4 -0
  938. teradataml/data/ref_mobile_data_dense.csv +2 -0
  939. teradataml/data/ref_url.csv +17 -0
  940. teradataml/data/restaurant_reviews.csv +7 -0
  941. teradataml/data/retail_churn_table.csv +27772 -0
  942. teradataml/data/river_data.csv +145 -0
  943. teradataml/data/roc_example.json +8 -0
  944. teradataml/data/roc_input.csv +101 -0
  945. teradataml/data/rule_inputs.csv +6 -0
  946. teradataml/data/rule_table.csv +2 -0
  947. teradataml/data/sales.csv +7 -0
  948. teradataml/data/sales_transaction.csv +501 -0
  949. teradataml/data/salesdata.csv +342 -0
  950. teradataml/data/sample_cities.csv +3 -0
  951. teradataml/data/sample_shapes.csv +11 -0
  952. teradataml/data/sample_streets.csv +3 -0
  953. teradataml/data/sampling_example.json +16 -0
  954. teradataml/data/sax_example.json +17 -0
  955. teradataml/data/scale_attributes.csv +3 -0
  956. teradataml/data/scale_example.json +74 -0
  957. teradataml/data/scale_housing.csv +11 -0
  958. teradataml/data/scale_housing_test.csv +6 -0
  959. teradataml/data/scale_input_part_sparse.csv +31 -0
  960. teradataml/data/scale_input_partitioned.csv +16 -0
  961. teradataml/data/scale_input_sparse.csv +11 -0
  962. teradataml/data/scale_parameters.csv +3 -0
  963. teradataml/data/scale_stat.csv +11 -0
  964. teradataml/data/scalebypartition_example.json +13 -0
  965. teradataml/data/scalemap_example.json +13 -0
  966. teradataml/data/scalesummary_example.json +12 -0
  967. teradataml/data/score_category.csv +101 -0
  968. teradataml/data/score_summary.csv +4 -0
  969. teradataml/data/script_example.json +10 -0
  970. teradataml/data/scripts/deploy_script.py +84 -0
  971. teradataml/data/scripts/lightgbm/dataset.template +175 -0
  972. teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +264 -0
  973. teradataml/data/scripts/lightgbm/lightgbm_function.template +234 -0
  974. teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +177 -0
  975. teradataml/data/scripts/mapper.R +20 -0
  976. teradataml/data/scripts/mapper.py +16 -0
  977. teradataml/data/scripts/mapper_replace.py +16 -0
  978. teradataml/data/scripts/sklearn/__init__.py +0 -0
  979. teradataml/data/scripts/sklearn/sklearn_fit.py +205 -0
  980. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +148 -0
  981. teradataml/data/scripts/sklearn/sklearn_function.template +144 -0
  982. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +166 -0
  983. teradataml/data/scripts/sklearn/sklearn_neighbors.py +161 -0
  984. teradataml/data/scripts/sklearn/sklearn_score.py +145 -0
  985. teradataml/data/scripts/sklearn/sklearn_transform.py +327 -0
  986. teradataml/data/sdk/modelops/modelops_spec.json +101737 -0
  987. teradataml/data/seeds.csv +10 -0
  988. teradataml/data/sentenceextractor_example.json +7 -0
  989. teradataml/data/sentiment_extract_input.csv +11 -0
  990. teradataml/data/sentiment_train.csv +16 -0
  991. teradataml/data/sentiment_word.csv +20 -0
  992. teradataml/data/sentiment_word_input.csv +20 -0
  993. teradataml/data/sentimentextractor_example.json +24 -0
  994. teradataml/data/sentimenttrainer_example.json +8 -0
  995. teradataml/data/sequence_table.csv +10 -0
  996. teradataml/data/seriessplitter_example.json +8 -0
  997. teradataml/data/sessionize_example.json +17 -0
  998. teradataml/data/sessionize_table.csv +116 -0
  999. teradataml/data/setop_test1.csv +24 -0
  1000. teradataml/data/setop_test2.csv +22 -0
  1001. teradataml/data/soc_nw_edges.csv +11 -0
  1002. teradataml/data/soc_nw_vertices.csv +8 -0
  1003. teradataml/data/souvenir_timeseries.csv +168 -0
  1004. teradataml/data/sparse_iris_attribute.csv +5 -0
  1005. teradataml/data/sparse_iris_test.csv +121 -0
  1006. teradataml/data/sparse_iris_train.csv +601 -0
  1007. teradataml/data/star1.csv +6 -0
  1008. teradataml/data/star_pivot.csv +8 -0
  1009. teradataml/data/state_transition.csv +5 -0
  1010. teradataml/data/stock_data.csv +53 -0
  1011. teradataml/data/stock_movement.csv +11 -0
  1012. teradataml/data/stock_vol.csv +76 -0
  1013. teradataml/data/stop_words.csv +8 -0
  1014. teradataml/data/store_sales.csv +37 -0
  1015. teradataml/data/stringsimilarity_example.json +8 -0
  1016. teradataml/data/strsimilarity_input.csv +13 -0
  1017. teradataml/data/students.csv +101 -0
  1018. teradataml/data/svm_iris_input_test.csv +121 -0
  1019. teradataml/data/svm_iris_input_train.csv +481 -0
  1020. teradataml/data/svm_iris_model.csv +7 -0
  1021. teradataml/data/svmdense_example.json +10 -0
  1022. teradataml/data/svmdensepredict_example.json +19 -0
  1023. teradataml/data/svmsparse_example.json +8 -0
  1024. teradataml/data/svmsparsepredict_example.json +14 -0
  1025. teradataml/data/svmsparsesummary_example.json +8 -0
  1026. teradataml/data/target_mobile_data.csv +13 -0
  1027. teradataml/data/target_mobile_data_dense.csv +5 -0
  1028. teradataml/data/target_udt_data.csv +8 -0
  1029. teradataml/data/tdnerextractor_example.json +14 -0
  1030. teradataml/data/templatedata.csv +1201 -0
  1031. teradataml/data/templates/open_source_ml.json +11 -0
  1032. teradataml/data/teradata_icon.ico +0 -0
  1033. teradataml/data/teradataml_example.json +1473 -0
  1034. teradataml/data/test_classification.csv +101 -0
  1035. teradataml/data/test_loan_prediction.csv +53 -0
  1036. teradataml/data/test_pacf_12.csv +37 -0
  1037. teradataml/data/test_prediction.csv +101 -0
  1038. teradataml/data/test_regression.csv +101 -0
  1039. teradataml/data/test_river2.csv +109 -0
  1040. teradataml/data/text_inputs.csv +6 -0
  1041. teradataml/data/textchunker_example.json +8 -0
  1042. teradataml/data/textclassifier_example.json +7 -0
  1043. teradataml/data/textclassifier_input.csv +7 -0
  1044. teradataml/data/textclassifiertrainer_example.json +7 -0
  1045. teradataml/data/textmorph_example.json +11 -0
  1046. teradataml/data/textparser_example.json +15 -0
  1047. teradataml/data/texttagger_example.json +12 -0
  1048. teradataml/data/texttokenizer_example.json +7 -0
  1049. teradataml/data/texttrainer_input.csv +11 -0
  1050. teradataml/data/tf_example.json +7 -0
  1051. teradataml/data/tfidf_example.json +14 -0
  1052. teradataml/data/tfidf_input1.csv +201 -0
  1053. teradataml/data/tfidf_train.csv +6 -0
  1054. teradataml/data/time_table1.csv +535 -0
  1055. teradataml/data/time_table2.csv +14 -0
  1056. teradataml/data/timeseriesdata.csv +1601 -0
  1057. teradataml/data/timeseriesdatasetsd4.csv +105 -0
  1058. teradataml/data/timestamp_data.csv +4 -0
  1059. teradataml/data/titanic.csv +892 -0
  1060. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  1061. teradataml/data/to_num_data.csv +4 -0
  1062. teradataml/data/tochar_data.csv +5 -0
  1063. teradataml/data/token_table.csv +696 -0
  1064. teradataml/data/train_multiclass.csv +101 -0
  1065. teradataml/data/train_regression.csv +101 -0
  1066. teradataml/data/train_regression_multiple_labels.csv +101 -0
  1067. teradataml/data/train_tracking.csv +28 -0
  1068. teradataml/data/trans_dense.csv +16 -0
  1069. teradataml/data/trans_sparse.csv +55 -0
  1070. teradataml/data/transformation_table.csv +6 -0
  1071. teradataml/data/transformation_table_new.csv +2 -0
  1072. teradataml/data/tv_spots.csv +16 -0
  1073. teradataml/data/twod_climate_data.csv +117 -0
  1074. teradataml/data/uaf_example.json +529 -0
  1075. teradataml/data/univariatestatistics_example.json +9 -0
  1076. teradataml/data/unpack_example.json +10 -0
  1077. teradataml/data/unpivot_example.json +25 -0
  1078. teradataml/data/unpivot_input.csv +8 -0
  1079. teradataml/data/url_data.csv +10 -0
  1080. teradataml/data/us_air_pass.csv +37 -0
  1081. teradataml/data/us_population.csv +624 -0
  1082. teradataml/data/us_states_shapes.csv +52 -0
  1083. teradataml/data/varmax_example.json +18 -0
  1084. teradataml/data/vectordistance_example.json +30 -0
  1085. teradataml/data/ville_climatedata.csv +121 -0
  1086. teradataml/data/ville_tempdata.csv +12 -0
  1087. teradataml/data/ville_tempdata1.csv +12 -0
  1088. teradataml/data/ville_temperature.csv +11 -0
  1089. teradataml/data/waveletTable.csv +1605 -0
  1090. teradataml/data/waveletTable2.csv +1605 -0
  1091. teradataml/data/weightedmovavg_example.json +9 -0
  1092. teradataml/data/wft_testing.csv +5 -0
  1093. teradataml/data/windowdfft.csv +16 -0
  1094. teradataml/data/wine_data.csv +1600 -0
  1095. teradataml/data/word_embed_input_table1.csv +6 -0
  1096. teradataml/data/word_embed_input_table2.csv +5 -0
  1097. teradataml/data/word_embed_model.csv +23 -0
  1098. teradataml/data/words_input.csv +13 -0
  1099. teradataml/data/xconvolve_complex_left.csv +6 -0
  1100. teradataml/data/xconvolve_complex_leftmulti.csv +6 -0
  1101. teradataml/data/xgboost_example.json +36 -0
  1102. teradataml/data/xgboostpredict_example.json +32 -0
  1103. teradataml/data/ztest_example.json +16 -0
  1104. teradataml/dataframe/__init__.py +0 -0
  1105. teradataml/dataframe/copy_to.py +2446 -0
  1106. teradataml/dataframe/data_transfer.py +2840 -0
  1107. teradataml/dataframe/dataframe.py +20908 -0
  1108. teradataml/dataframe/dataframe_utils.py +2114 -0
  1109. teradataml/dataframe/fastload.py +794 -0
  1110. teradataml/dataframe/functions.py +2110 -0
  1111. teradataml/dataframe/indexer.py +424 -0
  1112. teradataml/dataframe/row.py +160 -0
  1113. teradataml/dataframe/setop.py +1171 -0
  1114. teradataml/dataframe/sql.py +10904 -0
  1115. teradataml/dataframe/sql_function_parameters.py +440 -0
  1116. teradataml/dataframe/sql_functions.py +652 -0
  1117. teradataml/dataframe/sql_interfaces.py +220 -0
  1118. teradataml/dataframe/vantage_function_types.py +675 -0
  1119. teradataml/dataframe/window.py +694 -0
  1120. teradataml/dbutils/__init__.py +3 -0
  1121. teradataml/dbutils/dbutils.py +2871 -0
  1122. teradataml/dbutils/filemgr.py +318 -0
  1123. teradataml/gen_ai/__init__.py +2 -0
  1124. teradataml/gen_ai/convAI.py +473 -0
  1125. teradataml/geospatial/__init__.py +4 -0
  1126. teradataml/geospatial/geodataframe.py +1105 -0
  1127. teradataml/geospatial/geodataframecolumn.py +392 -0
  1128. teradataml/geospatial/geometry_types.py +926 -0
  1129. teradataml/hyperparameter_tuner/__init__.py +1 -0
  1130. teradataml/hyperparameter_tuner/optimizer.py +4115 -0
  1131. teradataml/hyperparameter_tuner/utils.py +303 -0
  1132. teradataml/lib/__init__.py +0 -0
  1133. teradataml/lib/aed_0_1.dll +0 -0
  1134. teradataml/lib/libaed_0_1.dylib +0 -0
  1135. teradataml/lib/libaed_0_1.so +0 -0
  1136. teradataml/lib/libaed_0_1_aarch64.so +0 -0
  1137. teradataml/lib/libaed_0_1_ppc64le.so +0 -0
  1138. teradataml/opensource/__init__.py +1 -0
  1139. teradataml/opensource/_base.py +1321 -0
  1140. teradataml/opensource/_class.py +464 -0
  1141. teradataml/opensource/_constants.py +61 -0
  1142. teradataml/opensource/_lightgbm.py +949 -0
  1143. teradataml/opensource/_sklearn.py +1008 -0
  1144. teradataml/opensource/_wrapper_utils.py +267 -0
  1145. teradataml/options/__init__.py +148 -0
  1146. teradataml/options/configure.py +489 -0
  1147. teradataml/options/display.py +187 -0
  1148. teradataml/plot/__init__.py +3 -0
  1149. teradataml/plot/axis.py +1427 -0
  1150. teradataml/plot/constants.py +15 -0
  1151. teradataml/plot/figure.py +431 -0
  1152. teradataml/plot/plot.py +810 -0
  1153. teradataml/plot/query_generator.py +83 -0
  1154. teradataml/plot/subplot.py +216 -0
  1155. teradataml/scriptmgmt/UserEnv.py +4273 -0
  1156. teradataml/scriptmgmt/__init__.py +3 -0
  1157. teradataml/scriptmgmt/lls_utils.py +2157 -0
  1158. teradataml/sdk/README.md +79 -0
  1159. teradataml/sdk/__init__.py +4 -0
  1160. teradataml/sdk/_auth_modes.py +422 -0
  1161. teradataml/sdk/_func_params.py +487 -0
  1162. teradataml/sdk/_json_parser.py +453 -0
  1163. teradataml/sdk/_openapi_spec_constants.py +249 -0
  1164. teradataml/sdk/_utils.py +236 -0
  1165. teradataml/sdk/api_client.py +900 -0
  1166. teradataml/sdk/constants.py +62 -0
  1167. teradataml/sdk/modelops/__init__.py +98 -0
  1168. teradataml/sdk/modelops/_client.py +409 -0
  1169. teradataml/sdk/modelops/_constants.py +304 -0
  1170. teradataml/sdk/modelops/models.py +2308 -0
  1171. teradataml/sdk/spinner.py +107 -0
  1172. teradataml/series/__init__.py +0 -0
  1173. teradataml/series/series.py +537 -0
  1174. teradataml/series/series_utils.py +71 -0
  1175. teradataml/store/__init__.py +12 -0
  1176. teradataml/store/feature_store/__init__.py +0 -0
  1177. teradataml/store/feature_store/constants.py +658 -0
  1178. teradataml/store/feature_store/feature_store.py +4814 -0
  1179. teradataml/store/feature_store/mind_map.py +639 -0
  1180. teradataml/store/feature_store/models.py +7330 -0
  1181. teradataml/store/feature_store/utils.py +390 -0
  1182. teradataml/table_operators/Apply.py +979 -0
  1183. teradataml/table_operators/Script.py +1739 -0
  1184. teradataml/table_operators/TableOperator.py +1343 -0
  1185. teradataml/table_operators/__init__.py +2 -0
  1186. teradataml/table_operators/apply_query_generator.py +262 -0
  1187. teradataml/table_operators/query_generator.py +493 -0
  1188. teradataml/table_operators/table_operator_query_generator.py +462 -0
  1189. teradataml/table_operators/table_operator_util.py +726 -0
  1190. teradataml/table_operators/templates/dataframe_apply.template +184 -0
  1191. teradataml/table_operators/templates/dataframe_map.template +176 -0
  1192. teradataml/table_operators/templates/dataframe_register.template +73 -0
  1193. teradataml/table_operators/templates/dataframe_udf.template +67 -0
  1194. teradataml/table_operators/templates/script_executor.template +170 -0
  1195. teradataml/telemetry_utils/__init__.py +0 -0
  1196. teradataml/telemetry_utils/queryband.py +53 -0
  1197. teradataml/utils/__init__.py +0 -0
  1198. teradataml/utils/docstring.py +527 -0
  1199. teradataml/utils/dtypes.py +943 -0
  1200. teradataml/utils/internal_buffer.py +122 -0
  1201. teradataml/utils/print_versions.py +206 -0
  1202. teradataml/utils/utils.py +451 -0
  1203. teradataml/utils/validators.py +3305 -0
  1204. teradataml-20.0.0.8.dist-info/METADATA +2804 -0
  1205. teradataml-20.0.0.8.dist-info/RECORD +1208 -0
  1206. teradataml-20.0.0.8.dist-info/WHEEL +5 -0
  1207. teradataml-20.0.0.8.dist-info/top_level.txt +1 -0
  1208. teradataml-20.0.0.8.dist-info/zip-safe +1 -0
@@ -0,0 +1,150 @@
+ {
+ "FuncName": "TD_FILTERFACTORY1D",
+ "FuncDescriptionShort": "TD_FILTERFACTORY1D creates finite impulse response (FIR) filter coefficients.",
+ "FuncDescriptionLong": [
+ "The filters are generated from the supplied parameters and stored in a common table for reuse."
+ ],
+ "FunctionVersion": "...",
+ "FunctionCategory": "General Utility",
+ "JSONVersion": "1",
+ "FuncRName": "td_Filter_Factory1d",
+ "MaxInputFiles": 0,
+ "Input": [],
+ "Params": [
+ {
+ "Name": "FILTERID",
+ "Type": "integer",
+ "Optional": false,
+ "Description": [
+ "Identifier for the filter whose coefficients are stored in the table."
+ ],
+ "LangName": "filter_id"
+ },
+ {
+ "Name": "FILTERTYPE",
+ "Type": "string",
+ "Optional": false,
+ "PermittedValues": [
+ "LOWPASS",
+ "HIGHPASS",
+ "BANDPASS",
+ "BANDSTOP"
+ ],
+ "Description": [
+ "The type of filter to generate.",
+ "Options are:",
+ "LOWPASS to remove frequencies above LOWCUTOFF.",
+ "HIGHPASS to remove frequencies below HIGHCUTOFF.",
+ "BANDPASS to remove frequencies below LOWCUTOFF and above HIGHCUTOFF.",
+ "BANDSTOP to remove frequencies between LOWCUTOFF and HIGHCUTOFF."
+ ],
+ "LangName": "filter_type"
+ },
+ {
+ "Name": "WINDOWTYPE",
+ "Type": "string",
+ "Optional": true,
+ "PermittedValues": [
+ "BLACKMAN",
+ "HAMMING",
+ "HANNING",
+ "BARTLETT"
+ ],
+ "Description": [
+ "[Optional] Window function applied to the filter so that the coefficients drop off smoothly to zero, avoiding extra artifacts in the frequency domain.",
+ "The window is applied by multiplying W(n) with the n-th coefficient of the filter.",
+ "Default is to not apply any windowing function to the filter (the filter coefficients are left as they are).",
+ "Options are:",
+ "BLACKMAN [ W(n) = 0.42 - 1/2*COS(2PI*n/N) + 0.08*COS(4PI*n/N) ]",
+ "HAMMING [ W(n) = 0.54 - 0.46 * COS(2PI*n/N) ]",
+ "HANNING [ W(n) = 0.5 - 0.5 * COS(2PI*n/N) ]",
+ "BARTLETT [ W(n) = ",
+ "{",
+ " (2 * n/N) , for n < N/2",
+ " (2 - 2 * n/N), for n >= N/2",
+ "} ]"
+ ],
+ "LangName": "window_type"
+ },
+ {
+ "Name": "FILTERLENGTH",
+ "Type": "integer",
+ "Optional": true,
+ "Description": [
+ "[Optional] Length of the filter to generate.",
+ "Overrides the TRANSITIONBANDWIDTH argument if both are supplied, rendering that argument optional.",
+ "Default is approximately 4/(TRANSITIONBANDWIDTH/SAMPLINGFREQUENCY)."
+ ],
+ "LangName": "filter_length"
+ },
+ {
+ "Name": "TRANSITIONBANDWIDTH",
+ "Type": "double",
+ "Optional": true,
+ "LowerBound": 0,
+ "LowerBoundType": "EXCLUSIVE",
+ "Description": [
+ "[Optional] The maximum allowed width of the frequency range over which the filter transitions between a passband and stopband.",
+ "This also determines the number of coefficients to be generated.",
+ "Value must be greater than 0.",
+ "A smaller value produces a faster drop-off at the cost of more coefficients.",
+ "Not used when FILTERLENGTH is supplied.",
+ "Default is the bandwidth derived from FILTERLENGTH."
+ ],
+ "LangName": "transition_bandwidth"
+ },
+ {
+ "Name": "LOWCUTOFF",
+ "Type": "double",
+ "Optional": true,
+ "LowerBound": 0,
+ "LowerBoundType": "EXCLUSIVE",
+ "Description": [
+ "[Optional] Determines the lower frequency at which the change between a passband and stopband occurs.",
+ "Must be greater than 0.",
+ "Not used by default with a HIGHPASS filter."
+ ],
+ "LangName": "low_cutoff"
+ },
+ {
+ "Name": "HIGHCUTOFF",
+ "Type": "double",
+ "Optional": true,
+ "LowerBound": 0,
+ "LowerBoundType": "EXCLUSIVE",
+ "Description": [
+ "[Optional] Determines the higher frequency at which the change between a passband and stopband occurs.",
+ "Must be greater than 0.",
+ "Not used by default with a LOWPASS filter."
+ ],
+ "LangName": "high_cutoff"
+ },
+ {
+ "Name": "SAMPLINGFREQUENCY",
+ "Type": "double",
+ "Optional": false,
+ "LowerBound": 0,
+ "LowerBoundType": "EXCLUSIVE",
+ "Description": [
+ "The frequency at which the data to be filtered was sampled.",
+ "Must be greater than 0."
+ ],
+ "LangName": "sampling_frequency"
+ },
+ {
+ "Name": "FILTERDESCRIPTION",
+ "Type": "string",
+ "Optional": true,
+ "Description": [
+ "[Optional] Description for the filter coefficients that share the same filter ID.",
+ "The description is written to only one row for each filter generated, the row where ROW_I is 0.",
+ "Default is a string describing the parameters."
+ ],
+ "LangName": "filter_description"
+ }
+ ],
+ "Output": [],
+ "IsPlottable": true,
+ "InputFmt": false,
+ "OutputFmt": false
+ }
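The window formulas above are concrete enough to sanity-check numerically. Below is a minimal illustrative sketch in plain Python (not part of the package; window_weight is a hypothetical helper) showing how W(n) scales the n-th coefficient of an (N+1)-tap filter, plus the default-length rule quoted for FILTERLENGTH:

    import math

    def window_weight(window_type, n, N):
        """Return W(n) for the window formulas listed in the JSON above."""
        if window_type == "BLACKMAN":
            return 0.42 - 0.5 * math.cos(2 * math.pi * n / N) + 0.08 * math.cos(4 * math.pi * n / N)
        if window_type == "HAMMING":
            return 0.54 - 0.46 * math.cos(2 * math.pi * n / N)
        if window_type == "HANNING":
            return 0.5 - 0.5 * math.cos(2 * math.pi * n / N)
        if window_type == "BARTLETT":
            return 2 * n / N if n < N / 2 else 2 - 2 * n / N
        raise ValueError("unknown window type: " + window_type)

    # The window is applied by multiplying W(n) with the n-th filter coefficient.
    coefficients = [0.1] * 33                    # placeholder FIR coefficients
    N = len(coefficients) - 1
    windowed = [c * window_weight("HAMMING", n, N) for n, c in enumerate(coefficients)]

    # Default FILTERLENGTH is approximately 4/(TRANSITIONBANDWIDTH/SAMPLINGFREQUENCY):
    transition_bandwidth, sampling_frequency = 100.0, 8000.0
    default_length = round(4 / (transition_bandwidth / sampling_frequency))  # 320 taps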
@@ -0,0 +1,198 @@
+ {
+ "json_schema_major_version": "1",
+ "json_schema_minor_version": "0",
+ "json_content_version": "1",
+ "function_name": "read_nos",
+ "function_version": "1.0",
+ "function_type": "table_operator",
+ "function_alias_name": "read_nos",
+ "function_r_name": "read.nos",
+ "short_description": "This function enables access to external files in JSON, CSV, or Parquet format.",
+ "long_description": "This function enables access to external files in JSON, CSV, or Parquet format. You must have the EXECUTE FUNCTION privilege on TD_SYSFNLIB.READ_NOS.",
+ "input_tables": [
+ {
+ "requiredInputKind": [
+ "PartitionByAny"
+ ],
+ "isOrdered": false,
+ "partitionByOne": false,
+ "name": "input",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the teradataml DataFrame containing the input data.",
+ "description": "Specifies the table containing the input data.",
+ "datatype": "TABLE_ALIAS",
+ "allowsLists": false,
+ "rName": "data",
+ "useInR": true,
+ "rOrderNum": 1
+ }
+ ],
+ "argument_clauses": [
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "LOCATION",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to the data in the external object storage system. The location value includes the following components:\nAmazon S3: /connector/bucket.endpoint/[key_prefix].\nAzure Blob storage and Azure Data Lake Storage Gen2: /connector/container.endpoint/[key_prefix].\nGoogle Cloud Storage: /connector/endpoint/bucket/[key_prefix].\nconnector: Identifies the type of external storage system where the data is located. Teradata requires the storage location to start with the following for all external storage locations:\nAmazon S3 storage location must begin with /S3 or /s3.\nAzure Blob storage location (including Azure Data Lake Storage Gen2 in Blob Interop Mode) must begin with /AZ or /az.\nGoogle Cloud Storage location must begin with /GS or /gs.\nendpoint: A URL that identifies the system-specific entry point for the external object storage system.\nbucket (Amazon S3, Google Cloud Storage) or container (Azure Blob storage and Azure Data Lake Storage Gen2): A container that logically groups stored objects in the external storage system.\nkey_prefix: Identifies one or more objects in the logical organization of the bucket data. Because it is a key prefix, not an actual directory path, the key prefix may match one or more objects in the external storage. For example, the key prefix '/fabrics/cotton/colors/b/' would match the objects /fabrics/cotton/colors/blue, /fabrics/cotton/colors/brown, and /fabrics/cotton/colors/black. If there were organization levels below those, such as /fabrics/cotton/colors/blue/shirts, the same key prefix would gather those objects too.\nNote: Vantage validates only the first file it encounters from the location key prefix.\nFor example, this location value might specify all objects on an Amazon cloud storage system for the month of December, 2001:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/'\nconnector: S3, bucket: YOUR-BUCKET, endpoint: s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/\nThis location could specify an individual storage object (or file), Day1.csv:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv'\nconnector: S3, bucket: YOUR-BUCKET, endpoint: s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/Day1.csv\nThis location specifies an entire container in an Azure external object store (Azure Blob storage or Azure Data Lake Storage Gen2). The container may contain multiple file objects:\nlocation = '/AZ/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/nos-csv-data'\nconnector: AZ, bucket: YOUR-STORAGE-ACCOUNT, endpoint: blob.core.windows.net, key_prefix: nos-csv-data\nThis is an example of a Google Cloud Storage location:\nlocation = '/GS/storage.googleapis.com/YOUR-BUCKET/CSVDATA/RIVERS/rivers.csv'\nconnector: GS, bucket: YOUR-BUCKET, endpoint: storage.googleapis.com, key_prefix: CSVDATA/RIVERS/rivers.csv",
+ "description": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to the data in the external object storage system. The location value includes the following components:\nAmazon S3: /connector/bucket.endpoint/[key_prefix].\nAzure Blob storage and Azure Data Lake Storage Gen2: /connector/container.endpoint/[key_prefix].\nGoogle Cloud Storage: /connector/endpoint/bucket/[key_prefix].\nconnector: Identifies the type of external storage system where the data is located. Teradata requires the storage location to start with the following for all external storage locations:\nAmazon S3 storage location must begin with /S3 or /s3.\nAzure Blob storage location (including Azure Data Lake Storage Gen2 in Blob Interop Mode) must begin with /AZ or /az.\nGoogle Cloud Storage location must begin with /GS or /gs.\nendpoint: A URL that identifies the system-specific entry point for the external object storage system.\nbucket (Amazon S3, Google Cloud Storage) or container (Azure Blob storage and Azure Data Lake Storage Gen2): A container that logically groups stored objects in the external storage system.\nkey_prefix: Identifies one or more objects in the logical organization of the bucket data. Because it is a key prefix, not an actual directory path, the key prefix may match one or more objects in the external storage. For example, the key prefix '/fabrics/cotton/colors/b/' would match the objects /fabrics/cotton/colors/blue, /fabrics/cotton/colors/brown, and /fabrics/cotton/colors/black. If there were organization levels below those, such as /fabrics/cotton/colors/blue/shirts, the same key prefix would gather those objects too.\nNote: Vantage validates only the first file it encounters from the location key prefix.\nFor example, this location value might specify all objects on an Amazon cloud storage system for the month of December, 2001:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/'\nconnector: S3, bucket: YOUR-BUCKET, endpoint: s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/\nThis location could specify an individual storage object (or file), Day1.csv:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv'\nconnector: S3, bucket: YOUR-BUCKET, endpoint: s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/Day1.csv\nThis location specifies an entire container in an Azure external object store (Azure Blob storage or Azure Data Lake Storage Gen2). The container may contain multiple file objects:\nlocation = '/AZ/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/nos-csv-data'\nconnector: AZ, bucket: YOUR-STORAGE-ACCOUNT, endpoint: blob.core.windows.net, key_prefix: nos-csv-data\nThis is an example of a Google Cloud Storage location:\nlocation = '/GS/storage.googleapis.com/YOUR-BUCKET/CSVDATA/RIVERS/rivers.csv'\nconnector: GS, bucket: YOUR-BUCKET, endpoint: storage.googleapis.com, key_prefix: CSVDATA/RIVERS/rivers.csv",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "location",
+ "useInR": true,
+ "rOrderNum": 2
+ },
+ {
+ "permittedValues": [],
+ "defaultValue": 16,
+ "isOutputColumn": false,
+ "name": "BUFFERSIZE",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the size of the network buffer to allocate when retrieving data from the external storage repository. The default value is 16 MB, which is the maximum value.",
+ "description": "Specifies the size of the network buffer to allocate when retrieving data from the external storage repository. The default value is 16 MB, which is the maximum value.",
+ "datatype": "INTEGER",
+ "allowsLists": false,
+ "rName": "buffer.size",
+ "useInR": true,
+ "rOrderNum": 3
+ },
+ {
+ "permittedValues": ["NOSREAD_RECORD", "NOSREAD_KEYS", "NOSREAD_RAW", "NOSREAD_PARQUET_SCHEMA"],
+ "defaultValue": "NOSREAD_RECORD",
+ "isOutputColumn": false,
+ "name": "RETURNTYPE",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the format in which data is returned.\nNOSREAD_RECORD: Returns one row for each external record along with its metadata. This is the default. Access external records by specifying one of the following:\n* An empty input table along with LOCATION. For CSV, you can include a schema definition.\n* An input table with a row for each external file. For CSV, this method does not support a schema definition.\nFor an empty single-column input table, do the following:\n* Define an input table with a single column, Payload, with the appropriate data type: JSON and DATASET with a Storage Format of CSV. This column determines the output Payload column return type.\n* For LOCATION, specify the filepath.\nFor a multiple-column input table, define an input table with the following columns:\n* Location VARCHAR(2048) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* ObjectTimeStamp TIMESTAMP(6)\n* OffsetIntoObject BIGINT\n* ObjectLength BIGINT\n* Payload JSON or VARCHAR for CSV\nThis table can be populated using the output of the NOSREAD_KEYS return type.\nNOSREAD_KEYS: Retrieves the list of files from the path specified in the LOCATION USING clause. A schema definition is not necessary. Returns Location, ObjectVersionID, ObjectTimeStamp, and ObjectLength (the size of the external file).\nNOSREAD_RAW: Retrieves file data from the external storage services, not specific records. Retrieved data is returned as CLOB/BLOB. You can retrieve a complete file from external storage and save it in Teradata CLOB/BLOB format. The maximum amount of data that can be retrieved from the external storage and saved in the Teradata column is 2GB, the Vantage limit for LOBs. The ObjectLength corresponds to the length of the CLOB/BLOB column read from the external storage. This information is provided in the form of a table returned to the READ_NOS table operator. The Payload column in the input table is only used to determine the datatype of the column in which the returned data is stored.\nDefine the input table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, ObjectLength BIGINT, Payload CLOB/BLOB.\nREAD_NOS returns a table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, ObjectLength BIGINT, and Payload CLOB/BLOB, based on the input table CLOB/BLOB column.\nNOSREAD_PARQUET_SCHEMA: Returns information about the Parquet data schema. For information about the mapping between Parquet data types and Teradata data types, see Parquet External Files in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "description": "Specifies the format in which data is returned.\nNOSREAD_RECORD: Returns one row for each external record along with its metadata. This is the default. Access external records by specifying one of the following:\n* An empty input table along with LOCATION. For CSV, you can include a schema definition.\n* An input table with a row for each external file. For CSV, this method does not support a schema definition.\nFor an empty single-column input table, do the following:\n* Define an input table with a single column, Payload, with the appropriate data type: JSON and DATASET with a Storage Format of CSV. This column determines the output Payload column return type.\n* For LOCATION, specify the filepath.\nFor a multiple-column input table, define an input table with the following columns:\n* Location VARCHAR(2048) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* ObjectTimeStamp TIMESTAMP(6)\n* OffsetIntoObject BIGINT\n* ObjectLength BIGINT\n* Payload JSON or VARCHAR for CSV\nThis table can be populated using the output of the NOSREAD_KEYS return type.\nNOSREAD_KEYS: Retrieves the list of files from the path specified in the LOCATION USING clause. A schema definition is not necessary. Returns Location, ObjectVersionID, ObjectTimeStamp, and ObjectLength (the size of the external file).\nNOSREAD_RAW: Retrieves file data from the external storage services, not specific records. Retrieved data is returned as CLOB/BLOB. You can retrieve a complete file from external storage and save it in Teradata CLOB/BLOB format. The maximum amount of data that can be retrieved from the external storage and saved in the Teradata column is 2GB, the Vantage limit for LOBs. The ObjectLength corresponds to the length of the CLOB/BLOB column read from the external storage. This information is provided in the form of a table returned to the READ_NOS table operator. The Payload column in the input table is only used to determine the datatype of the column in which the returned data is stored.\nDefine the input table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, ObjectLength BIGINT, Payload CLOB/BLOB.\nREAD_NOS returns a table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, ObjectLength BIGINT, and Payload CLOB/BLOB, based on the input table CLOB/BLOB column.\nNOSREAD_PARQUET_SCHEMA: Returns information about the Parquet data schema. For information about the mapping between Parquet data types and Teradata data types, see Parquet External Files in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "return.type",
+ "useInR": true,
+ "rOrderNum": 4
+ },
+ {
+ "permittedValues": [],
+ "defaultValue": 1.0,
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ "lowerBoundType": "INCLUSIVE",
+ "upperBoundType": "INCLUSIVE",
+ "isOutputColumn": false,
+ "name": "SAMPLE_PERC",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the percentage of rows to retrieve from the external storage repository when return.type is NOSREAD_RECORD. The valid range of values is from '0.0' to '1.0', where '1.0' represents 100% of the rows. The default value is 1.0.",
+ "description": "Specifies the percentage of rows to retrieve from the external storage repository when return.type is NOSREAD_RECORD. The valid range of values is from '0.0' to '1.0', where '1.0' represents 100% of the rows. The default value is 1.0.",
+ "datatype": "DOUBLE PRECISION",
+ "allowsLists": false,
+ "rName": "sample.perc",
+ "useInR": true,
+ "rOrderNum": 5
+ },
+ {
+ "permittedValues": ["PARQUET", "TEXTFILE"],
+ "defaultValue": "TEXTFILE",
+ "isOutputColumn": false,
+ "name": "STOREDAS",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. This is a required parameter for Parquet data.\nTEXTFILE means the external data uses a text-based format, such as CSV or JSON.\nThe default is TEXTFILE.",
+ "description": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. This is a required parameter for Parquet data.\nTEXTFILE means the external data uses a text-based format, such as CSV or JSON.\nThe default is TEXTFILE.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "stored.as",
+ "useInR": true,
+ "rOrderNum": 6
+ },
+ {
+ "permittedValues": [],
+ "defaultValue": false,
+ "isOutputColumn": false,
+ "name": "FULLSCAN",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies whether read.nos scans columns of variable length types (CHAR, VARCHAR, BYTE, VARBYTE, JSON, and BSON) to discover the maximum length.\nWhen set to True, the sizes of variable length data are determined from the Parquet data.\nNote: Choosing this value can impact performance, because all variable length data type columns in each Parquet file at the location must be scanned to assess the value having the greatest length.\nWhen set to False, variable length field sizes are assigned the Vantage maximum value for the particular data type. The default is False.",
+ "description": "Determines whether READ_NOS scans columns of variable length types (CHAR, VARCHAR, BYTE, VARBYTE, JSON, and BSON) to discover the maximum length.\nWhen set to True, the sizes of variable length data are determined from the Parquet data.\nNote: Choosing this value can impact performance, because all variable length data type columns in each Parquet file at the location must be scanned to assess the value having the greatest length.\nWhen set to False, variable length field sizes are assigned the Vantage maximum value for the particular data type. The default is False.",
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "full.scan",
+ "useInR": true,
+ "rOrderNum": 7
+ },
+ {
+ "permittedValues": [],
+ "defaultValue": false,
+ "isOutputColumn": false,
+ "name": "MANIFEST",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies whether the location value points to a manifest file (a file containing a list of files to read) or an object name. The object name can include the full path or a partial path. It must identify a single file containing the manifest. Note: The individual entries within the manifest file must show complete paths. Below is an example of a manifest file that contains a list of entries to locations in JSON format:\n{\n \"entries\": [\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-10.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-101.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-102.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-103.json\"}\n ]\n}",
+ "description": "Specifies whether the LOCATION value points to a manifest file (a file containing a list of files to read) or an object name. The object name can include the full path or a partial path. It must identify a single file containing the manifest. Note: The individual entries within the manifest file must show complete paths. Below is an example of a manifest file that contains a list of entries to locations in JSON format:\n{\n \"entries\": [\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-10.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-101.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-102.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-103.json\"}\n ]\n}",
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "manifest",
+ "useInR": true,
+ "rOrderNum": 8
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "ACCESS_ID",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the identification to access external storage. Specifying this option is not necessary if authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping, or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "description": "Identification to access external storage. Specifying this option is not necessary if authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping, or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "access.id",
+ "useInR": true,
+ "rOrderNum": 9
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "ACCESS_KEY",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the password to access external storage. Specifying this option is not necessary if authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping, or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "description": "Password to access external storage. Specifying this option is not necessary if authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping, or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "access.key",
+ "useInR": true,
+ "rOrderNum": 10
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "ROWFORMAT",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the encoding format of the external row, for example:\nrow.format = '{\"field_delimiter\":\",\", \"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}'.\nSpecify row.format using JSON format. It can include only the three keys shown above. Key names and values are case-specific, except for the value for \"character_set\", which can use any combination of letter cases.\nThe row.format character set specification must be compatible with the character set of the Payload column. Do not specify row.format for Parquet format data. For a JSON column, these are the default values:\nUNICODE: row.format = '{\"record_delimiter\":\"\n\", \"character_set\":\"UTF8\"}'\nLATIN: row.format = '{\"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}'\nFor a CSV column, these are the default values:\nUNICODE: row.format = '{\"character_set\":\"UTF8\"}'\nThis is the default if you do not specify an input table for read.nos.\nLATIN: row.format = '{\"character_set\":\"LATIN\"}'\nYou can specify the following options:\nfield_delimiter -> The default is ',' (comma). You can also specify a custom field delimiter, such as tab '\t'.\nrecord_delimiter -> New line feed character: '\n'. A line feed (\n) is the only acceptable record delimiter.\ncharacter_set -> 'UTF8' or 'LATIN'. If you do not specify a row.format or payload column, Vantage assumes UTF8 Unicode.",
+ "description": "Specifies the encoding format of the external row, for example:\nROWFORMAT('{\"field_delimiter\":\",\", \"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}').\nSpecify ROWFORMAT using JSON format. It can include only the three keys shown above. Key names and values are case-specific, except for the value for \"character_set\", which can use any combination of letter cases.\nThe ROWFORMAT character set specification must be compatible with the character set of the Payload column. Do not specify ROWFORMAT for Parquet format data. For a JSON column, these are the default values:\nUNICODE: ROWFORMAT('{\"record_delimiter\":\"\n\", \"character_set\":\"UTF8\"}')\nLATIN: ROWFORMAT('{\"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}')\nFor a CSV column, these are the default values:\nUNICODE: ROWFORMAT('{\"character_set\":\"UTF8\"}')\nThis is the default if you do not specify an input table for READ_NOS.\nLATIN: ROWFORMAT('{\"character_set\":\"LATIN\"}')\nYou can specify the following options:\nfield_delimiter -> The default is ',' (comma). You can also specify a custom field delimiter, such as tab '\t'.\nrecord_delimiter -> New line feed character: '\n'. A line feed (\n) is the only acceptable record delimiter.\ncharacter_set -> 'UTF8' or 'LATIN'. If you do not specify a ROWFORMAT or payload column, Vantage assumes UTF8 Unicode.",
+ "datatype": ["STRING", "JSON"],
+ "allowsLists": false,
+ "rName": "row.format",
+ "useInR": true,
+ "rOrderNum": 11
+ },
+ {
+ "permittedValues": [],
+ "defaultValue": true,
+ "isOutputColumn": false,
+ "name": "HEADER",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies whether the first row of data in an input CSV file is interpreted as column headings for the subsequent rows of data. Use this parameter only when a CSV input file is not associated with a separate schema object that defines columns for the CSV data. The value for HEADER can be 'TRUE' or 'FALSE'. The default is 'TRUE'.",
+ "description": "Specifies whether the first row of data in an input CSV file is interpreted as column headings for the subsequent rows of data. Use this parameter only when a CSV input file is not associated with a separate schema object that defines columns for the CSV data. The value for HEADER can be 'TRUE' or 'FALSE'. The default is 'TRUE'.",
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "header",
+ "useInR": true,
+ "rOrderNum": 12
+ }
+ ]
+ }
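Since read_nos is exposed through teradataml, a short usage sketch may help tie the arguments above together. This is a hedged example, not an authoritative one: the Python argument names (location, access_id, access_key, return_type, stored_as, header) are inferred from the metadata above, the host, credentials, and bucket are placeholders, and the exact signature should be confirmed with help(ReadNOS) in your teradataml version:

    from teradataml import create_context, remove_context, ReadNOS

    # Placeholder connection details -- replace with your Vantage system.
    create_context(host="<host>", username="<user>", password="<password>")

    # NOSREAD_KEYS: list the objects under a key prefix; no schema definition needed.
    keys = ReadNOS(location="/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/",
                   access_id="<access-id>", access_key="<access-key>",
                   return_type="NOSREAD_KEYS")
    print(keys.result)  # Location, ObjectVersionID, ObjectTimeStamp, ObjectLength

    # NOSREAD_RECORD (the default): read the CSV records, first row as column headings.
    records = ReadNOS(location="/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv",
                      access_id="<access-id>", access_key="<access-key>",
                      stored_as="TEXTFILE", header=True)
    print(records.result)

    remove_context()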
@@ -0,0 +1,198 @@
1
+ {
2
+ "json_schema_major_version": "1",
3
+ "json_schema_minor_version": "0",
4
+ "json_content_version": "1",
5
+ "function_name": "read_nos",
6
+ "function_version": "1.0",
7
+ "function_type": "table_operator",
8
+ "function_alias_name": "read_nos",
9
+ "function_r_name": "read.nos",
10
+ "short_description": "This function enables access to external files in JSON, CSV, or Parquet format.",
11
+ "long_description": "This function enables access to external files in JSON, CSV, or Parquet format. You must have the EXECUTE FUNCTION privilege on TD_SYSFNLIB.READ_NOS.",
12
+ "input_tables": [
13
+ {
14
+ "requiredInputKind": [
15
+ "PartitionByAny"
16
+ ],
17
+ "isOrdered": false,
18
+ "partitionByOne": false,
19
+ "name": "input",
20
+ "alternateNames": [],
21
+ "isRequired": false,
22
+ "rDescription": "Specifies the teradataml DataFrame containing the input data.",
23
+ "description": "Specifies the table containing the input data.",
24
+ "datatype": "TABLE_ALIAS",
25
+ "allowsLists": false,
26
+ "rName": "data",
27
+ "useInR": true,
28
+ "rOrderNum": 1
29
+ }
30
+ ],
31
+ "argument_clauses": [
32
+ {
33
+ "permittedValues": [],
34
+ "isOutputColumn": false,
35
+ "name": "LOCATION",
36
+ "alternateNames": [],
37
+ "isRequired": false,
38
+ "rDescription": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to the data in the external object storage system. The location value includes the following components:\nAmazon S3: /connector/bucket.endpoint/[key_prefix].\nAzure Blob storage and Azure Data Lake Storage Gen2: /connector/container.endpoint/[key_prefix].\nGoogle Cloud Storage: /connector/endpoint/bucket/[key_prefix].\nconnector: Identifies the type of external storage system where the data is located. Teradata requires the storage location to start with the following for all external storage locations:\nAmazon S3 storage location must begin with /S3 or /s3\nAzure Blob storage location (including Azure Data Lake Storage Gen2 in Blob Interop Mode) must begin with /AZ or /az\nGoogle Cloud Storage location must begin with /GS or /gs.\nendpoint: A URL that identifies the system-specific entry point for the external object storage system.\nbucket (Amazon S3, Google Cloud Storage) or container (Azure Blob storage and Azure Data Lake Storage Gen2): A container that logically groups stored objects in the external storage system.\nkey_prefix: Identifies one or more objects in the logical organization of the bucket data. Because it is a key prefix, not an actual directory path, the key prefix may match one or more objects in the external storage. For example, the key prefix '/fabrics/cotton/colors/b/' would match objects: /fabrics/cotton/colors/blue, /fabrics/cotton/colors/brown, and /fabrics/cotton/colors/black. If there were organization levels below those, such as /fabrics/cotton/colors/blue/shirts, the same key prefix would gather those objects too.\nNote: Vantage validates only the first file it encounters from the location key prefix.\nFor example, this location value might specify all objects on an Amazon cloud storage system for the month of December, 2001:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/\nThis location could specify an individual storage object (or file), Day1.csv:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/Day11.csv\nThis location specifies an entire container in an Azure external object store (Azure Blob storage or Azure Data Lake Storage Gen2). The container may contain multiple file objects:\nlocation = '/AZ/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/nos-csv-data'\nconnector: AZ, bucket: YOUR-STORAGE-ACCOUNT, endpoint: blob.core.windows.net, key_prefix: nos-csv-data\nThis is an example of a Google Cloud Storage location:\nconnector: GS, bucket: YOUR-BUCKET, endpoint: storage.googleapis.com, key_prefix: CSVDATA/RIVERS/rivers.csv",
39
+ "description": "Specifies the location value, which is a Uniform Resource Identifier (URI) pointing to the data in the external object storage system. The location value includes the following components:\nAmazon S3: /connector/bucket.endpoint/[key_prefix].\nAzure Blob storage and Azure Data Lake Storage Gen2: /connector/container.endpoint/[key_prefix].\nGoogle Cloud Storage: /connector/endpoint/bucket/[key_prefix].\nconnector: Identifies the type of external storage system where the data is located. Teradata requires the storage location to start with the following for all external storage locations:\nAmazon S3 storage location must begin with /S3 or /s3\nAzure Blob storage location (including Azure Data Lake Storage Gen2 in Blob Interop Mode) must begin with /AZ or /az\nGoogle Cloud Storage location must begin with /GS or /gs.\nendpoint: A URL that identifies the system-specific entry point for the external object storage system.\nbucket (Amazon S3, Google Cloud Storage) or container (Azure Blob storage and Azure Data Lake Storage Gen2): A container that logically groups stored objects in the external storage system.\nkey_prefix: Identifies one or more objects in the logical organization of the bucket data. Because it is a key prefix, not an actual directory path, the key prefix may match one or more objects in the external storage. For example, the key prefix '/fabrics/cotton/colors/b/' would match objects: /fabrics/cotton/colors/blue, /fabrics/cotton/colors/brown, and /fabrics/cotton/colors/black. If there were organization levels below those, such as /fabrics/cotton/colors/blue/shirts, the same key prefix would gather those objects too.\nNote: Vantage validates only the first file it encounters from the location key prefix.\nFor example, this location value might specify all objects on an Amazon cloud storage system for the month of December, 2001:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/\nThis location could specify an individual storage object (or file), Day1.csv:\nlocation = '/S3/YOUR-BUCKET.s3.amazonaws.com/csv/US-Crimes/csv-files/2001/Dec/Day1.csv'\nconnector: S3, bucket: YOUR-BUCKET, endpoint:s3.amazonaws.com, key_prefix: csv/US-Crimes/csv-files/2001/Dec/Day11.csv\nThis location specifies an entire container in an Azure external object store (Azure Blob storage or Azure Data Lake Storage Gen2). The container may contain multiple file objects:\nlocation = '/AZ/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/nos-csv-data'\nconnector: AZ, bucket: YOUR-STORAGE-ACCOUNT, endpoint: blob.core.windows.net, key_prefix: nos-csv-data\nThis is an example of a Google Cloud Storage location:\nconnector: GS, bucket: YOUR-BUCKET, endpoint: storage.googleapis.com, key_prefix: CSVDATA/RIVERS/rivers.csv",
40
+ "datatype": "STRING",
41
+ "allowsLists": false,
42
+ "rName": "location",
43
+ "useInR": true,
44
+ "rOrderNum": 2
45
+ },
46
+ {
47
+ "permittedValues": [],
48
+ "defaultValue": 16,
49
+ "isOutputColumn": false,
50
+ "name": "BUFFERSIZE",
51
+ "alternateNames": [],
52
+ "isRequired": false,
53
+ "rDescription": "Specifies the size of the network buffer to allocate when retrieving data from the external storage repository. The default value is 16 MB, which is the maximum value",
54
+ "description": "Specifies the size of the network buffer to allocate when retrieving data from the external storage repository. The default value is 16 MB, which is the maximum value",
55
+ "datatype": "INTEGER",
56
+ "allowsLists": false,
57
+ "rName": "buffer.size",
58
+ "useInR": true,
59
+ "rOrderNum": 3
60
+ },
61
+ {
62
+ "permittedValues": ["NOSREAD_RECORD", "NOSREAD_KEYS", "NOSREAD_RAW", "NOSREAD_PARQUET_SCHEMA"],
63
+ "defaultValue": "NOSREAD_RECORD",
64
+ "isOutputColumn": false,
65
+ "name": "RETURNTYPE",
66
+ "alternateNames": [],
67
+ "isRequired": false,
68
+ "rDescription": "Specifies the format in which data is returned.\nNOSREAD_RECORD: Returns one row for each external record along with its metadata. This is the default. Access external records by specifying one of the following:\n* Input table and LOCATION and an empty table. For CSV, you can include a schema definition.\n* Input table with a row for each external file. For CSV, this method does not support a schema definition.\nFor an empty single-column input table, do the following:\n* Define an input table with a single column, Payload, with the appropriate data type: JSON and DATASET with a Storage Format of CSV. This column determines the output Payload column return type.\n* For LOCATION, specify the filepath.\nFor a multiple-column input table, define an input table with the following columns:\n* Location VARCHAR(2048) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* OffsetIntoObject BIGINT\n* ObjectLength BIGINT\n* Payload JSON or VARCHAR for CSV\nThis table can be populated using the output of the NOSREAD_KEYS return type.\nNOSREAD_KEYS: Retrieve the list of files from the path specified in the LOCATION USING clause. A schema definition is not necessary. Returns: Location, ObjectVersionID, ObjectTimeStamp, ObjectLength, size of external file.\nNOSREAD_RAW: Retrieves file data from the external storage services, not specific records. Retrieved data is returned as CLOB/BLOB. You can retrieve a complete file from external storage and save in Teradata CLOB/BLOB format. The maximum amount of data that can be retrieved from the external storage and saved in the Teradata column is 2GB, the Vantage limit for LOBs. The ObjectLength corresponds to the length of CLOB/BLOB column read from the external storage. This information is provided in the form of a table returned to the READ_NOS table operator. The Payload column in the input table is only used to determine the datatype of the column in which the returned data is stored.\nDefine the input table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), ObjectTimeStamp TIMESTAMP(6), ObjectTimeStamp TIMESTAMP(6), Payload CLOB/BLOB.\nREAD_NOS returns a table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, OffsetIntoObject BIGINT, Payload CLOB/BLOB, based on input table CLOB/BLOB Column.\nNOSREAD_PARQUET_SCHEMA: Returns information about the Parquet data schema. For information about the mapping between Parquet data types and Teradata data types, see Parquet External Files in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
69
+ "description": "Specifies the format in which data is returned.\nNOSREAD_RECORD: Returns one row for each external record along with its metadata. This is the default. Access external records by specifying one of the following:\n* Input table and LOCATION and an empty table. For CSV, you can include a schema definition.\n* Input table with a row for each external file. For CSV, this method does not support a schema definition.\nFor an empty single-column input table, do the following:\n* Define an input table with a single column, Payload, with the appropriate data type: JSON and DATASET with a Storage Format of CSV. This column determines the output Payload column return type.\n* For LOCATION, specify the filepath.\nFor a multiple-column input table, define an input table with the following columns:\n* Location VARCHAR(2048) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE\n* OffsetIntoObject BIGINT\n* ObjectLength BIGINT\n* Payload JSON or VARCHAR for CSV\nThis table can be populated using the output of the NOSREAD_KEYS return type.\nNOSREAD_KEYS: Retrieve the list of files from the path specified in the LOCATION USING clause. A schema definition is not necessary. Returns: Location, ObjectVersionID, ObjectTimeStamp, ObjectLength, size of external file.\nNOSREAD_RAW: Retrieves file data from the external storage services, not specific records. Retrieved data is returned as CLOB/BLOB. You can retrieve a complete file from external storage and save in Teradata CLOB/BLOB format. The maximum amount of data that can be retrieved from the external storage and saved in the Teradata column is 2GB, the Vantage limit for LOBs. The ObjectLength corresponds to the length of CLOB/BLOB column read from the external storage. This information is provided in the form of a table returned to the READ_NOS table operator. The Payload column in the input table is only used to determine the datatype of the column in which the returned data is stored.\nDefine the input table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), ObjectTimeStamp TIMESTAMP(6), ObjectTimeStamp TIMESTAMP(6), Payload CLOB/BLOB.\nREAD_NOS returns a table with the following columns: Location VARCHAR(2048) CHARACTER SET UNICODE, ObjectVersionID VARCHAR(1024) CHARACTER SET UNICODE, ObjectTimeStamp TIMESTAMP(6), OffsetIntoObject BIGINT, OffsetIntoObject BIGINT, Payload CLOB/BLOB, based on input table CLOB/BLOB Column.\nNOSREAD_PARQUET_SCHEMA: Returns information about the Parquet data schema. For information about the mapping between Parquet data types and Teradata data types, see Parquet External Files in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
70
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "return.type",
+ "useInR": true,
+ "rOrderNum": 4
+ },
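The return-type modes above correspond one-to-one to arguments of the Python function teradataml generates from this spec (rName dots become underscores, so return.type becomes return_type). A minimal sketch, assuming the teradataml ReadNOS wrapper; the host, credentials, and S3 path are hypothetical placeholders:

    from teradataml import create_context, ReadNOS

    # An active Vantage connection is assumed; details are placeholders.
    create_context(host="tdhost", username="user", password="password")

    # NOSREAD_KEYS: list the objects under the location; no schema needed.
    keys = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/csv/",
                   return_type="NOSREAD_KEYS")
    print(keys.result)  # Location, ObjectVersionID, ObjectTimeStamp, ObjectLength

    # NOSREAD_RECORD (the default): one row per external record.
    records = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/csv/")
    print(records.result)

As the description notes, the NOSREAD_KEYS output can be persisted and reused as the multiple-column input table for a later NOSREAD_RECORD call.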
+ {
+ "permittedValues": [],
+ "defaultValue": 1.0,
+ "lowerBound": 0.0,
+ "upperBound": 1.0,
+ "lowerBoundType": "INCLUSIVE",
+ "upperBoundType": "INCLUSIVE",
+ "isOutputColumn": false,
+ "name": "SAMPLE_PERC",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the percentage of rows to retrieve from the external storage repository when return.type is NOSREAD_RECORD. The valid range of values is from '0.0' to '1.0', where '1.0' represents 100% of the rows. The default value is 1.0.",
88
+ "description": "Specifies the percentage of rows to retrieve from the external storage repository when return.type is NOSREAD_RECORD. The valid range of values is from '0.0' to '1.0', where '1.0' represents 100% of the rows. The default value is 1.0.",
89
+ "datatype": "DOUBLE PRECISION",
+ "allowsLists": false,
+ "rName": "sample.perc",
+ "useInR": true,
+ "rOrderNum": 5
+ },
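Since both bounds are INCLUSIVE, any value in [0.0, 1.0] is legal. A short sketch under the same assumptions as the previous example (active context, hypothetical bucket path):

    from teradataml import ReadNOS

    # Retrieve roughly 10% of the external rows; sample_perc only applies
    # when return_type is NOSREAD_RECORD (the default).
    sampled = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/csv/",
                      sample_perc=0.1)
    print(sampled.result)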
+ {
+ "permittedValues": ["PARQUET", "TEXTFILE"],
+ "defaultValue": "TEXTFILE",
+ "isOutputColumn": false,
+ "name": "STOREDAS",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. This is a required parameter for Parquet data.\nTEXTFILE means the external data uses a text-based format, such as CSV or JSON.\nThe default is TEXTFILE.",
+ "description": "Specifies the formatting style of the external data.\nPARQUET means the external data is formatted as Parquet. This is a required parameter for Parquet data.\nTEXTFILE means the external data uses a text-based format, such as CSV or JSON.\nThe default is TEXTFILE.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "stored.as",
+ "useInR": true,
+ "rOrderNum": 6
+ },
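Because TEXTFILE is the default, Parquet data has to be flagged explicitly. A hedged sketch (hypothetical path; NOSREAD_PARQUET_SCHEMA is a natural companion here, since it reports the inferred Teradata column types):

    from teradataml import ReadNOS

    # stored_as="PARQUET" is required for Parquet objects.
    pq_schema = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/pq/",
                        stored_as="PARQUET",
                        return_type="NOSREAD_PARQUET_SCHEMA")
    print(pq_schema.result)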
+ {
+ "permittedValues": [],
+ "defaultValue": false,
+ "isOutputColumn": false,
+ "name": "FULLSCAN",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies whether read.nos scans columns of variable length types (CHAR, VARCHAR, BYTE, VARBYTE, JSON, and BSON) to discover the maximum length.\nWhen set to True, the sizes of variable length data is determined from the Parquet data.\nNote: Choosing this value can impact performance because all variable length data type columns in each Parquet file at the location must be scanned to assess the value having the greatest length.\nWhen set to False, variable length field sizes are assigned the Vantage maximum value for the particular data type. The default is False.",
118
+ "description": "Determines whether READ_NOS scans columns of variable length types (CHAR, VARCHAR, BYTE, VARBYTE, JSON, and BSON) to discover the maximum length.\nWhen set to True, the sizes of variable length data is determined from the Parquet data.\nNote: Choosing this value can impact performance because all variable length data type columns in each Parquet file at the location must be scanned to assess the value having the greatest length.\nWhen set to False, variable length field sizes are assigned the Vantage maximum value for the particular data type. The default is False.",
119
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "full.scan",
+ "useInR": true,
+ "rOrderNum": 7
+ },
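FULLSCAN trades I/O for exact sizing of variable-length Parquet columns. A sketch extending the Parquet example above (same hypothetical path):

    from teradataml import ReadNOS

    # full_scan=True scans every Parquet object at the location to find the
    # longest CHAR/VARCHAR/BYTE/VARBYTE/JSON/BSON value; with the default
    # full_scan=False, Vantage assumes each type's maximum length instead.
    pq_exact = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/pq/",
                       stored_as="PARQUET",
                       return_type="NOSREAD_PARQUET_SCHEMA",
                       full_scan=True)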
+ {
+ "permittedValues": [],
+ "defaultValue": false,
+ "isOutputColumn": false,
+ "name": "MANIFEST",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies whether the location value points to a manifest file (a file containing a list of files to read) or object name. The object name can include the full path or a partial path. It must identify a single file containing the manifest. Note: The individual entries within the manifest file must show complete paths. Below is an example of a manifest file that contains a list of entries to locations in JSON format\n{\n \"entries\": [\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-10.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-101.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-102.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-103.json\"}\n ]\n}",
133
+ "description": "Specifies whether the LOCATION value points to a manifest file (a file containing a list of files to read) or object name. The object name can include the full path or a partial path. It must identify a single file containing the manifest. Note: The individual entries within the manifest file must show complete paths. Below is an example of a manifest file that contains a list of entries to locations in JSON format\n{\n \"entries\": [\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-10.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-8_9_02-101.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-102.json\"},\n {\"url\":\"s3://nos-core-us-east-1/UNICODE/JSON/mln-key/data-10/data-10-01/data-8_9_02-103.json\"}\n ]\n}",
134
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "manifest",
+ "useInR": true,
+ "rOrderNum": 8
+ },
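When LOCATION names a manifest like the JSON sample above rather than a data path, this flag changes how it is interpreted. A sketch with a hypothetical manifest object:

    from teradataml import ReadNOS

    # manifest=True: LOCATION identifies a single manifest file whose
    # "entries" array lists the complete paths of the objects to read.
    via_manifest = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/manifest.json",
                           manifest=True)
    print(via_manifest.result)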
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "ACCESS_ID",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the identification to access external storage. Specifying this option is not necessary if an authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "description": "Identification to access external storage. Specifying this option is not necessary if an authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "access.id",
+ "useInR": true,
+ "rOrderNum": 9
+ },
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "ACCESS_KEY",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the password to access external storage. Specifying this option is not necessary if an authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "description": "Password to access external storage. Specifying this option is not necessary if an authentication is defined, for example, in the EXTERNAL SECURITY clause of a function mapping or if IAM roles are defined for S3 repositories. See CREATE FUNCTION MAPPING in Teradata Vantage™ - SQL Data Definition Language Syntax and Examples, B035-1144.",
+ "datatype": "STRING",
+ "allowsLists": false,
+ "rName": "access.key",
+ "useInR": true,
+ "rOrderNum": 10
+ },
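ACCESS_ID and ACCESS_KEY are the inline alternative to an EXTERNAL SECURITY function mapping or an IAM role. A sketch with deliberately fake credentials; in practice, prefer an authorization object so secrets stay out of code:

    from teradataml import ReadNOS

    # Inline credentials; both values below are placeholders.
    authed = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/csv/",
                     access_id="my-access-id",
                     access_key="my-secret-key",
                     return_type="NOSREAD_KEYS")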
+ {
+ "permittedValues": [],
+ "isOutputColumn": false,
+ "name": "ROWFORMAT",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies the encoding format of the external row, for example:\nrow.format = '{\"field_delimiter\":\",\", \"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}'.\nSpecify row_format using JSON format. It can include only the three keys shown above. Key names and values are case-specific, except for the value for \"character_set\", which can use any combination of letter cases.\nThe row.format character set specification must be compatible with character set of the Payload column. Do not specify row.format for Parquet format data. For a JSON column, these are the default values:\nUNICODE: row.format = '{\"record_delimiter\":\"\n\", \"character_set\":\"UTF8\"}'\nLATIN: row.format = '{\"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}'\nFor a CSV column, these are the default values:\nUNICODE: row.format = '{\"character_set\":\"UTF8\"}'\nThis is the default if you do not specify an input table for read.nos.\nLATIN: row.format = '{\"character_set\":\"LATIN\"}'\nYou can specify the following options:\nfield_delimiter-> The default is ',' (comma). You can also specify a custom field delimiter, such as tab '\t'.\nrecord_delimiter-> New line feed character: '\n'. A line feed (\n) is the only acceptable record delimiter.\ncharacter_set -> 'UTF8' or 'LATIN'. If you do not specify a row.format or payload column, Vantage assumes UTF8 Unicode.",
175
+ "description": "Specifies the encoding format of the external row, for example:\nROWFORMAT('{\"field_delimiter\":\",\", \"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}').\nSpecify ROWFORMAT using JSON format. It can include only the three keys shown above. Key names and values are case-specific, except for the value for \"character_set\", which can use any combination of letter cases.\nThe ROWFORMAT character set specification must be compatible with character set of the Payload column. Do not specify ROWFORMAT for Parquet format data. For a JSON column, these are the default values:\nUNICODE: ROWFORMAT('{\"record_delimiter\":\"\n\", \"character_set\":\"UTF8\"}')\nLATIN: ROWFORMAT('{\"record_delimiter\":\"\n\", \"character_set\":\"LATIN\"}')\nFor a CSV column, these are the default values:\nUNICODE: ROWFORMAT('{\"character_set\":\"UTF8\"}')\nThis is the default if you do not specify an input table for READ_NOS.\nLATIN: ROWFORMAT('{\"character_set\":\"LATIN\"}')\nYou can specify the following options:\nfield_delimiter-> The default is ',' (comma). You can also specify a custom field delimiter, such as tab '\t'.\nrecord_delimiter-> New line feed character: '\n'. A line feed (\n) is the only acceptable record delimiter.\ncharacter_set -> 'UTF8' or 'LATIN'. If you do not specify a ROWFORMAT or payload column, Vantage assumes UTF8 Unicode.",
176
+ "datatype": ["STRING", "JSON"],
+ "allowsLists": false,
+ "rName": "row.format",
+ "useInR": true,
+ "rOrderNum": 11
+ },
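The row format value is itself a small JSON document, and the spec's datatype of STRING or JSON suggests it can be passed as a JSON string from Python (an assumption about the generated wrapper). A sketch for tab-delimited LATIN CSV; note that the backslash escapes are resolved by the JSON parser, not by Python:

    from teradataml import ReadNOS

    # Only the three documented keys are allowed; character_set must be
    # compatible with the Payload column's character set.
    latin_tsv = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/csv/",
                        row_format='{"field_delimiter":"\\t", '
                                   '"record_delimiter":"\\n", '
                                   '"character_set":"LATIN"}')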
+ {
+ "permittedValues": [],
+ "defaultValue": true,
+ "isOutputColumn": false,
+ "name": "HEADER",
+ "alternateNames": [],
+ "isRequired": false,
+ "rDescription": "Specifies whether the first row of data in an input CSV file is interpreted as column headings for the subsequent rows of data. Use this parameter only when a CSV input file is not associated with a separate schema object that defines columns for the CSV data. The value for HEADER can be 'TRUE' or 'FALSE'. The default is 'TRUE'.",
+ "description": "Specifies whether the first row of data in an input CSV file is interpreted as column headings for the subsequent rows of data. Use this parameter only when a CSV input file is not associated with a separate schema object that defines columns for the CSV data. The value for HEADER can be 'TRUE' or 'FALSE'. The default is 'TRUE'.",
+ "datatype": "BOOLEAN",
+ "allowsLists": false,
+ "rName": "header",
+ "useInR": true,
+ "rOrderNum": 12
+ }
+ ]
+ }
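Finally, HEADER only matters for CSV input that has no separate schema object. A closing sketch with the same hypothetical bucket:

    from teradataml import ReadNOS

    # header=False: treat the first CSV row as data, not as column names.
    headerless = ReadNOS(location="/S3/s3.amazonaws.com/example-bucket/csv/",
                         header=False)
    print(headerless.result)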