teradataml 20.0.0.1__py3-none-any.whl → 20.0.0.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only and reflects the changes between those package versions.

Potentially problematic release: this version of teradataml might be problematic. See the registry's advisory for more details.

Files changed (240)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +306 -0
  4. teradataml/__init__.py +10 -3
  5. teradataml/_version.py +1 -1
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +299 -16
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +13 -3
  11. teradataml/analytics/json_parser/utils.py +13 -6
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +11 -2
  15. teradataml/analytics/table_operator/__init__.py +4 -3
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +66 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1502 -323
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +247 -307
  22. teradataml/automl/data_transformation.py +32 -12
  23. teradataml/automl/feature_engineering.py +325 -86
  24. teradataml/automl/model_evaluation.py +44 -35
  25. teradataml/automl/model_training.py +122 -153
  26. teradataml/catalog/byom.py +8 -8
  27. teradataml/clients/pkce_client.py +1 -1
  28. teradataml/common/__init__.py +2 -1
  29. teradataml/common/constants.py +72 -0
  30. teradataml/common/deprecations.py +13 -7
  31. teradataml/common/garbagecollector.py +152 -120
  32. teradataml/common/messagecodes.py +11 -2
  33. teradataml/common/messages.py +4 -1
  34. teradataml/common/sqlbundle.py +26 -4
  35. teradataml/common/utils.py +225 -14
  36. teradataml/common/wrapper_utils.py +1 -1
  37. teradataml/context/context.py +82 -2
  38. teradataml/data/SQL_Fundamentals.pdf +0 -0
  39. teradataml/data/complaints_test_tokenized.csv +353 -0
  40. teradataml/data/complaints_tokens_model.csv +348 -0
  41. teradataml/data/covid_confirm_sd.csv +83 -0
  42. teradataml/data/dataframe_example.json +27 -1
  43. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  44. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  45. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  46. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  47. teradataml/data/docs/sqle/docs_17_20/Shap.py +203 -0
  48. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  49. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  50. teradataml/data/docs/sqle/docs_17_20/TextParser.py +3 -3
  51. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  52. teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
  53. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  54. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  55. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  56. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  57. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  58. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  59. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  60. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  61. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  62. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  63. teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
  64. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  65. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  66. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  67. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  68. teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +18 -21
  69. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  70. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  71. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  72. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  73. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  74. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  75. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  76. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  77. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  78. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  79. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  80. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  81. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  82. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  83. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  84. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  85. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  86. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  87. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  88. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  89. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  90. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  91. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  92. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  93. teradataml/data/dwt2d_dataTable.csv +65 -0
  94. teradataml/data/dwt_dataTable.csv +8 -0
  95. teradataml/data/dwt_filterTable.csv +3 -0
  96. teradataml/data/finance_data4.csv +13 -0
  97. teradataml/data/grocery_transaction.csv +19 -0
  98. teradataml/data/idwt2d_dataTable.csv +5 -0
  99. teradataml/data/idwt_dataTable.csv +8 -0
  100. teradataml/data/idwt_filterTable.csv +3 -0
  101. teradataml/data/interval_data.csv +5 -0
  102. teradataml/data/jsons/paired_functions.json +14 -0
  103. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  104. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  105. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  106. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  107. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  108. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  109. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  110. teradataml/data/jsons/sqle/17.20/TD_TextParser.json +1 -1
  111. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  112. teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
  113. teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
  114. teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
  115. teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
  116. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  117. teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
  118. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  119. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  120. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  121. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  122. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  123. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  124. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  125. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  126. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  127. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  128. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  129. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  130. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  131. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  132. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  133. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  134. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  135. teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +10 -19
  136. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  137. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  138. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  139. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  140. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  141. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  142. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  143. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  144. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  145. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  146. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  147. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  148. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  149. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  150. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  151. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  152. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  153. teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
  154. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  155. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  156. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  157. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  158. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  159. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
  160. teradataml/data/load_example_data.py +8 -2
  161. teradataml/data/medical_readings.csv +101 -0
  162. teradataml/data/naivebayestextclassifier_example.json +1 -1
  163. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  164. teradataml/data/patient_profile.csv +101 -0
  165. teradataml/data/peppers.png +0 -0
  166. teradataml/data/real_values.csv +14 -0
  167. teradataml/data/sax_example.json +8 -0
  168. teradataml/data/scripts/deploy_script.py +1 -1
  169. teradataml/data/scripts/lightgbm/dataset.template +157 -0
  170. teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +247 -0
  171. teradataml/data/scripts/lightgbm/lightgbm_function.template +216 -0
  172. teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +159 -0
  173. teradataml/data/scripts/sklearn/sklearn_fit.py +194 -160
  174. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +136 -115
  175. teradataml/data/scripts/sklearn/sklearn_function.template +34 -16
  176. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +155 -137
  177. teradataml/data/scripts/sklearn/sklearn_neighbors.py +1 -1
  178. teradataml/data/scripts/sklearn/sklearn_score.py +12 -3
  179. teradataml/data/scripts/sklearn/sklearn_transform.py +162 -24
  180. teradataml/data/star_pivot.csv +8 -0
  181. teradataml/data/target_udt_data.csv +8 -0
  182. teradataml/data/templates/open_source_ml.json +3 -1
  183. teradataml/data/teradataml_example.json +20 -1
  184. teradataml/data/timestamp_data.csv +4 -0
  185. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  186. teradataml/data/uaf_example.json +55 -1
  187. teradataml/data/unpivot_example.json +15 -0
  188. teradataml/data/url_data.csv +9 -0
  189. teradataml/data/vectordistance_example.json +4 -0
  190. teradataml/data/windowdfft.csv +16 -0
  191. teradataml/dataframe/copy_to.py +1 -1
  192. teradataml/dataframe/data_transfer.py +5 -3
  193. teradataml/dataframe/dataframe.py +1002 -201
  194. teradataml/dataframe/fastload.py +3 -3
  195. teradataml/dataframe/functions.py +867 -0
  196. teradataml/dataframe/row.py +160 -0
  197. teradataml/dataframe/setop.py +2 -2
  198. teradataml/dataframe/sql.py +840 -33
  199. teradataml/dataframe/window.py +1 -1
  200. teradataml/dbutils/dbutils.py +878 -34
  201. teradataml/dbutils/filemgr.py +48 -1
  202. teradataml/geospatial/geodataframe.py +1 -1
  203. teradataml/geospatial/geodataframecolumn.py +1 -1
  204. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  205. teradataml/lib/aed_0_1.dll +0 -0
  206. teradataml/opensource/__init__.py +1 -1
  207. teradataml/opensource/{sklearn/_class.py → _class.py} +102 -17
  208. teradataml/opensource/_lightgbm.py +950 -0
  209. teradataml/opensource/{sklearn/_wrapper_utils.py → _wrapper_utils.py} +1 -2
  210. teradataml/opensource/{sklearn/constants.py → constants.py} +13 -10
  211. teradataml/opensource/sklearn/__init__.py +0 -1
  212. teradataml/opensource/sklearn/_sklearn_wrapper.py +1019 -574
  213. teradataml/options/__init__.py +9 -23
  214. teradataml/options/configure.py +42 -4
  215. teradataml/options/display.py +2 -2
  216. teradataml/plot/axis.py +4 -4
  217. teradataml/scriptmgmt/UserEnv.py +13 -9
  218. teradataml/scriptmgmt/lls_utils.py +77 -23
  219. teradataml/store/__init__.py +13 -0
  220. teradataml/store/feature_store/__init__.py +0 -0
  221. teradataml/store/feature_store/constants.py +291 -0
  222. teradataml/store/feature_store/feature_store.py +2223 -0
  223. teradataml/store/feature_store/models.py +1505 -0
  224. teradataml/store/vector_store/__init__.py +1586 -0
  225. teradataml/table_operators/Script.py +2 -2
  226. teradataml/table_operators/TableOperator.py +106 -20
  227. teradataml/table_operators/query_generator.py +3 -0
  228. teradataml/table_operators/table_operator_query_generator.py +3 -1
  229. teradataml/table_operators/table_operator_util.py +102 -56
  230. teradataml/table_operators/templates/dataframe_register.template +69 -0
  231. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  232. teradataml/telemetry_utils/__init__.py +0 -0
  233. teradataml/telemetry_utils/queryband.py +52 -0
  234. teradataml/utils/dtypes.py +4 -2
  235. teradataml/utils/validators.py +34 -2
  236. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/METADATA +311 -3
  237. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/RECORD +240 -157
  238. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/WHEEL +0 -0
  239. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/top_level.txt +0 -0
  240. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/zip-safe +0 -0
teradataml/analytics/analytic_function_executor.py

@@ -14,11 +14,11 @@ File implements classes for following:
  * _TableOperatorExecutor
  * _BYOMFunctionExecutor
  """
-
+ from collections import OrderedDict
  from teradataml.options.configure import configure
  from teradataml.common.constants import TeradataConstants, TeradataAnalyticFunctionTypes
  from teradataml.analytics.json_parser import PartitionKind
- from teradataml.analytics.analytic_query_generator import AnalyticQueryGenerator, UAFQueryGenerator
+ from teradataml.analytics.analytic_query_generator import AnalyticQueryGenerator, UAFQueryGenerator, StoredProcedureQueryGenerator
  from teradataml.analytics.json_parser.json_store import _JsonStore
  from teradataml.analytics.utils import FuncSpecialCaseHandler
  from teradataml.options.display import display
@@ -28,8 +28,11 @@ from teradataml.common.messages import Messages, MessageCodes
  from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
  from teradataml.common.utils import UtilFuncs
  from teradataml.context.context import _get_context_temp_databasename
+ from teradataml.dataframe.dataframe import in_schema, DataFrame
+ from teradataml.dbutils.dbutils import _create_table, db_drop_table, list_td_reserved_keywords
+ from teradatasqlalchemy.types import *
  from teradataml.table_operators.table_operator_query_generator import TableOperatorQueryGenerator
- from teradatasqlalchemy.telemetry.queryband import collect_queryband
+ from teradataml.telemetry_utils.queryband import collect_queryband
  from teradataml.utils.dtypes import _ListOf
  from teradataml.utils.validators import _Validators

@@ -155,7 +158,7 @@ class _AnlyticFunctionExecutor:
  _Validators._validate_function_arguments([argument_info])

  @collect_queryband(attr="func_name")
- def _execute_query(self, persist=False, volatile=False):
+ def _execute_query(self, persist=False, volatile=False, display_table_name=True):
  """
  DESCRIPTION:
  Function to execute query on Vantage.
@@ -172,6 +175,13 @@
  Specifies whether to create a volatile table or not.
  Default Value: False
  Type: bool
+
+ display_table_name:
+ Optional Argument.
+ Specifies whether to display the table names or not when
+ persist is set to True.
+ Default Value: True
+ Type: bool

  RETURNS:
  None
@@ -206,7 +216,8 @@
  else:
  self._function_output_table_map["result"] = sqlmr_stdout_temp_tablename

- if persist:
+ # Print the table/view names if display_table_name is set to True.
+ if persist and display_table_name:
  # SQL is executed. So, print the table/view names.
  for output_attribute, table_name in self._function_output_table_map.items():
  print("{} data stored in table '{}'".format(output_attribute, table_name))
@@ -332,6 +343,17 @@
  self._func_output_args.append(temp_table_name)
  self._function_output_table_map[lang_name] = temp_table_name

+ def _get_column_name_from_feature(self, obj):
+ # Extract the associated column name from Feature.
+ from teradataml.store.feature_store.feature_store import Feature
+ if isinstance(obj, Feature):
+ return obj.column_name
+
+ if isinstance(obj, list):
+ return [self._get_column_name_from_feature(col) for col in obj]
+
+ return obj
+

  def _process_other_argument(self, **kwargs):
  """
@@ -428,6 +450,9 @@

  self._validate_analytic_function_argument(arg_name, arg_value, argument)

+ # Extract column names if it is a Feature.
+ arg_value = self._get_column_name_from_feature(arg_value)
+
  # Perform the checks which are specific to argument(_AnlyFuncArgument) type.
  # Check lower bound and upper bound for number type of arguments.
  if isinstance(arg_value, (int, float)):
@@ -463,6 +488,12 @@
  # does not require special case handler.
  arg_value = self._spl_func_obj._add_square_bracket(arg_value)

+ # Handling special case for Teradata reserved keywords or column names with spaces.
+ # If argument is a string or list of strings, then add quotes to the string.
+ if arg_name not in ["partition_columns"] and (\
+ UtilFuncs._contains_space(arg_value) or list_td_reserved_keywords(arg_value)):
+ arg_value = UtilFuncs._teradata_quote_arg(arg_value, "\"", False)
+
  # SequenceInputBy arguments require special processing.
  if 500 <= argument.get_r_order_number() <= 510:
  quoted_value = UtilFuncs._teradata_collapse_arglist(arg_value, "")
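
The new branch above double-quotes column arguments that collide with Teradata reserved keywords or contain spaces, so the generated SQL stays valid. A simplified, self-contained sketch of that rule (the reserved-word set and helper below are illustrative, not the teradataml internals):

RESERVED_KEYWORDS = {"SELECT", "TABLE", "ORDER", "CURRENT"}   # tiny illustrative subset

def quote_identifier(name):
    # Double-quote identifiers that contain spaces or collide with reserved words.
    if " " in name or name.upper() in RESERVED_KEYWORDS:
        return '"{}"'.format(name)
    return name

print(quote_identifier("order"))       # "order"
print(quote_identifier("unit price"))  # "unit price"
print(quote_identifier("sales"))       # sales
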
@@ -524,6 +555,17 @@
  return repr_string
  self._dyn_cls_data_members["__repr__"] = print_result

+ def copy(self, **args):
+ """ Function to copy the ART to another table."""
+ from teradataml import CopyArt
+ params = {
+ "data": self.result,
+ "database_name": args.get("database_name", None),
+ "table_name": args.get("table_name", None),
+ "map_name": args.get("map_name", None),
+ "persist": args.get("persist", False)}
+ return CopyArt(**params)
+
  query = self.sqlmr_query
  build_time = None if self.__build_time is None else round(self.__build_time, 2)

@@ -533,6 +575,7 @@
  # To list attributes using dict()
  self._dyn_cls_data_members["__dict__"] = self._dyn_cls_data_members
  self._dyn_cls_data_members["_mlresults"] = self._mlresults
+ self._dyn_cls_data_members["copy"] = copy

  # Dynamic class creation with In-DB function name.
  indb_class = type(self.func_name, (object,), self._dyn_cls_data_members)
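
These two hunks place a plain copy() function into the class dictionary that type() turns into each function's dynamic result class, so a result object can forward its ART table to CopyArt. A minimal standalone sketch of that dynamic-class pattern (the class name, table name, and stand-in copy body are illustrative):

def copy(self, **args):
    # Stand-in for the CopyArt call; it only echoes what would be forwarded.
    return {"data": self.result, "table_name": args.get("table_name"),
            "persist": args.get("persist", False)}

members = {"result": "my_art_table", "copy": copy}
DemoResult = type("DemoFunction", (object,), members)   # mirrors type(self.func_name, (object,), ...)

obj = DemoResult()
print(obj.copy(table_name="art_copy"))
# {'data': 'my_art_table', 'table_name': 'art_copy', 'persist': False}
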
@@ -678,6 +721,7 @@
  start_time = time.time()
  persist = kwargs.get("persist", False)
  volatile = kwargs.get("volatile", False)
+ display_table_name = kwargs.get("display_table_name", True)

  # Validate local_order_column argument type and values.
  arg_info_matrix = [["persist", persist, True, bool], ["volatile", volatile, True, bool]]
@@ -688,6 +732,14 @@
  raise TeradataMlException(
  Messages.get_message(MessageCodes.CANNOT_USE_TOGETHER_WITH, "persist", "volatile"),
  MessageCodes.CANNOT_USE_TOGETHER_WITH)
+
+ # If function is VectorDistance and largereference_input is set to True,
+ # then set data_partition_column to PartitionKind.DIMENSION and
+ # reference_data_partition_column to PartitionKind.ANY .
+ if self.func_name == "VectorDistance" and \
+ kwargs.get("largereference_input", False):
+ kwargs['target_data_partition_column'] = PartitionKind.DIMENSION
+ kwargs['reference_data_partition_column'] = PartitionKind.ANY

  self._dyn_cls_data_members.update(kwargs)

@@ -709,6 +761,11 @@
  if self.func_name in ['GLM', 'TDGLMPredict'] and \
  any(key in kwargs for key in ['data_partition_column', 'data_hash_column', 'local_order_data']):
  skip_output_arg_processing = True
+ elif self.func_name in ['CopyArt']:
+ # CopyArt function take care of persisting the result table internally
+ # through 'permanent_table' argument.
+ persist = False
+ volatile = False

  if not skip_output_arg_processing:
  self._process_output_argument(**kwargs)
@@ -716,13 +773,27 @@
  if not skip_other_arg_processing:
  self._process_other_argument(**kwargs)

- self._generate_query(volatile=volatile)
+ # When Analytic function is executed it stores the result in _function_output_table_map['result'].
+ # If we want to skip the query execution of the function then we need to pass result table in '_result_data'.
+
+ # Execute the query only if the '_result_data' is not passed as an argument in kwargs.
+ # Otherwise, store the result table in _function_output_table_map.
+ if kwargs.get("_result_data", None) is None:
+ self._generate_query(volatile=volatile)

- # Print SQL-MR query if requested to do so.
- if display.print_sqlmr_query:
- print(self.sqlmr_query)
+ # Print SQL-MR query if requested to do so.
+ if display.print_sqlmr_query:
+ print(self.sqlmr_query)

- self._execute_query(persist, volatile)
+ self._execute_query(persist, volatile, display_table_name)
+ else:
+ # This is useful when we already have the result table and
+ # need to pass function result as an object to another function
+ # without executing the function again.
+
+ # Store the result table in map.
+ self._function_output_table_map["result"] = kwargs.pop("_result_data")
+ self._dyn_cls_data_members['result'] = self._dyn_cls_data_members.pop('_result_data')


  if not skip_func_output_processing:
@@ -1489,7 +1560,7 @@ class _TableOperatorExecutor(_SQLEFunctionExecutor):

  class _UAFFunctionExecutor(_SQLEFunctionExecutor):
  """ Class to hold the attributes and provide methods to enable execution for UAF Functions. """
- def __init__(self, func_name):
+ def __init__(self, func_name, func_type = TeradataAnalyticFunctionTypes.UAF.value):
  """
  DESCRIPTION:
  Constructor for the class.
@@ -1506,7 +1577,7 @@
  EXAMPLES:
  _UAFFunctionExecutor("ArimaEstimate")
  """
- super().__init__(func_name, TeradataAnalyticFunctionTypes.UAF.value)
+ super().__init__(func_name, func_type)
  self._func_other_args = {}
  self._func_input_fmt_arguments = {}
  self._func_output_fmt_arguments = {}
@@ -1534,6 +1605,7 @@
  EXAMPLES:
  self._generate_query()
  """
+
  query_generator = UAFQueryGenerator(function_name=self._metadata.sql_function_name,
  func_input_args=self._func_input_args,
  func_input_filter_expr_args=self._func_input_filter_expr_args,
@@ -1936,7 +2008,7 @@
  **kwargs))

  @collect_queryband(attr="func_name")
- def _execute_query(self, persist=False, volatile=None):
+ def _execute_query(self, persist=False, volatile=None, display_table_name=True):
  """
  DESCRIPTION:
  Function to execute query on Vantage.
@@ -1947,6 +2019,13 @@
  Specifies whether to persist a table or not.
  Default Value: False
  Type: bool
+
+ display_table_name:
+ Optional Argument.
+ Specifies whether to display the table names or not when
+ persist is set to True.
+ Default Value: True
+ Type: bool

  RETURNS:
  None
@@ -1960,9 +2039,10 @@
  try:
  # Execute already generated query.
  UtilFuncs._execute_query(query=self.sqlmr_query)
-
- if persist:
- # SQL is already executed. So, print the table names.
+
+ # Print the table/view names if display_table_name is set to True.
+ if persist and display_table_name:
+ # SQL is executed. So, print the table/view names.
  for output_attribute, table_name in self._function_output_table_map.items():
  print("{} data stored in table '{}'".format(output_attribute, table_name))

@@ -2038,3 +2118,206 @@ class _BYOMFunctionExecutor(_SQLEFunctionExecutor):

  # Invoke call to SQL-MR generation.
  self.sqlmr_query = self.__aqg_obj._gen_sqlmr_select_stmt_sql()
+
+ class _StoredProcedureExecutor(_UAFFunctionExecutor):
+ """
+ Class to hold the attributes and provide methods to enable execution for Stored Procedures.
+ As the stored procedure JSONs are written like UAF Functions we will use
+ _UAFFunctionExecutor as the base class.
+ """
+ def __init__(self, func_name):
+ """
+ DESCRIPTION:
+ Constructor for the class.
+
+ PARAMETERS:
+ func_name:
+ Required Argument.
+ Specifies the name of the analytic function, which is exposed to the user.
+ Types: str
+
+ RAISES:
+ None
+
+ EXAMPLES:
+ _StoredProcedureExecutor("FilterFactory1d")
+ """
+ super().__init__(func_name, TeradataAnalyticFunctionTypes.STORED_PROCEDURE.value)
+ self._func_other_args = OrderedDict()
+
+ def _generate_query(self, volatile=False):
+ """
+ DESCRIPTION:
+ Function to generate the SQL query for Stored Procedures.
+
+ PARAMETERS:
+ volatile:
+ Optional Argument.
+ Specifies whether to create a volatile table or not.
+ Default Value: False
+ Type: bool
+
+ RETURNS:
+ None.
+
+ RAISES:
+ None.
+
+ EXAMPLES:
+ self._generate_query()
+ """
+ # update the db_name.
+ db_name = None
+ if configure.stored_procedure_install_location is not None:
+ db_name = configure.stored_procedure_install_location
+
+ self.__aqg_obj = StoredProcedureQueryGenerator(function_name=self._metadata.sql_function_name,
+ func_other_args_values=self._func_other_args,
+ db_name=db_name)
+
+ # Invoke call to SQL-MR generation.
+ self.sqlmr_query = self.__aqg_obj._gen_call_stmt()
+
+ def _process_other_argument(self, **kwargs):
+ """
+ DESCRIPTION:
+ Internal function to process the arguments.
+ 1. The function does the following:
+ * Checks the required arguments are passed or not.
+ * Checks the type of the arguments are expected or not.
+ * Checks for permitted values.
+ * Checks for empty string.
+ * If validations run fine,
+ then returns a dict with the SQL name of the argument as key
+ and user provided value as the value {arg_sql_name : value}
+
+ PARAMETERS:
+
+ kwargs:
+ Specifies the keyword arguments passed to a function.
+
+ RETURNS:
+ None.
+
+ RAISES:
+ ValueError OR TypeError OR TeradataMlException.
+
+ EXAMPLES:
+ self._process_other_arguments(argument, arg1="string", arg2="db", arg3=2)
+
+ """
+ ## As the function 'FilterFactory1d' requries the output table to be created before the stored procedure call,
+ ## creating it and adding them as parameters as stored procedure requires them
+
+ if self.func_name == "FilterFactory1d":
+ columns_to_create = {"ID": INTEGER,
+ "row_i": INTEGER,
+ "FilterMag": FLOAT,
+ "description": VARCHAR}
+
+ schema_name = UtilFuncs._extract_db_name(self._func_output_args)
+ table_name = UtilFuncs._extract_table_name(self._func_output_args)
+
+ _create_table(table_name=table_name,
+ columns=columns_to_create,
+ schema_name=schema_name,
+ primary_index=["ID", "row_i"])
+ self._func_other_args['database_name'] = UtilFuncs._teradata_quote_arg(schema_name, "\'", False)
+ self._func_other_args['table_name'] = UtilFuncs._teradata_quote_arg(table_name, "\'", False)
+
+ # 'CopyArt' function requires 'SRC_DATABASENMAE' and 'SRC_TABLENAME' as input arguments.
+ # Extract the database and table name from the 'data' argument and add them to the
+ # '_func_other_args' dictionary.
+ if self.func_name == "CopyArt":
+ data = kwargs.get('data', None)
+ argument_info = ["data", data, False, (DataFrame), True]
+ # 'data' is a required argument for 'CopyArt' function to get the source table name and database name.
+ _Validators._validate_missing_required_arguments([argument_info])
+ # 'data' should be a DataFrame.
+ _Validators._validate_function_arguments([argument_info])
+
+ # Add the 'SRC_DATABASENMAE' and 'SRC_TABLENAME' to the '_func_other_args' dictionary.
+ self._func_other_args["SRC_DATABASENMAE"] = "'{0}'".format(UtilFuncs._extract_db_name(data._table_name))
+ self._func_other_args["SRC_TABLENAME"] = "'{0}'".format(UtilFuncs._extract_table_name(data._table_name))
+
+ # Setting permanent_table to True if 'persist' is set to True, else False.
+ kwargs['permanent_table'] = 'True' if kwargs.get('persist', False) else 'False'
+
+ # Setting 'map_name' to empty string if not provided.
+ if kwargs.get('map_name', None) is None:
+ kwargs['map_name'] = ""
+
+ # CopyArt does not take 'data' as input argument.
+ kwargs.pop('data')
+
+ for argument in self._metadata.arguments:
+ sql_name = argument.get_name()
+ lang_name = argument.get_lang_name()
+ arg_value = kwargs.get(lang_name)
+ # Set the "argument".
+ self._spl_func_obj.set_arg_name(argument)
+ # Let's get spl handler if function requires.
+ special_case_handler = self._spl_func_obj._get_handle()
+
+ self._validate_analytic_function_argument(lang_name, arg_value, argument)
+ # As stored procedures require the argument to passed in positional order and
+ # NULL is required for arguments which are not present
+ if arg_value is None:
+ self._func_other_args[sql_name] = 'NULL'
+
+ # If argument is not None add the sql_name and arg_value to the dict.
+ else:
+ # If get_match_length_of_arguments is True, check if the arg_value is
+ # a list and of the required size.
+ if argument.get_match_length_of_arguments():
+ required_length = argument.get_required_length()
+ if (isinstance(arg_value, list) and len(arg_value) != required_length) or\
+ (not isinstance(arg_value, list)):
+ raise TeradataMlException(Messages.get_message(
+ MessageCodes.INVALID_LIST_LENGTH).format(lang_name,
+ required_length),
+ MessageCodes.INVALID_LIST_LENGTH)
+
+ # Perform the checks which are specific to argument(_AnlyFuncArgument) type.
+ # Check lower bound and upper bound for numeric arguments.
+ if isinstance(arg_value, (int, float)):
+ lower_bound_inclusive = argument.get_lower_bound_type() == "INCLUSIVE"
+ upper_bound_inclusive = argument.get_upper_bound_type() == "INCLUSIVE"
+ _Validators._validate_argument_range(arg_value,
+ lang_name,
+ lbound=argument.get_lower_bound(),
+ ubound=argument.get_upper_bound(),
+ lbound_inclusive=lower_bound_inclusive,
+ ubound_inclusive=upper_bound_inclusive)
+
+ # If the argument is a bool type, convert it to integer since SQL do
+ # not know boolean processing.
+ if bool in argument.get_python_type() and isinstance(arg_value, bool):
+ arg_value = int(arg_value)
+
+ # Handle special cases for "arg_values" based on handling method.
+ arg_value = special_case_handler(arg_value) if special_case_handler is not None else arg_value
+ self._func_other_args[sql_name] = arg_value
+
+
+ def _process_function_output(self, **kwargs):
+ """
+ DESCRIPTION:
+ Internal function to process the function output.
+ """
+ for lang_name, table_name in self._function_output_table_map.items():
+ # For 'CopyArt' function, the result should be the destination table name and database name provided as input.
+ if self.func_name == "CopyArt":
+ out_table_name = kwargs.get('table_name')
+ out_db_name = kwargs.get('database_name')
+ else:
+ out_table_name = UtilFuncs._extract_table_name(table_name)
+ out_db_name = UtilFuncs._extract_db_name(table_name)
+ df = self._awu._create_data_set_object(
+ df_input=out_table_name, database_name=out_db_name, source_type="table")
+ self._dyn_cls_data_members[lang_name] = df
+ # Condition make sure that the first element always be result or output in _mlresults.
+ if lang_name in ["output", "result"]:
+ self._mlresults.insert(0, df)
+ else:
+ self._mlresults.append(df)
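
The _StoredProcedureExecutor added above assembles stored-procedure arguments positionally: missing values become the literal NULL and booleans are folded to 0/1 before the CALL statement is built. A simplified, self-contained sketch of that assembly step (the SQL/keyword argument names below are illustrative, not the TD_FILTERFACTORY1D signature):

from collections import OrderedDict

def build_positional_args(metadata_order, **kwargs):
    # Missing arguments become the literal NULL; booleans are folded to 0/1.
    args = OrderedDict()
    for sql_name, lang_name in metadata_order:
        value = kwargs.get(lang_name)
        if value is None:
            args[sql_name] = "NULL"
        elif isinstance(value, bool):
            args[sql_name] = int(value)
        else:
            args[sql_name] = value
    return args

order = [("DATABASE_NAME", "database_name"), ("TABLE_NAME", "table_name"),
         ("FILTER_LENGTH", "filter_length"), ("ZERO_PHASE", "zero_phase"),
         ("WINDOW_TYPE", "window_type")]
print(build_positional_args(order, database_name="'test'", table_name="'filters'",
                            filter_length=33, zero_phase=True))
# OrderedDict([('DATABASE_NAME', "'test'"), ('TABLE_NAME', "'filters'"),
#              ('FILTER_LENGTH', 33), ('ZERO_PHASE', 1), ('WINDOW_TYPE', 'NULL')])
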
teradataml/analytics/analytic_query_generator.py

@@ -956,3 +956,95 @@ class UAFQueryGenerator:
  sql,
  on_preserve_clause)
  return sql
+
+
+ class StoredProcedureQueryGenerator:
+ """
+ This class creates a SQL-MR object, which can be used to generate
+ Stored Procedure Query Generator in FFE syntax for Teradata.
+ """
+
+ def __init__(self, function_name,
+ func_other_args_values,
+ db_name="SYSLIB"):
+ """
+ StoredProcedureQueryGenerator constructor, to create query for Stored Procedures.
+
+ PARAMETERS:
+ function_name:
+ Required Argument.
+ Specifies the name of the function.
+
+ func_other_args_values:
+ Required Argument.
+ Specifies a dict in the format: {'sql_name':'value'}.
+
+ db_name:
+ Optional Argument.
+ Specifies the install location of Stored Procedures.
+ Default Value: SYSLIB
+
+ RETURNS:
+ StoredProcedureQueryGenerator object.
+
+ EXAMPLES:
+ aqg_obj = StoredProcedureQueryGenerator(function_name, other_sql_args, db_name="mldb")
+ """
+ self.__function_name = function_name
+
+ # If the db_name is provided, append it to the stored
+ # procedure function name.
+ self.__db_name = db_name
+ if self.__db_name:
+ self.__function_name = "\"{}\".{}".format(self.__db_name,
+ self.__function_name)
+
+ self.__func_other_args_values = func_other_args_values
+ self.__CALL_STMT_FMT = "Call {}({})"
+ self.__QUERY_SIZE = self.__get_string_size(self.__CALL_STMT_FMT) + 20
+
+
+ def __generate_sqlmr_func_other_arg_sql(self):
+ """
+ Private function to generate a SQL clause for other function arguments.
+ For Example, two paramater values of {a:False, b:"BINOMIAL"} are
+ appened like: False, "BINOMIAL", in the same order.
+
+ RETURNS:
+ SQL string for other function arguments, as shown in example here.
+
+ EXAMPLES:
+ __func_other_args_values = {"a":False, "b":"BINOMIAL"}
+ other_arg_sql = self.__generate_sqlmr_func_other_arg_sql()
+ # Output is as shown in example in description.
+
+ """
+ args_sql_str = ','.join(map(str, self.__func_other_args_values.values()))
+ self.__QUERY_SIZE = self.__QUERY_SIZE + self.__get_string_size(args_sql_str)
+ return args_sql_str
+
+ def _gen_call_stmt(self):
+ """
+ Protected function to generate complete query.
+ For Example,
+ CALL SYSLIB.TD_FILTERFACTORY1D ('test', 'filters', 33, 'lowpass', 'blackman', NULL, 20.0, 40.0, NULL, 200, NULL);
+
+ PARAMETERS:
+
+ RETURNS:
+ A SQL-MR/Analytical query, as shown in example here.
+
+ RAISES:
+
+ EXAMPLES:
+ aqg_obj = StoredProcedureQueryGenerator(function_name=self._metadata.sql_function_name,
+ func_other_args_values=self._func_other_args,
+ db_name=db_name)
+ anly_query = aqg_obj._gen_sqlmr_select_stmt_sql()
+ # Output is as shown in example in description.
+
+ """
+ return self.__CALL_STMT_FMT.format(self.__function_name, self.__generate_sqlmr_func_other_arg_sql())
+
+ def __get_string_size(self, string):
+ return len(string.encode("utf8"))
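
The generator above simply joins the ordered argument values and interpolates them, together with the install database, into a CALL template. A minimal sketch of the resulting statement (the values and argument order are illustrative):

from collections import OrderedDict

args = OrderedDict([("DATABASE_NAME", "'test'"), ("TABLE_NAME", "'filters'"),
                    ("FILTER_LENGTH", 33), ("FILTER_TYPE", "'lowpass'"),
                    ("WINDOW_TYPE", "NULL")])

# Join the ordered values and interpolate them into the CALL template.
call_stmt = "Call {}({})".format('"SYSLIB".TD_FILTERFACTORY1D',
                                 ",".join(map(str, args.values())))
print(call_stmt)
# Call "SYSLIB".TD_FILTERFACTORY1D('test','filters',33,'lowpass',NULL)
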
teradataml/analytics/byom/__init__.py

@@ -2,7 +2,7 @@ from teradataml.analytics.byom.H2OPredict import H2OPredict
  from teradataml.analytics.byom.PMMLPredict import PMMLPredict

  from teradataml.analytics.meta_class import _AnalyticFunction
- from teradataml.analytics.meta_class import _common_init
+ from teradataml.analytics.meta_class import _common_init, _common_dir

  _byom_functions = ['H2OPredict', 'PMMLPredict', 'ONNXPredict', 'DataikuPredict', 'DataRobotPredict']

@@ -12,4 +12,5 @@ for func in _byom_functions:
  **kwargs: _common_init(self,
  'byom',
  **kwargs),
- "__doc__": _AnalyticFunction.__doc__})
+ "__doc__": _AnalyticFunction.__doc__,
+ "__dir__": _common_dir})
teradataml/analytics/json_parser/metadata.py

@@ -111,6 +111,7 @@ class _AnlyFuncMetadata:
  TeradataAnalyticFunctionTypes.TABLEOPERATOR.value,
  TeradataAnalyticFunctionTypes.UAF.value,
  TeradataAnalyticFunctionTypes.BYOM.value,
+ TeradataAnalyticFunctionTypes.STORED_PROCEDURE.value,
  None]])
  arg_info_matrix.append(["json_file", json_file, False, str, True])
  _Validators._validate_function_arguments(arg_info_matrix)
@@ -1178,10 +1179,19 @@
  # from teradataml.data.docs.<function_type>.<doc_dir_with_version_info>.<func_name>
  # import <func_name>
  func_module = __import__(("teradataml.data.docs.{}.{}.{}".
- format(function_type, doc_dir, self.func_name)),
- fromlist=[self.func_name])
- return getattr(func_module, self.func_name).__doc__
+ format(function_type, doc_dir, self.func_name)),
+ fromlist=[self.func_name])
+ return getattr(func_module, self.func_name).__doc__
  except:
+ # For db_version 20.00, if function type is sqle, then check for docs_17_20 directory.
+ if version_dir == '20.00' and function_type == 'sqle':
+ try:
+ func_module = __import__(("teradataml.data.docs.{}.{}.{}".
+ format(function_type, "docs_17_20", self.func_name)),
+ fromlist=[self.func_name])
+ return getattr(func_module, self.func_name).__doc__
+ except:
+ pass
  return ("Refer to Teradata Package for Python Function Reference guide for "
  "Documentation. Reference guide can be found at: https://docs.teradata.com ."
  "Refer to the section with Database version: {}".format(self.__database_version))
teradataml/analytics/json_parser/utils.py

@@ -54,7 +54,6 @@ def _get_json_data_from_tdml_repo():
  # both versions are matched, then the json store has data available so no need
  # to parse again.
  if configure.database_version != _JsonStore.version:
-
  # Json store version is different from database version. So, json's should
  # be parsed again. Before parsing the json, first clean the json store.
  _JsonStore.clean()
@@ -171,9 +170,15 @@
  if func_info.value["lowest_version"]:
  # Check if current function type is allowed on connected Vantage version or not.
  if func_info.value["func_type"] in func_type_json_version.keys():
+ # If function type is SQLE and db_version is 20.00, then add 17.20 JSON directory.
+ if func_type_json_version[func_info.value["func_type"]] == '20.00' and \
+ func_info.value["func_type"] == 'sqle':
+ yield [UtilFuncs._get_data_directory(dir_name="jsons", func_type=func_info,
+ version='17.20'),
+ func_info.name]
  yield [UtilFuncs._get_data_directory(dir_name="jsons", func_type=func_info,
- version=func_type_json_version[func_info.value["func_type"]]),
- func_info.name]
+ version=func_type_json_version[func_info.value["func_type"]]),
+ func_info.name]
  else:
  yield [UtilFuncs._get_data_directory(dir_name="jsons", func_type=func_info), func_info.name]

@@ -237,7 +242,7 @@
  """

  json_path = os.path.join(UtilFuncs._get_data_directory(), "jsons", "paired_functions.json")
- with open(json_path) as fp:
+ with open(json_path, encoding="utf8") as fp:
  _json = json.load(fp)

  _available_functions, _ = _JsonStore._get_function_list()
@@ -556,8 +561,10 @@ class _KNNPredict:
  "train_data": self.train_data,
  "input_columns": self.input_columns,
  "response_column": kwargs.get("response_column", self.response_column),
- "accumulate": kwargs.get("accumulate", kwargs.get("response_column", self.response_column)
- if getattr(self.obj, "accumulate") is None else self.accumulate)}
+ # Retrieve the accumulate value from kwargs if available.
+ # otherwise, no accumulation will occur.
+ "accumulate": kwargs.get("accumulate")
+ }

  # KNN works in a different way. predict calls the same function with test data along with
  # the arguments passed to the actual function. The above parameters are required
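
The simplified accumulate handling in the last hunk now takes the value only from the caller's kwargs; when it is absent, nothing is accumulated instead of falling back to the response column. A small illustrative sketch of the new behaviour (the helper and its parameters are made up for the example):

def build_predict_params(response_column, **kwargs):
    return {
        "response_column": kwargs.get("response_column", response_column),
        "accumulate": kwargs.get("accumulate"),   # None -> nothing is accumulated
    }

print(build_predict_params("species"))                       # accumulate is None
print(build_predict_params("species", accumulate=["id"]))    # accumulate is ['id']
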