teradataml 20.0.0.1__py3-none-any.whl → 20.0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (240)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +306 -0
  4. teradataml/__init__.py +10 -3
  5. teradataml/_version.py +1 -1
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +299 -16
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +13 -3
  11. teradataml/analytics/json_parser/utils.py +13 -6
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +11 -2
  15. teradataml/analytics/table_operator/__init__.py +4 -3
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +66 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1502 -323
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +247 -307
  22. teradataml/automl/data_transformation.py +32 -12
  23. teradataml/automl/feature_engineering.py +325 -86
  24. teradataml/automl/model_evaluation.py +44 -35
  25. teradataml/automl/model_training.py +122 -153
  26. teradataml/catalog/byom.py +8 -8
  27. teradataml/clients/pkce_client.py +1 -1
  28. teradataml/common/__init__.py +2 -1
  29. teradataml/common/constants.py +72 -0
  30. teradataml/common/deprecations.py +13 -7
  31. teradataml/common/garbagecollector.py +152 -120
  32. teradataml/common/messagecodes.py +11 -2
  33. teradataml/common/messages.py +4 -1
  34. teradataml/common/sqlbundle.py +26 -4
  35. teradataml/common/utils.py +225 -14
  36. teradataml/common/wrapper_utils.py +1 -1
  37. teradataml/context/context.py +82 -2
  38. teradataml/data/SQL_Fundamentals.pdf +0 -0
  39. teradataml/data/complaints_test_tokenized.csv +353 -0
  40. teradataml/data/complaints_tokens_model.csv +348 -0
  41. teradataml/data/covid_confirm_sd.csv +83 -0
  42. teradataml/data/dataframe_example.json +27 -1
  43. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  44. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  45. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  46. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  47. teradataml/data/docs/sqle/docs_17_20/Shap.py +203 -0
  48. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  49. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  50. teradataml/data/docs/sqle/docs_17_20/TextParser.py +3 -3
  51. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  52. teradataml/data/docs/tableoperator/docs_17_20/Image2Matrix.py +118 -0
  53. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  54. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  55. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  56. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  57. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  58. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  59. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  60. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  61. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  62. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  63. teradataml/data/docs/uaf/docs_17_20/CopyArt.py +145 -0
  64. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  65. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  66. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  67. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  68. teradataml/data/docs/uaf/docs_17_20/DickeyFuller.py +18 -21
  69. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  70. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  71. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  72. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  73. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  74. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  75. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  76. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  77. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  78. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  79. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  80. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  81. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  82. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  83. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  84. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  85. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  86. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  87. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  88. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  89. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  90. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  91. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  92. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  93. teradataml/data/dwt2d_dataTable.csv +65 -0
  94. teradataml/data/dwt_dataTable.csv +8 -0
  95. teradataml/data/dwt_filterTable.csv +3 -0
  96. teradataml/data/finance_data4.csv +13 -0
  97. teradataml/data/grocery_transaction.csv +19 -0
  98. teradataml/data/idwt2d_dataTable.csv +5 -0
  99. teradataml/data/idwt_dataTable.csv +8 -0
  100. teradataml/data/idwt_filterTable.csv +3 -0
  101. teradataml/data/interval_data.csv +5 -0
  102. teradataml/data/jsons/paired_functions.json +14 -0
  103. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  104. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  105. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  106. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  107. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  108. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  109. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  110. teradataml/data/jsons/sqle/17.20/TD_TextParser.json +1 -1
  111. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  112. teradataml/data/jsons/sqle/20.00/TD_KMeans.json +250 -0
  113. teradataml/data/jsons/sqle/20.00/TD_SMOTE.json +266 -0
  114. teradataml/data/jsons/sqle/20.00/TD_VectorDistance.json +278 -0
  115. teradataml/data/jsons/storedprocedure/17.20/TD_COPYART.json +71 -0
  116. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  117. teradataml/data/jsons/tableoperator/17.20/IMAGE2MATRIX.json +53 -0
  118. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  119. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  120. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  121. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  122. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  123. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  124. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  125. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  126. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  127. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  128. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  129. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  130. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  131. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  132. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  133. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  134. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  135. teradataml/data/jsons/uaf/17.20/TD_DICKEY_FULLER.json +10 -19
  136. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  137. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  138. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  139. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  140. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  141. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  142. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  143. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  144. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  145. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  146. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  147. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  148. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  149. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  150. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  151. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  152. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  153. teradataml/data/jsons/uaf/17.20/TD_SAX.json +210 -0
  154. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  155. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  156. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  157. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  158. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  159. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +410 -0
  160. teradataml/data/load_example_data.py +8 -2
  161. teradataml/data/medical_readings.csv +101 -0
  162. teradataml/data/naivebayestextclassifier_example.json +1 -1
  163. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  164. teradataml/data/patient_profile.csv +101 -0
  165. teradataml/data/peppers.png +0 -0
  166. teradataml/data/real_values.csv +14 -0
  167. teradataml/data/sax_example.json +8 -0
  168. teradataml/data/scripts/deploy_script.py +1 -1
  169. teradataml/data/scripts/lightgbm/dataset.template +157 -0
  170. teradataml/data/scripts/lightgbm/lightgbm_class_functions.template +247 -0
  171. teradataml/data/scripts/lightgbm/lightgbm_function.template +216 -0
  172. teradataml/data/scripts/lightgbm/lightgbm_sklearn.template +159 -0
  173. teradataml/data/scripts/sklearn/sklearn_fit.py +194 -160
  174. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +136 -115
  175. teradataml/data/scripts/sklearn/sklearn_function.template +34 -16
  176. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +155 -137
  177. teradataml/data/scripts/sklearn/sklearn_neighbors.py +1 -1
  178. teradataml/data/scripts/sklearn/sklearn_score.py +12 -3
  179. teradataml/data/scripts/sklearn/sklearn_transform.py +162 -24
  180. teradataml/data/star_pivot.csv +8 -0
  181. teradataml/data/target_udt_data.csv +8 -0
  182. teradataml/data/templates/open_source_ml.json +3 -1
  183. teradataml/data/teradataml_example.json +20 -1
  184. teradataml/data/timestamp_data.csv +4 -0
  185. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  186. teradataml/data/uaf_example.json +55 -1
  187. teradataml/data/unpivot_example.json +15 -0
  188. teradataml/data/url_data.csv +9 -0
  189. teradataml/data/vectordistance_example.json +4 -0
  190. teradataml/data/windowdfft.csv +16 -0
  191. teradataml/dataframe/copy_to.py +1 -1
  192. teradataml/dataframe/data_transfer.py +5 -3
  193. teradataml/dataframe/dataframe.py +1002 -201
  194. teradataml/dataframe/fastload.py +3 -3
  195. teradataml/dataframe/functions.py +867 -0
  196. teradataml/dataframe/row.py +160 -0
  197. teradataml/dataframe/setop.py +2 -2
  198. teradataml/dataframe/sql.py +840 -33
  199. teradataml/dataframe/window.py +1 -1
  200. teradataml/dbutils/dbutils.py +878 -34
  201. teradataml/dbutils/filemgr.py +48 -1
  202. teradataml/geospatial/geodataframe.py +1 -1
  203. teradataml/geospatial/geodataframecolumn.py +1 -1
  204. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  205. teradataml/lib/aed_0_1.dll +0 -0
  206. teradataml/opensource/__init__.py +1 -1
  207. teradataml/opensource/{sklearn/_class.py → _class.py} +102 -17
  208. teradataml/opensource/_lightgbm.py +950 -0
  209. teradataml/opensource/{sklearn/_wrapper_utils.py → _wrapper_utils.py} +1 -2
  210. teradataml/opensource/{sklearn/constants.py → constants.py} +13 -10
  211. teradataml/opensource/sklearn/__init__.py +0 -1
  212. teradataml/opensource/sklearn/_sklearn_wrapper.py +1019 -574
  213. teradataml/options/__init__.py +9 -23
  214. teradataml/options/configure.py +42 -4
  215. teradataml/options/display.py +2 -2
  216. teradataml/plot/axis.py +4 -4
  217. teradataml/scriptmgmt/UserEnv.py +13 -9
  218. teradataml/scriptmgmt/lls_utils.py +77 -23
  219. teradataml/store/__init__.py +13 -0
  220. teradataml/store/feature_store/__init__.py +0 -0
  221. teradataml/store/feature_store/constants.py +291 -0
  222. teradataml/store/feature_store/feature_store.py +2223 -0
  223. teradataml/store/feature_store/models.py +1505 -0
  224. teradataml/store/vector_store/__init__.py +1586 -0
  225. teradataml/table_operators/Script.py +2 -2
  226. teradataml/table_operators/TableOperator.py +106 -20
  227. teradataml/table_operators/query_generator.py +3 -0
  228. teradataml/table_operators/table_operator_query_generator.py +3 -1
  229. teradataml/table_operators/table_operator_util.py +102 -56
  230. teradataml/table_operators/templates/dataframe_register.template +69 -0
  231. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  232. teradataml/telemetry_utils/__init__.py +0 -0
  233. teradataml/telemetry_utils/queryband.py +52 -0
  234. teradataml/utils/dtypes.py +4 -2
  235. teradataml/utils/validators.py +34 -2
  236. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/METADATA +311 -3
  237. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/RECORD +240 -157
  238. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/WHEEL +0 -0
  239. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/top_level.txt +0 -0
  240. {teradataml-20.0.0.1.dist-info → teradataml-20.0.0.3.dist-info}/zip-safe +0 -0
@@ -20,22 +20,24 @@ from teradataml.common.messagecodes import MessageCodes
  from teradataml.common.exceptions import TeradataMlException
  from teradataml.common.constants import TeradataTableKindConstants
  from teradataml.common.sqlbundle import SQLBundle
- from teradataml.common.constants import SQLConstants
+ from teradataml.common.constants import SQLConstants, SessionParamsSQL, SessionParamsPythonNames
  from teradataml.common.constants import TableOperatorConstants
  import teradataml.dataframe as tdmldf
  from teradataml.options.configure import configure
  from teradataml.utils.utils import execute_sql
  from teradataml.utils.validators import _Validators
+ from teradataml.utils.internal_buffer import _InternalBuffer
  from teradatasql import OperationalError
  from teradatasqlalchemy.dialect import preparer, dialect as td_dialect
  from teradatasqlalchemy.dialect import TDCreateTablePost as post
- from teradatasqlalchemy.telemetry.queryband import collect_queryband
+ from teradataml.telemetry_utils.queryband import collect_queryband
  from sqlalchemy import Table, Column, MetaData, CheckConstraint, \
  PrimaryKeyConstraint, ForeignKeyConstraint, UniqueConstraint
+ from teradataml.utils.internal_buffer import _InternalBuffer


  @collect_queryband(queryband='DrpTbl')
- def db_drop_table(table_name, schema_name=None):
+ def db_drop_table(table_name, schema_name=None, suppress_error=False):
  """
  DESCRIPTION:
  Drops the table from the given schema.
@@ -53,6 +55,12 @@ def db_drop_table(table_name, schema_name=None):
  Default Value: None
  Types: str

+ suppress_error:
+ Optional Argument
+ Specifies whether to raise error or not.
+ Default Value: False
+ Types: str
+
  RETURNS:
  True - if the operation is successful.

@@ -81,14 +89,18 @@ def db_drop_table(table_name, schema_name=None):

  try:
  return UtilFuncs._drop_table(table_name)
- except TeradataMlException:
- raise
- except OperationalError:
- raise
+ except (TeradataMlException, OperationalError):
+ if suppress_error:
+ pass
+ else:
+ raise
  except Exception as err:
- raise TeradataMlException(Messages.get_message(MessageCodes.DROP_FAILED, "table",
- table_name),
- MessageCodes.DROP_FAILED) from err
+ if suppress_error:
+ pass
+ else:
+ raise TeradataMlException(Messages.get_message(MessageCodes.DROP_FAILED, "table",
+ table_name),
+ MessageCodes.DROP_FAILED) from err


  @collect_queryband(queryband='DrpVw')
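
A minimal usage sketch of the new suppress_error flag (not part of the diff; the host, credentials and table name below are placeholders), assuming an established Vantage connection:

    from teradataml import create_context, db_drop_table

    create_context(host="<host>", username="<user>", password="<password>")

    # By default a failed drop raises TeradataMlException/OperationalError.
    # With suppress_error=True the failure is swallowed, which suits idempotent cleanup steps.
    db_drop_table("my_staging_table", suppress_error=True)
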
@@ -171,6 +183,8 @@ def db_list_tables(schema_name=None, object_name=None, object_type='all'):
  a replacement for the percent.
  A '_' represents exactly one arbitrary character. Any single character is acceptable in the position in
  which the underscore character appears.
+ Note:
+ * If '%' is specified in 'object_name', then the '_' character is not evaluated for an arbitrary character.
  Default Value: None
  Types: str
  Example:
@@ -205,15 +219,15 @@ def db_list_tables(schema_name=None, object_name=None, object_type='all'):
  >>> execute_sql("create view temporary_view as (select 1 as dummy_col1, 2 as dummy_col2);")
  >>> db_list_tables(None , None, 'view')

- # Example 3 - List all the object types in the default schema whose names begin with 'abc' followed by one
- # arbitrary character and any number of characters in the end.
+ # Example 3 - List all the object types in the default schema whose names begin with 'abc' followed by any number
+ # of characters in the end.
  >>> execute_sql("create view abcd123 as (select 1 as dummy_col1, 2 as dummy_col2);")
- >>> db_list_tables(None, 'abc_%', None)
+ >>> db_list_tables(None, 'abc%', None)

- # Example 4 - List all the tables in the default schema whose names begin with 'adm_%' followed by one
- # arbitrary character and any number of characters in the end.
+ # Example 4 - List all the tables in the default schema whose names begin with 'adm' followed by any number of
+ # characters and ends with 'train'.
  >>> load_example_data("dataframe", "admissions_train")
- >>> db_list_tables(None, 'adm_%', 'table')
+ >>> db_list_tables(None, 'adm%train', 'table')

  # Example 5 - List all the views in the default schema whose names begin with any character but ends with 'abc'
  >>> execute_sql("create view view_abc as (select 1 as dummy_col1, 2 as dummy_col2);")
@@ -388,7 +402,7 @@ def _execute_transaction(queries):
  for query in queries:
  cur.execute(query)

- # Try committing the the transaction
+ # Try committing the transaction
  con.commit()
  except Exception:
  # Let's first rollback
@@ -400,6 +414,71 @@ def _execute_transaction(queries):
  cur.execute(auto_commit_on)


+ def db_transaction(func):
+ """
+ DESCRIPTION:
+ Function to execute another function in a transaction.
+
+ PARAMETERS:
+ func:
+ Required Argument.
+ Specifies the function to be executed in a single transaction.
+ Types: function
+
+ RETURNS:
+ The object returned by "func".
+
+ RAISES:
+ TeradataMlException, OperationalError
+
+ EXAMPLES:
+ # Example: Declare a function to delete all the records from two tables
+ # and execute the function in a transaction.
+ >>> @db_transaction
+ ... def insert_data(table1, table2):
+ ... execute_sql("delete from {}".format(table1))
+ ... execute_sql("delete from {}".format(table2))
+ ... return True
+ >>> # Executing the above function in a transaction.
+ >>> insert_data("sales", "admissions_train")
+ True
+ >>>
+ """
+ def execute_transaction(*args, **kwargs):
+ auto_commit_off = "{fn teradata_nativesql}{fn teradata_autocommit_off}"
+ auto_commit_on = "{fn teradata_nativesql}{fn teradata_autocommit_on}"
+ con = None
+ cur = None
+
+ result = None
+ try:
+ con = tdmlctx.td_connection
+ if con is None:
+ raise TeradataMlException(Messages.get_message(MessageCodes.CONNECTION_FAILURE),
+ MessageCodes.CONNECTION_FAILURE)
+ con = con.connection
+ cur = con.cursor()
+ # Set auto_commit to OFF.
+ cur.execute(auto_commit_off)
+
+ # Execute function.
+ result = func(*args, **kwargs)
+
+ # Try committing the transaction.
+ con.commit()
+ except Exception:
+ # Let's first rollback.
+ con.rollback()
+ # Now, let's raise the error as is.
+ raise
+ finally:
+ # Finally, we must set auto_commit to ON.
+ cur.execute(auto_commit_on)
+
+ return result
+
+ return execute_transaction
+
  def _execute_stored_procedure(function_call, fetchWarnings=True, expect_none_result=False):
  """
  DESCRIPTION:
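
The docstring above already shows the intended call pattern for db_transaction; the sketch below is a hedged variant (table names are illustrative) emphasising that the wrapped statements commit or roll back together because the decorator turns autocommit off around the call:

    from teradataml import execute_sql
    from teradataml.dbutils.dbutils import db_transaction

    @db_transaction
    def clear_staging(table1, table2):
        # Both deletes run in one transaction; any failure rolls both back.
        execute_sql("delete from {}".format(table1))
        execute_sql("delete from {}".format(table2))
        return True

    clear_staging("sales", "admissions_train")
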
@@ -982,6 +1061,7 @@ def _create_table(table_name,
  pti = pti.no_primary_index()

  con_form=[]
+ foreign_constraints = []
  for c_name, parameters in kwargs.items():
  _Validators._validate_function_arguments([["constraint_type", c_name, True, str,
  True, SQLConstants.CONSTRAINT.value]])
@@ -990,9 +1070,21 @@
  [con_form.append("{}('{}')".format("CheckConstraint", col)) for col in parameters]
  if c_name in 'foreign_key_constraint':
  parameters = parameters if isinstance(parameters[0], tuple) else [tuple(parameters)]
- for col in parameters:
- meta.reflect(bind=tdmlctx.get_context(), only=[col[2]])
- con_form.append("{}({},{})".format("ForeignKeyConstraint", col[0], col[1]))
+ # Every element in parameter is 3 elements.
+ # 1st element and 2nd element also a list. 3rd element is name of ForeignKey.
+ for fk_columns, fk_ref_columns, fk_name in parameters:
+ fk_ref_column_objs = []
+
+ # fk_ref_columns is in this format - table_name.column_name .
+ # There is no provision for schema name here.
+ # sqlalchemy is not accepting this notation here - schema_name.table_name.column_name
+ # So, create Column Object and bind schema name and table name to it.
+ for fk_ref_column in fk_ref_columns:
+ ref_column_table, ref_column = fk_ref_column.split(".")
+ t = Table(ref_column_table, MetaData(), Column(ref_column), schema=schema_name)
+ fk_ref_column_objs.append(getattr(t, "c")[ref_column])
+ foreign_constraints.append(ForeignKeyConstraint(fk_columns, fk_ref_column_objs, fk_name))
+
  if c_name in ['primary_key_constraint', 'unique_key_constraint']:
  c_name = "UniqueConstraint" if c_name in 'unique_key_constraint' else 'PrimaryKeyConstraint'
  parameters = UtilFuncs._as_list(parameters)
@@ -1006,6 +1098,8 @@ def _create_table(table_name,
  "schema=schema_name)".format("" if con_form is None else ",".join(con_form))

  table=eval(table_str)
+ for foreign_constraint in foreign_constraints:
+ table.append_constraint(foreign_constraint)
  table.create(bind=tdmlctx.get_context())

  except Exception as err:
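
Based on the comments in the hunk above, a hedged sketch of the shape the foreign_key_constraint keyword now takes for the internal _create_table helper; table, column and constraint names are illustrative, and references use the table_name.column_name notation the code splits on:

    # Hypothetical value inferred from the diff, not a documented public API.
    # Each entry is (fk_columns, fk_ref_columns, fk_name).
    foreign_key_constraint = [
        (["cust_id"],             # column(s) in the table being created
         ["customers.cust_id"],   # referenced column(s), as table_name.column_name
         "fk_orders_customers"),  # constraint name
    ]
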
@@ -1013,30 +1107,357 @@
  raise TeradataMlException(Messages.get_message(msg_code, "create table", str(err)), msg_code)


+ def _create_database(schema_name, size='10e6', spool_size=None):
+ """
+ DESCRIPTION:
+ Internal function to create a database with the specified name and size.
+
+ PARAMETERS:
+ schema_name:
+ Required Argument.
+ Specifies the name of the database to create.
+ Types: str
+
+ size:
+ Optional Argument.
+ Specifies the number of bytes to allocate to new database.
+ Note:
+ Exponential notation can also be used.
+ Types: str or int
+
+ spool_size:
+ Optional Argument.
+ Specifies the number of bytes to allocate to new database
+ for spool space.
+ Note:
+ Exponential notation can also be used.
+ Types: str or int
+
+ RETURNS:
+ bool
+
+ RAISES:
+ TeradataMlException.
+
+ EXAMPLES:
+ >>> from teradataml.dbutils.dbutils import _create_database
+ >>> _create_database("db_name1", "10e5")
+ """
+ sql = "CREATE DATABASE {} FROM {} AS PERM = {}".format(
+ schema_name, tdmlctx._get_database_username(), size)
+
+ # If user pass spool size, create it with specified space.
+ if spool_size:
+ sql = "{} , SPOOL = {}".format(sql, spool_size)
+
+ execute_sql(sql)
+ return True
+
+
+ def _update_data(update_columns_values, table_name, schema_name, datalake_name=None, update_conditions=None):
+ """
+ DESCRIPTION:
+ Internal function to update the data in a table.
+
+ PARAMETERS:
+ update_columns_values:
+ Required Argument.
+ Specifies the columns and it's values to update.
+ Types: dict
+
+ table_name:
+ Required Argument.
+ Specifies the name of the table to update.
+ Types: str
+
+ schema_name:
+ Required Argument.
+ Specifies the name of the database to update the data in the
+ table "table_name".
+ Types: str
+
+ datalake_name:
+ Optional Argument.
+ Specifies the name of the datalake to look for "schema_name".
+ Types: str
+
+ update_conditions:
+ Optional Argument.
+ Specifies the key columns and it's values which is used as condition
+ for updating the records.
+ Types: dict
+
+ RETURNS:
+ bool
+
+ RAISES:
+ TeradataMlException.
+
+ EXAMPLES:
+ >>> from teradataml.dbutils.dbutils import _update_data
+ >>> _update_data("db_name1", "tbl", update_conditions={"column1": "value1"})
+ """
+ # Prepare the update clause.
+ update_clause = ", ".join(("{} = ?".format(col) for col in update_columns_values))
+ update_values = tuple((_value for _value in update_columns_values.values()))
+
+ # If key_columns_values is passed, then prepare the SQL with where clause.
+ # Else, simply update every thing.
+ schema_name = "{}.{}".format(datalake_name, schema_name) if datalake_name else schema_name
+
+ get_str_ = lambda val: "'{}'".format(val) if isinstance(val, str) else val
+ if update_conditions:
+
+ # Prepare where clause.
+ where_ = []
+ for column, col_value in update_conditions.items():
+ if isinstance(col_value, list):
+ col_value = ", ".join(get_str_(val) for val in col_value)
+ col_value = "({})".format(col_value)
+ where_.append("{} IN {}".format(column, col_value))
+ else:
+ where_.append("{} = {}".format(column, col_value))
+
+ where_clause = " AND ".join(where_)
+
+ sql = f"""UPDATE {schema_name}.{table_name} SET {update_clause}
+ WHERE {where_clause}
+ """
+
+ execute_sql(sql, (*update_values, ))
+
+ else:
+ sql = f"""UPDATE {schema_name}.{table_name} SET {update_clause}"""
+
+ execute_sql(sql, update_values)
+ return True
+
+
+ def _insert_data(table_name, values, columns=None, schema_name=None, datalake_name=None):
+ """
+ DESCRIPTION:
+ Internal function to insert the data in a table.
+
+ PARAMETERS:
+ table_name:
+ Required Argument.
+ Specifies the name of the table to insert.
+ Types: str
+
+ values:
+ Required Argument.
+ Specifies the values to insert.
+ Types: tuple or list of tuple
+
+ columns:
+ Optional Argument.
+ Specifies the name of columns to be involved in insert.
+ Types: list
+
+ schema_name:
+ Optional Argument.
+ Specifies the name of the database to insert the data in the
+ table "table_name".
+ Types: str
+
+ datalake_name:
+ Optional Argument.
+ Specifies the name of the datalake to look for "schema_name".
+ Types: str
+
+ RETURNS:
+ bool
+
+ RAISES:
+ TeradataMlException.
+
+ EXAMPLES:
+ >>> from teradataml.dbutils.dbutils import _insert_data
+ >>> _insert_data("tbl", (1, 2, 3))
+ """
+ # Prepare the update clause.
+ if schema_name:
+ table_name = '"{}"."{}"'.format(schema_name, table_name)
+ if datalake_name:
+ table_name = '"{}"."{}"'.format(datalake_name, table_name)
+
+ values = UtilFuncs._as_list(values)
+
+ # Prepare columns clause.
+ if columns:
+ # Prepare question marks.
+ _q_marks = ["?"] * len(columns)
+ columns = "({})".format(", ".join(columns))
+ else:
+ columns = ""
+ _q_marks = ["?"] * (len(values[0]))
+
+ sql = "insert into {} {} values ({});".format(table_name, columns, ", ".join(_q_marks))
+ execute_sql(sql, values)
+
+ return True
+
+
+ def _upsert_data(update_columns_values,
+ insert_columns_values,
+ upsert_conditions,
+ table_name,
+ schema_name,
+ datalake_name=None):
+ """
+ DESCRIPTION:
+ Internal function to either insert or update the data to a table.
+
+ PARAMETERS:
+ update_columns_values:
+ Required Argument.
+ Specifies the columns and it's values to update.
+ Types: dict
+
+ insert_columns_values:
+ Required Argument.
+ Specifies the columns and it's values to insert.
+ Types: dict
+
+ upsert_conditions:
+ Required Argument.
+ Specifies the key columns and it's values which is used as condition
+ for updating the records.
+ Types: tuple
+
+ table_name:
+ Required Argument.
+ Specifies the name of the table to insert.
+ Types: str
+
+ schema_name:
+ Required Argument.
+ Specifies the name of the database to update the data in the
+ table "table_name".
+ Types: str
+
+ datalake_name:
+ Optional Argument.
+ Specifies the name of the datalake to look for "schema_name".
+ Types: str
+
+ RETURNS:
+ bool
+
+ RAISES:
+ TeradataMlException.
+
+ EXAMPLES:
+ >>> from teradataml.dbutils.dbutils import _upsert_data
+ >>> _upsert_data("db_name1",
+ "tbl",
+ update_columns_values={"column1": "value1"},
+ insert_columns_values={"column1": "value2"},
+ upsert_conditions={"key1": "val1"}
+ )
+ """
+ # If user passes datalake name, then append the same to schema name.
+ if datalake_name:
+ schema_name = "{}.{}".format(datalake_name, schema_name)
+
+ # Prepare the update clause.
+ update_clause = ", ".join(("{} = ?".format(col) for col in update_columns_values))
+ update_values = tuple((_value for _value in update_columns_values.values()))
+
+ # Prepare the where clause and it's values.
+ where_clause = " AND ".join(("{} = ?".format(col) for col in upsert_conditions))
+ where_values = tuple((_value for _value in upsert_conditions.values()))
+
+ # Prepare the insert clause and it's values.
+ insert_values_clause = ", ".join(("?" for _ in range(len(insert_columns_values))))
+ insert_clause = "({}) values ({})".format(", ".join(insert_columns_values), insert_values_clause)
+ insert_values = tuple((_value for _value in insert_columns_values.values()))
+
+ sql = f"""UPDATE {schema_name}.{table_name} SET {update_clause}
+ WHERE {where_clause}
+ ELSE INSERT {schema_name}.{table_name} {insert_clause}
+ """
+ execute_sql(sql, (*update_values, *where_values, *insert_values))
+
+ def _delete_data(table_name, schema_name=None, datalake_name=None, delete_conditions=None):
+ """
+ DESCRIPTION:
+ Internal function to delete the data in a table.
+
+ PARAMETERS:
+ table_name:
+ Required Argument.
+ Specifies the name of the table to delete.
+ Types: str
+
+ schema_name:
+ Optional Argument.
+ Specifies the name of the database to delete the data in the
+ table "table_name".
+ Types: str
+
+ datalake_name:
+ Optional Argument.
+ Specifies the name of the datalake to look for "schema_name".
+ Types: str
+
+ delete_conditions:
+ Optional Argument.
+ Specifies the ColumnExpression to use for removing the data.
+ Types: ColumnExpression
+
+ RETURNS:
+ int, specifies the number of records those are deleted.
+
+ RAISES:
+ TeradataMlException.
+
+ EXAMPLES:
+ >>> from teradataml.dbutils.dbutils import _delete_data
+ >>> _delete_data("tbl", "db_name1", delete_conditions={"column1": "value1"})
+ """
+ if schema_name:
+ table_name = '"{}"."{}"'.format(schema_name, table_name)
+
+ if datalake_name:
+ table_name = "{}.{}".format(datalake_name, table_name)
+
+ sqlbundle = SQLBundle()
+
+ sql = sqlbundle._get_sql_query(SQLConstants.SQL_DELETE_ALL_ROWS).format(table_name)
+
+ # If condition exist, the prepare where clause.
+ if delete_conditions:
+ where_clause = delete_conditions.compile()
+ sql = sqlbundle._get_sql_query(SQLConstants.SQL_DELETE_SPECIFIC_ROW).format(table_name, where_clause)
+
+ res = execute_sql(sql)
+ return res.rowcount
+
  @collect_queryband(queryband='LstKwrds')
  def list_td_reserved_keywords(key=None, raise_error=False):
  """
  DESCRIPTION:
- Function validates if the specified string is Teradata reserved keyword or not.
- If key is not specified, list all the Teradata reserved keywords.
+ Function validates if the specified string or the list of strings is Teradata reserved keyword or not.
+ If key is not specified or is a empty list, list all the Teradata reserved keywords.

  PARAMETERS:
  key:
  Optional Argument.
- Specifies a string to validate for Teradata reserved keyword.
- Types: string
+ Specifies a string or list of strings to validate for Teradata reserved keyword.
+ Types: string or list of strings

  raise_error:
  Optional Argument.
  Specifies whether to raise exception or not.
  When set to True, an exception is raised,
- if specified "key" is a Teradata reserved keyword, otherwise not.
+ if specified "key" contains Teradata reserved keyword, otherwise not.
  Default Value: False
  Types: bool

  RETURNS:
- teradataml DataFrame, if "key" is None.
- True, if "key" is Teradata reserved keyword, False otherwise.
+ teradataml DataFrame, if "key" is None or a empty list.
+ True, if "key" contains Teradata reserved keyword, False otherwise.

  RAISES:
  TeradataMlException.
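
For orientation, a hedged sketch of calling the new internal _upsert_data helper and the Teradata atomic UPDATE ... ELSE INSERT statement it parameterizes; the database, table, column and value names are illustrative:

    from teradataml.dbutils.dbutils import _upsert_data

    # Roughly builds: UPDATE db.tbl SET col1 = ? WHERE key1 = ?
    #                 ELSE INSERT db.tbl (col1) values (?)
    _upsert_data(update_columns_values={"col1": "new_value"},
                 insert_columns_values={"col1": "new_value"},
                 upsert_conditions={"key1": "k1"},
                 table_name="tbl",
                 schema_name="db")
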
@@ -1065,21 +1486,38 @@ def list_td_reserved_keywords(key=None, raise_error=False):

  >>> # Example 3: Validate and raise exception if keyword "account" is a Teradata reserved keyword.
  >>> list_td_reserved_keywords("account", raise_error=True)
- TeradataMlException: [Teradata][teradataml](TDML_2121) 'account' is a Teradata reserved keyword.
+ TeradataMlException: [Teradata][teradataml](TDML_2121) '['ACCOUNT']' is a Teradata reserved keyword.
+
+ >>> # Example 4: Validate if the list of keywords contains Teradata reserved keyword or not.
+ >>> list_td_reserved_keywords(["account", 'add', 'abc'])
+ True
+
+ >>> # Example 5: Validate and raise exception if the list of keywords contains Teradata reserved keyword.
+ >>> list_td_reserved_keywords(["account", 'add', 'abc'], raise_error=True)
+ TeradataMlException: [Teradata][teradataml](TDML_2121) '['ADD', 'ACCOUNT']' is a Teradata reserved keyword.
  """
+
  from teradataml.dataframe.dataframe import DataFrame, in_schema
  # Get the reserved keywords from the table
  reserved_keys = DataFrame(in_schema("SYSLIB", "SQLRestrictedWords"))

- # If key is not passed, return the list of Teradata reserved keywords.
- if key is None:
+ # If key is not passed or is a empty list, return the list of Teradata reserved keywords.
+ if key is None or len(key) == 0:
  return reserved_keys.select(['restricted_word'])

- # Check if key is a Teradata reserved keyword or not.
- num_rows = reserved_keys[reserved_keys.restricted_word == key.upper()].shape[0]
- if num_rows > 0:
+ key = [key] if isinstance(key, str) else key
+
+ # Store the reserved keywords in buffer.
+ if _InternalBuffer.get("reservered_words") is None:
+ _InternalBuffer.add(reservered_words={word_[0] for word_ in reserved_keys.itertuples(name=None)})
+ reservered_words = _InternalBuffer.get("reservered_words")
+
+ # Check if key contains Teradata reserved keyword or not.
+ res_key = (k.upper() for k in key if k.upper() in reservered_words)
+ res_key = list(res_key)
+ if len(res_key)>0:
  if raise_error:
- raise TeradataMlException(Messages.get_message(MessageCodes.RESERVED_KEYWORD, key),
+ raise TeradataMlException(Messages.get_message(MessageCodes.RESERVED_KEYWORD, res_key),
  MessageCodes.RESERVED_KEYWORD)
  return True
  return False
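
The rewritten list_td_reserved_keywords caches SYSLIB.SQLRestrictedWords in _InternalBuffer on first use, so repeated checks become in-memory set lookups. A short usage sketch (keywords taken from the docstring examples; "my_column" is a placeholder assumed not to be reserved):

    from teradataml.dbutils.dbutils import list_td_reserved_keywords

    list_td_reserved_keywords(["account", "add", "abc"])   # True: 'ACCOUNT' and 'ADD' are reserved
    list_td_reserved_keywords("my_column")                 # False, assuming it is not reserved
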
@@ -1150,6 +1588,10 @@ def _execute_query_and_generate_pandas_df(query, index=None, **kwargs):
  if cur is not None:
  cur.close()

+ # Set coerce_float to True for Decimal type columns.
+ if 'coerce_float' not in kwargs:
+ kwargs['coerce_float'] = True
+
  try:
  pandas_df = pd.DataFrame.from_records(data=list(tuple(row) for row in rows),
  columns=columns,
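
A small standalone illustration (plain pandas, synthetic data) of what the new coerce_float default changes when the driver returns decimal.Decimal columns:

    import pandas as pd
    from decimal import Decimal

    rows = [(1, Decimal("10.50")), (2, Decimal("3.25"))]

    # coerce_float=False keeps Decimal objects (object dtype);
    # coerce_float=True converts them to float64, matching the new default above.
    df_obj = pd.DataFrame.from_records(rows, columns=["id", "amount"], coerce_float=False)
    df_flt = pd.DataFrame.from_records(rows, columns=["id", "amount"], coerce_float=True)
    print(df_obj["amount"].dtype, df_flt["amount"].dtype)  # object float64
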
@@ -1165,3 +1607,405 @@ def _execute_query_and_generate_pandas_df(query, index=None, **kwargs):
  MessageCodes.TDMLDF_SELECT_DF_FAIL)

  return pandas_df
+
+ class _TDSessionParams:
+ """
+ A successfull connection through teradataml establishes a session with Vantage.
+ Every session will have default parameters. For example one can set Offset value
+ for parameter 'Session Time Zone'.
+ This is an internal utility to store all session related parameters.
+ """
+ def __init__(self, data):
+ """
+ Constructor to store columns and rows of session params.
+
+ PARAMETERS:
+ data:
+ Required Argument.
+ Specifies the Session parameters.
+ Types: dict
+ """
+ self.__session_params = data
+
+ def __getitem__(self, parameter):
+ """
+ Return the value of Session parameter.
+
+ PARAMETERS:
+ parameter:
+ Required Argument.
+ Specifies name of the session parameter.
+ Types: str
+ """
+ if parameter in self.__session_params:
+ return self.__session_params[parameter]
+ raise AttributeError("'TDSessionParams' object has no attribute '{}'".format(parameter))
+
+ def set_session_param(name, value):
+ """
+ DESCRIPTION:
+ Function to set the session parameter.
+ Note:
+ * Look at Vantage documentation for session parameters.
+
+ PARAMETERS:
+ name:
+ Required Argument.
+ Specifies the name of the parameter to set.
+ Permitted Values: timezone, calendar, account, character_set_unicode,
+ collation, constraint, database, dateform, debug_function,
+ dot_notation, isolated_loading, function_trace, json_ignore_errors,
+ searchuifdbpath, transaction_isolation_level, query_band, udfsearchpath
+ Types: str
+
+ value:
+ Required Argument.
+ Specifies the value for the parameter "name" to set.
+ Permitted Values:
+ 1. timezone: timezone strings
+ 2. calendar: Teradata, ISO, Compatible
+ 3. character_set_unicode: ON, OFF
+ 4. account: should be a list in which first item should be "account string" second should be
+ either SESSION or REQUEST.
+ 5. collation: ASCII, CHARSET_COLL, EBCDIC, HOST, JIS_COLL, MULTINATIONAL
+ 6. constraint: row_level_security_constraint_name {( level_name | category_name [,...] | NULL )}
+ where,
+ row_level_security_constraint_name:
+ Name of an existing constraint.
+ The specified constraint_name must be currently assigned to the user.
+ User can specify a maximum of 6 hierarchical constraints and 2 non-hierarchical
+ constraints per SET SESSION CONSTRAINT statement.
+ level_name:
+ Name of a hierarchical level, valid for the constraint_name, that is to replace the
+ default level.
+ The specified level_name must be currently assigned to the user. Otherwise, Vantage
+ returns an error to the requestor.
+ category_name:
+ A set of one or more existing non-hierarchical category names valid for the
+ constraint_name.
+ Because all assigned category (non-hierarchical) constraint values assigned to a
+ user are automatically active, "set_session_param" is only useful to specify a
+ subset of the assigned categories for the constraint.
+ For example, assume that User BOB has 3 country codes, and wants to load a table
+ with data that is to be made available to User CARL who only has rights to see data
+ for his own country. User BOB can use "set_session_param" to specify only the
+ country code for User CARL when loading the data so Carl can access the data later.
+ 7. database: Name of the new default database for the remainder of the current session.
+ 8. dateform: ANSIDATE, INTEGERDATE
+ 9. debug_function: should be a list in which first item should be "function_name" second should be
+ either ON or OFF.
+ 10. dot_notation: DEFAULT, LIST, NULL ERROR
+ 11. isolated_loading: NO, '', CONCURRENT
+ 12. function_trace: should be a list first item should be "mask_string" and second should be table name.
+ 13. json_ignore_errors: ON, OFF
+ 14. searchuifdbpath: string in format 'database_name, user_name'
+ 15. transaction_isolation_level: READ UNCOMMITTED, RU, SERIALIZABLE, SR
+ 16. query_band: should be a list first item should be "band_specification" and second should be either
+ SESSION or TRANSACTION
+ 17. udfsearchpath: should be a list first item should be "database_name" and second should be "udf_name"
+ Types: str or list of strings
+
+ Returns:
+ True, if session parameter is set successfully.
+
+ RAISES:
+ ValueError, teradatasql.OperationalError
+
+ EXAMPLES:
+ # Example 1: Set time zone offset for the session as the system default.
+ >>> set_session_param('timezone', "'LOCAL'")
+ True
+
+ # Example 2: Set time zone to "AMERICA PACIFIC".
+ >>> set_session_param('timezone', "'AMERICA PACIFIC'")
+ True
+
+ # Example 3: Set time zone to "-07:00".
+ >>> set_session_param('timezone', "'-07:00'")
+ True
+
+ # Example 4: Set time zone to 3 hours ahead of 'GMT'.
+ >>> set_session_param('timezone', "3")
+ True
+
+ # Example 6: Set calendar to 'COMPATIBLE'.
+ >>> set_session_param('calendar', "COMPATIBLE")
+ True
+
+ # Example 7: Dynamically changes your account to 'dbc' for the remainder of the session.
+ >>> set_session_param('account', ['dbc', 'SESSION'])
+ True
+
+ # Example 8: Enables Unicode Pass Through processing.
+ >>> set_session_param('character_set_unicode', 'ON')
+ True
+
+ # Example 9: Session set to ASCII collation.
+ >>> set_session_param('collation', 'ASCII')
+ True
+
+ # Example 10: The resulting session has a row-level security label consisting of an unclassified level
+ # and nato category.
+ >>> set_session_param('constraint', 'classification_category (norway)')
+ True
+
+ # Example 11: Changes the default database for the session.
+ >>> set_session_param('database', 'alice')
+ True
+
+ # Example 12: Changes the DATE format to 'INTEGERDATE'.
+ >>> set_session_param('dateform', 'INTEGERDATE')
+ True
+
+ # Example 13: Enable Debugging for the Session.
+ >>> set_session_param('debug_function', ['function_name', 'ON'])
+ True
+
+ # Example 14: Sets the session response for dot notation query result.
+ >>> set_session_param('dot_notation', 'DEFAULT')
+ True
+
+ # Example 15: DML operations are not performed as concurrent load isolated operations.
+ >>> set_session_param('isolated_loading', 'NO')
+ True
+
+ # Example 16: Enables function trace output for debugging external user-defined functions and
+ # external SQL procedures for the current session.
+ >>> set_session_param('function_trace', ["'diag,3'", 'titanic'])
+ True
+
+ # Example 17: Enables the validation of JSON data on INSERT operations.
+ >>> set_session_param('json_ignore_errors', 'ON')
+ True
+
+ # Example 18: Sets the database search path for the SCRIPT execution in the SessionTbl.SearchUIFDBPath column.
+ >>> set_session_param('SEARCHUIFDBPATH', 'dbc, alice')
+ True
+
+ # Example 19: Sets the read-only locking severity for all SELECT requests made against nontemporal tables,
+ # whether they are outer SELECT requests or subqueries, in the current session to READ regardless
+ # of the setting for the DBS Control parameter AccessLockForUncomRead.
+ # Note: SR and SERIALIZABLE are synonyms.
+ >>> set_session_param('TRANSACTION_ISOLATION_LEVEL', 'SR')
+ True
+
+ # Example 20: This example uses the PROXYROLE name:value pair in a query band to set the proxy
+ # role in a trusted session to a specific role.
+ >>> set_session_param('query_band', ["'PROXYUSER=fred;PROXYROLE=administration;'", 'SESSION'])
+ True
+
+ # Example 21: Allows you to specify a custom UDF search path. When you execute a UDF,
+ # Vantage searches this path first, before looking in the default Vantage
+ # search path for the UDF.
+ >>> set_session_param('udfsearchpath', ["alice, SYSLIB, TD_SYSFNLIB", 'bitor'])
+ True
+ """
+ # Validate argument types
+ function_args = []
+ function_args.append(["name", name, True, str, True])
+ function_args.append(["value", value, True, (int, str, float, list), False])
+ _Validators._validate_function_arguments(function_args)
+
+ if not isinstance(value, list):
+ value = [value]
+
+ # Before setting the session, first extract the session parameters
+ # and store it in buffer. This helps while unsetting the parameter.
+ result = execute_sql('help session')
+ data = dict(zip(
+ [param[0] for param in result.description],
+ [value for value in next(result)]
+ ))
+ _InternalBuffer.add(session_params = _TDSessionParams(data))
+ # Store function name of 'DEBUG_FUNCTION' used.
+ _InternalBuffer.add(function_name = value[0] if name.upper() == 'DEBUG_FUNCTION' else '')
+
+ # Set the session parameter.
+ execute_sql(getattr(SessionParamsSQL, name.upper()).format(*value))
+
+ return True
+
+ def unset_session_param(name):
+ """
+ DESCRIPTION:
+ Function to unset the session parameter.
+
+ PARAMETERS:
+ name:
+ Required Argument.
+ Specifies the parameter to unset for the session.
+ Permitted Values: timezone, account, calendar, collation,
+ database, dataform, character_set_unicode,
+ debug_function, isolated_loading, function_trace,
+ json_ignore_errors, query_band
+ Type: str
+
+ Returns:
+ True, if successfully unsets the session parameter.
+
+ RAISES:
+ ValueError, teradatasql.OperationalError
+
+ EXAMPLES:
+ # Example 1: unset session to previous time zone.
+ >>> set_session_param('timezone', "'GMT+1'")
+ True
+ >>> unset_session_param("timezone")
+ True
+
+ """
+ # Validate argument types
+ function_args = []
+ function_args.append(["name", name, True, str, True])
+ _Validators._validate_function_arguments(function_args)
+
+ # Check whether session param is set or not first.
+ session_params = _InternalBuffer.get('session_params')
+ if session_params is None:
+ msg_code = MessageCodes.FUNC_EXECUTION_FAILED
+ error_msg = Messages.get_message(msg_code, "unset_session_param", "Set the parameter before unsetting it.")
+ raise TeradataMlException(error_msg, msg_code)
+ # unset_values stores params which are not available in _InternalBuffer, to unset create a dictionary
+ # with param as key and unset param as value
+ unset_values = {"CHARACTER_SET_UNICODE": "OFF", "DEBUG_FUNCTION": [_InternalBuffer.get('function_name'), "OFF"],
+ "ISOLATED_LOADING":"NO", "FUNCTION_TRACE":"SET SESSION FUNCTION TRACE OFF",
+ "JSON_IGNORE_ERRORS": "OFF", "QUERY_BAND": ["", "SESSION"]}
+
+ # If 'name' in unset_values unset the params
+ if name.upper() in unset_values:
+ # When name is 'FUNCTION_TRACE' unset_values already have query for that, use execute_sql on that.
+ if name.upper() == "FUNCTION_TRACE":
+ execute_sql(unset_values[name.upper()])
+ # When name is other than 'FUNCTION_TRACE' use value and key of unset_values to unset param.
+ else:
+ set_session_param(name, unset_values[name.upper()])
+ return True
+
+ previous_value = "{}".format(session_params[getattr(SessionParamsPythonNames, name.upper())]) \
+ if name.upper() != 'TIMEZONE' else "'{}'".format(session_params[getattr(SessionParamsPythonNames, name.upper())])
+
+ if name.upper() == "ACCOUNT":
+ previous_value = [previous_value, 'SESSION']
+ set_session_param(name, previous_value)
+
+ return True
+
+ class _Authorize:
+ """ Parent class to either provide or revoke access on table(s). """
+ _property = None
+
+ def __init__(self, objects):
+ """
+ DESCRIPTION:
+ Constructor for creating Authorize object.
+
+ PARAMETERS:
+ objects:
+ Required Argument.
+ Specifies the name(s) of the database objects to be authorized.
+ Types: str OR list of str.
+
+ RETURNS:
+ Object of _Authorize.
+
+ RAISES:
+ None
+
+ EXAMPLES:
+ >>> auth = _Authorize('vfs_v1')
+ """
+ # Store the objects here. Then use this where ever required.
+ self._objects = objects
+ self._access_method = self.__class__.__name__.upper()
+
+ def read(self, user):
+ """
+ DESCRIPTION:
+ Authorize the read access.
+ Note:
+ One must have admin access to give read access to other "user".
+
+ PARAMETERS:
+ user:
+ Required Argument.
+ Specifies the name of the user to have read only access.
+ Types: str
+
+ RETURNS:
+ bool.
+
+ RAISES:
+ None
+
+ EXAMPLES:
+ >>> _Authorize('repo').read('BoB')
+ """
+ for object in self._objects:
+ sql = "{} SELECT ON {} {} {}".format(self._access_method, object, self._property, user)
+ execute_sql(sql)
+
+ return True
+
+ def write(self, user):
+ """
+ DESCRIPTION:
+ Authorize the write access.
+ Note:
+ One must have admin access to give write access to other "user".
+
+ PARAMETERS:
+ user:
+ Required Argument.
+ Specifies the name of the user to have write only access.
+ Types: str
+
+ RETURNS:
+ bool.
+
+ RAISES:
+ None
+
+ EXAMPLES:
+ >>> _Authorize('repo').write('BoB')
+ """
+ for access_type in ["INSERT", "UPDATE", "DELETE"]:
+ for object in self._objects:
+ sql = "{} {} ON {} {} {}".format(self._access_method, access_type, object, self._property, user)
+ execute_sql(sql)
+
+ return True
+
+ def read_write(self, user):
+ """
+ DESCRIPTION:
+ Authorize the read and write access.
+ Note:
+ One must have admin access to give read and write access to other "user".
+
+ PARAMETERS:
+ user:
+ Required Argument.
+ Specifies the name of the user to have read and write access.
+ Types: str
+
+ RETURNS:
+ bool.
+
+ RAISES:
+ None
+
+ EXAMPLES:
+ >>> _Authorize('repo').read_write('BoB')
+ """
+ self.read(user)
+ return self.write(user)
+
+
+ class Grant(_Authorize):
+ """ Class to grant access to tables."""
+ _property = "TO"
+
+
+ class Revoke(_Authorize):
+ """ Class to revoke access from tables."""
+ _property = "FROM"