teradataml 20.0.0.0__py3-none-any.whl → 20.0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of teradataml might be problematic.

Files changed (263)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +183 -0
  4. teradataml/__init__.py +6 -3
  5. teradataml/_version.py +2 -2
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +275 -40
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +1 -0
  11. teradataml/analytics/json_parser/utils.py +17 -21
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +10 -2
  15. teradataml/analytics/table_operator/__init__.py +3 -2
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +62 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1553 -319
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +276 -319
  22. teradataml/automl/data_transformation.py +163 -81
  23. teradataml/automl/feature_engineering.py +402 -239
  24. teradataml/automl/feature_exploration.py +9 -2
  25. teradataml/automl/model_evaluation.py +48 -51
  26. teradataml/automl/model_training.py +291 -189
  27. teradataml/catalog/byom.py +8 -8
  28. teradataml/catalog/model_cataloging_utils.py +1 -1
  29. teradataml/clients/auth_client.py +133 -0
  30. teradataml/clients/pkce_client.py +1 -1
  31. teradataml/common/aed_utils.py +3 -2
  32. teradataml/common/constants.py +48 -6
  33. teradataml/common/deprecations.py +13 -7
  34. teradataml/common/garbagecollector.py +156 -120
  35. teradataml/common/messagecodes.py +6 -1
  36. teradataml/common/messages.py +3 -1
  37. teradataml/common/sqlbundle.py +1 -1
  38. teradataml/common/utils.py +103 -11
  39. teradataml/common/wrapper_utils.py +1 -1
  40. teradataml/context/context.py +121 -31
  41. teradataml/data/advertising.csv +201 -0
  42. teradataml/data/bank_marketing.csv +11163 -0
  43. teradataml/data/bike_sharing.csv +732 -0
  44. teradataml/data/boston2cols.csv +721 -0
  45. teradataml/data/breast_cancer.csv +570 -0
  46. teradataml/data/complaints_test_tokenized.csv +353 -0
  47. teradataml/data/complaints_tokens_model.csv +348 -0
  48. teradataml/data/covid_confirm_sd.csv +83 -0
  49. teradataml/data/customer_segmentation_test.csv +2628 -0
  50. teradataml/data/customer_segmentation_train.csv +8069 -0
  51. teradataml/data/dataframe_example.json +10 -0
  52. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +3 -1
  53. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +6 -0
  54. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +5 -1
  55. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +61 -1
  56. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  57. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +2 -0
  58. teradataml/data/docs/sqle/docs_17_20/FTest.py +105 -26
  59. teradataml/data/docs/sqle/docs_17_20/GLM.py +162 -1
  60. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +5 -3
  61. teradataml/data/docs/sqle/docs_17_20/KMeans.py +48 -1
  62. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  63. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +3 -2
  64. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +5 -0
  65. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +6 -0
  66. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  67. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  68. teradataml/data/docs/sqle/docs_17_20/ROC.py +3 -2
  69. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +13 -2
  70. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +119 -1
  71. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +93 -1
  72. teradataml/data/docs/sqle/docs_17_20/Shap.py +197 -0
  73. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +163 -1
  74. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  75. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  76. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  77. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +12 -4
  78. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +7 -1
  79. teradataml/data/docs/sqle/docs_17_20/ZTest.py +72 -7
  80. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  81. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  82. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  83. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  84. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  85. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  86. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  87. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  88. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  89. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  90. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  91. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  92. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  93. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  94. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  95. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  96. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  97. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  98. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  99. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  100. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  101. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  102. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  103. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  104. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  105. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  106. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  107. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  108. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  109. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  110. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  111. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  112. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  113. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  114. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  115. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  116. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  117. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  118. teradataml/data/dwt2d_dataTable.csv +65 -0
  119. teradataml/data/dwt_dataTable.csv +8 -0
  120. teradataml/data/dwt_filterTable.csv +3 -0
  121. teradataml/data/finance_data4.csv +13 -0
  122. teradataml/data/glm_example.json +28 -1
  123. teradataml/data/grocery_transaction.csv +19 -0
  124. teradataml/data/housing_train_segment.csv +201 -0
  125. teradataml/data/idwt2d_dataTable.csv +5 -0
  126. teradataml/data/idwt_dataTable.csv +8 -0
  127. teradataml/data/idwt_filterTable.csv +3 -0
  128. teradataml/data/insect2Cols.csv +61 -0
  129. teradataml/data/interval_data.csv +5 -0
  130. teradataml/data/jsons/paired_functions.json +14 -0
  131. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +99 -27
  132. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  133. teradataml/data/jsons/sqle/17.20/TD_FTest.json +166 -83
  134. teradataml/data/jsons/sqle/17.20/TD_GLM.json +90 -14
  135. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +48 -5
  136. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +5 -3
  137. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +31 -11
  138. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  139. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  140. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +3 -2
  141. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  142. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  143. teradataml/data/jsons/sqle/17.20/TD_ROC.json +2 -1
  144. teradataml/data/jsons/sqle/17.20/TD_SVM.json +16 -16
  145. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +19 -1
  146. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +168 -15
  147. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +50 -1
  148. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  149. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  150. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  151. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +25 -7
  152. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +17 -4
  153. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +157 -80
  154. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  155. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  156. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  157. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  158. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  159. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  160. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  161. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  162. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  163. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  164. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  165. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  166. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  167. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  168. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  169. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  170. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  171. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  172. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  173. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  174. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  175. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  176. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  177. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  178. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  179. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  180. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  181. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  182. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  183. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  184. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  185. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  186. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  187. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  188. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  189. teradataml/data/jsons/uaf/17.20/TD_SAX.json +208 -0
  190. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  191. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  192. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  193. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  194. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  195. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +400 -0
  196. teradataml/data/kmeans_example.json +5 -0
  197. teradataml/data/kmeans_table.csv +10 -0
  198. teradataml/data/load_example_data.py +8 -2
  199. teradataml/data/naivebayestextclassifier_example.json +1 -1
  200. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  201. teradataml/data/onehot_encoder_train.csv +4 -0
  202. teradataml/data/openml_example.json +29 -0
  203. teradataml/data/peppers.png +0 -0
  204. teradataml/data/real_values.csv +14 -0
  205. teradataml/data/sax_example.json +8 -0
  206. teradataml/data/scale_attributes.csv +3 -0
  207. teradataml/data/scale_example.json +52 -1
  208. teradataml/data/scale_input_part_sparse.csv +31 -0
  209. teradataml/data/scale_input_partitioned.csv +16 -0
  210. teradataml/data/scale_input_sparse.csv +11 -0
  211. teradataml/data/scale_parameters.csv +3 -0
  212. teradataml/data/scripts/deploy_script.py +21 -2
  213. teradataml/data/scripts/sklearn/sklearn_fit.py +40 -37
  214. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +22 -30
  215. teradataml/data/scripts/sklearn/sklearn_function.template +42 -24
  216. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +23 -33
  217. teradataml/data/scripts/sklearn/sklearn_neighbors.py +19 -28
  218. teradataml/data/scripts/sklearn/sklearn_score.py +32 -32
  219. teradataml/data/scripts/sklearn/sklearn_transform.py +85 -42
  220. teradataml/data/star_pivot.csv +8 -0
  221. teradataml/data/templates/open_source_ml.json +2 -1
  222. teradataml/data/teradataml_example.json +97 -1
  223. teradataml/data/timestamp_data.csv +4 -0
  224. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  225. teradataml/data/uaf_example.json +55 -1
  226. teradataml/data/unpivot_example.json +15 -0
  227. teradataml/data/url_data.csv +9 -0
  228. teradataml/data/windowdfft.csv +16 -0
  229. teradataml/data/ztest_example.json +16 -0
  230. teradataml/dataframe/copy_to.py +9 -4
  231. teradataml/dataframe/data_transfer.py +125 -64
  232. teradataml/dataframe/dataframe.py +575 -57
  233. teradataml/dataframe/dataframe_utils.py +47 -9
  234. teradataml/dataframe/fastload.py +273 -90
  235. teradataml/dataframe/functions.py +339 -0
  236. teradataml/dataframe/row.py +160 -0
  237. teradataml/dataframe/setop.py +2 -2
  238. teradataml/dataframe/sql.py +740 -18
  239. teradataml/dataframe/window.py +1 -1
  240. teradataml/dbutils/dbutils.py +324 -18
  241. teradataml/geospatial/geodataframe.py +1 -1
  242. teradataml/geospatial/geodataframecolumn.py +1 -1
  243. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  244. teradataml/lib/aed_0_1.dll +0 -0
  245. teradataml/opensource/sklearn/_sklearn_wrapper.py +254 -122
  246. teradataml/options/__init__.py +16 -5
  247. teradataml/options/configure.py +39 -6
  248. teradataml/options/display.py +2 -2
  249. teradataml/plot/axis.py +4 -4
  250. teradataml/scriptmgmt/UserEnv.py +26 -19
  251. teradataml/scriptmgmt/lls_utils.py +120 -16
  252. teradataml/table_operators/Script.py +4 -5
  253. teradataml/table_operators/TableOperator.py +160 -26
  254. teradataml/table_operators/table_operator_util.py +88 -41
  255. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  256. teradataml/telemetry_utils/__init__.py +0 -0
  257. teradataml/telemetry_utils/queryband.py +52 -0
  258. teradataml/utils/validators.py +41 -3
  259. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/METADATA +191 -6
  260. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/RECORD +263 -185
  261. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/WHEEL +0 -0
  262. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/top_level.txt +0 -0
  263. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/zip-safe +0 -0
@@ -0,0 +1,4 @@
+ id,timestamp_col,timestamp_col1,format_col,timezone_col
+ 0,"2015-01-08 00:00:12.2",123456,"YYYY-MM-DD HH24:MI:SS.FF6","GMT"
+ 1,"2015-01-08 13:00",878986,"YYYY-MM-DD HH24:MI","America Pacific"
+ 2,"2015-01-08 00:00:12.2+10:00",45678910234,"YYYY-MM-DD HH24:MI:SS.FF6 TZH:TZM","GMT+10"

@@ -0,0 +1,19 @@
+ passenger,AttributeName,AttributeValue,survived
+ 61,pclass,3,0
+ 1000,pclass,3,1
+ 40,pclass,3,1
+ 21,pclass,2,0
+ 61,gender,male,0
+ 1000,gender,,1
+ 40,gender,female,1
+ 21,gender,male,0
+ 2,pclass,1,1
+ 16,pclass,2,1
+ 7,pclass,1,0
+ 2,gender,female,1
+ 16,gender,female,1
+ 7,gender,male,0
+ 10,pclass,2,1
+ 4,pclass,1,1
+ 10,gender,female,1
+ 4,gender,female,1
@@ -471,5 +471,59 @@
  "CONF_OFF_v": "FLOAT",
  "CONF_LOW_v": "FLOAT",
  "CONF_HI_v": "FLOAT"
+ },
+ "dwt_dataTable":{
+ "id": "INTEGER",
+ "rowi": "INTEGER",
+ "v": "FLOAT"
+ },
+ "dwt_filterTable":{
+ "id": "INTEGER",
+ "seq": "INTEGER",
+ "lo": "FLOAT",
+ "hi": "FLOAT"
+ },
+ "idwt_dataTable":{
+ "id": "INTEGER",
+ "rowi": "INTEGER",
+ "approx": "FLOAT",
+ "detail": "FLOAT"
+ },
+ "idwt_filterTable":{
+ "id": "INTEGER",
+ "seq": "INTEGER",
+ "lo": "FLOAT",
+ "hi": "FLOAT"
+ },
+ "dwt2d_dataTable":{
+ "id": "INTEGER",
+ "x": "INTEGER",
+ "y": "INTEGER",
+ "v": "FLOAT"
+ },
+ "idwt2d_dataTable":{
+ "id": "INTEGER",
+ "x": "INTEGER",
+ "y": "INTEGER",
+ "v": "FLOAT"
+ },
+ "covid_confirm_sd":{
+ "city": "VARCHAR(15)",
+ "row_axis": "INTEGER",
+ "cnumber": "INTEGER"
+ },
+ "real_values":{
+ "TD_TIMECODE": "TIMESTAMP(0)",
+ "id": "INTEGER",
+ "val": "FLOAT",
+ "<PTI_CLAUSE>": "(TIMESTAMP(0), DATE '2020-01-01', HOURS(1), COLUMNS(id), nonsequenced)"
+ },
+ "windowdfft":{
+ "id": "INTEGER",
+ "row_i": "INTEGER",
+ "v1": "FLOAT",
+ "v2": "FLOAT",
+ "v3": "FLOAT",
+ "v4": "FLOAT"
  }
- }
+ }
@@ -6,5 +6,20 @@
  "temp": "integer",
  "pressure": "real",
  "dewpoint": "varchar(30)"
+ },
+ "titanic_dataset_unpivoted":{
+ "passenger": "integer",
+ "AttributeName": "varchar(30)",
+ "AttributeValue": "varchar(30)",
+ "survived": "integer"
+ },
+ "star_pivot":{
+ "country": "varchar(30)",
+ "state": "varchar(30)",
+ "yr": "integer",
+ "qtr": "varchar(30)",
+ "sales": "integer",
+ "cogs": "integer",
+ "rating": "varchar(30)"
  }
  }

@@ -0,0 +1,9 @@
+ "id","urls","part"
+ 0,"http://example.com:8080/path","FILE"
+ 1,"ftp://example.net:21/path","PATH"
+ 2,"https://example.net/path4/path5/path6?query4=value4#fragment3","REF"
+ 3,"https://www.facebook.com","HOST"
+ 4,"https://teracloud-pod-services-pod-account-service.dummyvalue.production.pods.teracloud.ninja/v1/accounts/acc-dummyvalue/user-environment-service/api/v1/","QUERY"
+ 5,"http://pg.example.ml/path150#fragment90","AUTHORITY"
+ 6,"smtp://user:password@smtp.example.com:21/file.txt","USERINFO"
+ 7,"https://www.google.com","PROTOCOL"
@@ -0,0 +1,16 @@
+ id,row_i,v1,v2,v3,v4
+ 3,1,0.0,1.4,1.0,1.0
+ 3,2,1.0,2.4,2.0,2.0
+ 3,3,2.0,3.4,3.0,3.0
+ 3,4,3.0,4.6,4.0,4.0
+ 3,5,0.0,5.9,5.0,5.0
+ 3,6,1.0,6.7,6.0,6.0
+ 3,7,2.0,7.7,7.0,7.0
+ 3,8,3.0,8.7,8.0,8.0
+ 3,9,0.0,9.9,9.0,9.0
+ 3,10,1.0,10.2,10.0,10.0
+ 3,11,2.0,11.2,11.0,11.0
+ 3,12,3.0,12.2,12.0,12.0
+ 3,13,1.0,10.2,13.0,13.0
+ 3,14,2.0,11.2,14.0,14.0
+ 3,15,3.0,12.2,15.0,15.0

@@ -0,0 +1,16 @@
+ {
+ "roc_input" : {
+ "model_id" : "integer",
+ "id" : "integer",
+ "observation" : "integer",
+ "probability" : "real"
+ },
+ "boston2cols":{
+ "groupName":"VARCHAR(40)",
+ "groupValue":"REAL"
+ },
+ "insect2Cols":{
+ "groupName":"VARCHAR(40)",
+ "groupValue":"INTEGER"
+ }
+ }
@@ -30,7 +30,7 @@ from teradatasql import OperationalError
  from teradataml.common.wrapper_utils import AnalyticsWrapperUtils
  from teradataml.utils.utils import execute_sql
  from teradataml.utils.validators import _Validators
- from teradatasqlalchemy.telemetry.queryband import collect_queryband
+ from teradataml.telemetry_utils.queryband import collect_queryband


  @collect_queryband(queryband="CpToSql")
@@ -569,7 +569,6 @@ def copy_to_sql(df, table_name,
  chunksize, is_pti, timecode_column,
  sequence_column, match_column_order)

-
  # df is a teradataml DataFrame object (to_sql wrapper used)
  elif isinstance(df, tdmldf.DataFrame):
  df_column_list = [col.name for col in df._metaexpr.c]

@@ -578,7 +577,13 @@ def copy_to_sql(df, table_name,
  # Reorder the column list to reposition the timecode and sequence columns
  df_column_list = _reorder_insert_list_for_pti(df_column_list, timecode_column, sequence_column)

- df_utils._insert_all_from_table(table_name, df._table_name, df_column_list, schema_name, temporary)
+ source_tbl_name = UtilFuncs._extract_table_name(df._table_name)
+ from_schema_name = UtilFuncs._extract_db_name(df._table_name)
+
+ df_utils._insert_all_from_table(table_name, source_tbl_name, df_column_list,
+ to_schema_name=schema_name,
+ from_schema_name=from_schema_name,
+ temporary=temporary)

  # While table name conflict is present, Delete the source table after creation of temporary table.
  # Rename the temporary table to destination table name.
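Note on the change above: instead of passing the fully qualified source name straight through, copy_to_sql now splits the DataFrame's underlying table name into database and table parts and hands both to _insert_all_from_table, so copies whose source lives in another database resolve correctly. A minimal, hedged sketch of the user-facing call this supports (table and schema names are placeholders; a connected context is assumed):

    from teradataml import DataFrame, copy_to_sql

    # Source DataFrame backed by a table in the session's default database.
    src = DataFrame("admissions_train")

    # Copy it into a table in a different database; the new code path extracts
    # the source schema and table name separately before the insert.
    copy_to_sql(df=src, table_name="admissions_copy",
                schema_name="target_db", if_exists="replace")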
@@ -1656,7 +1661,7 @@ def _get_all_sqlalchemy_mappings():
  EXAMPLES:
  _get_all_sqlalchemy_mappings()
  """
- teradata_types_map = {'int32':INTEGER(), 'int64':BIGINT(),
+ teradata_types_map = {'int32':INTEGER(), 'int64':BIGINT(), "Int64": INTEGER(),
  'object':VARCHAR(configure.default_varchar_size,charset='UNICODE'),
  'O':VARCHAR(configure.default_varchar_size,charset='UNICODE'),
  'float64':FLOAT(), 'float32':FLOAT(), 'bool':BYTEINT(),
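The added "Int64" entry covers pandas' nullable integer extension dtype, which previously had no mapping in this dtype-to-SQLAlchemy map. A short illustrative sketch (the table name is a placeholder):

    import pandas as pd
    from teradataml import copy_to_sql

    # "Int64" (capital I) is the nullable extension dtype and can hold missing values.
    pdf = pd.DataFrame({"id": pd.array([1, 2, None], dtype="Int64"),
                        "score": [10.5, 20.1, 30.2]})

    # With the added mapping, the "id" column is typed as INTEGER when copied.
    copy_to_sql(df=pdf, table_name="nullable_int_demo", if_exists="replace")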
@@ -19,15 +19,15 @@ from teradataml.common.sqlbundle import SQLBundle
  from teradataml.common.utils import UtilFuncs
  from teradataml.common.constants import CopyToConstants
  from teradataml.context.context import get_context, get_connection, \
- _get_context_temp_databasename
+ _get_context_temp_databasename, _get_current_databasename
  from teradataml.dataframe import dataframe as tdmldf
  from teradataml.dataframe.copy_to import copy_to_sql, _create_table_object, \
- _get_pd_df_column_names, _extract_column_info, \
- _check_columns_insertion_compatible, _get_index_labels
+ _get_pd_df_column_names, _extract_column_info, \
+ _check_columns_insertion_compatible, _get_index_labels
  from teradataml.dataframe.dataframe_utils import DataFrameUtils as df_utils
  from teradataml.dbutils.dbutils import _create_table, _execute_query_and_generate_pandas_df
  from teradataml.utils.validators import _Validators
- from teradatasqlalchemy.telemetry.queryband import collect_queryband
+ from teradataml.telemetry_utils.queryband import collect_queryband


  @collect_queryband(queryband="fstExprt")
@@ -335,7 +335,7 @@ def fastexport(df, export_to="pandas", index_column=None,
  if not csv_file:
  raise TeradataMlException(
  Messages.get_message(MessageCodes.DEPENDENT_ARG_MISSING, "csv_file",
- "{0}='{1}'".format("export_to","csv")),
+ "{0}='{1}'".format("export_to", "csv")),
  MessageCodes.DEPENDENT_ARG_MISSING)

  if not csv_file.lower().endswith(".csv"):

@@ -363,7 +363,7 @@ def fastexport(df, export_to="pandas", index_column=None,
  raise TeradataMlException(
  Messages.get_message(MessageCodes.DATA_EXPORT_FAILED, "fastexport",
  export_to, str(err)),
- MessageCodes.DATA_EXPORT_FAILED)
+ MessageCodes.DATA_EXPORT_FAILED)


  @collect_queryband(queryband="rdCsv")
@@ -601,7 +601,7 @@ def read_csv(filepath,
  Specifies whether to persist the errors/warnings(if any) information in Vantage
  or not.
  If "save_errors" is set to False:
- 1. Errors or warnings (in any) are not persisted into tables.
+ 1. Errors or warnings (if any) are not persisted into tables.
  2. Errors table genarated by FastloadCSV are not persisted.
  If "save_errors" is set to True:
  1. The errors or warnings information is persisted and names of error and
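The docstring fix above only corrects the wording; errors and warnings are still persisted to tables only when save_errors is True. A hedged usage sketch of read_csv (file path, table name and column types below are placeholders):

    from collections import OrderedDict
    from teradataml import read_csv
    from teradatasqlalchemy.types import INTEGER, VARCHAR, FLOAT

    # Column types for the table to be created from the CSV (illustrative).
    types = OrderedDict(id=INTEGER, name=VARCHAR(50), amount=FLOAT)

    # save_errors=True keeps the FastLoad error/warning information in Vantage
    # instead of discarding it after the load.
    result = read_csv(filepath="sales.csv", table_name="sales_stage",
                      types=types, save_errors=True)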
@@ -883,7 +883,7 @@ def read_csv(filepath,

  # Create SQLAlchemy table object from existing table.
  existing_table = UtilFuncs._get_sqlalchemy_table(table_name,
- schema_name=schema_name)
+ schema_name=schema_name)

  # Check compatibility of CSV columns with existing table columns.
  if types is not None:

@@ -904,7 +904,7 @@ def read_csv(filepath,
  primary_index=existing_table_primary_index)
  else:
  rc_dict = dt_obj._insert_from_csv_without_fastload(table_name=table_name,
- column_names=cols_name)
+ column_names=cols_name)
  # Return the read_csv result.
  return dt_obj._get_result(rc_dict)

@@ -923,6 +923,7 @@ class _DataTransferUtils():
  Teradata Vantage to outside world, for example Data Transfer using
  FastExport Protocol.
  """
+
  def __init__(self, df, index_column=None, num_rows=99999, all_rows=False,
  catch_errors_warnings=False, table_name=None,
  schema_name=None, if_exists='append', index=False,

@@ -934,7 +935,9 @@
  columns_list=None, sequence_column=None, seq_max=None,
  use_fastload=True, api_name='fastexport',
  open_sessions=None, chunksize=CopyToConstants.DBAPI_BATCHSIZE.value,
- match_column_order=True):
+ match_column_order=True, err_tbl_1_suffix=None,
+ err_tbl_2_suffix=None, err_tbl_name=None, warn_tbl_name=None,
+ err_staging_db=None):
  """
  DESCRIPTION:
  Constructor for the _DataTransferUtils class. It initialises
@@ -1088,6 +1091,35 @@
  Default Value: 16383
  Types: int

+ err_tbl_1_suffix:
+ Optional Argument.
+ Specifies the suffix for error table 1 created by fastload job.
+ Types: String
+
+ err_tbl_2_suffix:
+ Optional Argument.
+ Specifies the suffix for error table 2 created by fastload job.
+ Types: String
+
+ err_tbl_name:
+ Optional Argument.
+ Specifies the name for error table.
+ Types: String
+
+ warn_tbl_name:
+ Optional Argument.
+ Specifies the name for warning table.
+ Types: String
+
+ err_staging_db:
+ Optional Argument.
+ Specifies the name of the database to be used for creating staging
+ table and error tables.
+ Note:
+ Current session user must have CREATE, DELETE and INSERT table
+ rights on err_staging_db database.
+ Types: String
+
  PARAMETERS:
  None.

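The five new constructor arguments feed the error-table handling described above; in this release they surface through the public fastload API. A minimal sketch, assuming fastload passes these keywords through to _DataTransferUtils unchanged (DataFrame contents, table and database names are placeholders):

    import pandas as pd
    from teradataml import fastload

    pdf = pd.DataFrame({"id": range(5), "val": [1.1, 2.2, 3.3, 4.4, 5.5]})

    # Keep error artifacts in a dedicated database and control their names,
    # instead of accepting the generated defaults.
    fastload(df=pdf, table_name="sales_fl",
             save_errors=True,
             err_staging_db="stage_db",      # user needs CREATE/INSERT/DELETE rights here
             err_tbl_1_suffix="_e1",
             err_tbl_2_suffix="_e2")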
@@ -1139,6 +1171,11 @@
  self.open_sessions = open_sessions
  self.chunksize = chunksize
  self.match_column_order = match_column_order
+ self.err_tbl_1_suffix = err_tbl_1_suffix
+ self.err_tbl_2_suffix = err_tbl_2_suffix
+ self.err_tbl_name = err_tbl_name
+ self.warn_tbl_name = warn_tbl_name
+ self.err_staging_db = err_staging_db

  # Validate arguments.
  if self.api_name == 'fastexport':

@@ -1243,6 +1280,7 @@
  dt_obj = _DataTransferUtils()
  dt_obj._validate_csv_sep_quotechar()
  """
+
  # Function to validate char value for length and allowed characters.
  def validate_char_arg_csv(arg_name, arg):

@@ -1444,7 +1482,7 @@
  csv_file_name=target_csv)

  """
- fastexport_esc_func=""
+ fastexport_esc_func = ""
  open_session_esc_func = ""
  if require is not None:
  if require:

@@ -1492,7 +1530,7 @@
  write_csv_escape_func = DriverEscapeFunctions.WRITE_TO_CSV.value.format(csv_file_name)
  field_sep_esc_func = DriverEscapeFunctions.FIELD_SEP.value.format(field_sep)
  field_quote_esc_func = DriverEscapeFunctions.FIELD_QUOTE.value.format(field_quote)
-
+
  query = "{0}{1}{2}{3}{4}{5}".format(fastexport_esc_func,
  open_session_esc_func,
  field_sep_esc_func,
@@ -1639,7 +1677,7 @@

  # End of functions specific to exporting table data in Vantage into pandas DataFrame.

- # General functions to get warrnings and errors.
+ # General functions to get warrnings and errors.
  def _get_errors_warnings(self, cur, insert_stmt, escape_function):
  """
  Internal function executes teradatasql provided escape functions

@@ -1672,7 +1710,7 @@
  dt_obj._get_errors_warnings(cur, insert_stmt, escape_function)
  """
  errorwarninglist = self._process_escape_functions(cur,
- escape_function= escape_function,
+ escape_function=escape_function,
  insert_query=insert_stmt)

  from teradatasql import vernumber
@@ -1714,11 +1752,10 @@

  return pd.DataFrame()

- def _create_error_warnings_table(self, pdf, msg_type, logon_seq_number):
+ def _create_error_warnings_table(self, pdf, msg_type, logon_seq_number, table_name=None):
  """
  DESCRIPTION:
- Internal function creates the errors and warnings table in Vantage,
- if save_errors is set to True.
+ Internal function creates the errors and warnings table in Vantage.

  PARAMETERS:
  pdf:

@@ -1747,12 +1784,13 @@
  dt_obj = _DataTransferUtils(df, table_name, types)
  dt_obj._create_error_warnings_table(pdf, msg_type, logon_seq_number)
  """
- if self.save_errors:
- err_warn_tablename = "td_fl_{0}_{1}_{2}".format(self.table_name, msg_type, logon_seq_number)
- copy_to_sql(pdf, err_warn_tablename, if_exists='replace')
- return err_warn_tablename
-
- return ''
+ if not table_name:
+ table_name = "td_fl_{0}_{1}_{2}".format(self.table_name, msg_type, logon_seq_number)
+ copy_to_sql(pdf, table_name, schema_name=self.err_staging_db,
+ if_exists='replace')
+ return "{}.{}".format(self.err_staging_db if self.err_staging_db
+ else _get_current_databasename(),
+ table_name)

  def _process_escape_functions(self, cur, escape_function, insert_query=None):
  """
@@ -1834,16 +1872,23 @@
  ins_query = dt_obj._form_insert_query()
  """

- csv_esc_func = ""
- open_sessions_esc_func = ""
- field_sep_esc_func = ""
- field_quote_esc_func = ""
+ escape_funcs = ""

+ # Get the fastload escape function.
+ if self.use_fastload:
+ escape_funcs = escape_funcs + DriverEscapeFunctions.REQUIRE_FASTLOAD.value
+
+ # Get the escape function clause for open_sessions.
+ if self.open_sessions is not None:
+ escape_funcs = escape_funcs + DriverEscapeFunctions.OPEN_SESSIONS.value.format(self.open_sessions)
+
+ # Create the list of values to be inserted.
  if self.api_name == "fastload":
  col_names = _get_pd_df_column_names(self.df)
  insert_values = ", ".join(['?' for i in range(len(col_names) + len(self.df.index.names)
  if self.index is True else len(col_names))])

+ # Get escape functions related to read_csv.
  if self.api_name == "read_csv":
  # Get the column names.
  if self.if_exists == 'append' and column_names is not None:

@@ -1852,7 +1897,7 @@
  col_names, _ = _extract_column_info(self.df, self.types)

  # Get read_csv escape function.
- csv_esc_func = DriverEscapeFunctions.READ_CSV.value.format(self.df)
+ escape_funcs = escape_funcs + DriverEscapeFunctions.READ_CSV.value.format(self.df)
  insert_values = ", ".join(['?' for i in range(len(col_names))])

  # Create escape function for sep.

@@ -1861,7 +1906,7 @@
  field_sep = "''''"
  elif self.sep == "\"":
  field_sep = "\"\"\"\""
- field_sep_esc_func = DriverEscapeFunctions.FIELD_SEP.value.format(field_sep)
+ escape_funcs = escape_funcs + DriverEscapeFunctions.FIELD_SEP.value.format(field_sep)

  # Create escape function for quotechar.
  field_quote = "'{0}'".format(self.quotechar)
@@ -1869,27 +1914,28 @@
  field_quote = "''''"
  elif self.quotechar == "\"":
  field_quote = "\"\"\"\""
- field_quote_esc_func = DriverEscapeFunctions.FIELD_QUOTE.value.format(field_quote)
+ escape_funcs = escape_funcs + DriverEscapeFunctions.FIELD_QUOTE.value.format(field_quote)

  # Create base insert query.
  base_insert_query = "INSERT INTO {0} VALUES ({1});".format(table, insert_values)

- # Get the fastload escape function.
- fastload_esc_func = DriverEscapeFunctions.REQUIRE_FASTLOAD.value \
- if self.use_fastload else ""
+ # Get the escape function clauses for error table and DB related escape functions.
+ # TODO: This condition will be optimized with ELE-6743.
+ if self.api_name == "fastload" and self.save_errors and not self.err_tbl_name:
+ escape_funcs = escape_funcs + DriverEscapeFunctions.ERR_TBL_MNG_FLAG.value.format("off")

- # Get the escape function clause for open_sessions
- if self.open_sessions is not None:
- open_sessions_esc_func = DriverEscapeFunctions.OPEN_SESSIONS.value.format(self.open_sessions)
+ if self.err_tbl_1_suffix:
+ escape_funcs = escape_funcs + DriverEscapeFunctions.ERR_TBL_1.value.format(self.err_tbl_1_suffix)

- query = "{0}{1}{2}{3}{4}{5}".format(fastload_esc_func,
- open_sessions_esc_func,
- field_sep_esc_func,
- field_quote_esc_func,
- csv_esc_func,
- base_insert_query)
- return query
+ if self.err_tbl_2_suffix:
+ escape_funcs = escape_funcs + DriverEscapeFunctions.ERR_TBL_2.value.format(self.err_tbl_2_suffix)
+
+ if self.err_staging_db:
+ escape_funcs = escape_funcs + DriverEscapeFunctions.ERR_STAGING_DB.value.format(self.err_staging_db)

+ # Generate final insert query by appending all escape functions.
+ query = "{0}{1}".format(escape_funcs, base_insert_query)
+ return query

  def _table_exists(self, con):
  """
@@ -1912,10 +1958,10 @@
  dt_obj = _DataTransferUtils(df)
  ins_query = dt_obj._table_exists()
  """
- return con.dialect.has_table(get_connection(), self.table_name, self.schema_name)
-
+ return con.dialect.has_table(get_connection(), self.table_name, self.schema_name,
+ table_only=True)

- def _get_fully_qualified_table_name(self, table_name=None):
+ def _get_fully_qualified_table_name(self, table_name=None, schema_name=None):
  """
  DESCRIPTION:
  Function returns schema qualified table name

@@ -1929,6 +1975,11 @@
  Specifies the table name.
  Types: str

+ schema_name:
+ Optional Argument.
+ Specifies the schema name.
+ Types: str
+
  RETURNS:
  str.

@@ -1942,12 +1993,14 @@
  table_name = table_name if table_name else self.table_name

  table = '"{}"'.format(table_name)
- if self.schema_name is not None:
+ if schema_name is not None:
+ table = '"{}"."{}"'.format(schema_name, table_name)
+ elif self.schema_name is not None:
  table = '"{}"."{}"'.format(self.schema_name, table_name)

  return table

- def _create_table(self, con, table_name=None):
+ def _create_table(self, con, table_name=None, schema_name=None):
  """
  DESCRIPTION:
  Internal function creates table in the Vantage.

@@ -1963,6 +2016,11 @@
  Specifies the table name.
  Types: str

+ schema_name:
+ Optional Argument.
+ Specifies the schema name where table needs to be created.
+ Types: str
+
  RETURNS:
  None.

@@ -1974,9 +2032,9 @@
  dt_obj._create_table(con)
  """
  table_name = table_name if table_name else self.table_name
-
+ schema_name = schema_name if schema_name else self.schema_name
  table = _create_table_object(df=self.df, table_name=table_name, types=self.types, con=con,
- schema_name=self.schema_name, primary_index=self.primary_index,
+ schema_name=schema_name, primary_index=self.primary_index,
  temporary=self.temporary, set_table=self.set_table, index=self.index,
  index_label=self.index_label)


@@ -2032,7 +2090,7 @@

  # Turn off autocommit before the Fastload insertion.
  self._process_escape_functions(cur, escape_function= \
- DriverEscapeFunctions.AUTOCOMMIT_OFF)
+ DriverEscapeFunctions.AUTOCOMMIT_OFF)

  # Initialize dict template for saving error/warning information.
  err_dict = {}

@@ -2052,7 +2110,7 @@

  # Get logon sequence number to be used for error/warning table names
  logon_seq_number = self._process_escape_functions(cur, escape_function= \
- DriverEscapeFunctions.LOGON_SEQ_NUM,
+ DriverEscapeFunctions.LOGON_SEQ_NUM,
  insert_query=ins)

  # Commit the rows
@@ -2064,16 +2122,16 @@
  if len(warn) != 0:
  warn_dict['error_message'].extend(warn)

- # Get error and warning informations for error and warning tables, persist
+ # Get error and warning information for error and warning tables, persist
  # error and warning tables to Vantage if user has specified save_error as True
  # else show it as pandas dataframe on console.
  pd_err_df = self._get_pandas_df_from_errors_warnings(err_dict)
- if not pd_err_df.empty:
+ if not pd_err_df.empty and self.save_errors:
  msg_type = "err"
  error_tablename = self._create_error_warnings_table(pd_err_df, msg_type, logon_seq_number[0][0])

  pd_warn_df = self._get_pandas_df_from_errors_warnings(warn_dict)
- if not pd_warn_df.empty:
+ if not pd_warn_df.empty and self.save_errors:
  msg_type = "warn"
  warn_tablename = self._create_error_warnings_table(pd_warn_df, msg_type, logon_seq_number[0][0])

@@ -2087,7 +2145,8 @@
  # drop the tables created by FastloadCSV.
  if not self.save_errors:
  for table in fastloadcsv_err_tables:
- if conn.dialect.has_table(conn, table_name=table, schema=self.schema_name):
+ if conn.dialect.has_table(conn, table_name=table, schema=self.schema_name,
+ table_only=True):
  UtilFuncs._drop_table(self._get_fully_qualified_table_name(table))
  err_warn_dict.update({"fastloadcsv_error_tables": []})
  return err_warn_dict
@@ -2100,7 +2159,7 @@
  finally:
  # Turn on autocommit.
  self._process_escape_functions(cur, escape_function= \
- DriverEscapeFunctions.AUTOCOMMIT_ON)
+ DriverEscapeFunctions.AUTOCOMMIT_ON)
  cur.close()

  def _get_result(self, result_dict=None):

@@ -2262,7 +2321,7 @@
  awu_matrix.append(['quotechar', self.quotechar, True, (str)])
  awu_matrix.append(['catch_errors_warnings', self.catch_errors_warnings, False, (bool)])
  awu_matrix.append(['use_fastload', self.use_fastload, False, (bool)])
- awu_matrix.append(['open_sessions',self.open_sessions, True, (int), False])
+ awu_matrix.append(['open_sessions', self.open_sessions, True, (int), False])
  awu_matrix.append(['chunksize', self.chunksize, False, (int)])
  awu_matrix.append(['match_column_order', self.match_column_order, True, (bool)])
  if isinstance(self.df, pd.DataFrame):

@@ -2307,8 +2366,8 @@

  if (is_multi_index and ((isinstance(self.index_label, str) and index_levels != 1) or
  (is_index_list and index_levels != len(self.index_label)))) or \
- (not is_multi_index and is_index_list and
- (is_index_list and num_index > 1)):
+ (not is_multi_index and is_index_list and
+ (is_index_list and num_index > 1)):
  valid_arg_msg = 'String or list of Strings with the number of ' \
  'Strings matching the number of levels' \
  ' in the index'
@@ -2522,16 +2581,18 @@

  # Load the data from CSV to staging table.
  rc_dict = self._insert_from_csv_with_fastload(table_name=stag_table_name,
- column_names=column_names)
+ column_names=column_names)

  # Insert all rows from staging table to already existing table.
  df_utils._insert_all_from_table(self.table_name,
- self._get_fully_qualified_table_name(stag_table_name),
+ stag_table_name,
  column_names,
- self.schema_name)
+ to_schema_name=self.schema_name,
+ from_schema_name=self.schema_name)

  return rc_dict
  finally:
+ # Drop the staging table.
  if stage_table_created:
  UtilFuncs._drop_table(self._get_fully_qualified_table_name(stag_table_name))

@@ -2733,7 +2794,7 @@
  # Get open_sessions argument.
  open_sessions = kwargs.pop("open_sessions", None)
  if not require_fastexport and open_sessions is not None:
- raise TeradataMlException("'{0}' can only be used when '{1}' is set to True."\
+ raise TeradataMlException("'{0}' can only be used when '{1}' is set to True." \
  .format("open_sessions", "fastexport or require"),
  MessageCodes.DEPENDENT_ARGUMENT)
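For reference, the dependency this check enforces: open_sessions is honoured only when the FastExport path is actually requested. A hedged sketch (the DataFrame name is a placeholder, and keyword support is assumed to follow the fastexport/to_pandas docstrings in this release):

    from teradataml import DataFrame, fastexport

    tdf = DataFrame("sales_fl")

    # Valid: FastExport is requested, so open_sessions is applied to the job.
    pdf = fastexport(tdf, export_to="pandas", open_sessions=4)

    # Raises the DEPENDENT_ARGUMENT error shown above: open_sessions without
    # requesting fastexport (or require) on the to_pandas() path.
    # tdf.to_pandas(open_sessions=4)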