teradataml 20.0.0.0__py3-none-any.whl → 20.0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of teradataml might be problematic. Click here for more details.

Files changed (263)
  1. teradataml/LICENSE-3RD-PARTY.pdf +0 -0
  2. teradataml/LICENSE.pdf +0 -0
  3. teradataml/README.md +183 -0
  4. teradataml/__init__.py +6 -3
  5. teradataml/_version.py +2 -2
  6. teradataml/analytics/__init__.py +3 -2
  7. teradataml/analytics/analytic_function_executor.py +275 -40
  8. teradataml/analytics/analytic_query_generator.py +92 -0
  9. teradataml/analytics/byom/__init__.py +3 -2
  10. teradataml/analytics/json_parser/metadata.py +1 -0
  11. teradataml/analytics/json_parser/utils.py +17 -21
  12. teradataml/analytics/meta_class.py +40 -1
  13. teradataml/analytics/sqle/DecisionTreePredict.py +1 -1
  14. teradataml/analytics/sqle/__init__.py +10 -2
  15. teradataml/analytics/table_operator/__init__.py +3 -2
  16. teradataml/analytics/uaf/__init__.py +21 -2
  17. teradataml/analytics/utils.py +62 -1
  18. teradataml/analytics/valib.py +1 -1
  19. teradataml/automl/__init__.py +1553 -319
  20. teradataml/automl/custom_json_utils.py +139 -61
  21. teradataml/automl/data_preparation.py +276 -319
  22. teradataml/automl/data_transformation.py +163 -81
  23. teradataml/automl/feature_engineering.py +402 -239
  24. teradataml/automl/feature_exploration.py +9 -2
  25. teradataml/automl/model_evaluation.py +48 -51
  26. teradataml/automl/model_training.py +291 -189
  27. teradataml/catalog/byom.py +8 -8
  28. teradataml/catalog/model_cataloging_utils.py +1 -1
  29. teradataml/clients/auth_client.py +133 -0
  30. teradataml/clients/pkce_client.py +1 -1
  31. teradataml/common/aed_utils.py +3 -2
  32. teradataml/common/constants.py +48 -6
  33. teradataml/common/deprecations.py +13 -7
  34. teradataml/common/garbagecollector.py +156 -120
  35. teradataml/common/messagecodes.py +6 -1
  36. teradataml/common/messages.py +3 -1
  37. teradataml/common/sqlbundle.py +1 -1
  38. teradataml/common/utils.py +103 -11
  39. teradataml/common/wrapper_utils.py +1 -1
  40. teradataml/context/context.py +121 -31
  41. teradataml/data/advertising.csv +201 -0
  42. teradataml/data/bank_marketing.csv +11163 -0
  43. teradataml/data/bike_sharing.csv +732 -0
  44. teradataml/data/boston2cols.csv +721 -0
  45. teradataml/data/breast_cancer.csv +570 -0
  46. teradataml/data/complaints_test_tokenized.csv +353 -0
  47. teradataml/data/complaints_tokens_model.csv +348 -0
  48. teradataml/data/covid_confirm_sd.csv +83 -0
  49. teradataml/data/customer_segmentation_test.csv +2628 -0
  50. teradataml/data/customer_segmentation_train.csv +8069 -0
  51. teradataml/data/dataframe_example.json +10 -0
  52. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingFit.py +3 -1
  53. teradataml/data/docs/sqle/docs_17_10/OneHotEncodingTransform.py +6 -0
  54. teradataml/data/docs/sqle/docs_17_10/OutlierFilterTransform.py +5 -1
  55. teradataml/data/docs/sqle/docs_17_20/ANOVA.py +61 -1
  56. teradataml/data/docs/sqle/docs_17_20/CFilter.py +132 -0
  57. teradataml/data/docs/sqle/docs_17_20/ColumnTransformer.py +2 -0
  58. teradataml/data/docs/sqle/docs_17_20/FTest.py +105 -26
  59. teradataml/data/docs/sqle/docs_17_20/GLM.py +162 -1
  60. teradataml/data/docs/sqle/docs_17_20/GetFutileColumns.py +5 -3
  61. teradataml/data/docs/sqle/docs_17_20/KMeans.py +48 -1
  62. teradataml/data/docs/sqle/docs_17_20/NaiveBayes.py +162 -0
  63. teradataml/data/docs/sqle/docs_17_20/NonLinearCombineFit.py +3 -2
  64. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingFit.py +5 -0
  65. teradataml/data/docs/sqle/docs_17_20/OneHotEncodingTransform.py +6 -0
  66. teradataml/data/docs/sqle/docs_17_20/OutlierFilterFit.py +2 -0
  67. teradataml/data/docs/sqle/docs_17_20/Pivoting.py +279 -0
  68. teradataml/data/docs/sqle/docs_17_20/ROC.py +3 -2
  69. teradataml/data/docs/sqle/docs_17_20/SVMPredict.py +13 -2
  70. teradataml/data/docs/sqle/docs_17_20/ScaleFit.py +119 -1
  71. teradataml/data/docs/sqle/docs_17_20/ScaleTransform.py +93 -1
  72. teradataml/data/docs/sqle/docs_17_20/Shap.py +197 -0
  73. teradataml/data/docs/sqle/docs_17_20/TDGLMPredict.py +163 -1
  74. teradataml/data/docs/sqle/docs_17_20/TDNaiveBayesPredict.py +189 -0
  75. teradataml/data/docs/sqle/docs_17_20/TFIDF.py +142 -0
  76. teradataml/data/docs/sqle/docs_17_20/Unpivoting.py +216 -0
  77. teradataml/data/docs/sqle/docs_17_20/XGBoost.py +12 -4
  78. teradataml/data/docs/sqle/docs_17_20/XGBoostPredict.py +7 -1
  79. teradataml/data/docs/sqle/docs_17_20/ZTest.py +72 -7
  80. teradataml/data/docs/uaf/docs_17_20/ACF.py +1 -10
  81. teradataml/data/docs/uaf/docs_17_20/ArimaEstimate.py +1 -1
  82. teradataml/data/docs/uaf/docs_17_20/ArimaForecast.py +35 -5
  83. teradataml/data/docs/uaf/docs_17_20/ArimaValidate.py +3 -1
  84. teradataml/data/docs/uaf/docs_17_20/ArimaXEstimate.py +293 -0
  85. teradataml/data/docs/uaf/docs_17_20/AutoArima.py +354 -0
  86. teradataml/data/docs/uaf/docs_17_20/BreuschGodfrey.py +3 -2
  87. teradataml/data/docs/uaf/docs_17_20/BreuschPaganGodfrey.py +1 -1
  88. teradataml/data/docs/uaf/docs_17_20/Convolve.py +13 -10
  89. teradataml/data/docs/uaf/docs_17_20/Convolve2.py +4 -1
  90. teradataml/data/docs/uaf/docs_17_20/CumulPeriodogram.py +5 -4
  91. teradataml/data/docs/uaf/docs_17_20/DFFT2Conv.py +4 -4
  92. teradataml/data/docs/uaf/docs_17_20/DWT.py +235 -0
  93. teradataml/data/docs/uaf/docs_17_20/DWT2D.py +214 -0
  94. teradataml/data/docs/uaf/docs_17_20/DurbinWatson.py +1 -1
  95. teradataml/data/docs/uaf/docs_17_20/ExtractResults.py +1 -1
  96. teradataml/data/docs/uaf/docs_17_20/FilterFactory1d.py +160 -0
  97. teradataml/data/docs/uaf/docs_17_20/GenseriesSinusoids.py +1 -1
  98. teradataml/data/docs/uaf/docs_17_20/GoldfeldQuandt.py +9 -31
  99. teradataml/data/docs/uaf/docs_17_20/HoltWintersForecaster.py +4 -2
  100. teradataml/data/docs/uaf/docs_17_20/IDFFT2.py +1 -8
  101. teradataml/data/docs/uaf/docs_17_20/IDWT.py +236 -0
  102. teradataml/data/docs/uaf/docs_17_20/IDWT2D.py +226 -0
  103. teradataml/data/docs/uaf/docs_17_20/IQR.py +134 -0
  104. teradataml/data/docs/uaf/docs_17_20/LineSpec.py +1 -1
  105. teradataml/data/docs/uaf/docs_17_20/LinearRegr.py +2 -2
  106. teradataml/data/docs/uaf/docs_17_20/MAMean.py +3 -3
  107. teradataml/data/docs/uaf/docs_17_20/Matrix2Image.py +297 -0
  108. teradataml/data/docs/uaf/docs_17_20/MatrixMultiply.py +15 -6
  109. teradataml/data/docs/uaf/docs_17_20/PACF.py +0 -1
  110. teradataml/data/docs/uaf/docs_17_20/Portman.py +2 -2
  111. teradataml/data/docs/uaf/docs_17_20/PowerSpec.py +2 -2
  112. teradataml/data/docs/uaf/docs_17_20/Resample.py +9 -1
  113. teradataml/data/docs/uaf/docs_17_20/SAX.py +246 -0
  114. teradataml/data/docs/uaf/docs_17_20/SeasonalNormalize.py +17 -10
  115. teradataml/data/docs/uaf/docs_17_20/SignifPeriodicities.py +1 -1
  116. teradataml/data/docs/uaf/docs_17_20/WhitesGeneral.py +3 -1
  117. teradataml/data/docs/uaf/docs_17_20/WindowDFFT.py +368 -0
  118. teradataml/data/dwt2d_dataTable.csv +65 -0
  119. teradataml/data/dwt_dataTable.csv +8 -0
  120. teradataml/data/dwt_filterTable.csv +3 -0
  121. teradataml/data/finance_data4.csv +13 -0
  122. teradataml/data/glm_example.json +28 -1
  123. teradataml/data/grocery_transaction.csv +19 -0
  124. teradataml/data/housing_train_segment.csv +201 -0
  125. teradataml/data/idwt2d_dataTable.csv +5 -0
  126. teradataml/data/idwt_dataTable.csv +8 -0
  127. teradataml/data/idwt_filterTable.csv +3 -0
  128. teradataml/data/insect2Cols.csv +61 -0
  129. teradataml/data/interval_data.csv +5 -0
  130. teradataml/data/jsons/paired_functions.json +14 -0
  131. teradataml/data/jsons/sqle/17.20/TD_ANOVA.json +99 -27
  132. teradataml/data/jsons/sqle/17.20/TD_CFilter.json +118 -0
  133. teradataml/data/jsons/sqle/17.20/TD_FTest.json +166 -83
  134. teradataml/data/jsons/sqle/17.20/TD_GLM.json +90 -14
  135. teradataml/data/jsons/sqle/17.20/TD_GLMPREDICT.json +48 -5
  136. teradataml/data/jsons/sqle/17.20/TD_GetFutileColumns.json +5 -3
  137. teradataml/data/jsons/sqle/17.20/TD_KMeans.json +31 -11
  138. teradataml/data/jsons/sqle/17.20/TD_NaiveBayes.json +193 -0
  139. teradataml/data/jsons/sqle/17.20/TD_NaiveBayesPredict.json +212 -0
  140. teradataml/data/jsons/sqle/17.20/TD_NonLinearCombineFit.json +3 -2
  141. teradataml/data/jsons/sqle/17.20/TD_OneClassSVM.json +9 -9
  142. teradataml/data/jsons/sqle/17.20/TD_Pivoting.json +280 -0
  143. teradataml/data/jsons/sqle/17.20/TD_ROC.json +2 -1
  144. teradataml/data/jsons/sqle/17.20/TD_SVM.json +16 -16
  145. teradataml/data/jsons/sqle/17.20/TD_SVMPredict.json +19 -1
  146. teradataml/data/jsons/sqle/17.20/TD_ScaleFit.json +168 -15
  147. teradataml/data/jsons/sqle/17.20/TD_ScaleTransform.json +50 -1
  148. teradataml/data/jsons/sqle/17.20/TD_Shap.json +222 -0
  149. teradataml/data/jsons/sqle/17.20/TD_TFIDF.json +162 -0
  150. teradataml/data/jsons/sqle/17.20/TD_Unpivoting.json +235 -0
  151. teradataml/data/jsons/sqle/17.20/TD_XGBoost.json +25 -7
  152. teradataml/data/jsons/sqle/17.20/TD_XGBoostPredict.json +17 -4
  153. teradataml/data/jsons/sqle/17.20/TD_ZTest.json +157 -80
  154. teradataml/data/jsons/storedprocedure/17.20/TD_FILTERFACTORY1D.json +150 -0
  155. teradataml/data/jsons/uaf/17.20/TD_ACF.json +1 -18
  156. teradataml/data/jsons/uaf/17.20/TD_ARIMAESTIMATE.json +3 -16
  157. teradataml/data/jsons/uaf/17.20/TD_ARIMAFORECAST.json +0 -3
  158. teradataml/data/jsons/uaf/17.20/TD_ARIMAVALIDATE.json +5 -3
  159. teradataml/data/jsons/uaf/17.20/TD_ARIMAXESTIMATE.json +362 -0
  160. teradataml/data/jsons/uaf/17.20/TD_AUTOARIMA.json +469 -0
  161. teradataml/data/jsons/uaf/17.20/TD_BINARYMATRIXOP.json +0 -3
  162. teradataml/data/jsons/uaf/17.20/TD_BINARYSERIESOP.json +0 -2
  163. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_GODFREY.json +2 -1
  164. teradataml/data/jsons/uaf/17.20/TD_BREUSCH_PAGAN_GODFREY.json +2 -5
  165. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE.json +3 -6
  166. teradataml/data/jsons/uaf/17.20/TD_CONVOLVE2.json +1 -3
  167. teradataml/data/jsons/uaf/17.20/TD_CUMUL_PERIODOGRAM.json +0 -5
  168. teradataml/data/jsons/uaf/17.20/TD_DFFT.json +1 -4
  169. teradataml/data/jsons/uaf/17.20/TD_DFFT2.json +2 -7
  170. teradataml/data/jsons/uaf/17.20/TD_DFFT2CONV.json +1 -2
  171. teradataml/data/jsons/uaf/17.20/TD_DFFTCONV.json +0 -2
  172. teradataml/data/jsons/uaf/17.20/TD_DTW.json +3 -6
  173. teradataml/data/jsons/uaf/17.20/TD_DWT.json +173 -0
  174. teradataml/data/jsons/uaf/17.20/TD_DWT2D.json +160 -0
  175. teradataml/data/jsons/uaf/17.20/TD_FITMETRICS.json +1 -1
  176. teradataml/data/jsons/uaf/17.20/TD_GOLDFELD_QUANDT.json +16 -30
  177. teradataml/data/jsons/uaf/17.20/{TD_HOLT_WINTERS_FORECAST.json → TD_HOLT_WINTERS_FORECASTER.json} +1 -2
  178. teradataml/data/jsons/uaf/17.20/TD_IDFFT2.json +1 -15
  179. teradataml/data/jsons/uaf/17.20/TD_IDWT.json +162 -0
  180. teradataml/data/jsons/uaf/17.20/TD_IDWT2D.json +149 -0
  181. teradataml/data/jsons/uaf/17.20/TD_IQR.json +117 -0
  182. teradataml/data/jsons/uaf/17.20/TD_LINEAR_REGR.json +1 -1
  183. teradataml/data/jsons/uaf/17.20/TD_LINESPEC.json +1 -1
  184. teradataml/data/jsons/uaf/17.20/TD_MAMEAN.json +1 -3
  185. teradataml/data/jsons/uaf/17.20/TD_MATRIX2IMAGE.json +209 -0
  186. teradataml/data/jsons/uaf/17.20/TD_PACF.json +2 -2
  187. teradataml/data/jsons/uaf/17.20/TD_POWERSPEC.json +5 -5
  188. teradataml/data/jsons/uaf/17.20/TD_RESAMPLE.json +48 -28
  189. teradataml/data/jsons/uaf/17.20/TD_SAX.json +208 -0
  190. teradataml/data/jsons/uaf/17.20/TD_SEASONALNORMALIZE.json +12 -6
  191. teradataml/data/jsons/uaf/17.20/TD_SIMPLEEXP.json +0 -1
  192. teradataml/data/jsons/uaf/17.20/TD_TRACKINGOP.json +8 -8
  193. teradataml/data/jsons/uaf/17.20/TD_UNDIFF.json +1 -1
  194. teradataml/data/jsons/uaf/17.20/TD_UNNORMALIZE.json +1 -1
  195. teradataml/data/jsons/uaf/17.20/TD_WINDOWDFFT.json +400 -0
  196. teradataml/data/kmeans_example.json +5 -0
  197. teradataml/data/kmeans_table.csv +10 -0
  198. teradataml/data/load_example_data.py +8 -2
  199. teradataml/data/naivebayestextclassifier_example.json +1 -1
  200. teradataml/data/naivebayestextclassifierpredict_example.json +11 -0
  201. teradataml/data/onehot_encoder_train.csv +4 -0
  202. teradataml/data/openml_example.json +29 -0
  203. teradataml/data/peppers.png +0 -0
  204. teradataml/data/real_values.csv +14 -0
  205. teradataml/data/sax_example.json +8 -0
  206. teradataml/data/scale_attributes.csv +3 -0
  207. teradataml/data/scale_example.json +52 -1
  208. teradataml/data/scale_input_part_sparse.csv +31 -0
  209. teradataml/data/scale_input_partitioned.csv +16 -0
  210. teradataml/data/scale_input_sparse.csv +11 -0
  211. teradataml/data/scale_parameters.csv +3 -0
  212. teradataml/data/scripts/deploy_script.py +21 -2
  213. teradataml/data/scripts/sklearn/sklearn_fit.py +40 -37
  214. teradataml/data/scripts/sklearn/sklearn_fit_predict.py +22 -30
  215. teradataml/data/scripts/sklearn/sklearn_function.template +42 -24
  216. teradataml/data/scripts/sklearn/sklearn_model_selection_split.py +23 -33
  217. teradataml/data/scripts/sklearn/sklearn_neighbors.py +19 -28
  218. teradataml/data/scripts/sklearn/sklearn_score.py +32 -32
  219. teradataml/data/scripts/sklearn/sklearn_transform.py +85 -42
  220. teradataml/data/star_pivot.csv +8 -0
  221. teradataml/data/templates/open_source_ml.json +2 -1
  222. teradataml/data/teradataml_example.json +97 -1
  223. teradataml/data/timestamp_data.csv +4 -0
  224. teradataml/data/titanic_dataset_unpivoted.csv +19 -0
  225. teradataml/data/uaf_example.json +55 -1
  226. teradataml/data/unpivot_example.json +15 -0
  227. teradataml/data/url_data.csv +9 -0
  228. teradataml/data/windowdfft.csv +16 -0
  229. teradataml/data/ztest_example.json +16 -0
  230. teradataml/dataframe/copy_to.py +9 -4
  231. teradataml/dataframe/data_transfer.py +125 -64
  232. teradataml/dataframe/dataframe.py +575 -57
  233. teradataml/dataframe/dataframe_utils.py +47 -9
  234. teradataml/dataframe/fastload.py +273 -90
  235. teradataml/dataframe/functions.py +339 -0
  236. teradataml/dataframe/row.py +160 -0
  237. teradataml/dataframe/setop.py +2 -2
  238. teradataml/dataframe/sql.py +740 -18
  239. teradataml/dataframe/window.py +1 -1
  240. teradataml/dbutils/dbutils.py +324 -18
  241. teradataml/geospatial/geodataframe.py +1 -1
  242. teradataml/geospatial/geodataframecolumn.py +1 -1
  243. teradataml/hyperparameter_tuner/optimizer.py +13 -13
  244. teradataml/lib/aed_0_1.dll +0 -0
  245. teradataml/opensource/sklearn/_sklearn_wrapper.py +254 -122
  246. teradataml/options/__init__.py +16 -5
  247. teradataml/options/configure.py +39 -6
  248. teradataml/options/display.py +2 -2
  249. teradataml/plot/axis.py +4 -4
  250. teradataml/scriptmgmt/UserEnv.py +26 -19
  251. teradataml/scriptmgmt/lls_utils.py +120 -16
  252. teradataml/table_operators/Script.py +4 -5
  253. teradataml/table_operators/TableOperator.py +160 -26
  254. teradataml/table_operators/table_operator_util.py +88 -41
  255. teradataml/table_operators/templates/dataframe_udf.template +63 -0
  256. teradataml/telemetry_utils/__init__.py +0 -0
  257. teradataml/telemetry_utils/queryband.py +52 -0
  258. teradataml/utils/validators.py +41 -3
  259. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/METADATA +191 -6
  260. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/RECORD +263 -185
  261. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/WHEEL +0 -0
  262. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/top_level.txt +0 -0
  263. {teradataml-20.0.0.0.dist-info → teradataml-20.0.0.2.dist-info}/zip-safe +0 -0
@@ -22,6 +22,7 @@ from teradataml.common.messages import Messages
22
22
  from teradataml.common.messagecodes import MessageCodes
23
23
  from teradataml.common.constants import TeradataConstants
24
24
  from teradataml.options.configure import configure
25
+ from teradataml.utils.internal_buffer import _InternalBuffer
25
26
  from teradatasql import OperationalError
26
27
  import psutil
27
28
  import getpass
@@ -36,15 +37,22 @@ class GarbageCollector():
36
37
  garbage collection, so that they can be dropped when connection is disconnected/lost.
37
38
  Writes to a output file where the database name & table/view/script names are persisted.
38
39
  """
39
- __garbage_persistent_file_name = getpass.getuser() + "_garbagecollect.info"
40
+ # Adding old garbage collector file name to support backward compatibility.
41
+ __old_garbage_persistent_file_name = getpass.getuser() + "_garbagecollect.info"
40
42
  __garbagecollector_folder_name = '.teradataml'
41
43
  __contentseperator = ","
44
+ __filenameseperator = "_"
42
45
  __version = "ver1.0"
43
46
  __gc_tables = []
44
47
  __gc_views = []
45
48
  __gc_scripts = []
46
49
  __gc_container = []
47
50
  __gc_apply = []
51
+ # Function to get the garbage collector file name specific to host and process.
52
+ _get_gc_file_name = lambda: "{}_{}_{}_garbagecollect.info".format(
53
+ getpass.getuser(),
54
+ tdmlctx.context._get_host_ip(),
55
+ str(os.getpid()))
48
56
 
49
57
  @staticmethod
50
58
  def _get_temp_dir_name():
@@ -64,8 +72,13 @@ class GarbageCollector():
64
72
  EXAMPLES:
65
73
  GarbageCollector._get_temp_dir_name()
66
74
  """
75
+ # Default location for .teradataml is user's home directory if configure.local_storage is not set.
67
76
  tempdir = expanduser("~")
68
77
  tempdir = os.path.join(tempdir, GarbageCollector.__garbagecollector_folder_name)
78
+
79
+ # set the .teradataml location to the location specified by the user.
80
+ if configure.local_storage:
81
+ tempdir = os.path.join(configure.local_storage, GarbageCollector.__garbagecollector_folder_name)
69
82
  return tempdir
70
83
 
71
84
  @staticmethod
@@ -88,7 +101,7 @@ class GarbageCollector():
88
101
  """
89
102
  tempdir = GarbageCollector._get_temp_dir_name()
90
103
  os.makedirs(tempdir, exist_ok=True)
91
- tempfile = os.path.join(os.path.sep, tempdir, GarbageCollector.__garbage_persistent_file_name)
104
+ tempfile = os.path.join(os.path.sep, tempdir, GarbageCollector._get_gc_file_name())
92
105
  return tempfile
93
106
 
94
107
  @staticmethod
@@ -200,29 +213,31 @@ class GarbageCollector():
200
213
  EXAMPLES:
201
214
  GarbageCollector._add_to_garbagecollector(object_name = "temp"."temp_table1")
202
215
  """
203
- if object_name and object_type:
204
- try:
205
- tempfilename = GarbageCollector.__make_temp_file_name()
206
- writecontent = str(GarbageCollector.__version) + "," + str(os.getpid())
207
- writecontent += "," + str(object_type.value)
208
- writecontent += "," + object_name + "\n"
209
- with open(tempfilename, 'a+') as fgc:
210
- fgc.write(writecontent)
211
- if configure._validate_gc:
212
- GarbageCollector.__validate_gc_add_object(object_name, object_type)
213
- except TeradataMlException:
214
- raise
215
- except Exception as err:
216
- logger.error(Messages.get_message(MessageCodes.TDMLDF_CREATE_GARBAGE_COLLECTOR) + str(err))
217
- raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_CREATE_GARBAGE_COLLECTOR),
218
- MessageCodes.TDMLDF_CREATE_GARBAGE_COLLECTOR) from err
219
- finally:
220
- if fgc is not None:
221
- fgc.close()
216
+ # Use global lock while writing to the garbage collector file.
217
+ with _InternalBuffer.get("global_lock"):
218
+ if object_name and object_type:
219
+ try:
220
+ tempfilename = GarbageCollector.__make_temp_file_name()
221
+ writecontent = str(GarbageCollector.__version) + "," + str(os.getpid())
222
+ writecontent += "," + str(object_type.value)
223
+ writecontent += "," + object_name + "\n"
224
+ with open(tempfilename, 'a+') as fgc:
225
+ fgc.write(writecontent)
226
+ if configure._validate_gc:
227
+ GarbageCollector.__validate_gc_add_object(object_name, object_type)
228
+ except TeradataMlException:
229
+ raise
230
+ except Exception as err:
231
+ logger.error(Messages.get_message(MessageCodes.TDMLDF_CREATE_GARBAGE_COLLECTOR) + str(err))
232
+ raise TeradataMlException(Messages.get_message(MessageCodes.TDMLDF_CREATE_GARBAGE_COLLECTOR),
233
+ MessageCodes.TDMLDF_CREATE_GARBAGE_COLLECTOR) from err
234
+ finally:
235
+ if fgc is not None:
236
+ fgc.close()
222
237
  return True
223
238
 
224
239
  @staticmethod
225
- def __deleterow(content_row):
240
+ def __deleterow(content_row, file_name):
226
241
  """
227
242
  DESCRIPTION:
228
243
  Deletes an entry from persisted file.
@@ -232,6 +247,11 @@ class GarbageCollector():
232
247
  Required Argument.
233
248
  Specifies the text of row to delete from the persisted file.
234
249
  Types: str
250
+
251
+ file_name:
252
+ Required Argument.
253
+ Specifies the name of the file to delete the row.
254
+ Types: str
235
255
 
236
256
  RETURNS:
237
257
  None.
@@ -243,10 +263,9 @@ class GarbageCollector():
243
263
  GarbageCollector._deleterow(content_row = 'ver1.0,72136,3,"alice"."temp_table_gbview1"')
244
264
  """
245
265
  try:
246
- tempfilename = GarbageCollector.__make_temp_file_name()
247
- if not os.path.isfile(tempfilename):
266
+ if not os.path.isfile(file_name):
248
267
  return True
249
- with open(tempfilename, 'r+') as fgc:
268
+ with open(file_name, 'r+') as fgc:
250
269
  output = fgc.readlines()
251
270
  fgc.seek(0)
252
271
  for dbtablename in output:
@@ -486,104 +505,121 @@ class GarbageCollector():
486
505
  """
487
506
  try:
488
507
  td_connection = tdmlctx.context.get_connection()
489
- tempfilename = GarbageCollector.__make_temp_file_name()
490
- if not os.path.isfile(tempfilename):
508
+ # Get the temp directory where garbage collector file is persisted.
509
+ tempdir = GarbageCollector._get_temp_dir_name()
510
+ # Garbage collect file that is created by the current host and current process.
511
+ # Also check if file is not of current process and associated process is
512
+ # currently running in the system or not.
513
+ # Walk through the temp directory and filter garbage collector files.
514
+ tempfiles = []
515
+ for root, _, files in os.walk(tempdir):
516
+ for file in files:
517
+ if file.endswith('_garbagecollect.info'):
518
+ try:
519
+ filepath = os.path.join(root, file)
520
+ fileparts = file.split(GarbageCollector.__filenameseperator)
521
+ hostname = fileparts[1]
522
+ filepid = int(fileparts[2])
523
+ if hostname == tdmlctx.context._get_host_ip():
524
+ if filepid == os.getpid() or not psutil.pid_exists(filepid):
525
+ tempfiles.append(filepath)
526
+ except (IndexError, ValueError):
527
+ # Handle the case where the filename format is not as expected
528
+ # check if old garbage collector file is present.
529
+ if file == GarbageCollector.__old_garbage_persistent_file_name:
530
+ tempfiles.append(filepath)
531
+
532
+ # Process each garbage collector file.
533
+ if len(tempfiles) == 0:
491
534
  return True
492
- with open(tempfilename, 'r+') as fgc:
493
- content = fgc.readlines()
494
-
495
- for contentrecord in content:
496
- contentrecord = contentrecord.strip()
497
-
498
- if (td_connection is not None) and (len(contentrecord) > 0):
499
- try:
500
- recordparts = contentrecord.split(GarbageCollector.__contentseperator)
501
- version = recordparts[0]
502
- contentpid = int(recordparts[1].strip())
503
  - # Check and garbage collect even current running process at exit.
504
- # Check if contentpid is not of current process as well as any
505
- # currently running process in the system
506
- proceed_to_cleanup = False
507
- if contentpid != int(os.getpid()):
508
- if not psutil.pid_exists(contentpid):
509
- proceed_to_cleanup = True
510
- else:
511
- proceed_to_cleanup = True
512
- if proceed_to_cleanup == True:
513
- object_type = int(recordparts[2].strip())
514
- database_object = recordparts[3].strip()
515
-
516
- # Create the TeradataConstant to use with __delete_object_from_gc_list().
517
- object_type_enum = TeradataConstants(object_type)
518
-
535
+ else:
536
+ for tempfilename in tempfiles:
537
+ if not os.path.isfile(tempfilename):
538
+ return True
539
+ with open(tempfilename, 'r+') as fgc:
540
+ content = fgc.readlines()
541
+
542
+ for contentrecord in content:
543
+ contentrecord = contentrecord.strip()
544
+ if (td_connection is not None) and (len(contentrecord) > 0):
519
545
  try:
520
- # Drop the table/view/script/container based on database object type retrieved from the collector file.
521
- # # Drop table.
522
- if TeradataConstants.TERADATA_TABLE.value == object_type:
523
- tdmlutil.utils.UtilFuncs._drop_table(database_object,
524
- check_table_exist=False)
525
-
526
- # # Drop view.
527
- elif TeradataConstants.TERADATA_VIEW.value == object_type:
528
- tdmlutil.utils.UtilFuncs._drop_view(database_object,
529
- check_view_exist=False)
530
-
531
- elif object_type in [TeradataConstants.TERADATA_LOCAL_SCRIPT.value,
532
- TeradataConstants.TERADATA_TEXT_FILE.value]:
533
- GarbageCollector.__delete_gc_tempdir_local_file(database_object, object_type)
534
-
535
- # # Drop Apply script.
536
- elif TeradataConstants.TERADATA_APPLY.value == object_type:
537
- tdmlutil.utils.UtilFuncs._delete_script(database_object,
538
- file_type=object_type_enum)
539
- # Delete the script locally
540
- GarbageCollector.__delete_gc_tempdir_local_file(database_object, object_type)
541
-
542
- # # Drop STO script.
543
- else:
544
- tdmlutil.utils.UtilFuncs._delete_script(database_object,
545
- file_type=object_type_enum,
546
- check_script_exist=False)
547
- # Delete the script locally
548
- GarbageCollector.__delete_gc_tempdir_local_file(database_object, object_type)
549
-
550
- # Finally, delete the entry from gc lists if required.
551
- GarbageCollector.__delete_object_from_gc_list(database_object,
552
- object_type_enum)
553
-
554
- # Remove the entry for a table/view from GC, after it has been dropped.
555
- GarbageCollector.__deleterow(contentrecord)
556
- except OperationalError as operr:
557
- # Remove the entry for a table/view/script even after drop has failed,
558
- # if that object does not exist.
559
- # Also added additional check for error when the database containing
560
- # the object doesn't exist anymore.
561
- if "[Teradata Database] [Error 3802] Database" in str(operr) or \
562
- "[Teradata Database] [Error 3807] Object" in str(operr) or \
563
- "[Teradata Database] [Error 9852] The file" in str(operr):
564
- GarbageCollector.__deleterow(contentrecord)
565
- # Delete entry from gc lists of required.
566
- GarbageCollector.__delete_object_from_gc_list(database_object,
567
- object_type_enum)
568
- except (TeradataMlException, RuntimeError) as err:
569
- if "Failed to execute get_env" in str(err) or \
570
- "Failed to execute remove_file" in str(err):
571
- # For removing files in OpenAF environment.
572
- GarbageCollector.__deleterow(contentrecord)
573
- # Delete entry from gc lists of required.
546
+ recordparts = contentrecord.split(GarbageCollector.__contentseperator)
547
+ object_type = int(recordparts[2].strip())
548
+ database_object = recordparts[3].strip()
549
+
550
+ # Create the TeradataConstant to use with __delete_object_from_gc_list().
551
+ object_type_enum = TeradataConstants(object_type)
552
+
553
+ try:
554
+ # Drop the table/view/script/container based on database object type retrieved from the collector file.
555
+ # # Drop table.
556
+ if TeradataConstants.TERADATA_TABLE.value == object_type:
557
+ tdmlutil.utils.UtilFuncs._drop_table(database_object,
558
+ check_table_exist=False)
559
+
560
+ # # Drop view.
561
+ elif TeradataConstants.TERADATA_VIEW.value == object_type:
562
+ tdmlutil.utils.UtilFuncs._drop_view(database_object,
563
+ check_view_exist=False)
564
+
565
+ elif object_type in [TeradataConstants.TERADATA_LOCAL_SCRIPT.value,
566
+ TeradataConstants.TERADATA_TEXT_FILE.value]:
567
+ GarbageCollector.__delete_gc_tempdir_local_file(database_object, object_type)
568
+
569
+ # # Drop Apply script.
570
+ elif TeradataConstants.TERADATA_APPLY.value == object_type:
571
+ tdmlutil.utils.UtilFuncs._delete_script(database_object,
572
+ file_type=object_type_enum)
573
+ # Delete the script locally
574
+ GarbageCollector.__delete_gc_tempdir_local_file(database_object, object_type)
575
+
576
+ # # Drop STO script.
577
+ else:
578
+ tdmlutil.utils.UtilFuncs._delete_script(database_object,
579
+ file_type=object_type_enum,
580
+ check_script_exist=False)
581
+ # Delete the script locally
582
+ GarbageCollector.__delete_gc_tempdir_local_file(database_object, object_type)
583
+
584
+ # Remove the entry for a table/view from GC, after it has been dropped.
585
+ GarbageCollector.__deleterow(contentrecord, tempfilename)
586
+
587
+ # Finally, delete the entry from gc lists if required.
574
588
  GarbageCollector.__delete_object_from_gc_list(database_object,
575
- object_type_enum)
576
- except FileNotFoundError:
577
- # This will occur only when the item being deleted is a file,
578
- # and it's local copy is not found.
579
- GarbageCollector.__deleterow(contentrecord)
580
- if object_type == TeradataConstants.TERADATA_APPLY:
581
- GarbageCollector.__gc_apply.remove(database_object)
582
- elif object_type == TeradataConstants.TERADATA_SCRIPT:
583
- GarbageCollector.__gc_scripts.remove(database_object)
584
- except Exception as err:
585
- pass
586
- # logger.error(Messages.get_message(MessageCodes.TDMLDF_DELETE_GARBAGE_COLLECTOR) + str(err))
589
+ object_type_enum)
590
+ except OperationalError as operr:
591
+ # Remove the entry for a table/view/script even after drop has failed,
592
+ # if that object does not exist.
593
+ # Also added additional check for error when the database containing
594
+ # the object doesn't exist anymore.
595
+ if "[Teradata Database] [Error 3802] Database" in str(operr) or \
596
+ "[Teradata Database] [Error 3807] Object" in str(operr) or \
597
+ "[Teradata Database] [Error 9852] The file" in str(operr):
598
+ GarbageCollector.__deleterow(contentrecord, tempfilename)
599
  + # Delete entry from gc lists if required.
600
+ GarbageCollector.__delete_object_from_gc_list(database_object,
601
+ object_type_enum)
602
+ except (TeradataMlException, RuntimeError) as err:
603
+ if "Failed to execute get_env" in str(err) or \
604
+ "Failed to execute remove_file" in str(err):
605
+ # For removing files in OpenAF environment.
606
+ GarbageCollector.__deleterow(contentrecord, tempfilename)
607
  + # Delete entry from gc lists if required.
608
+ GarbageCollector.__delete_object_from_gc_list(database_object,
609
+ object_type_enum)
610
+ except FileNotFoundError:
611
+ # This will occur only when the item being deleted is a file,
612
+ # and it's local copy is not found.
613
+ GarbageCollector.__deleterow(contentrecord, tempfilename)
614
+ if object_type == TeradataConstants.TERADATA_APPLY:
615
+ GarbageCollector.__gc_apply.remove(database_object)
616
+ elif object_type == TeradataConstants.TERADATA_SCRIPT:
617
+ GarbageCollector.__gc_scripts.remove(database_object)
618
+ except Exception as err:
619
+ pass
620
+ # delete empty file itself after deleting the entry from the file
621
+ if os.path.getsize(tempfilename) == 0:
622
+ GarbageCollector._delete_local_file(tempfilename)
587
623
  except Exception as e:
588
624
  logger.error(Messages.get_message(MessageCodes.TDMLDF_DELETE_GARBAGE_COLLECTOR) + str(e))
589
625
  finally:
@@ -180,6 +180,7 @@ class ErrorInfoCodes(Enum):
180
180
  INVALID_LIST_LENGTH = 'TDML_2314'
181
181
 
182
182
  IMPORT_PYTHON_PACKAGE = 'TDML_2414'
183
+ PATH_NOT_FOUND = 'TDML_2415'
183
184
 
184
185
  # Script local run Error codes
185
186
  SCRIPT_LOCAL_RUN_ERROR = 'TDML_2410'
@@ -218,6 +219,7 @@ class ErrorInfoCodes(Enum):
218
219
  PARTITION_VALUES_NOT_MATCHING = 'TDML_2538'
219
220
  PARTITION_IN_BOTH_FIT_AND_PREDICT = 'TDML_2539'
220
221
  INVALID_PARTITIONING_COLS = 'TDML_2540'
222
+ TARGET_COL_NOT_FOUND_FOR_EVALUATE = 'TDML_2541'
221
223
 
222
224
  class MessageCodes(Enum):
223
225
  """
@@ -419,4 +421,7 @@ class MessageCodes(Enum):
419
421
  " feature columns."
420
422
  PARTITION_VALUES_NOT_MATCHING = "Values in training and test data partition columns should be same."
421
423
  PARTITION_IN_BOTH_FIT_AND_PREDICT = "Use \"partition_columns\" only if model is fitted with partition_column(s)."
422
- INVALID_PARTITIONING_COLS = "Provided partition_column(s) '{}' is/are not present in parent of '{}' DataFrame(s)."
424
+ INVALID_PARTITIONING_COLS = "Provided partition_column(s) '{}' is/are not present in parent of '{}' DataFrame(s)."
425
+ PATH_NOT_FOUND = "Specified local path '{}' not found. Please check the path."
426
+ TARGET_COL_NOT_FOUND_FOR_EVALUATE = "Target column '{}' not found in the passed dataFrame. "\
427
+ "evaluate() requires target column to be present in the dataFrame."
@@ -188,7 +188,9 @@ class Messages():
188
188
  [ErrorInfoCodes.PARTITIONING_COLS_IN_FEATURE_COLS, MessageCodes.PARTITIONING_COLS_IN_FEATURE_COLS],
189
189
  [ErrorInfoCodes.PARTITION_VALUES_NOT_MATCHING, MessageCodes.PARTITION_VALUES_NOT_MATCHING],
190
190
  [ErrorInfoCodes.PARTITION_IN_BOTH_FIT_AND_PREDICT, MessageCodes.PARTITION_IN_BOTH_FIT_AND_PREDICT],
191
- [ErrorInfoCodes.INVALID_PARTITIONING_COLS, MessageCodes.INVALID_PARTITIONING_COLS]
191
+ [ErrorInfoCodes.INVALID_PARTITIONING_COLS, MessageCodes.INVALID_PARTITIONING_COLS],
192
+ [ErrorInfoCodes.PATH_NOT_FOUND, MessageCodes.PATH_NOT_FOUND],
193
+ [ErrorInfoCodes.TARGET_COL_NOT_FOUND_FOR_EVALUATE, MessageCodes.TARGET_COL_NOT_FOUND_FOR_EVALUATE]
192
194
  ]
193
195
 
194
196
  @staticmethod
@@ -47,7 +47,7 @@ class SQLBundle:
47
47
  [SQLConstants.SQL_HELP_COLUMNS, "help column {0}.*"],
48
48
  [SQLConstants.SQL_DROP_TABLE, "DROP TABLE {0}"],
49
49
  [SQLConstants.SQL_DROP_VIEW, "DROP VIEW {0}"],
50
- [SQLConstants.SQL_NROWS_FROM_QUERY, "SELECT COUNT(*) FROM {0}"],
50
+ [SQLConstants.SQL_NROWS_FROM_QUERY, "SELECT CAST(COUNT(*) AS BIGINT) FROM {0}"],
51
51
  [SQLConstants.SQL_TOP_NROWS_FROM_TABLEORVIEW, "select top {0} * from {1}"],
52
52
  [SQLConstants.SQL_INSERT_INTO_TABLE_VALUES, "insert into {0} values({1})"],
53
53
  [SQLConstants.SQL_SELECT_COLUMNNAMES_FROM, "sel {0} from ({1}) as {2}"],
@@ -13,6 +13,7 @@ by other classes which can be reused according to the need.
13
13
  Add all the common functions in this class like creating temporary table names, getting
14
14
  the datatypes etc.
15
15
  """
16
+ import json
16
17
  import uuid
17
18
  from math import floor
18
19
  import os, itertools
@@ -22,6 +23,7 @@ import sqlalchemy
22
23
  from pathlib import Path
23
24
  from numpy import number
24
25
  from sqlalchemy import Column, MetaData, Table
26
+
25
27
  from teradataml.context.context import get_connection
26
28
 
27
29
  from teradataml import _version
@@ -49,7 +51,7 @@ from teradatasqlalchemy.types import (BYTE, VARBYTE, BLOB)
49
51
  from teradatasqlalchemy.types import (CHAR, VARCHAR, CLOB)
50
52
  from functools import reduce
51
53
  import warnings
52
- from teradatasqlalchemy.telemetry.queryband import set_queryband, collect_queryband, get_qb_query
54
+ from teradataml.telemetry_utils.queryband import collect_queryband
53
55
  from teradataml.utils.utils import execute_sql
54
56
  from teradataml.utils.validators import _Validators
55
57
  from sqlalchemy.exc import OperationalError as sqlachemyOperationalError
@@ -300,6 +302,12 @@ class UtilFuncs():
300
302
  tabname = "{}_{}".format(tabname, prefix)
301
303
 
302
304
  tabname = "{}_{}".format(tabname, random_string)
305
+
306
+ # ELE-6710 - Use database user associated with the current context for volatile tables.
307
+ if table_type == TeradataConstants.TERADATA_VOLATILE_TABLE:
308
+ from teradataml.context.context import _get_user
309
+ tabname = "\"{}\".\"{}\"".format(_get_user(), tabname)
310
+ return tabname
303
311
 
304
312
  if use_default_database and databasename is None:
305
313
  tabname = "\"{}\".\"{}\"".format(tdmlctx._get_context_temp_databasename(
@@ -1851,7 +1859,7 @@ class UtilFuncs():
1851
1859
  con = tdmlctx.get_connection()
1852
1860
 
1853
1861
  if check_table_exists:
1854
- table_exists = con.dialect.has_table(con, table_name, schema_name)
1862
+ table_exists = con.dialect.has_table(con, table_name, schema_name, table_only=True)
1855
1863
 
1856
1864
  if not table_exists:
1857
1865
  raise TeradataMlException(Messages.get_message(MessageCodes.TABLE_DOES_NOT_EXIST, table_name),
@@ -2371,14 +2379,11 @@ class UtilFuncs():
2371
2379
  EXAMPLES:
2372
2380
  >>> self._get_python_execution_path()
2373
2381
  """
2382
+ # 'indb_install_location' expects python installation directory path.
2383
+ # Hence, postfixing python binary path.
2384
+ return "python" if UtilFuncs._is_lake() else \
2385
+ '{}/bin/python3'.format(configure.indb_install_location)
2374
2386
 
2375
- if UtilFuncs._is_lake():
2376
- return "python"
2377
- else:
2378
- if configure.indb_install_location == "/var/opt/teradata/languages/sles12sp3/Python/":
2379
- return '{}bin/python3'.format(configure.indb_install_location)
2380
- else:
2381
- return configure.indb_install_location
2382
2387
 
2383
2388
  def _is_view(tablename):
2384
2389
  """
@@ -2405,14 +2410,101 @@ class UtilFuncs():
2405
2410
  return True
2406
2411
  else:
2407
2412
  return False
2413
+
2408
2414
  @staticmethod
2409
2415
  def _set_queryband():
2416
+ from teradataml import session_queryband
2410
2417
  try:
2411
- qb_query = get_qb_query()
2418
+ qb_query = session_queryband.generate_set_queryband_query()
2412
2419
  execute_sql(qb_query)
2413
- except Exception:
2420
+ except Exception as _set_queryband_err:
2414
2421
  pass
2415
2422
 
2423
+ def _create_or_get_env(template):
2424
+ """
2425
+ DESCRIPTION:
2426
+ Internal function to return the environment if already exists else
2427
+ creates the environment using template file and return the environment.
2428
+
2429
+ PARAMETERS:
2430
+ template:
2431
+ Required Argument.
2432
+ Template json file name containing details of environment(s) to be created.
2433
+ Types: str
2434
+
2435
+ RAISES:
2436
+ TeradataMLException
2437
+
2438
+ RETURNS:
2439
+ An object of class UserEnv representing the user environment.
2440
+
2441
+ EXAMPLES:
2442
+ >>> self._create_or_get_env("open_source_ml.json")
2443
+ """
2444
+ # Get the template file path.
2445
+ from teradataml import _TDML_DIRECTORY
2446
+ from teradataml.scriptmgmt.lls_utils import create_env, get_env
2447
+ template_dir_path = os.path.join(_TDML_DIRECTORY, "data", "templates", template)
2448
+
2449
+ # Read template file.
2450
+ with open(template_dir_path, "r") as r_file:
2451
+ data = json.load(r_file)
2452
+
2453
+ # Get env_name.
2454
+ _env_name = data["env_specs"][0]["env_name"]
2455
+
2456
+ try:
2457
+ # Call function to get env.
2458
+ return get_env(_env_name)
2459
+ except TeradataMlException as tdml_e:
2460
+ # We will get here when error says, env does not exist otherwise raise the exception as is.
2461
+ # Env does not exist so create one.
2462
+
2463
+ exc_msg = "Failed to execute get_env(). User environment '{}' not " \
2464
+ "found.".format(_env_name)
2465
+ if exc_msg in tdml_e.args[0]:
2466
+ print(f"No OpenAF environment with name '{_env_name}' found. Creating one with "\
2467
+ "latest supported python and required packages.")
2468
+ return create_env(template=template_dir_path)
2469
+ else:
2470
+ raise tdml_e
2471
+ except Exception as exc:
2472
+ raise exc
2473
+
2474
+ def _get_env_name(col):
2475
+ """
2476
+ DESCRIPTION:
2477
+ Internal function to get the env name if passed with ColumnExpression
2478
+ else the default "openml_env".
2479
+
2480
+ PARAMETERS:
2481
+ col:
2482
+ Required Argument.
2483
+ Specifies teradataml DataFrame ColumnExpression.
2484
+ Types: teradataml DataFrame ColumnExpression
2485
+
2486
+ RAISES:
2487
+ None.
2488
+
2489
+ RETURNS:
2490
+ string
2491
+
2492
+ EXAMPLES:
2493
+ >>> self._get_env_name(col)
2494
+ """
2495
+
2496
+ # If env_name is passed with ColumnExpression fetch the env name,
2497
+ # else check if default "openml_user_env" env is configured or not,
2498
+ # else get the default "openml_env" env if exists or create new deafult env.
2499
+ if col._env_name is not None:
2500
+ from teradataml.scriptmgmt.UserEnv import UserEnv
2501
+ env = col._env_name
2502
+ env_name = env.env_name if isinstance(col._env_name, UserEnv) else env
2503
+ elif configure.openml_user_env is not None:
2504
+ env_name = configure.openml_user_env.env_name
2505
+ else:
2506
+ env_name = UtilFuncs._create_or_get_env("open_source_ml.json").env_name
2507
+ return env_name
2416
2508
 
2417
2509
  from teradataml.common.aed_utils import AedUtils
2418
2510
  from teradataml.dbutils.filemgr import remove_file
@@ -317,7 +317,7 @@ class AnalyticsWrapperUtils:
317
317
  #TODO: Add support for nested level query as in R.
318
318
  return table_ref
319
319
 
320
- def _validate_input_table_datatype(self, data, arg_name, reference_function_name = None):
320
+ def _validate_input_table_datatype(self, data, arg_name, reference_function_name=None):
321
321
  """
322
322
  Method to verify that the input table parameters of type DataFrame.
323
323