teradataml 20.0.0.6__py3-none-any.whl → 20.0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (96)
  1. teradataml/README.md +210 -0
  2. teradataml/__init__.py +1 -1
  3. teradataml/_version.py +1 -1
  4. teradataml/analytics/analytic_function_executor.py +162 -76
  5. teradataml/analytics/byom/__init__.py +1 -1
  6. teradataml/analytics/json_parser/__init__.py +2 -0
  7. teradataml/analytics/json_parser/analytic_functions_argument.py +95 -2
  8. teradataml/analytics/json_parser/metadata.py +22 -4
  9. teradataml/analytics/sqle/DecisionTreePredict.py +3 -2
  10. teradataml/analytics/sqle/NaiveBayesPredict.py +3 -2
  11. teradataml/analytics/sqle/__init__.py +3 -0
  12. teradataml/analytics/utils.py +4 -1
  13. teradataml/automl/__init__.py +2369 -464
  14. teradataml/automl/autodataprep/__init__.py +15 -0
  15. teradataml/automl/custom_json_utils.py +184 -112
  16. teradataml/automl/data_preparation.py +113 -58
  17. teradataml/automl/data_transformation.py +154 -53
  18. teradataml/automl/feature_engineering.py +113 -53
  19. teradataml/automl/feature_exploration.py +548 -25
  20. teradataml/automl/model_evaluation.py +260 -32
  21. teradataml/automl/model_training.py +399 -206
  22. teradataml/clients/auth_client.py +2 -2
  23. teradataml/common/aed_utils.py +11 -2
  24. teradataml/common/bulk_exposed_utils.py +4 -2
  25. teradataml/common/constants.py +62 -2
  26. teradataml/common/garbagecollector.py +50 -21
  27. teradataml/common/messagecodes.py +47 -2
  28. teradataml/common/messages.py +19 -1
  29. teradataml/common/sqlbundle.py +23 -6
  30. teradataml/common/utils.py +116 -10
  31. teradataml/context/aed_context.py +16 -10
  32. teradataml/data/Employee.csv +5 -0
  33. teradataml/data/Employee_Address.csv +4 -0
  34. teradataml/data/Employee_roles.csv +5 -0
  35. teradataml/data/JulesBelvezeDummyData.csv +100 -0
  36. teradataml/data/byom_example.json +5 -0
  37. teradataml/data/creditcard_data.csv +284618 -0
  38. teradataml/data/docs/byom/docs/ONNXSeq2Seq.py +255 -0
  39. teradataml/data/docs/sqle/docs_17_10/NGramSplitter.py +1 -1
  40. teradataml/data/docs/sqle/docs_17_20/NGramSplitter.py +1 -1
  41. teradataml/data/docs/sqle/docs_17_20/TextParser.py +1 -1
  42. teradataml/data/jsons/byom/ONNXSeq2Seq.json +287 -0
  43. teradataml/data/jsons/sqle/20.00/AI_AnalyzeSentiment.json +3 -7
  44. teradataml/data/jsons/sqle/20.00/AI_AskLLM.json +3 -7
  45. teradataml/data/jsons/sqle/20.00/AI_DetectLanguage.json +3 -7
  46. teradataml/data/jsons/sqle/20.00/AI_ExtractKeyPhrases.json +3 -7
  47. teradataml/data/jsons/sqle/20.00/AI_MaskPII.json +3 -7
  48. teradataml/data/jsons/sqle/20.00/AI_RecognizeEntities.json +3 -7
  49. teradataml/data/jsons/sqle/20.00/AI_RecognizePIIEntities.json +3 -7
  50. teradataml/data/jsons/sqle/20.00/AI_TextClassifier.json +3 -7
  51. teradataml/data/jsons/sqle/20.00/AI_TextEmbeddings.json +3 -7
  52. teradataml/data/jsons/sqle/20.00/AI_TextSummarize.json +3 -7
  53. teradataml/data/jsons/sqle/20.00/AI_TextTranslate.json +3 -7
  54. teradataml/data/jsons/sqle/20.00/TD_API_AzureML.json +151 -0
  55. teradataml/data/jsons/sqle/20.00/TD_API_Sagemaker.json +182 -0
  56. teradataml/data/jsons/sqle/20.00/TD_API_VertexAI.json +183 -0
  57. teradataml/data/load_example_data.py +29 -11
  58. teradataml/data/payment_fraud_dataset.csv +10001 -0
  59. teradataml/data/teradataml_example.json +67 -0
  60. teradataml/dataframe/copy_to.py +714 -54
  61. teradataml/dataframe/dataframe.py +1153 -33
  62. teradataml/dataframe/dataframe_utils.py +8 -3
  63. teradataml/dataframe/functions.py +168 -1
  64. teradataml/dataframe/setop.py +4 -1
  65. teradataml/dataframe/sql.py +141 -9
  66. teradataml/dbutils/dbutils.py +470 -35
  67. teradataml/dbutils/filemgr.py +1 -1
  68. teradataml/hyperparameter_tuner/optimizer.py +456 -142
  69. teradataml/lib/aed_0_1.dll +0 -0
  70. teradataml/lib/libaed_0_1.dylib +0 -0
  71. teradataml/lib/libaed_0_1.so +0 -0
  72. teradataml/lib/libaed_0_1_aarch64.so +0 -0
  73. teradataml/scriptmgmt/UserEnv.py +234 -34
  74. teradataml/scriptmgmt/lls_utils.py +43 -17
  75. teradataml/sdk/_json_parser.py +1 -1
  76. teradataml/sdk/api_client.py +9 -6
  77. teradataml/sdk/modelops/_client.py +3 -0
  78. teradataml/series/series.py +12 -7
  79. teradataml/store/feature_store/constants.py +601 -234
  80. teradataml/store/feature_store/feature_store.py +2886 -616
  81. teradataml/store/feature_store/mind_map.py +639 -0
  82. teradataml/store/feature_store/models.py +5831 -214
  83. teradataml/store/feature_store/utils.py +390 -0
  84. teradataml/table_operators/table_operator_util.py +1 -1
  85. teradataml/table_operators/templates/dataframe_register.template +6 -2
  86. teradataml/table_operators/templates/dataframe_udf.template +6 -2
  87. teradataml/utils/docstring.py +527 -0
  88. teradataml/utils/dtypes.py +93 -0
  89. teradataml/utils/internal_buffer.py +2 -2
  90. teradataml/utils/utils.py +41 -2
  91. teradataml/utils/validators.py +694 -17
  92. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/METADATA +213 -2
  93. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/RECORD +96 -81
  94. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/WHEEL +0 -0
  95. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/top_level.txt +0 -0
  96. {teradataml-20.0.0.6.dist-info → teradataml-20.0.0.7.dist-info}/zip-safe +0 -0
@@ -1,3 +1,18 @@
+ # ##################################################################
+ #
+ # Copyright 2025 Teradata. All rights reserved.
+ # TERADATA CONFIDENTIAL AND TRADE SECRET
+ #
+ # Primary Owner: Sweta Shaw
+ # Email Id: Sweta.Shaw@Teradata.com
+ #
+ # Secondary Owner: Akhil Bisht
+ # Email Id: AKHIL.BISHT@Teradata.com
+ #
+ # Version: 1.1
+ # Function Version: 1.0
+ # ##################################################################
+
  # External libraries
  import pandas as pd
@@ -1,6 +1,6 @@
  # ##################################################################
  #
- # Copyright 2024 Teradata. All rights reserved.
+ # Copyright 2025 Teradata. All rights reserved.
  # TERADATA CONFIDENTIAL AND TRADE SECRET
  #
  # Primary Owner: Sweta Shaw
@@ -14,15 +14,21 @@
  # ##################################################################

  import json
-
+ from teradataml.common.constants import AutoMLConstants

  class _GenerateCustomJson:

- def __init__(self):
+ def __init__(self, cluster=False):
  """
  DESCRIPTION:
  Function initializes the data and flags for custom JSON file generation.
-
+
+ PARAMETERS:
+ cluster:
+ Optional Argument.
+ Specifies whether to apply clustering techniques.
+ Default Value: False
+ Types: bool
  """
  # Initializing data dictionary for storing custom parameters
  self.data = {}
@@ -30,6 +36,7 @@ class _GenerateCustomJson:
  self.fe_flag = {index : False for index in range(1, 8)}
  self.de_flag = {index : False for index in range(1, 5)}
  self.mt_flag = {index : False for index in range(1, 2)}
+ self.cluster = cluster

  def _process_list_input(self,
  input_data,
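The two hunks above add an opt-in clustering mode to the custom JSON generator: __init__ gains a cluster flag and stores it on the instance. A minimal usage sketch, assuming _GenerateCustomJson can be imported from the changed file teradataml/automl/custom_json_utils.py (an internal class; the call below is illustrative, not a documented API):

# Illustrative sketch only; import path inferred from the changed file list above.
from teradataml.automl.custom_json_utils import _GenerateCustomJson

generator = _GenerateCustomJson()                      # default: supervised-learning menus
cluster_generator = _GenerateCustomJson(cluster=True)  # new in 20.0.0.7: clustering menus
print(cluster_generator.cluster)                       # True, stored by __init__ as self.cluster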
@@ -280,13 +287,21 @@
  """
  print("\nCustomizing Data Preparation Phase ...")
  # Available options for customization of data preparation phase
- dp_customize_options = {
- 1: 'Customize Data Imbalance Handling',
- 2: 'Customize Outlier Handling',
- 3: 'Customize Feature Scaling',
- 4: 'Back to main menu',
- 5: 'Generate custom json and exit'
+ if self.cluster:
+ dp_customize_options = {
+ 1: 'Customize Outlier Handling',
+ 2: 'Customize Feature Scaling',
+ 3: 'Back to main menu',
+ 4: 'Generate custom json and exit'
  }
+ else:
+ dp_customize_options = {
+ 1: 'Customize Data Imbalance Handling',
+ 2: 'Customize Outlier Handling',
+ 3: 'Customize Feature Scaling',
+ 4: 'Back to main menu',
+ 5: 'Generate custom json and exit'
+ }

  while True:
@@ -296,19 +311,26 @@
  print(f"\nIndex {index}: {options}")
  print("-"*80)
  # Mapping each index to corresponding functionality
- de_method_map = {
- 1: self._get_customize_input_data_imbalance_handling,
- 2: self._get_customize_input_outlier_handling,
- 3: self._get_customize_input_feature_scaling
- }
-
+ if self.cluster:
+ de_method_map = {
+ 1: self._get_customize_input_outlier_handling,
+ 2: self._get_customize_input_feature_scaling
+ }
+ de_back_key, de_exit_key = 3, 4
+ else:
+ de_method_map = {
+ 1: self._get_customize_input_data_imbalance_handling,
+ 2: self._get_customize_input_outlier_handling,
+ 3: self._get_customize_input_feature_scaling
+ }
+ de_back_key, de_exit_key = 4, 5
  # Taking required input for customizing data preparation.
  dp_phase_idx = self._process_list_input(
  input("\nEnter the list of indices you want to customize in data preparation phase: "),
  'int', list(dp_customize_options.keys()))

  # Setting back_key and exit_key
- de_back_key, de_exit_key = 4, 5
+
  # Flag variable to back to main menu
  de_exit_to_main_flag = False
  # Flag variable to exit from main menu
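The back and exit indices are now hard-coded per branch because the cluster menu is one entry shorter. A small sketch (not package code) of why the pairs are (3, 4) and (4, 5): they are simply the last two indices of whichever menu is shown.

# Sketch only: derive the back/exit keys from a 1-based menu dict such as dp_customize_options.
def back_and_exit_keys(menu_options):
    return len(menu_options) - 1, len(menu_options)

back_and_exit_keys({1: 'Customize Outlier Handling',
                    2: 'Customize Feature Scaling',
                    3: 'Back to main menu',
                    4: 'Generate custom json and exit'})
# -> (3, 4), matching de_back_key, de_exit_key in the cluster branch; the
# five-entry supervised menu yields (4, 5) the same way.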
@@ -664,7 +686,6 @@
  self._set_generic_arguement(func_name='StringManipulationParam')
  print("\nCustomization of string manipulation has been completed successfully.")

-

  def _get_customize_input_categorical_encoding(self,
  first_execution_flag=False):
@@ -927,8 +948,7 @@
  self.data['DataImbalanceMethod'] = sampling_methods[sampling_mthd_idx]

  print("\nCustomization of data imbalance handling has been completed successfully.")
-
-
+

  def _get_customize_input_outlier_handling(self,
  first_execution_flag=False):
  """
@@ -946,13 +966,30 @@
  if first_execution_flag:
  print("\nWARNING : Reinitiated outlier handling customization. "
  "Overwriting the previous input.")
- keys_to_remove = ['OutlierLowerPercentile', 'OutlierUpperPercentile']
+ keys_to_remove = ['OutlierLowerPercentile', 'OutlierUpperPercentile', 'OutlierFilterMethod', 'OutlierFilterParam']
  for key in keys_to_remove:
  if key in self.data:
  del self.data[key]


  print("\nCustomizing Outlier Handling ...")
+
+ apply_outlier_options = {1: 'Yes', 2: 'No'}
+ print("\nDo you want to apply outlier filtering?")
+ for idx, val in apply_outlier_options.items():
+ print(f"Index {idx}: {val}")
+
+ user_choice = self._process_single_input(
+ input("\nEnter the index of your choice (1 for Yes, 2 for No): "),
+ 'int',
+ list(apply_outlier_options.keys())
+ )
+
+ if user_choice == 2:
+ self.data['OutlierFilterIndicator'] = False
+ print("\nSkipping outlier filtering as per user choice.")
+ return
+
  # Setting indicator for outlier handling
  self.data['OutlierFilterIndicator'] = True
  outlier_methods = {1: 'percentile',
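With the new prompt, outlier filtering can be skipped entirely, and the decision is recorded in the generated custom JSON. A sketch of the two outcomes (key names taken from the hunk above; numeric values are illustrative only):

# Answering 2 (No) short-circuits the section and records the choice:
data_if_no = {'OutlierFilterIndicator': False}
# Answering 1 (Yes) continues to method selection as before, e.g. the percentile path:
data_if_yes = {'OutlierFilterIndicator': True,
               'OutlierFilterMethod': 'percentile',
               'OutlierLowerPercentile': 0.05,   # illustrative value
               'OutlierUpperPercentile': 0.95}   # illustrative value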
@@ -1127,52 +1164,68 @@
  Allowed hyperparameters for model.
  """
  # Setting allowed common hyperparameters for tree like model
- allowed_common_hyperparameters_tree_model ={
- 1 : 'min_impurity',
- 2 : 'max_depth',
- 3 : 'min_node_size',
- }
- # Setting allowed hyperparameters for xgbooost model
- allowed_hyperparameters_xgboost = {
- **allowed_common_hyperparameters_tree_model,
- 4 : 'shrinkage_factor',
- 5 : 'iter_num'
- }
- # Setting allowed hyperparameters for decision forest model
- allowed_hyperparameters_decision_forest = {
- **allowed_common_hyperparameters_tree_model,
- 4 : 'num_trees'
- }
- # Setting allowed hyperparameters for knn model
- allowed_hyperparameters_knn = {
- 0 : 'k'
- }
- # Setting allowed hyperparameters for svm model
- allowed_hyperparameters_svm = {
- 1 : 'alpha',
- 2 : 'learning_rate',
- 3 : 'initial_eta',
- 4 : 'momentum',
- 5 : 'iter_num_no_change',
- 6 : 'iter_max',
- 7 : 'batch_size'
- }
- # Setting allowed hyperparameters for glm model
- allowed_hyperparameters_glm = {
- **allowed_hyperparameters_svm,
- 8 : 'tolerance',
- 9 : 'nesterov',
- 10 : 'intercept',
- 11 : 'local_sgd_iterations'
- }
- # Setting allowed hyperparameters for different models
- allowed_hyperparameters = {
- 'xgboost' : allowed_hyperparameters_xgboost,
- 'decision_forest' : allowed_hyperparameters_decision_forest,
- 'knn' : allowed_hyperparameters_knn,
- 'svm' : allowed_hyperparameters_svm,
- 'glm' : allowed_hyperparameters_glm
- }
+ if self.cluster:
+ allowed_hyperparameters_kmeans ={
+ 1 : 'n_clusters',
+ 2 : 'init',
+ 3 : 'max_iter',
+ }
+ allowed_hyperparameters_gaussian_mixture ={
+ 1 : 'n_components',
+ 2 : 'covariance_type',
+ 3 : 'max_iter',
+ }
+ allowed_hyperparameters = {
+ 'KMeans' : allowed_hyperparameters_kmeans,
+ 'GaussianMixture' : allowed_hyperparameters_gaussian_mixture,
+ }
+ else:
+ allowed_common_hyperparameters_tree_model ={
+ 1 : 'min_impurity',
+ 2 : 'max_depth',
+ 3 : 'min_node_size',
+ }
+ # Setting allowed hyperparameters for xgbooost model
+ allowed_hyperparameters_xgboost = {
+ **allowed_common_hyperparameters_tree_model,
+ 4 : 'shrinkage_factor',
+ 5 : 'iter_num'
+ }
+ # Setting allowed hyperparameters for decision forest model
+ allowed_hyperparameters_decision_forest = {
+ **allowed_common_hyperparameters_tree_model,
+ 4 : 'num_trees'
+ }
+ # Setting allowed hyperparameters for knn model
+ allowed_hyperparameters_knn = {
+ 0 : 'k'
+ }
+ # Setting allowed hyperparameters for svm model
+ allowed_hyperparameters_svm = {
+ 1 : 'alpha',
+ 2 : 'learning_rate',
+ 3 : 'initial_eta',
+ 4 : 'momentum',
+ 5 : 'iter_num_no_change',
+ 6 : 'iter_max',
+ 7 : 'batch_size'
+ }
+ # Setting allowed hyperparameters for glm model
+ allowed_hyperparameters_glm = {
+ **allowed_hyperparameters_svm,
+ 8 : 'tolerance',
+ 9 : 'nesterov',
+ 10 : 'intercept',
+ 11 : 'local_sgd_iterations'
+ }
+ # Setting allowed hyperparameters for different models
+ allowed_hyperparameters = {
+ 'xgboost' : allowed_hyperparameters_xgboost,
+ 'decision_forest' : allowed_hyperparameters_decision_forest,
+ 'knn' : allowed_hyperparameters_knn,
+ 'svm' : allowed_hyperparameters_svm,
+ 'glm' : allowed_hyperparameters_glm
+ }
  return allowed_hyperparameters[model_name]

  def _get_allowed_hyperparameters_types(self, hyperparameter):
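In cluster mode the per-model dicts are keyed by menu index, just like the supervised ones, so a user's index selection resolves directly to hyperparameter names. A sketch with stand-in names (the enclosing method's name is not visible in this hunk); the same cluster/else split is applied to the type map and the example-value map in the two hunks that follow:

# Sketch only: how an index-keyed mapping like allowed_hyperparameters_kmeans is consumed.
allowed_kmeans = {1: 'n_clusters', 2: 'init', 3: 'max_iter'}
selected_indices = [1, 3]                                # e.g. user enters "1,3"
selected_hyperparameters = [allowed_kmeans[i] for i in selected_indices]
# -> ['n_clusters', 'max_iter']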
@@ -1190,26 +1243,35 @@
  Allowed hyperparameters types for hyperparameter.
  """
  # Setting allowed hyperparameters types for different hyperparameters
- allowed_hyperparameters_types = {
- 'min_impurity' : 'float',
- 'max_depth' : 'int',
- 'min_node_size' : 'int',
- 'shrinkage_factor' : 'float',
- 'iter_num' : 'int',
- 'num_trees' : 'int',
- 'k' : 'int',
- 'alpha' : 'float',
- 'learning_rate' : 'str',
- 'initial_eta' : 'float',
- 'momentum' : 'float',
- 'iter_num_no_change' : 'int',
- 'iter_max' : 'int',
- 'batch_size' : 'int',
- 'tolerance' : 'float',
- 'nesterov' : 'bool',
- 'intercept' : 'bool',
- 'local_sgd_iterations' : 'int'
- }
+ if self.cluster:
+ allowed_hyperparameters_types = {
+ 'n_clusters': 'int',
+ 'init': 'str',
+ 'max_iter': 'int',
+ 'n_components': 'int',
+ 'covariance_type': 'str'
+ }
+ else:
+ allowed_hyperparameters_types = {
+ 'min_impurity' : 'float',
+ 'max_depth' : 'int',
+ 'min_node_size' : 'int',
+ 'shrinkage_factor' : 'float',
+ 'iter_num' : 'int',
+ 'num_trees' : 'int',
+ 'k' : 'int',
+ 'alpha' : 'float',
+ 'learning_rate' : 'str',
+ 'initial_eta' : 'float',
+ 'momentum' : 'float',
+ 'iter_num_no_change' : 'int',
+ 'iter_max' : 'int',
+ 'batch_size' : 'int',
+ 'tolerance' : 'float',
+ 'nesterov' : 'bool',
+ 'intercept' : 'bool',
+ 'local_sgd_iterations' : 'int'
+ }
  return allowed_hyperparameters_types[hyperparameter]

  def _get_customize_input_model_hyperparameter(self,
@@ -1233,11 +1295,12 @@
  # Setting indicator for model hyperparameter tuning
  self.data['HyperparameterTuningIndicator'] = True
  self.data['HyperparameterTuningParam'] = {}
- all_models = {1: 'decision_forest',
- 2: 'xgboost',
- 3: 'knn',
- 4: 'glm',
- 5: 'svm'}
+ if self.cluster:
+ # Create numbered mapping for clustering models
+ all_models = {i+1: model for i, model in enumerate(AutoMLConstants.CLUSTERING_MODELS.value)}
+ else:
+ # Create numbered mapping for supervised models
+ all_models = {i+1: model for i, model in enumerate(AutoMLConstants.SUPERVISED_MODELS.value)}
  # Displaying available models for hyperparameter tuning
  print("\nAvailable models for hyperparameter tuning with corresponding indices:")
  for index, model in all_models.items():
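The hard-coded supervised model menu is replaced by mappings built from AutoMLConstants, so the menu follows whatever the constants define. A standalone illustration of the enumerate() pattern; the clustering model names below are assumed from the hyperparameter mappings earlier in this diff, not read from the enum itself:

# Assumed member list, for illustration only; the real values live in
# AutoMLConstants.CLUSTERING_MODELS (teradataml.common.constants).
clustering_models = ['KMeans', 'GaussianMixture']
all_models = {i + 1: model for i, model in enumerate(clustering_models)}
# -> {1: 'KMeans', 2: 'GaussianMixture'}, the same 1-based menu shape as the
# old hard-coded {1: 'decision_forest', ..., 5: 'svm'} dict it replaces.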
@@ -1309,26 +1372,35 @@
  Types: str
  """
  # Setting example hyperparameter values for different hyperparameters
- example_hyperparameters = {
- 'min_impurity' : ([0.1,0.6], 'float'),
- 'max_depth' : ([1,5,10], 'int'),
- 'min_node_size' : ([1,20,100], 'int'),
- 'num_trees' : ([10,50,100], 'int'),
- 'k' : ([5,25,100], 'int'),
- 'shrinkage_factor': ([0.1,0.5,1.0], 'float'),
- 'alpha' : ([0.1,0.5,1.0], 'float'),
- 'learning_rate' : (['constant','optimal','invtime','adaptive'], 'str'),
- 'initial_eta' : ([0.05,0.1], 'float'),
- 'momentum' : ([0.65,0.95], 'float'),
- 'iter_num_no_change' : ([25,50,100], 'int'),
- 'iter_max' : ([10,100,300], 'int'),
- 'batch_size' : ([10,50,100], 'int'),
- 'tolerance' : ([0.0001,0.01], 'float'),
- 'nesterov' : (['true','false'], 'bool'),
- 'intercept' : (['true','false'], 'bool'),
- 'local_sgd_iterations' : ([10,25,50], 'int'),
- 'iter_num' : ([10,50,100], 'int')
- }
+ if self.cluster:
+ example_hyperparameters = {
+ 'n_clusters': ([2, 3, 4], 'int'),
+ 'init': (['k-means++', 'random'], 'str'),
+ 'max_iter': ([100, 300], 'int'),
+ 'n_components': ([2, 3, 4], 'int'),
+ 'covariance_type': (['full', 'tied', 'diag', 'spherical'], 'str')
+ }
+ else:
+ example_hyperparameters = {
+ 'min_impurity' : ([0.1,0.6], 'float'),
+ 'max_depth' : ([1,5,10], 'int'),
+ 'min_node_size' : ([1,20,100], 'int'),
+ 'num_trees' : ([10,50,100], 'int'),
+ 'k' : ([5,25,100], 'int'),
+ 'shrinkage_factor': ([0.1,0.5,1.0], 'float'),
+ 'alpha' : ([0.1,0.5,1.0], 'float'),
+ 'learning_rate' : (['constant','optimal','invtime','adaptive'], 'str'),
+ 'initial_eta' : ([0.05,0.1], 'float'),
+ 'momentum' : ([0.65,0.95], 'float'),
+ 'iter_num_no_change' : ([25,50,100], 'int'),
+ 'iter_max' : ([10,100,300], 'int'),
+ 'batch_size' : ([10,50,100], 'int'),
+ 'tolerance' : ([0.0001,0.01], 'float'),
+ 'nesterov' : (['true','false'], 'bool'),
+ 'intercept' : (['true','false'], 'bool'),
+ 'local_sgd_iterations' : ([10,25,50], 'int'),
+ 'iter_num' : ([10,50,100], 'int')
+ }

  print(f"\nExample values for hyperparameter '{hyperparameter_name}' :")
  if hyperparameter_name in example_hyperparameters:
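Each example entry pairs a list of sample values with a type tag, and the new cluster branch follows the same shape. A quick restatement of how an entry unpacks (values copied from the cluster branch above; access shown for illustration only):

example_hyperparameters = {
    'n_clusters': ([2, 3, 4], 'int'),
    'init': (['k-means++', 'random'], 'str'),
}
values, type_tag = example_hyperparameters['n_clusters']
# values -> [2, 3, 4]; type_tag -> 'int'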