workbench 0.8.161__py3-none-any.whl → 0.8.163__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- workbench/model_scripts/pytorch_model/generated_model_script.py +22 -11
- workbench/model_scripts/pytorch_model/pytorch.template +14 -6
- workbench/repl/workbench_shell.py +6 -1
- {workbench-0.8.161.dist-info → workbench-0.8.163.dist-info}/METADATA +2 -3
- {workbench-0.8.161.dist-info → workbench-0.8.163.dist-info}/RECORD +9 -9
- {workbench-0.8.161.dist-info → workbench-0.8.163.dist-info}/WHEEL +0 -0
- {workbench-0.8.161.dist-info → workbench-0.8.163.dist-info}/entry_points.txt +0 -0
- {workbench-0.8.161.dist-info → workbench-0.8.163.dist-info}/licenses/LICENSE +0 -0
- {workbench-0.8.161.dist-info → workbench-0.8.163.dist-info}/top_level.txt +0 -0
|
@@ -35,13 +35,13 @@ from typing import List, Tuple
|
|
|
35
35
|
|
|
36
36
|
# Template Parameters
|
|
37
37
|
TEMPLATE_PARAMS = {
|
|
38
|
-
"model_type": "
|
|
39
|
-
"target_column": "
|
|
38
|
+
"model_type": "classifier",
|
|
39
|
+
"target_column": "solubility_class",
|
|
40
40
|
"features": ['molwt', 'mollogp', 'molmr', 'heavyatomcount', 'numhacceptors', 'numhdonors', 'numheteroatoms', 'numrotatablebonds', 'numvalenceelectrons', 'numaromaticrings', 'numsaturatedrings', 'numaliphaticrings', 'ringcount', 'tpsa', 'labuteasa', 'balabanj', 'bertzct'],
|
|
41
41
|
"compressed_features": [],
|
|
42
|
-
"model_metrics_s3_path": "s3://sandbox-sageworks-artifacts/models/aqsol-pytorch-
|
|
42
|
+
"model_metrics_s3_path": "s3://sandbox-sageworks-artifacts/models/aqsol-pytorch-class/training",
|
|
43
43
|
"train_all_data": False,
|
|
44
|
-
"hyperparameters": {'
|
|
44
|
+
"hyperparameters": {'training_config': {'max_epochs': 150}, 'model_config': {'layers': '256-128-64'}}
|
|
45
45
|
}
|
|
46
46
|
|
|
47
47
|
|
|
@@ -432,9 +432,13 @@ if __name__ == "__main__":
|
|
|
432
432
|
"gradient_clip_val": 1.0,
|
|
433
433
|
}
|
|
434
434
|
|
|
435
|
-
# Override defaults with
|
|
436
|
-
|
|
437
|
-
|
|
435
|
+
# Override defaults with training_config if present
|
|
436
|
+
training_overrides = {k: v for k, v in hyperparameters.get('training_config', {}).items()
|
|
437
|
+
if k in trainer_defaults}
|
|
438
|
+
# Print overwrites
|
|
439
|
+
for key, value in training_overrides.items():
|
|
440
|
+
print(f"TRAINING CONFIG Override: {key}: {trainer_defaults[key]} → {value}")
|
|
441
|
+
trainer_params = {**trainer_defaults, **training_overrides}
|
|
438
442
|
trainer_config = TrainerConfig(**trainer_params)
|
|
439
443
|
|
|
440
444
|
# Model config defaults
|
|
@@ -446,10 +450,17 @@ if __name__ == "__main__":
|
|
|
446
450
|
"use_batch_norm": True,
|
|
447
451
|
"initialization": "kaiming",
|
|
448
452
|
}
|
|
449
|
-
# Override defaults with
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
#
|
|
453
|
+
# Override defaults with model_config if present
|
|
454
|
+
model_overrides = {k: v for k, v in hyperparameters.get('model_config', {}).items()
|
|
455
|
+
if k in model_defaults}
|
|
456
|
+
# Print overwrites
|
|
457
|
+
for key, value in model_overrides.items():
|
|
458
|
+
print(f"MODEL CONFIG Override: {key}: {model_defaults[key]} → {value}")
|
|
459
|
+
model_params = {**model_defaults, **model_overrides}
|
|
460
|
+
|
|
461
|
+
# Use CategoryEmbedding model configuration for general-purpose tabular modeling.
|
|
462
|
+
# Works effectively for both regression and classification as the foundational
|
|
463
|
+
# architecture in PyTorch Tabular
|
|
453
464
|
model_config = CategoryEmbeddingModelConfig(
|
|
454
465
|
task=task,
|
|
455
466
|
**model_params
|
|
@@ -432,9 +432,13 @@ if __name__ == "__main__":
|
|
|
432
432
|
"gradient_clip_val": 1.0,
|
|
433
433
|
}
|
|
434
434
|
|
|
435
|
-
# Override defaults with
|
|
436
|
-
|
|
437
|
-
|
|
435
|
+
# Override defaults with training_config if present
|
|
436
|
+
training_overrides = {k: v for k, v in hyperparameters.get('training_config', {}).items()
|
|
437
|
+
if k in trainer_defaults}
|
|
438
|
+
# Print overwrites
|
|
439
|
+
for key, value in training_overrides.items():
|
|
440
|
+
print(f"TRAINING CONFIG Override: {key}: {trainer_defaults[key]} → {value}")
|
|
441
|
+
trainer_params = {**trainer_defaults, **training_overrides}
|
|
438
442
|
trainer_config = TrainerConfig(**trainer_params)
|
|
439
443
|
|
|
440
444
|
# Model config defaults
|
|
@@ -446,9 +450,13 @@ if __name__ == "__main__":
|
|
|
446
450
|
"use_batch_norm": True,
|
|
447
451
|
"initialization": "kaiming",
|
|
448
452
|
}
|
|
449
|
-
# Override defaults with
|
|
450
|
-
|
|
451
|
-
|
|
453
|
+
# Override defaults with model_config if present
|
|
454
|
+
model_overrides = {k: v for k, v in hyperparameters.get('model_config', {}).items()
|
|
455
|
+
if k in model_defaults}
|
|
456
|
+
# Print overwrites
|
|
457
|
+
for key, value in model_overrides.items():
|
|
458
|
+
print(f"MODEL CONFIG Override: {key}: {model_defaults[key]} → {value}")
|
|
459
|
+
model_params = {**model_defaults, **model_overrides}
|
|
452
460
|
|
|
453
461
|
# Use CategoryEmbedding model configuration for general-purpose tabular modeling.
|
|
454
462
|
# Works effectively for both regression and classification as the foundational
|
|
@@ -1,4 +1,6 @@
|
|
|
1
|
+
import IPython
|
|
1
2
|
from IPython import start_ipython
|
|
3
|
+
from distutils.version import LooseVersion
|
|
2
4
|
from IPython.terminal.prompts import Prompts
|
|
3
5
|
from IPython.terminal.ipapp import load_default_config
|
|
4
6
|
from pygments.token import Token
|
|
@@ -202,7 +204,10 @@ class WorkbenchShell:
|
|
|
202
204
|
|
|
203
205
|
# Start IPython with the config and commands in the namespace
|
|
204
206
|
try:
|
|
205
|
-
|
|
207
|
+
if LooseVersion(IPython.__version__) >= LooseVersion("9.0.0"):
|
|
208
|
+
ipython_argv = ["--no-tip", "--theme", "linux"]
|
|
209
|
+
else:
|
|
210
|
+
ipython_argv = []
|
|
206
211
|
start_ipython(ipython_argv, user_ns=locs, config=config)
|
|
207
212
|
finally:
|
|
208
213
|
spinner = self.spinner_start("Goodbye to AWS:")
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: workbench
|
|
3
|
-
Version: 0.8.161
|
|
3
|
+
Version: 0.8.163
|
|
4
4
|
Summary: Workbench: A Dashboard and Python API for creating and deploying AWS SageMaker Model Pipelines
|
|
5
5
|
Author-email: SuperCowPowers LLC <support@supercowpowers.com>
|
|
6
6
|
License-Expression: MIT
|
|
@@ -22,7 +22,7 @@ Requires-Dist: pandas>=2.2.1
|
|
|
22
22
|
Requires-Dist: awswrangler>=3.4.0
|
|
23
23
|
Requires-Dist: sagemaker>=2.143
|
|
24
24
|
Requires-Dist: cryptography>=44.0.2
|
|
25
|
-
Requires-Dist: ipython>=
|
|
25
|
+
Requires-Dist: ipython>=8.37.0
|
|
26
26
|
Requires-Dist: pyreadline3; sys_platform == "win32"
|
|
27
27
|
Requires-Dist: scikit-learn>=1.5.2
|
|
28
28
|
Requires-Dist: xgboost>=3.0.3
|
|
@@ -168,7 +168,6 @@ Using Workbench will minimize the time and manpower needed to incorporate AWS ML
|
|
|
168
168
|
|
|
169
169
|
```
|
|
170
170
|
pip install workbench # Installs Workbench with Core Dependencies
|
|
171
|
-
pip install 'workbench[ml-tools]' # + Shap and NetworkX
|
|
172
171
|
pip install 'workbench[ui]' # + Plotly/Dash
|
|
173
172
|
pip install 'workbench[dev]' # + Pytest/flake8/black
|
|
174
173
|
pip install 'workbench[all]' # + All the things :)
|
|
@@ -150,8 +150,8 @@ workbench/model_scripts/custom_script_example/requirements.txt,sha256=jWlGc7HH7v
|
|
|
150
150
|
workbench/model_scripts/ensemble_xgb/ensemble_xgb.template,sha256=s8tPPk_q6UqA2nAzknD8viA-kN7f62Rim2XwMKcqHKc,10399
|
|
151
151
|
workbench/model_scripts/ensemble_xgb/generated_model_script.py,sha256=dsjUGm22xI1ThGn97HPKtooyEPK-HOQnf5chnZ7-MXk,10675
|
|
152
152
|
workbench/model_scripts/ensemble_xgb/requirements.txt,sha256=jWlGc7HH7vqyukTm38LN4EyDi8jDUPEay4n45z-30uc,104
|
|
153
|
-
workbench/model_scripts/pytorch_model/generated_model_script.py,sha256=
|
|
154
|
-
workbench/model_scripts/pytorch_model/pytorch.template,sha256=
|
|
153
|
+
workbench/model_scripts/pytorch_model/generated_model_script.py,sha256=Mr1IMQJE_ML899qjzhjkrP521IjvcAvqU0pk--FB7KY,22356
|
|
154
|
+
workbench/model_scripts/pytorch_model/pytorch.template,sha256=3jM3RUH68r75eH9Wayz6YTXZ7qpuDnaJCKKcHD_oKqA,22054
|
|
155
155
|
workbench/model_scripts/pytorch_model/requirements.txt,sha256=ICS5nW0wix44EJO2tJszJSaUrSvhSfdedn6FcRInGx4,181
|
|
156
156
|
workbench/model_scripts/quant_regression/quant_regression.template,sha256=AQihffV68qI6CG9qztA0jGunDWoijb3eeDWNG5tiIGc,9818
|
|
157
157
|
workbench/model_scripts/quant_regression/requirements.txt,sha256=jWlGc7HH7vqyukTm38LN4EyDi8jDUPEay4n45z-30uc,104
|
|
@@ -162,7 +162,7 @@ workbench/model_scripts/xgb_model/generated_model_script.py,sha256=dm11XC6SHo_-z
|
|
|
162
162
|
workbench/model_scripts/xgb_model/requirements.txt,sha256=jWlGc7HH7vqyukTm38LN4EyDi8jDUPEay4n45z-30uc,104
|
|
163
163
|
workbench/model_scripts/xgb_model/xgb_model.template,sha256=RaUr8X6al5R2IILNKgGUH05Gb4H7AFFG9RE524_VH7Q,17935
|
|
164
164
|
workbench/repl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
165
|
-
workbench/repl/workbench_shell.py,sha256=
|
|
165
|
+
workbench/repl/workbench_shell.py,sha256=ms9nVFfKohK8efmiQ2YbOH1OYBRWLgqbByshkcoKDog,22137
|
|
166
166
|
workbench/resources/open_source_api.key,sha256=3S0OTblsmC0msUPdE_dbBmI83xJNmYscuwLJ57JmuOc,433
|
|
167
167
|
workbench/resources/signature_verify_pub.pem,sha256=V3-u-3_z2PH-805ybkKvzDOBwAbvHxcKn0jLBImEtzM,272
|
|
168
168
|
workbench/scripts/check_double_bond_stereo.py,sha256=p5hnL54Weq77ES0HCELq9JeoM-PyUGkvVSeWYF2dKyo,7776
|
|
@@ -275,9 +275,9 @@ workbench/web_interface/page_views/main_page.py,sha256=X4-KyGTKLAdxR-Zk2niuLJB2Y
|
|
|
275
275
|
workbench/web_interface/page_views/models_page_view.py,sha256=M0bdC7bAzLyIaE2jviY12FF4abdMFZmg6sFuOY_LaGI,2650
|
|
276
276
|
workbench/web_interface/page_views/page_view.py,sha256=Gh6YnpOGlUejx-bHZAf5pzqoQ1H1R0OSwOpGhOBO06w,455
|
|
277
277
|
workbench/web_interface/page_views/pipelines_page_view.py,sha256=v2pxrIbsHBcYiblfius3JK766NZ7ciD2yPx0t3E5IJo,2656
|
|
278
|
-
workbench-0.8.
|
|
279
|
-
workbench-0.8.
|
|
280
|
-
workbench-0.8.
|
|
281
|
-
workbench-0.8.
|
|
282
|
-
workbench-0.8.
|
|
283
|
-
workbench-0.8.
|
|
278
|
+
workbench-0.8.163.dist-info/licenses/LICENSE,sha256=z4QMMPlLJkZjU8VOKqJkZiQZCEZ--saIU2Z8-p3aVc0,1080
|
|
279
|
+
workbench-0.8.163.dist-info/METADATA,sha256=TwnUicLddrHeMkx_gDGiUR6uQD7TR6mRjNG0XY3kh1E,9209
|
|
280
|
+
workbench-0.8.163.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
281
|
+
workbench-0.8.163.dist-info/entry_points.txt,sha256=oZykkheWiiIBjRE8cS5SdcxwmZKSFaQEGwMBjNh-eNM,238
|
|
282
|
+
workbench-0.8.163.dist-info/top_level.txt,sha256=Dhy72zTxaA_o_yRkPZx5zw-fwumnjGaeGf0hBN3jc_w,10
|
|
283
|
+
workbench-0.8.163.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|