aiauto-client 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiauto/core.py +15 -2
- aiauto/serializer.py +20 -9
- {aiauto_client-0.1.13.dist-info → aiauto_client-0.1.15.dist-info}/METADATA +31 -17
- aiauto_client-0.1.15.dist-info/RECORD +10 -0
- aiauto_client-0.1.13.dist-info/RECORD +0 -10
- {aiauto_client-0.1.13.dist-info → aiauto_client-0.1.15.dist-info}/WHEEL +0 -0
- {aiauto_client-0.1.13.dist-info → aiauto_client-0.1.15.dist-info}/top_level.txt +0 -0
aiauto/core.py
CHANGED
@@ -188,11 +188,24 @@ class StudyWrapper:
         parallelism: int = 2,
         requirements_file: Optional[str] = None,
         requirements_list: Optional[List[str]] = None,
-        resources_requests: Optional[Dict[str, str]] =
-        resources_limits: Optional[Dict[str, str]] =
+        resources_requests: Optional[Dict[str, str]] = None,
+        resources_limits: Optional[Dict[str, str]] = None,
         runtime_image: Optional[str] = 'ghcr.io/astral-sh/uv:python3.8-bookworm-slim',
         use_gpu: bool = False
     ) -> None:
+        # Set resource defaults
+        if resources_requests is None:
+            if use_gpu:
+                resources_requests = {"cpu": "2", "memory": "4Gi"}
+            else:
+                resources_requests = {"cpu": "1", "memory": "1Gi"}
+
+        if resources_limits is None:
+            if use_gpu:
+                resources_limits = {"cpu": "2", "memory": "4Gi"}
+            else:
+                resources_limits = {"cpu": "1", "memory": "1Gi"}
+
         if runtime_image is None or runtime_image == "":
             if use_gpu:
                 runtime_image = "pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime"
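For readers skimming the hunk above: when neither `resources_requests` nor `resources_limits` is passed, 0.1.15 now fills both in from `use_gpu`. Below is a minimal sketch of that defaulting behaviour; `pick_resource_defaults` is a hypothetical helper used only for illustration, since the released code inlines this logic in `StudyWrapper`.

```python
from typing import Dict, Optional, Tuple


def pick_resource_defaults(
    use_gpu: bool,
    resources_requests: Optional[Dict[str, str]] = None,
    resources_limits: Optional[Dict[str, str]] = None,
) -> Tuple[Dict[str, str], Dict[str, str]]:
    # Same defaults as the 0.1.15 hunk: GPU trials get a larger footprint.
    default = {"cpu": "2", "memory": "4Gi"} if use_gpu else {"cpu": "1", "memory": "1Gi"}
    if resources_requests is None:
        resources_requests = dict(default)
    if resources_limits is None:
        resources_limits = dict(default)
    return resources_requests, resources_limits


# CPU-only callers that pass nothing get 1 CPU / 1Gi for both requests and limits.
print(pick_resource_defaults(use_gpu=False))
# GPU callers get 2 CPU / 4Gi; an explicitly passed value is kept and only the missing side is defaulted.
print(pick_resource_defaults(use_gpu=True, resources_limits={"cpu": "4", "memory": "8Gi"}))
```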
aiauto/serializer.py
CHANGED
@@ -47,16 +47,27 @@ def object_to_json(obj: Union[object, dict, None]) -> str:
     if not module_name.startswith('optuna.'):
         raise ValueError(f"optuna 코어 클래스만 지원합니다: {class_name}")

-
+    # Collect only the actual parameters of __init__
+    sig = inspect.signature(cls.__init__)
+    valid_params = set(sig.parameters.keys()) - {'self'}
+
+    # Optuna objects store their arguments in __dict__ as _param_name
     kwargs = {}
-
-
-
-
-
-
-
-
+    for key, value in obj.__dict__.items():
+        if key.startswith('_'):
+            param_name = key[1:]  # strip the leading underscore
+
+            # Confirm it really is an __init__ parameter
+            if param_name in valid_params:
+                # Special handling for PatientPruner's wrapped_pruner
+                if class_name == "PatientPruner" and param_name == "wrapped_pruner" and value is not None:
+                    kwargs[param_name] = json.loads(object_to_json(value))
+                # Special handling for the independent_sampler of CmaEsSampler and QMCSampler
+                elif param_name == "independent_sampler" and value is not None and class_name in ["CmaEsSampler", "QMCSampler"]:
+                    kwargs[param_name] = json.loads(object_to_json(value))
+                # Skip callable values (gamma, weights, etc.)
+                elif not callable(value):
+                    kwargs[param_name] = value

     return json.dumps({
         "cls": class_name,
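The pattern in this hunk (take each `_name` entry from `__dict__`, strip the underscore, and keep it only if it is a real `__init__` parameter and not callable) can be reproduced standalone. `DemoPruner` and `demo_object_to_json` below are hypothetical stand-ins for illustration only; the released `object_to_json` additionally recurses into `PatientPruner.wrapped_pruner` and the `independent_sampler` of `CmaEsSampler`/`QMCSampler`, and only accepts classes from the `optuna.` namespace.

```python
import inspect
import json


class DemoPruner:
    """Hypothetical stand-in that stores its __init__ arguments as _private attributes."""

    def __init__(self, n_startup_trials: int = 5, n_warmup_steps: int = 0):
        self._n_startup_trials = n_startup_trials
        self._n_warmup_steps = n_warmup_steps
        self._rng = object()  # internal state that is not an __init__ parameter


def demo_object_to_json(obj) -> str:
    cls = type(obj)
    # Only names that are real __init__ parameters survive the round trip.
    valid_params = set(inspect.signature(cls.__init__).parameters) - {"self"}
    kwargs = {}
    for key, value in obj.__dict__.items():
        if key.startswith("_") and key[1:] in valid_params and not callable(value):
            kwargs[key[1:]] = value
    return json.dumps({"cls": cls.__name__, "kwargs": kwargs})


print(demo_object_to_json(DemoPruner(n_startup_trials=3)))
# {"cls": "DemoPruner", "kwargs": {"n_startup_trials": 3, "n_warmup_steps": 0}}
```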
{aiauto_client-0.1.13.dist-info → aiauto_client-0.1.15.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: aiauto-client
-Version: 0.1.13
+Version: 0.1.15
 Summary: AI Auto HPO (Hyperparameter Optimization) Client Library
 Author-email: AIAuto Team <ainode@zeroone.ai>
 Project-URL: Homepage, https://dashboard.common.aiauto.pangyo.ainode.ai
@@ -315,7 +315,12 @@ study_wrapper.optimize(
     n_trials=100,
     parallelism=4,
     use_gpu=True,  # use GPU
-
+    runtime_image='pytorch/pytorch:2.1.0-cuda11.8-cudnn8-runtime',  # default image for use_gpu True
+    # requirements_list=['torch', 'torchvision']  # auto-installed in the Pod; listing pip packages downloads slowly, specifying a torch runtime_image is preferable
+    resources_requests={
+        "cpu": "2",
+        "memory": "4Gi",
+    },
 )
 time.sleep(5)
 ```
@@ -446,7 +451,16 @@ study_wrapper.optimize(
     n_trials=100,
     parallelism=4,
     use_gpu=True,  # use GPU
-
+    runtime_image='pytorch/pytorch:2.1.0-cuda11.8-cudnn8-runtime',  # default image for use_gpu True
+    requirements_list=[  # listing pip packages downloads slowly, specifying a torch runtime_image is preferable
+        # 'torch',
+        # 'torchvision',
+        'fvcore',
+    ],  # auto-installed in the Pod
+    resources_requests={
+        "cpu": "2",
+        "memory": "4Gi",
+    },
 )
 time.sleep(5)
 ```
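Both README hunks above show the GPU path. For completeness, a CPU-only call can now omit the resource arguments entirely and rely on the new core.py defaults. The sketch below reuses the controller, study, and objective shapes from the README examples in this diff; the token placeholder, study name, trial count, and parallelism are illustrative, while the parameters themselves are the ones shown in the hunks.

```python
import aiauto, time

# Token and study setup as in the README examples above.
controller = aiauto.AIAutoController('aiauto_xxx')
study = controller.create_study('exp1', direction='minimize')
time.sleep(5)

def objective(trial):
    import numpy as np
    x = trial.suggest_float('x', -10, 10)
    return (x - 1.23) ** 2

study.optimize(
    objective,
    n_trials=20,
    parallelism=2,
    # use_gpu defaults to False, so 0.1.15 fills in
    # resources_requests / resources_limits = {"cpu": "1", "memory": "1Gi"}
    requirements_list=['numpy'],  # installed automatically in each trial Pod
)
time.sleep(5)
```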
@@ -545,15 +559,15 @@ study = controller.create_study('exp1', direction='minimize')
 time.sleep(5)

 def objective(trial):
-    import numpy as np
-    x = trial.suggest_float('x', -10, 10)
-    return (x - 1.23) ** 2
+    import numpy as np
+    x = trial.suggest_float('x', -10, 10)
+    return (x - 1.23) ** 2

 study.optimize(
-    objective,
-    n_trials=64,
-    parallelism=8,
-    requirements_list=['numpy'],
+    objective,
+    n_trials=64,
+    parallelism=8,
+    requirements_list=['numpy'],
 )
 time.sleep(5)
 ```
@@ -600,17 +614,17 @@ import optuna, aiauto, time

 controller = aiauto.AIAutoController('aiauto_xxx')
 study = controller.create_study(
-    study_name='cnn',
-    direction='minimize',
-    sampler=optuna.samplers.TPESampler(seed=42),
-    pruner=optuna.pruners.MedianPruner(n_startup_trials=5),
+    study_name='cnn',
+    direction='minimize',
+    sampler=optuna.samplers.TPESampler(seed=42),
+    pruner=optuna.pruners.MedianPruner(n_startup_trials=5),
 )
 time.sleep(5)

 def objective(trial):
-    import numpy as np
-    lr = trial.suggest_float('lr', 1e-5, 1e-1, log=True)
-    return (np.log10(lr) + 2) ** 2
+    import numpy as np
+    lr = trial.suggest_float('lr', 1e-5, 1e-1, log=True)
+    return (np.log10(lr) + 2) ** 2

 study.optimize(objective, n_trials=50, parallelism=4)
 time.sleep(5)
aiauto_client-0.1.15.dist-info/RECORD
ADDED
@@ -0,0 +1,10 @@
+aiauto/__init__.py,sha256=sF7sJaXg7-MqolSYLxsaXAir1dBzARhXLrHo7zLsupg,345
+aiauto/_config.py,sha256=hTFh2bH9m-HuX6QCpNtBC0j6rEB0S97hhPKjbEjv4Tg,89
+aiauto/constants.py,sha256=rBibGOQHHrdkwaai92-3I8-N0cu-B4CoCoQbG9-Cl8k,821
+aiauto/core.py,sha256=Hz3HP6xJGey6LbF04cYQtPPqerf4LhDgt0BVBPPf0j4,10364
+aiauto/http_client.py,sha256=v_nPdb-2tIeH1XrOYqzMGvFfXLKEDbQoSaQYPsB0Hik,2587
+aiauto/serializer.py,sha256=BJmeq6uCD9D2_6bXu_sMBQLSsXCUMIMM10iX923DTXE,2749
+aiauto_client-0.1.15.dist-info/METADATA,sha256=c0m7XNdPxbhiSYgqEjsGT9hnDnJZr8DBHM7_qCBqkBw,25682
+aiauto_client-0.1.15.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+aiauto_client-0.1.15.dist-info/top_level.txt,sha256=Sk2ctO9_Bf_tAPwq1x6Vfl6OuL29XzwMTO4F_KG6oJE,7
+aiauto_client-0.1.15.dist-info/RECORD,,
aiauto_client-0.1.13.dist-info/RECORD
REMOVED
@@ -1,10 +0,0 @@
-aiauto/__init__.py,sha256=sF7sJaXg7-MqolSYLxsaXAir1dBzARhXLrHo7zLsupg,345
-aiauto/_config.py,sha256=hTFh2bH9m-HuX6QCpNtBC0j6rEB0S97hhPKjbEjv4Tg,89
-aiauto/constants.py,sha256=rBibGOQHHrdkwaai92-3I8-N0cu-B4CoCoQbG9-Cl8k,821
-aiauto/core.py,sha256=eEwit5oL8DIfglOVe2km_7MAtuZquEd5Xvkbq6EaW9o,9945
-aiauto/http_client.py,sha256=v_nPdb-2tIeH1XrOYqzMGvFfXLKEDbQoSaQYPsB0Hik,2587
-aiauto/serializer.py,sha256=nT2F-Jyrd_3uw1QhlrfMx8vprNsL7OF7MCJYBKhkwoY,1905
-aiauto_client-0.1.13.dist-info/METADATA,sha256=21_Kdp2DmLjhe5dR2K4F5UqbvJ229oRsvnk5fEKwcJA,25022
-aiauto_client-0.1.13.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
-aiauto_client-0.1.13.dist-info/top_level.txt,sha256=Sk2ctO9_Bf_tAPwq1x6Vfl6OuL29XzwMTO4F_KG6oJE,7
-aiauto_client-0.1.13.dist-info/RECORD,,
{aiauto_client-0.1.13.dist-info → aiauto_client-0.1.15.dist-info}/WHEEL
File without changes
{aiauto_client-0.1.13.dist-info → aiauto_client-0.1.15.dist-info}/top_level.txt
File without changes