aiauto-client 0.1.9__py3-none-any.whl → 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiauto/_config.py +1 -2
- aiauto/core.py +4 -80
- aiauto/http_client.py +2 -2
- {aiauto_client-0.1.9.dist-info → aiauto_client-0.1.10.dist-info}/METADATA +138 -4
- aiauto_client-0.1.10.dist-info/RECORD +10 -0
- aiauto_client-0.1.9.dist-info/RECORD +0 -10
- {aiauto_client-0.1.9.dist-info → aiauto_client-0.1.10.dist-info}/WHEEL +0 -0
- {aiauto_client-0.1.9.dist-info → aiauto_client-0.1.10.dist-info}/top_level.txt +0 -0
aiauto/_config.py
CHANGED
aiauto/core.py
CHANGED
@@ -31,16 +31,8 @@ class AIAutoController:
         if not host_external:
             raise RuntimeError("No storage host returned from EnsureWorkspace")

-
-
-            host, port_str = host_external.rsplit(':', 1)
-            port = int(port_str)
-        else:
-            host = host_external
-            port = 443  # Default to HTTPS port
-
-        # Create storage with TLS support for port 443
-        self.storage = self._create_storage_with_tls(host, port)
+        host, port = host_external.split(':')
+        self.storage = optuna.storages.GrpcStorageProxy(host=host, port=int(port))

         # Store the internal host for CRD usage (if needed later)
         self.storage_host_internal = response.get('journalGrpcStorageProxyHostInternal', '')
@@ -49,7 +41,7 @@ class AIAutoController:
         except Exception as e:
             raise RuntimeError(
                 f"Failed to initialize workspace: {e}\n"
-                "Please delete and reissue your token from the web dashboard at https://dashboard.aiauto.pangyo.ainode.ai"
+                "Please delete and reissue your token from the web dashboard at https://dashboard.common.aiauto.pangyo.ainode.ai"
             ) from e

         # artifact storage
@@ -57,74 +49,6 @@ class AIAutoController:
         self.artifact_store = optuna.artifacts.FileSystemArtifactStore('./artifacts')
         self.tmp_dir = tempfile.mkdtemp(prefix=f'ai_auto_tmp_')

-    def _create_storage_with_tls(self, host: str, port: int):
-        """Create GrpcStorageProxy with automatic TLS detection based on port"""
-        # Port 13000 = internal (plain), Port 443 = external (TLS)
-        if port != 443:
-            # Plain gRPC for internal connections
-            return optuna.storages.GrpcStorageProxy(host=host, port=port)
-
-        # TLS connection for external access (port 443)
-        import grpc
-        creds = grpc.ssl_channel_credentials()
-
-        # Try different TLS parameter names for Optuna version compatibility
-        # Try 1: channel_credentials parameter (newer Optuna versions)
-        try:
-            return optuna.storages.GrpcStorageProxy(
-                host=host,
-                port=port,
-                channel_credentials=creds
-            )
-        except TypeError:
-            pass
-
-        # Try 2: ssl boolean parameter
-        try:
-            return optuna.storages.GrpcStorageProxy(
-                host=host,
-                port=port,
-                ssl=True
-            )
-        except TypeError:
-            pass
-
-        # Try 3: use_tls parameter
-        try:
-            return optuna.storages.GrpcStorageProxy(
-                host=host,
-                port=port,
-                use_tls=True
-            )
-        except TypeError:
-            pass
-
-        # Try 4: secure parameter
-        try:
-            return optuna.storages.GrpcStorageProxy(
-                host=host,
-                port=port,
-                secure=True
-            )
-        except TypeError:
-            pass
-
-        # Fallback: try to create secure channel manually
-        try:
-            channel = grpc.secure_channel(f"{host}:{port}", creds)
-            # Some Optuna versions might accept a channel directly
-            return optuna.storages.GrpcStorageProxy(channel=channel)
-        except (TypeError, AttributeError):
-            pass
-
-        # If all attempts fail, raise informative error
-        raise RuntimeError(
-            f"Failed to create TLS connection to {host}:{port}. "
-            "GrpcStorageProxy TLS parameters not recognized. "
-            "Please check Optuna version compatibility. "
-            "Tried: channel_credentials, ssl, use_tls, secure, channel parameters."
-        )
-
     def get_storage(self):
         return self.storage

@@ -253,7 +177,7 @@ class StudyWrapper:
         except Exception as e:
             raise RuntimeError(
                 "Failed to get study. If this persists, please delete and reissue your token "
-                "from the web dashboard at https://dashboard.aiauto.pangyo.ainode.ai"
+                "from the web dashboard at https://dashboard.common.aiauto.pangyo.ainode.ai"
             ) from e
         return self._study

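Taken together, the core.py hunks drop the TLS auto-detection path and connect to the storage proxy over plain gRPC: the `host:port` string from EnsureWorkspace is split once and passed straight to `GrpcStorageProxy`. Below is a minimal sketch of that new connection path; the `connect_plain_grpc` helper name and the example endpoint are illustrative (the endpoint follows the `{userID}.journal-grpc-storage-proxy…:13000` format described in the 0.1.10 METADATA further down), not part of the package.

```python
import optuna


def connect_plain_grpc(host_external: str) -> optuna.storages.GrpcStorageProxy:
    # Mirrors the 0.1.10 logic: split once on ':' and connect without TLS (h2c).
    host, port = host_external.split(':')
    return optuna.storages.GrpcStorageProxy(host=host, port=int(port))


# Hypothetical endpoint in the documented "host:port" form (plain gRPC on 13000).
storage = connect_plain_grpc('user123.journal-grpc-storage-proxy.aiauto.pangyo.ainode.ai:13000')
study = optuna.create_study(study_name='exp1', storage=storage, load_if_exists=True)
```

Note that `split(':')` assumes exactly one colon in the endpoint, whereas the removed 0.1.9 code used `rsplit(':', 1)` and fell back to port 443 with TLS when no port was given.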
aiauto/http_client.py
CHANGED
@@ -13,8 +13,8 @@ class ConnectRPCClient:
         if base_url:
             self.base_url = base_url
         else:
-            # AIAUTO_API_TARGET is like "api.aiauto.pangyo.ainode.ai:443"
-            # Convert to "https://api.aiauto.pangyo.ainode.ai"
+            # AIAUTO_API_TARGET is like "api.common.aiauto.pangyo.ainode.ai:443"
+            # Convert to "https://api.common.aiauto.pangyo.ainode.ai"
             host = AIAUTO_API_TARGET.split(':')[0]
             self.base_url = f"https://{host}"

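This http_client.py hunk only rewrites the comments for the new `common` domain; the derivation of `base_url` itself is unchanged. For reference, a standalone sketch of that derivation, with the target value taken from the updated comment (the module-level constant here is illustrative):

```python
# Derive an HTTPS base URL from a "host:port" API target, as ConnectRPCClient does.
AIAUTO_API_TARGET = "api.common.aiauto.pangyo.ainode.ai:443"  # value from the updated comment

host = AIAUTO_API_TARGET.split(':')[0]  # strip the ":443" suffix
base_url = f"https://{host}"            # -> "https://api.common.aiauto.pangyo.ainode.ai"
print(base_url)
```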
{aiauto_client-0.1.9.dist-info → aiauto_client-0.1.10.dist-info}/METADATA
CHANGED
@@ -1,11 +1,11 @@
 Metadata-Version: 2.1
 Name: aiauto-client
-Version: 0.1.9
+Version: 0.1.10
 Summary: AI Auto HPO (Hyperparameter Optimization) Client Library
 Author-email: AIAuto Team <ainode@zeroone.ai>
-Project-URL: Homepage, https://dashboard.aiauto.pangyo.ainode.ai
-Project-URL: Repository, https://dashboard.aiauto.pangyo.ainode.ai
-Project-URL: Documentation, https://dashboard.aiauto.pangyo.ainode.ai
+Project-URL: Homepage, https://dashboard.common.aiauto.pangyo.ainode.ai
+Project-URL: Repository, https://dashboard.common.aiauto.pangyo.ainode.ai
+Project-URL: Documentation, https://dashboard.common.aiauto.pangyo.ainode.ai
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Science/Research
@@ -481,3 +481,137 @@ study.tell(trial, ret)
 ```bash
 make build push
 ```
+
+
+## Guide for LLMs (for code assistants / agents)
+
+This section is a set of rules for **code assistants/agents** to follow when driving AIAuto automatically.
+Human users should follow the main guide above instead.
+
+### 1) Purpose and usage flow (summary)
+- AIAuto is a Python client for **running Optuna distributed on Kubernetes**.
+- Two flows are supported:
+  1) `StudyWrapper.optimize(...)` → remote (K8s) parallel execution
+  2) `StudyWrapper.get_study()` → load the real `optuna.Study`, then use **ask/tell** directly
+
+### 2) Network/domain rules (required)
+- **Connect RPC (HTTPS/443)**: `https://api.aiauto.pangyo.ainode.ai`
+- **Optuna Storage (gRPC, plaintext h2c/13000)**:
+  `{userID}.journal-grpc-storage-proxy.aiauto.pangyo.ainode.ai:13000`
+  → **always plaintext (h2c)**. Do **not** attempt 443/TLS.
+- Dashboard (web): `https://dashboard.aiauto.pangyo.ainode.ai`
+
+### 3) Call order (decision tree)
+1) Create the controller: `AIAutoController(token)` → validates the token & obtains the workspace/endpoints
+2) Create a study: `controller.create_study(study_name, direction=… | directions=[…])`
+   - Constraint: use **only one** of `direction` **or** `directions`
+3) Branch:
+   - **Distributed execution**: `study.optimize(objective, n_trials, parallelism, …)`
+   - **Native control**: `real = study.get_study()`, then `real.ask() / real.tell(...)`
+
+### 4) Argument/authoring constraints (invariants an LLM must keep)
+- `direction` **XOR** `directions` (never both)
+- `requirements_file` **XOR** `requirements_list` (never both)
+- Rules for writing `objective`:
+  - put all **imports inside the function**
+  - **no type hints** in the signature (e.g. do not use `trial: optuna.trial.Trial`)
+  - do not depend on local file paths (download/create them remotely)
+
+### 5) Minimal examples
+
+#### (A) Distributed execution
+```python
+import aiauto, optuna
+
+controller = aiauto.AIAutoController('aiauto_xxx')
+study = controller.create_study('exp1', direction='minimize')
+
+def objective(trial):
+    import numpy as np
+    x = trial.suggest_float('x', -10, 10)
+    return (x - 1.23) ** 2
+
+study.optimize(
+    objective,
+    n_trials=64,
+    parallelism=8,
+    requirements_list=['numpy'],
+)
+```
+
+#### (B) ask/tell (using the real optuna.Study)
+```python
+import aiauto, optuna
+
+controller = aiauto.AIAutoController('aiauto_xxx')
+sw = controller.create_study('manual', direction='minimize')
+
+real = sw.get_study()  # load the real optuna.Study (gRPC: h2c 13000)
+t = real.ask()
+x = t.suggest_float('x', -10, 10)
+real.tell(t, (x - 2) ** 2)
+```
+
+### 6) Error response templates (for automated replies)
+- Initialization/token problem detected:
+  - "Reissue your token from the web dashboard and try again: https://dashboard.aiauto.pangyo.ainode.ai"
+- `get_study()` connection failure (storage not ready yet):
+  - "The workspace/storage may still be provisioning. Please retry shortly or check its status on the dashboard."
+
+### 7) Things to avoid
+- Do **not** try to connect gRPC over **443/TLS** (always **13000/h2c**).
+- Do **not** specify `direction & directions` or `requirements_file & requirements_list` together.
+- `objective`: imports inside the function, no type hints, no reliance on local file paths.
+
+### 8) Optuna compatibility (Sampler/Pruner & Study API)
+- **Sampler/Pruner compatibility**: pass **native Optuna** `sampler`/`pruner` instances to `create_study()` as-is.
+  The sampler/pruner set at study creation is stored in the **Optuna Journal Storage** and is **applied with the same settings automatically** when the study is loaded with `optuna.load_study()` in the remote Pods. No separate restoration logic is needed.
+- **Using the native Study**: `StudyWrapper.get_study()` returns a **real `optuna.Study` object**,
+  so the **Optuna API can be used as-is**: `best_trial`, `best_trials` (multi-objective), `trials_dataframe()`, `get_trials()`, `ask()/tell()`, and so on.
+
+**Official documentation links**
+- Samplers: https://optuna.readthedocs.io/en/stable/reference/samplers/index.html
+- Pruners: https://optuna.readthedocs.io/en/stable/reference/pruners.html
+- Study API: https://optuna.readthedocs.io/en/stable/reference/generated/optuna.study.Study.html
+
+#### Example: passing a Sampler/Pruner directly
+```python
+import optuna, aiauto
+
+controller = aiauto.AIAutoController('aiauto_xxx')
+study = controller.create_study(
+    study_name='cnn',
+    direction='minimize',
+    sampler=optuna.samplers.TPESampler(seed=42),
+    pruner=optuna.pruners.MedianPruner(n_startup_trials=5),
+)
+
+def objective(trial):
+    import numpy as np
+    lr = trial.suggest_float('lr', 1e-5, 1e-1, log=True)
+    return (np.log10(lr) + 2) ** 2
+
+study.optimize(objective, n_trials=50, parallelism=4)
+```
+
+#### Example: using the Optuna API after get_study()
+```python
+# load the real optuna.Study
+real = study.get_study()
+
+# single objective: best_trial
+print('best value:', real.best_trial.value)
+print('best params:', real.best_trial.params)
+
+# (optional) multi-objective: Pareto front
+# print(real.best_trials)  # use with multi-objective studies
+
+# DataFrame for analysis/visualization
+df = real.trials_dataframe(attrs=('number', 'value', 'params', 'state'))
+print(df.head())
+
+# fine-grained control: ask/tell
+t = real.ask()
+x = t.suggest_float('x', -10, 10)
+real.tell(t, (x - 1.23) ** 2)
+```
aiauto_client-0.1.10.dist-info/RECORD
ADDED
@@ -0,0 +1,10 @@
+aiauto/__init__.py,sha256=sF7sJaXg7-MqolSYLxsaXAir1dBzARhXLrHo7zLsupg,345
+aiauto/_config.py,sha256=hTFh2bH9m-HuX6QCpNtBC0j6rEB0S97hhPKjbEjv4Tg,89
+aiauto/constants.py,sha256=rBibGOQHHrdkwaai92-3I8-N0cu-B4CoCoQbG9-Cl8k,821
+aiauto/core.py,sha256=eEwit5oL8DIfglOVe2km_7MAtuZquEd5Xvkbq6EaW9o,9945
+aiauto/http_client.py,sha256=gVDlgnqjC6FcAbZ4rsjzZHfkusotixOShDCdWBdd-sk,2100
+aiauto/serializer.py,sha256=KqQeH0xp4LQuZE6r8kzXQsWY6QgC3hqn8MSuWTt4QmU,1938
+aiauto_client-0.1.10.dist-info/METADATA,sha256=UQAtHz3UFAgqSYIIC3eusxBjRtmnaDuNLSyfQavj2BI,24554
+aiauto_client-0.1.10.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+aiauto_client-0.1.10.dist-info/top_level.txt,sha256=Sk2ctO9_Bf_tAPwq1x6Vfl6OuL29XzwMTO4F_KG6oJE,7
+aiauto_client-0.1.10.dist-info/RECORD,,
aiauto_client-0.1.9.dist-info/RECORD
DELETED
@@ -1,10 +0,0 @@
-aiauto/__init__.py,sha256=sF7sJaXg7-MqolSYLxsaXAir1dBzARhXLrHo7zLsupg,345
-aiauto/_config.py,sha256=DaRTIZlph9T3iuW-Cy4fkw8i3bXB--gMtW947SLZZNs,159
-aiauto/constants.py,sha256=rBibGOQHHrdkwaai92-3I8-N0cu-B4CoCoQbG9-Cl8k,821
-aiauto/core.py,sha256=caGUQEi4KTaov7Brak7o3O_27LZDNdYqUWmXDpusQaU,12553
-aiauto/http_client.py,sha256=t1gxeM5-d5bsVoFWgaNcTrt_WWUXuMuxge9gDlEqhoA,2086
-aiauto/serializer.py,sha256=KqQeH0xp4LQuZE6r8kzXQsWY6QgC3hqn8MSuWTt4QmU,1938
-aiauto_client-0.1.9.dist-info/METADATA,sha256=sIl-_c3kOSgxyO2wL3apNAK5_5t5KRZoBPCIeBkoxz8,19071
-aiauto_client-0.1.9.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
-aiauto_client-0.1.9.dist-info/top_level.txt,sha256=Sk2ctO9_Bf_tAPwq1x6Vfl6OuL29XzwMTO4F_KG6oJE,7
-aiauto_client-0.1.9.dist-info/RECORD,,
{aiauto_client-0.1.9.dist-info → aiauto_client-0.1.10.dist-info}/WHEEL
File without changes
{aiauto_client-0.1.9.dist-info → aiauto_client-0.1.10.dist-info}/top_level.txt
File without changes