agentscope-runtime 0.1.6__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- agentscope_runtime/common/container_clients/__init__.py +0 -0
- agentscope_runtime/{sandbox/manager → common}/container_clients/kubernetes_client.py +546 -6
- agentscope_runtime/engine/__init__.py +12 -0
- agentscope_runtime/engine/agents/agentscope_agent.py +130 -10
- agentscope_runtime/engine/agents/agno_agent.py +8 -10
- agentscope_runtime/engine/agents/langgraph_agent.py +52 -9
- agentscope_runtime/engine/app/__init__.py +6 -0
- agentscope_runtime/engine/app/agent_app.py +239 -0
- agentscope_runtime/engine/app/base_app.py +181 -0
- agentscope_runtime/engine/app/celery_mixin.py +92 -0
- agentscope_runtime/engine/deployers/__init__.py +13 -0
- agentscope_runtime/engine/deployers/adapter/responses/__init__.py +0 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_adapter_utils.py +2890 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_agent_adapter.py +51 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_protocol_adapter.py +314 -0
- agentscope_runtime/engine/deployers/base.py +1 -0
- agentscope_runtime/engine/deployers/cli_fc_deploy.py +203 -0
- agentscope_runtime/engine/deployers/kubernetes_deployer.py +272 -0
- agentscope_runtime/engine/deployers/local_deployer.py +414 -501
- agentscope_runtime/engine/deployers/modelstudio_deployer.py +838 -0
- agentscope_runtime/engine/deployers/utils/__init__.py +0 -0
- agentscope_runtime/engine/deployers/utils/deployment_modes.py +14 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/__init__.py +8 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/docker_image_builder.py +429 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +240 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/runner_image_factory.py +306 -0
- agentscope_runtime/engine/deployers/utils/package_project_utils.py +1163 -0
- agentscope_runtime/engine/deployers/utils/service_utils/__init__.py +9 -0
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +1064 -0
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_templates.py +157 -0
- agentscope_runtime/engine/deployers/utils/service_utils/process_manager.py +268 -0
- agentscope_runtime/engine/deployers/utils/service_utils/service_config.py +75 -0
- agentscope_runtime/engine/deployers/utils/service_utils/service_factory.py +220 -0
- agentscope_runtime/engine/deployers/utils/service_utils/standalone_main.py.j2 +211 -0
- agentscope_runtime/engine/deployers/utils/wheel_packager.py +389 -0
- agentscope_runtime/engine/helpers/agent_api_builder.py +651 -0
- agentscope_runtime/engine/runner.py +76 -35
- agentscope_runtime/engine/schemas/agent_schemas.py +112 -2
- agentscope_runtime/engine/schemas/embedding.py +37 -0
- agentscope_runtime/engine/schemas/modelstudio_llm.py +310 -0
- agentscope_runtime/engine/schemas/oai_llm.py +538 -0
- agentscope_runtime/engine/schemas/realtime.py +254 -0
- agentscope_runtime/engine/services/tablestore_memory_service.py +4 -1
- agentscope_runtime/engine/tracing/__init__.py +9 -3
- agentscope_runtime/engine/tracing/asyncio_util.py +24 -0
- agentscope_runtime/engine/tracing/base.py +66 -34
- agentscope_runtime/engine/tracing/local_logging_handler.py +45 -31
- agentscope_runtime/engine/tracing/message_util.py +528 -0
- agentscope_runtime/engine/tracing/tracing_metric.py +20 -8
- agentscope_runtime/engine/tracing/tracing_util.py +130 -0
- agentscope_runtime/engine/tracing/wrapper.py +794 -169
- agentscope_runtime/sandbox/box/base/base_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/browser/browser_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/dummy/dummy_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/gui/gui_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/training_box/training_box.py +0 -42
- agentscope_runtime/sandbox/client/http_client.py +52 -18
- agentscope_runtime/sandbox/constant.py +3 -0
- agentscope_runtime/sandbox/custom/custom_sandbox.py +2 -1
- agentscope_runtime/sandbox/custom/example.py +2 -1
- agentscope_runtime/sandbox/enums.py +0 -1
- agentscope_runtime/sandbox/manager/sandbox_manager.py +29 -22
- agentscope_runtime/sandbox/model/container.py +6 -0
- agentscope_runtime/sandbox/registry.py +1 -1
- agentscope_runtime/sandbox/tools/tool.py +4 -0
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/METADATA +103 -59
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/RECORD +87 -52
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/entry_points.txt +1 -0
- /agentscope_runtime/{sandbox/manager/container_clients → common}/__init__.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/__init__.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/base_mapping.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/base_queue.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/base_set.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_mapping.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_queue.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_set.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/redis_mapping.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/redis_queue.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/redis_set.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/container_clients/agentrun_client.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/container_clients/base_client.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/container_clients/docker_client.py +0 -0
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/WHEEL +0 -0
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/top_level.txt +0 -0
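The listing above also records a package-layout change: the container_clients and collections modules moved from agentscope_runtime.sandbox.manager to agentscope_runtime.common. A minimal sketch of the corresponding import update for downstream code follows; the DockerClient and KubernetesClient class names are assumptions inferred from the file names, not confirmed by this diff.

# Hypothetical import update for the 0.1.6 -> 0.2.0 module move.
# Old (0.1.6):
# from agentscope_runtime.sandbox.manager.container_clients.docker_client import DockerClient
# New (0.2.0):
from agentscope_runtime.common.container_clients.docker_client import DockerClient
from agentscope_runtime.common.container_clients.kubernetes_client import KubernetesClient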
agentscope_runtime/engine/deployers/modelstudio_deployer.py

@@ -0,0 +1,838 @@
# -*- coding: utf-8 -*-
# pylint:disable=too-many-nested-blocks, too-many-return-statements,
# pylint:disable=too-many-branches, too-many-statements, try-except-raise
# pylint:disable=ungrouped-imports, arguments-renamed, protected-access
#
# flake8: noqa: E501
import logging
import os
import time
import json
from pathlib import Path
from typing import Dict, Optional, List, Union, Tuple

import requests
from pydantic import BaseModel, Field

from .adapter.protocol_adapter import ProtocolAdapter
from .base import DeployManager
from .local_deployer import LocalDeployManager
from .utils.service_utils import (
    ServicesConfig,
)
from .utils.wheel_packager import (
    generate_wrapper_project,
    build_wheel,
    default_deploy_name,
)
from ..runner import Runner

logger = logging.getLogger(__name__)


try:  # Lazy optional imports; validated at runtime
    import alibabacloud_oss_v2 as oss  # type: ignore
    from alibabacloud_oss_v2.models import PutBucketRequest, PutObjectRequest
    from alibabacloud_bailian20231229.client import Client as ModelstudioClient
    from alibabacloud_tea_openapi import models as open_api_models
    from alibabacloud_bailian20231229 import models as ModelstudioTypes
    from alibabacloud_tea_util import models as util_models
except Exception:
    oss = None
    PutBucketRequest = None
    PutObjectRequest = None
    ModelstudioClient = None
    open_api_models = None
    ModelstudioTypes = None
    util_models = None


class OSSConfig(BaseModel):
    region: str = Field("cn-hangzhou", description="OSS region")
    access_key_id: Optional[str] = None
    access_key_secret: Optional[str] = None
    bucket_prefix: str = Field(
        "tmpbucket-agentscope-runtime",
        description="Prefix for temporary buckets if creation is needed",
    )

    @classmethod
    def from_env(cls) -> "OSSConfig":
        return cls(
            region=os.environ.get("OSS_REGION", "cn-hangzhou"),
            access_key_id=os.environ.get(
                "OSS_ACCESS_KEY_ID",
                os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_ID"),
            ),
            access_key_secret=os.environ.get(
                "OSS_ACCESS_KEY_SECRET",
                os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_SECRET"),
            ),
        )

    def ensure_valid(self) -> None:
        # allow fallback to Alibaba Cloud AK/SK via from_env()
        if not self.access_key_id or not self.access_key_secret:
            raise RuntimeError(
                "Missing AccessKey for OSS. Set either OSS_ACCESS_KEY_ID/OSS_ACCESS_KEY_SECRET "
                "or ALIBABA_CLOUD_ACCESS_KEY_ID/ALIBABA_CLOUD_ACCESS_KEY_SECRET.",
            )


class ModelstudioConfig(BaseModel):
    endpoint: str = Field(
        "bailian.cn-beijing.aliyuncs.com",
        description="Modelstudio service endpoint",
    )
    workspace_id: Optional[str] = None
    access_key_id: Optional[str] = None
    access_key_secret: Optional[str] = None
    dashscope_api_key: Optional[str] = None

    @classmethod
    def from_env(cls) -> "ModelstudioConfig":
        raw_ws = os.environ.get("MODELSTUDIO_WORKSPACE_ID")
        ws = raw_ws.strip() if isinstance(raw_ws, str) else ""
        resolved_ws = ws if ws else "default"
        return cls(
            endpoint=os.environ.get(
                "MODELSTUDIO_ENDPOINT",
                "bailian.cn-beijing.aliyuncs.com",
            ),
            workspace_id=resolved_ws,
            access_key_id=os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_ID"),
            access_key_secret=os.environ.get(
                "ALIBABA_CLOUD_ACCESS_KEY_SECRET",
            ),
            dashscope_api_key=os.environ.get(
                "DASHSCOPE_API_KEY",
            ),
        )

    def ensure_valid(self) -> None:
        missing = []
        if not self.access_key_id:
            missing.append("ALIBABA_CLOUD_ACCESS_KEY_ID")
        if not self.access_key_secret:
            missing.append("ALIBABA_CLOUD_ACCESS_KEY_SECRET")
        if missing:
            raise RuntimeError(
                f"Missing required Modelstudio env vars: {', '.join(missing)}",
            )


def _assert_cloud_sdks_available():
    if oss is None or ModelstudioClient is None:
        raise RuntimeError(
            "Cloud SDKs not installed. Please install: "
            "alibabacloud-oss-v2 alibabacloud-bailian20231229 "
            "alibabacloud-credentials alibabacloud-tea-openapi alibabacloud-tea-util",
        )


def _oss_get_client(oss_cfg: OSSConfig):
    oss_cfg.ensure_valid()
    # Ensure OSS SDK can read credentials from environment variables.
    # If OSS_* are not set, populate them from resolved config (which may
    # already have fallen back to ALIBABA_CLOUD_* as per from_env()).
    if not os.environ.get("OSS_ACCESS_KEY_ID") and oss_cfg.access_key_id:
        os.environ["OSS_ACCESS_KEY_ID"] = str(oss_cfg.access_key_id)
    if (
        not os.environ.get("OSS_ACCESS_KEY_SECRET")
        and oss_cfg.access_key_secret
    ):
        os.environ["OSS_ACCESS_KEY_SECRET"] = str(oss_cfg.access_key_secret)

    credentials_provider = (
        oss.credentials.EnvironmentVariableCredentialsProvider()
    )
    cfg = oss.config.load_default()
    cfg.credentials_provider = credentials_provider
    cfg.region = oss_cfg.region
    return oss.Client(cfg)


async def _oss_create_bucket_if_not_exists(client, bucket_name: str) -> None:
    try:
        exists = client.is_bucket_exist(bucket=bucket_name)
    except Exception:
        exists = False
    if not exists:
        req = PutBucketRequest(
            bucket=bucket_name,
            acl="private",
            create_bucket_configuration=oss.CreateBucketConfiguration(
                storage_class="IA",
            ),
        )
        try:
            put_bucket_result = client.put_bucket(req)
            logger.info(
                f"put bucket status code: {put_bucket_result.status_code},"
                f" request id: {put_bucket_result.request_id}",
            )
        except oss.exceptions.OperationError as e:
            logger.error(
                "OSS PutBucket failed: Http Status: %s, ErrorCode: %s, RequestId: %s, Message: %s",
                getattr(e, "http_code", None),
                getattr(e, "error_code", None),
                getattr(e, "request_id", None),
                getattr(e, "message", str(e)),
            )
            raise
        except Exception as e:
            logger.error("Unexpected put bucket failure: %s", e, exc_info=True)
            raise
    result = client.put_bucket_tags(
        oss.PutBucketTagsRequest(
            bucket=bucket_name,
            tagging=oss.Tagging(
                tag_set=oss.TagSet(
                    tags=[
                        oss.Tag(
                            key="bailian-high-code-deploy-oss-access",
                            value="ReadAndAdd",
                        ),
                    ],
                ),
            ),
        ),
    )
    logger.info(
        f"put bucket tag status code: {result.status_code}, request id: {result.request_id}",
    )


async def _oss_put_and_presign(
    client,
    bucket_name: str,
    object_key: str,
    file_bytes: bytes,
) -> str:
    import datetime as _dt

    put_req = PutObjectRequest(
        bucket=bucket_name,
        key=object_key,
        body=file_bytes,
    )
    client.put_object(put_req)
    pre = client.presign(
        oss.GetObjectRequest(bucket=bucket_name, key=object_key),
        expires=_dt.timedelta(minutes=180),
    )
    return pre.url


def _upload_to_oss_with_credentials(
    api_response,
    file_path,
) -> str:
    response_data = (
        json.loads(api_response)
        if isinstance(api_response, str)
        else api_response
    )

    try:
        body = response_data["body"]
        data = body.get("Data")
        if data is None:
            messages = [
                "\n❌ Configuration Error: "
                "The current RAM user is not assigned to target workspace.",
                "Bailian requires RAM users to be associated with "
                "at least one workspace to use temporary storage.",
                "\n🔧 How to resolve:",
                "1. Ask the primary account to log in to the "
                "Bailian Console: https://bailian.console.aliyun.com",
                "2. Go to [Permission Management]",
                "3. Go to [Add User]",
                "4. Assign the user to a workspace",
                "\n💡 Note: If you are not the primary account holder,"
                " please contact your administrator to complete this step.",
                "=" * 80,
            ]

            for msg in messages:
                logger.error(msg)

            raise ValueError(
                "RAM user is not assigned to any workspace in Bailian",
            )
        param = data["Param"]
        signed_url = param["Url"]
        headers = param["Headers"]
    except KeyError as e:
        raise ValueError(f"Missing expected field in API response: {e}") from e
    try:
        with open(file_path, "rb") as file:
            response = requests.put(signed_url, data=file, headers=headers)
            logger.info("OSS upload status code: %d", response.status_code)
            response.raise_for_status()  # Raises for 4xx/5xx
            logger.info("File uploaded successfully using requests")
            return data["TempStorageLeaseId"]
    except Exception as e:
        logger.error("Failed to upload file to OSS: %s", e)
        raise


def _get_presign_url_and_upload_to_oss(
    cfg: ModelstudioConfig,
    wheel_path: Path,
) -> str:
    """
    Request a temporary storage lease, obtain a pre-signed OSS URL, and upload the file.

    Args:
        cfg: ModelStudio configuration with credentials and endpoint.
        wheel_path: Path to the wheel file to upload.

    Returns:
        The TempStorageLeaseId returned by the service.

    Raises:
        Exception: Any error from the SDK or upload process (not swallowed).
    """
    try:
        config = open_api_models.Config(
            access_key_id=cfg.access_key_id,
            access_key_secret=cfg.access_key_secret,
        )
        config.endpoint = cfg.endpoint
        client_modelstudio = ModelstudioClient(config)

        filename = wheel_path.name
        size = wheel_path.stat().st_size

        apply_temp_storage_lease_request = (
            ModelstudioTypes.ApplyTempStorageLeaseRequest(
                file_name=filename,
                size_in_bytes=size,
            )
        )
        runtime = util_models.RuntimeOptions()
        headers = {}
        workspace_id = getattr(cfg, "workspace_id", "default")
        try:
            response = (
                client_modelstudio.apply_temp_storage_lease_with_options(
                    workspace_id,
                    apply_temp_storage_lease_request,
                    headers,
                    runtime,
                )
            )
        except Exception as error:
            logger.error(
                "Error during temporary storage lease or upload: %s",
                error,
            )
            error_code = None
            recommend_url = None
            if hasattr(error, "code"):
                error_code = error.code
            if hasattr(error, "data") and isinstance(error.data, dict):
                recommend_url = error.data.get("Recommend")

            if error_code == "NoPermission":
                messages = [
                    "\n❌ Permission Denied (NoPermission)",
                    "The current account does not have permission to apply "
                    "for temporary storage (ApplyTempStorageLease).",
                    "\n🔧 How to resolve:",
                    "1. Ask the primary account holder (or an administrator)"
                    " to grant your RAM user the following permission:",
                    " - Action: `AliyunBailianDataFullAccess`",
                    "\n2. Steps to grant permission:",
                    " - Go to Alibaba Cloud RAM Console: https://ram.console.aliyun.com/users",
                    " - Locate your RAM user",
                    " - Click 'Add Permissions' and attach a policy that includes "
                    "`AliyunBailianDataFullAccess`",
                    "\n3. For further diagnostics:",
                ]
                official_doc_link = "https://help.aliyun.com/zh/ram/"
                if recommend_url:
                    messages.append(
                        f" - Official troubleshooting link: {recommend_url}",
                    )
                else:
                    messages.append(
                        " - Visit the Alibaba Cloud API troubleshooting page",
                    )
                messages.append(
                    f" - Official document link: {official_doc_link or 'N/A'}",
                )
                messages.append(
                    "\n💡 Note: If you are not an administrator, please "
                    "contact your cloud account administrator for assistance.",
                )
                messages.append("=" * 80)

                # Emit the multi-line notice as one log record per line for easier parsing
                for msg in messages:
                    logger.error(msg)

            logger.error("Original error details: %s", error)
            raise

        temp_storage_lease_id = _upload_to_oss_with_credentials(
            response.to_map(),
            wheel_path,
        )
        return temp_storage_lease_id

    except Exception as error:
        # Log detailed error information
        logger.error(
            "Error during temporary storage upload: %s",
            error,
        )
        if hasattr(error, "message"):
            logger.error("Error message: %s", error.message)
        if hasattr(error, "data") and isinstance(error.data, dict):
            recommend = error.data.get("Recommend")
            if recommend:
                logger.error("Diagnostic recommendation: %s", recommend)
        # Re-raise the exception to avoid silent failures
        raise


async def _modelstudio_deploy(
    cfg: ModelstudioConfig,
    file_url: str,
    filename: str,
    deploy_name: str,
    agent_id: Optional[str] = None,
    agent_desc: Optional[str] = None,
    telemetry_enabled: bool = True,
) -> str:
    cfg.ensure_valid()
    config = open_api_models.Config(
        access_key_id=cfg.access_key_id,
        access_key_secret=cfg.access_key_secret,
    )
    config.endpoint = cfg.endpoint
    client_modelstudio = ModelstudioClient(config)
    req = ModelstudioTypes.HighCodeDeployRequest(
        agent_desc=agent_desc,
        agent_id=agent_id,
        source_code_name=filename,
        source_code_oss_url=file_url,
        agent_name=deploy_name,
        telemetry_enabled=telemetry_enabled,
    )
    runtime = util_models.RuntimeOptions()
    headers: Dict[str, str] = {}
    resp = client_modelstudio.high_code_deploy_with_options(
        cfg.workspace_id,
        req,
        headers,
        runtime,
    )

    # logger.info(json.dumps(resp.to_map(), indent=2, ensure_ascii=False))
    request_id = resp.to_map()["headers"].get("x-acs-request-id")
    logger.info("deploy request id: %s", request_id)

    # Extract deploy identifier string from response
    def _extract_deploy_identifier(response_obj) -> str:
        try:
            if isinstance(response_obj, str):
                return response_obj
            # Tea responses often have a 'body' that can be a dict or model
            body = getattr(response_obj, "body", None)

            # 1) If body is a plain string
            if isinstance(body, str):
                return body
            # 2) If body is a dict, prefer common fields
            if isinstance(body, dict):
                # Explicit error handling: do not build URL on failure
                if isinstance(body.get("success"), bool) and not body.get(
                    "success",
                ):
                    err_code = (
                        body.get("errorCode") or body.get("code") or "unknown"
                    )
                    err_msg = body.get("errorMsg") or body.get("message") or ""
                    raise RuntimeError(
                        f"ModelStudio deploy failed: {err_code} {err_msg}".strip(),
                    )
                for key in ("data", "result", "deployId"):
                    val = body.get(key)
                    if isinstance(val, str) and val:
                        return val
                # Try nested structures
                data_val = body.get("data")
                if isinstance(data_val, dict):
                    for key in ("id", "deployId"):
                        v = data_val.get(key)
                        if isinstance(v, str) and v:
                            return v
            # 3) If body is a Tea model, try to_map()
            if hasattr(body, "to_map") and callable(getattr(body, "to_map")):
                try:
                    m = body.to_map()
                    if isinstance(m, dict):
                        if isinstance(m.get("success"), bool) and not m.get(
                            "success",
                        ):
                            err_code = (
                                m.get("errorCode")
                                or m.get("code")
                                or "unknown"
                            )
                            err_msg = (
                                m.get("errorMsg") or m.get("message") or ""
                            )
                            raise RuntimeError(
                                f"ModelStudio deploy failed: {err_code} {err_msg}".strip(),
                            )
                        for key in ("data", "result", "deployId"):
                            val = m.get(key)
                            if isinstance(val, str) and val:
                                return val
                        d = m.get("data")
                        if isinstance(d, dict):
                            for key in ("id", "deployId"):
                                v = d.get(key)
                                if isinstance(v, str) and v:
                                    return v
                except Exception:
                    raise
            # 4) If response_obj itself is a dict
            if isinstance(response_obj, dict):
                b = response_obj.get("body")
                if isinstance(b, dict):
                    if isinstance(b.get("success"), bool) and not b.get(
                        "success",
                    ):
                        err_code = (
                            b.get("errorCode") or b.get("code") or "unknown"
                        )
                        err_msg = b.get("errorMsg") or b.get("message") or ""
                        raise RuntimeError(
                            f"ModelStudio deploy failed: {err_code} {err_msg}".strip(),
                        )
                    for key in ("data", "result", "deployId"):
                        val = b.get(key)
                        if isinstance(val, str) and val:
                            return val
            # Fallback: return empty to avoid polluting URL with dump
            return ""
        except Exception:  # pragma: no cover - conservative fallback
            # Propagate errors as empty identifier; upper layer logs/raises
            raise

    return _extract_deploy_identifier(resp)


class ModelstudioDeployManager(DeployManager):
    """Deployer for Alibaba Modelstudio Function Compute based agent
    deployment.

    This deployer packages the user project into a wheel, uploads it to OSS,
    and triggers a Modelstudio Full-Code deploy.
    """

    def __init__(
        self,
        oss_config: Optional[OSSConfig] = None,
        modelstudio_config: Optional[ModelstudioConfig] = None,
        build_root: Optional[Union[str, Path]] = None,
    ) -> None:
        super().__init__()
        self.oss_config = oss_config or OSSConfig.from_env()
        self.modelstudio_config = (
            modelstudio_config or ModelstudioConfig.from_env()
        )
        self.build_root = Path(build_root) if build_root else None

    async def _generate_wrapper_and_build_wheel(
        self,
        project_dir: Union[Optional[str], Path],
        cmd: Optional[str] = None,
        deploy_name: Optional[str] = None,
        telemetry_enabled: bool = True,
    ) -> Tuple[Path, str]:
        """
        Validate the arguments, generate the wrapper project, and build the wheel.

        Returns: (wheel_path, name)
        """
        if not project_dir or not cmd:
            raise ValueError(
                "project_dir and cmd are required for "
                "Modelstudio deployment",
            )

        project_dir = Path(project_dir).resolve()
        if not project_dir.is_dir():
            raise FileNotFoundError(f"Project dir not found: {project_dir}")

        name = deploy_name or default_deploy_name()
        proj_root = project_dir.resolve()
        if isinstance(self.build_root, Path):
            effective_build_root = self.build_root.resolve()
        else:
            if self.build_root:
                effective_build_root = Path(self.build_root).resolve()
            else:
                effective_build_root = (
                    proj_root.parent / ".agentscope_runtime_builds"
                ).resolve()

        build_dir = effective_build_root / f"build-{int(time.time())}"
        build_dir.mkdir(parents=True, exist_ok=True)

        logger.info("Generating wrapper project for %s", name)
        wrapper_project_dir, _ = await generate_wrapper_project(
            build_root=build_dir,
            user_project_dir=project_dir,
            start_cmd=cmd,
            deploy_name=name,
            telemetry_enabled=telemetry_enabled,
        )

        logger.info("Building wheel under %s", wrapper_project_dir)
        wheel_path = await build_wheel(wrapper_project_dir)
        return wheel_path, name

    def _generate_env_file(
        self,
        project_dir: Union[str, Path],
        environment: Optional[Dict[str, str]] = None,
        env_filename: str = ".env",
    ) -> Optional[Path]:
        """
        Generate a .env file from environment variables dictionary.

        Args:
            project_dir: The project directory where the .env file will be
                created.
            environment: Dictionary of environment variables to write to
                the .env file.
            env_filename: Name of the env file (default: ".env").

        Returns:
            Path to the created .env file, or None if no environment
            variables provided
        """
        if not environment:
            return None

        project_path = Path(project_dir).resolve()
        if not project_path.exists():
            raise FileNotFoundError(
                f"Project directory not found: {project_path}",
            )

        env_file_path = project_path / env_filename

        try:
            with env_file_path.open("w", encoding="utf-8") as f:
                f.write("# Environment variables used by AgentScope Runtime\n")

                for key, value in environment.items():
                    # Escape special characters and quote values if needed
                    if value is None:
                        continue

                    # Quote values that contain spaces or special characters
                    if " " in str(value) or any(
                        char in str(value)
                        for char in ["$", "`", '"', "'", "\\"]
                    ):
                        # Escape existing quotes and wrap in double quotes
                        escaped_value = (
                            str(value)
                            .replace("\\", "\\\\")
                            .replace('"', '\\"')
                        )
                        f.write(f'{key}="{escaped_value}"\n')
                    else:
                        f.write(f"{key}={value}\n")

            logger.info(f"Generated .env file at: {env_file_path}")
            return env_file_path

        except Exception as e:
            logger.warning(f"Failed to generate .env file: {e}")
            return None

    async def _upload_and_deploy(
        self,
        wheel_path: Path,
        name: str,
        agent_id: Optional[str] = None,
        agent_desc: Optional[str] = None,
        telemetry_enabled: bool = True,
    ) -> Tuple[str, str]:
        logger.info("Uploading wheel to OSS")
        temp_storage_lease_id = _get_presign_url_and_upload_to_oss(
            self.modelstudio_config,
            wheel_path,
        )
        logger.info("Triggering Modelstudio Full-Code deploy for %s", name)
        deploy_identifier = await _modelstudio_deploy(
            agent_desc=agent_desc,
            agent_id=agent_id,
            cfg=self.modelstudio_config,
            file_url=temp_storage_lease_id,
            filename=wheel_path.name,
            deploy_name=name,
            telemetry_enabled=telemetry_enabled,
        )

        def _build_console_url(endpoint: str) -> str:
            # Map API endpoint to console domain (no fragment in base)
            base = (
                "https://pre-bailian.console.aliyun.com/?tab=app#"
                if ("bailian-pre" in endpoint or "pre" in endpoint)
                else "https://bailian.console.aliyun.com/?tab=app#"
            )
            # Optional query can be appended if needed; keep path clean
            return f"{base}/app-center"

        console_url = (
            _build_console_url(
                self.modelstudio_config.endpoint,
            )
            if deploy_identifier
            else ""
        )
        return console_url, deploy_identifier

    async def deploy(
        self,
        runner: Optional[Runner] = None,
        endpoint_path: str = "/process",
        services_config: Optional[Union[ServicesConfig, dict]] = None,
        protocol_adapters: Optional[list[ProtocolAdapter]] = None,
        requirements: Optional[Union[str, List[str]]] = None,
        extra_packages: Optional[List[str]] = None,
        environment: Optional[Dict[str, str]] = None,
        # runtime_config: Optional[Dict] = None,
        # ModelStudio-specific/packaging args (required)
        project_dir: Optional[Union[str, Path]] = None,
        cmd: Optional[str] = None,
        deploy_name: Optional[str] = None,
        skip_upload: bool = False,
        telemetry_enabled: bool = True,
        external_whl_path: Optional[str] = None,
        agent_id: Optional[str] = None,
        agent_desc: Optional[str] = None,
        custom_endpoints: Optional[
            List[Dict]
        ] = None,  # New parameter for custom endpoints
        **kwargs,
    ) -> Dict[str, str]:
        """
        Package the project, upload to OSS and trigger ModelStudio deploy.

        Returns a dict containing wheel_path, resource_name (deploy_name),
        url (console URL when a deploy was triggered), and, when available,
        workspace_id and deploy_id.
        """
        if not agent_id:
            if not runner and not project_dir and not external_whl_path:
                raise ValueError(
                    "Either runner, project_dir, "
                    "or external_whl_path must be provided.",
                )

        # convert services_config to Model body
        if services_config and isinstance(services_config, dict):
            services_config = ServicesConfig(**services_config)

        try:
            if runner:
                agent = runner._agent
                if "agent" in kwargs:
                    kwargs.pop("agent")

                # Create package project for detached deployment
                project_dir = await LocalDeployManager.create_detached_project(
                    agent=agent,
                    endpoint_path=endpoint_path,
                    services_config=services_config,  # type: ignore[arg-type]
                    protocol_adapters=protocol_adapters,
                    custom_endpoints=custom_endpoints,  # Pass custom endpoints
                    requirements=requirements,
                    extra_packages=extra_packages,
                    **kwargs,
                )
                if project_dir:
                    self._generate_env_file(project_dir, environment)
                cmd = "python main.py"
                deploy_name = deploy_name or default_deploy_name()

            if agent_id:
                if not external_whl_path:
                    raise FileNotFoundError(
                        "wheel file not found. "
                        "Please specify your .whl file path by "
                        "'--whl-path <whlpath>' in command line.",
                    )
            # if whl exists then skip the project package method
            if external_whl_path:
                wheel_path = Path(external_whl_path).resolve()
                if not wheel_path.is_file():
                    raise FileNotFoundError(
                        f"External wheel file not found: {wheel_path}",
                    )
                name = deploy_name or default_deploy_name()
                # When updating an existing agent without a deploy_name, keep its current name
                if agent_id and (deploy_name is None):
                    name = None
            else:
                (
                    wheel_path,
                    name,
                ) = await self._generate_wrapper_and_build_wheel(
                    project_dir=project_dir,
                    cmd=cmd,
                    deploy_name=deploy_name,
                    telemetry_enabled=telemetry_enabled,
                )

            console_url = ""
            deploy_identifier = ""
            if not skip_upload:
                # Only require cloud SDKs and credentials when performing upload/deploy
                _assert_cloud_sdks_available()
                self.oss_config.ensure_valid()
                self.modelstudio_config.ensure_valid()
                (
                    console_url,
                    deploy_identifier,
                ) = await self._upload_and_deploy(
                    wheel_path,
                    name,
                    agent_id,
                    agent_desc,
                    telemetry_enabled,
                )

            result: Dict[str, str] = {
                "wheel_path": str(wheel_path),
                "resource_name": name,
                "url": console_url,
            }
            env_ws = os.environ.get("MODELSTUDIO_WORKSPACE_ID")
            if env_ws and env_ws.strip():
                result["workspace_id"] = env_ws.strip()
            if deploy_identifier:
                result["deploy_id"] = deploy_identifier

            return result
        except Exception as e:
            # Print richer error message to improve UX
            err_text = str(e)
            logger.error("Failed to deploy to modelstudio: %s", err_text)
            raise

    async def stop(self) -> None:  # pragma: no cover - not supported yet
        pass

    def get_status(self) -> str:  # pragma: no cover - not supported yet
        return "unknown"
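A minimal usage sketch of the new ModelstudioDeployManager, based on the deploy() signature above; the project directory, start command, and deploy name are placeholders, and credentials are assumed to be set via ALIBABA_CLOUD_ACCESS_KEY_ID, ALIBABA_CLOUD_ACCESS_KEY_SECRET, and MODELSTUDIO_WORKSPACE_ID as read by ModelstudioConfig.from_env().

# Hypothetical usage; paths and names are placeholders.
import asyncio

from agentscope_runtime.engine.deployers.modelstudio_deployer import (
    ModelstudioDeployManager,
)


async def main() -> None:
    deployer = ModelstudioDeployManager()
    result = await deployer.deploy(
        project_dir="./my_agent_project",  # directory containing the agent code
        cmd="python main.py",              # start command packaged into the wheel
        deploy_name="my-agent",            # agent name shown in the Bailian console
    )
    print(result["wheel_path"], result.get("deploy_id", ""), result["url"])


if __name__ == "__main__":
    asyncio.run(main())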