agentscope-runtime 0.1.4__py3-none-any.whl → 0.1.5b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentscope_runtime/engine/agents/agentscope_agent/agent.py +3 -0
- agentscope_runtime/engine/deployers/__init__.py +13 -0
- agentscope_runtime/engine/deployers/adapter/responses/__init__.py +0 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_adapter_utils.py +2886 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_agent_adapter.py +51 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_protocol_adapter.py +314 -0
- agentscope_runtime/engine/deployers/cli_fc_deploy.py +143 -0
- agentscope_runtime/engine/deployers/kubernetes_deployer.py +265 -0
- agentscope_runtime/engine/deployers/local_deployer.py +356 -501
- agentscope_runtime/engine/deployers/modelstudio_deployer.py +626 -0
- agentscope_runtime/engine/deployers/utils/__init__.py +0 -0
- agentscope_runtime/engine/deployers/utils/deployment_modes.py +14 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/__init__.py +8 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/docker_image_builder.py +429 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +240 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/runner_image_factory.py +297 -0
- agentscope_runtime/engine/deployers/utils/package_project_utils.py +932 -0
- agentscope_runtime/engine/deployers/utils/service_utils/__init__.py +9 -0
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +504 -0
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_templates.py +157 -0
- agentscope_runtime/engine/deployers/utils/service_utils/process_manager.py +268 -0
- agentscope_runtime/engine/deployers/utils/service_utils/service_config.py +75 -0
- agentscope_runtime/engine/deployers/utils/service_utils/service_factory.py +220 -0
- agentscope_runtime/engine/deployers/utils/wheel_packager.py +389 -0
- agentscope_runtime/engine/helpers/agent_api_builder.py +651 -0
- agentscope_runtime/engine/runner.py +36 -10
- agentscope_runtime/engine/schemas/agent_schemas.py +70 -2
- agentscope_runtime/engine/schemas/embedding.py +37 -0
- agentscope_runtime/engine/schemas/modelstudio_llm.py +310 -0
- agentscope_runtime/engine/schemas/oai_llm.py +538 -0
- agentscope_runtime/engine/schemas/realtime.py +254 -0
- agentscope_runtime/engine/services/mem0_memory_service.py +124 -0
- agentscope_runtime/engine/services/memory_service.py +2 -1
- agentscope_runtime/engine/services/redis_session_history_service.py +4 -3
- agentscope_runtime/engine/services/session_history_service.py +4 -3
- agentscope_runtime/sandbox/manager/container_clients/kubernetes_client.py +555 -10
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-0.1.4.dist-info → agentscope_runtime-0.1.5b1.dist-info}/METADATA +21 -4
- {agentscope_runtime-0.1.4.dist-info → agentscope_runtime-0.1.5b1.dist-info}/RECORD +43 -16
- {agentscope_runtime-0.1.4.dist-info → agentscope_runtime-0.1.5b1.dist-info}/entry_points.txt +1 -0
- {agentscope_runtime-0.1.4.dist-info → agentscope_runtime-0.1.5b1.dist-info}/WHEEL +0 -0
- {agentscope_runtime-0.1.4.dist-info → agentscope_runtime-0.1.5b1.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-0.1.4.dist-info → agentscope_runtime-0.1.5b1.dist-info}/top_level.txt +0 -0
agentscope_runtime/engine/deployers/modelstudio_deployer.py

@@ -0,0 +1,626 @@
+# -*- coding: utf-8 -*-
+# pylint:disable=too-many-nested-blocks, too-many-return-statements,
+# pylint:disable=too-many-branches, too-many-statements, try-except-raise
+# pylint:disable=ungrouped-imports, arguments-renamed, protected-access
+#
+# flake8: noqa: E501
+import logging
+import os
+import time
+import uuid
+from pathlib import Path
+from typing import Dict, Optional, List, Union, Tuple
+
+from pydantic import BaseModel, Field
+
+from .adapter.protocol_adapter import ProtocolAdapter
+from .base import DeployManager
+from .local_deployer import LocalDeployManager
+from .utils.service_utils import (
+    ServicesConfig,
+)
+from .utils.wheel_packager import (
+    generate_wrapper_project,
+    build_wheel,
+    default_deploy_name,
+)
+from ..runner import Runner
+
+logger = logging.getLogger(__name__)
+
+
+try:  # Lazy optional imports; validated at runtime
+    import alibabacloud_oss_v2 as oss  # type: ignore
+    from alibabacloud_oss_v2.models import PutBucketRequest, PutObjectRequest
+    from alibabacloud_bailian20231229.client import Client as ModelstudioClient
+    from alibabacloud_tea_openapi import models as open_api_models
+    from alibabacloud_bailian20231229 import models as ModelstudioTypes
+    from alibabacloud_tea_util import models as util_models
+except Exception:
+    oss = None
+    PutBucketRequest = None
+    PutObjectRequest = None
+    ModelstudioClient = None
+    open_api_models = None
+    ModelstudioTypes = None
+    util_models = None
+
+
+class OSSConfig(BaseModel):
+    region: str = Field("cn-hangzhou", description="OSS region")
+    access_key_id: Optional[str] = None
+    access_key_secret: Optional[str] = None
+    bucket_prefix: str = Field(
+        "tmpbucket-agentscope-runtime",
+        description="Prefix for temporary buckets if creation is needed",
+    )
+
+    @classmethod
+    def from_env(cls) -> "OSSConfig":
+        return cls(
+            region=os.environ.get("OSS_REGION", "cn-hangzhou"),
+            access_key_id=os.environ.get(
+                "OSS_ACCESS_KEY_ID",
+                os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_ID"),
+            ),
+            access_key_secret=os.environ.get(
+                "OSS_ACCESS_KEY_SECRET",
+                os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_SECRET"),
+            ),
+        )
+
+    def ensure_valid(self) -> None:
+        # allow fallback to Alibaba Cloud AK/SK via from_env()
+        if not self.access_key_id or not self.access_key_secret:
+            raise RuntimeError(
+                "Missing AccessKey for OSS. Set either OSS_ACCESS_KEY_ID/OSS_ACCESS_KEY_SECRET "
+                "or ALIBABA_CLOUD_ACCESS_KEY_ID/ALIBABA_CLOUD_ACCESS_KEY_SECRET.",
+            )
+
+
+class ModelstudioConfig(BaseModel):
+    endpoint: str = Field(
+        "bailian.cn-beijing.aliyuncs.com",
+        description="Modelstudio service endpoint",
+    )
+    workspace_id: Optional[str] = None
+    access_key_id: Optional[str] = None
+    access_key_secret: Optional[str] = None
+    dashscope_api_key: Optional[str] = None
+
+    @classmethod
+    def from_env(cls) -> "ModelstudioConfig":
+        return cls(
+            endpoint=os.environ.get(
+                "MODELSTUDIO_ENDPOINT",
+                "bailian.cn-beijing.aliyuncs.com",
+            ),
+            workspace_id=os.environ.get("MODELSTUDIO_WORKSPACE_ID"),
+            access_key_id=os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_ID"),
+            access_key_secret=os.environ.get(
+                "ALIBABA_CLOUD_ACCESS_KEY_SECRET",
+            ),
+            dashscope_api_key=os.environ.get(
+                "DASHSCOPE_API_KEY",
+            ),
+        )
+
+    def ensure_valid(self) -> None:
+        missing = []
+        if not self.workspace_id:
+            missing.append("MODELSTUDIO_WORKSPACE_ID")
+        if not self.access_key_id:
+            missing.append("ALIBABA_CLOUD_ACCESS_KEY_ID")
+        if not self.access_key_secret:
+            missing.append("ALIBABA_CLOUD_ACCESS_KEY_SECRET")
+        if missing:
+            raise RuntimeError(
+                f"Missing required Modelstudio env vars: {', '.join(missing)}",
+            )
+
+
+def _assert_cloud_sdks_available():
+    if oss is None or ModelstudioClient is None:
+        raise RuntimeError(
+            "Cloud SDKs not installed. Please install: "
+            "alibabacloud-oss-v2 alibabacloud-bailian20231229 "
+            "alibabacloud-credentials alibabacloud-tea-openapi alibabacloud-tea-util",
+        )
+
+
+def _oss_get_client(oss_cfg: OSSConfig):
+    oss_cfg.ensure_valid()
+    # Ensure OSS SDK can read credentials from environment variables.
+    # If OSS_* are not set, populate them from resolved config (which may
+    # already have fallen back to ALIBABA_CLOUD_* as per from_env()).
+    if not os.environ.get("OSS_ACCESS_KEY_ID") and oss_cfg.access_key_id:
+        os.environ["OSS_ACCESS_KEY_ID"] = str(oss_cfg.access_key_id)
+    if (
+        not os.environ.get("OSS_ACCESS_KEY_SECRET")
+        and oss_cfg.access_key_secret
+    ):
+        os.environ["OSS_ACCESS_KEY_SECRET"] = str(oss_cfg.access_key_secret)
+
+    credentials_provider = (
+        oss.credentials.EnvironmentVariableCredentialsProvider()
+    )
+    cfg = oss.config.load_default()
+    cfg.credentials_provider = credentials_provider
+    cfg.region = oss_cfg.region
+    return oss.Client(cfg)
+
+
+async def _oss_create_bucket_if_not_exists(client, bucket_name: str) -> None:
+    try:
+        exists = client.is_bucket_exist(bucket=bucket_name)
+    except Exception:
+        exists = False
+    if not exists:
+        req = PutBucketRequest(
+            bucket=bucket_name,
+            acl="private",
+            create_bucket_configuration=oss.CreateBucketConfiguration(
+                storage_class="IA",
+            ),
+        )
+        client.put_bucket(req)
+        result = client.put_bucket_tags(
+            oss.PutBucketTagsRequest(
+                bucket=bucket_name,
+                tagging=oss.Tagging(
+                    tag_set=oss.TagSet(
+                        tags=[
+                            oss.Tag(
+                                key="bailian-high-code-deploy-oss-access",
+                                value="ReadAndAdd",
+                            ),
+                        ],
+                    ),
+                ),
+            ),
+        )
+        logger.info(
+            f"put bucket tag status code: {result.status_code}, request id: {result.request_id}",
+        )
+
+
+def _create_bucket_name(prefix: str, base_name: str) -> str:
+    import re as _re
+
+    ts = time.strftime("%Y%m%d-%H%M%S", time.gmtime())
+    base = _re.sub(r"\s+", "-", base_name)
+    base = _re.sub(r"[^a-zA-Z0-9-]", "", base).lower().strip("-")
+    name = f"{prefix}-{base}-{ts}"
+    return name[:63]
+
+
+async def _oss_put_and_presign(
+    client,
+    bucket_name: str,
+    object_key: str,
+    file_bytes: bytes,
+) -> str:
+    import datetime as _dt
+
+    put_req = PutObjectRequest(
+        bucket=bucket_name,
+        key=object_key,
+        body=file_bytes,
+    )
+    client.put_object(put_req)
+    pre = client.presign(
+        oss.GetObjectRequest(bucket=bucket_name, key=object_key),
+        expires=_dt.timedelta(minutes=180),
+    )
+    return pre.url
+
+
+async def _modelstudio_deploy(
+    cfg: ModelstudioConfig,
+    file_url: str,
+    filename: str,
+    deploy_name: str,
+    telemetry_enabled: bool = True,
+) -> str:
+    cfg.ensure_valid()
+    config = open_api_models.Config(
+        access_key_id=cfg.access_key_id,
+        access_key_secret=cfg.access_key_secret,
+    )
+    config.endpoint = cfg.endpoint
+    client_modelstudio = ModelstudioClient(config)
+    req = ModelstudioTypes.HighCodeDeployRequest(
+        source_code_name=filename,
+        source_code_oss_url=file_url,
+        agent_name=deploy_name,
+        telemetry_enabled=telemetry_enabled,
+    )
+    runtime = util_models.RuntimeOptions()
+    headers: Dict[str, str] = {}
+    resp = client_modelstudio.high_code_deploy_with_options(
+        cfg.workspace_id,
+        req,
+        headers,
+        runtime,
+    )
+
+    # Extract deploy identifier string from response
+    def _extract_deploy_identifier(response_obj) -> str:
+        try:
+            if isinstance(response_obj, str):
+                return response_obj
+            # Tea responses often have a 'body' that can be a dict or model
+            body = getattr(response_obj, "body", None)
+
+            # 1) If body is a plain string
+            if isinstance(body, str):
+                return body
+            # 2) If body is a dict, prefer common fields
+            if isinstance(body, dict):
+                # Explicit error handling: do not build URL on failure
+                if isinstance(body.get("success"), bool) and not body.get(
+                    "success",
+                ):
+                    err_code = (
+                        body.get("errorCode") or body.get("code") or "unknown"
+                    )
+                    err_msg = body.get("errorMsg") or body.get("message") or ""
+                    raise RuntimeError(
+                        f"ModelStudio deploy failed: {err_code} {err_msg}".strip(),
+                    )
+                for key in ("data", "result", "deployId"):
+                    val = body.get(key)
+                    if isinstance(val, str) and val:
+                        return val
+                # Try nested structures
+                data_val = body.get("data")
+                if isinstance(data_val, dict):
+                    for key in ("id", "deployId"):
+                        v = data_val.get(key)
+                        if isinstance(v, str) and v:
+                            return v
+            # 3) If body is a Tea model, try to_map()
+            if hasattr(body, "to_map") and callable(getattr(body, "to_map")):
+                try:
+                    m = body.to_map()
+                    if isinstance(m, dict):
+                        if isinstance(m.get("success"), bool) and not m.get(
+                            "success",
+                        ):
+                            err_code = (
+                                m.get("errorCode")
+                                or m.get("code")
+                                or "unknown"
+                            )
+                            err_msg = (
+                                m.get("errorMsg") or m.get("message") or ""
+                            )
+                            raise RuntimeError(
+                                f"ModelStudio deploy failed: {err_code} {err_msg}".strip(),
+                            )
+                        for key in ("data", "result", "deployId"):
+                            val = m.get(key)
+                            if isinstance(val, str) and val:
+                                return val
+                        d = m.get("data")
+                        if isinstance(d, dict):
+                            for key in ("id", "deployId"):
+                                v = d.get(key)
+                                if isinstance(v, str) and v:
+                                    return v
+                except Exception:
+                    raise
+            # 4) If response_obj itself is a dict
+            if isinstance(response_obj, dict):
+                b = response_obj.get("body")
+                if isinstance(b, dict):
+                    if isinstance(b.get("success"), bool) and not b.get(
+                        "success",
+                    ):
+                        err_code = (
+                            b.get("errorCode") or b.get("code") or "unknown"
+                        )
+                        err_msg = b.get("errorMsg") or b.get("message") or ""
+                        raise RuntimeError(
+                            f"ModelStudio deploy failed: {err_code} {err_msg}".strip(),
+                        )
+                    for key in ("data", "result", "deployId"):
+                        val = b.get(key)
+                        if isinstance(val, str) and val:
+                            return val
+            # Fallback: return empty to avoid polluting URL with dump
+            return ""
+        except Exception:  # pragma: no cover - conservative fallback
+            # Propagate errors as empty identifier; upper layer logs/raises
+            raise
+
+    return _extract_deploy_identifier(resp)
+
+
+class ModelstudioDeployManager(DeployManager):
+    """Deployer for Alibaba Modelstudio Function Compute based agent
+    deployment.
+
+    This deployer packages the user project into a wheel, uploads it to OSS,
+    and triggers a Modelstudio Full-Code deploy.
+    """
+
+    def __init__(
+        self,
+        oss_config: Optional[OSSConfig] = None,
+        modelstudio_config: Optional[ModelstudioConfig] = None,
+        build_root: Optional[Union[str, Path]] = None,
+    ) -> None:
+        super().__init__()
+        self.oss_config = oss_config or OSSConfig.from_env()
+        self.modelstudio_config = (
+            modelstudio_config or ModelstudioConfig.from_env()
+        )
+        self.build_root = Path(build_root) if build_root else None
+
+    async def _generate_wrapper_and_build_wheel(
+        self,
+        project_dir: Union[Optional[str], Path],
+        cmd: Optional[str] = None,
+        deploy_name: Optional[str] = None,
+        telemetry_enabled: bool = True,
+    ) -> Tuple[Path, str]:
+        """
+        Validate arguments, generate the wrapper project and build the wheel.
+
+        Returns: (wheel_path, name)
+        """
+        if not project_dir or not cmd:
+            raise ValueError(
+                "project_dir and cmd are required for "
+                "Modelstudio deployment",
+            )
+
+        project_dir = Path(project_dir).resolve()
+        if not project_dir.is_dir():
+            raise FileNotFoundError(f"Project dir not found: {project_dir}")
+
+        name = deploy_name or default_deploy_name()
+        proj_root = project_dir.resolve()
+        if isinstance(self.build_root, Path):
+            effective_build_root = self.build_root.resolve()
+        else:
+            if self.build_root:
+                effective_build_root = Path(self.build_root).resolve()
+            else:
+                effective_build_root = (
+                    proj_root.parent / ".agentscope_runtime_builds"
+                ).resolve()
+
+        build_dir = effective_build_root / f"build-{int(time.time())}"
+        build_dir.mkdir(parents=True, exist_ok=True)
+
+        logger.info("Generating wrapper project for %s", name)
+        wrapper_project_dir, _ = await generate_wrapper_project(
+            build_root=build_dir,
+            user_project_dir=project_dir,
+            start_cmd=cmd,
+            deploy_name=name,
+            telemetry_enabled=telemetry_enabled,
+        )
+
+        logger.info("Building wheel under %s", wrapper_project_dir)
+        wheel_path = await build_wheel(wrapper_project_dir)
+        return wheel_path, name
+
+    def _generate_env_file(
+        self,
+        project_dir: Union[str, Path],
+        environment: Optional[Dict[str, str]] = None,
+        env_filename: str = ".env",
+    ) -> Optional[Path]:
+        """
+        Generate a .env file from environment variables dictionary.
+
+        Args:
+            project_dir: Directory where the .env file will be created
+            environment: Dictionary of environment variables to write to
+                the .env file
+            env_filename: Name of the env file (default: ".env")
+
+        Returns:
+            Path to the created .env file, or None if no environment
+            variables provided
+        """
+        if not environment:
+            return None
+
+        project_path = Path(project_dir).resolve()
+        if not project_path.exists():
+            raise FileNotFoundError(
+                f"Project directory not found: " f"{project_path}",
+            )
+
+        env_file_path = project_path / env_filename
+
+        try:
+            with env_file_path.open("w", encoding="utf-8") as f:
+                f.write("# Environment variables used by AgentScope Runtime\n")
+
+                for key, value in environment.items():
+                    # Escape special characters and quote values if needed
+                    if value is None:
+                        continue
+
+                    # Quote values that contain spaces or special characters
+                    if " " in str(value) or any(
+                        char in str(value)
+                        for char in ["$", "`", '"', "'", "\\"]
+                    ):
+                        # Escape existing quotes and wrap in double quotes
+                        escaped_value = (
+                            str(value)
+                            .replace("\\", "\\\\")
+                            .replace('"', '\\"')
+                        )
+                        f.write(f'{key}="{escaped_value}"\n')
+                    else:
+                        f.write(f"{key}={value}\n")
+
+            logger.info(f"Generated .env file at: {env_file_path}")
+            return env_file_path
+
+        except Exception as e:
+            logger.warning(f"Failed to generate .env file: {e}")
+            return None
+
+    async def _upload_and_deploy(
+        self,
+        wheel_path: Path,
+        name: str,
+        telemetry_enabled: bool = True,
+    ) -> Tuple[str, str]:
+        logger.info("Uploading wheel to OSS and generating presigned URL")
+        client = _oss_get_client(self.oss_config)
+        bucket_name = (
+            f"tmp-bucket-for-code-deployment-"
+            f"{os.getenv('MODELSTUDIO_WORKSPACE_ID', str(uuid.uuid4()))}"
+        )
+        await _oss_create_bucket_if_not_exists(client, bucket_name)
+        filename = wheel_path.name
+        with wheel_path.open("rb") as f:
+            file_bytes = f.read()
+        artifact_url = await _oss_put_and_presign(
+            client,
+            bucket_name,
+            filename,
+            file_bytes,
+        )
+
+        logger.info("Triggering Modelstudio Full-Code deploy for %s", name)
+        deploy_identifier = await _modelstudio_deploy(
+            cfg=self.modelstudio_config,
+            file_url=artifact_url,
+            filename=filename,
+            deploy_name=name,
+            telemetry_enabled=telemetry_enabled,
+        )
+
+        def _build_console_url(endpoint: str, identifier: str) -> str:
+            # Map API endpoint to console domain (no fragment in base)
+            base = (
+                "https://pre-bailian.console.aliyun.com/?tab=app&efm_v=3.4.108#"
+                if ("bailian-pre" in endpoint or "pre" in endpoint)
+                else "https://bailian.console.aliyun.com/?tab=app"
+            )
+            # Optional query can be appended if needed; keep path clean
+            return f"{base}/app-center/high-code-detail/{identifier}"
+
+        console_url = (
+            _build_console_url(
+                self.modelstudio_config.endpoint,
+                deploy_identifier,
+            )
+            if deploy_identifier
+            else ""
+        )
+        return artifact_url, console_url
+
+    async def deploy(
+        self,
+        runner: Optional[Runner] = None,
+        endpoint_path: str = "/process",
+        services_config: Optional[Union[ServicesConfig, dict]] = None,
+        protocol_adapters: Optional[list[ProtocolAdapter]] = None,
+        requirements: Optional[Union[str, List[str]]] = None,
+        extra_packages: Optional[List[str]] = None,
+        environment: Optional[Dict[str, str]] = None,
+        # runtime_config: Optional[Dict] = None,
+        # ModelStudio-specific/packaging args (required)
+        project_dir: Optional[Union[str, Path]] = None,
+        cmd: Optional[str] = None,
+        deploy_name: Optional[str] = None,
+        skip_upload: bool = False,
+        telemetry_enabled: bool = True,
+        external_whl_path: Optional[str] = None,
+        **kwargs,
+    ) -> Dict[str, str]:
+        """
+        Package the project, upload to OSS and trigger ModelStudio deploy.
+
+        Returns a dict containing deploy_id, wheel_path, artifact_url (if uploaded),
+        resource_name (deploy_name), and workspace_id.
+        """
+        if not runner and not project_dir and not external_whl_path:
+            raise ValueError(
+                "One of runner, project_dir or external_whl_path is required",
+            )
+
+        # convert services_config to Model body
+        if services_config and isinstance(services_config, dict):
+            services_config = ServicesConfig(**services_config)
+
+        try:
+            if runner:
+                agent = runner._agent
+
+                # Create package project for detached deployment
+                project_dir = await LocalDeployManager.create_detached_project(
+                    agent=agent,
+                    endpoint_path=endpoint_path,
+                    services_config=services_config,  # type: ignore[arg-type]
+                    protocol_adapters=protocol_adapters,
+                    requirements=requirements,
+                    extra_packages=extra_packages,
+                    **kwargs,
+                )
+                self._generate_env_file(project_dir, environment)
+                cmd = "python main.py"
+                deploy_name = deploy_name or default_deploy_name()
+
+            # if whl exists then skip the project package method
+            if external_whl_path:
+                wheel_path = Path(external_whl_path).resolve()
+                if not wheel_path.is_file():
+                    raise FileNotFoundError(
+                        f"External wheel file not found: {wheel_path}",
+                    )
+                name = deploy_name or default_deploy_name()
+            else:
+                (
+                    wheel_path,
+                    name,
+                ) = await self._generate_wrapper_and_build_wheel(
+                    project_dir=project_dir,
+                    cmd=cmd,
+                    deploy_name=deploy_name,
+                    telemetry_enabled=telemetry_enabled,
+                )
+
+            artifact_url = ""
+            console_url = ""
+            if not skip_upload:
+                # Only require cloud SDKs and credentials when performing upload/deploy
+                _assert_cloud_sdks_available()
+                self.oss_config.ensure_valid()
+                self.modelstudio_config.ensure_valid()
+                artifact_url, console_url = await self._upload_and_deploy(
+                    wheel_path,
+                    name,
+                    telemetry_enabled,
+                )
+
+            result: Dict[str, str] = {
+                "deploy_id": self.deploy_id,
+                "wheel_path": str(wheel_path),
+                "artifact_url": artifact_url,
+                "resource_name": name,
+                "workspace_id": self.modelstudio_config.workspace_id or "",
+                "url": console_url,
+            }
+
+            return result
+        except Exception as e:
+            # Print richer error message to improve UX
+            err_text = str(e)
+            logger.error("Failed to deploy to modelstudio: %s", err_text)
+            raise
+
+    async def stop(self) -> None:  # pragma: no cover - not supported yet
+        pass
+
+    def get_status(self) -> str:  # pragma: no cover - not supported yet
+        return "unknown"
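For orientation, a minimal usage sketch of the new deployer follows. It is an illustration, not part of the package: the project path, start command, and deploy name are hypothetical, and credentials are assumed to be provided through the environment variables that `OSSConfig.from_env()` and `ModelstudioConfig.from_env()` read (ALIBABA_CLOUD_ACCESS_KEY_ID, ALIBABA_CLOUD_ACCESS_KEY_SECRET, MODELSTUDIO_WORKSPACE_ID).

```python
# Hedged sketch: packaging a local project and deploying it with the new
# ModelstudioDeployManager. The project directory, start command, and deploy
# name below are placeholders, not values shipped with the package.
import asyncio

from agentscope_runtime.engine.deployers.modelstudio_deployer import (
    ModelstudioDeployManager,
)


async def main() -> None:
    deployer = ModelstudioDeployManager()
    result = await deployer.deploy(
        project_dir="./my_agent_project",  # hypothetical local project
        cmd="python main.py",              # start command baked into the wrapper
        deploy_name="my-agent",            # optional; falls back to default_deploy_name()
        skip_upload=False,                 # True builds the wheel without touching OSS
    )
    # The returned dict includes wheel_path, artifact_url, resource_name,
    # workspace_id, and a console url when a deploy identifier is returned.
    print(result["wheel_path"], result["url"])


if __name__ == "__main__":
    asyncio.run(main())
```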
agentscope_runtime/engine/deployers/utils/__init__.py (File without changes)

agentscope_runtime/engine/deployers/utils/deployment_modes.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+"""Deployment modes and configuration for unified FastAPI architecture."""
+
+from enum import Enum
+
+
+class DeploymentMode(str, Enum):
+    """FastAPI application deployment modes."""
+
+    DAEMON_THREAD = "daemon_thread"  # LocalDeployManager daemon thread mode
+    DETACHED_PROCESS = (
+        "detached_process"  # LocalDeployManager detached process mode
+    )
+    STANDALONE = "standalone"  # Package project template mode