agentscope-runtime 1.0.3__py3-none-any.whl → 1.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentscope_runtime/adapters/agentscope/stream.py +2 -9
- agentscope_runtime/adapters/ms_agent_framework/__init__.py +0 -0
- agentscope_runtime/adapters/ms_agent_framework/message.py +205 -0
- agentscope_runtime/adapters/ms_agent_framework/stream.py +418 -0
- agentscope_runtime/adapters/utils.py +6 -0
- agentscope_runtime/cli/commands/deploy.py +371 -0
- agentscope_runtime/common/container_clients/knative_client.py +466 -0
- agentscope_runtime/engine/__init__.py +4 -0
- agentscope_runtime/engine/constant.py +1 -0
- agentscope_runtime/engine/deployers/__init__.py +12 -0
- agentscope_runtime/engine/deployers/adapter/a2a/__init__.py +26 -51
- agentscope_runtime/engine/deployers/adapter/a2a/a2a_protocol_adapter.py +19 -10
- agentscope_runtime/engine/deployers/adapter/a2a/a2a_registry.py +4 -201
- agentscope_runtime/engine/deployers/adapter/a2a/nacos_a2a_registry.py +134 -25
- agentscope_runtime/engine/deployers/agentrun_deployer.py +2 -2
- agentscope_runtime/engine/deployers/fc_deployer.py +1506 -0
- agentscope_runtime/engine/deployers/knative_deployer.py +290 -0
- agentscope_runtime/engine/runner.py +12 -0
- agentscope_runtime/engine/services/agent_state/redis_state_service.py +2 -2
- agentscope_runtime/engine/services/memory/redis_memory_service.py +2 -2
- agentscope_runtime/engine/services/session_history/redis_session_history_service.py +2 -2
- agentscope_runtime/engine/tracing/wrapper.py +18 -4
- agentscope_runtime/sandbox/__init__.py +14 -6
- agentscope_runtime/sandbox/box/base/__init__.py +2 -2
- agentscope_runtime/sandbox/box/base/base_sandbox.py +51 -1
- agentscope_runtime/sandbox/box/browser/__init__.py +2 -2
- agentscope_runtime/sandbox/box/browser/browser_sandbox.py +198 -2
- agentscope_runtime/sandbox/box/filesystem/__init__.py +2 -2
- agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +99 -2
- agentscope_runtime/sandbox/box/gui/__init__.py +2 -2
- agentscope_runtime/sandbox/box/gui/gui_sandbox.py +117 -1
- agentscope_runtime/sandbox/box/mobile/__init__.py +2 -2
- agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +247 -100
- agentscope_runtime/sandbox/box/sandbox.py +98 -65
- agentscope_runtime/sandbox/box/shared/routers/generic.py +36 -29
- agentscope_runtime/sandbox/client/__init__.py +6 -1
- agentscope_runtime/sandbox/client/async_http_client.py +339 -0
- agentscope_runtime/sandbox/client/base.py +74 -0
- agentscope_runtime/sandbox/client/http_client.py +108 -329
- agentscope_runtime/sandbox/enums.py +7 -0
- agentscope_runtime/sandbox/manager/sandbox_manager.py +264 -4
- agentscope_runtime/sandbox/manager/server/app.py +7 -1
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.3.dist-info → agentscope_runtime-1.0.4.dist-info}/METADATA +102 -28
- {agentscope_runtime-1.0.3.dist-info → agentscope_runtime-1.0.4.dist-info}/RECORD +49 -40
- {agentscope_runtime-1.0.3.dist-info → agentscope_runtime-1.0.4.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.3.dist-info → agentscope_runtime-1.0.4.dist-info}/entry_points.txt +0 -0
- {agentscope_runtime-1.0.3.dist-info → agentscope_runtime-1.0.4.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.3.dist-info → agentscope_runtime-1.0.4.dist-info}/top_level.txt +0 -0
agentscope_runtime/engine/deployers/fc_deployer.py (new file)
@@ -0,0 +1,1506 @@
# -*- coding: utf-8 -*-
# flake8: noqa: E501
# pylint: disable=line-too-long, too-many-branches, too-many-statements
# pylint: disable=protected-access, too-many-nested-blocks
import json
import logging
import os
import time
from dataclasses import dataclass
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional, Union, Dict, Tuple, Any, List

from pydantic import BaseModel, Field

from alibabacloud_fc20230330 import models as fc20230330_models
from alibabacloud_fc20230330.client import Client as FC20230330Client
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util import models as util_models

from agentscope_runtime.engine import DeployManager, LocalDeployManager
from agentscope_runtime.engine.deployers.adapter.protocol_adapter import (
    ProtocolAdapter,
)
from agentscope_runtime.engine.deployers.state import Deployment
from agentscope_runtime.engine.deployers.utils.detached_app import (
    get_bundle_entry_script,
)
from agentscope_runtime.engine.deployers.utils.package import (
    generate_build_directory,
)
from agentscope_runtime.engine.deployers.utils.wheel_packager import (
    generate_wrapper_project,
    default_deploy_name,
    build_wheel,
)

logger = logging.getLogger(__name__)


@dataclass
class LogConfig:
    """Configuration for logging."""

    logstore: Optional[str] = None
    project: Optional[str] = None


@dataclass
class VPCConfig:
    """VPC configuration for the runtime."""

    vpc_id: Optional[str] = None
    security_group_id: Optional[str] = None
    vswitch_ids: Optional[List[str]] = None


@dataclass
class CodeConfig:
    """Configuration for code-based runtimes."""

    command: Optional[List[str]] = None
    oss_bucket_name: Optional[str] = None
    oss_object_name: Optional[str] = None


class FCConfig(BaseModel):
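    """Alibaba Cloud Function Compute (FC) deployment settings: credentials,
    region, optional log/VPC configuration, instance resources
    (cpu/memory/disk), and session-affinity limits.
    """
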
    access_key_id: Optional[str] = None
    access_key_secret: Optional[str] = None
    account_id: Optional[str] = None
    region_id: str = "cn-hangzhou"

    log_config: Optional[LogConfig] = None
    vpc_config: Optional[VPCConfig] = None

    cpu: float = 2.0
    memory: int = 2048
    disk: int = 512

    execution_role_arn: Optional[str] = None

    session_concurrency_limit: Optional[int] = 200
    session_idle_timeout_seconds: Optional[int] = 3600

    @classmethod
    def from_env(cls) -> "FCConfig":
        """Create FCConfig from environment variables.

        Returns:
            FCConfig: Configuration loaded from environment variables.
        """
        # Read region_id
        region_id = os.environ.get("FC_REGION_ID", "cn-hangzhou")

        # Read log-related environment variables
        log_store = os.environ.get("FC_LOG_STORE")
        log_project = os.environ.get("FC_LOG_PROJECT")
        log_config = None
        if log_store and log_project:
            log_config = LogConfig(
                logstore=log_store,
                project=log_project,
            )

        # Read network-related environment variables
        vpc_id = os.environ.get("FC_VPC_ID")
        security_group_id = os.environ.get("FC_SECURITY_GROUP_ID")
        vswitch_ids_str = os.environ.get("FC_VSWITCH_IDS")

        vpc_config = None
        if vpc_id and security_group_id and vswitch_ids_str:
            vswitch_ids = json.loads(vswitch_ids_str)
            if not isinstance(vswitch_ids, list):
                raise ValueError("vswitch_ids must be a list")

            vpc_config = VPCConfig(
                vpc_id=vpc_id,
                security_group_id=security_group_id,
                vswitch_ids=vswitch_ids,
            )

        # Read CPU and Memory with type conversion
        cpu_str = os.environ.get("FC_CPU", "2.0")
        memory_str = os.environ.get("FC_MEMORY", "2048")
        disk_str = os.environ.get("FC_DISK", "512")

        session_concurrency_limit_str = os.environ.get(
            "FC_SESSION_CONCURRENCY_LIMIT",
            "200",
        )
        session_idle_timeout_seconds_str = os.environ.get(
            "FC_SESSION_IDLE_TIMEOUT_SECONDS",
            "3600",
        )

        try:
            cpu = float(cpu_str)
        except (ValueError, TypeError):
            cpu = 2.0

        try:
            memory = int(memory_str)
        except (ValueError, TypeError):
            memory = 2048

        try:
            disk = int(disk_str)
        except (ValueError, TypeError):
            disk = 512

        execution_role_arn = os.environ.get("FC_EXECUTION_ROLE_ARN")

        try:
            session_concurrency_limit = int(session_concurrency_limit_str)
        except (ValueError, TypeError):
            session_concurrency_limit = 200

        try:
            session_idle_timeout_seconds = int(
                session_idle_timeout_seconds_str,
            )
        except (ValueError, TypeError):
            session_idle_timeout_seconds = 3600

        return cls(
            access_key_id=os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_ID"),
            access_key_secret=os.environ.get(
                "ALIBABA_CLOUD_ACCESS_KEY_SECRET",
            ),
            account_id=os.environ.get("FC_ACCOUNT_ID"),
            region_id=region_id,
            log_config=log_config,
            vpc_config=vpc_config,
            cpu=cpu,
            memory=memory,
            disk=disk,
            execution_role_arn=execution_role_arn,
            session_concurrency_limit=session_concurrency_limit,
            session_idle_timeout_seconds=session_idle_timeout_seconds,
        )

    def ensure_valid(self) -> None:
        """Validate that all required configuration fields are present.

        Raises:
            ValueError: If required environment variables are missing.
        """
        missing = []
        if not self.access_key_id:
            missing.append("ALIBABA_CLOUD_ACCESS_KEY_ID")
        if not self.access_key_secret:
            missing.append("ALIBABA_CLOUD_ACCESS_KEY_SECRET")
        if not self.account_id:
            missing.append("FC_ACCOUNT_ID")
        if missing:
            raise ValueError(
                f"Missing required FC env vars: {', '.join(missing)}",
            )


class OSSConfig(BaseModel):
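    """OSS settings for storing the packaged code artifact; the AccessKey
    pair falls back to the ALIBABA_CLOUD_* environment variables when the
    OSS-specific ones are unset (see from_env()).
    """
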
    region: str = Field("cn-hangzhou", description="OSS region")
    access_key_id: Optional[str] = None
    access_key_secret: Optional[str] = None
    bucket_name: str

    @classmethod
    def from_env(cls) -> "OSSConfig":
        """Create OSSConfig from environment variables.

        Returns:
            OSSConfig: Configuration loaded from environment variables.
        """
        return cls(
            region=os.environ.get("OSS_REGION", "cn-hangzhou"),
            access_key_id=os.environ.get(
                "OSS_ACCESS_KEY_ID",
                os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_ID"),
            ),
            access_key_secret=os.environ.get(
                "OSS_ACCESS_KEY_SECRET",
                os.environ.get("ALIBABA_CLOUD_ACCESS_KEY_SECRET"),
            ),
            bucket_name=os.environ.get("OSS_BUCKET_NAME"),
        )

    def ensure_valid(self) -> None:
        """Validate that all required OSS configuration fields are present.

        Raises:
            RuntimeError: If required AccessKey credentials are missing.
        """
        # Allow fallback to Alibaba Cloud AK/SK via from_env()
        if (
            not self.access_key_id
            or not self.access_key_secret
            or not self.bucket_name
        ):
            raise RuntimeError(
                "Missing OSS configuration. Set OSS_BUCKET_NAME and either "
                "OSS_ACCESS_KEY_ID/OSS_ACCESS_KEY_SECRET or "
                "ALIBABA_CLOUD_ACCESS_KEY_ID/ALIBABA_CLOUD_ACCESS_KEY_SECRET.",
            )


class FCDeployManager(DeployManager):
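    """Deploy agent applications to Alibaba Cloud Function Compute (FC).

    The flow implemented below: package the project as a wheel, install it
    with its dependencies inside a Docker build container and zip the
    result, upload the zip to OSS, then create or update an FC function
    with a fixed anonymous HTTP trigger and record the deployment in the
    state manager.
    """
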
    # Fixed trigger name for HTTP trigger
    HTTP_TRIGGER_NAME = "agentscope-runtime-trigger"

    def __init__(
        self,
        oss_config: Optional[OSSConfig] = None,
        fc_config: Optional[FCConfig] = None,
        build_root: Optional[Union[str, Path]] = None,
        state_manager=None,
    ):
        """Initialize FC deployment manager.

        Args:
            oss_config: OSS configuration for artifact storage. If None, loads from environment.
            fc_config: FC service configuration. If None, loads from environment.
            build_root: Root directory for build artifacts. If None, defaults to ".agentscope_runtime_builds" under the parent of the current working directory.
            state_manager: Deployment state manager. If None, creates a new instance.
        """
        super().__init__(state_manager=state_manager)
        self.oss_config = oss_config or OSSConfig.from_env()
        self.fc_config = fc_config or FCConfig.from_env()
        self.build_root = (
            Path(build_root)
            if build_root
            else Path(os.getcwd()).parent / ".agentscope_runtime_builds"
        )
        self.client = self._create_fc_client()

    def _create_fc_client(self):
        """Create and configure the Function Compute client.

        Returns:
            FC20230330Client: Configured Function Compute client instance.
        """
        fc_config = open_api_models.Config(
            access_key_id=self.fc_config.access_key_id,
            access_key_secret=self.fc_config.access_key_secret,
            endpoint=f"{self.fc_config.account_id}.{self.fc_config.region_id}.fc.aliyuncs.com",
            read_timeout=60 * 1000,
        )
        return FC20230330Client(fc_config)

    async def _generate_wrapper_and_build_wheel(
        self,
        project_dir: Optional[Union[str, Path]],
        cmd: Optional[str] = None,
        deploy_name: Optional[str] = None,
        telemetry_enabled: bool = True,
    ) -> Tuple[Path, str]:
        """Generate wrapper project and build wheel package.

        Args:
            project_dir: Path to the user's project directory.
            cmd: Command to start the agent application.
            deploy_name: Name for the deployment. If None, generates default name.
            telemetry_enabled: Whether to enable telemetry in the wrapper.

        Returns:
            Tuple containing:
            - wheel_path: Path to the built wheel file
            - name: Deployment name used

        Raises:
            ValueError: If project_dir or cmd is not provided.
            FileNotFoundError: If project directory does not exist.
        """
        if not project_dir or not cmd:
            raise ValueError(
                "project_dir and cmd are required for deployment",
            )

        project_dir = Path(project_dir).resolve()
        if not project_dir.is_dir():
            raise FileNotFoundError(
                f"Project directory not found: {project_dir}",
            )

        name = deploy_name or default_deploy_name()

        # Generate build directory with platform-aware naming
        # proj_root = project_dir.resolve()
        if isinstance(self.build_root, Path):
            effective_build_root = self.build_root.resolve()
        else:
            if self.build_root:
                effective_build_root = Path(self.build_root).resolve()
            else:
                # Use centralized directory generation function
                effective_build_root = generate_build_directory("fc")

        build_dir = effective_build_root
        build_dir.mkdir(parents=True, exist_ok=True)

        logger.info("Generating wrapper project: %s", name)
        wrapper_project_dir, _ = generate_wrapper_project(
            build_root=build_dir,
            user_project_dir=project_dir,
            start_cmd=cmd,
            deploy_name=name,
            telemetry_enabled=telemetry_enabled,
        )

        logger.info("Building wheel package from: %s", wrapper_project_dir)
        wheel_path = build_wheel(wrapper_project_dir)
        logger.info("Wheel package created: %s", wheel_path)

        return wheel_path, name

    def _generate_env_file(
        self,
        project_dir: Union[str, Path],
        environment: Optional[Dict[str, str]] = None,
        env_filename: str = ".env",
    ) -> Optional[Path]:
        """Generate .env file from environment variables dictionary.

        Args:
            project_dir: Project directory where the .env file will be created.
            environment: Dictionary of environment variables to write to .env file.
            env_filename: Name of the env file (default: ".env").

        Returns:
            Path to the created .env file, or None if no environment variables provided.

        Raises:
            FileNotFoundError: If project directory does not exist.
        """
        if not environment:
            return None

        project_path = Path(project_dir).resolve()
        if not project_path.exists():
            raise FileNotFoundError(
                f"Project directory not found: {project_path}",
            )

        env_file_path = project_path / env_filename

        try:
            with env_file_path.open("w", encoding="utf-8") as f:
                f.write("# Environment variables used by AgentScope Runtime\n")

                for key, value in environment.items():
                    # Skip None values
                    if value is None:
                        continue

                    # Quote values that contain spaces or special characters
                    if " " in str(value) or any(
                        char in str(value)
                        for char in ["$", "`", '"', "'", "\\"]
                    ):
                        # Escape existing quotes and wrap in double quotes
                        escaped_value = (
                            str(value)
                            .replace("\\", "\\\\")
                            .replace('"', '\\"')
                        )
                        f.write(f'{key}="{escaped_value}"\n')
                    else:
                        f.write(f"{key}={value}\n")

            logger.info("Environment file created: %s", env_file_path)
            return env_file_path

        except Exception as e:
            logger.warning("Failed to create environment file: %s", e)
            return None

    async def deploy(
        self,
        runner=None,
        endpoint_path: str = "/process",
        protocol_adapters: Optional[list[ProtocolAdapter]] = None,
        requirements: Optional[Union[str, List[str]]] = None,
        extra_packages: Optional[List[str]] = None,
        environment: Optional[Dict[str, str]] = None,
        project_dir: Optional[Union[str, Path]] = None,
        cmd: Optional[str] = None,
        deploy_name: Optional[str] = None,
        skip_upload: bool = False,
        external_whl_path: Optional[str] = None,
        function_name: Optional[str] = None,
        custom_endpoints: Optional[List[Dict]] = None,
        app=None,
        **kwargs,
    ) -> Dict[str, str]:
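        """Package and deploy an agent application to FC.

        Unless ``function_name`` is given (update of an existing function),
        at least one code source is required: ``app``/``runner`` (converted
        into a detached project), ``project_dir`` together with ``cmd``, or
        a prebuilt ``external_whl_path``. With ``skip_upload=True`` the
        method stops after the zip package is built.

        Returns:
            Dict with deployment results, including ``function_name``,
            ``endpoint_url``, ``url`` (FC console URL), ``deploy_id``, and
            ``resource_name``.
        """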
        if not function_name:
            if (
                not app
                and not runner
                and not project_dir
                and not external_whl_path
            ):
                raise ValueError(
                    "Must provide either app, runner, project_dir, or external_whl_path",
                )
        try:
            if runner or app:
                logger.info("Creating detached project from runner")
                if "agent" in kwargs:
                    kwargs.pop("agent")

                # Create package project for detached deployment
                project_dir = await LocalDeployManager.create_detached_project(
                    app=app,
                    runner=runner,
                    endpoint_path=endpoint_path,
                    custom_endpoints=custom_endpoints,
                    protocol_adapters=protocol_adapters,
                    requirements=requirements,
                    extra_packages=extra_packages,
                    platform="fc",
                    **kwargs,
                )
                if project_dir:
                    self._generate_env_file(project_dir, environment)
                entry_script = get_bundle_entry_script(project_dir)
                cmd = f"python {entry_script}"
                deploy_name = deploy_name or default_deploy_name()

            # Use external wheel if provided, skip project packaging
            if external_whl_path:
                wheel_path = Path(external_whl_path).resolve()
                if not wheel_path.is_file():
                    raise FileNotFoundError(
                        f"External wheel file not found: {wheel_path}",
                    )
                name = deploy_name or default_deploy_name()
                # Keep existing name when updating agent without specifying deploy_name
                if function_name and (deploy_name is None):
                    name = None
                logger.info("Using external wheel file: %s", wheel_path)
            else:
                logger.info("Building wheel package from project")
                (
                    wheel_path,
                    name,
                ) = await self._generate_wrapper_and_build_wheel(
                    project_dir=project_dir,
                    cmd=cmd,
                    deploy_name=deploy_name,
                )
            logger.info(
                "Wheel file ready: %s (deploy name: %s)",
                wheel_path,
                name,
            )

            timestamp = time.strftime("%Y%m%d%H%M%S")

            # Step 1: Build and package in Docker container
            logger.info(
                "Building dependencies and creating zip package in Docker",
            )
            zip_file_path = await self._build_and_zip_in_docker(
                wheel_path=wheel_path,
                output_dir=wheel_path.parent,
                zip_filename=f"{name or function_name}-{timestamp}.zip",
            )
            logger.info("Zip package created: %s", zip_file_path)

            if skip_upload:
                logger.info(
                    "Deployment completed (skipped upload to FC)",
                )
                return {
                    "message": "Agent package built successfully (upload skipped)",
                    "deploy_name": name,
                }

            # Step 2: Upload to OSS
            logger.info("Uploading zip package to OSS")
            oss_result = await self._upload_to_fixed_oss_bucket(
                zip_file_path=zip_file_path,
                bucket_name=self.oss_config.bucket_name,
            )
            logger.info("Zip package uploaded to OSS successfully")

            # Deploy to FC service
            logger.info("Deploying to FC service")
            fc_deploy_result = await self.deploy_to_fc(
                agent_runtime_name=name,
                oss_bucket_name=oss_result["bucket_name"],
                oss_object_name=oss_result["object_key"],
                function_name=function_name,
                environment=environment,
            )

            # Use base class UUID deploy_id (already set in __init__)
            deploy_id = self.deploy_id
            deployed_function_name = fc_deploy_result["function_name"]
            endpoint_internet_url = fc_deploy_result.get(
                "endpoint_internet_url",
                "",
            )
            console_url = (
                f"https://fcnext.console.aliyun.com/{self.fc_config.region_id}/"
                f"functions/{deployed_function_name}"
            )

            # Save deployment to state manager
            deployment = Deployment(
                id=deploy_id,
                platform="fc",
                url=console_url,
                status="running",
                created_at=datetime.now().isoformat(),
                agent_source=kwargs.get("agent_source"),
                config={
                    "function_name": deployed_function_name,
                    "endpoint_url": endpoint_internet_url,
                    "resource_name": name,
                    "wheel_path": str(wheel_path),
                    "artifact_url": oss_result.get("presigned_url", ""),
                    "region_id": self.fc_config.region_id,
                },
            )
            self.state_manager.save(deployment)

            # Return deployment results
            logger.info(
                "Deployment completed successfully. Agent runtime ID: %s",
                deployed_function_name,
            )
            return {
                "message": "Agent deployed successfully to FC",
                "function_name": deployed_function_name,
                "endpoint_url": endpoint_internet_url,
                "wheel_path": str(wheel_path),
                "artifact_url": oss_result.get("presigned_url", ""),
                "url": console_url,
                "deploy_id": deploy_id,
                "resource_name": name,
            }

        except Exception as e:
            logger.error("Deployment failed: %s", str(e))
            raise

    async def deploy_to_fc(
        self,
        agent_runtime_name: str,
        oss_bucket_name: str,
        oss_object_name: str,
        function_name: Optional[str] = None,
        environment: Optional[Dict[str, str]] = None,
    ):
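        """Create or update the FC function and resolve its HTTP trigger.

        When ``function_name`` is given, the existing function is updated
        and its trigger looked up; otherwise a new function named
        ``agent_runtime_name`` is created along with an anonymous HTTP
        trigger. Both paths use the custom.debian11 runtime and load code
        from the given OSS bucket/object.

        Returns:
            Dict with ``success``, ``function_name``,
            ``endpoint_internet_url``, ``endpoint_intranet_url``, and
            ``deploy_id``; on failure, ``success`` is False with ``error``
            and ``message`` fields.
        """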
        try:
            logger.info("Starting FC deployment: %s", agent_runtime_name)

            custom_runtime_config = fc20230330_models.CustomRuntimeConfig(
                port=8090,
                command=["python3", "/code/deploy_starter/main.py"],
            )

            code_config = fc20230330_models.InputCodeLocation(
                oss_bucket_name=oss_bucket_name,
                oss_object_name=oss_object_name,
            )

            session_affinity_config = json.dumps(
                {
                    "affinityHeaderFieldName": "x-agentscope-runtime-session-id",
                    "sessionTTLInSeconds": 21600,
                    "sessionConcurrencyPerInstance": self.fc_config.session_concurrency_limit
                    if self.fc_config
                    else 200,
                    "sessionIdleTimeoutInSeconds": self.fc_config.session_idle_timeout_seconds
                    if self.fc_config
                    else 3600,
                },
            )

            if function_name:
                # Update existing fc agent runtime
                logger.info(
                    "Updating existing FC agent runtime: %s",
                    function_name,
                )

                update_function_kwargs = {
                    "runtime": "custom.debian11",
                    "code": code_config,
                    "custom_runtime_config": custom_runtime_config,
                    "description": f"AgentScope Runtime Function - {agent_runtime_name}",
                    "timeout": 300,
                    "memory_size": self.fc_config.memory
                    if self.fc_config
                    else 2048,
                    "disk_size": 512,
                    "cpu": self.fc_config.cpu if self.fc_config else 2,
                    "instance_concurrency": 200,
                    "internet_access": True,
                    "environment_variables": self._merge_environment_variables(
                        environment,
                    ),
                    "session_affinity": "HEADER_FIELD",
                    "instance_isolation_mode": "SHARE",
                    "session_affinity_config": session_affinity_config,
                }

                if self.fc_config and self.fc_config.log_config:
                    log_config = fc20230330_models.LogConfig(
                        logstore=self.fc_config.log_config.logstore,
                        project=self.fc_config.log_config.project,
                        enable_request_metrics=True,
                        enable_instance_metrics=True,
                        log_begin_rule="DefaultRegex",
                    )
                    update_function_kwargs["log_config"] = log_config
                    logger.debug(
                        f"Configuring log service: {self.fc_config.log_config.project}/{self.fc_config.log_config.logstore}",
                    )

                if self.fc_config and self.fc_config.vpc_config:
                    vpc_config = fc20230330_models.VPCConfig(
                        vpc_id=self.fc_config.vpc_config.vpc_id,
                        v_switch_ids=self.fc_config.vpc_config.vswitch_ids,
                        security_group_id=self.fc_config.vpc_config.security_group_id,
                    )
                    update_function_kwargs["vpc_config"] = vpc_config
                    logger.debug(
                        f"Configuring VPC network: {self.fc_config.vpc_config.vpc_id}",
                    )

                update_function_input = fc20230330_models.UpdateFunctionInput(
                    **update_function_kwargs,
                )

                update_function_request = (
                    fc20230330_models.UpdateFunctionRequest(
                        body=update_function_input,
                    )
                )
                runtime_options = util_models.RuntimeOptions()
                headers = {}
                response = self.client.update_function_with_options(
                    function_name,
                    update_function_request,
                    headers,
                    runtime_options,
                )

                logger.debug(
                    "FunctionComputeClient function updated successfully!",
                )
                logger.info(
                    f"FunctionComputeClient function name: {response.body.function_name}",
                )
                logger.info(
                    f"FunctionComputeClient runtime: {response.body.runtime}",
                )
                logger.info(
                    f"FunctionComputeClient update time: {response.body.created_time}",
                )

                trigger_info = self._get_http_trigger(function_name)
                endpoint_internet_url = trigger_info.get("url_internet", "")
                endpoint_intranet_url = trigger_info.get("url_intranet", "")
                logger.debug(
                    f"FC trigger retrieved: {trigger_info['trigger_name']}",
                )

                return {
                    "success": True,
                    "function_name": function_name,
                    "endpoint_internet_url": endpoint_internet_url,
                    "endpoint_intranet_url": endpoint_intranet_url,
                    "deploy_id": self.deploy_id
                    if hasattr(self, "deploy_id")
                    else None,
                }

            # Create new fc agent runtime
            logger.info("Creating fc runtime: %s", agent_runtime_name)

            create_function_kwargs = {
                "function_name": agent_runtime_name,
                "runtime": "custom.debian11",
                "code": code_config,
                "custom_runtime_config": custom_runtime_config,
                "description": f"AgentScope Runtime Function - {agent_runtime_name}",
                "timeout": 300,
                "memory_size": self.fc_config.memory
                if self.fc_config
                else 2048,
                "disk_size": 512,
                "cpu": self.fc_config.cpu if self.fc_config else 2,
                "instance_concurrency": 200,
                "internet_access": True,
                "environment_variables": self._merge_environment_variables(
                    environment,
                ),
                "session_affinity": "HEADER_FIELD",
                "instance_isolation_mode": "SHARE",
                "session_affinity_config": session_affinity_config,
            }

            if self.fc_config and self.fc_config.log_config:
                log_config = fc20230330_models.LogConfig(
                    logstore=self.fc_config.log_config.logstore,
                    project=self.fc_config.log_config.project,
                    enable_request_metrics=True,
                    enable_instance_metrics=True,
                    log_begin_rule="DefaultRegex",
                )
                create_function_kwargs["log_config"] = log_config
                logger.debug(
                    f"Configuring log service: {self.fc_config.log_config.project}/{self.fc_config.log_config.logstore}",
                )

            if self.fc_config and self.fc_config.vpc_config:
                vpc_config = fc20230330_models.VPCConfig(
                    vpc_id=self.fc_config.vpc_config.vpc_id,
                    v_switch_ids=self.fc_config.vpc_config.vswitch_ids,
                    security_group_id=self.fc_config.vpc_config.security_group_id,
                )
                create_function_kwargs["vpc_config"] = vpc_config
                logger.debug(
                    f"Configuring VPC network: {self.fc_config.vpc_config.vpc_id}",
                )

            create_function_input = fc20230330_models.CreateFunctionInput(
                **create_function_kwargs,
            )
            create_function_request = fc20230330_models.CreateFunctionRequest(
                body=create_function_input,
            )

            runtime_options = util_models.RuntimeOptions()
            headers = {}

            response = self.client.create_function_with_options(
                create_function_request,
                headers,
                runtime_options,
            )

            logger.debug(
                "FunctionComputeClient function created successfully!",
            )
            logger.info(
                f"FunctionComputeClient function name: {response.body.function_name}",
            )
            logger.info(
                f"FunctionComputeClient runtime: {response.body.runtime}",
            )
            logger.info(
                f"FunctionComputeClient create time: {response.body.created_time}",
            )

            trigger_info = self._create_http_trigger(agent_runtime_name)
            trigger_name = trigger_info["trigger_name"]
            endpoint_internet_url = trigger_info["url_internet"]
            endpoint_intranet_url = trigger_info["url_intranet"]
            logger.debug(f"FC trigger created: {trigger_name}")

            return {
                "success": True,
                "function_name": agent_runtime_name,
                "endpoint_internet_url": endpoint_internet_url,
                "endpoint_intranet_url": endpoint_intranet_url,
                "deploy_id": self.deploy_id
                if hasattr(self, "deploy_id")
                else None,
            }

        except Exception as e:
            logger.error("Exception during FC deployment: %s", str(e))
            return {
                "success": False,
                "error": str(e),
                "message": f"Exception during FC deployment: {str(e)}",
            }

    def _merge_environment_variables(
        self,
        environment: Optional[Dict[str, str]] = None,
    ) -> Dict[str, str]:
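        """Overlay user environment variables on the Python 3.12 runtime
        defaults (PATH, PYTHONPATH, LD_LIBRARY_PATH, PYTHON_VERSION);
        user-supplied values win on key collisions.
        """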
        if environment is None:
            environment = {}
        python_312_environment = {
            "PATH": "/var/fc/lang/python3.12/bin:/usr/local/bin/apache-maven/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/ruby/bin:/opt/bin:/code:/code/bin",
            "PYTHONPATH": "/opt/python:/code/python:/code",
            "LD_LIBRARY_PATH": "/code:/code/lib:/usr/lib:/opt/lib:/usr/local/lib",
            "PYTHON_VERSION": "3.12",
        }
        merged = {**python_312_environment, **environment}
        return merged

    def _create_http_trigger(
        self,
        function_name: str,
    ) -> dict:
        """Create an HTTP trigger for the function (implementation verified by tests).

        Args:
            function_name (str): The name of the function to create a trigger for.

        Returns:
            dict: A dictionary containing trigger information in the format:
            {
                'trigger_name': str,
                'url_internet': str,
                'url_intranet': str,
                'trigger_id': str
            }
        """
        trigger_name = self.HTTP_TRIGGER_NAME

        try:
            logger.debug(
                f"FunctionComputeClient creating HTTP trigger: {trigger_name}",
            )

            # Build trigger configuration (test-verified configuration)
            trigger_config_dict = {
                "authType": "anonymous",
                "methods": ["GET", "POST", "PUT", "DELETE", "HEAD", "OPTIONS"],
            }

            # Create trigger input
            trigger_input = fc20230330_models.CreateTriggerInput(
                trigger_name=trigger_name,
                trigger_type="http",
                trigger_config=json.dumps(trigger_config_dict),
                description=f"HTTP trigger for agentscope runtime function {function_name}",
            )

            # Create trigger request
            create_trigger_request = fc20230330_models.CreateTriggerRequest(
                body=trigger_input,
            )

            # Call API to create trigger
            response = self.client.create_trigger_with_options(
                function_name=function_name,
                request=create_trigger_request,
                headers={},
                runtime=util_models.RuntimeOptions(),
            )

            logger.info(
                f"FunctionComputeClient HTTP trigger created successfully: {trigger_name}",
            )
            logger.debug(
                f"FunctionComputeClient HTTP trigger response: {response}",
            )

            # Extract trigger information from response
            trigger_info = {
                "trigger_name": trigger_name,
                "url_internet": None,
                "url_intranet": None,
                "trigger_id": None,
                "qualifier": "LATEST",
                "last_modified_time": None,
                "created_time": None,
                "status": None,
            }

            # Parse response body to get URL information
            if hasattr(response, "body") and response.body:
                body = response.body
                if hasattr(body, "http_trigger") and body.http_trigger:
                    http_trigger = body.http_trigger
                    if hasattr(http_trigger, "url_internet"):
                        trigger_info[
                            "url_internet"
                        ] = http_trigger.url_internet
                    if hasattr(http_trigger, "url_intranet"):
                        trigger_info[
                            "url_intranet"
                        ] = http_trigger.url_intranet

                if hasattr(body, "trigger_id"):
                    trigger_info["trigger_id"] = body.trigger_id
                if hasattr(body, "last_modified_time"):
                    trigger_info[
                        "last_modified_time"
                    ] = body.last_modified_time
                if hasattr(body, "created_time"):
                    trigger_info["created_time"] = body.created_time
                if hasattr(body, "status"):
                    trigger_info["status"] = body.status
                if hasattr(body, "qualifier"):
                    trigger_info["qualifier"] = body.qualifier

            logger.info("FunctionComputeClient trigger URL information:")
            logger.info(
                f"FunctionComputeClient - Internet URL: {trigger_info['url_internet']}",
            )
            logger.info(
                f"FunctionComputeClient - Intranet URL: {trigger_info['url_intranet']}",
            )
            logger.info(
                f"FunctionComputeClient - Trigger ID: {trigger_info['trigger_id']}",
            )

            return trigger_info

        except Exception as e:
            logger.error(
                f"FunctionComputeClient create HTTP trigger failed: {e}",
            )
            # Even if creation fails, return basic information for subsequent cleanup
            return {
                "trigger_name": trigger_name,
                "url_internet": None,
                "url_intranet": None,
                "qualifier": "LATEST",
                "last_modified_time": None,
                "created_time": None,
                "status": None,
            }

    def _get_http_trigger(self, function_name: str) -> dict:
        """Get HTTP trigger information for the function.

        Args:
            function_name (str): The name of the function to get trigger for.

        Returns:
            dict: A dictionary containing trigger information in the format:
            {
                'trigger_name': str,
                'url_internet': str,
                'url_intranet': str,
                'trigger_id': str,
                'qualifier': str,
                'last_modified_time': str,
                'created_time': str,
                'status': str
            }
        """
        trigger_name = self.HTTP_TRIGGER_NAME

        try:
            logger.debug(
                f"FunctionComputeClient getting HTTP trigger: {trigger_name}",
            )

            # Call API to get trigger
            response = self.client.get_trigger_with_options(
                function_name=function_name,
                trigger_name=trigger_name,
                headers={},
                runtime=util_models.RuntimeOptions(),
            )

            logger.info(
                f"FunctionComputeClient HTTP trigger retrieved successfully: {trigger_name}",
            )
            logger.debug(
                f"FunctionComputeClient HTTP trigger response: {response}",
            )

            # Extract trigger information from response
            trigger_info = {
                "trigger_name": trigger_name,
                "url_internet": None,
                "url_intranet": None,
                "trigger_id": None,
                "qualifier": "LATEST",
                "last_modified_time": None,
                "created_time": None,
                "status": None,
            }

            # Parse response body to get URL information
            if hasattr(response, "body") and response.body:
                body = response.body
                if hasattr(body, "http_trigger") and body.http_trigger:
                    http_trigger = body.http_trigger
                    if hasattr(http_trigger, "url_internet"):
                        trigger_info[
                            "url_internet"
                        ] = http_trigger.url_internet
                    if hasattr(http_trigger, "url_intranet"):
                        trigger_info[
                            "url_intranet"
                        ] = http_trigger.url_intranet

                if hasattr(body, "trigger_id"):
                    trigger_info["trigger_id"] = body.trigger_id
                if hasattr(body, "last_modified_time"):
                    trigger_info[
                        "last_modified_time"
                    ] = body.last_modified_time
                if hasattr(body, "created_time"):
                    trigger_info["created_time"] = body.created_time
                if hasattr(body, "status"):
                    trigger_info["status"] = body.status
                if hasattr(body, "qualifier"):
                    trigger_info["qualifier"] = body.qualifier

            logger.info("FunctionComputeClient trigger URL information:")
            logger.info(
                f"FunctionComputeClient - Internet URL: {trigger_info['url_internet']}",
            )
            logger.info(
                f"FunctionComputeClient - Intranet URL: {trigger_info['url_intranet']}",
            )
            logger.info(
                f"FunctionComputeClient - Trigger ID: {trigger_info['trigger_id']}",
            )

            return trigger_info

        except Exception as e:
            logger.error(
                f"FunctionComputeClient get HTTP trigger failed: {e}",
            )
            # Even if retrieval fails, return basic information
            return {
                "trigger_name": trigger_name,
                "url_internet": None,
                "url_intranet": None,
                "trigger_id": None,
                "qualifier": "LATEST",
                "last_modified_time": None,
                "created_time": None,
                "status": None,
            }

    async def _build_and_zip_in_docker(
        self,
        wheel_path: Path,
        output_dir: Path,
        zip_filename: str,
    ) -> Path:
        """Build dependencies and create zip package in Docker container.

        All build logic runs in container, only final zip file is returned to host.

        Args:
            wheel_path: Path to the wheel file on host machine.
            output_dir: Local directory to save the final zip file.
            zip_filename: Name of the output zip file.

        Returns:
            Path to the created zip file.

        Raises:
            RuntimeError: If Docker is not available or build fails.
            FileNotFoundError: If Docker is not installed.
        """
        import subprocess

        try:
            logger.info("Starting Docker build for wheel: %s", wheel_path)
            logger.debug("Output directory: %s", output_dir)
            logger.debug("Zip filename: %s", zip_filename)

            # Ensure output directory exists
            output_dir.mkdir(parents=True, exist_ok=True)

            # Convert paths to absolute paths for Docker volume mounting
            wheel_path_abs = wheel_path.resolve()
            output_dir_abs = output_dir.resolve()

            # Keep original wheel filename for pip to parse metadata
            wheel_filename = wheel_path.name
            wheel_path_in_container = f"/tmp/{wheel_filename}"

            # Docker image to use
            docker_image = "registry.cn-beijing.aliyuncs.com/aliyunfc/runtime:custom.debian11-build-3.1.0"

            # Build script that runs in container:
            # 1. Install wheel and dependencies to /tmp/python
            # 2. Use Python's zipfile module to create zip
            # 3. Save zip to /output
            build_script = f"""
set -e
echo "=== Installing dependencies to /tmp/python ==="
pip install {wheel_path_in_container} -t /tmp/python --no-cache-dir

echo "=== Creating zip package using Python ==="
python3 << 'PYTHON_EOF'
import os
import zipfile
from pathlib import Path

python_dir = Path("/tmp/python")
zip_path = Path("/output/{zip_filename}")

print(f"Creating zip from {{python_dir}}")
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
    for root, dirs, files in os.walk(python_dir):
        for file in files:
            file_path = Path(root) / file
            arcname = file_path.relative_to(python_dir)
            zipf.write(file_path, arcname)

zip_size_mb = zip_path.stat().st_size / (1024 * 1024)
print(f"Created zip ({{zip_size_mb:.2f}} MB): {{zip_path}}")
PYTHON_EOF

echo "=== Build complete ==="
ls -lh /output/{zip_filename}
"""

            # Docker run command with x86_64 platform for AgentRun compatibility
            cmd = [
                "docker",
                "run",
                "--rm",
                "--platform",
                "linux/amd64",
                "-v",
                f"{wheel_path_abs}:{wheel_path_in_container}:ro",
                "-v",
                f"{output_dir_abs}:/output",
                docker_image,
                "bash",
                "-c",
                build_script,
            ]

            logger.info("Executing Docker build command")
            logger.debug("Build script:\n%s", build_script)

            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                check=False,
            )

            if result.returncode != 0:
                logger.error("Docker build failed: %s", result.stderr)
                raise RuntimeError(
                    f"Docker build failed: {result.stderr}",
                )

            logger.info("Docker build completed successfully")
            if result.stdout:
                logger.debug("Docker output:\n%s", result.stdout)

            # Verify zip file was created
            zip_file_path = output_dir / zip_filename
            if not zip_file_path.exists():
                raise RuntimeError(f"Zip file not created: {zip_file_path}")

            zip_size_mb = zip_file_path.stat().st_size / (1024 * 1024)
            logger.info(
                "Zip package created successfully (%.2f MB): %s",
                zip_size_mb,
                zip_file_path,
            )

            return zip_file_path

        except FileNotFoundError as e:
            if "docker" in str(e).lower():
                logger.error(
                    "Docker is not installed or not available in PATH",
                )
                raise RuntimeError(
                    "Docker is required for building. "
                    "Install Docker Desktop: https://www.docker.com/products/docker-desktop",
                ) from e
            raise
        except Exception as e:
            logger.error("Error during Docker build: %s", str(e))
            raise

    async def _upload_to_fixed_oss_bucket(
        self,
        zip_file_path: Path,
        bucket_name: str,
    ) -> Dict[str, str]:
        """Upload zip file to a fixed OSS bucket.

        Args:
            zip_file_path: Path to the zip file to upload.
            bucket_name: Target OSS bucket name (e.g., "tmp-agentscope-fc-code").

        Returns:
            Dictionary containing:
            - bucket_name: OSS bucket name
            - object_key: Object key in OSS
            - presigned_url: Presigned URL for downloading (valid for 3 hours)

        Raises:
            RuntimeError: If OSS SDK is not installed or upload fails.
        """
        try:
            from alibabacloud_oss_v2 import Client as OSSClient
            from alibabacloud_oss_v2.models import (
                PutObjectRequest,
                GetObjectRequest,
                PutBucketRequest,
                CreateBucketConfiguration,
                PutBucketTagsRequest,
                Tagging,
                TagSet,
                Tag,
            )
            from alibabacloud_oss_v2 import config as oss_config
            from alibabacloud_oss_v2.credentials import (
                StaticCredentialsProvider,
            )
        except ImportError as e:
            logger.error(
                "OSS SDK not available. Install with: pip install alibabacloud-oss-v2",
            )
            raise RuntimeError(
                "OSS SDK not installed. Run: pip install alibabacloud-oss-v2",
            ) from e

        # Create OSS client
        logger.info("Initializing OSS client")

        credentials_provider = StaticCredentialsProvider(
            access_key_id=self.oss_config.access_key_id,
            access_key_secret=self.oss_config.access_key_secret,
        )

        cfg = oss_config.Config(
            credentials_provider=credentials_provider,
            region=self.oss_config.region,
        )
        oss_client = OSSClient(cfg)

        logger.info("Using OSS bucket: %s", bucket_name)

        # Create bucket if not exists
        try:
            bucket_exists = oss_client.is_bucket_exist(bucket=bucket_name)
        except Exception:
            bucket_exists = False

        if not bucket_exists:
            logger.info("OSS bucket does not exist, creating: %s", bucket_name)
            try:
                put_bucket_req = PutBucketRequest(
                    bucket=bucket_name,
                    acl="private",
                    create_bucket_configuration=CreateBucketConfiguration(
                        storage_class="IA",
                    ),
                )
                put_bucket_result = oss_client.put_bucket(put_bucket_req)
                logger.info(
                    "OSS bucket created (Status: %s, Request ID: %s)",
                    put_bucket_result.status_code,
                    put_bucket_result.request_id,
                )

                # Add tag for fc access permission
                tag_result = oss_client.put_bucket_tags(
                    PutBucketTagsRequest(
                        bucket=bucket_name,
                        tagging=Tagging(
                            tag_set=TagSet(
                                tags=[
                                    Tag(
                                        key="fc-deploy-access",
                                        value="ReadAndAdd",
                                    ),
                                ],
                            ),
                        ),
                    ),
                )
                logger.info(
                    "OSS bucket tags configured (Status: %s)",
                    tag_result.status_code,
                )
            except Exception as e:
                logger.error("Failed to create OSS bucket: %s", str(e))
                raise
        else:
            logger.debug("OSS bucket already exists: %s", bucket_name)

        # Upload zip file
        object_key = zip_file_path.name
        logger.info("Uploading to OSS: %s", object_key)

        try:
            with open(zip_file_path, "rb") as f:
                file_bytes = f.read()

            put_obj_req = PutObjectRequest(
                bucket=bucket_name,
                key=object_key,
                body=file_bytes,
            )
            put_obj_result = oss_client.put_object(put_obj_req)
            logger.info(
                "File uploaded to OSS successfully (Status: %s)",
                put_obj_result.status_code,
            )
        except Exception as e:
            logger.error("Failed to upload file to OSS: %s", str(e))
            raise RuntimeError(
                f"Failed to upload file to OSS: {str(e)}",
            ) from e

        # Generate presigned URL (valid for 3 hours)
        logger.info("Generating presigned URL for artifact")
        try:
            presign_result = oss_client.presign(
                GetObjectRequest(bucket=bucket_name, key=object_key),
                expires=timedelta(hours=3),
            )
            presigned_url = presign_result.url
            logger.info("Presigned URL generated (valid for 3 hours)")
        except Exception as e:
            logger.error("Failed to generate presigned URL: %s", str(e))
            raise RuntimeError(
                f"Failed to generate presigned URL: {str(e)}",
            ) from e

        return {
            "bucket_name": bucket_name,
            "object_key": object_key,
            "presigned_url": presigned_url,
        }

    async def delete(self, function_name: str) -> Dict[str, Any]:
        """Delete a function and its HTTP trigger from FC.

        Args:
            function_name (str): The name of the function to delete.

        Returns:
            dict: A dictionary containing:
                - success: bool indicating if deletion was successful
                - message: str describing the result
                - function_name: str the name of the deleted function
        """
        trigger_name = self.HTTP_TRIGGER_NAME

        try:
            # Step 1: Delete HTTP trigger first
            logger.info(
                f"Deleting HTTP trigger '{trigger_name}' for function '{function_name}'",
            )
            try:
                self.client.delete_trigger_with_options(
                    function_name=function_name,
                    trigger_name=trigger_name,
                    headers={},
                    runtime=util_models.RuntimeOptions(),
                )
                logger.info(
                    f"HTTP trigger '{trigger_name}' deleted successfully",
                )
            except Exception as trigger_error:
                # Log but continue - trigger might not exist
                logger.warning(
                    f"Failed to delete trigger '{trigger_name}': {trigger_error}",
                )

            # Step 2: Delete the function
            logger.info(f"Deleting function '{function_name}'")
            self.client.delete_function_with_options(
                function_name=function_name,
                headers={},
                runtime=util_models.RuntimeOptions(),
            )
            logger.info(f"Function '{function_name}' deleted successfully")

            return {
                "success": True,
                "message": "Agent runtime deletion initiated successfully",
                "function_name": function_name,
            }

        except Exception as e:
            logger.error(f"Failed to delete function '{function_name}': {e}")
            return {
                "success": False,
                "message": f"Failed to delete function: {str(e)}",
                "function_name": function_name,
            }

    async def stop(self, deploy_id: str, **kwargs) -> Dict[str, Any]:
        """Stop FC deployment by deleting it.

        Args:
            deploy_id: Deployment ID
            **kwargs: Additional parameters

        Returns:
            Dict with success status, message, and details
        """
        try:
            # Try to get deployment info from state for context
            deployment_info = None
            deployment = None
            try:
                deployment = self.state_manager.get(deploy_id)
                if deployment:
                    deployment_info = {
                        "url": deployment.url
                        if hasattr(deployment, "url")
                        else None,
                        "resource_name": deployment.config.get("resource_name")
                        if deployment.config
                        else None,
                    }
                    logger.debug(
                        f"Fetched deployment info from state: {deployment_info}",
                    )
            except Exception as e:
                logger.debug(
                    f"Could not fetch deployment info from state: {e}",
                )

            logger.info(f"Stopping FC deployment: {deploy_id}")

            # Get function_name from deployment config (resource_name is the function name)
            function_name = None
            if deployment and deployment.config:
                function_name = deployment.config.get("resource_name")

            if not function_name:
                # Fallback: try using deploy_id as function_name for backward compatibility
                function_name = deploy_id
                logger.warning(
                    f"Could not find resource_name in deployment config, "
                    f"using deploy_id as fallback: {deploy_id}",
                )

            # Use the existing delete method with function_name
            result = await self.delete(function_name)

            if result.get("success"):
                # Update state manager on successful deletion
                try:
                    self.state_manager.update_status(deploy_id, "stopped")
                except KeyError:
                    logger.debug(
                        f"Deployment {deploy_id} not found in state (already removed)",
                    )

                return {
                    "success": True,
                    "message": f"FC deployment {deploy_id} deleted successfully",
                    "details": result,
                }
            else:
                return {
                    "success": False,
                    "message": f"Failed to delete FC deployment: {result.get('message', 'Unknown error')}",
                    "details": result,
                }
        except Exception as e:
            logger.error(
                f"Failed to stop FC deployment {deploy_id}: {e}",
            )
            return {
                "success": False,
                "message": f"Failed to stop FC deployment: {e}",
                "details": {"deploy_id": deploy_id, "error": str(e)},
            }