agentscope-runtime 1.0.0b2__py3-none-any.whl → 1.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentscope_runtime/adapters/agentscope/message.py +78 -10
- agentscope_runtime/adapters/agentscope/stream.py +155 -101
- agentscope_runtime/adapters/agentscope/tool/tool.py +1 -3
- agentscope_runtime/adapters/agno/__init__.py +0 -0
- agentscope_runtime/adapters/agno/message.py +30 -0
- agentscope_runtime/adapters/agno/stream.py +122 -0
- agentscope_runtime/adapters/langgraph/__init__.py +12 -0
- agentscope_runtime/adapters/langgraph/message.py +257 -0
- agentscope_runtime/adapters/langgraph/stream.py +205 -0
- agentscope_runtime/cli/__init__.py +7 -0
- agentscope_runtime/cli/cli.py +63 -0
- agentscope_runtime/cli/commands/__init__.py +2 -0
- agentscope_runtime/cli/commands/chat.py +815 -0
- agentscope_runtime/cli/commands/deploy.py +1062 -0
- agentscope_runtime/cli/commands/invoke.py +58 -0
- agentscope_runtime/cli/commands/list_cmd.py +103 -0
- agentscope_runtime/cli/commands/run.py +176 -0
- agentscope_runtime/cli/commands/sandbox.py +128 -0
- agentscope_runtime/cli/commands/status.py +60 -0
- agentscope_runtime/cli/commands/stop.py +185 -0
- agentscope_runtime/cli/commands/web.py +166 -0
- agentscope_runtime/cli/loaders/__init__.py +6 -0
- agentscope_runtime/cli/loaders/agent_loader.py +295 -0
- agentscope_runtime/cli/state/__init__.py +10 -0
- agentscope_runtime/cli/utils/__init__.py +18 -0
- agentscope_runtime/cli/utils/console.py +378 -0
- agentscope_runtime/cli/utils/validators.py +118 -0
- agentscope_runtime/engine/app/agent_app.py +15 -5
- agentscope_runtime/engine/deployers/__init__.py +1 -0
- agentscope_runtime/engine/deployers/agentrun_deployer.py +154 -24
- agentscope_runtime/engine/deployers/base.py +27 -2
- agentscope_runtime/engine/deployers/kubernetes_deployer.py +158 -31
- agentscope_runtime/engine/deployers/local_deployer.py +188 -25
- agentscope_runtime/engine/deployers/modelstudio_deployer.py +109 -18
- agentscope_runtime/engine/deployers/state/__init__.py +9 -0
- agentscope_runtime/engine/deployers/state/manager.py +388 -0
- agentscope_runtime/engine/deployers/state/schema.py +96 -0
- agentscope_runtime/engine/deployers/utils/build_cache.py +736 -0
- agentscope_runtime/engine/deployers/utils/detached_app.py +105 -30
- agentscope_runtime/engine/deployers/utils/docker_image_utils/docker_image_builder.py +31 -10
- agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +15 -8
- agentscope_runtime/engine/deployers/utils/docker_image_utils/image_factory.py +30 -2
- agentscope_runtime/engine/deployers/utils/k8s_utils.py +241 -0
- agentscope_runtime/engine/deployers/utils/package.py +56 -6
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +68 -9
- agentscope_runtime/engine/deployers/utils/service_utils/process_manager.py +155 -5
- agentscope_runtime/engine/deployers/utils/wheel_packager.py +107 -123
- agentscope_runtime/engine/runner.py +32 -12
- agentscope_runtime/engine/schemas/agent_schemas.py +21 -7
- agentscope_runtime/engine/schemas/exception.py +580 -0
- agentscope_runtime/engine/services/agent_state/__init__.py +2 -0
- agentscope_runtime/engine/services/agent_state/state_service_factory.py +55 -0
- agentscope_runtime/engine/services/memory/__init__.py +2 -0
- agentscope_runtime/engine/services/memory/memory_service_factory.py +126 -0
- agentscope_runtime/engine/services/sandbox/__init__.py +2 -0
- agentscope_runtime/engine/services/sandbox/sandbox_service_factory.py +49 -0
- agentscope_runtime/engine/services/service_factory.py +119 -0
- agentscope_runtime/engine/services/session_history/__init__.py +2 -0
- agentscope_runtime/engine/services/session_history/session_history_service_factory.py +73 -0
- agentscope_runtime/engine/services/utils/tablestore_service_utils.py +35 -10
- agentscope_runtime/engine/tracing/wrapper.py +49 -31
- agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +113 -39
- agentscope_runtime/sandbox/box/shared/routers/mcp_utils.py +20 -4
- agentscope_runtime/sandbox/utils.py +2 -0
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/METADATA +82 -11
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/RECORD +71 -36
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/entry_points.txt +1 -0
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# pylint: disable=too-many-nested-blocks,too-many-return-statements
|
|
3
|
+
|
|
4
|
+
import logging
|
|
5
|
+
import os
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
# Configure logging
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def isLocalK8sEnvironment() -> bool:
    """
    Determine whether the current environment is a local Kubernetes setup.

    Two detection methods are applied; each may abstain by returning None:

    1. Inspect the kubeconfig current context / cluster server address
    2. Query the Kubernetes API for local-cluster signatures

    A simple majority vote over the non-abstaining checks decides the
    verdict; when every check abstains we conservatively assume a
    cloud/remote cluster.

    Returns:
        bool: True if running in a local cluster, False otherwise (
        cloud/production)
    """

    # Results from the individual detection methods
    detection_results = {
        "kubeconfig_context": _check_kubeconfig_context(),
        "k8s_api": _check_kubernetes_api(),
    }

    # Lazy %-args keep formatting cost out of the logging call site
    logger.info("K8s environment detection results: %s", detection_results)

    # Voting: if majority of applicable checks indicate 'local', return True
    local_votes = sum(
        1 for result in detection_results.values() if result is True
    )
    total_votes = sum(
        1 for result in detection_results.values() if result is not None
    )

    if total_votes == 0:
        logger.warning(
            "Unable to determine K8s environment type; defaulting to "
            "cloud/remote",
        )
        return False

    is_local = local_votes > (total_votes / 2)
    logger.info(
        "Final verdict: %s (votes: %d/%d)",
        "Local environment" if is_local else "Cloud/remote environment",
        local_votes,
        total_votes,
    )

    return is_local
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _check_kubeconfig_context() -> Optional[bool]:
    """
    Inspect kubeconfig current context name for local-cluster patterns.

    Falls back to the cluster server URL referenced by the current context
    when the context name itself is inconclusive.

    Returns:
        Optional[bool]: True=local, False=cloud, None=undetermined
    """
    try:
        import yaml

        kubeconfig_path = os.path.expanduser(
            os.getenv("KUBECONFIG", "~/.kube/config"),
        )

        if not os.path.exists(kubeconfig_path):
            logger.debug("kubeconfig file not found")
            return None

        with open(kubeconfig_path, "r", encoding="utf-8") as f:
            config = yaml.safe_load(f)

        if not config or "current-context" not in config:
            return None

        current_context = config["current-context"]

        # Common context name patterns for local K8s tools
        local_patterns = [
            "minikube",
            "kind-",
            "k3s",
            "k3d-",
            "microk8s",
            "docker-desktop",
            "docker-for-desktop",
            "localhost",
            "rancher-desktop",
            "colima",
        ]

        # Patterns commonly associated with cloud providers
        cloud_patterns = [
            "gke_",
            "arn:aws:eks",
            "aks-",
            "do-",  # DigitalOcean
            "prod-",
            "production",
        ]

        current_context_lower = current_context.lower()

        if any(pattern in current_context_lower for pattern in local_patterns):
            logger.debug("Context '%s' matches local pattern", current_context)
            return True

        if any(pattern in current_context_lower for pattern in cloud_patterns):
            logger.debug("Context '%s' matches cloud pattern", current_context)
            return False

        # Resolve the cluster entry referenced by the current context via
        # first-match lookups instead of nested scanning loops.
        matched_ctx = next(
            (
                ctx
                for ctx in config.get("contexts", [])
                if ctx.get("name") == current_context
            ),
            None,
        )
        if matched_ctx is not None:
            cluster_name = matched_ctx.get("context", {}).get("cluster")
            server = next(
                (
                    cluster.get("cluster", {}).get("server", "")
                    for cluster in config.get("clusters", [])
                    if cluster.get("name") == cluster_name
                ),
                "",
            )

            # A server pointing at a loopback/VM-host address means the
            # API endpoint lives on this machine.
            local_addresses = [
                "192.168.5.1",
                "127.0.0.1",
                "localhost",
                "0.0.0.0",
            ]
            if any(addr in server for addr in local_addresses):
                logger.debug(
                    "Cluster server '%s' points to localhost",
                    server,
                )
                return True
        return None

    except ImportError:
        logger.warning(
            "PyYAML not installed; cannot parse kubeconfig. "
            "Install via: pip install pyyaml",
        )
        return None
    except Exception as e:
        logger.debug("Error checking kubeconfig: %s", e)
        return None
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _check_kubernetes_api() -> Optional[bool]:
    """
    Query the Kubernetes API for local-environment signatures.

    Returns:
        Optional[bool]: True=local, False=cloud, None=undetermined
    """
    try:
        from kubernetes import client, config

        # Load config — in-cluster first, then kubeconfig
        try:
            config.load_incluster_config()
        except Exception:
            try:
                config.load_kube_config()
            except Exception:
                logger.debug("Failed to load Kubernetes configuration")
                return None

        v1 = client.CoreV1Api()

        # A single node is a strong hint of a local developer cluster;
        # inspect its name and labels for tool-specific signatures.
        nodes = v1.list_node()
        if len(nodes.items) == 1:
            node = nodes.items[0]
            node_name = node.metadata.name.lower()

            # Local node name patterns
            local_node_patterns = (
                "minikube",
                "kind-",
                "k3s",
                "k3d-",
                "docker-desktop",
                "localhost",
                "rancher-desktop",
            )
            if any(pattern in node_name for pattern in local_node_patterns):
                logger.debug("Node name '%s' matches local pattern", node_name)
                return True

            # Check node labels set by local K8s distributions
            labels = node.metadata.labels or {}
            if "minikube.k8s.io/name" in labels:
                return True
            if labels.get("node.kubernetes.io/instance-type") in ("k3s", "k3d"):
                return True

        # Check namespaces for local-tool signatures
        namespaces = v1.list_namespace()
        namespace_names = [ns.metadata.name for ns in namespaces.items]

        local_namespaces = [
            "cattle-system",  # Rancher/K3s
            "local-path-storage",  # Kind/K3s/MicroK8s
        ]
        if any(ns in namespace_names for ns in local_namespaces):
            logger.debug("Detected local-environment namespace")
            return True

        return None

    except ImportError:
        logger.warning(
            "kubernetes client not installed; API detection disabled. "
            "Install via: pip install kubernetes",
        )
        return None
    except Exception as e:
        logger.debug("Error querying K8s API: %s", e)
        return None
|
|
@@ -16,7 +16,6 @@ import inspect
|
|
|
16
16
|
import logging
|
|
17
17
|
import os
|
|
18
18
|
import shutil
|
|
19
|
-
import tempfile
|
|
20
19
|
import zipfile
|
|
21
20
|
from pathlib import Path
|
|
22
21
|
from typing import Optional, List, Tuple, Union
|
|
@@ -30,6 +29,44 @@ DEPLOYMENT_ZIP = "deployment.zip"
|
|
|
30
29
|
TEMPLATES_DIR = Path(__file__).parent / "templates"
|
|
31
30
|
DEFAULT_ENTRYPOINT_FILE = "runtime_main.py"
|
|
32
31
|
|
|
32
|
+
# Default workspace for build artifacts
|
|
33
|
+
DEFAULT_BUILD_WORKSPACE = Path(os.getcwd()) / ".agentscope_runtime" / "builds"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def generate_build_directory(
    platform: str = "unknown",
    workspace: Optional[Path] = None,
) -> Path:
    """
    Generate a platform-aware build directory with timestamp and random suffix.

    Args:
        platform: Deployment platform (k8s, modelstudio, agentrun, local, etc.)
        workspace: Custom workspace directory (defaults to
            DEFAULT_BUILD_WORKSPACE)

    Returns:
        Path: Generated build directory path

    Example:
        >>> build_dir = generate_build_directory("modelstudio")
        >>> # Returns: .agentscope_runtime/builds/modelstudio_20251207_xxx
    """
    import random
    import time

    target_workspace = workspace if workspace is not None else DEFAULT_BUILD_WORKSPACE
    target_workspace.mkdir(parents=True, exist_ok=True)

    # <platform>_<timestamp>_<6 hex chars> keeps concurrent builds distinct
    stamp = time.strftime("%Y%m%d_%H%M%S")
    suffix = "".join(random.choice("0123456789abcdef") for _ in range(6))
    return target_workspace / f"{platform}_{stamp}_{suffix}"
|
|
69
|
+
|
|
33
70
|
|
|
34
71
|
def _get_template_env() -> Environment:
|
|
35
72
|
"""
|
|
@@ -254,7 +291,7 @@ def _auto_detect_entrypoint(project_dir: str) -> str:
|
|
|
254
291
|
- app.py
|
|
255
292
|
- main.py
|
|
256
293
|
- __main__.py
|
|
257
|
-
-
|
|
294
|
+
- chat.py
|
|
258
295
|
- runner.py
|
|
259
296
|
|
|
260
297
|
Args:
|
|
@@ -270,7 +307,7 @@ def _auto_detect_entrypoint(project_dir: str) -> str:
|
|
|
270
307
|
"app.py",
|
|
271
308
|
"main.py",
|
|
272
309
|
"__main__.py",
|
|
273
|
-
"
|
|
310
|
+
"chat.py",
|
|
274
311
|
"runner.py",
|
|
275
312
|
]
|
|
276
313
|
|
|
@@ -432,6 +469,7 @@ def _get_default_ignore_patterns() -> List[str]:
|
|
|
432
469
|
".vscode",
|
|
433
470
|
"*.log",
|
|
434
471
|
"logs",
|
|
472
|
+
".agentscope_runtime", # Ignore build workspace
|
|
435
473
|
]
|
|
436
474
|
|
|
437
475
|
|
|
@@ -547,6 +585,8 @@ def package(
|
|
|
547
585
|
host: str = "0.0.0.0",
|
|
548
586
|
port: int = 8090,
|
|
549
587
|
extra_parameters: Optional[List[RuntimeParameter]] = None,
|
|
588
|
+
requirements: Optional[List[str]] = None,
|
|
589
|
+
platform: str = "unknown",
|
|
550
590
|
**kwargs,
|
|
551
591
|
) -> Tuple[str, ProjectInfo]:
|
|
552
592
|
"""
|
|
@@ -562,6 +602,13 @@ def package(
|
|
|
562
602
|
2. Generate a new main.py that imports and runs the app/runner
|
|
563
603
|
3. Package the project with the generated main.py as entrypoint
|
|
564
604
|
|
|
605
|
+
Build directory naming:
|
|
606
|
+
- When output_dir=None (default), creates workspace directory with
|
|
607
|
+
platform-aware naming
|
|
608
|
+
- Directory format: cwd/.agentscope_runtime/builds/<platform>_
|
|
609
|
+
<timestamp>_<code>/
|
|
610
|
+
- Explicit output_dir uses the provided path
|
|
611
|
+
|
|
565
612
|
Args:
|
|
566
613
|
app: AgentApp instance (for object-style deployment)
|
|
567
614
|
runner: Runner instance (for object-style deployment)
|
|
@@ -570,6 +617,8 @@ def package(
|
|
|
570
617
|
host: Default host for the service (default: "0.0.0.0")
|
|
571
618
|
port: Default port for the service (default: 8090)
|
|
572
619
|
extra_parameters: Additional runtime parameters to expose via CLI
|
|
620
|
+
requirements: Additional pip requirements
|
|
621
|
+
platform: Deployment platform (k8s, modelstudio, agentrun, local)
|
|
573
622
|
**kwargs: Additional keyword arguments (ignored)
|
|
574
623
|
|
|
575
624
|
Returns:
|
|
@@ -597,7 +646,7 @@ def package(
|
|
|
597
646
|
... help="Number of worker threads"
|
|
598
647
|
... ),
|
|
599
648
|
... ]
|
|
600
|
-
>>> package(app=my_app, extra_parameters=extra_params)
|
|
649
|
+
>>> package(app=my_app, extra_parameters=extra_params, platform="k8s")
|
|
601
650
|
"""
|
|
602
651
|
# Determine project info and target object
|
|
603
652
|
target_obj = None
|
|
@@ -613,9 +662,10 @@ def package(
|
|
|
613
662
|
|
|
614
663
|
logger.info(f"Packaging project from: {project_info.project_dir}")
|
|
615
664
|
|
|
616
|
-
# Create output directory
|
|
665
|
+
# Create output directory with platform-aware naming
|
|
617
666
|
if output_dir is None:
|
|
618
|
-
output_dir =
|
|
667
|
+
output_dir = str(generate_build_directory(platform))
|
|
668
|
+
os.makedirs(output_dir, exist_ok=True)
|
|
619
669
|
else:
|
|
620
670
|
os.makedirs(output_dir, exist_ok=True)
|
|
621
671
|
|
|
@@ -470,13 +470,27 @@ class FastAPIAppFactory:
|
|
|
470
470
|
}
|
|
471
471
|
|
|
472
472
|
# Mode-specific endpoints
|
|
473
|
-
|
|
474
|
-
FastAPIAppFactory._add_process_control_endpoints(app)
|
|
473
|
+
FastAPIAppFactory._add_process_control_endpoints(app)
|
|
475
474
|
|
|
476
475
|
@staticmethod
|
|
477
476
|
def _add_process_control_endpoints(app: FastAPI):
|
|
478
477
|
"""Add process control endpoints for detached mode."""
|
|
479
478
|
|
|
479
|
+
@app.post("/shutdown")
|
|
480
|
+
async def shutdown_process_simple():
|
|
481
|
+
"""Gracefully shutdown the process (simple endpoint)."""
|
|
482
|
+
# Import here to avoid circular imports
|
|
483
|
+
import os
|
|
484
|
+
import signal
|
|
485
|
+
|
|
486
|
+
# Schedule shutdown after response
|
|
487
|
+
async def delayed_shutdown():
|
|
488
|
+
await asyncio.sleep(0.5)
|
|
489
|
+
os.kill(os.getpid(), signal.SIGTERM)
|
|
490
|
+
|
|
491
|
+
asyncio.create_task(delayed_shutdown())
|
|
492
|
+
return {"status": "shutting down"}
|
|
493
|
+
|
|
480
494
|
@app.post("/admin/shutdown")
|
|
481
495
|
async def shutdown_process():
|
|
482
496
|
"""Gracefully shutdown the process."""
|
|
@@ -693,9 +707,32 @@ class FastAPIAppFactory:
|
|
|
693
707
|
parsing."""
|
|
694
708
|
is_async_gen = inspect.isasyncgenfunction(handler)
|
|
695
709
|
|
|
710
|
+
# NOTE:
|
|
711
|
+
# -----
|
|
712
|
+
# FastAPI >= 0.123.5 uses Dependant.is_coroutine_callable, which in
|
|
713
|
+
# turn unwraps callables via inspect.unwrap() and then inspects the
|
|
714
|
+
# unwrapped target to decide whether it is a coroutine function /
|
|
715
|
+
# generator / async generator.
|
|
716
|
+
#
|
|
717
|
+
# If we decorate an async-generator handler with
|
|
718
|
+
# functools.wraps(handler), FastAPI will unwrap back to the original
|
|
719
|
+
# async-generator function and *misclassify* the endpoint as
|
|
720
|
+
# non-coroutine. It will then call our async wrapper *without awaiting
|
|
721
|
+
# it*, and later try to JSON-encode the resulting coroutine object,
|
|
722
|
+
# causing errors like:
|
|
723
|
+
# TypeError("'coroutine' object is not iterable")
|
|
724
|
+
#
|
|
725
|
+
# To avoid that, we deliberately do NOT use functools.wraps() here.
|
|
726
|
+
# Instead, we manually copy the key metadata (name, qualname, doc,
|
|
727
|
+
# module, and signature) from the original handler, but we do NOT set
|
|
728
|
+
# __wrapped__. This ensures:
|
|
729
|
+
# * FastAPI sees the wrapper itself as the callable (an async def),
|
|
730
|
+
# so Dependant.is_coroutine_callable is True, and it is properly
|
|
731
|
+
# awaited.
|
|
732
|
+
# * FastAPI still sees the correct signature for parameter parsing.
|
|
733
|
+
|
|
696
734
|
if is_async_gen:
|
|
697
735
|
|
|
698
|
-
@functools.wraps(handler)
|
|
699
736
|
async def wrapped_handler(*args, **kwargs):
|
|
700
737
|
async def generate():
|
|
701
738
|
try:
|
|
@@ -720,12 +757,8 @@ class FastAPIAppFactory:
|
|
|
720
757
|
media_type="text/event-stream",
|
|
721
758
|
)
|
|
722
759
|
|
|
723
|
-
wrapped_handler.__signature__ = inspect.signature(handler)
|
|
724
|
-
return wrapped_handler
|
|
725
|
-
|
|
726
760
|
else:
|
|
727
761
|
|
|
728
|
-
@functools.wraps(handler)
|
|
729
762
|
def wrapped_handler(*args, **kwargs):
|
|
730
763
|
def generate():
|
|
731
764
|
try:
|
|
@@ -748,8 +781,34 @@ class FastAPIAppFactory:
|
|
|
748
781
|
media_type="text/event-stream",
|
|
749
782
|
)
|
|
750
783
|
|
|
751
|
-
|
|
752
|
-
|
|
784
|
+
# Manually propagate essential metadata without creating a __wrapped__
|
|
785
|
+
# chain that would confuse FastAPI's unwrap logic.
|
|
786
|
+
wrapped_handler.__name__ = getattr(
|
|
787
|
+
handler,
|
|
788
|
+
"__name__",
|
|
789
|
+
wrapped_handler.__name__,
|
|
790
|
+
)
|
|
791
|
+
wrapped_handler.__qualname__ = getattr(
|
|
792
|
+
handler,
|
|
793
|
+
"__qualname__",
|
|
794
|
+
wrapped_handler.__qualname__,
|
|
795
|
+
)
|
|
796
|
+
wrapped_handler.__doc__ = getattr(
|
|
797
|
+
handler,
|
|
798
|
+
"__doc__",
|
|
799
|
+
wrapped_handler.__doc__,
|
|
800
|
+
)
|
|
801
|
+
wrapped_handler.__module__ = getattr(
|
|
802
|
+
handler,
|
|
803
|
+
"__module__",
|
|
804
|
+
wrapped_handler.__module__,
|
|
805
|
+
)
|
|
806
|
+
wrapped_handler.__signature__ = inspect.signature(handler)
|
|
807
|
+
|
|
808
|
+
# Make sure FastAPI doesn't see any stale __wrapped__ pointing back to
|
|
809
|
+
# the original async-generator; if present, remove it.
|
|
810
|
+
|
|
811
|
+
return wrapped_handler
|
|
753
812
|
|
|
754
813
|
@staticmethod
|
|
755
814
|
def _add_custom_endpoints(app: FastAPI):
|
|
@@ -19,6 +19,8 @@ class ProcessManager:
|
|
|
19
19
|
shutdown_timeout: Timeout in seconds for graceful shutdown
|
|
20
20
|
"""
|
|
21
21
|
self.shutdown_timeout = shutdown_timeout
|
|
22
|
+
self._log_file = None
|
|
23
|
+
self._log_file_handle = None
|
|
22
24
|
|
|
23
25
|
async def start_detached_process(
|
|
24
26
|
self,
|
|
@@ -47,7 +49,25 @@ class ProcessManager:
|
|
|
47
49
|
if env:
|
|
48
50
|
process_env.update(env)
|
|
49
51
|
|
|
50
|
-
#
|
|
52
|
+
# Create log file path with timestamp and child process PID
|
|
53
|
+
# We'll update the filename after process starts
|
|
54
|
+
log_dir = "/tmp/agentscope_runtime_logs"
|
|
55
|
+
os.makedirs(log_dir, exist_ok=True)
|
|
56
|
+
|
|
57
|
+
# Use a temporary name first, will rename after getting PID
|
|
58
|
+
import time
|
|
59
|
+
|
|
60
|
+
timestamp = time.strftime("%Y%m%d_%H%M%S")
|
|
61
|
+
temp_log_file = os.path.join(
|
|
62
|
+
log_dir,
|
|
63
|
+
f"process_temp_{timestamp}_{os.getpid()}.log",
|
|
64
|
+
)
|
|
65
|
+
|
|
66
|
+
# Open log file (don't use 'with' to keep it open for the
|
|
67
|
+
# subprocess)
|
|
68
|
+
log_f = open(temp_log_file, "w", encoding="utf-8")
|
|
69
|
+
|
|
70
|
+
# Start detached process with log file
|
|
51
71
|
process = subprocess.Popen(
|
|
52
72
|
[
|
|
53
73
|
"python",
|
|
@@ -57,22 +77,63 @@ class ProcessManager:
|
|
|
57
77
|
"--port",
|
|
58
78
|
str(port),
|
|
59
79
|
],
|
|
60
|
-
stdout=
|
|
61
|
-
stderr=subprocess.
|
|
80
|
+
stdout=log_f,
|
|
81
|
+
stderr=subprocess.STDOUT, # Redirect stderr to stdout
|
|
62
82
|
stdin=subprocess.DEVNULL,
|
|
63
83
|
start_new_session=True, # Create new process group
|
|
64
84
|
env=process_env,
|
|
65
85
|
cwd=os.path.dirname(script_path),
|
|
66
86
|
)
|
|
67
87
|
|
|
88
|
+
# Rename log file with actual process PID
|
|
89
|
+
log_file = os.path.join(log_dir, f"process_{process.pid}.log")
|
|
90
|
+
log_f.close() # Close temp file
|
|
91
|
+
os.rename(temp_log_file, log_file)
|
|
92
|
+
|
|
93
|
+
# Reopen with the correct name
|
|
94
|
+
log_f = open(log_file, "a", encoding="utf-8")
|
|
95
|
+
|
|
96
|
+
# Store log file path and handle for later retrieval
|
|
97
|
+
self._log_file = log_file
|
|
98
|
+
self._log_file_handle = log_f
|
|
99
|
+
|
|
68
100
|
# Verify process started successfully
|
|
69
|
-
await asyncio.sleep(
|
|
101
|
+
await asyncio.sleep(
|
|
102
|
+
0.5,
|
|
103
|
+
) # Give process time to start and write logs
|
|
70
104
|
if process.poll() is not None:
|
|
71
|
-
|
|
105
|
+
# Process failed to start, wait a bit more for logs to be
|
|
106
|
+
# flushed
|
|
107
|
+
await asyncio.sleep(0.2)
|
|
108
|
+
# Read logs and print them
|
|
109
|
+
logs = self.get_process_logs(max_lines=50)
|
|
110
|
+
import logging
|
|
111
|
+
|
|
112
|
+
logger = logging.getLogger(__name__)
|
|
113
|
+
logger.error(
|
|
114
|
+
f"Process failed to start immediately.\n\n"
|
|
115
|
+
f"Process logs:\n{logs}",
|
|
116
|
+
)
|
|
117
|
+
raise RuntimeError(
|
|
118
|
+
"Process failed to start. Check logs above.",
|
|
119
|
+
)
|
|
72
120
|
|
|
73
121
|
return process.pid
|
|
74
122
|
|
|
123
|
+
except RuntimeError:
|
|
124
|
+
# Re-raise RuntimeError with logs already included
|
|
125
|
+
raise
|
|
75
126
|
except Exception as e:
|
|
127
|
+
# For other exceptions, try to include logs if available
|
|
128
|
+
if self._log_file:
|
|
129
|
+
logs = self.get_process_logs(max_lines=50)
|
|
130
|
+
import logging
|
|
131
|
+
|
|
132
|
+
logger = logging.getLogger(__name__)
|
|
133
|
+
logger.error(
|
|
134
|
+
f"Failed to start detached process: {e}\n\n"
|
|
135
|
+
f"Process logs:\n{logs}",
|
|
136
|
+
)
|
|
76
137
|
raise RuntimeError(f"Failed to start detached process: {e}") from e
|
|
77
138
|
|
|
78
139
|
async def stop_process_gracefully(
|
|
@@ -125,6 +186,9 @@ class ProcessManager:
|
|
|
125
186
|
raise RuntimeError(
|
|
126
187
|
f"Failed to terminate process {pid}: {e}",
|
|
127
188
|
) from e
|
|
189
|
+
finally:
|
|
190
|
+
# Close log file handle if open
|
|
191
|
+
self._close_log_file()
|
|
128
192
|
|
|
129
193
|
def is_process_running(self, pid: int) -> bool:
|
|
130
194
|
"""Check if a process is running.
|
|
@@ -271,6 +335,92 @@ class ProcessManager:
|
|
|
271
335
|
|
|
272
336
|
return False
|
|
273
337
|
|
|
338
|
+
def get_process_logs(self, max_lines: int = 50) -> str:
    """Return up to the last ``max_lines`` lines of the process log.

    Args:
        max_lines: Maximum number of lines to return

    Returns:
        Log content as string
    """
    log_path = self._log_file
    if not log_path or not os.path.exists(log_path):
        return "No log file available"

    try:
        # Flush pending writes so the read below sees the latest output.
        handle = self._log_file_handle
        if handle and not handle.closed:
            handle.flush()

        with open(log_path, "r", encoding="utf-8") as fh:
            all_lines = fh.readlines()

        if not all_lines:
            return (
                "Log file is empty (process may not have written "
                "any output yet)"
            )
        # Tail: only the last N lines are of interest
        return "".join(all_lines[-max_lines:])
    except Exception as e:
        return f"Failed to read log file: {e}"
|
|
366
|
+
|
|
367
|
+
def _close_log_file(self):
|
|
368
|
+
"""Close log file handle if open."""
|
|
369
|
+
if self._log_file_handle and not self._log_file_handle.closed:
|
|
370
|
+
try:
|
|
371
|
+
self._log_file_handle.close()
|
|
372
|
+
except Exception:
|
|
373
|
+
pass # Ignore errors when closing
|
|
374
|
+
|
|
375
|
+
def cleanup_log_file(self, keep_file: bool = False):
    """Release the log-file resources held by this manager.

    Args:
        keep_file: If True, keep the log file on disk but close the handle.
            If False, delete the log file.
    """
    self._close_log_file()

    log_path = self._log_file
    if not keep_file and log_path and os.path.exists(log_path):
        try:
            os.remove(log_path)
        except Exception:
            pass  # Ignore cleanup errors

    # Forget both the path and the handle regardless of keep_file
    self._log_file = None
    self._log_file_handle = None
|
|
392
|
+
|
|
393
|
+
@staticmethod
|
|
394
|
+
def cleanup_old_logs(max_age_hours: int = 24):
|
|
395
|
+
"""Clean up old log files.
|
|
396
|
+
|
|
397
|
+
Args:
|
|
398
|
+
max_age_hours: Remove log files older than this many hours
|
|
399
|
+
"""
|
|
400
|
+
import time
|
|
401
|
+
|
|
402
|
+
log_dir = "/tmp/agentscope_runtime_logs"
|
|
403
|
+
if not os.path.exists(log_dir):
|
|
404
|
+
return
|
|
405
|
+
|
|
406
|
+
current_time = time.time()
|
|
407
|
+
max_age_seconds = max_age_hours * 3600
|
|
408
|
+
|
|
409
|
+
try:
|
|
410
|
+
for filename in os.listdir(log_dir):
|
|
411
|
+
if filename.startswith("process_") and filename.endswith(
|
|
412
|
+
".log",
|
|
413
|
+
):
|
|
414
|
+
filepath = os.path.join(log_dir, filename)
|
|
415
|
+
try:
|
|
416
|
+
file_age = current_time - os.path.getmtime(filepath)
|
|
417
|
+
if file_age > max_age_seconds:
|
|
418
|
+
os.remove(filepath)
|
|
419
|
+
except Exception:
|
|
420
|
+
pass # Ignore errors for individual files
|
|
421
|
+
except Exception:
|
|
422
|
+
pass # Ignore errors during cleanup
|
|
423
|
+
|
|
274
424
|
@staticmethod
|
|
275
425
|
def _normalize_host_for_check(host: str) -> str:
|
|
276
426
|
"""Normalize host for connection check.
|