agentscope-runtime 1.0.0b2__py3-none-any.whl → 1.0.2__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- agentscope_runtime/adapters/agentscope/message.py +78 -10
- agentscope_runtime/adapters/agentscope/stream.py +155 -101
- agentscope_runtime/adapters/agentscope/tool/tool.py +1 -3
- agentscope_runtime/adapters/agno/__init__.py +0 -0
- agentscope_runtime/adapters/agno/message.py +30 -0
- agentscope_runtime/adapters/agno/stream.py +122 -0
- agentscope_runtime/adapters/langgraph/__init__.py +12 -0
- agentscope_runtime/adapters/langgraph/message.py +257 -0
- agentscope_runtime/adapters/langgraph/stream.py +205 -0
- agentscope_runtime/cli/__init__.py +7 -0
- agentscope_runtime/cli/cli.py +63 -0
- agentscope_runtime/cli/commands/__init__.py +2 -0
- agentscope_runtime/cli/commands/chat.py +815 -0
- agentscope_runtime/cli/commands/deploy.py +1062 -0
- agentscope_runtime/cli/commands/invoke.py +58 -0
- agentscope_runtime/cli/commands/list_cmd.py +103 -0
- agentscope_runtime/cli/commands/run.py +176 -0
- agentscope_runtime/cli/commands/sandbox.py +128 -0
- agentscope_runtime/cli/commands/status.py +60 -0
- agentscope_runtime/cli/commands/stop.py +185 -0
- agentscope_runtime/cli/commands/web.py +166 -0
- agentscope_runtime/cli/loaders/__init__.py +6 -0
- agentscope_runtime/cli/loaders/agent_loader.py +295 -0
- agentscope_runtime/cli/state/__init__.py +10 -0
- agentscope_runtime/cli/utils/__init__.py +18 -0
- agentscope_runtime/cli/utils/console.py +378 -0
- agentscope_runtime/cli/utils/validators.py +118 -0
- agentscope_runtime/engine/app/agent_app.py +15 -5
- agentscope_runtime/engine/deployers/__init__.py +1 -0
- agentscope_runtime/engine/deployers/agentrun_deployer.py +154 -24
- agentscope_runtime/engine/deployers/base.py +27 -2
- agentscope_runtime/engine/deployers/kubernetes_deployer.py +158 -31
- agentscope_runtime/engine/deployers/local_deployer.py +188 -25
- agentscope_runtime/engine/deployers/modelstudio_deployer.py +109 -18
- agentscope_runtime/engine/deployers/state/__init__.py +9 -0
- agentscope_runtime/engine/deployers/state/manager.py +388 -0
- agentscope_runtime/engine/deployers/state/schema.py +96 -0
- agentscope_runtime/engine/deployers/utils/build_cache.py +736 -0
- agentscope_runtime/engine/deployers/utils/detached_app.py +105 -30
- agentscope_runtime/engine/deployers/utils/docker_image_utils/docker_image_builder.py +31 -10
- agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +15 -8
- agentscope_runtime/engine/deployers/utils/docker_image_utils/image_factory.py +30 -2
- agentscope_runtime/engine/deployers/utils/k8s_utils.py +241 -0
- agentscope_runtime/engine/deployers/utils/package.py +56 -6
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +68 -9
- agentscope_runtime/engine/deployers/utils/service_utils/process_manager.py +155 -5
- agentscope_runtime/engine/deployers/utils/wheel_packager.py +107 -123
- agentscope_runtime/engine/runner.py +32 -12
- agentscope_runtime/engine/schemas/agent_schemas.py +21 -7
- agentscope_runtime/engine/schemas/exception.py +580 -0
- agentscope_runtime/engine/services/agent_state/__init__.py +2 -0
- agentscope_runtime/engine/services/agent_state/state_service_factory.py +55 -0
- agentscope_runtime/engine/services/memory/__init__.py +2 -0
- agentscope_runtime/engine/services/memory/memory_service_factory.py +126 -0
- agentscope_runtime/engine/services/sandbox/__init__.py +2 -0
- agentscope_runtime/engine/services/sandbox/sandbox_service_factory.py +49 -0
- agentscope_runtime/engine/services/service_factory.py +119 -0
- agentscope_runtime/engine/services/session_history/__init__.py +2 -0
- agentscope_runtime/engine/services/session_history/session_history_service_factory.py +73 -0
- agentscope_runtime/engine/services/utils/tablestore_service_utils.py +35 -10
- agentscope_runtime/engine/tracing/wrapper.py +49 -31
- agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +113 -39
- agentscope_runtime/sandbox/box/shared/routers/mcp_utils.py +20 -4
- agentscope_runtime/sandbox/utils.py +2 -0
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/METADATA +82 -11
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/RECORD +71 -36
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/entry_points.txt +1 -0
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/top_level.txt +0 -0
agentscope_runtime/engine/deployers/utils/build_cache.py (new file)

@@ -0,0 +1,736 @@
# -*- coding: utf-8 -*-
"""
Build cache management with content-aware hashing.

This module provides workspace-based build caching to speed up repeated
deployments during local development by detecting unchanged content and
reusing existing build artifacts.
"""

import fnmatch
import hashlib
import json
import logging
import os
import shutil
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional

logger = logging.getLogger(__name__)


class BuildCache:
    """
    Manages a workspace-based build cache with content hashing.

    The cache uses content-aware hashing to automatically detect when the
    project code, requirements, and runtime version are unchanged, allowing
    reuse of existing build artifacts.

    Cache structure:
        <workspace>/.agentscope_runtime/
        └── builds/
            ├── k8s_20251205_1430_a3f9e2/    # platform_timestamp_code
            │   ├── deployment.zip
            │   ├── Dockerfile
            │   └── requirements.txt
            └── modelstudio_20251205_1445_b7c4d1/
                └── *.whl

    Deployment metadata is tracked in deployments.json for cache validation.
    """

    def __init__(self, workspace: Optional[Path] = None):
        """
        Initialize BuildCache.

        Args:
            workspace: Workspace directory (defaults to cwd or the
                AGENTSCOPE_RUNTIME_WORKSPACE env var)
        """
        if workspace is None:
            workspace_str = os.getenv(
                "AGENTSCOPE_RUNTIME_WORKSPACE",
                os.getcwd(),
            )
            workspace = Path(workspace_str)

        self.workspace = Path(workspace).resolve()
        self.cache_root = self.workspace / ".agentscope_runtime" / "builds"
        self.cache_root.mkdir(parents=True, exist_ok=True)

        # Deployment metadata file for tracking cache mappings
        self.metadata_file = (
            self.workspace / ".agentscope_runtime" / "deployments.json"
        )

        logger.debug(f"BuildCache initialized at: {self.cache_root}")

    def _generate_build_name(self, platform: str, content_hash: str) -> str:
        """
        Generate a human-readable build directory name.

        Format: {platform}_{YYYYMMDD_HHMM}_{6-char-code}
        Example: k8s_20251205_1430_a3f9e2

        Args:
            platform: Deployment platform (k8s, modelstudio, agentrun, local)
            content_hash: Full content hash for generating the 6-char code

        Returns:
            Build directory name
        """
        # Timestamp to minute precision
        timestamp = datetime.now().strftime("%Y%m%d_%H%M")

        # 6-character code from the content hash
        code = content_hash[:6]

        return f"{platform}_{timestamp}_{code}"

    def _load_metadata(self) -> Dict:
        """Load deployment metadata from the JSON file."""
        if not self.metadata_file.exists():
            return {}

        try:
            with open(self.metadata_file, "r", encoding="utf-8") as f:
                return json.load(f)
        except Exception as e:
            logger.warning(f"Failed to load metadata: {e}")
            return {}

    def _save_metadata(self, metadata: Dict) -> None:
        """Save deployment metadata to the JSON file."""
        try:
            self.metadata_file.parent.mkdir(parents=True, exist_ok=True)
            with open(self.metadata_file, "w", encoding="utf-8") as f:
                json.dump(metadata, f, indent=2, ensure_ascii=False)
        except Exception as e:
            logger.warning(f"Failed to save metadata: {e}")

    def lookup(
        self,
        project_dir: str,
        entrypoint_file: str,
        requirements: List[str],
        use_local_runtime: bool = False,
        platform: str = "unknown",
    ) -> Optional[Path]:
        """
        Look up a cached build by content hash.

        Args:
            project_dir: Absolute path to the project directory
            entrypoint_file: Relative path to the entrypoint file
            requirements: List of pip requirements
            use_local_runtime: Whether using the local runtime (dev mode)
            platform: Deployment platform (k8s, modelstudio, agentrun, local)

        Returns:
            Path to the cached build directory if a valid cache exists,
            None otherwise
        """
        # Calculate content hash
        build_hash = self._calculate_build_hash(
            project_dir,
            entrypoint_file,
            requirements,
            use_local_runtime,
        )

        # Load metadata to find the build directory
        metadata = self._load_metadata()

        # Look for an existing build with a matching hash
        for build_name, build_info in metadata.items():
            if build_info.get("content_hash") == build_hash:
                cache_dir = self.cache_root / build_name

                if not cache_dir.exists():
                    logger.warning(
                        f"Cached build referenced in metadata"
                        f" but not found: {build_name}",
                    )
                    continue

                # Validate cache integrity
                if not self._validate_cache(cache_dir):
                    logger.warning(f"Cache corrupted: {build_name}")
                    try:
                        shutil.rmtree(cache_dir)
                    except Exception as e:
                        logger.warning(f"Failed to clean corrupted cache: {e}")
                    continue

                logger.info(f"✓ Cache hit: {build_name}")
                return cache_dir

        logger.info(f"Cache miss: {build_hash[:8]} (platform: {platform})")
        return None

    def store(
        self,
        project_dir: str,
        entrypoint_file: str,
        requirements: List[str],
        build_path: Path,
        use_local_runtime: bool = False,
        platform: str = "unknown",
    ) -> str:
        """
        Store a build in the cache with platform-aware naming.

        Args:
            project_dir: Absolute path to the project directory
            entrypoint_file: Relative path to the entrypoint file
            requirements: List of pip requirements
            build_path: Path to the build directory to cache
            use_local_runtime: Whether using the local runtime (dev mode)
            platform: Deployment platform (k8s, modelstudio, agentrun, local)

        Returns:
            Build directory name
        """
        # Calculate content hash
        build_hash = self._calculate_build_hash(
            project_dir,
            entrypoint_file,
            requirements,
            use_local_runtime,
        )

        # Generate a human-readable build name
        build_name = self._generate_build_name(platform, build_hash)
        cache_dir = self.cache_root / build_name

        # Load metadata
        metadata = self._load_metadata()

        # Check if build_path is already the cache_dir (built in place)
        build_path_resolved = Path(build_path).resolve()
        cache_dir_resolved = cache_dir.resolve()

        if build_path_resolved == cache_dir_resolved:
            # Already built in the cache directory, just save metadata
            logger.debug(f"Build already in cache location: {build_name}")

            # Update metadata
            metadata[build_name] = {
                "content_hash": build_hash,
                "platform": platform,
                "project_dir": project_dir,
                "entrypoint": entrypoint_file,
                "created_at": datetime.now().isoformat(),
                "requirements": requirements,
            }
            self._save_metadata(metadata)
            return build_name

        # If a cache already exists with the same hash, no need to store again
        if build_name in metadata and cache_dir.exists():
            logger.info(f"Build already cached: {build_name}")
            return build_name

        # Copy the build to the cache
        try:
            shutil.copytree(build_path, cache_dir, dirs_exist_ok=False)
            logger.info(f"Build cached: {build_name}")

            # Update metadata
            metadata[build_name] = {
                "content_hash": build_hash,
                "platform": platform,
                "project_dir": project_dir,
                "entrypoint": entrypoint_file,
                "created_at": datetime.now().isoformat(),
                "requirements": requirements,
            }
            self._save_metadata(metadata)

        except Exception as e:
            logger.error(f"Failed to cache build: {e}")
            # Clean up the partial cache
            if cache_dir.exists():
                try:
                    shutil.rmtree(cache_dir)
                except Exception as cleanup_error:
                    logger.warning(
                        f"Failed to remove cached build {cache_dir}:"
                        f" {cleanup_error}",
                    )
            raise

        return build_name

    def invalidate_all(self) -> None:
        """Remove all cached builds (simple cleanup)."""
        if self.cache_root.exists():
            try:
                shutil.rmtree(self.cache_root)
                logger.info(f"All caches invalidated: {self.cache_root}")
                # Recreate the cache root
                self.cache_root.mkdir(parents=True, exist_ok=True)
            except Exception as e:
                logger.error(f"Failed to invalidate caches: {e}")
                raise

    def _calculate_build_hash(
        self,
        project_dir: str,
        entrypoint_file: str,
        requirements: List[str],
        use_local_runtime: bool,
    ) -> str:
        """
        Calculate the content hash for a build cache lookup.

        The hash is based on:
        - User project code (excluding temp files)
        - Requirements list
        - AgentScope-runtime version:
          - Released version: use the version number
          - Dev version: hash of the runtime source code

        Args:
            project_dir: Absolute path to the project directory
            entrypoint_file: Relative path to the entrypoint file
            requirements: List of pip requirements
            use_local_runtime: Whether using the local runtime (dev mode)

        Returns:
            12-character hex string
        """
        from .detached_app import (
            _get_package_version,
            _get_runtime_source_path,
        )

        hash_parts = []

        # 1. User project code (excluding temp files)
        project_hash = self._hash_directory(
            Path(project_dir),
            self._get_ignore_patterns(),
        )
        hash_parts.append(f"project:{project_hash}")

        # 2. Entrypoint file
        hash_parts.append(f"entry:{entrypoint_file}")

        # 3. Requirements
        req_string = "\n".join(sorted(requirements))
        req_hash = hashlib.sha256(req_string.encode()).hexdigest()[:8]
        hash_parts.append(f"req:{req_hash}")

        # 4. AgentScope-runtime version
        if use_local_runtime:
            # Dev mode: hash the runtime source code
            runtime_source = _get_runtime_source_path()
            if runtime_source:
                runtime_hash = self._hash_directory(
                    runtime_source / "src",
                    self._get_ignore_patterns(),
                )
                hash_parts.append(f"runtime-dev:{runtime_hash[:8]}")
            else:
                # Fallback if the source is not found
                hash_parts.append("runtime-dev:unknown")
        else:
            # Released mode: use the version number
            version = _get_package_version()
            hash_parts.append(f"runtime:{version}")

        # Combine and hash
        combined = "-".join(hash_parts)
        final_hash = hashlib.sha256(combined.encode()).hexdigest()[:12]

        logger.debug(f"Calculated build hash: {final_hash}")
        logger.debug(f"Hash components: {combined}")

        return final_hash

    def _hash_directory(
        self,
        path: Path,
        ignore_patterns: List[str],
    ) -> str:
        """
        Calculate a hash of directory contents.

        Includes:
        - File paths (relative to the directory)
        - File contents
        - File mtimes (for quick detection)

        Excludes:
        - Files matching ignore patterns
        - Empty directories

        Args:
            path: Directory path to hash
            ignore_patterns: List of ignore patterns

        Returns:
            16-character hex string
        """
        hasher = hashlib.sha256()

        if not path.exists():
            logger.warning(f"Directory not found for hashing: {path}")
            return "notfound"

        try:
            for root, dirs, files in sorted(os.walk(path)):
                # Filter ignored directories (in place)
                dirs[:] = [
                    d
                    for d in sorted(dirs)
                    if not self._should_ignore(d, ignore_patterns)
                ]

                for filename in sorted(files):
                    filepath = Path(root) / filename

                    try:
                        rel_path = filepath.relative_to(path)
                    except ValueError:
                        # Skip files outside the base path
                        continue

                    if self._should_ignore(str(rel_path), ignore_patterns):
                        continue

                    # Hash: relative path + mtime + content
                    hasher.update(str(rel_path).encode())

                    try:
                        stat = filepath.stat()
                        hasher.update(str(stat.st_mtime).encode())

                        with open(filepath, "rb") as f:
                            hasher.update(f.read())
                    except (OSError, IOError) as e:
                        # Skip files that can't be read
                        logger.debug(
                            f"Skipping unreadable file {filepath}: {e}",
                        )
                        continue

        except Exception as e:
            logger.error(f"Error hashing directory {path}: {e}")
            return "error"

        return hasher.hexdigest()[:16]

    def _should_ignore(self, path: str, patterns: List[str]) -> bool:
        """
        Check whether a path should be ignored based on the patterns.

        Args:
            path: Path to check (relative)
            patterns: List of ignore patterns

        Returns:
            True if the path should be ignored
        """
        path_parts = Path(path).parts

        for pattern in patterns:
            # Check if any part of the path matches the pattern
            if pattern in path_parts:
                return True

            # Check wildcard patterns
            if "*" in pattern:
                if fnmatch.fnmatch(path, pattern):
                    return True
                # Also check each part
                for part in path_parts:
                    if fnmatch.fnmatch(part, pattern):
                        return True

        return False

    def _get_ignore_patterns(self) -> List[str]:
        """
        Get ignore patterns for directory hashing.

        Returns:
            List of ignore patterns
        """
        return [
            "__pycache__",
            "*.pyc",
            "*.pyo",
            ".git",
            ".gitignore",
            ".pytest_cache",
            ".mypy_cache",
            ".tox",
            "venv",
            "env",
            ".venv",
            ".env",
            "node_modules",
            ".DS_Store",
            "*.egg-info",
            "build",
            "dist",
            ".cache",
            "*.swp",
            "*.swo",
            "*~",
            ".idea",
            ".vscode",
            "*.log",
            "logs",
            ".agentscope_runtime",  # Don't hash the cache itself
        ]

    def lookup_wrapper(
        self,
        project_dir: str,
        cmd: str,
        platform: str = "wrapper",
    ) -> Optional[Path]:
        """
        Look up a cached wrapper project build by content hash.

        Args:
            project_dir: Absolute path to the project directory
            cmd: Start command for the wrapper
            platform: Deployment platform (modelstudio, agentrun)

        Returns:
            Path to the cached directory if a valid cache exists,
            None otherwise
        """
        # Calculate the content hash for the wrapper
        build_hash = self._calculate_wrapper_hash(project_dir, cmd)

        # Load metadata to find the wrapper build
        metadata = self._load_metadata()

        # Look for an existing wrapper build with a matching hash
        for build_name, build_info in metadata.items():
            if (
                build_info.get("content_hash") == build_hash
                and build_info.get("type") == "wrapper"
            ):
                cache_dir = self.cache_root / build_name

                if not cache_dir.exists():
                    logger.warning(
                        f"Cached wrapper referenced in metadata but not "
                        f"found: {build_name}",
                    )
                    continue

                # Validate wrapper cache integrity (check for a wheel file)
                if not self._validate_wrapper_cache(cache_dir):
                    logger.warning(f"Wrapper cache corrupted: {build_name}")
                    try:
                        shutil.rmtree(cache_dir)
                    except Exception as e:
                        logger.warning(
                            f"Failed to clean corrupted wrapper cache: {e}",
                        )
                    continue

                logger.info(f"✓ Wrapper cache hit: {build_name}")
                return cache_dir

        logger.info(
            f"Wrapper cache miss: {build_hash[:8]} (platform: {platform})",
        )
        return None

    def store_wrapper(
        self,
        project_dir: str,
        cmd: str,
        wrapper_dir: Path,
        platform: str = "wrapper",
    ) -> str:
        """
        Store a wrapper project build in the cache with platform-aware naming.

        Args:
            project_dir: Absolute path to the project directory
            cmd: Start command for the wrapper
            wrapper_dir: Path to the wrapper build directory to cache
            platform: Deployment platform (modelstudio, agentrun)

        Returns:
            Build directory name
        """
        # Calculate content hash
        build_hash = self._calculate_wrapper_hash(project_dir, cmd)

        # Generate a human-readable build name
        build_name = self._generate_build_name(platform, build_hash)
        cache_dir = self.cache_root / build_name

        # Load metadata
        metadata = self._load_metadata()

        # Check if wrapper_dir is already the cache_dir (built in place)
        wrapper_dir_resolved = Path(wrapper_dir).resolve()
        cache_dir_resolved = cache_dir.resolve()

        if wrapper_dir_resolved == cache_dir_resolved:
            # Already built in the cache directory, just save metadata
            logger.debug(f"Wrapper already in cache location: {build_name}")

            # Update metadata
            metadata[build_name] = {
                "content_hash": build_hash,
                "type": "wrapper",
                "platform": platform,
                "project_dir": project_dir,
                "cmd": cmd,
                "created_at": datetime.now().isoformat(),
            }
            self._save_metadata(metadata)
            return build_name

        # If a cache already exists with the same hash, no need to store again
        if build_name in metadata and cache_dir.exists():
            logger.info(f"Wrapper build already cached: {build_name}")
            return build_name

        # Copy the wrapper to the cache
        try:
            shutil.copytree(wrapper_dir, cache_dir, dirs_exist_ok=False)
            logger.info(f"Wrapper build cached: {build_name}")

            # Update metadata
            metadata[build_name] = {
                "content_hash": build_hash,
                "type": "wrapper",
                "platform": platform,
                "project_dir": project_dir,
                "cmd": cmd,
                "created_at": datetime.now().isoformat(),
            }
            self._save_metadata(metadata)

        except Exception as e:
            logger.error(f"Failed to cache wrapper build: {e}")
            # Clean up the partial cache
            if cache_dir.exists():
                try:
                    shutil.rmtree(cache_dir)
                except Exception as cleanup_error:
                    logger.warning(
                        f"Failed to remove cache directory "
                        f"{cache_dir}: {cleanup_error}",
                    )

            raise RuntimeError(
                f"Failed to store cache directory {cache_dir}: {e}",
            ) from e

        return build_name

    def _calculate_wrapper_hash(
        self,
        project_dir: str,
        cmd: str,
    ) -> str:
        """
        Calculate the content hash for a wrapper project cache lookup.

        The hash is based on:
        - User project code (excluding temp files)
        - Start command

        Args:
            project_dir: Absolute path to the project directory
            cmd: Start command for the wrapper

        Returns:
            12-character hex string
        """
        hash_parts = []

        # 1. User project code (excluding temp files)
        project_hash = self._hash_directory(
            Path(project_dir),
            self._get_ignore_patterns(),
        )
        hash_parts.append(f"project:{project_hash}")

        # 2. Start command
        cmd_hash = hashlib.sha256(cmd.encode()).hexdigest()[:8]
        hash_parts.append(f"cmd:{cmd_hash}")

        # Combine and hash
        combined = "-".join(hash_parts)
        final_hash = hashlib.sha256(combined.encode()).hexdigest()[:12]

        logger.debug(f"Calculated wrapper hash: {final_hash}")
        logger.debug(f"Hash components: {combined}")

        return final_hash

    def _validate_wrapper_cache(self, cache_dir: Path) -> bool:
        """
        Validate wrapper cache integrity.

        Args:
            cache_dir: Cache directory to validate

        Returns:
            True if the cache is valid
        """
        # Check that at least one wheel file exists
        wheel_files = list(cache_dir.glob("*.whl"))

        if not wheel_files:
            logger.warning(
                "Wrapper cache validation failed: no wheel file found",
            )
            return False

        # Check that no wheel file is empty
        for wheel_file in wheel_files:
            if wheel_file.stat().st_size == 0:
                logger.warning(
                    f"Wrapper cache validation failed: {wheel_file.name} is "
                    f"empty",
                )
                return False

        return True

    def _validate_cache(self, cache_dir: Path) -> bool:
        """
        Validate cache integrity.

        Args:
            cache_dir: Cache directory to validate

        Returns:
            True if the cache is valid
        """
        # Check that required files exist
        required_files = ["deployment.zip"]

        for required_file in required_files:
            file_path = cache_dir / required_file
            if not file_path.exists():
                logger.warning(
                    f"Cache validation failed: missing {required_file}",
                )
                return False

            # Check that the file is not empty
            if file_path.stat().st_size == 0:
                logger.warning(
                    f"Cache validation failed: {required_file} is empty",
                )
                return False

        return True