agentscope-runtime 1.0.0b2__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (71)
  1. agentscope_runtime/adapters/agentscope/message.py +78 -10
  2. agentscope_runtime/adapters/agentscope/stream.py +155 -101
  3. agentscope_runtime/adapters/agentscope/tool/tool.py +1 -3
  4. agentscope_runtime/adapters/agno/__init__.py +0 -0
  5. agentscope_runtime/adapters/agno/message.py +30 -0
  6. agentscope_runtime/adapters/agno/stream.py +122 -0
  7. agentscope_runtime/adapters/langgraph/__init__.py +12 -0
  8. agentscope_runtime/adapters/langgraph/message.py +257 -0
  9. agentscope_runtime/adapters/langgraph/stream.py +205 -0
  10. agentscope_runtime/cli/__init__.py +7 -0
  11. agentscope_runtime/cli/cli.py +63 -0
  12. agentscope_runtime/cli/commands/__init__.py +2 -0
  13. agentscope_runtime/cli/commands/chat.py +815 -0
  14. agentscope_runtime/cli/commands/deploy.py +1062 -0
  15. agentscope_runtime/cli/commands/invoke.py +58 -0
  16. agentscope_runtime/cli/commands/list_cmd.py +103 -0
  17. agentscope_runtime/cli/commands/run.py +176 -0
  18. agentscope_runtime/cli/commands/sandbox.py +128 -0
  19. agentscope_runtime/cli/commands/status.py +60 -0
  20. agentscope_runtime/cli/commands/stop.py +185 -0
  21. agentscope_runtime/cli/commands/web.py +166 -0
  22. agentscope_runtime/cli/loaders/__init__.py +6 -0
  23. agentscope_runtime/cli/loaders/agent_loader.py +295 -0
  24. agentscope_runtime/cli/state/__init__.py +10 -0
  25. agentscope_runtime/cli/utils/__init__.py +18 -0
  26. agentscope_runtime/cli/utils/console.py +378 -0
  27. agentscope_runtime/cli/utils/validators.py +118 -0
  28. agentscope_runtime/engine/app/agent_app.py +15 -5
  29. agentscope_runtime/engine/deployers/__init__.py +1 -0
  30. agentscope_runtime/engine/deployers/agentrun_deployer.py +154 -24
  31. agentscope_runtime/engine/deployers/base.py +27 -2
  32. agentscope_runtime/engine/deployers/kubernetes_deployer.py +158 -31
  33. agentscope_runtime/engine/deployers/local_deployer.py +188 -25
  34. agentscope_runtime/engine/deployers/modelstudio_deployer.py +109 -18
  35. agentscope_runtime/engine/deployers/state/__init__.py +9 -0
  36. agentscope_runtime/engine/deployers/state/manager.py +388 -0
  37. agentscope_runtime/engine/deployers/state/schema.py +96 -0
  38. agentscope_runtime/engine/deployers/utils/build_cache.py +736 -0
  39. agentscope_runtime/engine/deployers/utils/detached_app.py +105 -30
  40. agentscope_runtime/engine/deployers/utils/docker_image_utils/docker_image_builder.py +31 -10
  41. agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +15 -8
  42. agentscope_runtime/engine/deployers/utils/docker_image_utils/image_factory.py +30 -2
  43. agentscope_runtime/engine/deployers/utils/k8s_utils.py +241 -0
  44. agentscope_runtime/engine/deployers/utils/package.py +56 -6
  45. agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +68 -9
  46. agentscope_runtime/engine/deployers/utils/service_utils/process_manager.py +155 -5
  47. agentscope_runtime/engine/deployers/utils/wheel_packager.py +107 -123
  48. agentscope_runtime/engine/runner.py +32 -12
  49. agentscope_runtime/engine/schemas/agent_schemas.py +21 -7
  50. agentscope_runtime/engine/schemas/exception.py +580 -0
  51. agentscope_runtime/engine/services/agent_state/__init__.py +2 -0
  52. agentscope_runtime/engine/services/agent_state/state_service_factory.py +55 -0
  53. agentscope_runtime/engine/services/memory/__init__.py +2 -0
  54. agentscope_runtime/engine/services/memory/memory_service_factory.py +126 -0
  55. agentscope_runtime/engine/services/sandbox/__init__.py +2 -0
  56. agentscope_runtime/engine/services/sandbox/sandbox_service_factory.py +49 -0
  57. agentscope_runtime/engine/services/service_factory.py +119 -0
  58. agentscope_runtime/engine/services/session_history/__init__.py +2 -0
  59. agentscope_runtime/engine/services/session_history/session_history_service_factory.py +73 -0
  60. agentscope_runtime/engine/services/utils/tablestore_service_utils.py +35 -10
  61. agentscope_runtime/engine/tracing/wrapper.py +49 -31
  62. agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +113 -39
  63. agentscope_runtime/sandbox/box/shared/routers/mcp_utils.py +20 -4
  64. agentscope_runtime/sandbox/utils.py +2 -0
  65. agentscope_runtime/version.py +1 -1
  66. {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/METADATA +82 -11
  67. {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/RECORD +71 -36
  68. {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/entry_points.txt +1 -0
  69. {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/WHEEL +0 -0
  70. {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/licenses/LICENSE +0 -0
  71. {agentscope_runtime-1.0.0b2.dist-info → agentscope_runtime-1.0.2.dist-info}/top_level.txt +0 -0
agentscope_runtime/engine/deployers/state/manager.py
@@ -0,0 +1,388 @@
+ # -*- coding: utf-8 -*-
+ """Deployment state management."""
+
+ import json
+ import os
+ import shutil
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Optional, List, Dict, Any
+
+ from agentscope_runtime.engine.deployers.state.schema import (
+     Deployment,
+     StateFileSchema,
+ )
+
+
+ class DeploymentStateManager:
+     """Manages deployment state persistence."""
+
+     def __init__(self, state_dir: Optional[str] = None):
+         """
+         Initialize state manager.
+
+         Args:
+             state_dir: Custom state directory (defaults to
+                 ~/.agentscope-runtime)
+         """
+         if state_dir is None:
+             state_dir = os.path.expanduser("~/.agentscope-runtime")
+
+         self.state_dir = Path(state_dir)
+         self.state_file = self.state_dir / "deployments.json"
+         self._ensure_state_dir()
+
+     def _ensure_state_dir(self) -> None:
+         """Ensure state directory exists."""
+         self.state_dir.mkdir(parents=True, exist_ok=True)
+
+     def _backup_state_file(self) -> None:
+         """Create backup of state file before modifications.
+
+         Maintains one backup per day. If a backup for today already exists,
+         it will be overwritten. Old backups (older than 30 days) are cleaned up
+         """
+         if self.state_file.exists():
+             # Use date-based filename: deployments.backup.YYYYMMDD.json
+             today = datetime.now().strftime("%Y%m%d")
+             backup_file = self.state_dir / f"deployments.backup.{today}.json"
+
+             # Overwrite today's backup if it exists (one backup per day)
+             shutil.copy2(self.state_file, backup_file)
+
+             # Clean up old backups (older than 30 days)
+             self._cleanup_old_backups(days_to_keep=30)
+
+     def _cleanup_old_backups(self, days_to_keep: int = 30) -> None:
+         """Clean up backup files older than specified days.
+
+         Args:
+             days_to_keep: Number of days to keep backups (default: 30)
+         """
+         from datetime import timedelta
+
+         cutoff_date = datetime.now() - timedelta(days=days_to_keep)
+         cutoff_date_str = cutoff_date.strftime("%Y%m%d")
+
+         # Find all backup files with date format
+         backups = list(self.state_dir.glob("deployments.backup.*.json"))
+
+         for backup_file in backups:
+             # Extract date from filename: deployments.backup.YYYYMMDD.json
+             try:
+                 # Get the date part (between "backup." and ".json")
+                 date_str = backup_file.stem.split("backup.")[-1]
+
+                 # Validate date format (8 digits: YYYYMMDD)
+                 if len(date_str) == 8 and date_str.isdigit():
+                     backup_date_str = date_str
+
+                     # Compare dates as strings (YYYYMMDD format allows
+                     # string comparison)
+                     if backup_date_str < cutoff_date_str:
+                         backup_file.unlink()
+             except (ValueError, IndexError):
+                 # If filename doesn't match expected format, skip it
+                 # (might be old format backups)
+                 continue
+
+     def _read_state(self) -> Dict[str, Any]:
+         """Read state file with validation."""
+         if not self.state_file.exists():
+             return StateFileSchema.create_empty()
+
+         try:
+             with open(self.state_file, "r", encoding="utf-8") as f:
+                 data = json.load(f)
+
+             # Validate and migrate if needed
+             data = StateFileSchema.migrate_if_needed(data)
+
+             if not StateFileSchema.validate(data):
+                 # If validation fails, try to preserve existing deployments
+                 # by only validating the structure, not individual deployments
+                 if isinstance(data, dict) and "deployments" in data:
+                     # Keep the deployments dict even if some entries are
+                     # invalid. This prevents data loss when only some
+                     # entries are corrupted
+                     valid_deployments = {}
+                     for deploy_id, deploy_data in data.get(
+                         "deployments",
+                         {},
+                     ).items():
+                         try:
+                             # Try to validate individual deployment
+                             Deployment.from_dict(deploy_data)
+                             valid_deployments[deploy_id] = deploy_data
+                         except (TypeError, KeyError) as e:
+                             # Skip invalid deployments but keep valid ones
+                             print(
+                                 f"Warning: Skipping invalid deployment "
+                                 f"{deploy_id} in state file: {e}",
+                             )
+                     # Return state with only valid deployments
+                     # IMPORTANT: Only return empty if ALL deployments are
+                     # invalid. This prevents accidental data loss
+                     return {
+                         "version": data.get(
+                             "version",
+                             StateFileSchema.VERSION,
+                         ),
+                         "deployments": valid_deployments,
+                     }
+                 raise ValueError("Invalid state file format")
+
+             return data
+
+         except (json.JSONDecodeError, ValueError) as e:
+             # State file is corrupted, return empty state
+             # Original file is kept as-is for manual recovery
+             print(
+                 f"Warning: State file is corrupted ({e}). Starting with "
+                 f"empty state.",
+             )
+             return StateFileSchema.create_empty()
+
+     def _write_state(
+         self,
+         data: Dict[str, Any],
+         allow_empty: bool = False,
+     ) -> None:
+         """
+         Write state file atomically.
+
+         Args:
+             data: State data to write
+             allow_empty: If True, allow writing empty state even when file
+                 has data.
+                 Used for explicit operations like clear() or remove().
+         """
+         # Safety check: prevent writing empty state if file already exists
+         # with data. This prevents accidental data loss, unless explicitly
+         # allowed
+         if not allow_empty and self.state_file.exists():
+             try:
+                 existing_state = self._read_state()
+                 existing_count = len(existing_state.get("deployments", {}))
+                 new_count = len(data.get("deployments", {}))
+
+                 # If we're writing empty state but file had data, this is
+                 # suspicious unless explicitly allowed (e.g., from clear()
+                 # or remove())
+                 if existing_count > 0 and new_count == 0:
+                     raise ValueError(
+                         f"Attempted to write empty state when {existing_count}"
+                         f" deployments exist. This may indicate data loss. "
+                         f"Aborting write to prevent data loss.",
+                     )
+             except ValueError as e:
+                 # Re-raise ValueError from safety check (data loss prevention)
+                 raise ValueError(
+                     f"Attempted to write empty state when {existing_count}",
+                 ) from e
+             except Exception:
+                 # If we can't read existing state due to file errors,
+                 # proceed with caution but still validate the new data
+                 pass
+
+         # Validate before writing
+         if not StateFileSchema.validate(data):
+             raise ValueError("Invalid state data")
+
+         # Serialize new data to compare with existing file
+         new_content = json.dumps(data, indent=2, sort_keys=True)
+
+         # Check if content actually changed before backing up
+         # Use the same read logic as _read_state to ensure consistency
+         if self.state_file.exists():
+             try:
+                 # Read existing file using the same method as _read_state
+                 # to ensure we're comparing apples to apples
+                 with open(self.state_file, "r", encoding="utf-8") as f:
+                     existing_data = json.load(f)
+
+                 # Normalize the existing data the same way _read_state does
+                 existing_data = StateFileSchema.migrate_if_needed(
+                     existing_data,
+                 )
+
+                 # Serialize for comparison
+                 existing_content = json.dumps(
+                     existing_data,
+                     indent=2,
+                     sort_keys=True,
+                 )
+
+                 # Only backup if content changed
+                 if existing_content != new_content:
+                     self._backup_state_file()
+             except (json.JSONDecodeError, IOError, ValueError):
+                 # If file is corrupted or unreadable, backup anyway
+                 # This ensures we don't lose data if file is corrupted
+                 self._backup_state_file()
+
+         # Write to temporary file first
+         temp_file = self.state_file.with_suffix(".tmp")
+         with open(temp_file, "w", encoding="utf-8") as f:
+             json.dump(data, f, indent=2)
+
+         # Atomic rename
+         temp_file.replace(self.state_file)
+
+     def save(self, deployment: Deployment) -> None:
+         """
+         Save deployment metadata.
+
+         Args:
+             deployment: Deployment instance to save
+         """
+         state = self._read_state()
+         state["deployments"][deployment.id] = deployment.to_dict()
+         self._write_state(state)
+
+     def get(self, deploy_id: str) -> Optional[Deployment]:
+         """
+         Retrieve deployment by ID.
+
+         Args:
+             deploy_id: Deployment ID
+
+         Returns:
+             Deployment instance or None if not found
+         """
+         state = self._read_state()
+         deploy_data = state["deployments"].get(deploy_id)
+
+         if deploy_data is None:
+             return None
+
+         return Deployment.from_dict(deploy_data)
+
+     def list(
+         self,
+         status: Optional[str] = None,
+         platform: Optional[str] = None,
+     ) -> List[Deployment]:
+         """
+         List all deployments with optional filtering.
+
+         Args:
+             status: Filter by status (e.g., 'running', 'stopped')
+             platform: Filter by platform (e.g., 'local', 'k8s')
+
+         Returns:
+             List of Deployment instances
+         """
+         state = self._read_state()
+         deployments = [
+             Deployment.from_dict(data)
+             for data in state["deployments"].values()
+         ]
+
+         # Apply filters
+         if status:
+             deployments = [d for d in deployments if d.status == status]
+
+         if platform:
+             deployments = [d for d in deployments if d.platform == platform]
+
+         # Sort by created_at (newest first)
+         deployments.sort(key=lambda d: d.created_at, reverse=True)
+
+         return deployments
+
+     def update_status(self, deploy_id: str, status: str) -> None:
+         """
+         Update deployment status.
+
+         Args:
+             deploy_id: Deployment ID
+             status: New status value
+
+         Raises:
+             KeyError: If deployment not found
+         """
+         state = self._read_state()
+
+         # Safety check: if state is empty, don't proceed
+         # This prevents accidentally writing empty state
+         if not state.get("deployments"):
+             raise KeyError(
+                 f"Deployment not found: {deploy_id} "
+                 f"(state file is empty or corrupted)",
+             )
+
+         if deploy_id not in state["deployments"]:
+             raise KeyError(f"Deployment not found: {deploy_id}")
+
+         # Make a copy to avoid modifying the original dict in place
+         # This ensures we don't accidentally lose data
+         state["deployments"][deploy_id] = dict(state["deployments"][deploy_id])
+         state["deployments"][deploy_id]["status"] = status
+
+         self._write_state(state)
+
+     def remove(self, deploy_id: str) -> None:
+         """
+         Delete deployment record.
+
+         Args:
+             deploy_id: Deployment ID
+
+         Raises:
+             KeyError: If deployment not found
+         """
+         state = self._read_state()
+
+         if deploy_id not in state["deployments"]:
+             raise KeyError(f"Deployment not found: {deploy_id}")
+
+         del state["deployments"][deploy_id]
+
+         # Allow empty state if this was the last deployment (legitimate
+         # removal)
+         allow_empty = len(state["deployments"]) == 0
+         self._write_state(state, allow_empty=allow_empty)
+
+     def exists(self, deploy_id: str) -> bool:
+         """Check if deployment exists."""
+         state = self._read_state()
+         return deploy_id in state["deployments"]
+
+     def clear(self) -> None:
+         """Clear all deployments (use with caution)."""
+         # Allow empty state for explicit clear operation
+         # Backup will be created automatically by _write_state() if content
+         # changes
+         self._write_state(StateFileSchema.create_empty(), allow_empty=True)
+
+     def export_to_file(self, output_file: str) -> None:
+         """Export state to a file."""
+         state = self._read_state()
+         with open(output_file, "w", encoding="utf-8") as f:
+             json.dump(state, f, indent=2)
+
+     def import_from_file(self, input_file: str, merge: bool = True) -> None:
+         """
+         Import state from a file.
+
+         Args:
+             input_file: Path to state file to import
+             merge: If True, merge with existing state; if False, replace
+         """
+         with open(input_file, "r", encoding="utf-8") as f:
+             import_data = json.load(f)
+
+         # Validate imported data
+         if not StateFileSchema.validate(import_data):
+             raise ValueError("Invalid import file format")
+
+         if merge:
+             # Merge with existing state
+             state = self._read_state()
+             state["deployments"].update(import_data["deployments"])
+         else:
+             # Replace entire state
+             state = import_data
+
+         self._write_state(state)
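For orientation, here is a minimal sketch (not part of the diff) of the deployments.json layout that _write_state() persists under ~/.agentscope-runtime; all values are illustrative, and the per-deployment fields come from the Deployment dataclass shown in the schema.py diff below.

# Illustrative only: the shape of ~/.agentscope-runtime/deployments.json,
# i.e. StateFileSchema.create_empty() plus one Deployment.to_dict() entry.
example_state = {
    "version": "1.0",
    "deployments": {
        "local_20250101_120000_ab12cd": {
            "id": "local_20250101_120000_ab12cd",  # from generate_deployment_id("local")
            "platform": "local",
            "url": "http://127.0.0.1:8000",        # illustrative
            "agent_source": "my_agent.py",         # illustrative
            "created_at": "2025-01-01T12:00:00Z",  # from format_timestamp()
            "status": "running",
            "token": None,                         # serialized as null in JSON
            "config": {},
        },
    },
}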
agentscope_runtime/engine/deployers/state/schema.py
@@ -0,0 +1,96 @@
+ # -*- coding: utf-8 -*-
+ """Deployment state schema definitions."""
+
+ from dataclasses import dataclass, asdict
+ from datetime import datetime
+ from typing import Dict, Any, Optional
+
+
+ @dataclass
+ class Deployment:
+     """Represents a deployment record."""
+
+     id: str
+     platform: str
+     url: str
+     agent_source: str
+     created_at: str
+     status: str = "running"
+     token: Optional[str] = None
+     config: Dict[str, Any] = None
+
+     def __post_init__(self):
+         """Initialize default values."""
+         if self.config is None:
+             self.config = {}
+
+     def to_dict(self) -> Dict[str, Any]:
+         """Convert to dictionary for JSON serialization."""
+         return asdict(self)
+
+     @classmethod
+     def from_dict(cls, data: Dict[str, Any]) -> "Deployment":
+         """Create from dictionary."""
+         return cls(**data)
+
+
+ class StateFileSchema:
+     """Schema for deployment state file."""
+
+     VERSION = "1.0"
+
+     @staticmethod
+     def create_empty() -> Dict[str, Any]:
+         """Create empty state file structure."""
+         return {
+             "version": StateFileSchema.VERSION,
+             "deployments": {},
+         }
+
+     @staticmethod
+     def validate(data: Dict[str, Any]) -> bool:
+         """Validate state file structure."""
+         required_keys = ["version", "deployments"]
+         if not all(key in data for key in required_keys):
+             return False
+
+         if not isinstance(data["deployments"], dict):
+             return False
+
+         # Validate each deployment record
+         for _, deploy_data in data["deployments"].items():
+             try:
+                 Deployment.from_dict(deploy_data)
+             except (TypeError, KeyError):
+                 return False
+
+         return True
+
+     @staticmethod
+     def migrate_if_needed(data: Dict[str, Any]) -> Dict[str, Any]:
+         """Migrate state file to current version if needed."""
+         current_version = data.get("version", "0.0")
+
+         if current_version == StateFileSchema.VERSION:
+             return data
+
+         # For now, just ensure version is correct
+         # Future migrations would go here
+         data["version"] = StateFileSchema.VERSION
+         return data
+
+
+ def generate_deployment_id(platform: str) -> str:
+     """Generate unique deployment ID."""
+     import shortuuid
+
+     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+     short_id = shortuuid.ShortUUID().random(length=6)
+     return f"{platform}_{timestamp}_{short_id}"
+
+
+ def format_timestamp(dt: datetime = None) -> str:
+     """Format timestamp in ISO format."""
+     if dt is None:
+         dt = datetime.now()
+     return dt.isoformat() + "Z"
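Taken together, the two new modules might be exercised roughly as follows; this is a hedged sketch based only on the code shown above, and the state directory, URL, and agent source are made-up placeholders.

# Sketch only; paths and values below are hypothetical placeholders.
from agentscope_runtime.engine.deployers.state.manager import (
    DeploymentStateManager,
)
from agentscope_runtime.engine.deployers.state.schema import (
    Deployment,
    format_timestamp,
    generate_deployment_id,
)

manager = DeploymentStateManager(state_dir="/tmp/agentscope-state-demo")
deployment = Deployment(
    id=generate_deployment_id("local"),
    platform="local",
    url="http://127.0.0.1:8000",
    agent_source="my_agent.py",
    created_at=format_timestamp(),
)
manager.save(deployment)                        # persists to deployments.json
print([d.id for d in manager.list(platform="local")])
manager.update_status(deployment.id, "stopped")
manager.remove(deployment.id)                   # removing the last entry allows an empty state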