agentscope-runtime 1.0.4__py3-none-any.whl → 1.0.5__py3-none-any.whl
This diff covers the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- agentscope_runtime/adapters/agentscope/stream.py +1 -1
- agentscope_runtime/adapters/langgraph/stream.py +120 -70
- agentscope_runtime/cli/commands/deploy.py +465 -1
- agentscope_runtime/cli/commands/stop.py +16 -0
- agentscope_runtime/common/container_clients/__init__.py +52 -0
- agentscope_runtime/common/container_clients/agentrun_client.py +6 -4
- agentscope_runtime/common/container_clients/boxlite_client.py +442 -0
- agentscope_runtime/common/container_clients/docker_client.py +0 -20
- agentscope_runtime/common/container_clients/fc_client.py +6 -4
- agentscope_runtime/common/container_clients/gvisor_client.py +38 -0
- agentscope_runtime/common/container_clients/knative_client.py +1 -0
- agentscope_runtime/common/utils/deprecation.py +164 -0
- agentscope_runtime/engine/app/agent_app.py +16 -4
- agentscope_runtime/engine/deployers/__init__.py +31 -20
- agentscope_runtime/engine/deployers/adapter/__init__.py +8 -0
- agentscope_runtime/engine/deployers/adapter/a2a/a2a_protocol_adapter.py +9 -8
- agentscope_runtime/engine/deployers/adapter/a2a/nacos_a2a_registry.py +19 -1
- agentscope_runtime/engine/deployers/adapter/agui/__init__.py +8 -0
- agentscope_runtime/engine/deployers/adapter/agui/agui_adapter_utils.py +652 -0
- agentscope_runtime/engine/deployers/adapter/agui/agui_protocol_adapter.py +225 -0
- agentscope_runtime/engine/deployers/pai_deployer.py +2335 -0
- agentscope_runtime/engine/deployers/utils/net_utils.py +37 -0
- agentscope_runtime/engine/deployers/utils/oss_utils.py +38 -0
- agentscope_runtime/engine/deployers/utils/package.py +46 -42
- agentscope_runtime/engine/helpers/agent_api_client.py +372 -0
- agentscope_runtime/engine/runner.py +1 -0
- agentscope_runtime/engine/schemas/agent_schemas.py +9 -3
- agentscope_runtime/engine/services/agent_state/__init__.py +7 -0
- agentscope_runtime/engine/services/memory/__init__.py +7 -0
- agentscope_runtime/engine/services/memory/redis_memory_service.py +15 -16
- agentscope_runtime/engine/services/session_history/__init__.py +7 -0
- agentscope_runtime/engine/tracing/local_logging_handler.py +2 -3
- agentscope_runtime/sandbox/box/sandbox.py +4 -0
- agentscope_runtime/sandbox/manager/sandbox_manager.py +11 -25
- agentscope_runtime/sandbox/manager/server/config.py +3 -1
- agentscope_runtime/sandbox/model/manager_config.py +11 -9
- agentscope_runtime/tools/modelstudio_memory/__init__.py +106 -0
- agentscope_runtime/tools/modelstudio_memory/base.py +220 -0
- agentscope_runtime/tools/modelstudio_memory/config.py +86 -0
- agentscope_runtime/tools/modelstudio_memory/core.py +594 -0
- agentscope_runtime/tools/modelstudio_memory/exceptions.py +60 -0
- agentscope_runtime/tools/modelstudio_memory/schemas.py +253 -0
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.4.dist-info → agentscope_runtime-1.0.5.dist-info}/METADATA +101 -62
- {agentscope_runtime-1.0.4.dist-info → agentscope_runtime-1.0.5.dist-info}/RECORD +49 -34
- {agentscope_runtime-1.0.4.dist-info → agentscope_runtime-1.0.5.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.4.dist-info → agentscope_runtime-1.0.5.dist-info}/entry_points.txt +0 -0
- {agentscope_runtime-1.0.4.dist-info → agentscope_runtime-1.0.5.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.4.dist-info → agentscope_runtime-1.0.5.dist-info}/top_level.txt +0 -0

--- agentscope_runtime/adapters/langgraph/stream.py (1.0.4)
+++ agentscope_runtime/adapters/langgraph/stream.py (1.0.5)
@@ -62,79 +62,121 @@ async def adapt_langgraph_message_stream(
             yield message.completed()
         elif isinstance(msg, AIMessage):
             role = "assistant"
+            tool_calls = getattr(msg, "tool_calls", [])
             has_tool_call_chunk = (
-                True if getattr(msg, "tool_call_chunks") else False
+                True if getattr(msg, "tool_call_chunks", "") else False
             )
-            [old lines 68-98 removed; content not captured in this view, apart from the fragment "data_content" on old line 88]
+            if tool_calls and not has_tool_call_chunk:
+                plugin_call_message = Message(
+                    type=MessageType.PLUGIN_CALL,
+                    role=role,
+                )
+                for tool_call in tool_calls:
+                    tool_call_args = (
+                        tool_call.get("args")
+                        if isinstance(tool_call.get("args"), str)
+                        else json.dumps(tool_call.get("args"))
+                    )
+                    data_content = DataContent(
+                        index=index,
+                        data=FunctionCall(
+                            call_id=tool_call.get("id"),
+                            name=tool_call.get("name"),
+                            arguments=tool_call_args,
+                        ).model_dump(),
+                    )
+                    plugin_call_message.add_content(
+                        data_content,
+                    )
+                    yield data_content.completed()
+                yield plugin_call_message.completed()
+            else:
+                has_tool_call_chunk = (
+                    True if getattr(msg, "tool_call_chunks", "") else False
+                )
+                is_last_chunk = (
+                    True
+                    if getattr(msg, "chunk_position", "") == "last"
+                    else False
+                )
+                # Extract tool calls if present
+                if tool_started:
+                    if has_tool_call_chunk:
+                        tool_call_chunk_msgs.append(msg)
+                    if is_last_chunk:
+                        # tool call finished
+                        tool_started = False
+                        result = reduce(
+                            lambda x, y: x + y,
+                            tool_call_chunk_msgs,
                         )
+                        tool_calls = result.tool_call_chunks
+                        for tool_call in tool_calls:
+                            call_id = tool_call.get("id", "")
+                            # Create new tool call message
+                            plugin_call_message = Message(
+                                type=MessageType.PLUGIN_CALL,
+                                role=role,
+                            )
+                            tool_call_args = (
+                                tool_call.get("args")
+                                if isinstance(tool_call.get("args"), str)
+                                else json.dumps(tool_call.get("args"))
+                            )

-            [old lines 101-108 removed; content not captured in this view]
-                        tool_started = True
-                        tool_call_chunk_msgs.append(msg)
-                    else:
-                        # normal message
-                        content = msg.content if hasattr(msg, "content") else None
-                        if msg_id != getattr(msg, "id"):
-                            index = None
-                            message = Message(type=MessageType.MESSAGE, role=role)
-                            msg_id = getattr(msg, "id")
-                            yield message.in_progress()
+                            data_content = DataContent(
+                                index=index,
+                                data=FunctionCall(
+                                    call_id=call_id,
+                                    name=tool_call.get("name"),
+                                    arguments=tool_call_args,
+                                ).model_dump(),
+                            )

-            [old lines 120-128 removed; content not captured in this view]
+                            data_content = (
+                                plugin_call_message.add_delta_content(
+                                    new_content=data_content,
+                                )
+                            )
+                            yield data_content.completed()
+                            yield plugin_call_message.completed()
+                else:
+                    if has_tool_call_chunk:
+                        # tool call start, collect chunks and continue
+                        tool_started = True
+                        tool_call_chunk_msgs.append(msg)
+                    else:
+                        # normal message
+                        content = (
+                            msg.content if hasattr(msg, "content") else None
                         )
-            [old lines 130-137 removed; content not captured in this view]
+                        if msg_id != getattr(msg, "id"):
+                            index = None
+                            message = Message(
+                                type=MessageType.MESSAGE,
+                                role=role,
+                            )
+                            msg_id = getattr(msg, "id")
+                            yield message.in_progress()
+
+                        if content:
+                            # todo support non str content
+                            text_delta_content = TextContent(
+                                delta=True,
+                                index=index,
+                                text=content,
+                            )
+                            text_delta_content = message.add_delta_content(
+                                new_content=text_delta_content,
+                            )
+                            index = text_delta_content.index
+                            yield text_delta_content
+                        # Handle final completion
+                        if last:
+                            # completed_content = message.content[index]
+                            # if completed_content.text:
+                            # yield completed_content.completed()
+                            yield message.completed()
         elif isinstance(msg, SystemMessage):
             role = "system"
             content = msg.content if hasattr(msg, "content") else None

@@ -163,18 +205,26 @@ async def adapt_langgraph_message_stream(
                 type=MessageType.PLUGIN_CALL_OUTPUT,
                 role="tool",
             )
+            tool_call_output = (
+                msg.content
+                if isinstance(msg.content, str)
+                else json.dumps(msg.content, ensure_ascii=False)
+            )
             # Create function call output data
             function_output_data = FunctionCallOutput(
                 call_id=msg.tool_call_id,
                 name=msg.name,
-                output=  [remainder of this removed line not captured in this view]
+                output=tool_call_output,
             )

             data_content = DataContent(
-                index=None,
                 data=function_output_data.model_dump(),
+                msg_id=plugin_output_message.id,
+            )
+            yield data_content.completed()
+            plugin_output_message.add_content(
+                data_content,
             )
-            plugin_output_message.content = [data_content]
             yield plugin_output_message.completed()
         else:
             role = "assistant"
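A note on the chunked tool-call path added above: the new `else:` branch buffers streamed AI message chunks in `tool_call_chunk_msgs`, folds them together with `reduce(lambda x, y: x + y, ...)`, and reads the merged `tool_call_chunks` to build `FunctionCall` payloads. The sketch below shows only that aggregation step in isolation. It assumes the `langchain_core` message types the LangGraph adapter consumes (`AIMessageChunk`); the chunk values are invented for illustration and this is not the adapter's own code.

```python
# Minimal sketch of the chunk-merging pattern the diff relies on.
# Assumes langchain-core is installed; the payloads below are made up.
import json
from functools import reduce

from langchain_core.messages import AIMessageChunk

# One tool call streamed as two partial chunks: the JSON arguments arrive split.
tool_call_chunk_msgs = [
    AIMessageChunk(
        content="",
        tool_call_chunks=[
            {"name": "search", "args": '{"query": "agent',
             "id": "call_1", "index": 0, "type": "tool_call_chunk"},
        ],
    ),
    AIMessageChunk(
        content="",
        tool_call_chunks=[
            {"name": None, "args": 'scope"}',
             "id": None, "index": 0, "type": "tool_call_chunk"},
        ],
    ),
]

# Adding AIMessageChunk objects merges tool_call_chunks that share an index,
# concatenating the partial `args` strings.
result = reduce(lambda x, y: x + y, tool_call_chunk_msgs)

for tool_call in result.tool_call_chunks:
    args = tool_call.get("args")
    # Same normalization as the diff: keep strings, json-dump anything else.
    arguments = args if isinstance(args, str) else json.dumps(args)
    print(tool_call.get("id", ""), tool_call.get("name"), arguments)
    # -> call_1 search {"query": "agentscope"}
```

In the adapter itself each merged chunk is then wrapped in a `FunctionCall` inside a `DataContent` and yielded as part of a `PLUGIN_CALL` message, as the added lines in the diff show.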