agentscope-runtime 0.1.5b2__py3-none-any.whl → 0.2.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. agentscope_runtime/common/__init__.py +0 -0
  2. agentscope_runtime/common/collections/in_memory_mapping.py +27 -0
  3. agentscope_runtime/common/collections/redis_mapping.py +42 -0
  4. agentscope_runtime/common/container_clients/__init__.py +0 -0
  5. agentscope_runtime/common/container_clients/agentrun_client.py +1098 -0
  6. agentscope_runtime/common/container_clients/docker_client.py +250 -0
  7. agentscope_runtime/{sandbox/manager → common}/container_clients/kubernetes_client.py +6 -13
  8. agentscope_runtime/engine/__init__.py +12 -0
  9. agentscope_runtime/engine/agents/agentscope_agent.py +488 -0
  10. agentscope_runtime/engine/agents/agno_agent.py +26 -27
  11. agentscope_runtime/engine/agents/autogen_agent.py +13 -8
  12. agentscope_runtime/engine/agents/utils.py +53 -0
  13. agentscope_runtime/engine/app/__init__.py +6 -0
  14. agentscope_runtime/engine/app/agent_app.py +239 -0
  15. agentscope_runtime/engine/app/base_app.py +181 -0
  16. agentscope_runtime/engine/app/celery_mixin.py +92 -0
  17. agentscope_runtime/engine/deployers/base.py +1 -0
  18. agentscope_runtime/engine/deployers/cli_fc_deploy.py +39 -20
  19. agentscope_runtime/engine/deployers/kubernetes_deployer.py +12 -5
  20. agentscope_runtime/engine/deployers/local_deployer.py +61 -3
  21. agentscope_runtime/engine/deployers/modelstudio_deployer.py +10 -11
  22. agentscope_runtime/engine/deployers/utils/docker_image_utils/runner_image_factory.py +9 -0
  23. agentscope_runtime/engine/deployers/utils/package_project_utils.py +234 -3
  24. agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +567 -7
  25. agentscope_runtime/engine/deployers/utils/service_utils/standalone_main.py.j2 +211 -0
  26. agentscope_runtime/engine/deployers/utils/wheel_packager.py +1 -1
  27. agentscope_runtime/engine/helpers/helper.py +60 -41
  28. agentscope_runtime/engine/runner.py +35 -24
  29. agentscope_runtime/engine/schemas/agent_schemas.py +42 -0
  30. agentscope_runtime/engine/schemas/modelstudio_llm.py +14 -14
  31. agentscope_runtime/engine/services/sandbox_service.py +62 -70
  32. agentscope_runtime/engine/services/tablestore_memory_service.py +307 -0
  33. agentscope_runtime/engine/services/tablestore_rag_service.py +143 -0
  34. agentscope_runtime/engine/services/tablestore_session_history_service.py +293 -0
  35. agentscope_runtime/engine/services/utils/__init__.py +0 -0
  36. agentscope_runtime/engine/services/utils/tablestore_service_utils.py +352 -0
  37. agentscope_runtime/engine/tracing/__init__.py +9 -3
  38. agentscope_runtime/engine/tracing/asyncio_util.py +24 -0
  39. agentscope_runtime/engine/tracing/base.py +66 -34
  40. agentscope_runtime/engine/tracing/local_logging_handler.py +45 -31
  41. agentscope_runtime/engine/tracing/message_util.py +528 -0
  42. agentscope_runtime/engine/tracing/tracing_metric.py +20 -8
  43. agentscope_runtime/engine/tracing/tracing_util.py +130 -0
  44. agentscope_runtime/engine/tracing/wrapper.py +794 -169
  45. agentscope_runtime/sandbox/__init__.py +2 -0
  46. agentscope_runtime/sandbox/box/base/__init__.py +4 -0
  47. agentscope_runtime/sandbox/box/base/base_sandbox.py +6 -4
  48. agentscope_runtime/sandbox/box/browser/__init__.py +4 -0
  49. agentscope_runtime/sandbox/box/browser/browser_sandbox.py +10 -14
  50. agentscope_runtime/sandbox/box/dummy/__init__.py +4 -0
  51. agentscope_runtime/sandbox/box/dummy/dummy_sandbox.py +2 -1
  52. agentscope_runtime/sandbox/box/filesystem/__init__.py +4 -0
  53. agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +10 -7
  54. agentscope_runtime/sandbox/box/gui/__init__.py +4 -0
  55. agentscope_runtime/sandbox/box/gui/box/__init__.py +0 -0
  56. agentscope_runtime/sandbox/box/gui/gui_sandbox.py +81 -0
  57. agentscope_runtime/sandbox/box/sandbox.py +5 -2
  58. agentscope_runtime/sandbox/box/shared/routers/generic.py +20 -1
  59. agentscope_runtime/sandbox/box/training_box/__init__.py +4 -0
  60. agentscope_runtime/sandbox/box/training_box/training_box.py +7 -54
  61. agentscope_runtime/sandbox/build.py +143 -58
  62. agentscope_runtime/sandbox/client/http_client.py +87 -59
  63. agentscope_runtime/sandbox/client/training_client.py +0 -1
  64. agentscope_runtime/sandbox/constant.py +27 -1
  65. agentscope_runtime/sandbox/custom/custom_sandbox.py +7 -6
  66. agentscope_runtime/sandbox/custom/example.py +4 -3
  67. agentscope_runtime/sandbox/enums.py +1 -1
  68. agentscope_runtime/sandbox/manager/sandbox_manager.py +212 -106
  69. agentscope_runtime/sandbox/manager/server/app.py +82 -14
  70. agentscope_runtime/sandbox/manager/server/config.py +50 -3
  71. agentscope_runtime/sandbox/model/container.py +12 -23
  72. agentscope_runtime/sandbox/model/manager_config.py +93 -5
  73. agentscope_runtime/sandbox/registry.py +1 -1
  74. agentscope_runtime/sandbox/tools/gui/__init__.py +7 -0
  75. agentscope_runtime/sandbox/tools/gui/tool.py +77 -0
  76. agentscope_runtime/sandbox/tools/mcp_tool.py +6 -2
  77. agentscope_runtime/sandbox/tools/tool.py +4 -0
  78. agentscope_runtime/sandbox/utils.py +124 -0
  79. agentscope_runtime/version.py +1 -1
  80. {agentscope_runtime-0.1.5b2.dist-info → agentscope_runtime-0.2.0b2.dist-info}/METADATA +214 -102
  81. {agentscope_runtime-0.1.5b2.dist-info → agentscope_runtime-0.2.0b2.dist-info}/RECORD +94 -78
  82. agentscope_runtime/engine/agents/agentscope_agent/__init__.py +0 -6
  83. agentscope_runtime/engine/agents/agentscope_agent/agent.py +0 -401
  84. agentscope_runtime/engine/agents/agentscope_agent/hooks.py +0 -169
  85. agentscope_runtime/engine/agents/llm_agent.py +0 -51
  86. agentscope_runtime/engine/llms/__init__.py +0 -3
  87. agentscope_runtime/engine/llms/base_llm.py +0 -60
  88. agentscope_runtime/engine/llms/qwen_llm.py +0 -47
  89. agentscope_runtime/sandbox/manager/collections/in_memory_mapping.py +0 -22
  90. agentscope_runtime/sandbox/manager/collections/redis_mapping.py +0 -26
  91. agentscope_runtime/sandbox/manager/container_clients/__init__.py +0 -10
  92. agentscope_runtime/sandbox/manager/container_clients/docker_client.py +0 -422
  93. /agentscope_runtime/{sandbox/manager → common}/collections/__init__.py +0 -0
  94. /agentscope_runtime/{sandbox/manager → common}/collections/base_mapping.py +0 -0
  95. /agentscope_runtime/{sandbox/manager → common}/collections/base_queue.py +0 -0
  96. /agentscope_runtime/{sandbox/manager → common}/collections/base_set.py +0 -0
  97. /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_queue.py +0 -0
  98. /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_set.py +0 -0
  99. /agentscope_runtime/{sandbox/manager → common}/collections/redis_queue.py +0 -0
  100. /agentscope_runtime/{sandbox/manager → common}/collections/redis_set.py +0 -0
  101. /agentscope_runtime/{sandbox/manager → common}/container_clients/base_client.py +0 -0
  102. {agentscope_runtime-0.1.5b2.dist-info → agentscope_runtime-0.2.0b2.dist-info}/WHEEL +0 -0
  103. {agentscope_runtime-0.1.5b2.dist-info → agentscope_runtime-0.2.0b2.dist-info}/entry_points.txt +0 -0
  104. {agentscope_runtime-0.1.5b2.dist-info → agentscope_runtime-0.2.0b2.dist-info}/licenses/LICENSE +0 -0
  105. {agentscope_runtime-0.1.5b2.dist-info → agentscope_runtime-0.2.0b2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,211 @@
+ import os
+ import sys
+
+ # Add current directory and handler directories to Python path for local module imports
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ if current_dir not in sys.path:
+     sys.path.insert(0, current_dir)
+
+ {% if handler_dirs %}
+ # Add copied handler directories to sys.path
+ {% for handler_dir in handler_dirs %}
+ handler_dir_path = os.path.join(current_dir, "{{handler_dir}}")
+ if os.path.isdir(handler_dir_path) and handler_dir_path not in sys.path:
+     sys.path.insert(0, handler_dir_path)
+ {% endfor %}
+ {% endif %}
+
+ import uvicorn
+ from agentscope_runtime.engine.deployers.utils.deployment_modes import DeploymentMode
+ from agentscope_runtime.engine.deployers.utils.service_utils import (
+     FastAPIAppFactory,
+     ServicesConfig,
+     ServiceConfig,
+ )
+ from agentscope_runtime.engine.schemas.agent_schemas import AgentRequest
+ from typing import Callable, Optional, Dict, List, Any, Union
+
+ from agent_file import {{agent_name}} as agent
+ {% if protocol_adapters %}{{protocol_adapters}}{% endif %}
+
+ {% if custom_endpoints %}
+ # Custom endpoint imports
+ {% for endpoint in custom_endpoints %}
+ {% if endpoint.handler_module and endpoint.function_name %}
+ from {{endpoint.handler_module}} import {{endpoint.function_name}}
+ {% elif endpoint.module and endpoint.function_name %}
+ from agent_file import {{endpoint.function_name}}
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+
+
+
+ def load_services_config() -> ServicesConfig:
+     """Load services configuration from environment variables or config file."""
+     config_file = os.getenv('AGENTSCOPE_SERVICES_CONFIG')
+
+     if config_file and os.path.exists(config_file):
+         # Load from JSON config file
+         import json
+         with open(config_file, 'r', encoding='utf-8') as f:
+             config_data = json.load(f)
+         return ServicesConfig.model_validate(config_data)
+     else:
+         # Load from environment variables
+         memory_provider = os.getenv('MEMORY_PROVIDER', 'in_memory')
+         session_provider = os.getenv('SESSION_HISTORY_PROVIDER', 'in_memory')
+
+         memory_config = {}
+         session_config = {}
+
+         # Add Redis configuration if using Redis
+         if memory_provider == 'redis':
+             memory_config = {
+                 'host': os.getenv('REDIS_HOST', 'localhost'),
+                 'port': int(os.getenv('REDIS_PORT', 6379)),
+                 'db': int(os.getenv('REDIS_MEMORY_DB', 0))
+             }
+
+         if session_provider == 'redis':
+             session_config = {
+                 'host': os.getenv('REDIS_HOST', 'localhost'),
+                 'port': int(os.getenv('REDIS_PORT', 6379)),
+                 'db': int(os.getenv('REDIS_SESSION_DB', 1))
+             }
+
+         return ServicesConfig(
+             memory=ServiceConfig(provider=memory_provider, config=memory_config),
+             session_history=ServiceConfig(provider=session_provider, config=session_config)
+         )
+
+
+ def load_celery_config():
+     """Load Celery configuration from environment variables or infer from existing config."""
+     celery_config = {
+         'broker_url': None,
+         'backend_url': None,
+         'enable_embedded_worker': False,
+         'redis_available': False
+     }
+
+     {% if celery_config %}
+     # Apply inline Celery configuration
+     {{celery_config}}
+     {% endif %}
+
+     if not celery_config.get('broker_url') or not celery_config.get('backend_url'):
+         redis_host = os.getenv('REDIS_HOST', 'localhost')
+         redis_port = os.getenv('REDIS_PORT', '6379')
+
+         # Use different databases for different purposes
+         broker_db = os.getenv('CELERY_BROKER_DB', '2')  # Default DB 2 for Celery broker
+         backend_db = os.getenv('CELERY_BACKEND_DB', '3')  # Default DB 3 for Celery backend
+
+         # Auto-generate URLs if Redis is available
+         if not celery_config.get('broker_url'):
+             celery_config['broker_url'] = f"redis://{redis_host}:{redis_port}/{broker_db}"
+         if not celery_config.get('backend_url'):
+             celery_config['backend_url'] = f"redis://{redis_host}:{redis_port}/{backend_db}"
+
+     return celery_config
+
+
+ def _check_redis_availability(redis_url: str) -> bool:
+     """Check if Redis is available at the given URL."""
+     try:
+         import redis
+         from urllib.parse import urlparse
+
+         parsed = urlparse(redis_url)
+         host = parsed.hostname or 'localhost'
+         port = parsed.port or 6379
+
+         r = redis.Redis(host=host, port=port, socket_connect_timeout=2)
+         r.ping()
+         return True
+     except Exception:
+         return False
+
+
+ {% if custom_endpoints %}
+ def setup_custom_endpoints():
+     """Setup custom endpoints configuration."""
+     custom_endpoints = [
+         {% for endpoint in custom_endpoints %}
+         {
+             "path": "{{endpoint.path}}",
+             "handler": {% if endpoint.handler_module or endpoint.module %}{{endpoint.function_name}}{% else %}{{endpoint.inline_code|default('lambda request: {"error": "Handler not available"}')}}{% endif %},
+             "methods": {{endpoint.methods}}
+         },
+         {% endfor %}
+     ]
+     return custom_endpoints
+ {% endif %}
+
+
+ async def setup_runner_with_agent(app):
+     """Setup runner with the agent instance."""
+     if hasattr(app.state, 'runner') and app.state.runner:
+         # Set the agent on the runner
+         app.state.runner._agent = agent
+
+
+ async def before_start(app, **kwargs):
+     """Application startup callback."""
+     await setup_runner_with_agent(app)
+
+
+ async def after_finish(app, **kwargs):
+     """Application shutdown callback."""
+     # Runner cleanup is handled by the factory
+     pass
+
+
+ # Load services configuration
+ services_config = load_services_config()
+
+ # Load Celery configuration
+ celery_config = load_celery_config()
+
+ # Determine deployment mode from environment or use default
+ deployment_mode_str = os.getenv('DEPLOYMENT_MODE', '{{deployment_mode|default("standalone")}}')
+ if deployment_mode_str == 'detached_process':
+     deployment_mode = DeploymentMode.DETACHED_PROCESS
+ elif deployment_mode_str == 'standalone':
+     deployment_mode = DeploymentMode.STANDALONE
+ else:
+     deployment_mode = DeploymentMode.STANDALONE  # fallback
+
+ # Create FastAPI application using the factory
+ app = FastAPIAppFactory.create_app(
+     endpoint_path="{{endpoint_path}}",
+     mode=deployment_mode,
+     services_config=services_config,
+     before_start=before_start,
+     after_finish=after_finish,
+     stream=True,
+     broker_url=celery_config['broker_url'],
+     backend_url=celery_config['backend_url'],
+     enable_embedded_worker=celery_config['enable_embedded_worker']{% if protocol_adapters %},
+     protocol_adapters=protocol_adapters{% endif %}{% if custom_endpoints %},
+     custom_endpoints=setup_custom_endpoints(){% endif %}
+ )
+
+
+ if __name__ == "__main__":
+     import argparse
+
+     parser = argparse.ArgumentParser(description="AgentScope Runtime Standalone Service")
+     parser.add_argument("--host", default="0.0.0.0", help="Host to bind to")
+     parser.add_argument("--port", type=int, default=8080, help="Port to bind to")
+     parser.add_argument("--workers", type=int, default=1, help="Number of worker processes")
+
+     args = parser.parse_args()
+
+     uvicorn.run(
+         app,
+         host=args.host,
+         port=args.port,
+         workers=args.workers if args.workers > 1 else None
+     )
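The file above is the new standalone_main.py.j2 template that the deployer renders into a self-contained FastAPI entry point. As a hedged sketch of how it could be rendered (the actual rendering call lives in the deployer utilities and is not shown in this diff; the loader path, variable values, and output filename below are illustrative, while the variable names come from the template itself):

    # Hypothetical rendering of the packaged template; not code from the wheel itself.
    from jinja2 import Environment, FileSystemLoader

    env = Environment(
        loader=FileSystemLoader(
            "agentscope_runtime/engine/deployers/utils/service_utils",
        ),
    )
    template = env.get_template("standalone_main.py.j2")

    rendered = template.render(
        agent_name="my_agent",        # object exported by the packaged agent_file module
        endpoint_path="/process",     # illustrative endpoint path
        deployment_mode="standalone",
        handler_dirs=[],              # optional extra directories appended to sys.path
        custom_endpoints=[],          # optional extra FastAPI routes
        protocol_adapters=None,
        celery_config=None,
    )

    with open("standalone_main.py", "w", encoding="utf-8") as f:
        f.write(rendered)

The rendered script then reads MEMORY_PROVIDER, SESSION_HISTORY_PROVIDER, REDIS_*, CELERY_*_DB, and DEPLOYMENT_MODE from the environment at startup, as load_services_config() and load_celery_config() above show.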
@@ -26,7 +26,7 @@ from typing import List, Tuple

  try:
      import tomllib  # Python 3.11+
- except Exception:  # pragma: no cover - fallback on older Pythons
+ except ImportError:  # pragma: no cover - fallback on older Pythons
      tomllib = None  # type: ignore


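The change above narrows the fallback guard from a bare Exception to ImportError, so only a genuinely missing tomllib (Python < 3.11) leaves the module-level name as None, while real parsing or filesystem errors still propagate. A minimal, hypothetical consumer of that guard:

    # Assumes this sits in the same module as the guarded tomllib import above;
    # read_project_name is illustrative and not part of the package.
    from typing import Optional

    def read_project_name(pyproject_path: str) -> Optional[str]:
        if tomllib is None:  # Python < 3.11: the import above failed
            return None
        with open(pyproject_path, "rb") as f:
            data = tomllib.load(f)
        return data.get("project", {}).get("name")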
@@ -1,4 +1,5 @@
  # -*- coding: utf-8 -*-
+ import logging

  from agentscope_runtime.engine import Runner
  from agentscope_runtime.engine.schemas.agent_schemas import (
@@ -13,22 +14,28 @@ from agentscope_runtime.engine.services.environment_manager import (
      create_environment_manager,
  )

+ logger = logging.getLogger(__name__)
+

  async def simple_call_agent(query, runner, user_id=None, session_id=None):
-     request = AgentRequest(
-         input=[
-             {
-                 "role": "user",
-                 "content": [
-                     {
-                         "type": "text",
-                         "text": query,
-                     },
-                 ],
-             },
-         ],
-         session_id=session_id,
-     )
+     if isinstance(query, str):
+         request = AgentRequest(
+             input=[
+                 {
+                     "role": "user",
+                     "content": [
+                         {
+                             "type": "text",
+                             "text": query,
+                         },
+                     ],
+                 },
+             ],
+             session_id=session_id,
+         )
+     else:
+         request = query
+
      all_result = ""
      async for message in runner.stream_query(
          user_id=user_id,
@@ -58,6 +65,23 @@ async def simple_call_agent_direct(agent, query):


  async def simple_call_agent_tool(agent, query):
+     if isinstance(query, str):
+         request = AgentRequest(
+             input=[
+                 {
+                     "role": "user",
+                     "content": [
+                         {
+                             "type": "text",
+                             "text": query,
+                         },
+                     ],
+                 },
+             ],
+         )
+     else:
+         request = query
+
      all_result = ""
      async with create_context_manager() as context_manager:
          async with create_environment_manager() as environment_manager:
@@ -67,20 +91,6 @@ async def simple_call_agent_tool(agent, query):
                  environment_manager=environment_manager,
              )

-             request = AgentRequest(
-                 input=[
-                     {
-                         "role": "user",
-                         "content": [
-                             {
-                                 "type": "text",
-                                 "text": query,
-                             },
-                         ],
-                     },
-                 ],
-             )
-
              async for message in runner.stream_query(
                  request=request,
              ):
@@ -94,12 +104,7 @@


  async def simple_call_agent_tool_auto_lifecycle(agent, query):
-     all_result = ""
-     async with Runner(
-         agent=agent,
-         context_manager=create_context_manager(),
-         environment_manager=create_environment_manager(),
-     ) as runner:
+     if isinstance(query, str):
          request = AgentRequest(
              input=[
                  {
@@ -113,7 +118,15 @@ async def simple_call_agent_tool_auto_lifecycle(agent, query):
                  },
              ],
          )
+     else:
+         request = query

+     all_result = ""
+     async with Runner(
+         agent=agent,
+         context_manager=create_context_manager(),
+         environment_manager=create_environment_manager(),
+     ) as runner:
          async for message in runner.stream_query(
              request=request,
          ):
@@ -127,13 +140,7 @@ async def simple_call_agent_tool_auto_lifecycle(agent, query):


  async def simple_call_agent_tool_wo_env(agent, query):
-     all_result = ""
-     async with create_context_manager() as context_manager:
-         runner = Runner(
-             agent=agent,
-             context_manager=context_manager,
-         )
-
+     if isinstance(query, str):
          request = AgentRequest(
              input=[
                  {
@@ -147,6 +154,15 @@ async def simple_call_agent_tool_wo_env(agent, query):
                  },
              ],
          )
+     else:
+         request = query
+
+     all_result = ""
+     async with create_context_manager() as context_manager:
+         runner = Runner(
+             agent=agent,
+             context_manager=context_manager,
+         )

          async for message in runner.stream_query(
              request=request,
@@ -157,4 +173,7 @@ async def simple_call_agent_tool_wo_env(agent, query):
              and RunStatus.Completed == message.status
          ):
              all_result = message.content[0].text
+
+     logger.debug(message.model_dump())
+
      return all_result
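After this change the simple_call_agent* helpers accept either a plain string or an already-built AgentRequest; only the former is wrapped by the isinstance() branch shown above. A hedged usage sketch (assuming the helpers remain importable from agentscope_runtime.engine.helpers.helper and given some already-constructed agent object):

    from agentscope_runtime.engine.helpers.helper import (
        simple_call_agent_tool_auto_lifecycle,
    )
    from agentscope_runtime.engine.schemas.agent_schemas import AgentRequest

    async def demo(agent):
        # 1) Plain string: the helper builds the AgentRequest internally.
        text_answer = await simple_call_agent_tool_auto_lifecycle(agent, "What is 2 + 2?")

        # 2) Pre-built request: passed through unchanged by the isinstance() branch.
        request = AgentRequest(
            input=[
                {
                    "role": "user",
                    "content": [{"type": "text", "text": "What is 2 + 2?"}],
                },
            ],
        )
        structured_answer = await simple_call_agent_tool_auto_lifecycle(agent, request)
        return text_answer, structured_answer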
@@ -3,8 +3,6 @@ import uuid
  from contextlib import AsyncExitStack
  from typing import Optional, List, AsyncGenerator, Any, Union, Dict

- from openai.types.chat import ChatCompletion
-
  from agentscope_runtime.engine.deployers.utils.service_utils import (
      ServicesConfig,
  )
@@ -19,12 +17,17 @@ from .schemas.agent_schemas import (
      AgentRequest,
      RunStatus,
      AgentResponse,
+     SequenceNumberGenerator,
  )
  from .schemas.context import Context
  from .services.context_manager import ContextManager
  from .services.environment_manager import EnvironmentManager
  from .tracing import TraceType
  from .tracing.wrapper import trace
+ from .tracing.message_util import (
+     merge_agent_response,
+     get_agent_response_finish_reason,
+ )


  class Runner:
@@ -43,7 +46,9 @@
          """
          self._agent = agent
          self._environment_manager = environment_manager
-         self._context_manager = context_manager
+         self._context_manager = (
+             context_manager or ContextManager()
+         )  # Add default context manager
          self._deploy_managers = {}
          self._exit_stack = AsyncExitStack()

@@ -127,7 +132,12 @@
          self._deploy_managers[deploy_manager.deploy_id] = deploy_result
          return deploy_result

-     @trace(TraceType.AGENT_STEP)
+     @trace(
+         TraceType.AGENT_STEP,
+         trace_name="agent_step",
+         merge_output_func=merge_agent_response,
+         get_finish_reason_func=get_agent_response_finish_reason,
+     )
      async def stream_query(  # pylint:disable=unused-argument
          self,
          request: Union[AgentRequest, dict],
@@ -141,11 +151,15 @@
          if isinstance(request, dict):
              request = AgentRequest(**request)

+         seq_gen = SequenceNumberGenerator()
+
+         # Initial response
          response = AgentResponse()
-         yield response
+         yield seq_gen.yield_with_sequence(response)

+         # Set to in-progress status
          response.in_progress()
-         yield response
+         yield seq_gen.yield_with_sequence(response)

          user_id = user_id or str(uuid.uuid4())
          session_id = request.session_id or str(uuid.uuid4())
@@ -193,35 +207,32 @@
              request_input=request_input,
          )

-         sequence_number = 0
          async for event in context.agent.run_async(context):
              if (
                  event.status == RunStatus.Completed
                  and event.object == "message"
              ):
                  response.add_new_message(event)
-                 event.sequence_number = sequence_number
-                 yield event
-                 sequence_number += 1
+                 yield seq_gen.yield_with_sequence(event)

          await context.context_manager.append(
              session=context.session,
              event_output=response.output,
          )
-         response.sequence_number = sequence_number
-         yield response.completed()
-
-     @trace(TraceType.AGENT_STEP)
-     async def query(  # pylint:disable=unused-argument
-         self,
-         message: List[dict],
-         session_id: Optional[str] = None,
-         **kwargs: Any,
-     ) -> ChatCompletion:
-         """
-         Streams the agent.
-         """
-         return self._agent.query(message, session_id)
+         yield seq_gen.yield_with_sequence(response.completed())
+
+     # TODO: will be added before 2025/11/30
+     # @trace(TraceType.AGENT_STEP)
+     # async def query(  # pylint:disable=unused-argument
+     #     self,
+     #     message: List[dict],
+     #     session_id: Optional[str] = None,
+     #     **kwargs: Any,
+     # ) -> ChatCompletion:
+     #     """
+     #     Streams the agent.
+     #     """
+     #     return self._agent.query(message, session_id)

      # TODO: should be sync method?
      async def stop(
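Two behavioral changes in this file are worth illustrating: Runner now falls back to a default ContextManager when none is supplied, and every yielded event gets its sequence_number from the SequenceNumberGenerator defined in the agent_schemas hunk below, replacing the hand-maintained counter; the non-streaming query() method is commented out for now. A hedged consumption sketch (the agent object and prompt are placeholders, and omitting environment_manager is assumed to be acceptable, as in the helpers above):

    from agentscope_runtime.engine import Runner
    from agentscope_runtime.engine.schemas.agent_schemas import (
        AgentRequest,
        AgentResponse,
        RunStatus,
        SequenceNumberGenerator,
    )

    async def consume(agent):
        request = AgentRequest(
            input=[
                {"role": "user", "content": [{"type": "text", "text": "hello"}]},
            ],
        )
        # context_manager is omitted on purpose: it now defaults to ContextManager().
        async with Runner(agent=agent) as runner:
            async for event in runner.stream_query(request=request):
                print(event.sequence_number, event.object, event.status)
                if event.object == "message" and event.status == RunStatus.Completed:
                    print(event.content[0].text)

    # The generator itself is tiny: numbering starts at 0 (or a custom start value),
    # and yield_with_sequence() stamps the counter onto an event before returning it.
    gen = SequenceNumberGenerator(start=10)
    stamped = gen.yield_with_sequence(AgentResponse())
    assert stamped.sequence_number == 10
    assert gen.next() == 11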
@@ -752,6 +752,48 @@ class AgentResponse(BaseResponse):
      """conversation id for dialog"""


+ class SequenceNumberGenerator:
+     """
+     A simple sequence number generator for streaming events.
+
+     This class encapsulates the logic for generating sequential numbers,
+     making the code more maintainable and less error-prone.
+     """
+
+     def __init__(self, start: int = 0):
+         """
+         Initialize the generator with a starting number.
+
+         Args:
+             start: The starting sequence number (default: 0)
+         """
+         self._current = start
+
+     def next(self) -> int:
+         """
+         Get the next sequence number and increment the counter.
+
+         Returns:
+             The current sequence number before incrementing
+         """
+         current = self._current
+         self._current += 1
+         return current
+
+     def yield_with_sequence(self, event: Event) -> Event:
+         """
+         Set the sequence number on an event and increment the counter.
+
+         Args:
+             event: The event to set the sequence number on
+
+         Returns:
+             The same event with sequence number set
+         """
+         event.sequence_number = self.next()
+         return event
+
+
  def convert_to_openai_tool_call(function: FunctionCall):
      return {
          "id": function.get("call_id", None),
@@ -32,7 +32,7 @@ class IntentionOptions(BaseModel):
      """A list of blocked intentions that should not be processed."""

      search_model: str = "search_v6"
-     """The searches model version to use for intentions recognition."""
+     """The search model version to use for intentions recognition."""

      intensity: Optional[int] = None
      """The intensity level for intentions matching and processing."""
@@ -44,11 +44,11 @@ class IntentionOptions(BaseModel):
  class SearchOptions(BaseModel):
      """
      Search Options on Modelstudio platform for knowledge retrieval and web
-     searches.
+     search.
      """

      enable_source: bool = False
-     """Whether to include source information in searches results."""
+     """Whether to include source information in search results."""

      enable_citation: bool = False
      """Whether to include citation information for retrieved content."""
@@ -72,26 +72,26 @@ class SearchOptions(BaseModel):
          "turbo",
          "max",
      ] = "turbo"
-     """The searches strategy to use ('standard', 'pro_ultra',
+     """The search strategy to use ('standard', 'pro_ultra',
      'pro', 'lite','pro_max', 'image','turbo','max'). """

      forced_search: bool = False
-     """Whether to force searches even when cached results are available."""
+     """Whether to force search even when cached results are available."""

      prepend_search_result: bool = False
-     """Whether to prepend searches results to the response."""
+     """Whether to prepend search results to the response."""

      enable_search_extension: bool = False
-     """Whether to enable extended searches capabilities."""
+     """Whether to enable extended search capabilities."""

      item_cnt: int = 20000
-     """The maximum number of items to retrieve in searches results."""
+     """The maximum number of items to retrieve in search results."""

      top_n: int = 0
      """The number of top results to return (0 means return all)."""

      intention_options: Union[IntentionOptions, None] = IntentionOptions()
-     """Options for intentions recognition and processing during searches."""
+     """Options for intentions recognition and processing during search."""


  # maximum chunk size from knowledge base [1, 20]
@@ -148,7 +148,7 @@ class RagOptions(BaseModel):
          default_factory=list,
          alias="file_id_list",
      )
-     """List of specific file IDs to searches within."""
+     """List of specific file IDs to search within."""

      prompt_strategy: Optional[str] = Field(
          default="topK",
@@ -173,7 +173,7 @@ class RagOptions(BaseModel):
      """Options for handling cases when RAG retrieval fails."""

      enable_web_search: bool = False
-     """Whether to enable web searches as part of the RAG pipeline."""
+     """Whether to enable web search as part of the RAG pipeline."""

      session_file_ids: Optional[List[str]] = Field(default_factory=list)
      """List of file IDs that are specific to the current session."""
@@ -214,7 +214,7 @@ class RagOptions(BaseModel):
      """The prompt to use for content rejection filtering."""

      enable_agg_search: Optional[bool] = None
-     """Whether to enable aggregation searches."""
+     """Whether to enable aggregation search."""

      enable_hybrid_gen: Optional[bool] = None
      """Whether to enable hybrid generations."""
@@ -269,10 +269,10 @@ class ModelstudioParameters(Parameters):

      # Search
      enable_search: bool = False
-     """Whether to enable searches capabilities for knowledge retrieval."""
+     """Whether to enable search capabilities for knowledge retrieval."""

      search_options: Optional[SearchOptions] = SearchOptions()
-     """Configuration options for searches functionality."""
+     """Configuration options for search functionality."""

      # RAG
      enable_rag: bool = False  # RAGs of modelstudio assistant service
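The docstring fixes above all concern the same search-related fields; as a hedged configuration sketch (field values are illustrative, only fields visible in this diff are used, and the import path follows the file location in this wheel):

    from agentscope_runtime.engine.schemas.modelstudio_llm import (
        IntentionOptions,
        SearchOptions,
    )

    search_options = SearchOptions(
        enable_source=True,       # include source information in search results
        enable_citation=True,     # include citation information for retrieved content
        forced_search=False,      # do not force search when cached results exist
        prepend_search_result=False,
        top_n=5,                  # return only the top 5 results (0 means all)
        intention_options=IntentionOptions(search_model="search_v6"),
    )

    # The resulting object would then be attached to ModelstudioParameters through its
    # search_options field, together with enable_search=True.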