agentscope-runtime 0.1.6__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentscope_runtime/common/container_clients/__init__.py +0 -0
- agentscope_runtime/{sandbox/manager → common}/container_clients/kubernetes_client.py +546 -6
- agentscope_runtime/engine/__init__.py +12 -0
- agentscope_runtime/engine/agents/agentscope_agent.py +130 -10
- agentscope_runtime/engine/agents/agno_agent.py +8 -10
- agentscope_runtime/engine/agents/langgraph_agent.py +52 -9
- agentscope_runtime/engine/app/__init__.py +6 -0
- agentscope_runtime/engine/app/agent_app.py +239 -0
- agentscope_runtime/engine/app/base_app.py +181 -0
- agentscope_runtime/engine/app/celery_mixin.py +92 -0
- agentscope_runtime/engine/deployers/__init__.py +13 -0
- agentscope_runtime/engine/deployers/adapter/responses/__init__.py +0 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_adapter_utils.py +2890 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_agent_adapter.py +51 -0
- agentscope_runtime/engine/deployers/adapter/responses/response_api_protocol_adapter.py +314 -0
- agentscope_runtime/engine/deployers/base.py +1 -0
- agentscope_runtime/engine/deployers/cli_fc_deploy.py +203 -0
- agentscope_runtime/engine/deployers/kubernetes_deployer.py +272 -0
- agentscope_runtime/engine/deployers/local_deployer.py +414 -501
- agentscope_runtime/engine/deployers/modelstudio_deployer.py +838 -0
- agentscope_runtime/engine/deployers/utils/__init__.py +0 -0
- agentscope_runtime/engine/deployers/utils/deployment_modes.py +14 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/__init__.py +8 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/docker_image_builder.py +429 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +240 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/runner_image_factory.py +306 -0
- agentscope_runtime/engine/deployers/utils/package_project_utils.py +1163 -0
- agentscope_runtime/engine/deployers/utils/service_utils/__init__.py +9 -0
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +1064 -0
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_templates.py +157 -0
- agentscope_runtime/engine/deployers/utils/service_utils/process_manager.py +268 -0
- agentscope_runtime/engine/deployers/utils/service_utils/service_config.py +75 -0
- agentscope_runtime/engine/deployers/utils/service_utils/service_factory.py +220 -0
- agentscope_runtime/engine/deployers/utils/service_utils/standalone_main.py.j2 +211 -0
- agentscope_runtime/engine/deployers/utils/wheel_packager.py +389 -0
- agentscope_runtime/engine/helpers/agent_api_builder.py +651 -0
- agentscope_runtime/engine/runner.py +76 -35
- agentscope_runtime/engine/schemas/agent_schemas.py +112 -2
- agentscope_runtime/engine/schemas/embedding.py +37 -0
- agentscope_runtime/engine/schemas/modelstudio_llm.py +310 -0
- agentscope_runtime/engine/schemas/oai_llm.py +538 -0
- agentscope_runtime/engine/schemas/realtime.py +254 -0
- agentscope_runtime/engine/services/tablestore_memory_service.py +4 -1
- agentscope_runtime/engine/tracing/__init__.py +9 -3
- agentscope_runtime/engine/tracing/asyncio_util.py +24 -0
- agentscope_runtime/engine/tracing/base.py +66 -34
- agentscope_runtime/engine/tracing/local_logging_handler.py +45 -31
- agentscope_runtime/engine/tracing/message_util.py +528 -0
- agentscope_runtime/engine/tracing/tracing_metric.py +20 -8
- agentscope_runtime/engine/tracing/tracing_util.py +130 -0
- agentscope_runtime/engine/tracing/wrapper.py +794 -169
- agentscope_runtime/sandbox/box/base/base_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/browser/browser_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/dummy/dummy_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/gui/gui_sandbox.py +2 -1
- agentscope_runtime/sandbox/box/training_box/training_box.py +0 -42
- agentscope_runtime/sandbox/client/http_client.py +52 -18
- agentscope_runtime/sandbox/constant.py +3 -0
- agentscope_runtime/sandbox/custom/custom_sandbox.py +2 -1
- agentscope_runtime/sandbox/custom/example.py +2 -1
- agentscope_runtime/sandbox/enums.py +0 -1
- agentscope_runtime/sandbox/manager/sandbox_manager.py +29 -22
- agentscope_runtime/sandbox/model/container.py +6 -0
- agentscope_runtime/sandbox/registry.py +1 -1
- agentscope_runtime/sandbox/tools/tool.py +4 -0
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/METADATA +103 -59
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/RECORD +87 -52
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/entry_points.txt +1 -0
- /agentscope_runtime/{sandbox/manager/container_clients → common}/__init__.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/__init__.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/base_mapping.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/base_queue.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/base_set.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_mapping.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_queue.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/in_memory_set.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/redis_mapping.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/redis_queue.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/collections/redis_set.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/container_clients/agentrun_client.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/container_clients/base_client.py +0 -0
- /agentscope_runtime/{sandbox/manager → common}/container_clients/docker_client.py +0 -0
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/WHEEL +0 -0
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-0.1.6.dist-info → agentscope_runtime-0.2.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1064 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# pylint:disable=too-many-branches, unused-argument, too-many-return-statements
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import inspect
|
|
7
|
+
import json
|
|
8
|
+
from contextlib import asynccontextmanager
|
|
9
|
+
from typing import Optional, Callable, Type, Any, List, Dict
|
|
10
|
+
|
|
11
|
+
from fastapi import FastAPI, Request
|
|
12
|
+
from fastapi.middleware.cors import CORSMiddleware
|
|
13
|
+
from fastapi.responses import StreamingResponse, JSONResponse
|
|
14
|
+
from pydantic import BaseModel
|
|
15
|
+
|
|
16
|
+
from .service_config import ServicesConfig, DEFAULT_SERVICES_CONFIG
|
|
17
|
+
from .service_factory import ServiceFactory
|
|
18
|
+
from ..deployment_modes import DeploymentMode
|
|
19
|
+
from ...adapter.protocol_adapter import ProtocolAdapter
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
async def error_stream(e):
    """Yield a single SSE frame describing a request-parsing failure.

    Args:
        e: The exception to report; its string form is embedded in the
            JSON payload of the frame.

    Yields:
        One ``data: {...}\\n\\n`` server-sent-event string.
    """
    payload = json.dumps({"error": f"Request parsing error: {str(e)}"})
    yield f"data: {payload}\n\n"
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class FastAPIAppFactory:
|
|
30
|
+
"""Factory for creating FastAPI applications with unified architecture."""
|
|
31
|
+
|
|
32
|
+
@staticmethod
def create_app(
    func: Optional[Callable] = None,
    runner: Optional[Any] = None,
    endpoint_path: str = "/process",
    request_model: Optional[Type] = None,
    response_type: str = "sse",
    stream: bool = True,
    before_start: Optional[Callable] = None,
    after_finish: Optional[Callable] = None,
    mode: DeploymentMode = DeploymentMode.DAEMON_THREAD,
    services_config: Optional[ServicesConfig] = None,
    protocol_adapters: Optional[list[ProtocolAdapter]] = None,
    custom_endpoints: Optional[
        List[Dict]
    ] = None,  # New parameter for custom endpoints
    # Celery parameters
    broker_url: Optional[str] = None,
    backend_url: Optional[str] = None,
    enable_embedded_worker: bool = False,
    **kwargs: Any,
) -> FastAPI:
    """Build a FastAPI application wired for the unified architecture.

    Args:
        func: Custom processing function.
        runner: Runner instance (used directly in DAEMON_THREAD mode).
        endpoint_path: Path of the main processing endpoint.
        request_model: Pydantic model used for request validation.
        response_type: Response type - "json", "sse", or "text".
        stream: Whether streaming responses are enabled.
        before_start: Callback invoked before the server starts.
        after_finish: Callback invoked after the server finishes.
        mode: Deployment mode of the application.
        services_config: Configuration of the backing services.
        protocol_adapters: Protocol adapters to mount at startup.
        custom_endpoints: List of custom endpoint configurations.
        broker_url: Celery broker URL.
        backend_url: Celery backend URL.
        enable_embedded_worker: Run an embedded Celery worker if True.
        **kwargs: Extra keyword arguments forwarded to the callbacks.

    Returns:
        The configured FastAPI application instance.
    """
    # Fall back to the default services configuration when none given.
    if services_config is None:
        services_config = DEFAULT_SERVICES_CONFIG

    # A Celery mixin is only created when both URLs are supplied; an
    # ImportError leaves it as None (fallback task processing).
    task_mixin = None
    if broker_url and backend_url:
        try:
            from ....app.celery_mixin import CeleryMixin

            task_mixin = CeleryMixin(
                broker_url=broker_url,
                backend_url=backend_url,
            )
        except ImportError:
            task_mixin = None

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        """Run the startup/shutdown hooks around the app's lifetime."""
        try:
            await FastAPIAppFactory._handle_startup(
                app,
                mode,
                services_config,
                runner,
                before_start,
                **kwargs,
            )
            yield
        finally:
            await FastAPIAppFactory._handle_shutdown(
                app,
                after_finish,
                **kwargs,
            )

    app = FastAPI(lifespan=lifespan)

    # Persist the configuration on the application state so the
    # startup handler and the endpoints can read it later.
    app.state.deployment_mode = mode
    app.state.services_config = services_config
    app.state.stream_enabled = stream
    app.state.response_type = response_type
    app.state.custom_func = func
    app.state.external_runner = runner
    app.state.endpoint_path = endpoint_path
    app.state.protocol_adapters = protocol_adapters  # mounted at startup
    app.state.custom_endpoints = custom_endpoints or []

    # Celery-related state.
    app.state.celery_mixin = task_mixin
    app.state.broker_url = broker_url
    app.state.backend_url = backend_url
    app.state.enable_embedded_worker = enable_embedded_worker

    FastAPIAppFactory._add_middleware(app, mode)
    FastAPIAppFactory._add_routes(
        app,
        endpoint_path,
        request_model,
        stream,
        mode,
    )

    # protocol_adapters are attached in _handle_startup, once the
    # runner is available.
    return app
|
|
156
|
+
|
|
157
|
+
@staticmethod
async def _handle_startup(
    app: FastAPI,
    mode: DeploymentMode,
    services_config: ServicesConfig,
    external_runner: Optional[Any],
    before_start: Optional[Callable],
    **kwargs,
):
    """Initialize the application: runner, user hook, adapters, worker.

    Args:
        app: The FastAPI application being started.
        mode: Deployment mode that decides runner ownership.
        services_config: Configuration for internally created services.
        external_runner: Caller-owned runner (DAEMON_THREAD mode only).
        before_start: Optional sync or async startup callback.
        **kwargs: Extra keyword arguments forwarded to the callback.
    """
    # Pick the runner according to the deployment mode.
    if mode == DeploymentMode.DAEMON_THREAD:
        # The runner lifecycle is owned by the caller.
        app.state.runner = external_runner
        app.state.runner_managed_externally = True
    elif mode in [
        DeploymentMode.DETACHED_PROCESS,
        DeploymentMode.STANDALONE,
    ]:
        # Build and own an internal runner.
        app.state.runner = await FastAPIAppFactory._create_internal_runner(
            services_config,
        )
        app.state.runner_managed_externally = False

    # User-supplied startup hook (sync or async).
    if before_start:
        if asyncio.iscoroutinefunction(before_start):
            await before_start(app, **kwargs)
        else:
            before_start(app, **kwargs)

    # Mount protocol adapter endpoints now that a runner may exist.
    adapters = getattr(app.state, "protocol_adapters", None)
    if adapters:
        custom_func = getattr(app.state, "custom_func", None)
        active_runner = getattr(app.state, "runner", None)
        if custom_func:
            effective_func = custom_func
        elif active_runner:
            # Prefer the streaming entry point when streaming is on.
            if getattr(app.state, "stream_enabled", None):
                effective_func = active_runner.stream_query
            else:
                effective_func = active_runner.query
        else:
            effective_func = None

        if effective_func:
            for protocol_adapter in adapters:
                protocol_adapter.add_endpoint(app=app, func=effective_func)

    # Register any user-declared endpoints.
    if getattr(app.state, "custom_endpoints", None):
        FastAPIAppFactory._add_custom_endpoints(app)

    # Optionally spin up an embedded Celery worker in the background.
    if getattr(app.state, "enable_embedded_worker", None) and getattr(
        app.state,
        "celery_mixin",
        None,
    ):
        import threading

        def start_celery_worker():
            try:
                celery_mixin = app.state.celery_mixin
                # Use registered queues, or fall back to the default.
                registered = celery_mixin.get_registered_queues()
                queues = list(registered) if registered else ["celery"]
                celery_mixin.run_task_processor(
                    loglevel="INFO",
                    concurrency=1,
                    queues=queues,
                )
            except Exception as e:
                import logging

                logger = logging.getLogger(__name__)
                logger.error(f"Failed to start Celery worker: {e}")

        worker_thread = threading.Thread(
            target=start_celery_worker,
            daemon=True,
        )
        worker_thread.start()
|
|
256
|
+
|
|
257
|
+
@staticmethod
async def _handle_shutdown(
    app: FastAPI,
    after_finish: Optional[Callable],
    **kwargs,
):
    """Tear the application down: user hook first, then runner cleanup.

    Args:
        app: The FastAPI application being stopped.
        after_finish: Optional sync or async shutdown callback.
        **kwargs: Extra keyword arguments forwarded to the callback.
    """
    # User-supplied shutdown hook (sync or async).
    if after_finish:
        if asyncio.iscoroutinefunction(after_finish):
            await after_finish(app, **kwargs)
        else:
            after_finish(app, **kwargs)

    # Only dispose of runners this factory created itself; externally
    # managed runners belong to the caller.
    owns_runner = (
        hasattr(app.state, "runner")
        and not app.state.runner_managed_externally
    )
    if owns_runner:
        active_runner = app.state.runner
        if active_runner:
            try:
                await active_runner.__aexit__(None, None, None)
            except Exception as e:
                print(f"Warning: Error during runner cleanup: {e}")
|
|
283
|
+
|
|
284
|
+
@staticmethod
async def _create_internal_runner(services_config: ServicesConfig):
    """Create, enter and return a runner backed by configured services.

    Args:
        services_config: Configuration used to build the services.

    Returns:
        An initialized runner (its ``__aenter__`` has already run);
        the agent is left unset and assigned by the deployment later.
    """
    # Imported lazily to avoid import cycles at module load time.
    from agentscope_runtime.engine import Runner
    from agentscope_runtime.engine.services.context_manager import (
        ContextManager,
    )

    backing_services = ServiceFactory.create_services_from_config(
        services_config,
    )

    manager = ContextManager(
        session_history_service=backing_services["session_history"],
        memory_service=backing_services["memory"],
    )

    internal_runner = Runner(
        agent=None,  # Will be set by the specific deployment
        context_manager=manager,
    )

    # Enter the runner's async context so it is ready for requests.
    await internal_runner.__aenter__()
    return internal_runner
|
|
311
|
+
|
|
312
|
+
@staticmethod
def _add_middleware(app: FastAPI, mode: DeploymentMode):
    """Attach CORS plus mode-specific HTTP middleware.

    Args:
        app: The FastAPI application to configure.
        mode: Deployment mode selecting the extra middleware.
    """
    # Every deployment mode allows cross-origin traffic.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    if mode == DeploymentMode.DETACHED_PROCESS:

        @app.middleware("http")
        async def process_middleware(request: Request, call_next):
            """Tag every response as coming from a detached process."""
            response = await call_next(request)
            response.headers["X-Process-Mode"] = "detached"
            return response

    elif mode == DeploymentMode.STANDALONE:

        @app.middleware("http")
        async def config_middleware(request: Request, call_next):
            """Tag every response with the standalone deployment mode."""
            response = await call_next(request)
            response.headers["X-Deployment-Mode"] = "standalone"
            return response
|
|
342
|
+
|
|
343
|
+
@staticmethod
def _add_routes(
    app: FastAPI,
    endpoint_path: str,
    request_model: Optional[Type],
    stream_enabled: bool,
    mode: DeploymentMode,
):
    """Register the health, processing and root routes.

    Args:
        app: The FastAPI application to configure.
        endpoint_path: Path of the main processing endpoint.
        request_model: Request validation model (currently unused here).
        stream_enabled: Whether streaming was requested.
        mode: Deployment mode selecting extra endpoint groups.
    """

    @app.get("/health")
    async def health_check():
        """Health check endpoint."""
        report = {"status": "healthy", "mode": mode.value}
        # The runner is considered ready once it is set on the state.
        if getattr(app.state, "runner", None):
            report["runner"] = "ready"
        else:
            report["runner"] = "not_ready"
        return report

    # The main processing endpoint always responds with SSE frames.
    @app.post(endpoint_path)
    async def stream_endpoint(request: dict):
        """Streaming endpoint."""
        return StreamingResponse(
            FastAPIAppFactory._create_stream_generator(app, request),
            media_type="text/event-stream",
            headers={
                "Cache-Control": "no-cache",
                "Connection": "keep-alive",
            },
        )

    @app.get("/")
    async def root():
        """Root endpoint describing the service."""
        # NOTE(review): "{endpoint_path}/stream" is advertised below but
        # no such route is registered in this factory — the streaming
        # route actually lives at endpoint_path itself. Confirm which
        # side should change before altering the payload.
        return {
            "service": "AgentScope Runtime",
            "mode": mode.value,
            "endpoints": {
                "process": endpoint_path,
                "stream": f"{endpoint_path}/stream"
                if stream_enabled
                else None,
                "health": "/health",
            },
        }

    # Extra endpoint groups depending on the deployment mode.
    if mode == DeploymentMode.DETACHED_PROCESS:
        FastAPIAppFactory._add_process_control_endpoints(app)
    elif mode == DeploymentMode.STANDALONE:
        FastAPIAppFactory._add_configuration_endpoints(app)
|
|
413
|
+
|
|
414
|
+
@staticmethod
def _add_process_control_endpoints(app: FastAPI):
    """Add process control endpoints for detached mode.

    Registers:
        POST /admin/shutdown: schedules a SIGTERM to this process about
            one second after the response is sent.
        GET /admin/status: reports pid, status, memory, CPU and start
            time of the current process (requires psutil).

    Args:
        app: The FastAPI application to configure.
    """

    @app.post("/admin/shutdown")
    async def shutdown_process():
        """Gracefully shutdown the process."""
        # Import here to avoid circular imports
        import os
        import signal

        # Schedule shutdown after the response has been delivered.
        async def delayed_shutdown():
            await asyncio.sleep(1)
            os.kill(os.getpid(), signal.SIGTERM)

        # Fix: keep a strong reference to the task. The event loop only
        # holds weak references to tasks, so a fire-and-forget
        # create_task() result can be garbage collected before the
        # delayed shutdown ever runs.
        app.state.shutdown_task = asyncio.create_task(delayed_shutdown())
        return {"message": "Shutdown initiated"}

    @app.get("/admin/status")
    async def get_process_status():
        """Get process status information."""
        import os
        import psutil

        process = psutil.Process(os.getpid())
        return {
            "pid": os.getpid(),
            "status": process.status(),
            "memory_usage": process.memory_info().rss,
            "cpu_percent": process.cpu_percent(),
            # create_time() is the process start timestamp, not a
            # duration — callers compute uptime from it.
            "uptime": process.create_time(),
        }
|
|
447
|
+
|
|
448
|
+
@staticmethod
def _add_configuration_endpoints(app: FastAPI):
    """Add read-only configuration endpoints for standalone mode.

    Args:
        app: The FastAPI application to configure.
    """

    @app.get("/config")
    async def get_configuration():
        """Get current service configuration."""
        return {
            "services_config": app.state.services_config.model_dump(),
            "deployment_mode": app.state.deployment_mode.value,
            "stream_enabled": app.state.stream_enabled,
        }

    @app.get("/config/services")
    async def get_services_status():
        """Get services status."""
        status = {}
        active_runner = getattr(app.state, "runner", None)
        if active_runner and hasattr(active_runner, "context_manager"):
            manager = active_runner.context_manager
            status["memory_service"] = (
                "connected" if manager.memory_service else "disconnected"
            )
            status["session_history_service"] = (
                "connected"
                if manager.session_history_service
                else "disconnected"
            )

        return {"services": status}
|
|
479
|
+
|
|
480
|
+
@staticmethod
async def _handle_request(
    app: FastAPI,
    request: dict,
    stream_enabled: bool,
):
    """Handle a standard (non-SSE) request.

    Dispatches to the custom function when one is configured, otherwise
    to the runner (collecting the stream when streaming is enabled).

    Fix: the runner-readiness check used to gate the custom-function
    path as well, so a deployment configured with only ``custom_func``
    and no runner answered 503 to every request even though the runner
    was never used. The check now applies only to the runner path.

    Args:
        app: The FastAPI application holding the state.
        request: The parsed request payload.
        stream_enabled: Whether to collect a streamed response.

    Returns:
        A ``{"response": ...}`` dict on success, or a JSONResponse with
        status 503/500 on failure.
    """
    try:
        if app.state.custom_func:
            # Custom functions do not need the runner at all.
            result = await FastAPIAppFactory._call_custom_function(
                app.state.custom_func,
                request,
            )
            return {"response": result}

        # Runner-backed path: the runner must be initialized.
        runner = FastAPIAppFactory._get_runner_instance(app)
        if not runner:
            return JSONResponse(
                status_code=503,
                content={
                    "error": "Service not ready",
                    "message": "Runner not initialized",
                },
            )

        if stream_enabled:
            # Collect the streamed chunks into a single response.
            result = await FastAPIAppFactory._collect_stream_response(
                runner,
                request,
            )
        else:
            # Direct, non-streaming query.
            result = await runner.query(request)
        return {"response": result}

    except Exception as e:
        return JSONResponse(
            status_code=500,
            content={"error": "Internal server error", "message": str(e)},
        )
|
|
526
|
+
|
|
527
|
+
@staticmethod
async def _create_stream_generator(app: FastAPI, request: dict):
    """Yield SSE ``data:`` frames for a request.

    Uses the custom function (single frame) when configured, otherwise
    streams chunks from the runner, serializing each chunk with
    ``model_dump_json`` / ``json`` when available.

    Fix: the missing-runner check used to run before the custom-function
    branch, so a deployment with only ``custom_func`` and no runner
    produced a "Runner not initialized" error frame even though the
    runner is not needed on that path. The check now applies only to
    the runner-backed branch.

    Args:
        app: The FastAPI application holding the state.
        request: The parsed request payload.

    Yields:
        SSE frame strings; errors are reported as a final error frame.
    """
    try:
        if app.state.custom_func:
            # Custom function: emit the whole result as one frame.
            result = await FastAPIAppFactory._call_custom_function(
                app.state.custom_func,
                request,
            )
            yield f"data: {json.dumps({'text': str(result)})}\n\n"
            return

        runner = FastAPIAppFactory._get_runner_instance(app)
        if not runner:
            yield (
                f"data: {json.dumps({'error': 'Runner not initialized'})}"
                f"\n\n"
            )
            return

        # Runner streaming: prefer structured serialization per chunk.
        async for chunk in runner.stream_query(request):
            if hasattr(chunk, "model_dump_json"):
                yield f"data: {chunk.model_dump_json()}\n\n"
            elif hasattr(chunk, "json"):
                yield f"data: {chunk.json()}\n\n"
            else:
                yield f"data: {json.dumps({'text': str(chunk)})}\n\n"

    except Exception as e:
        yield f"data: {json.dumps({'error': str(e)})}\n\n"
|
|
558
|
+
|
|
559
|
+
@staticmethod
async def _collect_stream_response(runner, request: dict) -> str:
    """Concatenate every chunk of a streamed query into one string.

    Args:
        runner: Object exposing an async-generator ``stream_query``.
        request: The parsed request payload.

    Returns:
        The chunks' ``text`` attributes (or ``str()`` fallbacks) joined
        in arrival order.
    """
    pieces = [
        chunk.text if hasattr(chunk, "text") else str(chunk)
        async for chunk in runner.stream_query(request)
    ]
    return "".join(pieces)
|
|
569
|
+
|
|
570
|
+
@staticmethod
async def _call_custom_function(func: Callable, request: dict):
    """Invoke *func* with the factory's standard calling convention.

    Args:
        func: Sync or async callable accepting ``user_id``, ``request``
            and ``request_id`` keyword arguments.
        request: The parsed request payload to pass through.

    Returns:
        Whatever *func* returns (awaited when it is a coroutine).
    """
    call_kwargs = {
        "user_id": "default",
        "request": request,
        "request_id": "generated",
    }
    if asyncio.iscoroutinefunction(func):
        return await func(**call_kwargs)
    return func(**call_kwargs)
|
|
585
|
+
|
|
586
|
+
@staticmethod
def _get_runner_instance(app: FastAPI):
    """Return the runner stored on the app state, or None when absent."""
    return getattr(app.state, "runner", None)
|
|
592
|
+
|
|
593
|
+
@staticmethod
def _create_parameter_wrapper(handler: Callable):
    """Wrap *handler* so its first parameter is parsed from the body.

    Inspects the handler's signature. When the first parameter is
    annotated with a Pydantic model, returns an async wrapper that
    reads the JSON body, validates it into that model and forwards it
    (answering 422 on parse failure). In every other case — no
    parameters, no annotation, a ``Request`` annotation, a non-model
    type, or any introspection failure — the handler is returned
    unchanged.
    """
    try:
        parameters = list(inspect.signature(handler).parameters.values())
        if not parameters:
            # Nothing to parse; expose the handler as-is.
            return handler

        # Only the first parameter is considered for now.
        annotation = parameters[0].annotation

        if annotation in [inspect.Parameter.empty, Request]:
            # FastAPI can inject the raw Request directly.
            return handler

        is_model = isinstance(annotation, type) and issubclass(
            annotation,
            BaseModel,
        )
        if not is_model:
            # Other parameter types: fall back to original behavior.
            return handler

        if inspect.iscoroutinefunction(handler):

            async def async_pydantic_wrapper(request: Request):
                try:
                    payload = await request.json()
                    return await handler(annotation(**payload))
                except Exception as e:
                    return JSONResponse(
                        status_code=422,
                        content={
                            "detail": f"Request parsing error: {str(e)}",
                        },
                    )

            return async_pydantic_wrapper

        async def sync_pydantic_wrapper(request: Request):
            try:
                payload = await request.json()
                return handler(annotation(**payload))
            except Exception as e:
                return JSONResponse(
                    status_code=422,
                    content={
                        "detail": f"Request parsing error: {str(e)}",
                    },
                )

        return sync_pydantic_wrapper

    except Exception:
        # Introspection failed; fall back to the raw handler.
        return handler
|
|
666
|
+
|
|
667
|
+
@staticmethod
def _create_streaming_parameter_wrapper(
    handler: Callable,
    is_async_gen: bool = False,
):
    """Wrap a streaming (generator) handler with request-parameter parsing.

    Inspects ``handler``'s first parameter and builds an endpoint:

    * no parameters, an un-annotated parameter, or a ``Request``
      annotation -> the handler is invoked with no arguments;
    * a Pydantic ``BaseModel`` annotation -> the request JSON body is
      parsed into that model and passed as the single argument.

    Either way, the chunks the handler yields are stringified and sent
    back as a ``text/plain`` ``StreamingResponse``.

    Args:
        handler: Sync or async generator function producing chunks.
        is_async_gen: True when ``handler`` is an async generator.

    Returns:
        An async endpoint callable suitable for ``add_api_route``, or
        the original ``handler`` unchanged when no wrapping applies
        (unsupported annotation, or introspection failure).
    """
    try:
        sig = inspect.signature(handler)
        params = list(sig.parameters.values())

        no_params = not params
        param_annotation = None
        if params:
            param_annotation = params[0].annotation
            # A missing annotation or a raw Request is treated the same
            # as "no parameters": the handler is called with no args.
            if param_annotation in (inspect.Parameter.empty, Request):
                no_params = True

        if no_params:
            if is_async_gen:

                async def async_no_param_wrapper():
                    async def generate():
                        async for chunk in handler():
                            yield str(chunk)

                    return StreamingResponse(
                        generate(),
                        media_type="text/plain",
                    )

                return async_no_param_wrapper

            async def sync_no_param_wrapper():
                def generate():
                    for chunk in handler():
                        yield str(chunk)

                return StreamingResponse(
                    generate(),
                    media_type="text/plain",
                )

            return sync_no_param_wrapper

        # First parameter is a Pydantic model: parse the JSON body.
        if isinstance(param_annotation, type) and issubclass(
            param_annotation,
            BaseModel,
        ):
            if is_async_gen:

                async def async_stream_pydantic_wrapper(
                    request: Request,
                ):
                    # Parse BEFORE streaming starts so a bad body still
                    # yields a proper 422. (Previously this path sent a
                    # 200 SSE error stream, inconsistent with the sync
                    # and non-streaming wrappers.)
                    try:
                        body = await request.json()
                        parsed_param = param_annotation(**body)
                    except Exception as e:
                        return JSONResponse(
                            status_code=422,
                            content={
                                "detail": f"Request parsing error: "
                                f"{str(e)}",
                            },
                        )

                    async def generate():
                        async for chunk in handler(parsed_param):
                            yield str(chunk)

                    return StreamingResponse(
                        generate(),
                        media_type="text/plain",
                    )

                return async_stream_pydantic_wrapper

            async def sync_stream_pydantic_wrapper(
                request: Request,
            ):
                try:
                    body = await request.json()
                    parsed_param = param_annotation(**body)
                except Exception as e:
                    return JSONResponse(
                        status_code=422,
                        content={
                            "detail": f"Request parsing error:"
                            f" {str(e)}",
                        },
                    )

                def generate():
                    for chunk in handler(parsed_param):
                        yield str(chunk)

                return StreamingResponse(
                    generate(),
                    media_type="text/plain",
                )

            return sync_stream_pydantic_wrapper

        # Unsupported annotation: register the handler untouched.
        return handler

    except Exception:
        # Introspection failed; fall back to the original handler.
        return handler
|
|
781
|
+
|
|
782
|
+
@staticmethod
def _add_custom_endpoints(app: FastAPI):
    """Register every endpoint listed in ``app.state.custom_endpoints``."""
    # getattr with a None default covers both "attribute missing" and
    # "attribute empty/falsy" in one guard clause.
    endpoints = getattr(app.state, "custom_endpoints", None)
    if not endpoints:
        return

    for ep in endpoints:
        FastAPIAppFactory._register_single_custom_endpoint(
            app,
            ep["path"],
            ep["handler"],
            ep["methods"],
            ep,  # full endpoint config (task_type, queue, ...)
        )
|
|
799
|
+
|
|
800
|
+
@staticmethod
def _register_single_custom_endpoint(
    app: FastAPI,
    path: str,
    handler: Callable,
    methods: List[str],
    endpoint_config: Dict = None,
):
    """Register one custom endpoint with proper async/sync handling.

    Task endpoints (``endpoint_config["task_type"]`` set) get an async
    submission route plus a companion ``GET {path}/{task_id}`` status
    route. Other handlers are wrapped according to their kind: async
    generator / sync generator -> streaming wrapper; plain callables ->
    parameter-parsing wrapper.

    Args:
        app: Application to register routes on.
        path: URL path of the endpoint.
        handler: User-supplied handler function.
        methods: HTTP methods to expose the endpoint under.
        endpoint_config: Full endpoint config dict (may carry
            ``task_type`` and ``queue``).
    """
    if endpoint_config and endpoint_config.get("task_type"):
        # Build the task handler once and reuse it for every method.
        task_handler = FastAPIAppFactory._create_task_handler(
            app,
            handler,
            endpoint_config.get("queue", "default"),
        )
        for method in methods:
            app.add_api_route(path, task_handler, methods=[method])

        # Status endpoint (aligned with the BaseApp pattern). Registered
        # exactly once -- previously this was re-registered inside the
        # per-method loop, duplicating the route when len(methods) > 1.
        status_path = f"{path}/{{task_id}}"
        status_handler = FastAPIAppFactory._create_task_status_handler(
            app,
        )
        app.add_api_route(
            status_path,
            status_handler,
            methods=["GET"],
        )
        return

    # Regular endpoint: pick the wrapper by handler kind, checked in
    # the correct order (async gen > sync gen > plain callable), and
    # build it once instead of once per HTTP method.
    if inspect.isasyncgenfunction(handler):
        # Async generator -> streaming response with parameter parsing.
        wrapped_handler = (
            FastAPIAppFactory._create_streaming_parameter_wrapper(
                handler,
                is_async_gen=True,
            )
        )
    elif inspect.isgeneratorfunction(handler):
        # Sync generator -> streaming response with parameter parsing.
        wrapped_handler = (
            FastAPIAppFactory._create_streaming_parameter_wrapper(
                handler,
                is_async_gen=False,
            )
        )
    else:
        # Sync/async function -> async wrapper with parameter parsing.
        wrapped_handler = (
            FastAPIAppFactory._create_parameter_wrapper(handler)
        )

    for method in methods:
        app.add_api_route(path, wrapped_handler, methods=[method])
|
|
872
|
+
|
|
873
|
+
@staticmethod
def _create_task_handler(app: FastAPI, task_func: Callable, queue: str):
    """Create an async endpoint that submits ``task_func`` as a task.

    Prefers Celery when ``app.state.celery_mixin`` is configured;
    otherwise falls back to in-process execution tracked through
    ``app.state.active_tasks``.

    Args:
        app: Application whose state carries the task bookkeeping.
        task_func: Function executed for each submission.
        queue: Queue name the task is submitted to.

    Returns:
        An async endpoint taking the request payload as a dict and
        returning a submission receipt (task_id/status/queue/message),
        or an error dict with ``status: "failed"`` on submission error.
    """
    import time
    import uuid

    async def task_endpoint(request: dict):
        try:
            # Generate task ID (used only by the in-memory fallback;
            # Celery supplies its own result id).
            task_id = str(uuid.uuid4())

            celery_mixin = getattr(app.state, "celery_mixin", None)
            if celery_mixin:
                # Lazily register the function as a Celery task once.
                if not hasattr(task_func, "celery_task"):
                    task_func.celery_task = (
                        celery_mixin.register_celery_task(
                            task_func,
                            queue,
                        )
                    )

                # Submit task to Celery.
                result = celery_mixin.submit_task(task_func, request)
                return {
                    "task_id": result.id,
                    "status": "submitted",
                    "queue": queue,
                    "message": f"Task {result.id} submitted to Celery "
                    f"queue {queue}",
                }

            # Fallback: in-memory task processing.
            if not hasattr(app.state, "active_tasks"):
                app.state.active_tasks = {}
            if not hasattr(app.state, "background_task_refs"):
                app.state.background_task_refs = set()

            app.state.active_tasks[task_id] = {
                "task_id": task_id,
                "status": "submitted",
                "queue": queue,
                "submitted_at": time.time(),
                "request": request,
            }

            # Execute asynchronously in the background. Keep a strong
            # reference to the task: the event loop only holds weak
            # references to scheduled tasks, so a fire-and-forget
            # create_task() result may be garbage-collected mid-run.
            task = asyncio.create_task(
                FastAPIAppFactory._execute_background_task(
                    app,
                    task_id,
                    task_func,
                    request,
                    queue,
                ),
            )
            app.state.background_task_refs.add(task)
            task.add_done_callback(
                app.state.background_task_refs.discard,
            )

            return {
                "task_id": task_id,
                "status": "submitted",
                "queue": queue,
                "message": f"Task {task_id} submitted to queue "
                f"{queue}",
            }

        except Exception as e:
            return {
                "error": str(e),
                "type": "task",
                "queue": queue,
                "status": "failed",
            }

    return task_endpoint
|
|
957
|
+
|
|
958
|
+
@staticmethod
async def _execute_background_task(
    app: FastAPI,
    task_id: str,
    func: Callable,
    request: dict,
    queue: str,
):
    """Run ``func`` for a submitted task and record its lifecycle.

    Moves the matching ``app.state.active_tasks`` entry through the
    ``running`` -> ``completed``/``failed`` transitions, storing the
    result (or error text) and timestamps along the way.

    Args:
        app: Application whose state holds the task registry.
        task_id: Key of the entry in ``active_tasks``.
        func: Sync or async callable invoked with ``request``.
        request: Payload forwarded to ``func``.
        queue: Queue name (kept for interface parity; unused here).
    """
    import time
    import concurrent.futures

    def _update(changes: dict):
        # Only touch the registry when the entry still exists.
        if (
            hasattr(app.state, "active_tasks")
            and task_id in app.state.active_tasks
        ):
            app.state.active_tasks[task_id].update(changes)

    try:
        _update({"status": "running", "started_at": time.time()})

        # Execute the actual task function.
        if asyncio.iscoroutinefunction(func):
            result = await func(request)
        else:
            # Off-load sync work so the event loop stays responsive.
            # get_running_loop() is the supported call inside a
            # coroutine; get_event_loop() is deprecated here.
            with concurrent.futures.ThreadPoolExecutor() as executor:
                loop = asyncio.get_running_loop()
                result = await loop.run_in_executor(
                    executor,
                    func,
                    request,
                )

        _update(
            {
                "status": "completed",
                "result": result,
                "completed_at": time.time(),
            },
        )

    except Exception as e:
        _update(
            {
                "status": "failed",
                "error": str(e),
                "failed_at": time.time(),
            },
        )
|
|
1021
|
+
|
|
1022
|
+
@staticmethod
def _create_task_status_handler(app: FastAPI):
    """Build the GET endpoint used to poll a submitted task's status."""

    async def task_status_handler(task_id: str):
        if not task_id:
            return {"error": "task_id required"}

        # Delegate entirely to Celery when it is configured.
        celery_mixin = getattr(app.state, "celery_mixin", None)
        if celery_mixin:
            return celery_mixin.get_task_status(task_id)

        # In-memory fallback.
        registry = getattr(app.state, "active_tasks", None)
        if not registry or task_id not in registry:
            return {"error": f"Task {task_id} not found"}

        info = registry[task_id]
        internal_status = info.get("status", "unknown")

        # Translate internal states to the BaseApp.get_task contract.
        if internal_status in ("submitted", "running"):
            return {"status": "pending", "result": None}
        if internal_status == "completed":
            return {
                "status": "finished",
                "result": info.get("result"),
            }
        if internal_status == "failed":
            return {
                "status": "error",
                "result": info.get("error", "Unknown error"),
            }
        return {"status": internal_status, "result": None}

    return task_status_handler
|