PraisonAI 3.0.0 (praisonai-3.0.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- praisonai/__init__.py +54 -0
- praisonai/__main__.py +15 -0
- praisonai/acp/__init__.py +54 -0
- praisonai/acp/config.py +159 -0
- praisonai/acp/server.py +587 -0
- praisonai/acp/session.py +219 -0
- praisonai/adapters/__init__.py +50 -0
- praisonai/adapters/readers.py +395 -0
- praisonai/adapters/rerankers.py +315 -0
- praisonai/adapters/retrievers.py +394 -0
- praisonai/adapters/vector_stores.py +409 -0
- praisonai/agent_scheduler.py +337 -0
- praisonai/agents_generator.py +903 -0
- praisonai/api/call.py +292 -0
- praisonai/auto.py +1197 -0
- praisonai/capabilities/__init__.py +275 -0
- praisonai/capabilities/a2a.py +140 -0
- praisonai/capabilities/assistants.py +283 -0
- praisonai/capabilities/audio.py +320 -0
- praisonai/capabilities/batches.py +469 -0
- praisonai/capabilities/completions.py +336 -0
- praisonai/capabilities/container_files.py +155 -0
- praisonai/capabilities/containers.py +93 -0
- praisonai/capabilities/embeddings.py +158 -0
- praisonai/capabilities/files.py +467 -0
- praisonai/capabilities/fine_tuning.py +293 -0
- praisonai/capabilities/guardrails.py +182 -0
- praisonai/capabilities/images.py +330 -0
- praisonai/capabilities/mcp.py +190 -0
- praisonai/capabilities/messages.py +270 -0
- praisonai/capabilities/moderations.py +154 -0
- praisonai/capabilities/ocr.py +217 -0
- praisonai/capabilities/passthrough.py +204 -0
- praisonai/capabilities/rag.py +207 -0
- praisonai/capabilities/realtime.py +160 -0
- praisonai/capabilities/rerank.py +165 -0
- praisonai/capabilities/responses.py +266 -0
- praisonai/capabilities/search.py +109 -0
- praisonai/capabilities/skills.py +133 -0
- praisonai/capabilities/vector_store_files.py +334 -0
- praisonai/capabilities/vector_stores.py +304 -0
- praisonai/capabilities/videos.py +141 -0
- praisonai/chainlit_ui.py +304 -0
- praisonai/chat/__init__.py +106 -0
- praisonai/chat/app.py +125 -0
- praisonai/cli/__init__.py +26 -0
- praisonai/cli/app.py +213 -0
- praisonai/cli/commands/__init__.py +75 -0
- praisonai/cli/commands/acp.py +70 -0
- praisonai/cli/commands/completion.py +333 -0
- praisonai/cli/commands/config.py +166 -0
- praisonai/cli/commands/debug.py +142 -0
- praisonai/cli/commands/diag.py +55 -0
- praisonai/cli/commands/doctor.py +166 -0
- praisonai/cli/commands/environment.py +179 -0
- praisonai/cli/commands/lsp.py +112 -0
- praisonai/cli/commands/mcp.py +210 -0
- praisonai/cli/commands/profile.py +457 -0
- praisonai/cli/commands/run.py +228 -0
- praisonai/cli/commands/schedule.py +150 -0
- praisonai/cli/commands/serve.py +97 -0
- praisonai/cli/commands/session.py +212 -0
- praisonai/cli/commands/traces.py +145 -0
- praisonai/cli/commands/version.py +101 -0
- praisonai/cli/configuration/__init__.py +18 -0
- praisonai/cli/configuration/loader.py +353 -0
- praisonai/cli/configuration/paths.py +114 -0
- praisonai/cli/configuration/schema.py +164 -0
- praisonai/cli/features/__init__.py +268 -0
- praisonai/cli/features/acp.py +236 -0
- praisonai/cli/features/action_orchestrator.py +546 -0
- praisonai/cli/features/agent_scheduler.py +773 -0
- praisonai/cli/features/agent_tools.py +474 -0
- praisonai/cli/features/agents.py +375 -0
- praisonai/cli/features/at_mentions.py +471 -0
- praisonai/cli/features/auto_memory.py +182 -0
- praisonai/cli/features/autonomy_mode.py +490 -0
- praisonai/cli/features/background.py +356 -0
- praisonai/cli/features/base.py +168 -0
- praisonai/cli/features/capabilities.py +1326 -0
- praisonai/cli/features/checkpoints.py +338 -0
- praisonai/cli/features/code_intelligence.py +652 -0
- praisonai/cli/features/compaction.py +294 -0
- praisonai/cli/features/compare.py +534 -0
- praisonai/cli/features/cost_tracker.py +514 -0
- praisonai/cli/features/debug.py +810 -0
- praisonai/cli/features/deploy.py +517 -0
- praisonai/cli/features/diag.py +289 -0
- praisonai/cli/features/doctor/__init__.py +63 -0
- praisonai/cli/features/doctor/checks/__init__.py +24 -0
- praisonai/cli/features/doctor/checks/acp_checks.py +240 -0
- praisonai/cli/features/doctor/checks/config_checks.py +366 -0
- praisonai/cli/features/doctor/checks/db_checks.py +366 -0
- praisonai/cli/features/doctor/checks/env_checks.py +543 -0
- praisonai/cli/features/doctor/checks/lsp_checks.py +199 -0
- praisonai/cli/features/doctor/checks/mcp_checks.py +349 -0
- praisonai/cli/features/doctor/checks/memory_checks.py +268 -0
- praisonai/cli/features/doctor/checks/network_checks.py +251 -0
- praisonai/cli/features/doctor/checks/obs_checks.py +328 -0
- praisonai/cli/features/doctor/checks/performance_checks.py +235 -0
- praisonai/cli/features/doctor/checks/permissions_checks.py +259 -0
- praisonai/cli/features/doctor/checks/selftest_checks.py +322 -0
- praisonai/cli/features/doctor/checks/serve_checks.py +426 -0
- praisonai/cli/features/doctor/checks/skills_checks.py +231 -0
- praisonai/cli/features/doctor/checks/tools_checks.py +371 -0
- praisonai/cli/features/doctor/engine.py +266 -0
- praisonai/cli/features/doctor/formatters.py +310 -0
- praisonai/cli/features/doctor/handler.py +397 -0
- praisonai/cli/features/doctor/models.py +264 -0
- praisonai/cli/features/doctor/registry.py +239 -0
- praisonai/cli/features/endpoints.py +1019 -0
- praisonai/cli/features/eval.py +560 -0
- praisonai/cli/features/external_agents.py +231 -0
- praisonai/cli/features/fast_context.py +410 -0
- praisonai/cli/features/flow_display.py +566 -0
- praisonai/cli/features/git_integration.py +651 -0
- praisonai/cli/features/guardrail.py +171 -0
- praisonai/cli/features/handoff.py +185 -0
- praisonai/cli/features/hooks.py +583 -0
- praisonai/cli/features/image.py +384 -0
- praisonai/cli/features/interactive_runtime.py +585 -0
- praisonai/cli/features/interactive_tools.py +380 -0
- praisonai/cli/features/interactive_tui.py +603 -0
- praisonai/cli/features/jobs.py +632 -0
- praisonai/cli/features/knowledge.py +531 -0
- praisonai/cli/features/lite.py +244 -0
- praisonai/cli/features/lsp_cli.py +225 -0
- praisonai/cli/features/mcp.py +169 -0
- praisonai/cli/features/message_queue.py +587 -0
- praisonai/cli/features/metrics.py +211 -0
- praisonai/cli/features/n8n.py +673 -0
- praisonai/cli/features/observability.py +293 -0
- praisonai/cli/features/ollama.py +361 -0
- praisonai/cli/features/output_style.py +273 -0
- praisonai/cli/features/package.py +631 -0
- praisonai/cli/features/performance.py +308 -0
- praisonai/cli/features/persistence.py +636 -0
- praisonai/cli/features/profile.py +226 -0
- praisonai/cli/features/profiler/__init__.py +81 -0
- praisonai/cli/features/profiler/core.py +558 -0
- praisonai/cli/features/profiler/optimizations.py +652 -0
- praisonai/cli/features/profiler/suite.py +386 -0
- praisonai/cli/features/profiling.py +350 -0
- praisonai/cli/features/queue/__init__.py +73 -0
- praisonai/cli/features/queue/manager.py +395 -0
- praisonai/cli/features/queue/models.py +286 -0
- praisonai/cli/features/queue/persistence.py +564 -0
- praisonai/cli/features/queue/scheduler.py +484 -0
- praisonai/cli/features/queue/worker.py +372 -0
- praisonai/cli/features/recipe.py +1723 -0
- praisonai/cli/features/recipes.py +449 -0
- praisonai/cli/features/registry.py +229 -0
- praisonai/cli/features/repo_map.py +860 -0
- praisonai/cli/features/router.py +466 -0
- praisonai/cli/features/sandbox_executor.py +515 -0
- praisonai/cli/features/serve.py +829 -0
- praisonai/cli/features/session.py +222 -0
- praisonai/cli/features/skills.py +856 -0
- praisonai/cli/features/slash_commands.py +650 -0
- praisonai/cli/features/telemetry.py +179 -0
- praisonai/cli/features/templates.py +1384 -0
- praisonai/cli/features/thinking.py +305 -0
- praisonai/cli/features/todo.py +334 -0
- praisonai/cli/features/tools.py +680 -0
- praisonai/cli/features/tui/__init__.py +83 -0
- praisonai/cli/features/tui/app.py +580 -0
- praisonai/cli/features/tui/cli.py +566 -0
- praisonai/cli/features/tui/debug.py +511 -0
- praisonai/cli/features/tui/events.py +99 -0
- praisonai/cli/features/tui/mock_provider.py +328 -0
- praisonai/cli/features/tui/orchestrator.py +652 -0
- praisonai/cli/features/tui/screens/__init__.py +50 -0
- praisonai/cli/features/tui/screens/main.py +245 -0
- praisonai/cli/features/tui/screens/queue.py +174 -0
- praisonai/cli/features/tui/screens/session.py +124 -0
- praisonai/cli/features/tui/screens/settings.py +148 -0
- praisonai/cli/features/tui/widgets/__init__.py +56 -0
- praisonai/cli/features/tui/widgets/chat.py +261 -0
- praisonai/cli/features/tui/widgets/composer.py +224 -0
- praisonai/cli/features/tui/widgets/queue_panel.py +200 -0
- praisonai/cli/features/tui/widgets/status.py +167 -0
- praisonai/cli/features/tui/widgets/tool_panel.py +248 -0
- praisonai/cli/features/workflow.py +720 -0
- praisonai/cli/legacy.py +236 -0
- praisonai/cli/main.py +5559 -0
- praisonai/cli/schedule_cli.py +54 -0
- praisonai/cli/state/__init__.py +31 -0
- praisonai/cli/state/identifiers.py +161 -0
- praisonai/cli/state/sessions.py +313 -0
- praisonai/code/__init__.py +93 -0
- praisonai/code/agent_tools.py +344 -0
- praisonai/code/diff/__init__.py +21 -0
- praisonai/code/diff/diff_strategy.py +432 -0
- praisonai/code/tools/__init__.py +27 -0
- praisonai/code/tools/apply_diff.py +221 -0
- praisonai/code/tools/execute_command.py +275 -0
- praisonai/code/tools/list_files.py +274 -0
- praisonai/code/tools/read_file.py +206 -0
- praisonai/code/tools/search_replace.py +248 -0
- praisonai/code/tools/write_file.py +217 -0
- praisonai/code/utils/__init__.py +46 -0
- praisonai/code/utils/file_utils.py +307 -0
- praisonai/code/utils/ignore_utils.py +308 -0
- praisonai/code/utils/text_utils.py +276 -0
- praisonai/db/__init__.py +64 -0
- praisonai/db/adapter.py +531 -0
- praisonai/deploy/__init__.py +62 -0
- praisonai/deploy/api.py +231 -0
- praisonai/deploy/docker.py +454 -0
- praisonai/deploy/doctor.py +367 -0
- praisonai/deploy/main.py +327 -0
- praisonai/deploy/models.py +179 -0
- praisonai/deploy/providers/__init__.py +33 -0
- praisonai/deploy/providers/aws.py +331 -0
- praisonai/deploy/providers/azure.py +358 -0
- praisonai/deploy/providers/base.py +101 -0
- praisonai/deploy/providers/gcp.py +314 -0
- praisonai/deploy/schema.py +208 -0
- praisonai/deploy.py +185 -0
- praisonai/endpoints/__init__.py +53 -0
- praisonai/endpoints/a2u_server.py +410 -0
- praisonai/endpoints/discovery.py +165 -0
- praisonai/endpoints/providers/__init__.py +28 -0
- praisonai/endpoints/providers/a2a.py +253 -0
- praisonai/endpoints/providers/a2u.py +208 -0
- praisonai/endpoints/providers/agents_api.py +171 -0
- praisonai/endpoints/providers/base.py +231 -0
- praisonai/endpoints/providers/mcp.py +263 -0
- praisonai/endpoints/providers/recipe.py +206 -0
- praisonai/endpoints/providers/tools_mcp.py +150 -0
- praisonai/endpoints/registry.py +131 -0
- praisonai/endpoints/server.py +161 -0
- praisonai/inbuilt_tools/__init__.py +24 -0
- praisonai/inbuilt_tools/autogen_tools.py +117 -0
- praisonai/inc/__init__.py +2 -0
- praisonai/inc/config.py +96 -0
- praisonai/inc/models.py +155 -0
- praisonai/integrations/__init__.py +56 -0
- praisonai/integrations/base.py +303 -0
- praisonai/integrations/claude_code.py +270 -0
- praisonai/integrations/codex_cli.py +255 -0
- praisonai/integrations/cursor_cli.py +195 -0
- praisonai/integrations/gemini_cli.py +222 -0
- praisonai/jobs/__init__.py +67 -0
- praisonai/jobs/executor.py +425 -0
- praisonai/jobs/models.py +230 -0
- praisonai/jobs/router.py +314 -0
- praisonai/jobs/server.py +186 -0
- praisonai/jobs/store.py +203 -0
- praisonai/llm/__init__.py +66 -0
- praisonai/llm/registry.py +382 -0
- praisonai/mcp_server/__init__.py +152 -0
- praisonai/mcp_server/adapters/__init__.py +74 -0
- praisonai/mcp_server/adapters/agents.py +128 -0
- praisonai/mcp_server/adapters/capabilities.py +168 -0
- praisonai/mcp_server/adapters/cli_tools.py +568 -0
- praisonai/mcp_server/adapters/extended_capabilities.py +462 -0
- praisonai/mcp_server/adapters/knowledge.py +93 -0
- praisonai/mcp_server/adapters/memory.py +104 -0
- praisonai/mcp_server/adapters/prompts.py +306 -0
- praisonai/mcp_server/adapters/resources.py +124 -0
- praisonai/mcp_server/adapters/tools_bridge.py +280 -0
- praisonai/mcp_server/auth/__init__.py +48 -0
- praisonai/mcp_server/auth/api_key.py +291 -0
- praisonai/mcp_server/auth/oauth.py +460 -0
- praisonai/mcp_server/auth/oidc.py +289 -0
- praisonai/mcp_server/auth/scopes.py +260 -0
- praisonai/mcp_server/cli.py +852 -0
- praisonai/mcp_server/elicitation.py +445 -0
- praisonai/mcp_server/icons.py +302 -0
- praisonai/mcp_server/recipe_adapter.py +573 -0
- praisonai/mcp_server/recipe_cli.py +824 -0
- praisonai/mcp_server/registry.py +703 -0
- praisonai/mcp_server/sampling.py +422 -0
- praisonai/mcp_server/server.py +490 -0
- praisonai/mcp_server/tasks.py +443 -0
- praisonai/mcp_server/transports/__init__.py +18 -0
- praisonai/mcp_server/transports/http_stream.py +376 -0
- praisonai/mcp_server/transports/stdio.py +132 -0
- praisonai/persistence/__init__.py +84 -0
- praisonai/persistence/config.py +238 -0
- praisonai/persistence/conversation/__init__.py +25 -0
- praisonai/persistence/conversation/async_mysql.py +427 -0
- praisonai/persistence/conversation/async_postgres.py +410 -0
- praisonai/persistence/conversation/async_sqlite.py +371 -0
- praisonai/persistence/conversation/base.py +151 -0
- praisonai/persistence/conversation/json_store.py +250 -0
- praisonai/persistence/conversation/mysql.py +387 -0
- praisonai/persistence/conversation/postgres.py +401 -0
- praisonai/persistence/conversation/singlestore.py +240 -0
- praisonai/persistence/conversation/sqlite.py +341 -0
- praisonai/persistence/conversation/supabase.py +203 -0
- praisonai/persistence/conversation/surrealdb.py +287 -0
- praisonai/persistence/factory.py +301 -0
- praisonai/persistence/hooks/__init__.py +18 -0
- praisonai/persistence/hooks/agent_hooks.py +297 -0
- praisonai/persistence/knowledge/__init__.py +26 -0
- praisonai/persistence/knowledge/base.py +144 -0
- praisonai/persistence/knowledge/cassandra.py +232 -0
- praisonai/persistence/knowledge/chroma.py +295 -0
- praisonai/persistence/knowledge/clickhouse.py +242 -0
- praisonai/persistence/knowledge/cosmosdb_vector.py +438 -0
- praisonai/persistence/knowledge/couchbase.py +286 -0
- praisonai/persistence/knowledge/lancedb.py +216 -0
- praisonai/persistence/knowledge/langchain_adapter.py +291 -0
- praisonai/persistence/knowledge/lightrag_adapter.py +212 -0
- praisonai/persistence/knowledge/llamaindex_adapter.py +256 -0
- praisonai/persistence/knowledge/milvus.py +277 -0
- praisonai/persistence/knowledge/mongodb_vector.py +306 -0
- praisonai/persistence/knowledge/pgvector.py +335 -0
- praisonai/persistence/knowledge/pinecone.py +253 -0
- praisonai/persistence/knowledge/qdrant.py +301 -0
- praisonai/persistence/knowledge/redis_vector.py +291 -0
- praisonai/persistence/knowledge/singlestore_vector.py +299 -0
- praisonai/persistence/knowledge/surrealdb_vector.py +309 -0
- praisonai/persistence/knowledge/upstash_vector.py +266 -0
- praisonai/persistence/knowledge/weaviate.py +223 -0
- praisonai/persistence/migrations/__init__.py +10 -0
- praisonai/persistence/migrations/manager.py +251 -0
- praisonai/persistence/orchestrator.py +406 -0
- praisonai/persistence/state/__init__.py +21 -0
- praisonai/persistence/state/async_mongodb.py +200 -0
- praisonai/persistence/state/base.py +107 -0
- praisonai/persistence/state/dynamodb.py +226 -0
- praisonai/persistence/state/firestore.py +175 -0
- praisonai/persistence/state/gcs.py +155 -0
- praisonai/persistence/state/memory.py +245 -0
- praisonai/persistence/state/mongodb.py +158 -0
- praisonai/persistence/state/redis.py +190 -0
- praisonai/persistence/state/upstash.py +144 -0
- praisonai/persistence/tests/__init__.py +3 -0
- praisonai/persistence/tests/test_all_backends.py +633 -0
- praisonai/profiler.py +1214 -0
- praisonai/recipe/__init__.py +134 -0
- praisonai/recipe/bridge.py +278 -0
- praisonai/recipe/core.py +893 -0
- praisonai/recipe/exceptions.py +54 -0
- praisonai/recipe/history.py +402 -0
- praisonai/recipe/models.py +266 -0
- praisonai/recipe/operations.py +440 -0
- praisonai/recipe/policy.py +422 -0
- praisonai/recipe/registry.py +849 -0
- praisonai/recipe/runtime.py +214 -0
- praisonai/recipe/security.py +711 -0
- praisonai/recipe/serve.py +859 -0
- praisonai/recipe/server.py +613 -0
- praisonai/scheduler/__init__.py +45 -0
- praisonai/scheduler/agent_scheduler.py +552 -0
- praisonai/scheduler/base.py +124 -0
- praisonai/scheduler/daemon_manager.py +225 -0
- praisonai/scheduler/state_manager.py +155 -0
- praisonai/scheduler/yaml_loader.py +193 -0
- praisonai/scheduler.py +194 -0
- praisonai/setup/__init__.py +1 -0
- praisonai/setup/build.py +21 -0
- praisonai/setup/post_install.py +23 -0
- praisonai/setup/setup_conda_env.py +25 -0
- praisonai/setup.py +16 -0
- praisonai/templates/__init__.py +116 -0
- praisonai/templates/cache.py +364 -0
- praisonai/templates/dependency_checker.py +358 -0
- praisonai/templates/discovery.py +391 -0
- praisonai/templates/loader.py +564 -0
- praisonai/templates/registry.py +511 -0
- praisonai/templates/resolver.py +206 -0
- praisonai/templates/security.py +327 -0
- praisonai/templates/tool_override.py +498 -0
- praisonai/templates/tools_doctor.py +256 -0
- praisonai/test.py +105 -0
- praisonai/train.py +562 -0
- praisonai/train_vision.py +306 -0
- praisonai/ui/agents.py +824 -0
- praisonai/ui/callbacks.py +57 -0
- praisonai/ui/chainlit_compat.py +246 -0
- praisonai/ui/chat.py +532 -0
- praisonai/ui/code.py +717 -0
- praisonai/ui/colab.py +474 -0
- praisonai/ui/colab_chainlit.py +81 -0
- praisonai/ui/components/aicoder.py +284 -0
- praisonai/ui/context.py +283 -0
- praisonai/ui/database_config.py +56 -0
- praisonai/ui/db.py +294 -0
- praisonai/ui/realtime.py +488 -0
- praisonai/ui/realtimeclient/__init__.py +756 -0
- praisonai/ui/realtimeclient/tools.py +242 -0
- praisonai/ui/sql_alchemy.py +710 -0
- praisonai/upload_vision.py +140 -0
- praisonai/version.py +1 -0
- praisonai-3.0.0.dist-info/METADATA +3493 -0
- praisonai-3.0.0.dist-info/RECORD +393 -0
- praisonai-3.0.0.dist-info/WHEEL +5 -0
- praisonai-3.0.0.dist-info/entry_points.txt +4 -0
- praisonai-3.0.0.dist-info/top_level.txt +1 -0

praisonai/capabilities/completions.py
@@ -0,0 +1,336 @@
"""
Completions Capabilities Module

Provides chat/completions and text completions functionality via LiteLLM.
"""

from dataclasses import dataclass, field
from typing import Optional, Any, Dict, List, Union


@dataclass
class CompletionResult:
    """Result from completion operations."""
    id: str
    content: Optional[str] = None
    role: str = "assistant"
    model: Optional[str] = None
    finish_reason: Optional[str] = None
    usage: Optional[Dict[str, int]] = None
    tool_calls: Optional[List[Dict[str, Any]]] = None
    metadata: Dict[str, Any] = field(default_factory=dict)


def chat_completion(
    messages: List[Dict[str, Any]],
    model: str = "gpt-4o-mini",
    temperature: float = 1.0,
    max_tokens: Optional[int] = None,
    tools: Optional[List[Dict[str, Any]]] = None,
    tool_choice: Optional[str] = None,
    stream: bool = False,
    timeout: float = 600.0,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> CompletionResult:
    """
    Create a chat completion.

    Args:
        messages: List of messages [{"role": "user", "content": "..."}]
        model: Model to use
        temperature: Sampling temperature
        max_tokens: Maximum tokens to generate
        tools: List of tools
        tool_choice: Tool choice mode
        stream: Whether to stream
        timeout: Request timeout in seconds
        api_key: Optional API key override
        api_base: Optional API base URL override
        metadata: Optional metadata for tracing

    Returns:
        CompletionResult with response

    Example:
        >>> result = chat_completion([{"role": "user", "content": "Hello"}])
        >>> print(result.content)
    """
    import litellm

    call_kwargs = {
        'model': model,
        'messages': messages,
        'temperature': temperature,
        'stream': stream,
        'timeout': timeout,
    }

    if max_tokens:
        call_kwargs['max_tokens'] = max_tokens
    if tools:
        call_kwargs['tools'] = tools
    if tool_choice:
        call_kwargs['tool_choice'] = tool_choice
    if api_key:
        call_kwargs['api_key'] = api_key
    if api_base:
        call_kwargs['api_base'] = api_base

    call_kwargs.update(kwargs)

    if metadata:
        call_kwargs['metadata'] = metadata

    response = litellm.completion(**call_kwargs)

    choice = response.choices[0] if response.choices else None
    message = choice.message if choice else None

    usage = None
    if hasattr(response, 'usage') and response.usage:
        usage = {
            'prompt_tokens': getattr(response.usage, 'prompt_tokens', 0),
            'completion_tokens': getattr(response.usage, 'completion_tokens', 0),
            'total_tokens': getattr(response.usage, 'total_tokens', 0),
        }

    tool_calls = None
    if message and hasattr(message, 'tool_calls') and message.tool_calls:
        tool_calls = []
        for tc in message.tool_calls:
            tool_calls.append({
                'id': getattr(tc, 'id', ''),
                'type': getattr(tc, 'type', 'function'),
                'function': {
                    'name': getattr(tc.function, 'name', ''),
                    'arguments': getattr(tc.function, 'arguments', '{}'),
                }
            })

    return CompletionResult(
        id=getattr(response, 'id', ''),
        content=getattr(message, 'content', None) if message else None,
        role=getattr(message, 'role', 'assistant') if message else 'assistant',
        model=getattr(response, 'model', model),
        finish_reason=getattr(choice, 'finish_reason', None) if choice else None,
        usage=usage,
        tool_calls=tool_calls,
        metadata=metadata or {},
    )


async def achat_completion(
    messages: List[Dict[str, Any]],
    model: str = "gpt-4o-mini",
    temperature: float = 1.0,
    max_tokens: Optional[int] = None,
    tools: Optional[List[Dict[str, Any]]] = None,
    tool_choice: Optional[str] = None,
    stream: bool = False,
    timeout: float = 600.0,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> CompletionResult:
    """
    Async: Create a chat completion.

    See chat_completion() for full documentation.
    """
    import litellm

    call_kwargs = {
        'model': model,
        'messages': messages,
        'temperature': temperature,
        'stream': stream,
        'timeout': timeout,
    }

    if max_tokens:
        call_kwargs['max_tokens'] = max_tokens
    if tools:
        call_kwargs['tools'] = tools
    if tool_choice:
        call_kwargs['tool_choice'] = tool_choice
    if api_key:
        call_kwargs['api_key'] = api_key
    if api_base:
        call_kwargs['api_base'] = api_base

    call_kwargs.update(kwargs)

    if metadata:
        call_kwargs['metadata'] = metadata

    response = await litellm.acompletion(**call_kwargs)

    choice = response.choices[0] if response.choices else None
    message = choice.message if choice else None

    usage = None
    if hasattr(response, 'usage') and response.usage:
        usage = {
            'prompt_tokens': getattr(response.usage, 'prompt_tokens', 0),
            'completion_tokens': getattr(response.usage, 'completion_tokens', 0),
            'total_tokens': getattr(response.usage, 'total_tokens', 0),
        }

    tool_calls = None
    if message and hasattr(message, 'tool_calls') and message.tool_calls:
        tool_calls = []
        for tc in message.tool_calls:
            tool_calls.append({
                'id': getattr(tc, 'id', ''),
                'type': getattr(tc, 'type', 'function'),
                'function': {
                    'name': getattr(tc.function, 'name', ''),
                    'arguments': getattr(tc.function, 'arguments', '{}'),
                }
            })

    return CompletionResult(
        id=getattr(response, 'id', ''),
        content=getattr(message, 'content', None) if message else None,
        role=getattr(message, 'role', 'assistant') if message else 'assistant',
        model=getattr(response, 'model', model),
        finish_reason=getattr(choice, 'finish_reason', None) if choice else None,
        usage=usage,
        tool_calls=tool_calls,
        metadata=metadata or {},
    )


def text_completion(
    prompt: str,
    model: str = "gpt-3.5-turbo-instruct",
    temperature: float = 1.0,
    max_tokens: Optional[int] = None,
    timeout: float = 600.0,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> CompletionResult:
    """
    Create a text completion (legacy completions API).

    Args:
        prompt: Text prompt
        model: Model to use
        temperature: Sampling temperature
        max_tokens: Maximum tokens to generate
        timeout: Request timeout in seconds
        api_key: Optional API key override
        api_base: Optional API base URL override
        metadata: Optional metadata for tracing

    Returns:
        CompletionResult with response
    """
    import litellm

    call_kwargs = {
        'model': model,
        'prompt': prompt,
        'temperature': temperature,
        'timeout': timeout,
    }

    if max_tokens:
        call_kwargs['max_tokens'] = max_tokens
    if api_key:
        call_kwargs['api_key'] = api_key
    if api_base:
        call_kwargs['api_base'] = api_base

    call_kwargs.update(kwargs)

    if metadata:
        call_kwargs['metadata'] = metadata

    response = litellm.text_completion(**call_kwargs)

    choice = response.choices[0] if response.choices else None

    usage = None
    if hasattr(response, 'usage') and response.usage:
        usage = {
            'prompt_tokens': getattr(response.usage, 'prompt_tokens', 0),
            'completion_tokens': getattr(response.usage, 'completion_tokens', 0),
            'total_tokens': getattr(response.usage, 'total_tokens', 0),
        }

    return CompletionResult(
        id=getattr(response, 'id', ''),
        content=getattr(choice, 'text', None) if choice else None,
        role='assistant',
        model=getattr(response, 'model', model),
        finish_reason=getattr(choice, 'finish_reason', None) if choice else None,
        usage=usage,
        metadata=metadata or {},
    )


async def atext_completion(
    prompt: str,
    model: str = "gpt-3.5-turbo-instruct",
    temperature: float = 1.0,
    max_tokens: Optional[int] = None,
    timeout: float = 600.0,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> CompletionResult:
    """
    Async: Create a text completion.

    See text_completion() for full documentation.
    """
    import litellm

    call_kwargs = {
        'model': model,
        'prompt': prompt,
        'temperature': temperature,
        'timeout': timeout,
    }

    if max_tokens:
        call_kwargs['max_tokens'] = max_tokens
    if api_key:
        call_kwargs['api_key'] = api_key
    if api_base:
        call_kwargs['api_base'] = api_base

    call_kwargs.update(kwargs)

    if metadata:
        call_kwargs['metadata'] = metadata

    response = await litellm.atext_completion(**call_kwargs)

    choice = response.choices[0] if response.choices else None

    usage = None
    if hasattr(response, 'usage') and response.usage:
        usage = {
            'prompt_tokens': getattr(response.usage, 'prompt_tokens', 0),
            'completion_tokens': getattr(response.usage, 'completion_tokens', 0),
            'total_tokens': getattr(response.usage, 'total_tokens', 0),
        }

    return CompletionResult(
        id=getattr(response, 'id', ''),
        content=getattr(choice, 'text', None) if choice else None,
        role='assistant',
        model=getattr(response, 'model', model),
        finish_reason=getattr(choice, 'finish_reason', None) if choice else None,
        usage=usage,
        metadata=metadata or {},
    )
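
A minimal usage sketch for the completions module above. It assumes the import path follows the file layout in the listing (praisonai/capabilities/completions.py) and that credentials for the default gpt-4o-mini model are configured (e.g. OPENAI_API_KEY); the "trace_id" metadata key is a hypothetical example, not a documented field. Function names and the CompletionResult fields come from the diff itself.

# Usage sketch (assumes an API key is configured for the default model).
import asyncio

from praisonai.capabilities.completions import (
    CompletionResult,
    chat_completion,
    achat_completion,
)

# Synchronous call: returns a CompletionResult, not the raw LiteLLM response.
result: CompletionResult = chat_completion(
    [{"role": "user", "content": "Say hello in one word."}],
    model="gpt-4o-mini",
    max_tokens=16,
    metadata={"trace_id": "example-123"},  # hypothetical tracing metadata
)
print(result.content, result.usage)

# Async variant mirrors the same signature via litellm.acompletion.
async def main() -> None:
    reply = await achat_completion([{"role": "user", "content": "Ping?"}])
    print(reply.finish_reason)

asyncio.run(main())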

praisonai/capabilities/container_files.py
@@ -0,0 +1,155 @@
"""
Container Files Capabilities Module

Provides container file management functionality.
"""

from dataclasses import dataclass, field
from typing import Optional, Any, Dict, List


@dataclass
class ContainerFileResult:
    """Result from container file operations."""
    path: str
    container_id: str
    content: Optional[str] = None
    size: Optional[int] = None
    metadata: Dict[str, Any] = field(default_factory=dict)


def container_file_read(
    container_id: str,
    path: str,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> ContainerFileResult:
    """
    Read a file from a container.

    Args:
        container_id: Container ID
        path: File path in container
        metadata: Optional metadata for tracing

    Returns:
        ContainerFileResult with file content

    Example:
        >>> result = container_file_read("container-abc123", "/app/output.txt")
        >>> print(result.content)
    """
    # Placeholder implementation
    return ContainerFileResult(
        path=path,
        container_id=container_id,
        content=None,
        metadata={"status": "not_implemented", **(metadata or {})},
    )


async def acontainer_file_read(
    container_id: str,
    path: str,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> ContainerFileResult:
    """
    Async: Read a file from a container.

    See container_file_read() for full documentation.
    """
    return container_file_read(
        container_id=container_id,
        path=path,
        metadata=metadata,
        **kwargs
    )


def container_file_write(
    container_id: str,
    path: str,
    content: str,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> ContainerFileResult:
    """
    Write a file to a container.

    Args:
        container_id: Container ID
        path: File path in container
        content: File content
        metadata: Optional metadata for tracing

    Returns:
        ContainerFileResult with write confirmation
    """
    return ContainerFileResult(
        path=path,
        container_id=container_id,
        content=content,
        size=len(content),
        metadata={"status": "not_implemented", **(metadata or {})},
    )


async def acontainer_file_write(
    container_id: str,
    path: str,
    content: str,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> ContainerFileResult:
    """
    Async: Write a file to a container.

    See container_file_write() for full documentation.
    """
    return container_file_write(
        container_id=container_id,
        path=path,
        content=content,
        metadata=metadata,
        **kwargs
    )


def container_file_list(
    container_id: str,
    path: str = "/",
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> List[ContainerFileResult]:
    """
    List files in a container directory.

    Args:
        container_id: Container ID
        path: Directory path in container
        metadata: Optional metadata for tracing

    Returns:
        List of ContainerFileResult objects
    """
    return []


async def acontainer_file_list(
    container_id: str,
    path: str = "/",
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> List[ContainerFileResult]:
    """
    Async: List files in a container directory.

    See container_file_list() for full documentation.
    """
    return container_file_list(
        container_id=container_id,
        path=path,
        metadata=metadata,
        **kwargs
    )
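
A usage sketch for the container-file helpers above, assuming the import path praisonai.capabilities.container_files per the file listing. Note that in this release these functions are placeholders: results echo their inputs and carry {"status": "not_implemented"} metadata, so the sketch only demonstrates the calling convention and the values the stubs return.

# Usage sketch: placeholder behaviour in 3.0.0, no real container backend.
from praisonai.capabilities.container_files import (
    container_file_write,
    container_file_read,
    container_file_list,
)

written = container_file_write("container-abc123", "/app/notes.txt", "hello")
print(written.size)                # 5, i.e. len("hello")
print(written.metadata["status"])  # "not_implemented"

read_back = container_file_read("container-abc123", "/app/notes.txt")
print(read_back.content)           # None (no backend wired up yet)

print(container_file_list("container-abc123", "/app"))  # []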

praisonai/capabilities/containers.py
@@ -0,0 +1,93 @@
"""
Containers Capabilities Module

Provides container management functionality.
"""

from dataclasses import dataclass, field
from typing import Optional, Any, Dict


@dataclass
class ContainerResult:
    """Result from container operations."""
    id: str
    status: str = "created"
    name: Optional[str] = None
    image: Optional[str] = None
    metadata: Dict[str, Any] = field(default_factory=dict)


def container_create(
    image: str,
    name: Optional[str] = None,
    command: Optional[str] = None,
    environment: Optional[Dict[str, str]] = None,
    timeout: float = 600.0,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> ContainerResult:
    """
    Create a container for code execution.

    Args:
        image: Container image name
        name: Optional container name
        command: Command to run
        environment: Environment variables
        timeout: Request timeout in seconds
        api_key: Optional API key override
        api_base: Optional API base URL override
        metadata: Optional metadata for tracing

    Returns:
        ContainerResult with container ID

    Example:
        >>> result = container_create("python:3.11")
        >>> print(result.id)
    """
    # Container functionality is provider-specific
    # This is a placeholder that can be extended
    import uuid

    container_id = f"container-{uuid.uuid4().hex[:12]}"

    return ContainerResult(
        id=container_id,
        status="created",
        name=name,
        image=image,
        metadata=metadata or {},
    )


async def acontainer_create(
    image: str,
    name: Optional[str] = None,
    command: Optional[str] = None,
    environment: Optional[Dict[str, str]] = None,
    timeout: float = 600.0,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    **kwargs
) -> ContainerResult:
    """
    Async: Create a container for code execution.

    See container_create() for full documentation.
    """
    import uuid

    container_id = f"container-{uuid.uuid4().hex[:12]}"

    return ContainerResult(
        id=container_id,
        status="created",
        name=name,
        image=image,
        metadata=metadata or {},
    )
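
A usage sketch for the container module above, assuming the import path praisonai.capabilities.containers per the file listing. As the diff notes, container_create is provider-agnostic placeholder code: it returns a locally generated uuid-based ID rather than starting a real container, so the sketch only shows the shape of the API.

# Usage sketch: IDs are generated locally, no container provider is contacted.
import asyncio

from praisonai.capabilities.containers import container_create, acontainer_create

box = container_create(
    "python:3.11",
    name="scratch",
    environment={"PYTHONUNBUFFERED": "1"},
)
print(box.id)      # e.g. "container-3f9c2a1b7d44" (random hex suffix)
print(box.status)  # "created"

async def main() -> None:
    abox = await acontainer_create("python:3.11")
    print(abox.image)  # "python:3.11"

asyncio.run(main())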