gobby 0.2.5__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- gobby/__init__.py +3 -0
- gobby/adapters/__init__.py +30 -0
- gobby/adapters/base.py +93 -0
- gobby/adapters/claude_code.py +276 -0
- gobby/adapters/codex.py +1292 -0
- gobby/adapters/gemini.py +343 -0
- gobby/agents/__init__.py +37 -0
- gobby/agents/codex_session.py +120 -0
- gobby/agents/constants.py +112 -0
- gobby/agents/context.py +362 -0
- gobby/agents/definitions.py +133 -0
- gobby/agents/gemini_session.py +111 -0
- gobby/agents/registry.py +618 -0
- gobby/agents/runner.py +968 -0
- gobby/agents/session.py +259 -0
- gobby/agents/spawn.py +916 -0
- gobby/agents/spawners/__init__.py +77 -0
- gobby/agents/spawners/base.py +142 -0
- gobby/agents/spawners/cross_platform.py +266 -0
- gobby/agents/spawners/embedded.py +225 -0
- gobby/agents/spawners/headless.py +226 -0
- gobby/agents/spawners/linux.py +125 -0
- gobby/agents/spawners/macos.py +277 -0
- gobby/agents/spawners/windows.py +308 -0
- gobby/agents/tty_config.py +319 -0
- gobby/autonomous/__init__.py +32 -0
- gobby/autonomous/progress_tracker.py +447 -0
- gobby/autonomous/stop_registry.py +269 -0
- gobby/autonomous/stuck_detector.py +383 -0
- gobby/cli/__init__.py +67 -0
- gobby/cli/__main__.py +8 -0
- gobby/cli/agents.py +529 -0
- gobby/cli/artifacts.py +266 -0
- gobby/cli/daemon.py +329 -0
- gobby/cli/extensions.py +526 -0
- gobby/cli/github.py +263 -0
- gobby/cli/init.py +53 -0
- gobby/cli/install.py +614 -0
- gobby/cli/installers/__init__.py +37 -0
- gobby/cli/installers/antigravity.py +65 -0
- gobby/cli/installers/claude.py +363 -0
- gobby/cli/installers/codex.py +192 -0
- gobby/cli/installers/gemini.py +294 -0
- gobby/cli/installers/git_hooks.py +377 -0
- gobby/cli/installers/shared.py +737 -0
- gobby/cli/linear.py +250 -0
- gobby/cli/mcp.py +30 -0
- gobby/cli/mcp_proxy.py +698 -0
- gobby/cli/memory.py +304 -0
- gobby/cli/merge.py +384 -0
- gobby/cli/projects.py +79 -0
- gobby/cli/sessions.py +622 -0
- gobby/cli/tasks/__init__.py +30 -0
- gobby/cli/tasks/_utils.py +658 -0
- gobby/cli/tasks/ai.py +1025 -0
- gobby/cli/tasks/commits.py +169 -0
- gobby/cli/tasks/crud.py +685 -0
- gobby/cli/tasks/deps.py +135 -0
- gobby/cli/tasks/labels.py +63 -0
- gobby/cli/tasks/main.py +273 -0
- gobby/cli/tasks/search.py +178 -0
- gobby/cli/tui.py +34 -0
- gobby/cli/utils.py +513 -0
- gobby/cli/workflows.py +927 -0
- gobby/cli/worktrees.py +481 -0
- gobby/config/__init__.py +129 -0
- gobby/config/app.py +551 -0
- gobby/config/extensions.py +167 -0
- gobby/config/features.py +472 -0
- gobby/config/llm_providers.py +98 -0
- gobby/config/logging.py +66 -0
- gobby/config/mcp.py +346 -0
- gobby/config/persistence.py +247 -0
- gobby/config/servers.py +141 -0
- gobby/config/sessions.py +250 -0
- gobby/config/tasks.py +784 -0
- gobby/hooks/__init__.py +104 -0
- gobby/hooks/artifact_capture.py +213 -0
- gobby/hooks/broadcaster.py +243 -0
- gobby/hooks/event_handlers.py +723 -0
- gobby/hooks/events.py +218 -0
- gobby/hooks/git.py +169 -0
- gobby/hooks/health_monitor.py +171 -0
- gobby/hooks/hook_manager.py +856 -0
- gobby/hooks/hook_types.py +575 -0
- gobby/hooks/plugins.py +813 -0
- gobby/hooks/session_coordinator.py +396 -0
- gobby/hooks/verification_runner.py +268 -0
- gobby/hooks/webhooks.py +339 -0
- gobby/install/claude/commands/gobby/bug.md +51 -0
- gobby/install/claude/commands/gobby/chore.md +51 -0
- gobby/install/claude/commands/gobby/epic.md +52 -0
- gobby/install/claude/commands/gobby/eval.md +235 -0
- gobby/install/claude/commands/gobby/feat.md +49 -0
- gobby/install/claude/commands/gobby/nit.md +52 -0
- gobby/install/claude/commands/gobby/ref.md +52 -0
- gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
- gobby/install/claude/hooks/hook_dispatcher.py +364 -0
- gobby/install/claude/hooks/validate_settings.py +102 -0
- gobby/install/claude/hooks-template.json +118 -0
- gobby/install/codex/hooks/hook_dispatcher.py +153 -0
- gobby/install/codex/prompts/forget.md +7 -0
- gobby/install/codex/prompts/memories.md +7 -0
- gobby/install/codex/prompts/recall.md +7 -0
- gobby/install/codex/prompts/remember.md +13 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
- gobby/install/gemini/hooks-template.json +138 -0
- gobby/install/shared/plugins/code_guardian.py +456 -0
- gobby/install/shared/plugins/example_notify.py +331 -0
- gobby/integrations/__init__.py +10 -0
- gobby/integrations/github.py +145 -0
- gobby/integrations/linear.py +145 -0
- gobby/llm/__init__.py +40 -0
- gobby/llm/base.py +120 -0
- gobby/llm/claude.py +578 -0
- gobby/llm/claude_executor.py +503 -0
- gobby/llm/codex.py +322 -0
- gobby/llm/codex_executor.py +513 -0
- gobby/llm/executor.py +316 -0
- gobby/llm/factory.py +34 -0
- gobby/llm/gemini.py +258 -0
- gobby/llm/gemini_executor.py +339 -0
- gobby/llm/litellm.py +287 -0
- gobby/llm/litellm_executor.py +303 -0
- gobby/llm/resolver.py +499 -0
- gobby/llm/service.py +236 -0
- gobby/mcp_proxy/__init__.py +29 -0
- gobby/mcp_proxy/actions.py +175 -0
- gobby/mcp_proxy/daemon_control.py +198 -0
- gobby/mcp_proxy/importer.py +436 -0
- gobby/mcp_proxy/lazy.py +325 -0
- gobby/mcp_proxy/manager.py +798 -0
- gobby/mcp_proxy/metrics.py +609 -0
- gobby/mcp_proxy/models.py +139 -0
- gobby/mcp_proxy/registries.py +215 -0
- gobby/mcp_proxy/schema_hash.py +381 -0
- gobby/mcp_proxy/semantic_search.py +706 -0
- gobby/mcp_proxy/server.py +549 -0
- gobby/mcp_proxy/services/__init__.py +0 -0
- gobby/mcp_proxy/services/fallback.py +306 -0
- gobby/mcp_proxy/services/recommendation.py +224 -0
- gobby/mcp_proxy/services/server_mgmt.py +214 -0
- gobby/mcp_proxy/services/system.py +72 -0
- gobby/mcp_proxy/services/tool_filter.py +231 -0
- gobby/mcp_proxy/services/tool_proxy.py +309 -0
- gobby/mcp_proxy/stdio.py +565 -0
- gobby/mcp_proxy/tools/__init__.py +27 -0
- gobby/mcp_proxy/tools/agents.py +1103 -0
- gobby/mcp_proxy/tools/artifacts.py +207 -0
- gobby/mcp_proxy/tools/hub.py +335 -0
- gobby/mcp_proxy/tools/internal.py +337 -0
- gobby/mcp_proxy/tools/memory.py +543 -0
- gobby/mcp_proxy/tools/merge.py +422 -0
- gobby/mcp_proxy/tools/metrics.py +283 -0
- gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
- gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
- gobby/mcp_proxy/tools/orchestration/review.py +736 -0
- gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
- gobby/mcp_proxy/tools/session_messages.py +1056 -0
- gobby/mcp_proxy/tools/task_dependencies.py +219 -0
- gobby/mcp_proxy/tools/task_expansion.py +591 -0
- gobby/mcp_proxy/tools/task_github.py +393 -0
- gobby/mcp_proxy/tools/task_linear.py +379 -0
- gobby/mcp_proxy/tools/task_orchestration.py +77 -0
- gobby/mcp_proxy/tools/task_readiness.py +522 -0
- gobby/mcp_proxy/tools/task_sync.py +351 -0
- gobby/mcp_proxy/tools/task_validation.py +843 -0
- gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
- gobby/mcp_proxy/tools/tasks/_context.py +112 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
- gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
- gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
- gobby/mcp_proxy/tools/tasks/_search.py +215 -0
- gobby/mcp_proxy/tools/tasks/_session.py +125 -0
- gobby/mcp_proxy/tools/workflows.py +973 -0
- gobby/mcp_proxy/tools/worktrees.py +1264 -0
- gobby/mcp_proxy/transports/__init__.py +0 -0
- gobby/mcp_proxy/transports/base.py +95 -0
- gobby/mcp_proxy/transports/factory.py +44 -0
- gobby/mcp_proxy/transports/http.py +139 -0
- gobby/mcp_proxy/transports/stdio.py +213 -0
- gobby/mcp_proxy/transports/websocket.py +136 -0
- gobby/memory/backends/__init__.py +116 -0
- gobby/memory/backends/mem0.py +408 -0
- gobby/memory/backends/memu.py +485 -0
- gobby/memory/backends/null.py +111 -0
- gobby/memory/backends/openmemory.py +537 -0
- gobby/memory/backends/sqlite.py +304 -0
- gobby/memory/context.py +87 -0
- gobby/memory/manager.py +1001 -0
- gobby/memory/protocol.py +451 -0
- gobby/memory/search/__init__.py +66 -0
- gobby/memory/search/text.py +127 -0
- gobby/memory/viz.py +258 -0
- gobby/prompts/__init__.py +13 -0
- gobby/prompts/defaults/expansion/system.md +119 -0
- gobby/prompts/defaults/expansion/user.md +48 -0
- gobby/prompts/defaults/external_validation/agent.md +72 -0
- gobby/prompts/defaults/external_validation/external.md +63 -0
- gobby/prompts/defaults/external_validation/spawn.md +83 -0
- gobby/prompts/defaults/external_validation/system.md +6 -0
- gobby/prompts/defaults/features/import_mcp.md +22 -0
- gobby/prompts/defaults/features/import_mcp_github.md +17 -0
- gobby/prompts/defaults/features/import_mcp_search.md +16 -0
- gobby/prompts/defaults/features/recommend_tools.md +32 -0
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
- gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
- gobby/prompts/defaults/features/server_description.md +20 -0
- gobby/prompts/defaults/features/server_description_system.md +6 -0
- gobby/prompts/defaults/features/task_description.md +31 -0
- gobby/prompts/defaults/features/task_description_system.md +6 -0
- gobby/prompts/defaults/features/tool_summary.md +17 -0
- gobby/prompts/defaults/features/tool_summary_system.md +6 -0
- gobby/prompts/defaults/research/step.md +58 -0
- gobby/prompts/defaults/validation/criteria.md +47 -0
- gobby/prompts/defaults/validation/validate.md +38 -0
- gobby/prompts/loader.py +346 -0
- gobby/prompts/models.py +113 -0
- gobby/py.typed +0 -0
- gobby/runner.py +488 -0
- gobby/search/__init__.py +23 -0
- gobby/search/protocol.py +104 -0
- gobby/search/tfidf.py +232 -0
- gobby/servers/__init__.py +7 -0
- gobby/servers/http.py +636 -0
- gobby/servers/models.py +31 -0
- gobby/servers/routes/__init__.py +23 -0
- gobby/servers/routes/admin.py +416 -0
- gobby/servers/routes/dependencies.py +118 -0
- gobby/servers/routes/mcp/__init__.py +24 -0
- gobby/servers/routes/mcp/hooks.py +135 -0
- gobby/servers/routes/mcp/plugins.py +121 -0
- gobby/servers/routes/mcp/tools.py +1337 -0
- gobby/servers/routes/mcp/webhooks.py +159 -0
- gobby/servers/routes/sessions.py +582 -0
- gobby/servers/websocket.py +766 -0
- gobby/sessions/__init__.py +13 -0
- gobby/sessions/analyzer.py +322 -0
- gobby/sessions/lifecycle.py +240 -0
- gobby/sessions/manager.py +563 -0
- gobby/sessions/processor.py +225 -0
- gobby/sessions/summary.py +532 -0
- gobby/sessions/transcripts/__init__.py +41 -0
- gobby/sessions/transcripts/base.py +125 -0
- gobby/sessions/transcripts/claude.py +386 -0
- gobby/sessions/transcripts/codex.py +143 -0
- gobby/sessions/transcripts/gemini.py +195 -0
- gobby/storage/__init__.py +21 -0
- gobby/storage/agents.py +409 -0
- gobby/storage/artifact_classifier.py +341 -0
- gobby/storage/artifacts.py +285 -0
- gobby/storage/compaction.py +67 -0
- gobby/storage/database.py +357 -0
- gobby/storage/inter_session_messages.py +194 -0
- gobby/storage/mcp.py +680 -0
- gobby/storage/memories.py +562 -0
- gobby/storage/merge_resolutions.py +550 -0
- gobby/storage/migrations.py +860 -0
- gobby/storage/migrations_legacy.py +1359 -0
- gobby/storage/projects.py +166 -0
- gobby/storage/session_messages.py +251 -0
- gobby/storage/session_tasks.py +97 -0
- gobby/storage/sessions.py +817 -0
- gobby/storage/task_dependencies.py +223 -0
- gobby/storage/tasks/__init__.py +42 -0
- gobby/storage/tasks/_aggregates.py +180 -0
- gobby/storage/tasks/_crud.py +449 -0
- gobby/storage/tasks/_id.py +104 -0
- gobby/storage/tasks/_lifecycle.py +311 -0
- gobby/storage/tasks/_manager.py +889 -0
- gobby/storage/tasks/_models.py +300 -0
- gobby/storage/tasks/_ordering.py +119 -0
- gobby/storage/tasks/_path_cache.py +110 -0
- gobby/storage/tasks/_queries.py +343 -0
- gobby/storage/tasks/_search.py +143 -0
- gobby/storage/workflow_audit.py +393 -0
- gobby/storage/worktrees.py +547 -0
- gobby/sync/__init__.py +29 -0
- gobby/sync/github.py +333 -0
- gobby/sync/linear.py +304 -0
- gobby/sync/memories.py +284 -0
- gobby/sync/tasks.py +641 -0
- gobby/tasks/__init__.py +8 -0
- gobby/tasks/build_verification.py +193 -0
- gobby/tasks/commits.py +633 -0
- gobby/tasks/context.py +747 -0
- gobby/tasks/criteria.py +342 -0
- gobby/tasks/enhanced_validator.py +226 -0
- gobby/tasks/escalation.py +263 -0
- gobby/tasks/expansion.py +626 -0
- gobby/tasks/external_validator.py +764 -0
- gobby/tasks/issue_extraction.py +171 -0
- gobby/tasks/prompts/expand.py +327 -0
- gobby/tasks/research.py +421 -0
- gobby/tasks/tdd.py +352 -0
- gobby/tasks/tree_builder.py +263 -0
- gobby/tasks/validation.py +712 -0
- gobby/tasks/validation_history.py +357 -0
- gobby/tasks/validation_models.py +89 -0
- gobby/tools/__init__.py +0 -0
- gobby/tools/summarizer.py +170 -0
- gobby/tui/__init__.py +5 -0
- gobby/tui/api_client.py +281 -0
- gobby/tui/app.py +327 -0
- gobby/tui/screens/__init__.py +25 -0
- gobby/tui/screens/agents.py +333 -0
- gobby/tui/screens/chat.py +450 -0
- gobby/tui/screens/dashboard.py +377 -0
- gobby/tui/screens/memory.py +305 -0
- gobby/tui/screens/metrics.py +231 -0
- gobby/tui/screens/orchestrator.py +904 -0
- gobby/tui/screens/sessions.py +412 -0
- gobby/tui/screens/tasks.py +442 -0
- gobby/tui/screens/workflows.py +289 -0
- gobby/tui/screens/worktrees.py +174 -0
- gobby/tui/widgets/__init__.py +21 -0
- gobby/tui/widgets/chat.py +210 -0
- gobby/tui/widgets/conductor.py +104 -0
- gobby/tui/widgets/menu.py +132 -0
- gobby/tui/widgets/message_panel.py +160 -0
- gobby/tui/widgets/review_gate.py +224 -0
- gobby/tui/widgets/task_tree.py +99 -0
- gobby/tui/widgets/token_budget.py +166 -0
- gobby/tui/ws_client.py +258 -0
- gobby/utils/__init__.py +3 -0
- gobby/utils/daemon_client.py +235 -0
- gobby/utils/git.py +222 -0
- gobby/utils/id.py +38 -0
- gobby/utils/json_helpers.py +161 -0
- gobby/utils/logging.py +376 -0
- gobby/utils/machine_id.py +135 -0
- gobby/utils/metrics.py +589 -0
- gobby/utils/project_context.py +182 -0
- gobby/utils/project_init.py +263 -0
- gobby/utils/status.py +256 -0
- gobby/utils/validation.py +80 -0
- gobby/utils/version.py +23 -0
- gobby/workflows/__init__.py +4 -0
- gobby/workflows/actions.py +1310 -0
- gobby/workflows/approval_flow.py +138 -0
- gobby/workflows/artifact_actions.py +103 -0
- gobby/workflows/audit_helpers.py +110 -0
- gobby/workflows/autonomous_actions.py +286 -0
- gobby/workflows/context_actions.py +394 -0
- gobby/workflows/definitions.py +130 -0
- gobby/workflows/detection_helpers.py +208 -0
- gobby/workflows/engine.py +485 -0
- gobby/workflows/evaluator.py +669 -0
- gobby/workflows/git_utils.py +96 -0
- gobby/workflows/hooks.py +169 -0
- gobby/workflows/lifecycle_evaluator.py +613 -0
- gobby/workflows/llm_actions.py +70 -0
- gobby/workflows/loader.py +333 -0
- gobby/workflows/mcp_actions.py +60 -0
- gobby/workflows/memory_actions.py +272 -0
- gobby/workflows/premature_stop.py +164 -0
- gobby/workflows/session_actions.py +139 -0
- gobby/workflows/state_actions.py +123 -0
- gobby/workflows/state_manager.py +104 -0
- gobby/workflows/stop_signal_actions.py +163 -0
- gobby/workflows/summary_actions.py +344 -0
- gobby/workflows/task_actions.py +249 -0
- gobby/workflows/task_enforcement_actions.py +901 -0
- gobby/workflows/templates.py +52 -0
- gobby/workflows/todo_actions.py +84 -0
- gobby/workflows/webhook.py +223 -0
- gobby/workflows/webhook_executor.py +399 -0
- gobby/worktrees/__init__.py +5 -0
- gobby/worktrees/git.py +690 -0
- gobby/worktrees/merge/__init__.py +20 -0
- gobby/worktrees/merge/conflict_parser.py +177 -0
- gobby/worktrees/merge/resolver.py +485 -0
- gobby-0.2.5.dist-info/METADATA +351 -0
- gobby-0.2.5.dist-info/RECORD +383 -0
- gobby-0.2.5.dist-info/WHEEL +5 -0
- gobby-0.2.5.dist-info/entry_points.txt +2 -0
- gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
- gobby-0.2.5.dist-info/top_level.txt +1 -0
@@ -0,0 +1,303 @@
"""
LiteLLM implementation of AgentExecutor.

Provides a unified interface to 100+ LLM providers using OpenAI-compatible
function calling API. Supports models from OpenAI, Anthropic, Mistral,
Cohere, and many others through a single interface.
"""

import asyncio
import json
import logging
import os
from typing import Any

from gobby.llm.executor import (
    AgentExecutor,
    AgentResult,
    ToolCallRecord,
    ToolHandler,
    ToolResult,
    ToolSchema,
)

logger = logging.getLogger(__name__)


class LiteLLMExecutor(AgentExecutor):
    """
    LiteLLM implementation of AgentExecutor.

    Uses LiteLLM's unified API to access 100+ LLM providers with OpenAI-compatible
    function calling. Supports models from OpenAI, Anthropic, Mistral, Cohere, etc.

    The executor implements a proper agentic loop:
    1. Send prompt to LLM with function/tool schemas
    2. When LLM requests a function call, call tool_handler
    3. Send function result back to LLM
    4. Repeat until LLM stops requesting functions or limits are reached

    Example:
        >>> executor = LiteLLMExecutor(default_model="gpt-4o-mini")
        >>> result = await executor.run(
        ...     prompt="Create a task",
        ...     tools=[ToolSchema(name="create_task", ...)],
        ...     tool_handler=my_handler,
        ... )
    """

    def __init__(
        self,
        default_model: str = "gpt-4o-mini",
        api_base: str | None = None,
        api_keys: dict[str, str] | None = None,
    ):
        """
        Initialize LiteLLMExecutor.

        Args:
            default_model: Default model to use if not specified in run().
                Examples: "gpt-4o-mini", "claude-3-sonnet-20240229",
                "mistral/mistral-large-latest"
            api_base: Optional custom API base URL (e.g., OpenRouter endpoint).
            api_keys: Optional dict of API keys to set in environment.
                Keys should be like "OPENAI_API_KEY", "ANTHROPIC_API_KEY", etc.
        """
        self.default_model = default_model
        self.api_base = api_base
        self.logger = logger
        self._litellm: Any = None

        try:
            import litellm

            self._litellm = litellm

            # Set API keys in environment if provided
            if api_keys:
                for key, value in api_keys.items():
                    if value and key not in os.environ:
                        os.environ[key] = value
                        self.logger.debug(f"Set {key} from config")

            self.logger.debug("LiteLLM executor initialized")

        except ImportError as e:
            raise ImportError(
                "litellm package not found. Please install with `pip install litellm`."
            ) from e

    @property
    def provider_name(self) -> str:
        """Return the provider name."""
        return "litellm"

    def _convert_tools_to_openai_format(self, tools: list[ToolSchema]) -> list[dict[str, Any]]:
        """Convert ToolSchema list to OpenAI function calling format."""
        openai_tools = []
        for tool in tools:
            # Build parameter schema
            params = tool.input_schema.copy()
            # Ensure type is object
            if "type" not in params:
                params["type"] = "object"

            openai_tools.append(
                {
                    "type": "function",
                    "function": {
                        "name": tool.name,
                        "description": tool.description,
                        "parameters": params,
                    },
                }
            )
        return openai_tools

    async def run(
        self,
        prompt: str,
        tools: list[ToolSchema],
        tool_handler: ToolHandler,
        system_prompt: str | None = None,
        model: str | None = None,
        max_turns: int = 10,
        timeout: float = 120.0,
    ) -> AgentResult:
        """
        Execute an agentic loop with function calling.

        Runs LiteLLM with the given prompt, calling tools via tool_handler
        until completion, max_turns, or timeout.

        Args:
            prompt: The user prompt to process.
            tools: List of available tools with their schemas.
            tool_handler: Callback to execute tool calls.
            system_prompt: Optional system prompt.
            model: Optional model override.
            max_turns: Maximum turns before stopping (default: 10).
            timeout: Maximum execution time in seconds (default: 120.0).

        Returns:
            AgentResult with output, status, and tool call records.
        """
        if self._litellm is None:
            return AgentResult(
                output="",
                status="error",
                error="LiteLLM client not initialized",
                turns_used=0,
            )

        tool_calls_records: list[ToolCallRecord] = []
        effective_model = model or self.default_model

        # Track turns in outer scope so timeout handler can access the count
        turns_counter = [0]

        async def _run_loop() -> AgentResult:
            turns_used = 0
            final_output = ""
            litellm = self._litellm
            if litellm is None:
                raise RuntimeError("LiteLLMExecutor litellm not initialized")

            # Convert tools to OpenAI format
            openai_tools = self._convert_tools_to_openai_format(tools)

            # Build initial messages
            messages: list[dict[str, Any]] = []
            if system_prompt:
                messages.append({"role": "system", "content": system_prompt})
            else:
                messages.append({"role": "system", "content": "You are a helpful assistant."})
            messages.append({"role": "user", "content": prompt})

            while turns_used < max_turns:
                turns_used += 1
                turns_counter[0] = turns_used

                try:
                    # Build completion kwargs
                    completion_kwargs: dict[str, Any] = {
                        "model": effective_model,
                        "messages": messages,
                    }

                    # Add tools if available
                    if openai_tools:
                        completion_kwargs["tools"] = openai_tools
                        completion_kwargs["tool_choice"] = "auto"

                    # Add api_base if configured
                    if self.api_base:
                        completion_kwargs["api_base"] = self.api_base

                    # Call LiteLLM
                    response = await litellm.acompletion(**completion_kwargs)

                except Exception as e:
                    self.logger.error(f"LiteLLM API error: {e}")
                    return AgentResult(
                        output="",
                        status="error",
                        tool_calls=tool_calls_records,
                        error=f"LiteLLM API error: {e}",
                        turns_used=turns_used,
                    )

                # Process response
                response_message = response.choices[0].message
                tool_calls = getattr(response_message, "tool_calls", None)

                # Extract text content
                if response_message.content:
                    final_output = response_message.content

                # If no tool calls, we're done
                if not tool_calls:
                    return AgentResult(
                        output=final_output,
                        status="success",
                        tool_calls=tool_calls_records,
                        turns_used=turns_used,
                    )

                # Add assistant message to history
                messages.append(response_message.model_dump())

                # Handle tool calls
                for tool_call in tool_calls:
                    function_name = tool_call.function.name
                    try:
                        function_args = json.loads(tool_call.function.arguments)
                    except json.JSONDecodeError:
                        function_args = {}

                    # Record the tool call
                    record = ToolCallRecord(
                        tool_name=function_name,
                        arguments=function_args,
                    )
                    tool_calls_records.append(record)

                    # Execute via handler
                    try:
                        result = await tool_handler(function_name, function_args)
                        record.result = result

                        # Format result for LiteLLM
                        if result.success:
                            # Use explicit None check to handle valid falsy values (0, False, "", {}, etc.)
                            content = (
                                json.dumps(result.result)
                                if result.result is not None
                                else "Success"
                            )
                        else:
                            content = f"Error: {result.error}"

                        messages.append(
                            {
                                "role": "tool",
                                "tool_call_id": tool_call.id,
                                "name": function_name,
                                "content": content,
                            }
                        )
                    except Exception as e:
                        self.logger.error(f"Tool handler error for {function_name}: {e}")
                        record.result = ToolResult(
                            tool_name=function_name,
                            success=False,
                            error=str(e),
                        )
                        messages.append(
                            {
                                "role": "tool",
                                "tool_call_id": tool_call.id,
                                "name": function_name,
                                "content": f"Error: {e}",
                            }
                        )

            # Max turns reached
            return AgentResult(
                output=final_output,
                status="partial",
                tool_calls=tool_calls_records,
                turns_used=turns_used,
            )

        # Run with timeout
        try:
            return await asyncio.wait_for(_run_loop(), timeout=timeout)
        except TimeoutError:
            return AgentResult(
                output="",
                status="timeout",
                tool_calls=tool_calls_records,
                error=f"Execution timed out after {timeout}s",
                turns_used=turns_counter[0],
            )