agentpool 2.1.9__py3-none-any.whl → 2.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- acp/__init__.py +13 -0
- acp/bridge/README.md +15 -2
- acp/bridge/__init__.py +3 -2
- acp/bridge/__main__.py +60 -19
- acp/bridge/ws_server.py +173 -0
- acp/bridge/ws_server_cli.py +89 -0
- acp/notifications.py +2 -1
- acp/stdio.py +39 -9
- acp/transports.py +362 -2
- acp/utils.py +15 -2
- agentpool/__init__.py +4 -1
- agentpool/agents/__init__.py +2 -0
- agentpool/agents/acp_agent/acp_agent.py +203 -88
- agentpool/agents/acp_agent/acp_converters.py +46 -21
- agentpool/agents/acp_agent/client_handler.py +157 -3
- agentpool/agents/acp_agent/session_state.py +4 -1
- agentpool/agents/agent.py +314 -107
- agentpool/agents/agui_agent/__init__.py +0 -2
- agentpool/agents/agui_agent/agui_agent.py +90 -21
- agentpool/agents/agui_agent/agui_converters.py +0 -131
- agentpool/agents/base_agent.py +163 -1
- agentpool/agents/claude_code_agent/claude_code_agent.py +626 -179
- agentpool/agents/claude_code_agent/converters.py +71 -3
- agentpool/agents/claude_code_agent/history.py +474 -0
- agentpool/agents/context.py +40 -0
- agentpool/agents/events/__init__.py +2 -0
- agentpool/agents/events/builtin_handlers.py +2 -1
- agentpool/agents/events/event_emitter.py +29 -2
- agentpool/agents/events/events.py +20 -0
- agentpool/agents/modes.py +54 -0
- agentpool/agents/tool_call_accumulator.py +213 -0
- agentpool/common_types.py +21 -0
- agentpool/config_resources/__init__.py +38 -1
- agentpool/config_resources/claude_code_agent.yml +3 -0
- agentpool/delegation/pool.py +37 -29
- agentpool/delegation/team.py +1 -0
- agentpool/delegation/teamrun.py +1 -0
- agentpool/diagnostics/__init__.py +53 -0
- agentpool/diagnostics/lsp_manager.py +1593 -0
- agentpool/diagnostics/lsp_proxy.py +41 -0
- agentpool/diagnostics/lsp_proxy_script.py +229 -0
- agentpool/diagnostics/models.py +398 -0
- agentpool/mcp_server/__init__.py +0 -2
- agentpool/mcp_server/client.py +12 -3
- agentpool/mcp_server/manager.py +25 -31
- agentpool/mcp_server/registries/official_registry_client.py +25 -0
- agentpool/mcp_server/tool_bridge.py +78 -66
- agentpool/messaging/__init__.py +0 -2
- agentpool/messaging/compaction.py +72 -197
- agentpool/messaging/message_history.py +12 -0
- agentpool/messaging/messages.py +52 -9
- agentpool/messaging/processing.py +3 -1
- agentpool/models/acp_agents/base.py +0 -22
- agentpool/models/acp_agents/mcp_capable.py +8 -148
- agentpool/models/acp_agents/non_mcp.py +129 -72
- agentpool/models/agents.py +35 -13
- agentpool/models/claude_code_agents.py +33 -2
- agentpool/models/manifest.py +43 -0
- agentpool/repomap.py +1 -1
- agentpool/resource_providers/__init__.py +9 -1
- agentpool/resource_providers/aggregating.py +52 -3
- agentpool/resource_providers/base.py +57 -1
- agentpool/resource_providers/mcp_provider.py +23 -0
- agentpool/resource_providers/plan_provider.py +130 -41
- agentpool/resource_providers/pool.py +2 -0
- agentpool/resource_providers/static.py +2 -0
- agentpool/sessions/__init__.py +2 -1
- agentpool/sessions/manager.py +31 -2
- agentpool/sessions/models.py +50 -0
- agentpool/skills/registry.py +13 -8
- agentpool/storage/manager.py +217 -1
- agentpool/testing.py +537 -19
- agentpool/utils/file_watcher.py +269 -0
- agentpool/utils/identifiers.py +121 -0
- agentpool/utils/pydantic_ai_helpers.py +46 -0
- agentpool/utils/streams.py +690 -1
- agentpool/utils/subprocess_utils.py +155 -0
- agentpool/utils/token_breakdown.py +461 -0
- {agentpool-2.1.9.dist-info → agentpool-2.2.3.dist-info}/METADATA +27 -7
- {agentpool-2.1.9.dist-info → agentpool-2.2.3.dist-info}/RECORD +170 -112
- {agentpool-2.1.9.dist-info → agentpool-2.2.3.dist-info}/WHEEL +1 -1
- agentpool_cli/__main__.py +4 -0
- agentpool_cli/serve_acp.py +41 -20
- agentpool_cli/serve_agui.py +87 -0
- agentpool_cli/serve_opencode.py +119 -0
- agentpool_commands/__init__.py +30 -0
- agentpool_commands/agents.py +74 -1
- agentpool_commands/history.py +62 -0
- agentpool_commands/mcp.py +176 -0
- agentpool_commands/models.py +56 -3
- agentpool_commands/tools.py +57 -0
- agentpool_commands/utils.py +51 -0
- agentpool_config/builtin_tools.py +77 -22
- agentpool_config/commands.py +24 -1
- agentpool_config/compaction.py +258 -0
- agentpool_config/mcp_server.py +131 -1
- agentpool_config/storage.py +46 -1
- agentpool_config/tools.py +7 -1
- agentpool_config/toolsets.py +92 -148
- agentpool_server/acp_server/acp_agent.py +134 -150
- agentpool_server/acp_server/commands/acp_commands.py +216 -51
- agentpool_server/acp_server/commands/docs_commands/fetch_repo.py +10 -10
- agentpool_server/acp_server/server.py +23 -79
- agentpool_server/acp_server/session.py +181 -19
- agentpool_server/opencode_server/.rules +95 -0
- agentpool_server/opencode_server/ENDPOINTS.md +362 -0
- agentpool_server/opencode_server/__init__.py +27 -0
- agentpool_server/opencode_server/command_validation.py +172 -0
- agentpool_server/opencode_server/converters.py +869 -0
- agentpool_server/opencode_server/dependencies.py +24 -0
- agentpool_server/opencode_server/input_provider.py +269 -0
- agentpool_server/opencode_server/models/__init__.py +228 -0
- agentpool_server/opencode_server/models/agent.py +53 -0
- agentpool_server/opencode_server/models/app.py +60 -0
- agentpool_server/opencode_server/models/base.py +26 -0
- agentpool_server/opencode_server/models/common.py +23 -0
- agentpool_server/opencode_server/models/config.py +37 -0
- agentpool_server/opencode_server/models/events.py +647 -0
- agentpool_server/opencode_server/models/file.py +88 -0
- agentpool_server/opencode_server/models/mcp.py +25 -0
- agentpool_server/opencode_server/models/message.py +162 -0
- agentpool_server/opencode_server/models/parts.py +190 -0
- agentpool_server/opencode_server/models/provider.py +81 -0
- agentpool_server/opencode_server/models/pty.py +43 -0
- agentpool_server/opencode_server/models/session.py +99 -0
- agentpool_server/opencode_server/routes/__init__.py +25 -0
- agentpool_server/opencode_server/routes/agent_routes.py +442 -0
- agentpool_server/opencode_server/routes/app_routes.py +139 -0
- agentpool_server/opencode_server/routes/config_routes.py +241 -0
- agentpool_server/opencode_server/routes/file_routes.py +392 -0
- agentpool_server/opencode_server/routes/global_routes.py +94 -0
- agentpool_server/opencode_server/routes/lsp_routes.py +319 -0
- agentpool_server/opencode_server/routes/message_routes.py +705 -0
- agentpool_server/opencode_server/routes/pty_routes.py +299 -0
- agentpool_server/opencode_server/routes/session_routes.py +1205 -0
- agentpool_server/opencode_server/routes/tui_routes.py +139 -0
- agentpool_server/opencode_server/server.py +430 -0
- agentpool_server/opencode_server/state.py +121 -0
- agentpool_server/opencode_server/time_utils.py +8 -0
- agentpool_storage/__init__.py +16 -0
- agentpool_storage/base.py +103 -0
- agentpool_storage/claude_provider.py +907 -0
- agentpool_storage/file_provider.py +129 -0
- agentpool_storage/memory_provider.py +61 -0
- agentpool_storage/models.py +3 -0
- agentpool_storage/opencode_provider.py +730 -0
- agentpool_storage/project_store.py +325 -0
- agentpool_storage/session_store.py +6 -0
- agentpool_storage/sql_provider/__init__.py +4 -2
- agentpool_storage/sql_provider/models.py +48 -0
- agentpool_storage/sql_provider/sql_provider.py +134 -1
- agentpool_storage/sql_provider/utils.py +10 -1
- agentpool_storage/text_log_provider.py +1 -0
- agentpool_toolsets/builtin/__init__.py +0 -8
- agentpool_toolsets/builtin/code.py +95 -56
- agentpool_toolsets/builtin/debug.py +16 -21
- agentpool_toolsets/builtin/execution_environment.py +99 -103
- agentpool_toolsets/builtin/file_edit/file_edit.py +115 -7
- agentpool_toolsets/builtin/skills.py +86 -4
- agentpool_toolsets/fsspec_toolset/__init__.py +13 -1
- agentpool_toolsets/fsspec_toolset/diagnostics.py +860 -73
- agentpool_toolsets/fsspec_toolset/grep.py +74 -2
- agentpool_toolsets/fsspec_toolset/image_utils.py +161 -0
- agentpool_toolsets/fsspec_toolset/toolset.py +159 -38
- agentpool_toolsets/mcp_discovery/__init__.py +5 -0
- agentpool_toolsets/mcp_discovery/data/mcp_servers.parquet +0 -0
- agentpool_toolsets/mcp_discovery/toolset.py +454 -0
- agentpool_toolsets/mcp_run_toolset.py +84 -6
- agentpool_toolsets/builtin/agent_management.py +0 -239
- agentpool_toolsets/builtin/history.py +0 -36
- agentpool_toolsets/builtin/integration.py +0 -85
- agentpool_toolsets/builtin/tool_management.py +0 -90
- {agentpool-2.1.9.dist-info → agentpool-2.2.3.dist-info}/entry_points.txt +0 -0
- {agentpool-2.1.9.dist-info → agentpool-2.2.3.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""Config and provider routes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections import defaultdict
|
|
6
|
+
from datetime import timedelta
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from fastapi import APIRouter
|
|
10
|
+
|
|
11
|
+
from agentpool_server.opencode_server.dependencies import StateDep # noqa: TC001
|
|
12
|
+
from agentpool_server.opencode_server.models import (
|
|
13
|
+
Config,
|
|
14
|
+
Mode,
|
|
15
|
+
Model,
|
|
16
|
+
ModelCost,
|
|
17
|
+
ModelLimit,
|
|
18
|
+
Provider,
|
|
19
|
+
ProviderListResponse,
|
|
20
|
+
ProvidersResponse,
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from tokonomics.model_discovery.model_info import ModelInfo as TokoModelInfo
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
router = APIRouter(tags=["config"])
|
|
29
|
+
|
|
30
|
+
# Provider display names and environment variable mappings
|
|
31
|
+
PROVIDER_INFO: dict[str, tuple[str, list[str]]] = {
|
|
32
|
+
"anthropic": ("Anthropic", ["ANTHROPIC_API_KEY"]),
|
|
33
|
+
"openai": ("OpenAI", ["OPENAI_API_KEY"]),
|
|
34
|
+
"google": ("Google", ["GOOGLE_API_KEY", "GEMINI_API_KEY"]),
|
|
35
|
+
"mistral": ("Mistral", ["MISTRAL_API_KEY"]),
|
|
36
|
+
"groq": ("Groq", ["GROQ_API_KEY"]),
|
|
37
|
+
"deepseek": ("DeepSeek", ["DEEPSEEK_API_KEY"]),
|
|
38
|
+
"xai": ("xAI", ["XAI_API_KEY"]),
|
|
39
|
+
"together": ("Together AI", ["TOGETHER_API_KEY"]),
|
|
40
|
+
"perplexity": ("Perplexity", ["PERPLEXITY_API_KEY"]),
|
|
41
|
+
"cohere": ("Cohere", ["COHERE_API_KEY"]),
|
|
42
|
+
"fireworks": ("Fireworks AI", ["FIREWORKS_API_KEY"]),
|
|
43
|
+
"openrouter": ("OpenRouter", ["OPENROUTER_API_KEY"]),
|
|
44
|
+
"bedrock": ("AWS Bedrock", ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"]),
|
|
45
|
+
"azure": ("Azure OpenAI", ["AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT"]),
|
|
46
|
+
"vertex": ("Google Vertex AI", ["GOOGLE_APPLICATION_CREDENTIALS"]),
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _convert_toko_model_to_opencode(model: TokoModelInfo) -> Model:
    """Map a tokonomics ModelInfo onto the OpenCode Model schema.

    tokonomics reports prices per single token while OpenCode expects
    prices per million tokens, so every known price is scaled by 1e6.
    Unknown limits fall back to conservative defaults.
    """
    per_million = 1_000_000

    prompt_cost = 0.0
    completion_cost = 0.0
    cache_read_cost = None
    cache_write_cost = None

    pricing = model.pricing
    if pricing:
        if pricing.prompt is not None:
            prompt_cost = pricing.prompt * per_million
        if pricing.completion is not None:
            completion_cost = pricing.completion * per_million
        if pricing.input_cache_read is not None:
            cache_read_cost = pricing.input_cache_read * per_million
        if pricing.input_cache_write is not None:
            cache_write_cost = pricing.input_cache_write * per_million

    cost = ModelCost(
        input=prompt_cost,
        output=completion_cost,
        cache_read=cache_read_cost,
        cache_write=cache_write_cost,
    )

    # Defaults: 128k context / 4k output when the catalogue has no data.
    limit = ModelLimit(
        context=float(model.context_window) if model.context_window else 128000.0,
        output=float(model.max_output_tokens) if model.max_output_tokens else 4096.0,
    )

    # Vision support is inferred from the input modalities; reasoning from
    # the output modalities or a "thinking" marker in the model name.
    supports_vision = "image" in model.input_modalities
    supports_reasoning = (
        "reasoning" in model.output_modalities or "thinking" in model.name.lower()
    )

    released = model.created_at.strftime("%Y-%m-%d") if model.created_at else ""

    return Model(
        id=model.id,
        name=model.name,
        attachment=supports_vision,
        cost=cost,
        limit=limit,
        reasoning=supports_reasoning,
        release_date=released,
        temperature=True,
        tool_call=True,  # Assume most models support tool calling
    )
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _group_models_by_provider(
|
|
104
|
+
models: list[TokoModelInfo],
|
|
105
|
+
) -> dict[str, list[TokoModelInfo]]:
|
|
106
|
+
"""Group models by their provider."""
|
|
107
|
+
grouped: dict[str, list[TokoModelInfo]] = defaultdict(list)
|
|
108
|
+
for model in models:
|
|
109
|
+
# Skip embedding models - OpenCode is for chat/agent models
|
|
110
|
+
if model.is_embedding:
|
|
111
|
+
continue
|
|
112
|
+
grouped[model.provider].append(model)
|
|
113
|
+
return grouped
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def _build_providers(models: list[TokoModelInfo]) -> list[Provider]:
    """Build Provider list from tokonomics models.

    Providers are emitted in sorted id order; unknown providers get a
    title-cased display name and a guessed ``<ID>_API_KEY`` env var.
    """
    result: list[Provider] = []
    grouped = _group_models_by_provider(models)

    for provider_id, provider_models in sorted(grouped.items()):
        fallback = (provider_id.title(), [f"{provider_id.upper()}_API_KEY"])
        display_name, env_vars = PROVIDER_INFO.get(provider_id, fallback)

        # Convert each model to the OpenCode schema, keyed by model id.
        converted = {
            toko_model.id: _convert_toko_model_to_opencode(toko_model)
            for toko_model in provider_models
        }

        result.append(
            Provider(
                id=provider_id,
                name=display_name,
                env=env_vars,
                models=converted,
            )
        )

    return result
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
async def _get_available_models() -> list[TokoModelInfo]:
    """Fetch available models using tokonomics.

    Model catalogues change rarely, so a week-old cache is acceptable.
    """
    from tokonomics.model_discovery import get_all_models

    return await get_all_models(max_age=timedelta(days=7))
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
@router.get("/config")
async def get_config(state: StateDep) -> Config:
    """Return the server configuration (currently all defaults)."""
    _ = state  # reserved for future per-server configuration
    return Config()
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _get_dummy_providers() -> list[Provider]:
    """Return a single dummy provider for testing."""
    # A plausible GPT-4o entry so clients have something to render even
    # when no real model catalogue is available.
    model = Model(
        id="gpt-4o",
        name="GPT-4o",
        attachment=True,
        cost=ModelCost(input=5.0, output=15.0),
        limit=ModelLimit(context=128000.0, output=4096.0),
        reasoning=False,
        release_date="2024-05-13",
        temperature=True,
        tool_call=True,
    )
    return [
        Provider(
            id="openai",
            name="OpenAI",
            env=["OPENAI_API_KEY"],
            models={"gpt-4o": model},
        )
    ]
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
@router.get("/config/providers")
async def get_providers(state: StateDep) -> ProvidersResponse:
    """Get available providers and models from agent.

    Best effort: any failure while querying the agent falls back to the
    dummy provider list.
    """
    providers: list[Provider] = []
    try:
        if toko_models := await state.agent.get_available_models():
            providers = _build_providers(toko_models)
    except Exception:  # noqa: BLE001
        pass  # swallow and fall back below
    return ProvidersResponse(providers=providers or _get_dummy_providers())
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
@router.get("/provider")
async def list_providers(state: StateDep) -> ProviderListResponse:
    """List all providers.

    A provider counts as "connected" when at least one of its credential
    environment variables is set.
    """
    import os

    providers: list[Provider] = []
    # Best effort: any failure while querying the agent falls back to
    # the dummy provider list.
    try:
        if toko_models := await state.agent.get_available_models():
            providers = _build_providers(toko_models)
    except Exception:  # noqa: BLE001
        pass  # swallow and fall back below
    if not providers:
        providers = _get_dummy_providers()

    connected = [
        candidate.id
        for candidate in providers
        if any(os.environ.get(var) for var in candidate.env)
    ]

    return ProviderListResponse(all=providers, default={}, connected=connected)
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
@router.get("/mode")
async def list_modes(state: StateDep) -> list[Mode]:
    """List available modes (only "default" for now)."""
    _ = state  # reserved for future per-agent mode configuration
    default_mode = Mode(name="default", tools={})
    return [default_mode]
|
|
@@ -0,0 +1,392 @@
|
|
|
1
|
+
"""File operation routes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import fnmatch
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
import re
|
|
8
|
+
from typing import TYPE_CHECKING, Any
|
|
9
|
+
|
|
10
|
+
from fastapi import APIRouter, HTTPException, Query
|
|
11
|
+
|
|
12
|
+
from agentpool_server.opencode_server.dependencies import StateDep # noqa: TC001
|
|
13
|
+
from agentpool_server.opencode_server.models import ( # noqa: TC001
|
|
14
|
+
FileContent,
|
|
15
|
+
FileNode,
|
|
16
|
+
FindMatch,
|
|
17
|
+
Symbol,
|
|
18
|
+
)
|
|
19
|
+
from agentpool_server.opencode_server.models.file import SubmatchInfo
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
if TYPE_CHECKING:
|
|
23
|
+
from fsspec.asyn import AsyncFileSystem
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
router = APIRouter(tags=["file"])
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# Directories to skip when searching
|
|
30
|
+
SKIP_DIRS = {".git", "node_modules", "__pycache__", ".venv", "venv", ".tox", "dist", "build"}
|
|
31
|
+
|
|
32
|
+
# Sensitive files that should never be exposed via the API
|
|
33
|
+
BLOCKED_FILES = {".env", ".env.local", ".env.production", ".env.development", ".env.test"}
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def _validate_path(root: Path, user_path: str) -> Path:
    """Validate and resolve a user-provided path, ensuring it stays within root.

    Args:
        root: The root directory (working_dir) that paths must stay within.
        user_path: The user-provided relative path.

    Returns:
        The resolved absolute path, guaranteed to live under ``root``.

    Raises:
        HTTPException: If the path escapes root or names a protected file.
    """
    # Resolving both sides neutralises "..", symlinks inside the path,
    # and symlinks in the root itself.
    resolved_root = root.resolve()
    target = (root / user_path).resolve()

    if not target.is_relative_to(resolved_root):
        raise HTTPException(
            status_code=403,
            detail="Access denied: path escapes project directory",
        )

    # Sensitive files (e.g. .env variants) are never exposed.
    if target.name in BLOCKED_FILES:
        raise HTTPException(
            status_code=403,
            detail=f"Access denied: {target.name} files are protected",
        )

    return target
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _validate_path_str(root: str, user_path: str) -> str:
    """Validate path for fsspec filesystem (string-based).

    Thin string-in/string-out wrapper around :func:`_validate_path` for
    fsspec APIs.

    Args:
        root: The root directory path as string.
        user_path: The user-provided relative path.

    Returns:
        The validated absolute path as string.

    Raises:
        HTTPException: If the path escapes root or is blocked.
    """
    return str(_validate_path(Path(root), user_path))
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _get_fs(state: StateDep) -> tuple[AsyncFileSystem, str] | None:
    """Get the fsspec filesystem from the agent's environment if available.

    Returns:
        ``(filesystem, base_path)`` where ``base_path`` is the root
        directory for operations, or ``None`` when the environment does
        not expose a filesystem (callers then fall back to local paths).
    """
    try:
        environment = state.agent.env
        filesystem = environment.get_fs()
        # Prefer the environment's cwd; fall back to the server's working dir.
        base = environment.cwd or state.working_dir
    except NotImplementedError:
        return None
    else:
        return (filesystem, base)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@router.get("/file")
async def list_files(state: StateDep, path: str = Query(default="")) -> list[FileNode]:
    """List files in a directory.

    Uses the agent's fsspec filesystem when available, otherwise local
    Path operations. Entries named in BLOCKED_FILES are omitted, and
    directories sort before files (each group case-insensitively).

    Raises:
        HTTPException: 403 if ``path`` escapes the working dir or is
        protected; 404 if the directory does not exist.
    """
    working_path = Path(state.working_dir)

    # Validate path if provided (empty path means root, which is always valid)
    target_p = _validate_path(working_path, path) if path else working_path.resolve()

    fs_info = _get_fs(state)

    if fs_info is not None:
        fs, _base_path = fs_info
        # Use fsspec filesystem with validated path
        # (underscore-prefixed fsspec methods are the async variants)
        target = str(target_p)
        try:
            if not await fs._isdir(target):
                raise HTTPException(status_code=404, detail="Directory not found")

            entries = await fs._ls(target, detail=True)
            nodes = []
            resolved_root = working_path.resolve()
            for entry in entries:
                # fsspec "name" is the full path; the basename is the last segment
                full_name = entry.get("name", "")
                name = full_name.split("/")[-1]
                if not name:
                    continue
                # Skip blocked files in directory listings
                if name in BLOCKED_FILES:
                    continue
                node_type = "directory" if entry.get("type") == "directory" else "file"
                size = entry.get("size") if node_type == "file" else None
                # Build relative path from resolved root
                entry_path = Path(full_name)
                try:
                    rel_path = str(entry_path.relative_to(resolved_root))
                except ValueError:
                    # Entry lies outside the resolved root (e.g. differing
                    # mount prefix); fall back to the bare basename.
                    rel_path = name
                nodes.append(FileNode(name=name, path=rel_path or name, type=node_type, size=size))
            # Directories first, then case-insensitive alphabetical
            return sorted(nodes, key=lambda n: (n.type != "directory", n.name.lower()))
        except FileNotFoundError as err:
            raise HTTPException(status_code=404, detail="Directory not found") from err
    else:
        # Fallback to local Path operations
        if not target_p.is_dir():
            raise HTTPException(status_code=404, detail="Directory not found")

        nodes = []
        resolved_root = working_path.resolve()
        for entry in target_p.iterdir():
            # Skip blocked files in directory listings
            if entry.name in BLOCKED_FILES:
                continue
            node_type = "directory" if entry.is_dir() else "file"
            size = entry.stat().st_size if entry.is_file() else None
            rel_path = str(entry.relative_to(resolved_root))
            nodes.append(FileNode(name=entry.name, path=rel_path, type=node_type, size=size))

        # Directories first, then case-insensitive alphabetical
        return sorted(nodes, key=lambda n: (n.type != "directory", n.name.lower()))
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
@router.get("/file/content")
async def read_file(state: StateDep, path: str = Query()) -> FileContent:
    """Read a file's content.

    Raises:
        HTTPException: 403 for path escapes / protected files, 404 when
        the file does not exist, 400 for non-UTF-8 (binary) content.
    """
    working_path = Path(state.working_dir)

    # Validate path - this checks for traversal, symlink escapes, and blocked files
    target = _validate_path(working_path, path)

    fs_info = _get_fs(state)

    if fs_info is not None:
        fs, _base_path = fs_info
        # Use fsspec filesystem with validated path
        # (underscore-prefixed fsspec methods are the async variants)
        full_path = str(target)
        try:
            if not await fs._isfile(full_path):
                raise HTTPException(status_code=404, detail="File not found")
            content = await fs._cat_file(full_path)
            if isinstance(content, bytes):
                # fsspec backends may return raw bytes; decode as UTF-8 text
                content = content.decode("utf-8")
            # Echo back the caller's (relative) path, not the resolved one
            return FileContent(path=path, content=content)
        except FileNotFoundError as err:
            raise HTTPException(status_code=404, detail="File not found") from err
        except UnicodeDecodeError as err:
            raise HTTPException(status_code=400, detail="Cannot read binary file") from err
    else:
        # Fallback to local Path operations (target already validated)
        if not target.is_file():
            raise HTTPException(status_code=404, detail="File not found")

        try:
            content = target.read_text(encoding="utf-8")
            return FileContent(path=path, content=content)
        except UnicodeDecodeError as err:
            raise HTTPException(status_code=400, detail="Cannot read binary file") from err
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@router.get("/file/status")
async def get_file_status(state: StateDep) -> list[dict[str, Any]]:
    """Report the status of tracked files.

    File tracking is not implemented yet, so this always yields an
    empty list.
    """
    _ = state  # unused until file tracking lands
    return []
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
@router.get("/find")
async def find_text(state: StateDep, pattern: str = Query()) -> list[FindMatch]:  # noqa: PLR0915
    """Search for text pattern in files using regex.

    Searches recursively under the working directory (fsspec when
    available, local paths otherwise), skipping SKIP_DIRS and - unlike
    the original implementation - BLOCKED_FILES, so /find cannot leak
    the contents of protected files such as .env. Returns at most 100
    matches.

    Raises:
        HTTPException: 400 if ``pattern`` is not a valid regex.
    """
    try:
        regex = re.compile(pattern)
    except re.error as e:
        raise HTTPException(status_code=400, detail=f"Invalid regex: {e}") from e

    matches: list[FindMatch] = []
    max_matches = 100
    fs_info = _get_fs(state)

    def collect(rel_path: str, content: str) -> bool:
        """Append matches for one file's content; True when the cap is hit."""
        for line_num, line in enumerate(content.splitlines(), 1):
            for match in regex.finditer(line):
                submatches = [
                    SubmatchInfo.create(match.group(), match.start(), match.end())
                ]
                matches.append(
                    FindMatch.create(
                        path=rel_path,
                        lines=line.strip(),
                        line_number=line_num,
                        absolute_offset=match.start(),
                        submatches=submatches,
                    )
                )
                if len(matches) >= max_matches:
                    return True
        return False

    if fs_info is not None:
        fs, base_path = fs_info

        async def search_fs() -> None:
            try:
                # Use find to get all files recursively (limit depth to
                # avoid scanning huge trees).
                all_files = await fs._find(base_path, maxdepth=10, withdirs=False)
                for file_path in all_files:
                    if len(matches) >= max_matches:
                        return

                    parts = file_path.split("/")
                    # Skip ignored directories and protected files.
                    if any(part in SKIP_DIRS for part in parts):
                        continue
                    if parts[-1] in BLOCKED_FILES:
                        continue

                    # Path relative to the search root, when possible.
                    if file_path.startswith(base_path):
                        rel_path = file_path[len(base_path) :].lstrip("/")
                    else:
                        rel_path = file_path

                    try:
                        content = await fs._cat_file(file_path)
                        if isinstance(content, bytes):
                            content = content.decode("utf-8")
                    except (UnicodeDecodeError, PermissionError, OSError):
                        continue  # binary or unreadable: skip this file
                    if collect(rel_path, content):
                        return
            except Exception:  # noqa: BLE001
                pass  # best effort: return whatever was found so far

        await search_fs()
    else:
        # Fallback to local Path operations
        working_path = Path(state.working_dir)

        def search_dir(dir_path: Path) -> None:
            if len(matches) >= max_matches:
                return

            try:
                entries = list(dir_path.iterdir())
            except (PermissionError, OSError):
                return  # unreadable directory: skip rather than 500

            for entry in entries:
                if len(matches) >= max_matches:
                    return

                if entry.is_dir():
                    if entry.name not in SKIP_DIRS:
                        search_dir(entry)
                elif entry.is_file():
                    # Never grep protected files (consistent with /file/content).
                    if entry.name in BLOCKED_FILES:
                        continue
                    try:
                        content = entry.read_text(encoding="utf-8")
                    except (UnicodeDecodeError, PermissionError, OSError):
                        continue  # binary or unreadable: skip this file
                    if collect(str(entry.relative_to(working_path)), content):
                        return

        search_dir(working_path)

    return matches
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
@router.get("/find/file")
async def find_files(
    state: StateDep,
    query: str = Query(),
    dirs: str = Query(default="false"),
) -> list[str]:
    """Find files by name pattern (glob-style matching).

    Args:
        state: Server state providing the agent and working directory.
        query: fnmatch-style pattern matched against entry basenames.
        dirs: "true" to include directories in the results.

    Returns:
        Up to 100 matching paths relative to the working directory,
        sorted. Entries named in BLOCKED_FILES are never reported,
        matching the other file endpoints.
    """
    include_dirs = dirs.lower() == "true"
    results: list[str] = []
    max_results = 100
    fs_info = _get_fs(state)

    if fs_info is not None:
        fs, base_path = fs_info
        # Use fsspec filesystem
        try:
            # Get all entries recursively (limit depth to avoid scanning huge trees)
            all_entries = await fs._find(base_path, maxdepth=10, withdirs=include_dirs)
            for entry_path in all_entries:
                if len(results) >= max_results:
                    break

                parts = entry_path.split("/")
                # Skip ignored directories and protected filenames.
                if any(part in SKIP_DIRS for part in parts):
                    continue

                name = parts[-1] if parts else entry_path
                if name in BLOCKED_FILES:
                    continue
                if fnmatch.fnmatch(name, query):
                    # Path relative to the search root, when possible.
                    if entry_path.startswith(base_path):
                        rel_path = entry_path[len(base_path) :].lstrip("/")
                    else:
                        rel_path = entry_path
                    results.append(rel_path)
        except Exception:  # noqa: BLE001
            pass  # best effort: return whatever was found so far
    else:
        # Fallback to local Path operations
        working_path = Path(state.working_dir)

        def search_dir(dir_path: Path) -> None:
            if len(results) >= max_results:
                return

            try:
                entries = list(dir_path.iterdir())
            except (PermissionError, OSError):
                return  # unreadable directory: skip rather than 500

            for entry in entries:
                if len(results) >= max_results:
                    return

                # Protected filenames are never reported.
                if entry.name in BLOCKED_FILES:
                    continue
                if entry.is_dir():
                    if entry.name not in SKIP_DIRS:
                        if include_dirs and fnmatch.fnmatch(entry.name, query):
                            results.append(str(entry.relative_to(working_path)))
                        search_dir(entry)
                elif entry.is_file() and fnmatch.fnmatch(entry.name, query):
                    results.append(str(entry.relative_to(working_path)))

        search_dir(working_path)

    return sorted(results)
|
|
381
|
+
|
|
382
|
+
|
|
383
|
+
@router.get("/find/symbol")
async def find_symbols(state: StateDep, query: str = Query()) -> list[Symbol]:
    """Find workspace symbols.

    LSP-backed symbol search is not wired up yet, so this always
    returns an empty list.
    """
    _ = (state, query)  # unused until symbol search is implemented
    # TODO: Integrate with LSP or implement basic symbol extraction
    return []
|