foundry-mcp 0.8.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of foundry-mcp might be problematic. Click here for more details.
- foundry_mcp/__init__.py +13 -0
- foundry_mcp/cli/__init__.py +67 -0
- foundry_mcp/cli/__main__.py +9 -0
- foundry_mcp/cli/agent.py +96 -0
- foundry_mcp/cli/commands/__init__.py +37 -0
- foundry_mcp/cli/commands/cache.py +137 -0
- foundry_mcp/cli/commands/dashboard.py +148 -0
- foundry_mcp/cli/commands/dev.py +446 -0
- foundry_mcp/cli/commands/journal.py +377 -0
- foundry_mcp/cli/commands/lifecycle.py +274 -0
- foundry_mcp/cli/commands/modify.py +824 -0
- foundry_mcp/cli/commands/plan.py +640 -0
- foundry_mcp/cli/commands/pr.py +393 -0
- foundry_mcp/cli/commands/review.py +667 -0
- foundry_mcp/cli/commands/session.py +472 -0
- foundry_mcp/cli/commands/specs.py +686 -0
- foundry_mcp/cli/commands/tasks.py +807 -0
- foundry_mcp/cli/commands/testing.py +676 -0
- foundry_mcp/cli/commands/validate.py +982 -0
- foundry_mcp/cli/config.py +98 -0
- foundry_mcp/cli/context.py +298 -0
- foundry_mcp/cli/logging.py +212 -0
- foundry_mcp/cli/main.py +44 -0
- foundry_mcp/cli/output.py +122 -0
- foundry_mcp/cli/registry.py +110 -0
- foundry_mcp/cli/resilience.py +178 -0
- foundry_mcp/cli/transcript.py +217 -0
- foundry_mcp/config.py +1454 -0
- foundry_mcp/core/__init__.py +144 -0
- foundry_mcp/core/ai_consultation.py +1773 -0
- foundry_mcp/core/batch_operations.py +1202 -0
- foundry_mcp/core/cache.py +195 -0
- foundry_mcp/core/capabilities.py +446 -0
- foundry_mcp/core/concurrency.py +898 -0
- foundry_mcp/core/context.py +540 -0
- foundry_mcp/core/discovery.py +1603 -0
- foundry_mcp/core/error_collection.py +728 -0
- foundry_mcp/core/error_store.py +592 -0
- foundry_mcp/core/health.py +749 -0
- foundry_mcp/core/intake.py +933 -0
- foundry_mcp/core/journal.py +700 -0
- foundry_mcp/core/lifecycle.py +412 -0
- foundry_mcp/core/llm_config.py +1376 -0
- foundry_mcp/core/llm_patterns.py +510 -0
- foundry_mcp/core/llm_provider.py +1569 -0
- foundry_mcp/core/logging_config.py +374 -0
- foundry_mcp/core/metrics_persistence.py +584 -0
- foundry_mcp/core/metrics_registry.py +327 -0
- foundry_mcp/core/metrics_store.py +641 -0
- foundry_mcp/core/modifications.py +224 -0
- foundry_mcp/core/naming.py +146 -0
- foundry_mcp/core/observability.py +1216 -0
- foundry_mcp/core/otel.py +452 -0
- foundry_mcp/core/otel_stubs.py +264 -0
- foundry_mcp/core/pagination.py +255 -0
- foundry_mcp/core/progress.py +387 -0
- foundry_mcp/core/prometheus.py +564 -0
- foundry_mcp/core/prompts/__init__.py +464 -0
- foundry_mcp/core/prompts/fidelity_review.py +691 -0
- foundry_mcp/core/prompts/markdown_plan_review.py +515 -0
- foundry_mcp/core/prompts/plan_review.py +627 -0
- foundry_mcp/core/providers/__init__.py +237 -0
- foundry_mcp/core/providers/base.py +515 -0
- foundry_mcp/core/providers/claude.py +472 -0
- foundry_mcp/core/providers/codex.py +637 -0
- foundry_mcp/core/providers/cursor_agent.py +630 -0
- foundry_mcp/core/providers/detectors.py +515 -0
- foundry_mcp/core/providers/gemini.py +426 -0
- foundry_mcp/core/providers/opencode.py +718 -0
- foundry_mcp/core/providers/opencode_wrapper.js +308 -0
- foundry_mcp/core/providers/package-lock.json +24 -0
- foundry_mcp/core/providers/package.json +25 -0
- foundry_mcp/core/providers/registry.py +607 -0
- foundry_mcp/core/providers/test_provider.py +171 -0
- foundry_mcp/core/providers/validation.py +857 -0
- foundry_mcp/core/rate_limit.py +427 -0
- foundry_mcp/core/research/__init__.py +68 -0
- foundry_mcp/core/research/memory.py +528 -0
- foundry_mcp/core/research/models.py +1234 -0
- foundry_mcp/core/research/providers/__init__.py +40 -0
- foundry_mcp/core/research/providers/base.py +242 -0
- foundry_mcp/core/research/providers/google.py +507 -0
- foundry_mcp/core/research/providers/perplexity.py +442 -0
- foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
- foundry_mcp/core/research/providers/tavily.py +383 -0
- foundry_mcp/core/research/workflows/__init__.py +25 -0
- foundry_mcp/core/research/workflows/base.py +298 -0
- foundry_mcp/core/research/workflows/chat.py +271 -0
- foundry_mcp/core/research/workflows/consensus.py +539 -0
- foundry_mcp/core/research/workflows/deep_research.py +4142 -0
- foundry_mcp/core/research/workflows/ideate.py +682 -0
- foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
- foundry_mcp/core/resilience.py +600 -0
- foundry_mcp/core/responses.py +1624 -0
- foundry_mcp/core/review.py +366 -0
- foundry_mcp/core/security.py +438 -0
- foundry_mcp/core/spec.py +4119 -0
- foundry_mcp/core/task.py +2463 -0
- foundry_mcp/core/testing.py +839 -0
- foundry_mcp/core/validation.py +2357 -0
- foundry_mcp/dashboard/__init__.py +32 -0
- foundry_mcp/dashboard/app.py +119 -0
- foundry_mcp/dashboard/components/__init__.py +17 -0
- foundry_mcp/dashboard/components/cards.py +88 -0
- foundry_mcp/dashboard/components/charts.py +177 -0
- foundry_mcp/dashboard/components/filters.py +136 -0
- foundry_mcp/dashboard/components/tables.py +195 -0
- foundry_mcp/dashboard/data/__init__.py +11 -0
- foundry_mcp/dashboard/data/stores.py +433 -0
- foundry_mcp/dashboard/launcher.py +300 -0
- foundry_mcp/dashboard/views/__init__.py +12 -0
- foundry_mcp/dashboard/views/errors.py +217 -0
- foundry_mcp/dashboard/views/metrics.py +164 -0
- foundry_mcp/dashboard/views/overview.py +96 -0
- foundry_mcp/dashboard/views/providers.py +83 -0
- foundry_mcp/dashboard/views/sdd_workflow.py +255 -0
- foundry_mcp/dashboard/views/tool_usage.py +139 -0
- foundry_mcp/prompts/__init__.py +9 -0
- foundry_mcp/prompts/workflows.py +525 -0
- foundry_mcp/resources/__init__.py +9 -0
- foundry_mcp/resources/specs.py +591 -0
- foundry_mcp/schemas/__init__.py +38 -0
- foundry_mcp/schemas/intake-schema.json +89 -0
- foundry_mcp/schemas/sdd-spec-schema.json +414 -0
- foundry_mcp/server.py +150 -0
- foundry_mcp/tools/__init__.py +10 -0
- foundry_mcp/tools/unified/__init__.py +92 -0
- foundry_mcp/tools/unified/authoring.py +3620 -0
- foundry_mcp/tools/unified/context_helpers.py +98 -0
- foundry_mcp/tools/unified/documentation_helpers.py +268 -0
- foundry_mcp/tools/unified/environment.py +1341 -0
- foundry_mcp/tools/unified/error.py +479 -0
- foundry_mcp/tools/unified/health.py +225 -0
- foundry_mcp/tools/unified/journal.py +841 -0
- foundry_mcp/tools/unified/lifecycle.py +640 -0
- foundry_mcp/tools/unified/metrics.py +777 -0
- foundry_mcp/tools/unified/plan.py +876 -0
- foundry_mcp/tools/unified/pr.py +294 -0
- foundry_mcp/tools/unified/provider.py +589 -0
- foundry_mcp/tools/unified/research.py +1283 -0
- foundry_mcp/tools/unified/review.py +1042 -0
- foundry_mcp/tools/unified/review_helpers.py +314 -0
- foundry_mcp/tools/unified/router.py +102 -0
- foundry_mcp/tools/unified/server.py +565 -0
- foundry_mcp/tools/unified/spec.py +1283 -0
- foundry_mcp/tools/unified/task.py +3846 -0
- foundry_mcp/tools/unified/test.py +431 -0
- foundry_mcp/tools/unified/verification.py +520 -0
- foundry_mcp-0.8.22.dist-info/METADATA +344 -0
- foundry_mcp-0.8.22.dist-info/RECORD +153 -0
- foundry_mcp-0.8.22.dist-info/WHEEL +4 -0
- foundry_mcp-0.8.22.dist-info/entry_points.txt +3 -0
- foundry_mcp-0.8.22.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,718 @@
|
|
|
1
|
+
"""
|
|
2
|
+
OpenCode AI provider implementation.
|
|
3
|
+
|
|
4
|
+
Bridges the OpenCode AI Node.js SDK wrapper to the ProviderContext contract by
|
|
5
|
+
handling availability checks, server management, wrapper script execution,
|
|
6
|
+
response parsing, and token usage normalization.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import json
|
|
12
|
+
import logging
|
|
13
|
+
import os
|
|
14
|
+
import socket
|
|
15
|
+
import subprocess
|
|
16
|
+
import tempfile
|
|
17
|
+
import time
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, Dict, List, Optional, Protocol, Sequence
|
|
20
|
+
|
|
21
|
+
from .base import (
|
|
22
|
+
ProviderCapability,
|
|
23
|
+
ProviderContext,
|
|
24
|
+
ProviderExecutionError,
|
|
25
|
+
ProviderHooks,
|
|
26
|
+
ProviderMetadata,
|
|
27
|
+
ProviderRequest,
|
|
28
|
+
ProviderResult,
|
|
29
|
+
ProviderStatus,
|
|
30
|
+
ProviderTimeoutError,
|
|
31
|
+
ProviderUnavailableError,
|
|
32
|
+
StreamChunk,
|
|
33
|
+
TokenUsage,
|
|
34
|
+
)
|
|
35
|
+
from .detectors import detect_provider_availability
|
|
36
|
+
from .registry import register_provider
|
|
37
|
+
|
|
38
|
+
logger = logging.getLogger(__name__)

# Executable used to launch the JS wrapper (overridable via OPENCODE_BINARY).
DEFAULT_BINARY = "node"
# Bundled Node.js bridge that talks to the OpenCode SDK.
DEFAULT_WRAPPER_SCRIPT = Path(__file__).parent / "opencode_wrapper.js"
# Per-request wall-clock limit for the wrapper subprocess, in seconds.
DEFAULT_TIMEOUT_SECONDS = 360
# Default address where the local OpenCode server is expected to listen.
DEFAULT_SERVER_URL = "http://localhost:4096"
# Maximum seconds to wait for a freshly spawned server to pass health checks.
SERVER_STARTUP_TIMEOUT = 30
# Environment variable names honoured by this module.
# NOTE(review): AVAILABILITY_OVERRIDE_ENV is not referenced elsewhere in this
# module — presumably consumed by detect_provider_availability; confirm.
AVAILABILITY_OVERRIDE_ENV = "OPENCODE_AVAILABLE_OVERRIDE"
CUSTOM_BINARY_ENV = "OPENCODE_BINARY"
CUSTOM_WRAPPER_ENV = "OPENCODE_WRAPPER_SCRIPT"
|
|
48
|
+
|
|
49
|
+
# Read-only tools configuration for OpenCode server
# Uses dual-layer protection: tool disabling + permission denial
# (serialized verbatim into a temporary opencode.json by _create_readonly_config;
# key order is preserved in the written file).
READONLY_TOOLS_CONFIG = {
    "$schema": "https://opencode.ai/config.json",
    "tools": {
        # Disable write operations
        "write": False,
        "edit": False,
        "patch": False,
        "todowrite": False,
        # Disable shell execution
        "bash": False,
        # Enable read operations
        "read": True,
        "grep": True,
        "glob": True,
        "list": True,
        "todoread": True,
        "task": True,
        # Disable web operations (data exfiltration risk)
        "webfetch": False,
    },
    "permission": {
        # Double-guard with permission denials
        "edit": "deny",
        "bash": "deny",
        "webfetch": "deny",
        "external_directory": "deny",
    },
}
|
|
79
|
+
|
|
80
|
+
# System prompt warning about tool limitations.
# Appended to every prompt by _build_prompt so the model is told up front that
# writes, shell execution, and web access are blocked server-side.
SHELL_COMMAND_WARNING = """
IMPORTANT SECURITY NOTE: This session is running in read-only mode with the following restrictions:
1. File write operations (write, edit, patch) are disabled
2. Shell command execution (bash) is disabled
3. Web operations (webfetch) are disabled to prevent data exfiltration
4. Only read operations are available (read, grep, glob, list)
5. Attempts to modify files, execute commands, or access the web will be blocked by the server
"""
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class RunnerProtocol(Protocol):
    """Callable signature used for executing Node.js wrapper commands.

    Matches the shape of ``_default_runner``; tests can inject a fake with
    this signature to avoid spawning real subprocesses.
    """

    def __call__(
        self,
        command: Sequence[str],
        *,
        timeout: Optional[int] = None,
        env: Optional[Dict[str, str]] = None,
        input_data: Optional[str] = None,
    ) -> subprocess.CompletedProcess[str]:
        raise NotImplementedError
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def _default_runner(
    command: Sequence[str],
    *,
    timeout: Optional[int] = None,
    env: Optional[Dict[str, str]] = None,
    input_data: Optional[str] = None,
) -> subprocess.CompletedProcess[str]:
    """Run the OpenCode wrapper command and capture its text output.

    Thin production implementation of :class:`RunnerProtocol`; stdout and
    stderr are captured as text and a non-zero exit does not raise.
    """
    run_options: Dict[str, Any] = {
        "capture_output": True,
        "text": True,
        "input": input_data,
        "timeout": timeout,
        "env": env,
        "check": False,
    }
    # noqa: S603,S607 - intentional wrapper invocation
    return subprocess.run(list(command), **run_options)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
# Static descriptor handed to the provider registry; advertises the read-only
# posture enforced by READONLY_TOOLS_CONFIG.
OPENCODE_METADATA = ProviderMetadata(
    provider_id="opencode",
    display_name="OpenCode AI SDK",
    models=[],  # Model validation delegated to CLI
    default_model="openai/gpt-5.1-codex-mini",
    capabilities={ProviderCapability.TEXT, ProviderCapability.STREAMING},
    security_flags={"writes_allowed": False, "read_only": True},
    extra={
        "wrapper": "opencode_wrapper.js",
        "server_url": DEFAULT_SERVER_URL,
        "configurable": True,
        "readonly_config": READONLY_TOOLS_CONFIG,
    },
)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
class OpenCodeProvider(ProviderContext):
    """ProviderContext implementation backed by the OpenCode AI wrapper.

    Spawns/uses a local OpenCode server and shells out to a Node.js wrapper
    script for each generation request.
    """

    def __init__(
        self,
        metadata: ProviderMetadata,
        hooks: ProviderHooks,
        *,
        model: Optional[str] = None,
        binary: Optional[str] = None,
        wrapper_path: Optional[Path] = None,
        runner: Optional[RunnerProtocol] = None,
        env: Optional[Dict[str, str]] = None,
        timeout: Optional[int] = None,
    ):
        """Initialise the provider.

        Args:
            metadata: Static provider metadata (id, models, capabilities).
            hooks: Callback hooks supplied by the provider framework.
            model: Model id override; falls back to ``metadata.default_model``.
            binary: Node.js executable; defaults to $OPENCODE_BINARY or "node".
            wrapper_path: JS wrapper path; defaults to the bundled script
                (overridable via $OPENCODE_WRAPPER_SCRIPT).
            runner: Injectable subprocess runner (tests use fakes).
            env: Extra environment variables merged over ``os.environ``.
            timeout: Per-request timeout in seconds (default 360).

        Raises:
            ProviderExecutionError: if the resolved model id is empty/blank.
        """
        super().__init__(metadata, hooks)
        self._runner = runner or _default_runner
        self._binary = binary or os.environ.get(CUSTOM_BINARY_ENV, DEFAULT_BINARY)
        self._wrapper_path = wrapper_path or Path(
            os.environ.get(CUSTOM_WRAPPER_ENV, str(DEFAULT_WRAPPER_SCRIPT))
        )

        # Prepare environment for subprocess with secure API key handling
        self._env = self._prepare_subprocess_env(env)

        self._timeout = timeout or DEFAULT_TIMEOUT_SECONDS

        # Validate model - reject empty or whitespace-only strings
        effective_model = model or metadata.default_model or "openai/gpt-5.1-codex-mini"
        if not effective_model or not effective_model.strip():
            raise ProviderExecutionError(
                "Model identifier cannot be empty",
                provider="opencode",
            )
        self._model = effective_model

        # Lazily started local server and its temporary config file; both are
        # torn down in __del__ / _cleanup_config_file.
        self._server_process: Optional[subprocess.Popen[bytes]] = None
        self._config_file_path: Optional[Path] = None
|
|
178
|
+
|
|
179
|
+
    def __del__(self) -> None:
        """Clean up server process and config file on provider destruction.

        NOTE(review): finalizer-based cleanup is best-effort — ``__del__`` is
        not guaranteed to run at interpreter shutdown; an explicit ``close()``
        or context-manager API would be more reliable.
        """
        # Clean up server process (hasattr guards a partially built instance)
        if hasattr(self, "_server_process") and self._server_process is not None:
            try:
                self._server_process.terminate()
                # Give it a moment to terminate gracefully
                try:
                    self._server_process.wait(timeout=5)
                except subprocess.TimeoutExpired:
                    # Force kill if it doesn't terminate
                    self._server_process.kill()
            except (OSError, ProcessLookupError):
                # Process already terminated, ignore
                pass
            finally:
                self._server_process = None

        # Clean up config file
        self._cleanup_config_file()
|
|
199
|
+
|
|
200
|
+
def _prepare_subprocess_env(
|
|
201
|
+
self, custom_env: Optional[Dict[str, str]]
|
|
202
|
+
) -> Dict[str, str]:
|
|
203
|
+
"""
|
|
204
|
+
Prepare environment variables for subprocess execution.
|
|
205
|
+
|
|
206
|
+
Merges current process environment with custom overrides and ensures
|
|
207
|
+
required OpenCode variables are present.
|
|
208
|
+
"""
|
|
209
|
+
# Start with a copy of the current environment
|
|
210
|
+
subprocess_env = os.environ.copy()
|
|
211
|
+
|
|
212
|
+
# Merge custom environment if provided
|
|
213
|
+
if custom_env:
|
|
214
|
+
subprocess_env.update(custom_env)
|
|
215
|
+
|
|
216
|
+
# Ensure OPENCODE_SERVER_URL is set (use default if not provided)
|
|
217
|
+
if "OPENCODE_SERVER_URL" not in subprocess_env:
|
|
218
|
+
subprocess_env["OPENCODE_SERVER_URL"] = DEFAULT_SERVER_URL
|
|
219
|
+
|
|
220
|
+
# Note: OPENCODE_API_KEY should be provided via environment or custom_env
|
|
221
|
+
# We don't set a default value for security reasons
|
|
222
|
+
|
|
223
|
+
# Add global npm modules to NODE_PATH so wrapper can find @opencode-ai/sdk
|
|
224
|
+
# This allows the SDK to be installed globally rather than bundled
|
|
225
|
+
self._ensure_node_path(subprocess_env)
|
|
226
|
+
|
|
227
|
+
return subprocess_env
|
|
228
|
+
|
|
229
|
+
def _ensure_node_path(self, env: Dict[str, str]) -> None:
|
|
230
|
+
"""
|
|
231
|
+
Ensure NODE_PATH includes global npm modules and local node_modules.
|
|
232
|
+
|
|
233
|
+
This allows the wrapper script to import @opencode-ai/sdk whether it's
|
|
234
|
+
installed globally (npm install -g @opencode-ai/sdk) or locally in the
|
|
235
|
+
providers directory.
|
|
236
|
+
"""
|
|
237
|
+
node_paths: List[str] = []
|
|
238
|
+
|
|
239
|
+
# Add existing NODE_PATH entries
|
|
240
|
+
if env.get("NODE_PATH"):
|
|
241
|
+
node_paths.extend(env["NODE_PATH"].split(os.pathsep))
|
|
242
|
+
|
|
243
|
+
# Add local node_modules (alongside wrapper script)
|
|
244
|
+
local_node_modules = self._wrapper_path.parent / "node_modules"
|
|
245
|
+
if local_node_modules.exists():
|
|
246
|
+
node_paths.append(str(local_node_modules))
|
|
247
|
+
|
|
248
|
+
# Detect and add global npm root
|
|
249
|
+
try:
|
|
250
|
+
result = subprocess.run(
|
|
251
|
+
["npm", "root", "-g"],
|
|
252
|
+
capture_output=True,
|
|
253
|
+
text=True,
|
|
254
|
+
timeout=5,
|
|
255
|
+
check=False,
|
|
256
|
+
)
|
|
257
|
+
if result.returncode == 0 and result.stdout.strip():
|
|
258
|
+
global_root = result.stdout.strip()
|
|
259
|
+
if global_root not in node_paths:
|
|
260
|
+
node_paths.append(global_root)
|
|
261
|
+
except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
|
|
262
|
+
# npm not available or timed out - skip global path
|
|
263
|
+
pass
|
|
264
|
+
|
|
265
|
+
if node_paths:
|
|
266
|
+
env["NODE_PATH"] = os.pathsep.join(node_paths)
|
|
267
|
+
|
|
268
|
+
def _create_readonly_config(self) -> Path:
|
|
269
|
+
"""
|
|
270
|
+
Create temporary opencode.json with read-only tool restrictions.
|
|
271
|
+
|
|
272
|
+
Returns:
|
|
273
|
+
Path to the temporary config file
|
|
274
|
+
|
|
275
|
+
Note:
|
|
276
|
+
- Tool blocking may not work for MCP tools (OpenCode issue #3756)
|
|
277
|
+
- Config is server-wide, affecting all sessions on this server instance
|
|
278
|
+
"""
|
|
279
|
+
# Create temp directory for config
|
|
280
|
+
temp_dir = Path(tempfile.mkdtemp(prefix="opencode_readonly_"))
|
|
281
|
+
|
|
282
|
+
# Create config file
|
|
283
|
+
config_path = temp_dir / "opencode.json"
|
|
284
|
+
with open(config_path, "w") as f:
|
|
285
|
+
json.dump(READONLY_TOOLS_CONFIG, f, indent=2)
|
|
286
|
+
|
|
287
|
+
return config_path
|
|
288
|
+
|
|
289
|
+
    def _cleanup_config_file(self) -> None:
        """Remove temporary config file and directory.

        Safe to call repeatedly and from ``__del__`` (the hasattr check guards
        against partially constructed instances).
        """
        if hasattr(self, "_config_file_path") and self._config_file_path is not None:
            try:
                # Remove config file
                if self._config_file_path.exists():
                    self._config_file_path.unlink()

                # Remove temp directory
                temp_dir = self._config_file_path.parent
                if temp_dir.exists():
                    temp_dir.rmdir()
            except (OSError, FileNotFoundError):
                # File already removed or doesn't exist, ignore
                pass
            finally:
                self._config_file_path = None
|
|
306
|
+
|
|
307
|
+
def _is_port_open(self, port: int, host: str = "localhost") -> bool:
|
|
308
|
+
"""Check if a TCP port is open and accepting connections."""
|
|
309
|
+
try:
|
|
310
|
+
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
|
|
311
|
+
sock.settimeout(1)
|
|
312
|
+
result = sock.connect_ex((host, port))
|
|
313
|
+
return result == 0
|
|
314
|
+
except (socket.error, OSError):
|
|
315
|
+
return False
|
|
316
|
+
|
|
317
|
+
    def _is_opencode_server_healthy(self, server_url: str) -> bool:
        """Verify the opencode server is actually responding (not just port open).

        Issues a small POST to ``/session``; any HTTP status below 500 —
        including 4xx rejections — counts as "alive".
        """
        import urllib.request
        import urllib.error

        try:
            # Try to hit the opencode server - it should respond to HTTP
            req = urllib.request.Request(
                f"{server_url}/session",
                method="POST",
                headers={"Content-Type": "application/json"},
                data=b"{}",
            )
            with urllib.request.urlopen(req, timeout=3) as resp:
                # Any response (even error) means server is alive
                return resp.status < 500
        except urllib.error.HTTPError as e:
            # 4xx errors mean server is alive but rejected request - that's OK
            return e.code < 500
        except Exception:
            # Connection refused, timeout, etc. - server not healthy
            return False
|
|
339
|
+
|
|
340
|
+
def _ensure_server_running(self) -> None:
|
|
341
|
+
"""Ensure OpenCode server is running, start if necessary."""
|
|
342
|
+
# Extract port from server URL (default: 4096)
|
|
343
|
+
server_url = (
|
|
344
|
+
self._env.get("OPENCODE_SERVER_URL", DEFAULT_SERVER_URL)
|
|
345
|
+
if self._env
|
|
346
|
+
else DEFAULT_SERVER_URL
|
|
347
|
+
)
|
|
348
|
+
try:
|
|
349
|
+
# Parse port from URL (e.g., "http://localhost:4096" -> 4096)
|
|
350
|
+
port = int(server_url.split(":")[-1].rstrip("/"))
|
|
351
|
+
except (ValueError, IndexError):
|
|
352
|
+
port = 4096
|
|
353
|
+
|
|
354
|
+
# Check if server is already running and healthy
|
|
355
|
+
if self._is_port_open(port):
|
|
356
|
+
if self._is_opencode_server_healthy(server_url):
|
|
357
|
+
logger.debug(f"OpenCode server already running on port {port}")
|
|
358
|
+
return
|
|
359
|
+
else:
|
|
360
|
+
# Port is open but server not responding properly
|
|
361
|
+
logger.warning(
|
|
362
|
+
f"Port {port} is open but OpenCode server not responding. "
|
|
363
|
+
"Another process may be using this port."
|
|
364
|
+
)
|
|
365
|
+
raise ProviderExecutionError(
|
|
366
|
+
f"Port {port} is in use but OpenCode server is not responding. "
|
|
367
|
+
"Another process may be using this port. "
|
|
368
|
+
"Try: lsof -i :{port} to identify the process.",
|
|
369
|
+
provider=self.metadata.provider_id,
|
|
370
|
+
)
|
|
371
|
+
|
|
372
|
+
# Server not running - need to start it
|
|
373
|
+
logger.info(f"OpenCode server not running on port {port}, attempting to start...")
|
|
374
|
+
# Look for opencode binary in node_modules/.bin first
|
|
375
|
+
opencode_binary = None
|
|
376
|
+
node_modules_bin = Path("node_modules/.bin/opencode")
|
|
377
|
+
|
|
378
|
+
if node_modules_bin.exists():
|
|
379
|
+
opencode_binary = str(node_modules_bin)
|
|
380
|
+
else:
|
|
381
|
+
# Fall back to global opencode if available
|
|
382
|
+
try:
|
|
383
|
+
result = subprocess.run(
|
|
384
|
+
["which", "opencode"],
|
|
385
|
+
capture_output=True,
|
|
386
|
+
text=True,
|
|
387
|
+
timeout=5,
|
|
388
|
+
check=False,
|
|
389
|
+
)
|
|
390
|
+
if result.returncode == 0 and result.stdout.strip():
|
|
391
|
+
opencode_binary = result.stdout.strip()
|
|
392
|
+
except (subprocess.TimeoutExpired, FileNotFoundError):
|
|
393
|
+
pass
|
|
394
|
+
|
|
395
|
+
if not opencode_binary:
|
|
396
|
+
raise ProviderUnavailableError(
|
|
397
|
+
"OpenCode server not running and 'opencode' binary not found in node_modules/.bin or PATH",
|
|
398
|
+
provider=self.metadata.provider_id,
|
|
399
|
+
)
|
|
400
|
+
|
|
401
|
+
# Create read-only configuration file
|
|
402
|
+
self._config_file_path = self._create_readonly_config()
|
|
403
|
+
|
|
404
|
+
# Start server in background
|
|
405
|
+
# Prepare environment with API keys and configuration
|
|
406
|
+
server_env = self._prepare_subprocess_env(self._env)
|
|
407
|
+
# Set OPENCODE_CONFIG to point to our readonly config
|
|
408
|
+
server_env["OPENCODE_CONFIG"] = str(self._config_file_path)
|
|
409
|
+
|
|
410
|
+
try:
|
|
411
|
+
self._server_process = subprocess.Popen(
|
|
412
|
+
[opencode_binary, "serve", "--hostname=127.0.0.1", f"--port={port}"],
|
|
413
|
+
stdout=subprocess.PIPE,
|
|
414
|
+
stderr=subprocess.PIPE,
|
|
415
|
+
env=server_env, # Pass environment variables to server
|
|
416
|
+
start_new_session=True, # Detach from parent
|
|
417
|
+
)
|
|
418
|
+
except (OSError, subprocess.SubprocessError) as e:
|
|
419
|
+
raise ProviderExecutionError(
|
|
420
|
+
f"Failed to start OpenCode server: {e}",
|
|
421
|
+
provider=self.metadata.provider_id,
|
|
422
|
+
) from e
|
|
423
|
+
|
|
424
|
+
# Wait for server to become available and healthy
|
|
425
|
+
start_time = time.time()
|
|
426
|
+
while time.time() - start_time < SERVER_STARTUP_TIMEOUT:
|
|
427
|
+
if self._is_port_open(port) and self._is_opencode_server_healthy(server_url):
|
|
428
|
+
logger.info(f"OpenCode server started successfully on port {port}")
|
|
429
|
+
return
|
|
430
|
+
time.sleep(0.5)
|
|
431
|
+
|
|
432
|
+
# Timeout - server didn't start
|
|
433
|
+
if self._server_process:
|
|
434
|
+
self._server_process.terminate()
|
|
435
|
+
self._server_process = None
|
|
436
|
+
|
|
437
|
+
raise ProviderTimeoutError(
|
|
438
|
+
f"OpenCode server failed to start within {SERVER_STARTUP_TIMEOUT} seconds",
|
|
439
|
+
provider=self.metadata.provider_id,
|
|
440
|
+
)
|
|
441
|
+
|
|
442
|
+
def _validate_request(self, request: ProviderRequest) -> None:
|
|
443
|
+
"""Validate request parameters supported by OpenCode."""
|
|
444
|
+
unsupported: List[str] = []
|
|
445
|
+
if request.attachments:
|
|
446
|
+
unsupported.append("attachments")
|
|
447
|
+
if unsupported:
|
|
448
|
+
raise ProviderExecutionError(
|
|
449
|
+
f"OpenCode does not support: {', '.join(unsupported)}",
|
|
450
|
+
provider=self.metadata.provider_id,
|
|
451
|
+
)
|
|
452
|
+
|
|
453
|
+
def _build_prompt(self, request: ProviderRequest) -> str:
|
|
454
|
+
"""Build the prompt with system prompt and security warning."""
|
|
455
|
+
system_parts = []
|
|
456
|
+
if request.system_prompt:
|
|
457
|
+
system_parts.append(request.system_prompt.strip())
|
|
458
|
+
system_parts.append(SHELL_COMMAND_WARNING.strip())
|
|
459
|
+
|
|
460
|
+
if system_parts:
|
|
461
|
+
return f"{chr(10).join(system_parts)}\n\n{request.prompt}"
|
|
462
|
+
return request.prompt
|
|
463
|
+
|
|
464
|
+
def _resolve_model(self, request: ProviderRequest) -> str:
|
|
465
|
+
"""Resolve model from request or use default."""
|
|
466
|
+
# 1. Check request.model first (from ProviderRequest constructor)
|
|
467
|
+
if request.model:
|
|
468
|
+
return str(request.model)
|
|
469
|
+
# 2. Fallback to metadata override (legacy/alternative path)
|
|
470
|
+
model_override = request.metadata.get("model") if request.metadata else None
|
|
471
|
+
if model_override:
|
|
472
|
+
return str(model_override)
|
|
473
|
+
# 3. Fallback to instance default
|
|
474
|
+
return self._model
|
|
475
|
+
|
|
476
|
+
def _emit_stream_if_requested(self, content: str, *, stream: bool) -> None:
|
|
477
|
+
"""Emit streaming chunk if streaming is enabled."""
|
|
478
|
+
if not stream or not content:
|
|
479
|
+
return
|
|
480
|
+
self._emit_stream_chunk(StreamChunk(content=content, index=0))
|
|
481
|
+
|
|
482
|
+
def _extract_error_from_jsonl(self, stdout: str) -> Optional[str]:
|
|
483
|
+
"""
|
|
484
|
+
Extract error message from OpenCode wrapper JSONL output.
|
|
485
|
+
|
|
486
|
+
The wrapper outputs errors as {"type":"error","code":"...","message":"..."}.
|
|
487
|
+
"""
|
|
488
|
+
if not stdout:
|
|
489
|
+
return None
|
|
490
|
+
|
|
491
|
+
for line in stdout.strip().split("\n"):
|
|
492
|
+
if not line.strip():
|
|
493
|
+
continue
|
|
494
|
+
try:
|
|
495
|
+
event = json.loads(line)
|
|
496
|
+
except json.JSONDecodeError:
|
|
497
|
+
continue
|
|
498
|
+
|
|
499
|
+
if event.get("type") == "error":
|
|
500
|
+
msg = event.get("message", "")
|
|
501
|
+
if msg:
|
|
502
|
+
return msg
|
|
503
|
+
|
|
504
|
+
return None
|
|
505
|
+
|
|
506
|
+
    def _execute(self, request: ProviderRequest) -> ProviderResult:
        """Execute generation request via OpenCode wrapper.

        Flow: validate the request, ensure the local server is up, run the
        Node wrapper with a JSON payload on stdin, then parse the JSONL
        events on stdout ("chunk" / "done" / "error") into content, token
        usage, and the final ProviderResult.

        Raises:
            ProviderExecutionError: unsupported params, non-zero exit,
                malformed JSON, wrapper-reported error, or empty response.
            ProviderTimeoutError: wrapper exceeded the timeout.
            ProviderUnavailableError: Node.js binary missing.
        """
        self._validate_request(request)

        # Ensure server is running before making request
        self._ensure_server_running()

        model = self._resolve_model(request)

        # Build JSON payload for wrapper stdin
        payload = {
            "prompt": self._build_prompt(request),
            "system_prompt": request.system_prompt,
            "config": {
                "model": model,
                "temperature": request.temperature,
                "max_tokens": request.max_tokens,
            },
        }

        # Build command to invoke wrapper
        command = [self._binary, str(self._wrapper_path)]
        if request.stream:
            command.append("--stream")

        # Execute wrapper with JSON payload via stdin
        timeout = request.timeout or self._timeout
        try:
            completed = self._runner(
                command,
                timeout=int(timeout) if timeout else None,
                env=self._env,
                input_data=json.dumps(payload),
            )
        except FileNotFoundError as exc:
            raise ProviderUnavailableError(
                f"Node.js binary '{self._binary}' not found",
                provider=self.metadata.provider_id,
            ) from exc
        except subprocess.TimeoutExpired as exc:
            raise ProviderTimeoutError(
                f"OpenCode wrapper timed out after {timeout}s",
                provider=self.metadata.provider_id,
            ) from exc

        if completed.returncode != 0:
            stderr = (completed.stderr or "").strip()
            logger.debug(f"OpenCode wrapper stderr: {stderr or 'no stderr'}")

            # Extract error from JSONL stdout (wrapper outputs {"type":"error","message":"..."})
            jsonl_error = self._extract_error_from_jsonl(completed.stdout)

            # Prefer the structured JSONL error; fall back to raw stderr.
            error_msg = f"OpenCode wrapper exited with code {completed.returncode}"
            if jsonl_error:
                error_msg += f": {jsonl_error[:500]}"
            elif stderr:
                error_msg += f": {stderr[:500]}"
            raise ProviderExecutionError(
                error_msg,
                provider=self.metadata.provider_id,
            )

        # Parse line-delimited JSON output
        content_parts: List[str] = []
        final_usage: Optional[TokenUsage] = None
        raw_payload: Dict[str, Any] = {}
        reported_model = model

        for line in completed.stdout.strip().split("\n"):
            if not line.strip():
                continue

            try:
                msg = json.loads(line)
            except json.JSONDecodeError as exc:
                logger.debug(f"OpenCode wrapper JSON parse error: {exc}")
                raise ProviderExecutionError(
                    "OpenCode wrapper returned invalid JSON response",
                    provider=self.metadata.provider_id,
                ) from exc

            msg_type = msg.get("type")

            if msg_type == "chunk":
                # Streaming chunk
                chunk_content = msg.get("content", "")
                content_parts.append(chunk_content)
                if request.stream:
                    self._emit_stream_chunk(
                        StreamChunk(content=chunk_content, index=len(content_parts) - 1)
                    )

            elif msg_type == "done":
                # Final response with metadata; "text" is only used when no
                # chunks were collected (non-streaming path).
                response_data = msg.get("response", {})
                final_text = response_data.get("text", "")
                if final_text and not content_parts:
                    content_parts.append(final_text)

                # Extract model from response
                reported_model = response_data.get("model", model)

                # Extract token usage
                usage_data = response_data.get("usage", {})
                final_usage = TokenUsage(
                    input_tokens=usage_data.get("prompt_tokens", 0),
                    output_tokens=usage_data.get("completion_tokens", 0),
                    total_tokens=usage_data.get("total_tokens", 0),
                )
                raw_payload = response_data

            elif msg_type == "error":
                # Error from wrapper
                error_msg = msg.get("message", "Unknown error")
                raise ProviderExecutionError(
                    f"OpenCode wrapper error: {error_msg}",
                    provider=self.metadata.provider_id,
                )

        # Combine all content parts
        final_content = "".join(content_parts)

        # Validate we got actual content (defense in depth)
        # NOTE(review): this message hardcodes port 4096 even when a custom
        # OPENCODE_SERVER_URL/port is configured — consider using the real URL.
        if not final_content.strip():
            stderr_info = (completed.stderr or "").strip() or "none"
            raise ProviderExecutionError(
                f"OpenCode wrapper returned empty response. "
                f"Server may not be running on port 4096. Stderr: {stderr_info}",
                provider=self.metadata.provider_id,
            )

        # Emit final content if streaming was requested
        self._emit_stream_if_requested(final_content, stream=request.stream)

        # Use default usage if not provided
        if final_usage is None:
            final_usage = TokenUsage(input_tokens=0, output_tokens=0, total_tokens=0)

        return ProviderResult(
            content=final_content,
            provider_id=self.metadata.provider_id,
            model_used=f"{self.metadata.provider_id}:{reported_model}",
            status=ProviderStatus.SUCCESS,
            tokens=final_usage,
            stderr=(completed.stderr or "").strip() or None,
            raw_payload=raw_payload,
        )
|
|
653
|
+
|
|
654
|
+
|
|
655
|
+
def is_opencode_available() -> bool:
    """Report whether the OpenCode provider can run in this environment.

    Delegates entirely to :func:`detect_provider_availability`; see that
    helper for the actual detection strategy.
    """
    available = detect_provider_availability("opencode")
    return available
|
|
658
|
+
|
|
659
|
+
|
|
660
|
+
def create_provider(
    *,
    hooks: ProviderHooks,
    model: Optional[str] = None,
    dependencies: Optional[Dict[str, object]] = None,
    overrides: Optional[Dict[str, object]] = None,
) -> OpenCodeProvider:
    """
    Factory function for creating OpenCodeProvider instances.

    Args:
        hooks: Provider hooks for callbacks
        model: Optional model ID override
        dependencies: Optional dependencies (runner, env, binary)
        overrides: Optional parameter overrides

    Returns:
        Configured OpenCodeProvider instance
    """
    dependencies = dependencies or {}
    overrides = overrides or {}

    runner = dependencies.get("runner")
    env = dependencies.get("env")
    # Overrides win over dependencies for these settings.
    binary = overrides.get("binary") or dependencies.get("binary")
    wrapper_path = overrides.get("wrapper_path") or dependencies.get("wrapper_path")
    # NOTE(review): `or` treats a falsy override (e.g. timeout=0) as absent —
    # matches the original behavior, but confirm that 0 is never a valid timeout.
    timeout = overrides.get("timeout") or dependencies.get("timeout")
    # Single truthiness check instead of the original double-evaluated ternary.
    selected_model = overrides.get("model") or model

    return OpenCodeProvider(
        metadata=OPENCODE_METADATA,
        hooks=hooks,
        model=selected_model,  # type: ignore[arg-type]
        binary=binary,  # type: ignore[arg-type]
        wrapper_path=wrapper_path,  # type: ignore[arg-type]
        # `x if x is not None else None` in the original was a no-op; pass through.
        runner=runner,  # type: ignore[arg-type]
        env=env,  # type: ignore[arg-type]
        timeout=timeout,  # type: ignore[arg-type]
    )
|
|
699
|
+
|
|
700
|
+
|
|
701
|
+
# Register the provider immediately so consumers can resolve it by id.
# Module-level side effect: runs on import. `replace=True` means re-importing
# this module overwrites any previously registered "opencode" entry.
register_provider(
    "opencode",
    factory=create_provider,
    metadata=OPENCODE_METADATA,
    availability_check=is_opencode_available,
    description="OpenCode AI SDK adapter with Node.js wrapper",
    tags=("sdk", "text", "streaming", "read-only"),
    replace=True,
)
|
|
711
|
+
|
|
712
|
+
|
|
713
|
+
# Public API of this module for `from ... import *` and documentation tools.
__all__ = [
    "OpenCodeProvider",
    "create_provider",
    "is_opencode_available",
    "OPENCODE_METADATA",
]
|