opencode-a2a 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opencode_a2a/__init__.py +15 -0
- opencode_a2a/cli.py +52 -0
- opencode_a2a/config.py +160 -0
- opencode_a2a/contracts/__init__.py +1 -0
- opencode_a2a/contracts/extensions.py +948 -0
- opencode_a2a/execution/__init__.py +1 -0
- opencode_a2a/execution/executor.py +1582 -0
- opencode_a2a/execution/request_context.py +91 -0
- opencode_a2a/execution/stream_events.py +578 -0
- opencode_a2a/execution/stream_state.py +279 -0
- opencode_a2a/execution/upstream_errors.py +264 -0
- opencode_a2a/jsonrpc/__init__.py +1 -0
- opencode_a2a/jsonrpc/application.py +1036 -0
- opencode_a2a/jsonrpc/methods.py +537 -0
- opencode_a2a/jsonrpc/params.py +123 -0
- opencode_a2a/opencode_upstream_client.py +544 -0
- opencode_a2a/parts/__init__.py +1 -0
- opencode_a2a/parts/mapping.py +151 -0
- opencode_a2a/parts/text.py +24 -0
- opencode_a2a/profile/__init__.py +1 -0
- opencode_a2a/profile/runtime.py +254 -0
- opencode_a2a/server/__init__.py +1 -0
- opencode_a2a/server/agent_card.py +288 -0
- opencode_a2a/server/application.py +634 -0
- opencode_a2a/server/openapi.py +432 -0
- opencode_a2a/server/request_parsing.py +109 -0
- opencode_a2a-0.3.1.dist-info/METADATA +173 -0
- opencode_a2a-0.3.1.dist-info/RECORD +32 -0
- opencode_a2a-0.3.1.dist-info/WHEEL +5 -0
- opencode_a2a-0.3.1.dist-info/entry_points.txt +2 -0
- opencode_a2a-0.3.1.dist-info/licenses/LICENSE +176 -0
- opencode_a2a-0.3.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from collections.abc import Callable, Mapping
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from enum import StrEnum
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class BlockType(StrEnum):
    """Kind of content block carried by a streamed message chunk.

    Values double as the wire-level ``block_type`` strings placed into
    artifact metadata (see ``_build_stream_artifact_metadata``).
    """

    TEXT = "text"  # plain assistant/user text
    REASONING = "reasoning"  # model reasoning output (presumably "thinking" traces)
    TOOL_CALL = "tool_call"  # tool invocation block
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass(frozen=True)
class _NormalizedStreamChunk:
    """Immutable view of one upstream stream part, normalized for emission.

    Produced by stream-event translation; consumed when deciding whether and
    how to emit an A2A artifact update for the part.
    """

    part: Any  # raw upstream part payload (opaque at this layer)
    content_key: str  # text content used for dedup/accumulation in _StreamOutputState
    accumulate_content: bool  # whether content_key is a delta to accumulate (vs a snapshot)
    append: bool  # upstream's append hint for the artifact update
    block_type: BlockType  # which content buffer this chunk belongs to
    internal_source: str  # source label for internal bookkeeping — TODO confirm exact consumer
    shared_source: str  # source label exposed in shared "stream" metadata
    message_id: str | None  # upstream message id, when the event carried one
    role: str | None  # upstream message role, when known (None presumably means assistant echo path)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass(frozen=True)
class _PendingDelta:
    """A buffered incremental text update awaiting emission.

    NOTE(review): appears to represent one field-level delta of an upstream
    message — confirm against the consumer in executor.py.
    """

    field: str  # name of the message field the delta applies to
    delta: str  # text fragment to append to that field
    message_id: str | None  # upstream message id, if known
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class _StreamPartState:
    """Mutable accumulation state for a single open stream part."""

    block_type: BlockType  # content-block kind of this part
    message_id: str | None  # upstream message id, if known
    role: str | None  # upstream role, if known
    buffer: str = ""  # text accumulated for this part so far
    saw_delta: bool = False  # True once any incremental delta has arrived
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _merge_token_usage(
|
|
46
|
+
base: Mapping[str, Any] | None,
|
|
47
|
+
incoming: Mapping[str, Any] | None,
|
|
48
|
+
) -> dict[str, Any] | None:
|
|
49
|
+
if base is None and incoming is None:
|
|
50
|
+
return None
|
|
51
|
+
merged: dict[str, Any] = dict(base) if base else {}
|
|
52
|
+
if incoming:
|
|
53
|
+
for key, value in incoming.items():
|
|
54
|
+
if value is None:
|
|
55
|
+
continue
|
|
56
|
+
if key == "raw" and isinstance(value, Mapping):
|
|
57
|
+
existing = merged.get("raw")
|
|
58
|
+
if isinstance(existing, Mapping):
|
|
59
|
+
merged["raw"] = {**dict(existing), **dict(value)}
|
|
60
|
+
else:
|
|
61
|
+
merged["raw"] = dict(value)
|
|
62
|
+
continue
|
|
63
|
+
merged[key] = value
|
|
64
|
+
return merged or None
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@dataclass
class _StreamOutputState:
    """Mutable per-request accumulator for streamed output.

    Tracks per-block-type content buffers (for dedup and append detection),
    per-identity progress snapshots, merged token usage, pending interrupt
    request ids, and a monotonically increasing event sequence counter.
    """

    user_text: str  # original prompt text, used to suppress an initial echo
    stable_message_id: str  # fallback message id when upstream supplies none
    event_id_namespace: str  # prefix for generated event ids
    content_buffers: dict[BlockType, str] = field(default_factory=dict)
    progress_buffers: dict[str, str] = field(default_factory=dict)
    token_usage: dict[str, Any] | None = None
    upstream_error: Any | None = None
    pending_interrupt_request_ids: set[str] = field(default_factory=set)
    saw_any_chunk: bool = False
    emitted_stream_chunk: bool = False
    sequence: int = 0

    def should_drop_initial_user_echo(
        self,
        text: str,
        *,
        block_type: BlockType,
        role: str | None,
    ) -> bool:
        """Return True when *text* is merely the user's prompt echoed back.

        Only applies to the very first role-less TEXT chunk of the stream.
        """
        if role is not None or block_type != BlockType.TEXT or self.saw_any_chunk:
            return False
        prompt = self.user_text.strip()
        return bool(prompt) and text.strip() == prompt

    def register_chunk(
        self,
        *,
        block_type: BlockType,
        content_key: str,
        append: bool,
        accumulate_content: bool = True,
    ) -> tuple[bool, bool]:
        """Record a content chunk; return ``(should_emit, effective_append)``.

        ``should_emit`` is False when the buffer would not change (duplicate).
        ``effective_append`` tells the caller whether a chunk has already been
        emitted, i.e. whether this one extends an existing artifact.
        """
        current = self.content_buffers.get(block_type, "")
        candidate = current + content_key if append and accumulate_content else content_key
        if candidate == current:
            return False, False
        self.content_buffers[block_type] = candidate
        self.saw_any_chunk = True
        was_emitted_before = self.emitted_stream_chunk
        self.emitted_stream_chunk = True
        return True, was_emitted_before

    def register_progress(self, *, identity: str, content_key: str) -> bool:
        """Record a progress snapshot; return True unless it is a duplicate."""
        if self.progress_buffers.get(identity) == content_key:
            return False
        self.progress_buffers[identity] = content_key
        return True

    def should_emit_final_snapshot(self, text: str) -> bool:
        """Decide whether the final full-text snapshot adds new information.

        Stores *text* as the TEXT buffer when it differs (modulo whitespace)
        from what has already been streamed.
        """
        stripped = text.strip()
        if not stripped:
            return False
        if self.content_buffers.get(BlockType.TEXT, "").strip() == stripped:
            return False
        self.content_buffers[BlockType.TEXT] = text
        self.saw_any_chunk = True
        return True

    def next_sequence(self) -> int:
        """Advance and return the 1-based event sequence counter."""
        self.sequence += 1
        return self.sequence

    def resolve_message_id(self, message_id: str | None) -> str:
        """Return a usable message id, falling back to the stable one."""
        candidate = message_id.strip() if isinstance(message_id, str) else ""
        return candidate or self.stable_message_id

    def build_event_id(self, sequence: int) -> str:
        """Compose a namespaced event id for the given sequence number."""
        return f"{self.event_id_namespace}:{sequence}"

    def ingest_token_usage(self, usage: Mapping[str, Any] | None) -> None:
        """Fold *usage* into the running token-usage accumulator."""
        self.token_usage = _merge_token_usage(self.token_usage, usage)

    def mark_interrupt_pending(self, request_id: str) -> bool:
        """Track an interrupt request id; return True if newly added."""
        token = request_id.strip()
        if not token or token in self.pending_interrupt_request_ids:
            return False
        self.pending_interrupt_request_ids.add(token)
        return True

    def clear_interrupt_pending(self, request_id: str) -> bool:
        """Forget an interrupt request id; return True if it was tracked."""
        token = request_id.strip()
        if not token or token not in self.pending_interrupt_request_ids:
            return False
        self.pending_interrupt_request_ids.discard(token)
        return True
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
class _TTLCache:
|
|
169
|
+
"""Bounded TTL cache for hashable key -> string value."""
|
|
170
|
+
|
|
171
|
+
def __init__(
|
|
172
|
+
self,
|
|
173
|
+
*,
|
|
174
|
+
ttl_seconds: int,
|
|
175
|
+
maxsize: int,
|
|
176
|
+
now: Callable[[], float] = time.monotonic,
|
|
177
|
+
refresh_on_get: bool = False,
|
|
178
|
+
) -> None:
|
|
179
|
+
self._ttl_seconds = int(ttl_seconds)
|
|
180
|
+
self._maxsize = int(maxsize)
|
|
181
|
+
self._now = now
|
|
182
|
+
self._refresh_on_get = bool(refresh_on_get)
|
|
183
|
+
self._store: dict[object, tuple[str, float]] = {}
|
|
184
|
+
|
|
185
|
+
def get(self, key: object) -> str | None:
|
|
186
|
+
if self._ttl_seconds <= 0 or self._maxsize <= 0:
|
|
187
|
+
return None
|
|
188
|
+
item = self._store.get(key)
|
|
189
|
+
if not item:
|
|
190
|
+
return None
|
|
191
|
+
value, expires_at = item
|
|
192
|
+
now = self._now()
|
|
193
|
+
if expires_at <= now:
|
|
194
|
+
self._store.pop(key, None)
|
|
195
|
+
return None
|
|
196
|
+
if self._refresh_on_get:
|
|
197
|
+
self._store[key] = (value, now + float(self._ttl_seconds))
|
|
198
|
+
return value
|
|
199
|
+
|
|
200
|
+
def set(self, key: object, value: str) -> None:
|
|
201
|
+
if self._ttl_seconds <= 0 or self._maxsize <= 0:
|
|
202
|
+
return
|
|
203
|
+
now = self._now()
|
|
204
|
+
expires_at = now + float(self._ttl_seconds)
|
|
205
|
+
self._store[key] = (value, expires_at)
|
|
206
|
+
self._evict_if_needed(now=now)
|
|
207
|
+
|
|
208
|
+
def pop(self, key: object) -> None:
|
|
209
|
+
self._store.pop(key, None)
|
|
210
|
+
|
|
211
|
+
def _evict_if_needed(self, *, now: float) -> None:
|
|
212
|
+
if len(self._store) <= self._maxsize:
|
|
213
|
+
return
|
|
214
|
+
expired = [key for key, (_, exp) in self._store.items() if exp <= now]
|
|
215
|
+
for key in expired:
|
|
216
|
+
self._store.pop(key, None)
|
|
217
|
+
if len(self._store) <= self._maxsize:
|
|
218
|
+
return
|
|
219
|
+
overflow = len(self._store) - self._maxsize
|
|
220
|
+
by_expiry = sorted(self._store.items(), key=lambda item: item[1][1])
|
|
221
|
+
for key, _ in by_expiry[:overflow]:
|
|
222
|
+
self._store.pop(key, None)
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def _build_stream_artifact_metadata(
|
|
226
|
+
*,
|
|
227
|
+
block_type: BlockType,
|
|
228
|
+
shared_source: str,
|
|
229
|
+
message_id: str | None = None,
|
|
230
|
+
role: str | None = None,
|
|
231
|
+
event_id: str | None = None,
|
|
232
|
+
sequence: int | None = None,
|
|
233
|
+
) -> dict[str, Any]:
|
|
234
|
+
stream_meta: dict[str, Any] = {
|
|
235
|
+
"block_type": block_type.value,
|
|
236
|
+
"source": shared_source,
|
|
237
|
+
}
|
|
238
|
+
if message_id:
|
|
239
|
+
stream_meta["message_id"] = message_id
|
|
240
|
+
if role:
|
|
241
|
+
stream_meta["role"] = role
|
|
242
|
+
if event_id:
|
|
243
|
+
stream_meta["event_id"] = event_id
|
|
244
|
+
if sequence is not None:
|
|
245
|
+
stream_meta["sequence"] = sequence
|
|
246
|
+
return {"shared": {"stream": stream_meta}}
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _build_output_metadata(
|
|
250
|
+
*,
|
|
251
|
+
session_id: str | None = None,
|
|
252
|
+
session_title: str | None = None,
|
|
253
|
+
usage: Mapping[str, Any] | None = None,
|
|
254
|
+
stream: Mapping[str, Any] | None = None,
|
|
255
|
+
progress: Mapping[str, Any] | None = None,
|
|
256
|
+
interrupt: Mapping[str, Any] | None = None,
|
|
257
|
+
opencode_private: Mapping[str, Any] | None = None,
|
|
258
|
+
) -> dict[str, Any] | None:
|
|
259
|
+
metadata: dict[str, Any] = {}
|
|
260
|
+
shared_meta: dict[str, Any] = {}
|
|
261
|
+
|
|
262
|
+
if session_id:
|
|
263
|
+
session_meta: dict[str, Any] = {"id": session_id}
|
|
264
|
+
if session_title is not None:
|
|
265
|
+
session_meta["title"] = session_title
|
|
266
|
+
shared_meta["session"] = session_meta
|
|
267
|
+
if usage is not None:
|
|
268
|
+
shared_meta["usage"] = dict(usage)
|
|
269
|
+
if stream is not None:
|
|
270
|
+
shared_meta["stream"] = dict(stream)
|
|
271
|
+
if progress is not None:
|
|
272
|
+
shared_meta["progress"] = dict(progress)
|
|
273
|
+
if interrupt is not None:
|
|
274
|
+
shared_meta["interrupt"] = dict(interrupt)
|
|
275
|
+
if shared_meta:
|
|
276
|
+
metadata["shared"] = shared_meta
|
|
277
|
+
if opencode_private:
|
|
278
|
+
metadata["opencode"] = dict(opencode_private)
|
|
279
|
+
return metadata or None
|
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from contextlib import suppress
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
|
|
7
|
+
import httpx
|
|
8
|
+
from a2a.types import TaskState
|
|
9
|
+
|
|
10
|
+
from ..opencode_upstream_client import UpstreamContractError
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
class _StreamTerminalSignal:
    """Terminal outcome of an upstream event stream (idle or error)."""

    state: TaskState  # final A2A task state to report
    error_type: str | None = None  # machine-readable error code, if any
    message: str | None = None  # human-readable error message, if any
    upstream_status: int | None = None  # upstream HTTP status, when applicable
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass(frozen=True)
class _UpstreamErrorProfile:
    """Static classification of an upstream HTTP error status."""

    error_type: str  # machine-readable error code (e.g. "UPSTREAM_BAD_REQUEST")
    state: TaskState  # A2A task state the error maps to
    default_message: str  # base message, later decorated with status/detail
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass(frozen=True)
class _UpstreamInBandError:
    """Error reported inside the event stream rather than as an HTTP failure."""

    error_type: str  # machine-readable error code
    state: TaskState  # A2A task state the error maps to
    message: str  # fully formatted human-readable message
    upstream_status: int | None = None  # upstream HTTP status, when the event carried one
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# Well-known upstream HTTP statuses with dedicated error codes and task states.
# Statuses not listed here fall back to the generic ranges handled by
# _resolve_upstream_error_profile (4xx / 5xx / other).
_UPSTREAM_HTTP_ERROR_PROFILE_BY_STATUS: dict[int, _UpstreamErrorProfile] = {
    400: _UpstreamErrorProfile(
        "UPSTREAM_BAD_REQUEST",
        TaskState.failed,
        "OpenCode rejected the request due to invalid input",
    ),
    # 401 is the only status mapped to auth_required rather than failed.
    401: _UpstreamErrorProfile(
        "UPSTREAM_UNAUTHORIZED",
        TaskState.auth_required,
        "OpenCode rejected the request due to authentication failure",
    ),
    403: _UpstreamErrorProfile(
        "UPSTREAM_PERMISSION_DENIED",
        TaskState.failed,
        "OpenCode rejected the request due to insufficient permissions",
    ),
    404: _UpstreamErrorProfile(
        "UPSTREAM_RESOURCE_NOT_FOUND",
        TaskState.failed,
        "OpenCode rejected the request because the target resource was not found",
    ),
    429: _UpstreamErrorProfile(
        "UPSTREAM_QUOTA_EXCEEDED",
        TaskState.failed,
        "OpenCode rejected the request due to quota limits",
    ),
}
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _resolve_upstream_error_profile(status: int) -> _UpstreamErrorProfile:
    """Classify an upstream HTTP *status* into an error profile.

    Known statuses use the dedicated table; otherwise 4xx maps to a generic
    client error, 5xx to a server error, and anything else (sub-400) to a
    catch-all HTTP error. All fallbacks use ``TaskState.failed``.
    """
    known = _UPSTREAM_HTTP_ERROR_PROFILE_BY_STATUS.get(status)
    if known is not None:
        return known
    if 400 <= status < 500:
        return _UpstreamErrorProfile(
            "UPSTREAM_CLIENT_ERROR",
            TaskState.failed,
            f"OpenCode rejected the request with client error {status}",
        )
    if status >= 500:
        return _UpstreamErrorProfile(
            "UPSTREAM_SERVER_ERROR",
            TaskState.failed,
            f"OpenCode rejected the request with server error {status}",
        )
    return _UpstreamErrorProfile(
        "UPSTREAM_HTTP_ERROR",
        TaskState.failed,
        f"OpenCode rejected the request with HTTP status {status}",
    )
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _extract_upstream_error_detail(response: httpx.Response | None) -> str | None:
|
|
88
|
+
if response is None:
|
|
89
|
+
return None
|
|
90
|
+
|
|
91
|
+
payload = None
|
|
92
|
+
try:
|
|
93
|
+
payload = response.json()
|
|
94
|
+
except Exception:
|
|
95
|
+
payload = None
|
|
96
|
+
|
|
97
|
+
if isinstance(payload, dict):
|
|
98
|
+
for key in ("detail", "error", "message"):
|
|
99
|
+
value = payload.get(key)
|
|
100
|
+
if isinstance(value, str):
|
|
101
|
+
value = value.strip()
|
|
102
|
+
if value:
|
|
103
|
+
return value
|
|
104
|
+
|
|
105
|
+
text = response.text.strip()
|
|
106
|
+
if text:
|
|
107
|
+
return text[:512]
|
|
108
|
+
return None
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def _format_upstream_error(
    exc: httpx.HTTPStatusError, *, request: str
) -> tuple[str, TaskState, str]:
    """Turn an HTTP status error into ``(error_type, state, message)``.

    *request* is a short label naming the failed upstream call, embedded in
    the message for diagnostics; the response's detail is appended when
    extractable.
    """
    status = exc.response.status_code
    profile = _resolve_upstream_error_profile(status)
    detail = _extract_upstream_error_detail(exc.response)
    detail_suffix = f", detail={detail}" if detail else ""
    message = f"{profile.default_message} ({request}, status={status}{detail_suffix})."
    return profile.error_type, profile.state, message
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _format_stream_terminal_error(
    *,
    detail: str | None,
    status: int | None,
    error_name: str | None,
) -> _StreamTerminalSignal:
    """Build the terminal signal for a ``session.error`` stream event.

    Precedence: an HTTP *status* (when present) drives the profile lookup;
    otherwise ``ProviderAuthError`` maps to auth_required; anything else is a
    generic failed execution error.
    """
    if status is not None:
        profile = _resolve_upstream_error_profile(status)
        detail_suffix = f", detail={detail}" if detail else ""
        return _StreamTerminalSignal(
            state=profile.state,
            error_type=profile.error_type,
            message=f"{profile.default_message} (session.error, status={status}{detail_suffix}).",
            upstream_status=status,
        )

    if error_name == "ProviderAuthError":
        if detail:
            message = (
                "OpenCode rejected the request due to authentication failure "
                f"(session.error, detail={detail})."
            )
        else:
            message = "OpenCode rejected the request due to authentication failure (session.error)."
        return _StreamTerminalSignal(
            state=TaskState.auth_required,
            error_type="UPSTREAM_UNAUTHORIZED",
            message=message,
        )

    if detail:
        message = f"OpenCode execution failed (session.error, detail={detail})."
    elif error_name:
        message = f"OpenCode execution failed (session.error, error={error_name})."
    else:
        message = "OpenCode execution failed (session.error)."
    return _StreamTerminalSignal(
        state=TaskState.failed,
        error_type="UPSTREAM_EXECUTION_ERROR",
        message=message,
    )
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _format_inband_upstream_error(
    *,
    source: str,
    detail: str | None,
    status: int | None,
    error_name: str | None,
) -> _UpstreamInBandError:
    """Build an in-band upstream error, labeled with the event *source*.

    Same precedence as _format_stream_terminal_error: HTTP status first, then
    ``ProviderAuthError`` (auth_required), then a generic execution failure.
    """
    if status is not None:
        profile = _resolve_upstream_error_profile(status)
        detail_suffix = f", detail={detail}" if detail else ""
        return _UpstreamInBandError(
            error_type=profile.error_type,
            state=profile.state,
            message=f"{profile.default_message} ({source}, status={status}{detail_suffix}).",
            upstream_status=status,
        )

    if error_name == "ProviderAuthError":
        if detail:
            message = (
                "OpenCode rejected the request due to authentication failure "
                f"({source}, detail={detail})."
            )
        else:
            message = f"OpenCode rejected the request due to authentication failure ({source})."
        return _UpstreamInBandError(
            error_type="UPSTREAM_UNAUTHORIZED",
            state=TaskState.auth_required,
            message=message,
        )

    if detail:
        message = f"OpenCode execution failed ({source}, detail={detail})."
    elif error_name:
        message = f"OpenCode execution failed ({source}, error={error_name})."
    else:
        message = f"OpenCode execution failed ({source})."
    return _UpstreamInBandError(
        error_type="UPSTREAM_EXECUTION_ERROR",
        state=TaskState.failed,
        message=message,
    )
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
async def _await_stream_terminal_signal(
    *,
    stream_task: asyncio.Task[None] | None,
    terminal_signal: asyncio.Future[_StreamTerminalSignal],
    session_id: str,
) -> _StreamTerminalSignal:
    """Wait until the stream produces its terminal signal.

    Races *stream_task* against *terminal_signal*. If the stream task finishes
    without the signal ever being set, the upstream violated its contract
    (streams must end with session.idle or session.error) and
    UpstreamContractError is raised.

    Raises:
        RuntimeError: if the streaming task was never started.
        UpstreamContractError: if the stream ended with no terminal signal.
    """
    if terminal_signal.done():
        return terminal_signal.result()
    if stream_task is None:
        raise RuntimeError("Streaming task was not initialized")

    # Wrap the future in a task so it can be raced via asyncio.wait.
    terminal_wait_task = asyncio.create_task(_wait_for_terminal_signal(terminal_signal))
    try:
        done, _pending = await asyncio.wait(
            {stream_task, terminal_wait_task},
            return_when=asyncio.FIRST_COMPLETED,
        )
        if terminal_wait_task in done:
            return terminal_wait_task.result()
        if stream_task in done:
            # Re-await to surface any exception from the stream task itself;
            # cancellation is tolerated here.
            with suppress(asyncio.CancelledError):
                await stream_task
            # The stream task may have set the signal just before finishing.
            if terminal_signal.done():
                return terminal_signal.result()
            raise UpstreamContractError(
                "OpenCode event stream ended before terminal signal "
                f"(session_id={session_id}, expected session.idle or session.error)"
            )
        return await terminal_wait_task
    finally:
        # Never leak the wrapper task if we exit before it completes.
        if not terminal_wait_task.done():
            terminal_wait_task.cancel()
            with suppress(asyncio.CancelledError):
                await terminal_wait_task
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
async def _wait_for_terminal_signal(
    terminal_signal: asyncio.Future[_StreamTerminalSignal],
) -> _StreamTerminalSignal:
    """Coroutine wrapper around the terminal-signal future.

    Exists so _await_stream_terminal_signal can schedule the future as a
    Task and race it against the stream task in asyncio.wait.
    """
    return await terminal_signal
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""JSON-RPC application and method helpers."""
|