opencode-a2a 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opencode_a2a/__init__.py +15 -0
- opencode_a2a/cli.py +52 -0
- opencode_a2a/config.py +160 -0
- opencode_a2a/contracts/__init__.py +1 -0
- opencode_a2a/contracts/extensions.py +948 -0
- opencode_a2a/execution/__init__.py +1 -0
- opencode_a2a/execution/executor.py +1582 -0
- opencode_a2a/execution/request_context.py +91 -0
- opencode_a2a/execution/stream_events.py +578 -0
- opencode_a2a/execution/stream_state.py +279 -0
- opencode_a2a/execution/upstream_errors.py +264 -0
- opencode_a2a/jsonrpc/__init__.py +1 -0
- opencode_a2a/jsonrpc/application.py +1036 -0
- opencode_a2a/jsonrpc/methods.py +537 -0
- opencode_a2a/jsonrpc/params.py +123 -0
- opencode_a2a/opencode_upstream_client.py +544 -0
- opencode_a2a/parts/__init__.py +1 -0
- opencode_a2a/parts/mapping.py +151 -0
- opencode_a2a/parts/text.py +24 -0
- opencode_a2a/profile/__init__.py +1 -0
- opencode_a2a/profile/runtime.py +254 -0
- opencode_a2a/server/__init__.py +1 -0
- opencode_a2a/server/agent_card.py +288 -0
- opencode_a2a/server/application.py +634 -0
- opencode_a2a/server/openapi.py +432 -0
- opencode_a2a/server/request_parsing.py +109 -0
- opencode_a2a-0.3.1.dist-info/METADATA +173 -0
- opencode_a2a-0.3.1.dist-info/RECORD +32 -0
- opencode_a2a-0.3.1.dist-info/WHEEL +5 -0
- opencode_a2a-0.3.1.dist-info/entry_points.txt +2 -0
- opencode_a2a-0.3.1.dist-info/licenses/LICENSE +176 -0
- opencode_a2a-0.3.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1582 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
import time
|
|
8
|
+
import uuid
|
|
9
|
+
from collections import defaultdict
|
|
10
|
+
from collections.abc import Mapping
|
|
11
|
+
from contextlib import suppress
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
import httpx
|
|
17
|
+
from a2a.server.agent_execution import AgentExecutor, RequestContext
|
|
18
|
+
from a2a.server.events.event_queue import EventQueue
|
|
19
|
+
from a2a.types import (
|
|
20
|
+
Artifact,
|
|
21
|
+
DataPart,
|
|
22
|
+
Message,
|
|
23
|
+
Part,
|
|
24
|
+
Role,
|
|
25
|
+
Task,
|
|
26
|
+
TaskArtifactUpdateEvent,
|
|
27
|
+
TaskState,
|
|
28
|
+
TaskStatus,
|
|
29
|
+
TaskStatusUpdateEvent,
|
|
30
|
+
TextPart,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
from ..opencode_upstream_client import OpencodeUpstreamClient, UpstreamContractError
|
|
34
|
+
from ..parts.mapping import (
|
|
35
|
+
UnsupportedA2AInputError,
|
|
36
|
+
extract_text_from_a2a_parts,
|
|
37
|
+
map_a2a_parts_to_opencode_parts,
|
|
38
|
+
summarize_a2a_parts,
|
|
39
|
+
)
|
|
40
|
+
from .request_context import (
|
|
41
|
+
_build_history,
|
|
42
|
+
_extract_opencode_directory,
|
|
43
|
+
_extract_shared_model,
|
|
44
|
+
_extract_shared_session_id,
|
|
45
|
+
)
|
|
46
|
+
from .stream_events import (
|
|
47
|
+
BlockType,
|
|
48
|
+
_build_progress_identity,
|
|
49
|
+
_coerce_number,
|
|
50
|
+
_extract_event_session_id,
|
|
51
|
+
_extract_interrupt_asked_event,
|
|
52
|
+
_extract_interrupt_resolved_event,
|
|
53
|
+
_extract_progress_metadata,
|
|
54
|
+
_extract_stream_message_id,
|
|
55
|
+
_extract_stream_part_id,
|
|
56
|
+
_extract_stream_role,
|
|
57
|
+
_extract_stream_session_id,
|
|
58
|
+
_extract_stream_snapshot_text,
|
|
59
|
+
_extract_stream_terminal_signal,
|
|
60
|
+
_extract_token_usage,
|
|
61
|
+
_extract_tool_part_payload,
|
|
62
|
+
_extract_upstream_error_from_event,
|
|
63
|
+
_extract_upstream_error_from_response,
|
|
64
|
+
_log_stream_event_debug,
|
|
65
|
+
_normalize_interrupt_question_options,
|
|
66
|
+
_normalize_interrupt_questions,
|
|
67
|
+
_normalize_role,
|
|
68
|
+
_preview_log_value,
|
|
69
|
+
_resolve_stream_block_type,
|
|
70
|
+
)
|
|
71
|
+
from .stream_state import (
|
|
72
|
+
_build_output_metadata,
|
|
73
|
+
_build_stream_artifact_metadata,
|
|
74
|
+
_merge_token_usage,
|
|
75
|
+
_NormalizedStreamChunk,
|
|
76
|
+
_PendingDelta,
|
|
77
|
+
_StreamOutputState,
|
|
78
|
+
_StreamPartState,
|
|
79
|
+
_TTLCache,
|
|
80
|
+
)
|
|
81
|
+
from .upstream_errors import (
|
|
82
|
+
_await_stream_terminal_signal,
|
|
83
|
+
_extract_upstream_error_detail,
|
|
84
|
+
_format_inband_upstream_error,
|
|
85
|
+
_format_stream_terminal_error,
|
|
86
|
+
_format_upstream_error,
|
|
87
|
+
_resolve_upstream_error_profile,
|
|
88
|
+
_StreamTerminalSignal,
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
# Module-level logger; all diagnostics in this module go through it.
logger = logging.getLogger(__name__)

# Re-exported private helpers: these underscore-prefixed names are imported
# from sibling modules above and re-published here so existing call sites
# (and tests) that import them from this module keep working.
__all__ = [
    "_build_output_metadata",
    "_build_progress_identity",
    "_coerce_number",
    "_extract_event_session_id",
    "_extract_interrupt_asked_event",
    "_extract_interrupt_resolved_event",
    "_extract_progress_metadata",
    "_extract_stream_session_id",
    "_extract_stream_snapshot_text",
    "_extract_stream_terminal_signal",
    "_extract_token_usage",
    "_extract_upstream_error_detail",
    "_extract_upstream_error_from_event",
    "_extract_upstream_error_from_response",
    "_format_inband_upstream_error",
    "_format_stream_terminal_error",
    "_format_upstream_error",
    "_merge_token_usage",
    "_normalize_interrupt_question_options",
    "_normalize_interrupt_questions",
    "_normalize_role",
    "_preview_log_value",
    "_resolve_upstream_error_profile",
]
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _emit_metric(
    name: str,
    value: float = 1.0,
    **labels: str | int | float | bool,
) -> None:
    """Log a metric observation at debug level.

    Labels are rendered as a comma-separated ``key=value`` list sorted by
    key; boolean label values are lowered to ``true``/``false`` so the
    output is stable across runs.
    """
    if not labels:
        logger.debug("metric=%s value=%s", name, value)
        return
    rendered: list[str] = []
    for key in sorted(labels):
        label = labels[key]
        shown = str(label).lower() if isinstance(label, bool) else label
        rendered.append(f"{key}={shown}")
    logger.debug("metric=%s value=%s labels=%s", name, value, ",".join(rendered))
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
@dataclass(frozen=True)
class _PreparedExecution:
    """Immutable, pre-validated inputs for one execute() run.

    Built once in ``OpencodeAgentExecutor.execute`` after all request
    validation passes, then handed to ``_ExecutionCoordinator`` which
    drives the upstream call.
    """

    identity: str  # caller identity from the call context ("anonymous" fallback)
    streaming_request: bool  # whether the client asked for a streaming response
    request_parts: list[Any]  # OpenCode-shaped parts mapped from the A2A message
    user_text: str  # plain-text rendering of the user message (may be empty)
    session_title: str  # title used when a new upstream session is created
    use_structured_parts: bool  # send structured parts instead of bare text
    bound_session_id: str | None  # session id pinned by the client, if any
    model_override: dict[str, str] | None  # per-request model selection, if any
    directory: str | None  # validated workspace directory for the request
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
class _ExecutionCoordinator:
    """Drives a single execute() request end to end.

    Owns the per-request lifecycle: registers the running task with the
    executor's bookkeeping, binds (or creates) the upstream OpenCode
    session, sends the message, relays streaming or non-streaming results
    onto the A2A event queue, and guarantees cleanup of all executor-side
    state in ``_cleanup``.
    """

    def __init__(
        self,
        executor: OpencodeAgentExecutor,
        *,
        context: RequestContext,
        event_queue: EventQueue,
        task_id: str,
        context_id: str,
        prepared: _PreparedExecution,
    ) -> None:
        self._executor = executor
        self._context = context
        self._event_queue = event_queue
        self._task_id = task_id
        self._context_id = context_id
        self._prepared = prepared
        # One stable artifact id for all streamed chunks of this task.
        self._stream_artifact_id = f"{task_id}:stream"
        self._stream_state = _StreamOutputState(
            user_text=prepared.user_text,
            stable_message_id=f"{task_id}:{context_id}:assistant",
            event_id_namespace=f"{task_id}:{context_id}:{self._stream_artifact_id}",
        )
        # Resolved by the stream consumer when the upstream stream terminates;
        # only created for streaming requests (see _bind_session).
        self._stream_terminal_signal: asyncio.Future[_StreamTerminalSignal] | None = None
        self._stop_event = asyncio.Event()
        self._stream_task: asyncio.Task[None] | None = None
        # True while a preferred-session ownership claim is pending
        # finalize (on success) or release (in _cleanup).
        self._pending_preferred_claim = False
        self._session_lock: asyncio.Lock | None = None
        self._session_id = ""
        self._execution_key = (task_id, context_id)

    async def run(self) -> None:
        """Execute the request, mapping each failure class to an A2A error.

        Always runs ``_cleanup`` in ``finally`` so executor bookkeeping,
        session locks, and the stream task are released on every path.
        """
        current_task = asyncio.current_task()
        if current_task is not None:
            await self._register_running_request(current_task)

        try:
            await self._bind_session()
            await self._enqueue_working_status()
            response = await self._send_message()
            # The send succeeded, so a pending preferred-session claim can
            # now be made permanent; clearing the flag prevents _cleanup
            # from releasing it again.
            if self._pending_preferred_claim:
                await self._executor._finalize_preferred_session_binding(
                    identity=self._prepared.identity,
                    context_id=self._context_id,
                    session_id=self._session_id,
                )
                self._pending_preferred_claim = False
            await self._handle_response(response)
        except httpx.HTTPStatusError as exc:
            logger.exception("OpenCode request failed with HTTP error")
            # Map the HTTP status to an (error_type, task state, message) triple.
            error_type, state, message = _format_upstream_error(
                exc,
                request="send_message",
            )
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=message,
                state=state,
                error_type=error_type,
                upstream_status=exc.response.status_code,
                streaming_request=self._prepared.streaming_request,
            )
        except httpx.TimeoutException as exc:
            logger.exception("OpenCode request timed out")
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=f"OpenCode request timed out: {exc}",
                state=TaskState.failed,
                error_type="UPSTREAM_TIMEOUT",
                streaming_request=self._prepared.streaming_request,
            )
        except UpstreamContractError as exc:
            # Upstream replied, but the payload did not match the expected
            # contract; warn (not exception) since the cause is logged upstream.
            logger.warning("OpenCode request failed with payload mismatch: %s", exc)
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=f"OpenCode payload mismatch: {exc}",
                state=TaskState.failed,
                error_type="UPSTREAM_PAYLOAD_ERROR",
                streaming_request=self._prepared.streaming_request,
            )
        except Exception as exc:
            # Top-level boundary: any other failure is reported to the client
            # rather than crashing the server task.
            logger.exception("OpenCode request failed")
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=f"OpenCode error: {exc}",
                state=TaskState.failed,
                streaming_request=self._prepared.streaming_request,
            )
        finally:
            await self._cleanup()

    async def _register_running_request(self, current_task: asyncio.Task[Any]) -> None:
        """Record this execution in the executor's running-request maps (so cancel() can find it)."""
        async with self._executor._lock:
            self._executor._running_requests[self._execution_key] = current_task
            self._executor._running_stop_events[self._execution_key] = self._stop_event
            self._executor._running_identities[self._execution_key] = self._prepared.identity

    async def _bind_session(self) -> None:
        """Resolve the upstream session, take its lock, and start the stream consumer if needed."""
        (
            self._session_id,
            self._pending_preferred_claim,
        ) = await self._executor._get_or_create_session(
            self._prepared.identity,
            self._context_id,
            self._prepared.session_title or self._prepared.user_text,
            preferred_session_id=self._prepared.bound_session_id,
            directory=self._prepared.directory,
        )
        # Serialize concurrent executions against the same upstream session.
        self._session_lock = await self._executor._get_session_lock(self._session_id)
        await self._session_lock.acquire()
        async with self._executor._lock:
            self._executor._running_session_ids[self._execution_key] = self._session_id
            self._executor._running_directories[self._execution_key] = self._prepared.directory

        if self._prepared.streaming_request:
            # The consumer task forwards upstream stream events onto the A2A
            # event queue and resolves the terminal-signal future when done.
            self._stream_terminal_signal = asyncio.get_running_loop().create_future()
            self._stream_task = asyncio.create_task(
                self._executor._consume_opencode_stream(
                    session_id=self._session_id,
                    identity=self._prepared.identity,
                    task_id=self._task_id,
                    context_id=self._context_id,
                    artifact_id=self._stream_artifact_id,
                    stream_state=self._stream_state,
                    event_queue=self._event_queue,
                    stop_event=self._stop_event,
                    directory=self._prepared.directory,
                    terminal_signal=self._stream_terminal_signal,
                )
            )

    async def _enqueue_working_status(self) -> None:
        """Tell the client the task has moved to the working state."""
        await self._event_queue.enqueue_event(
            TaskStatusUpdateEvent(
                task_id=self._task_id,
                context_id=self._context_id,
                status=TaskStatus(state=TaskState.working),
                final=False,
            )
        )

    async def _send_message(self) -> Any:
        """Send the prepared message upstream; returns the upstream client's response object."""
        send_kwargs: dict[str, Any] = {
            "directory": self._prepared.directory,
            "model_override": self._prepared.model_override,
        }
        if self._prepared.streaming_request:
            # Streaming calls may run much longer than the default timeout.
            send_kwargs["timeout_override"] = self._executor._client.stream_timeout

        if not self._prepared.use_structured_parts:
            # Text-only fast path.
            return await self._executor._client.send_message(
                self._session_id,
                self._prepared.user_text,
                **send_kwargs,
            )

        # Structured parts; user_text is sent as None when empty.
        return await self._executor._client.send_message(
            self._session_id,
            self._prepared.user_text or None,
            parts=self._prepared.request_parts,
            **send_kwargs,
        )

    async def _handle_response(self, response: Any) -> None:
        """Dispatch the upstream response: in-band error, streaming, or non-streaming path."""
        response_text = response.text or ""
        resolved_message_id = self._stream_state.resolve_message_id(response.message_id)
        response_error = _extract_upstream_error_from_response(response.raw)
        # Prefer usage from the response payload, falling back to whatever
        # accumulated in the stream state.
        resolved_token_usage = _merge_token_usage(
            _extract_token_usage(response.raw),
            self._stream_state.token_usage,
        )

        logger.debug(
            "OpenCode response task_id=%s session_id=%s message_id=%s text=%s",
            self._task_id,
            response.session_id,
            resolved_message_id,
            response_text,
        )

        # An in-band error in the response body takes precedence over any output.
        if response_error is not None:
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=response_error.message,
                state=response_error.state,
                error_type=response_error.error_type,
                upstream_status=response_error.upstream_status,
                streaming_request=self._prepared.streaming_request,
            )
            return

        if self._prepared.streaming_request:
            await self._handle_streaming_response(
                response=response,
                response_text=response_text,
                resolved_message_id=resolved_message_id,
                resolved_token_usage=resolved_token_usage,
            )
            return

        await self._handle_non_streaming_response(
            response=response,
            response_text=response_text,
            resolved_message_id=resolved_message_id,
            resolved_token_usage=resolved_token_usage,
        )

    async def _handle_streaming_response(
        self,
        *,
        response: Any,
        response_text: str,
        resolved_message_id: str,
        resolved_token_usage: Mapping[str, Any] | None,
    ) -> None:
        """Finish a streaming request: wait for the stream to settle, then emit the final events."""
        # The response object itself is unused on the streaming path; the
        # stream consumer's accumulated state is authoritative.
        del response
        if self._stream_terminal_signal is None:
            raise RuntimeError("Streaming terminal signal was not initialized")

        terminal_signal = await _await_stream_terminal_signal(
            stream_task=self._stream_task,
            terminal_signal=self._stream_terminal_signal,
            session_id=self._session_id,
        )
        if terminal_signal.state != TaskState.completed:
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=terminal_signal.message or "OpenCode execution failed.",
                state=terminal_signal.state,
                error_type=terminal_signal.error_type,
                upstream_status=terminal_signal.upstream_status,
                streaming_request=True,
            )
            return

        # An error observed mid-stream still fails the task even when the
        # stream itself reached a completed terminal state.
        if self._stream_state.upstream_error is not None:
            await self._executor._emit_error(
                self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                message=self._stream_state.upstream_error.message,
                state=self._stream_state.upstream_error.state,
                error_type=self._stream_state.upstream_error.error_type,
                upstream_status=self._stream_state.upstream_error.upstream_status,
                streaming_request=True,
            )
            return

        # Emit one final full-text snapshot if the streamed chunks did not
        # already cover the complete response text.
        if self._stream_state.should_emit_final_snapshot(response_text):
            sequence = self._stream_state.next_sequence()
            await _enqueue_artifact_update(
                event_queue=self._event_queue,
                task_id=self._task_id,
                context_id=self._context_id,
                artifact_id=self._stream_artifact_id,
                part=Part(root=TextPart(text=response_text)),
                append=self._stream_state.emitted_stream_chunk,
                last_chunk=True,
                artifact_metadata=_build_stream_artifact_metadata(
                    block_type=BlockType.TEXT,
                    shared_source="final_snapshot",
                    message_id=resolved_message_id,
                    event_id=self._stream_state.build_event_id(sequence),
                    sequence=sequence,
                ),
            )

        # Terminal status update carrying session id and token usage metadata.
        await self._event_queue.enqueue_event(
            TaskStatusUpdateEvent(
                task_id=self._task_id,
                context_id=self._context_id,
                status=TaskStatus(state=TaskState.completed),
                final=True,
                metadata=_build_output_metadata(
                    session_id=self._session_id,
                    usage=resolved_token_usage,
                    stream={
                        "message_id": resolved_message_id,
                        "event_id": f"{self._stream_state.event_id_namespace}:status",
                        "source": "status",
                    },
                ),
            )
        )

    async def _handle_non_streaming_response(
        self,
        *,
        response: Any,
        response_text: str,
        resolved_message_id: str,
        resolved_token_usage: Mapping[str, Any] | None,
    ) -> None:
        """Emit a single completed Task carrying the full response as message + artifact."""
        response_text = response_text or "(No text content returned by OpenCode.)"
        assistant_message = _build_assistant_message(
            task_id=self._task_id,
            context_id=self._context_id,
            text=response_text,
            message_id=resolved_message_id,
        )
        artifact = Artifact(
            artifact_id=str(uuid.uuid4()),
            name="response",
            parts=[Part(root=TextPart(text=response_text))],
        )
        history = _build_history(self._context)
        task = Task(
            id=self._task_id,
            context_id=self._context_id,
            status=TaskStatus(state=TaskState.completed),
            history=history,
            artifacts=[artifact],
            metadata=_build_output_metadata(
                session_id=response.session_id,
                usage=resolved_token_usage,
            ),
        )
        task.status.message = assistant_message
        await self._event_queue.enqueue_event(task)

    async def _cleanup(self) -> None:
        """Release all per-execution resources; safe to call on every exit path."""
        # A claim still pending here means the send failed before it was
        # finalized; release it best-effort.
        if self._pending_preferred_claim and self._session_id:
            with suppress(Exception):
                await self._executor._release_preferred_session_claim(
                    identity=self._prepared.identity,
                    session_id=self._session_id,
                )
        self._stop_event.set()
        if self._stream_task:
            self._stream_task.cancel()
            with suppress(asyncio.CancelledError):
                await self._stream_task
        if self._session_lock and self._session_lock.locked():
            self._session_lock.release()
        async with self._executor._lock:
            self._executor._running_requests.pop(self._execution_key, None)
            self._executor._running_stop_events.pop(self._execution_key, None)
            self._executor._running_identities.pop(self._execution_key, None)
            self._executor._running_session_ids.pop(self._execution_key, None)
            self._executor._running_directories.pop(self._execution_key, None)
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
class OpencodeAgentExecutor(AgentExecutor):
|
|
503
|
+
    def __init__(
        self,
        client: OpencodeUpstreamClient,
        *,
        streaming_enabled: bool,
        cancel_abort_timeout_seconds: float = 2.0,
        session_cache_ttl_seconds: int = 3600,
        session_cache_maxsize: int = 10_000,
    ) -> None:
        """Initialize the executor.

        Args:
            client: Upstream OpenCode client used for all session/message calls.
            streaming_enabled: Service-level switch for streaming responses.
            cancel_abort_timeout_seconds: Max time to wait for a best-effort
                upstream session abort during cancel(); negative values are
                clamped to zero.
            session_cache_ttl_seconds: TTL for the session caches below.
            session_cache_maxsize: Max entries in each session cache.
        """
        self._client = client
        self._streaming_enabled = streaming_enabled
        self._cancel_abort_timeout_seconds = max(0.0, float(cancel_abort_timeout_seconds))
        # (identity, context_id)-keyed session cache with TTL eviction.
        self._sessions = _TTLCache(
            ttl_seconds=session_cache_ttl_seconds,
            maxsize=session_cache_maxsize,
        )
        # Ownership map; refreshed on read so actively used sessions stay owned.
        self._session_owners = _TTLCache(
            ttl_seconds=session_cache_ttl_seconds,
            maxsize=session_cache_maxsize,
            refresh_on_get=True,
        )  # session_id -> identity
        self._pending_session_claims: dict[str, str] = {}
        # Guards all per-execution bookkeeping dictionaries below.
        self._lock = asyncio.Lock()
        self._inflight_session_creates: dict[tuple[str, str], asyncio.Task[str]] = {}
        self._session_locks: dict[str, asyncio.Lock] = {}
        # The following are keyed by (task_id, context_id) for one running execution.
        self._running_requests: dict[tuple[str, str], asyncio.Task[Any]] = {}
        self._running_stop_events: dict[tuple[str, str], asyncio.Event] = {}
        self._running_identities: dict[tuple[str, str], str] = {}
        self._running_session_ids: dict[tuple[str, str], str] = {}
        self._running_directories: dict[tuple[str, str], str | None] = {}
|
|
533
|
+
|
|
534
|
+
    @staticmethod
    def _emit_metric(
        name: str,
        value: float = 1.0,
        **labels: str | int | float | bool,
    ) -> None:
        """Delegate to the module-level metric logger (kept as a method-level hook)."""
        _emit_metric(name, value, **labels)
|
|
541
|
+
|
|
542
|
+
def _resolve_and_validate_directory(self, requested: str | None) -> str | None:
|
|
543
|
+
"""Normalizes and validates the directory parameter against workspace boundaries.
|
|
544
|
+
|
|
545
|
+
Returns:
|
|
546
|
+
The normalized absolute path string if valid.
|
|
547
|
+
Raises:
|
|
548
|
+
ValueError: If the path is outside the allowed workspace.
|
|
549
|
+
"""
|
|
550
|
+
base_dir_str = self._client.directory or os.getcwd()
|
|
551
|
+
base_path = Path(base_dir_str).resolve()
|
|
552
|
+
|
|
553
|
+
if requested is not None and not isinstance(requested, str):
|
|
554
|
+
raise ValueError("Directory must be a string path")
|
|
555
|
+
|
|
556
|
+
requested = requested.strip() if requested else requested
|
|
557
|
+
if not requested:
|
|
558
|
+
return str(base_path)
|
|
559
|
+
|
|
560
|
+
def _resolve_requested(path: str) -> Path:
|
|
561
|
+
p = Path(path)
|
|
562
|
+
if not p.is_absolute():
|
|
563
|
+
p = base_path / p
|
|
564
|
+
return p.resolve()
|
|
565
|
+
|
|
566
|
+
# 1. Deny override if disabled in settings
|
|
567
|
+
if not self._client.settings.a2a_allow_directory_override:
|
|
568
|
+
# If requested matches normalized base, it's fine.
|
|
569
|
+
requested_path = _resolve_requested(requested)
|
|
570
|
+
if requested_path == base_path:
|
|
571
|
+
return str(base_path)
|
|
572
|
+
raise ValueError("Directory override is disabled by service configuration")
|
|
573
|
+
|
|
574
|
+
# 2. Resolve requested path
|
|
575
|
+
requested_path = _resolve_requested(requested)
|
|
576
|
+
|
|
577
|
+
# 3. Boundary check: must be subpath of base_path
|
|
578
|
+
try:
|
|
579
|
+
requested_path.relative_to(base_path)
|
|
580
|
+
except ValueError as err:
|
|
581
|
+
raise ValueError(
|
|
582
|
+
f"Directory {requested} is outside the allowed workspace {base_path}"
|
|
583
|
+
) from err
|
|
584
|
+
|
|
585
|
+
return str(requested_path)
|
|
586
|
+
|
|
587
|
+
def resolve_directory_for_control(self, requested: str | None) -> str | None:
|
|
588
|
+
"""Shared directory policy for session control JSON-RPC methods."""
|
|
589
|
+
return self._resolve_and_validate_directory(requested)
|
|
590
|
+
|
|
591
|
+
async def claim_session_for_control(self, *, identity: str, session_id: str) -> bool:
|
|
592
|
+
"""Reserve control access for a session.
|
|
593
|
+
|
|
594
|
+
Returns True when caller created a pending ownership claim that must be finalized or
|
|
595
|
+
released after upstream call completes.
|
|
596
|
+
"""
|
|
597
|
+
return await self._claim_preferred_session(identity=identity, session_id=session_id)
|
|
598
|
+
|
|
599
|
+
async def finalize_session_for_control(self, *, identity: str, session_id: str) -> None:
|
|
600
|
+
"""Finalize control-session ownership after upstream call succeeds."""
|
|
601
|
+
await self._finalize_session_claim(identity=identity, session_id=session_id)
|
|
602
|
+
|
|
603
|
+
async def release_session_for_control(self, *, identity: str, session_id: str) -> None:
|
|
604
|
+
"""Release pending control-session ownership on failure."""
|
|
605
|
+
await self._release_preferred_session_claim(identity=identity, session_id=session_id)
|
|
606
|
+
|
|
607
|
+
    async def execute(self, context: RequestContext, event_queue: EventQueue) -> None:
        """Validate an incoming A2A request and run it via _ExecutionCoordinator.

        Each validation failure is reported to the client as a failed-task
        error event (never raised); a fully valid request is packaged into a
        _PreparedExecution and executed by the coordinator.
        """
        task_id = context.task_id
        context_id = context.context_id
        if not task_id or not context_id:
            await self._emit_error(
                event_queue,
                task_id=task_id or "unknown",
                context_id=context_id or "unknown",
                message="Missing task_id or context_id in request context",
                state=TaskState.failed,
                streaming_request=self._should_stream(context),
            )
            return

        # Identity comes from the authenticated call context when present.
        call_context = context.call_context
        identity = (call_context.state.get("identity") if call_context else None) or "anonymous"

        streaming_request = self._should_stream(context)
        message_parts = (
            getattr(context.message, "parts", None) if context.message is not None else None
        )
        try:
            request_parts = map_a2a_parts_to_opencode_parts(message_parts)
        except UnsupportedA2AInputError as exc:
            await self._emit_error(
                event_queue,
                task_id=task_id,
                context_id=context_id,
                message=str(exc),
                state=TaskState.failed,
                streaming_request=streaming_request,
            )
            return

        user_text = extract_text_from_a2a_parts(message_parts) or context.get_user_input().strip()
        session_title = user_text or summarize_a2a_parts(message_parts)
        # A single text part that exactly mirrors user_text can use the
        # plain-text send path instead of structured parts.
        text_only_request = (
            len(request_parts) == 1
            and request_parts[0].get("type") == "text"
            and request_parts[0].get("text") == user_text
        )
        use_structured_parts = bool(request_parts) and not text_only_request
        bound_session_id = _extract_shared_session_id(context)
        model_override = _extract_shared_model(context)
        # Directory validation
        metadata = context.metadata
        if metadata is not None and not isinstance(metadata, Mapping):
            await self._emit_error(
                event_queue,
                task_id=task_id,
                context_id=context_id,
                message="Invalid metadata: expected an object/map.",
                state=TaskState.failed,
                streaming_request=streaming_request,
            )
            return
        requested_dir = _extract_opencode_directory(context)

        try:
            directory = self._resolve_and_validate_directory(requested_dir)
        except ValueError as e:
            logger.warning("Directory validation failed: %s", e)
            await self._emit_error(
                event_queue,
                task_id=task_id,
                context_id=context_id,
                message=str(e),
                state=TaskState.failed,
                streaming_request=streaming_request,
            )
            return

        # Nothing usable in the message at all (no text and no mapped parts).
        if not user_text and not request_parts:
            await self._emit_error(
                event_queue,
                task_id=task_id,
                context_id=context_id,
                message="Only text and file input are supported.",
                state=TaskState.failed,
                streaming_request=streaming_request,
            )
            return

        logger.debug(
            (
                "Received message identity=%s task_id=%s context_id=%s "
                "streaming=%s text=%s part_count=%s"
            ),
            identity,
            task_id,
            context_id,
            streaming_request,
            user_text,
            len(request_parts),
        )
        prepared = _PreparedExecution(
            identity=identity,
            streaming_request=streaming_request,
            request_parts=request_parts,
            user_text=user_text,
            session_title=session_title or user_text,
            use_structured_parts=use_structured_parts,
            bound_session_id=bound_session_id,
            model_override=model_override,
            directory=directory,
        )
        coordinator = _ExecutionCoordinator(
            self,
            context=context,
            event_queue=event_queue,
            task_id=task_id,
            context_id=context_id,
            prepared=prepared,
        )
        await coordinator.run()
|
|
722
|
+
|
|
723
|
+
async def cancel(self, context: RequestContext, event_queue: EventQueue) -> None:
|
|
724
|
+
task_id = context.task_id
|
|
725
|
+
context_id = context.context_id
|
|
726
|
+
started_at = time.monotonic()
|
|
727
|
+
abort_outcome = "not_attempted"
|
|
728
|
+
self._emit_metric("a2a_cancel_requests_total")
|
|
729
|
+
try:
|
|
730
|
+
if not task_id or not context_id:
|
|
731
|
+
abort_outcome = "invalid_request_context"
|
|
732
|
+
await self._emit_error(
|
|
733
|
+
event_queue,
|
|
734
|
+
task_id=task_id or "unknown",
|
|
735
|
+
context_id=context_id or "unknown",
|
|
736
|
+
message="Missing task_id or context_id in request context",
|
|
737
|
+
state=TaskState.failed,
|
|
738
|
+
streaming_request=False,
|
|
739
|
+
)
|
|
740
|
+
return
|
|
741
|
+
|
|
742
|
+
call_context = context.call_context
|
|
743
|
+
identity = (call_context.state.get("identity") if call_context else None) or "anonymous"
|
|
744
|
+
|
|
745
|
+
event = TaskStatusUpdateEvent(
|
|
746
|
+
task_id=task_id,
|
|
747
|
+
context_id=context_id,
|
|
748
|
+
status=TaskStatus(state=TaskState.canceled),
|
|
749
|
+
final=True,
|
|
750
|
+
)
|
|
751
|
+
await event_queue.enqueue_event(event)
|
|
752
|
+
|
|
753
|
+
execution_key = (task_id, context_id)
|
|
754
|
+
async with self._lock:
|
|
755
|
+
running_identity = self._running_identities.get(execution_key, identity)
|
|
756
|
+
running_task = self._running_requests.get(execution_key)
|
|
757
|
+
stop_event = self._running_stop_events.get(execution_key)
|
|
758
|
+
running_session_id = self._running_session_ids.get(execution_key)
|
|
759
|
+
running_directory = self._running_directories.get(execution_key)
|
|
760
|
+
self._sessions.pop((running_identity, context_id))
|
|
761
|
+
inflight = self._inflight_session_creates.pop((running_identity, context_id), None)
|
|
762
|
+
if stop_event:
|
|
763
|
+
stop_event.set()
|
|
764
|
+
should_cancel_running_task = (
|
|
765
|
+
running_task
|
|
766
|
+
and running_task is not asyncio.current_task()
|
|
767
|
+
and not running_task.done()
|
|
768
|
+
)
|
|
769
|
+
if running_session_id and should_cancel_running_task:
|
|
770
|
+
self._emit_metric("a2a_cancel_abort_attempt_total")
|
|
771
|
+
try:
|
|
772
|
+
await asyncio.wait_for(
|
|
773
|
+
self._client.abort_session(
|
|
774
|
+
running_session_id,
|
|
775
|
+
directory=running_directory,
|
|
776
|
+
),
|
|
777
|
+
timeout=self._cancel_abort_timeout_seconds,
|
|
778
|
+
)
|
|
779
|
+
abort_outcome = "success"
|
|
780
|
+
self._emit_metric("a2a_cancel_abort_success_total")
|
|
781
|
+
except TimeoutError:
|
|
782
|
+
abort_outcome = "timeout"
|
|
783
|
+
self._emit_metric("a2a_cancel_abort_timeout_total")
|
|
784
|
+
logger.warning(
|
|
785
|
+
(
|
|
786
|
+
"Best-effort session abort timed out task_id=%s "
|
|
787
|
+
"context_id=%s session_id=%s timeout=%.2fs"
|
|
788
|
+
),
|
|
789
|
+
task_id,
|
|
790
|
+
context_id,
|
|
791
|
+
running_session_id,
|
|
792
|
+
self._cancel_abort_timeout_seconds,
|
|
793
|
+
)
|
|
794
|
+
except (httpx.HTTPError, RuntimeError) as exc:
|
|
795
|
+
abort_outcome = "error"
|
|
796
|
+
self._emit_metric("a2a_cancel_abort_error_total")
|
|
797
|
+
logger.warning(
|
|
798
|
+
(
|
|
799
|
+
"Best-effort session abort failed task_id=%s "
|
|
800
|
+
"context_id=%s session_id=%s: %s"
|
|
801
|
+
),
|
|
802
|
+
task_id,
|
|
803
|
+
context_id,
|
|
804
|
+
running_session_id,
|
|
805
|
+
exc,
|
|
806
|
+
)
|
|
807
|
+
elif should_cancel_running_task:
|
|
808
|
+
abort_outcome = "no_session_binding"
|
|
809
|
+
else:
|
|
810
|
+
abort_outcome = "no_running_task"
|
|
811
|
+
if should_cancel_running_task:
|
|
812
|
+
if running_task is not None:
|
|
813
|
+
running_task.cancel()
|
|
814
|
+
if inflight:
|
|
815
|
+
inflight.cancel()
|
|
816
|
+
with suppress(asyncio.CancelledError):
|
|
817
|
+
await inflight
|
|
818
|
+
except Exception as exc:
|
|
819
|
+
abort_outcome = "cancel_error"
|
|
820
|
+
self._emit_metric("a2a_cancel_errors_total")
|
|
821
|
+
logger.exception("Cancel failed")
|
|
822
|
+
if task_id and context_id:
|
|
823
|
+
with suppress(Exception):
|
|
824
|
+
await self._emit_error(
|
|
825
|
+
event_queue,
|
|
826
|
+
task_id=task_id,
|
|
827
|
+
context_id=context_id,
|
|
828
|
+
message=f"Cancel failed: {exc}",
|
|
829
|
+
state=TaskState.failed,
|
|
830
|
+
streaming_request=False,
|
|
831
|
+
)
|
|
832
|
+
finally:
|
|
833
|
+
self._emit_metric(
|
|
834
|
+
"a2a_cancel_duration_ms",
|
|
835
|
+
(time.monotonic() - started_at) * 1000.0,
|
|
836
|
+
abort_outcome=abort_outcome,
|
|
837
|
+
)
|
|
838
|
+
|
|
839
|
+
    async def _get_or_create_session(
        self,
        identity: str,
        context_id: str,
        title: str,
        *,
        preferred_session_id: str | None = None,
        directory: str | None = None,
    ) -> tuple[str, bool]:
        """Resolve the OpenCode session bound to ``(identity, context_id)``.

        Returns a ``(session_id, claim_pending)`` tuple. ``claim_pending`` is
        True only on the preferred-session path, when the claim still has to be
        finalized later (see ``_claim_preferred_session``). On the create path
        concurrent callers for the same cache key share one in-flight create
        task so the upstream session is created at most once.

        Raises:
            PermissionError: if the session turns out to be owned by a
                different identity.
        """
        # Caller explicitly bound the request to a known OpenCode session.
        if preferred_session_id:
            pending_claim = await self._claim_preferred_session(
                identity=identity,
                session_id=preferred_session_id,
            )
            if not pending_claim:
                # Already owned by this identity: safe to bind the cache now.
                self._sessions.set((identity, context_id), preferred_session_id)
            return preferred_session_id, pending_claim

        task: asyncio.Task[str] | None = None
        cache_key = (identity, context_id)
        async with self._lock:
            existing = self._sessions.get(cache_key)
            if existing:
                return existing, False
            # Join an in-flight create for the same key, or start a new one.
            task = self._inflight_session_creates.get(cache_key)
            if task is None:
                task = asyncio.create_task(
                    self._client.create_session(title=title, directory=directory)
                )
                self._inflight_session_creates[cache_key] = task

        # Await outside the lock so other keys are not blocked by the create.
        try:
            session_id = await task
        except Exception:
            async with self._lock:
                # Only drop the marker if it is still *our* task; a newer create
                # may already have replaced it.
                if self._inflight_session_creates.get(cache_key) is task:
                    self._inflight_session_creates.pop(cache_key, None)
            raise

        async with self._lock:
            # Session create finished; commit to cache and drop inflight marker.
            owner = self._session_owners.get(session_id)
            if owner and owner != identity:
                if self._inflight_session_creates.get(cache_key) is task:
                    self._inflight_session_creates.pop(cache_key, None)
                raise PermissionError(f"Session {session_id} is not owned by you")
            self._sessions.set(cache_key, session_id)
            if not owner:
                self._session_owners.set(session_id, identity)
            if self._inflight_session_creates.get(cache_key) is task:
                self._inflight_session_creates.pop(cache_key, None)
        return session_id, False
|
|
892
|
+
|
|
893
|
+
async def _finalize_preferred_session_binding(
|
|
894
|
+
self,
|
|
895
|
+
*,
|
|
896
|
+
identity: str,
|
|
897
|
+
context_id: str,
|
|
898
|
+
session_id: str,
|
|
899
|
+
) -> None:
|
|
900
|
+
await self._finalize_session_claim(identity=identity, session_id=session_id)
|
|
901
|
+
async with self._lock:
|
|
902
|
+
self._sessions.set((identity, context_id), session_id)
|
|
903
|
+
|
|
904
|
+
async def _claim_preferred_session(self, *, identity: str, session_id: str) -> bool:
|
|
905
|
+
async with self._lock:
|
|
906
|
+
owner = self._session_owners.get(session_id)
|
|
907
|
+
pending_owner = self._pending_session_claims.get(session_id)
|
|
908
|
+
if owner and owner != identity:
|
|
909
|
+
logger.warning(
|
|
910
|
+
"Identity %s tried to hijack session %s owned by %s",
|
|
911
|
+
identity,
|
|
912
|
+
session_id,
|
|
913
|
+
owner,
|
|
914
|
+
)
|
|
915
|
+
raise PermissionError(f"Session {session_id} is not owned by you")
|
|
916
|
+
|
|
917
|
+
if pending_owner and pending_owner != identity:
|
|
918
|
+
logger.warning(
|
|
919
|
+
"Identity %s tried to use session %s while pending owner is %s",
|
|
920
|
+
identity,
|
|
921
|
+
session_id,
|
|
922
|
+
pending_owner,
|
|
923
|
+
)
|
|
924
|
+
raise PermissionError(f"Session {session_id} is not owned by you")
|
|
925
|
+
|
|
926
|
+
if owner == identity:
|
|
927
|
+
return False
|
|
928
|
+
|
|
929
|
+
self._pending_session_claims[session_id] = identity
|
|
930
|
+
return True
|
|
931
|
+
|
|
932
|
+
async def _finalize_session_claim(self, *, identity: str, session_id: str) -> None:
|
|
933
|
+
async with self._lock:
|
|
934
|
+
owner = self._session_owners.get(session_id)
|
|
935
|
+
pending_owner = self._pending_session_claims.get(session_id)
|
|
936
|
+
if owner and owner != identity:
|
|
937
|
+
raise PermissionError(f"Session {session_id} is not owned by you")
|
|
938
|
+
if pending_owner and pending_owner != identity:
|
|
939
|
+
raise PermissionError(f"Session {session_id} is not owned by you")
|
|
940
|
+
|
|
941
|
+
self._session_owners.set(session_id, identity)
|
|
942
|
+
if self._pending_session_claims.get(session_id) == identity:
|
|
943
|
+
self._pending_session_claims.pop(session_id, None)
|
|
944
|
+
|
|
945
|
+
async def _release_preferred_session_claim(self, *, identity: str, session_id: str) -> None:
|
|
946
|
+
async with self._lock:
|
|
947
|
+
if self._pending_session_claims.get(session_id) == identity:
|
|
948
|
+
self._pending_session_claims.pop(session_id, None)
|
|
949
|
+
|
|
950
|
+
async def _get_session_lock(self, session_id: str) -> asyncio.Lock:
|
|
951
|
+
async with self._lock:
|
|
952
|
+
lock = self._session_locks.get(session_id)
|
|
953
|
+
if lock is None:
|
|
954
|
+
lock = asyncio.Lock()
|
|
955
|
+
self._session_locks[session_id] = lock
|
|
956
|
+
return lock
|
|
957
|
+
|
|
958
|
+
async def _emit_error(
|
|
959
|
+
self,
|
|
960
|
+
event_queue: EventQueue,
|
|
961
|
+
task_id: str,
|
|
962
|
+
context_id: str,
|
|
963
|
+
message: str,
|
|
964
|
+
*,
|
|
965
|
+
state: TaskState,
|
|
966
|
+
error_type: str | None = None,
|
|
967
|
+
upstream_status: int | None = None,
|
|
968
|
+
streaming_request: bool,
|
|
969
|
+
) -> None:
|
|
970
|
+
error_message = Message(
|
|
971
|
+
message_id=str(uuid.uuid4()),
|
|
972
|
+
role=Role.agent,
|
|
973
|
+
parts=[Part(root=TextPart(text=message))],
|
|
974
|
+
task_id=task_id,
|
|
975
|
+
context_id=context_id,
|
|
976
|
+
)
|
|
977
|
+
error_metadata: dict[str, Any] | None = None
|
|
978
|
+
if error_type or upstream_status is not None:
|
|
979
|
+
error_payload: dict[str, Any] = {}
|
|
980
|
+
if error_type:
|
|
981
|
+
error_payload["type"] = error_type
|
|
982
|
+
if upstream_status is not None:
|
|
983
|
+
error_payload["upstream_status"] = upstream_status
|
|
984
|
+
error_metadata = {"opencode": {"error": error_payload}}
|
|
985
|
+
if streaming_request:
|
|
986
|
+
await _enqueue_artifact_update(
|
|
987
|
+
event_queue=event_queue,
|
|
988
|
+
task_id=task_id,
|
|
989
|
+
context_id=context_id,
|
|
990
|
+
artifact_id=f"{task_id}:error",
|
|
991
|
+
part=Part(root=TextPart(text=message)),
|
|
992
|
+
append=False,
|
|
993
|
+
last_chunk=True,
|
|
994
|
+
)
|
|
995
|
+
await event_queue.enqueue_event(
|
|
996
|
+
TaskStatusUpdateEvent(
|
|
997
|
+
task_id=task_id,
|
|
998
|
+
context_id=context_id,
|
|
999
|
+
status=TaskStatus(state=state),
|
|
1000
|
+
metadata=error_metadata,
|
|
1001
|
+
final=True,
|
|
1002
|
+
)
|
|
1003
|
+
)
|
|
1004
|
+
return
|
|
1005
|
+
task = Task(
|
|
1006
|
+
id=task_id,
|
|
1007
|
+
context_id=context_id,
|
|
1008
|
+
status=TaskStatus(state=state, message=error_message),
|
|
1009
|
+
history=[error_message],
|
|
1010
|
+
metadata=error_metadata,
|
|
1011
|
+
)
|
|
1012
|
+
await event_queue.enqueue_event(task)
|
|
1013
|
+
|
|
1014
|
+
def _should_stream(self, context: RequestContext) -> bool:
|
|
1015
|
+
if not self._streaming_enabled:
|
|
1016
|
+
return False
|
|
1017
|
+
call_context = context.call_context
|
|
1018
|
+
if not call_context:
|
|
1019
|
+
return False
|
|
1020
|
+
if call_context.state.get("a2a_streaming_request"):
|
|
1021
|
+
return True
|
|
1022
|
+
# JSON-RPC transport sets method in call context state.
|
|
1023
|
+
method = call_context.state.get("method")
|
|
1024
|
+
return method == "message/stream"
|
|
1025
|
+
|
|
1026
|
+
    async def _consume_opencode_stream(
        self,
        *,
        session_id: str,
        identity: str,
        task_id: str,
        context_id: str,
        artifact_id: str,
        stream_state: _StreamOutputState,
        event_queue: EventQueue,
        stop_event: asyncio.Event,
        terminal_signal: asyncio.Future[_StreamTerminalSignal],
        directory: str | None = None,
    ) -> None:
        """Consume the upstream OpenCode event stream for one session and
        translate it into A2A artifact/status events.

        Runs until ``stop_event`` is set, a terminal signal is observed, or the
        stream ends cleanly. Transient stream failures are retried with
        exponential backoff. Deltas that arrive before a part's first full
        update are buffered and replayed once the part is known; user/system
        authored parts are never re-emitted to the client.
        """
        # Per-part incremental state (text buffer, role, message binding).
        part_states: dict[str, _StreamPartState] = {}
        # Text deltas seen before the owning part's first update event.
        pending_deltas: defaultdict[str, list[_PendingDelta]] = defaultdict(list)
        backoff = 0.5
        max_backoff = 5.0

        async def _emit_chunks(chunks: list[_NormalizedStreamChunk]) -> None:
            """Dedupe, sequence, and enqueue normalized chunks as artifact updates."""
            for chunk in chunks:
                resolved_message_id = stream_state.resolve_message_id(chunk.message_id)
                chunk_text = getattr(chunk.part.root, "text", "")
                # Drop the echo of the user's own prompt at stream start.
                if stream_state.should_drop_initial_user_echo(
                    chunk_text,
                    block_type=chunk.block_type,
                    role=chunk.role,
                ):
                    continue
                should_emit, effective_append = stream_state.register_chunk(
                    block_type=chunk.block_type,
                    content_key=chunk.content_key,
                    append=chunk.append,
                    accumulate_content=chunk.accumulate_content,
                )
                if not should_emit:
                    continue
                sequence = stream_state.next_sequence()
                await _enqueue_artifact_update(
                    event_queue=event_queue,
                    task_id=task_id,
                    context_id=context_id,
                    artifact_id=artifact_id,
                    part=chunk.part,
                    append=effective_append,
                    last_chunk=False,
                    artifact_metadata=_build_stream_artifact_metadata(
                        block_type=chunk.block_type,
                        shared_source=chunk.shared_source,
                        message_id=resolved_message_id,
                        role=chunk.role,
                        event_id=stream_state.build_event_id(sequence),
                        sequence=sequence,
                    ),
                )
                logger.debug(
                    "Stream chunk task_id=%s session_id=%s block_type=%s append=%s "
                    "shared_source=%s internal_source=%s text=%s",
                    task_id,
                    session_id,
                    chunk.block_type,
                    effective_append,
                    chunk.shared_source,
                    chunk.internal_source,
                    chunk.content_key,
                )
                if chunk.block_type == BlockType.TOOL_CALL:
                    _emit_metric("tool_call_chunks_emitted_total")

        async def _emit_interrupt_status(
            *,
            state: TaskState,
            request_id: str,
            interrupt_type: str,
            phase: str,
            details: Mapping[str, Any] | None = None,
            resolution: str | None = None,
        ) -> None:
            """Publish a non-final status update for an interrupt ask/resolve."""
            interrupt_metadata: dict[str, Any] = {
                "request_id": request_id,
                "type": interrupt_type,
                "phase": phase,
            }
            if details is not None:
                interrupt_metadata["details"] = dict(details)
            if resolution is not None:
                interrupt_metadata["resolution"] = resolution
            sequence = stream_state.next_sequence()
            await event_queue.enqueue_event(
                TaskStatusUpdateEvent(
                    task_id=task_id,
                    context_id=context_id,
                    status=TaskStatus(state=state),
                    final=False,
                    metadata=_build_output_metadata(
                        session_id=session_id,
                        stream={
                            "message_id": stream_state.resolve_message_id(None),
                            "event_id": stream_state.build_event_id(sequence),
                            "source": "interrupt",
                            "sequence": sequence,
                        },
                        interrupt={
                            **interrupt_metadata,
                        },
                    ),
                )
            )
            if phase == "asked":
                _emit_metric("interrupt_requests_total")
            elif phase == "resolved":
                _emit_metric("interrupt_resolved_total")

        async def _emit_progress_status(
            *,
            message_id: str | None,
            progress: Mapping[str, Any],
        ) -> None:
            """Publish a non-final 'working' status carrying progress metadata."""
            sequence = stream_state.next_sequence()
            await event_queue.enqueue_event(
                TaskStatusUpdateEvent(
                    task_id=task_id,
                    context_id=context_id,
                    status=TaskStatus(state=TaskState.working),
                    final=False,
                    metadata=_build_output_metadata(
                        session_id=session_id,
                        stream={
                            "message_id": stream_state.resolve_message_id(message_id),
                            "event_id": stream_state.build_event_id(sequence),
                            "source": "progress",
                            "sequence": sequence,
                        },
                        progress=dict(progress),
                    ),
                )
            )

        def _new_text_chunk(
            *,
            text: str,
            append: bool,
            block_type: BlockType,
            internal_source: str,
            shared_source: str,
            message_id: str | None,
            role: str | None,
        ) -> _NormalizedStreamChunk:
            """Wrap a text fragment as a normalized chunk (content accumulates)."""
            return _NormalizedStreamChunk(
                part=Part(root=TextPart(text=text)),
                content_key=text,
                accumulate_content=True,
                append=append,
                block_type=block_type,
                internal_source=internal_source,
                shared_source=shared_source,
                message_id=message_id,
                role=role,
            )

        def _new_data_chunk(
            *,
            data: Mapping[str, Any],
            content_key: str,
            append: bool,
            block_type: BlockType,
            internal_source: str,
            shared_source: str,
            message_id: str | None,
            role: str | None,
        ) -> _NormalizedStreamChunk:
            """Wrap a structured payload as a normalized chunk (deduped by key)."""
            return _NormalizedStreamChunk(
                part=Part(root=DataPart(data=dict(data))),
                content_key=content_key,
                accumulate_content=False,
                append=append,
                block_type=block_type,
                internal_source=internal_source,
                shared_source=shared_source,
                message_id=message_id,
                role=role,
            )

        def _upsert_part_state(
            *,
            part_id: str,
            part: Mapping[str, Any],
            props: Mapping[str, Any],
            role: str | None,
            message_id: str | None,
        ) -> _StreamPartState | None:
            """Create or refresh per-part state; None when the part has no
            resolvable block type (such parts are ignored)."""
            block_type = _resolve_stream_block_type(part, props)
            if block_type is None:
                return None
            state = part_states.get(part_id)
            if state is None:
                state = _StreamPartState(
                    block_type=block_type,
                    message_id=message_id,
                    role=role,
                )
                part_states[part_id] = state
                return state
            state.block_type = block_type
            if role is not None:
                state.role = role
            if message_id:
                state.message_id = message_id
            return state

        def _delta_chunks(
            *,
            state: _StreamPartState,
            delta_text: str,
            message_id: str | None,
            internal_source: str,
        ) -> list[_NormalizedStreamChunk]:
            """Append a delta to the part buffer and emit it as one text chunk."""
            if not delta_text:
                return []
            if message_id:
                state.message_id = message_id
            state.buffer = f"{state.buffer}{delta_text}"
            state.saw_delta = True
            return [
                _new_text_chunk(
                    text=delta_text,
                    append=True,
                    block_type=state.block_type,
                    internal_source=internal_source,
                    shared_source="stream",
                    message_id=state.message_id,
                    role=state.role,
                )
            ]

        def _snapshot_chunks(
            *,
            state: _StreamPartState,
            snapshot: str,
            message_id: str | None,
            part_id: str,
        ) -> list[_NormalizedStreamChunk]:
            """Diff a full-text snapshot against the buffer.

            Only prefix-extending snapshots produce output; a snapshot that
            rewrites earlier text is swallowed (buffer updated, warning logged)
            so already-emitted client text is never contradicted.
            """
            if message_id:
                state.message_id = message_id
            previous = state.buffer
            if snapshot == previous:
                return []
            if snapshot.startswith(previous):
                delta_text = snapshot[len(previous) :]
                state.buffer = snapshot
                if not delta_text:
                    return []
                return [
                    _new_text_chunk(
                        text=delta_text,
                        append=True,
                        block_type=state.block_type,
                        internal_source="part_text_diff",
                        shared_source="stream",
                        message_id=state.message_id,
                        role=state.role,
                    )
                ]
            state.buffer = snapshot
            logger.warning(
                "Suppressing non-prefix snapshot rewrite "
                "task_id=%s session_id=%s part_id=%s block_type=%s had_delta=%s",
                task_id,
                session_id,
                part_id,
                state.block_type.value,
                state.saw_delta,
            )
            return []

        def _tool_chunks(
            *,
            state: _StreamPartState,
            part: Mapping[str, Any],
            message_id: str | None,
        ) -> list[_NormalizedStreamChunk]:
            """Emit a tool-call payload update when it changed since last seen.

            The canonical JSON serialization doubles as the dedupe key.
            """
            tool_payload = _extract_tool_part_payload(part)
            if tool_payload is None:
                return []
            content_key = json.dumps(
                tool_payload,
                ensure_ascii=False,
                sort_keys=True,
                separators=(",", ":"),
            )
            if message_id:
                state.message_id = message_id
            previous = state.buffer
            if content_key == previous:
                return []
            state.buffer = content_key
            return [
                _new_data_chunk(
                    data=tool_payload,
                    content_key=content_key,
                    append=bool(previous),
                    block_type=state.block_type,
                    internal_source="tool_part_update",
                    shared_source="tool_part_update",
                    message_id=state.message_id,
                    role=state.role,
                )
            ]

        try:
            # Outer loop: reconnect with backoff until stopped or stream ends.
            while not stop_event.is_set():
                try:
                    async for event in self._client.stream_events(
                        stop_event=stop_event, directory=directory
                    ):
                        if stop_event.is_set():
                            break
                        _log_stream_event_debug(
                            event,
                            limit=max(0, self._client.settings.a2a_log_body_limit),
                        )
                        event_type = event.get("type")
                        if not isinstance(event_type, str):
                            continue
                        props = event.get("properties")
                        if not isinstance(props, Mapping):
                            continue
                        event_session_id = _extract_event_session_id(event)
                        # Session-scoped bookkeeping: progress, errors, terminal
                        # signals, token usage, and interrupt lifecycle.
                        if event_session_id == session_id:
                            part = props.get("part")
                            if isinstance(part, Mapping):
                                progress = _extract_progress_metadata(part, props)
                                if progress is not None:
                                    progress_identity = _build_progress_identity(part, props)
                                    progress_key = json.dumps(
                                        progress,
                                        ensure_ascii=False,
                                        sort_keys=True,
                                        separators=(",", ":"),
                                    )
                                    if stream_state.register_progress(
                                        identity=progress_identity,
                                        content_key=progress_key,
                                    ):
                                        await _emit_progress_status(
                                            message_id=_extract_stream_message_id(part, props),
                                            progress=progress,
                                        )
                            # Keep only the first upstream error observed.
                            upstream_error = _extract_upstream_error_from_event(event)
                            if upstream_error is not None and stream_state.upstream_error is None:
                                stream_state.upstream_error = upstream_error
                            signal = _extract_stream_terminal_signal(event)
                            if signal is not None and not terminal_signal.done():
                                terminal_signal.set_result(signal)
                                stop_event.set()
                            usage = _extract_token_usage(event)
                            if usage is not None:
                                stream_state.ingest_token_usage(usage)
                            asked = _extract_interrupt_asked_event(event)
                            if asked is not None:
                                request_id = asked["request_id"]
                                if stream_state.mark_interrupt_pending(request_id):
                                    # remember/discard hooks are optional on the
                                    # client; probe via getattr.
                                    remember_request = getattr(
                                        self._client, "remember_interrupt_request", None
                                    )
                                    if callable(remember_request):
                                        remember_request(
                                            request_id=request_id,
                                            session_id=session_id,
                                            interrupt_type=asked["interrupt_type"],
                                            identity=identity,
                                            task_id=task_id,
                                            context_id=context_id,
                                        )
                                    await _emit_interrupt_status(
                                        state=TaskState.input_required,
                                        request_id=request_id,
                                        interrupt_type=asked["interrupt_type"],
                                        phase="asked",
                                        details=asked["details"],
                                    )
                            resolved = _extract_interrupt_resolved_event(event)
                            if resolved is not None:
                                resolved_request_id = resolved["request_id"]
                                cleared_pending = stream_state.clear_interrupt_pending(
                                    resolved_request_id
                                )
                                discard_request = getattr(
                                    self._client, "discard_interrupt_request", None
                                )
                                if callable(discard_request):
                                    discard_request(resolved_request_id)
                                if cleared_pending:
                                    await _emit_interrupt_status(
                                        state=TaskState.working,
                                        request_id=resolved_request_id,
                                        interrupt_type=resolved["interrupt_type"],
                                        phase="resolved",
                                        resolution=resolved["resolution"],
                                    )
                        # Content pipeline: only part update/delta events below.
                        if event_type not in {"message.part.updated", "message.part.delta"}:
                            continue
                        part = props.get("part")
                        if not isinstance(part, Mapping):
                            part = {}
                        if _extract_stream_session_id(part, props) != session_id:
                            continue
                        message_id = _extract_stream_message_id(part, props)
                        part_id = _extract_stream_part_id(part, props)
                        if not part_id:
                            continue

                        if event_type == "message.part.delta":
                            field = props.get("field")
                            delta = props.get("delta")
                            if field != "text" or not isinstance(delta, str) or not delta:
                                continue
                            state = part_states.get(part_id)
                            if state is None:
                                # Part not seen yet: buffer until its first
                                # full update resolves role/block type.
                                pending_deltas[part_id].append(
                                    _PendingDelta(
                                        field=field,
                                        delta=delta,
                                        message_id=message_id,
                                    )
                                )
                                continue
                            if state.role in {"user", "system"}:
                                continue
                            delta_chunks = _delta_chunks(
                                state=state,
                                delta_text=delta,
                                message_id=message_id,
                                internal_source="delta_event",
                            )
                            if delta_chunks:
                                await _emit_chunks(delta_chunks)
                            continue

                        # message.part.updated path.
                        role = _extract_stream_role(part, props)
                        state = _upsert_part_state(
                            part_id=part_id,
                            part=part,
                            props=props,
                            role=role,
                            message_id=message_id,
                        )
                        if state is None:
                            pending_deltas.pop(part_id, None)
                            continue
                        if state.role in {"user", "system"}:
                            pending_deltas.pop(part_id, None)
                            continue

                        chunks: list[_NormalizedStreamChunk] = []
                        # Replay deltas buffered before this part was known.
                        pending = pending_deltas.pop(part_id, [])
                        for buffered in pending:
                            if buffered.field != "text":
                                continue
                            chunks.extend(
                                _delta_chunks(
                                    state=state,
                                    delta_text=buffered.delta,
                                    message_id=buffered.message_id,
                                    internal_source="delta_event_buffered",
                                )
                            )

                        delta = props.get("delta")
                        if isinstance(delta, str) and delta:
                            chunks.extend(
                                _delta_chunks(
                                    state=state,
                                    delta_text=delta,
                                    message_id=message_id,
                                    internal_source="delta",
                                )
                            )
                        elif state.block_type == BlockType.TOOL_CALL:
                            chunks.extend(
                                _tool_chunks(
                                    state=state,
                                    part=part,
                                    message_id=message_id,
                                )
                            )
                        else:
                            snapshot_text = _extract_stream_snapshot_text(part)
                            if snapshot_text is not None:
                                chunks.extend(
                                    _snapshot_chunks(
                                        state=state,
                                        snapshot=snapshot_text,
                                        message_id=message_id,
                                        part_id=part_id,
                                    )
                                )

                        if chunks:
                            await _emit_chunks(chunks)

                    # Stream ended without a stop request: exit the retry loop.
                    break
                except Exception:
                    if stop_event.is_set():
                        break
                    # Transient failure: retry with exponential backoff.
                    _emit_metric("opencode_stream_retries_total")
                    logger.exception("OpenCode event stream failed; retrying")
                    await asyncio.sleep(backoff)
                    backoff = min(backoff * 2, max_backoff)
        except Exception:
            # Unexpected failure outside the retry loop; consumer ends quietly.
            logger.exception("OpenCode event stream failed")
|
|
1537
|
+
|
|
1538
|
+
|
|
1539
|
+
def _build_assistant_message(
    task_id: str,
    context_id: str,
    text: str,
    *,
    message_id: str | None = None,
) -> Message:
    """Wrap *text* in an agent-role A2A Message bound to the given task and
    context, minting a fresh message id unless one is supplied."""
    resolved_id = message_id if message_id else str(uuid.uuid4())
    return Message(
        message_id=resolved_id,
        role=Role.agent,
        parts=[Part(root=TextPart(text=text))],
        task_id=task_id,
        context_id=context_id,
    )
|
|
1553
|
+
|
|
1554
|
+
|
|
1555
|
+
async def _enqueue_artifact_update(
    *,
    event_queue: EventQueue,
    task_id: str,
    context_id: str,
    artifact_id: str,
    part: Part,
    append: bool | None,
    last_chunk: bool | None,
    artifact_metadata: Mapping[str, Any] | None = None,
    event_metadata: Mapping[str, Any] | None = None,
) -> None:
    """Publish one single-part TaskArtifactUpdateEvent on *event_queue*.

    ``last_chunk`` is normalized so only an explicit ``True`` is transmitted;
    anything else becomes ``None`` (field omitted for intermediate chunks).
    Metadata mappings are shallow-copied so callers can reuse theirs.
    """
    final_chunk = True if last_chunk is True else None
    payload = Artifact(
        artifact_id=artifact_id,
        parts=[part],
        metadata=dict(artifact_metadata) if artifact_metadata else None,
    )
    update = TaskArtifactUpdateEvent(
        task_id=task_id,
        context_id=context_id,
        artifact=payload,
        append=append,
        last_chunk=final_chunk,
        metadata=dict(event_metadata) if event_metadata else None,
    )
    await event_queue.enqueue_event(update)
|