dominds 1.16.2 → 1.16.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dialog-display-state.d.ts +3 -1
- package/dist/dialog-display-state.js +8 -1
- package/dist/dialog-fork.js +9 -0
- package/dist/dialog-global-registry.d.ts +5 -1
- package/dist/dialog-global-registry.js +45 -1
- package/dist/dialog.d.ts +3 -0
- package/dist/dialog.js +4 -0
- package/dist/llm/gen/anthropic.d.ts +3 -3
- package/dist/llm/gen/anthropic.js +115 -21
- package/dist/llm/gen/codex.js +78 -13
- package/dist/llm/gen/failure-classifier.js +10 -0
- package/dist/llm/gen/openai-compatible.d.ts +6 -3
- package/dist/llm/gen/openai-compatible.js +132 -16
- package/dist/llm/gen/openai.d.ts +2 -2
- package/dist/llm/gen/openai.js +95 -15
- package/dist/llm/gen/tool-result-image-ingest.d.ts +39 -0
- package/dist/llm/gen/tool-result-image-ingest.js +158 -0
- package/dist/llm/gen.d.ts +17 -1
- package/dist/llm/kernel-driver/drive.js +15 -0
- package/dist/llm/kernel-driver/flow.js +230 -177
- package/dist/llm/kernel-driver/loop.js +5 -0
- package/dist/persistence.d.ts +2 -0
- package/dist/persistence.js +107 -0
- package/dist/priming.js +76 -0
- package/package.json +3 -3
- package/webapp/dist/assets/{_basePickBy-BJWCmtUm.js → _basePickBy-BKLfvXfr.js} +3 -3
- package/webapp/dist/assets/_basePickBy-BKLfvXfr.js.map +1 -0
- package/webapp/dist/assets/{_baseUniq-D6k_lGfs.js → _baseUniq-DeO2MBcA.js} +2 -2
- package/webapp/dist/assets/_baseUniq-DeO2MBcA.js.map +1 -0
- package/webapp/dist/assets/{arc-DYDaESgj.js → arc-Bez5-ouI.js} +2 -2
- package/webapp/dist/assets/arc-Bez5-ouI.js.map +1 -0
- package/webapp/dist/assets/{architectureDiagram-VXUJARFQ-DZMT7dqc.js → architectureDiagram-2XIMDMQ5-BiDh8CGJ.js} +26 -8
- package/webapp/dist/assets/architectureDiagram-2XIMDMQ5-BiDh8CGJ.js.map +1 -0
- package/webapp/dist/assets/{blockDiagram-VD42YOAC-CABpgVAa.js → blockDiagram-WCTKOSBZ-fSZbZ3PY.js} +187 -170
- package/webapp/dist/assets/blockDiagram-WCTKOSBZ-fSZbZ3PY.js.map +1 -0
- package/webapp/dist/assets/{c4Diagram-YG6GDRKO-COLZS8Ul.js → c4Diagram-IC4MRINW-C-WxkPD_.js} +4 -4
- package/webapp/dist/assets/c4Diagram-IC4MRINW-C-WxkPD_.js.map +1 -0
- package/webapp/dist/assets/{channel-CYFm9Cri.js → channel-CbXK2-c_.js} +2 -2
- package/webapp/dist/assets/channel-CbXK2-c_.js.map +1 -0
- package/webapp/dist/assets/{chunk-4BX2VUAB-CX_-XbaN.js → chunk-4BX2VUAB-D6CgMaUm.js} +2 -2
- package/webapp/dist/assets/chunk-4BX2VUAB-D6CgMaUm.js.map +1 -0
- package/webapp/dist/assets/{chunk-55IACEB6-ByD-NdBC.js → chunk-55IACEB6-CafQjmEn.js} +2 -2
- package/webapp/dist/assets/chunk-55IACEB6-CafQjmEn.js.map +1 -0
- package/webapp/dist/assets/{chunk-FMBD7UC4-DYGviJnf.js → chunk-FMBD7UC4-CZRe1oW9.js} +2 -2
- package/webapp/dist/assets/chunk-FMBD7UC4-CZRe1oW9.js.map +1 -0
- package/webapp/dist/assets/{chunk-TZMSLE5B-_HISzxl3.js → chunk-JSJVCQXG-C3KJDde2.js} +14 -6
- package/webapp/dist/assets/chunk-JSJVCQXG-C3KJDde2.js.map +1 -0
- package/webapp/dist/assets/{chunk-QN33PNHL-B8DWRL9f.js → chunk-KX2RTZJC-1gHfMJyM.js} +2 -2
- package/webapp/dist/assets/chunk-KX2RTZJC-1gHfMJyM.js.map +1 -0
- package/webapp/dist/assets/{chunk-DI55MBZ5-DV7sdJmQ.js → chunk-NQ4KR5QH-CEZ0VIio.js} +9 -7
- package/webapp/dist/assets/chunk-NQ4KR5QH-CEZ0VIio.js.map +1 -0
- package/webapp/dist/assets/{chunk-QZHKN3VN-B1aYIzMR.js → chunk-QZHKN3VN-BE5nbumW.js} +2 -2
- package/webapp/dist/assets/chunk-QZHKN3VN-BE5nbumW.js.map +1 -0
- package/webapp/dist/assets/{chunk-B4BG7PRW-BANemsDD.js → chunk-WL4C6EOR-CY1FogYe.js} +171 -121
- package/webapp/dist/assets/chunk-WL4C6EOR-CY1FogYe.js.map +1 -0
- package/webapp/dist/assets/{classDiagram-2ON5EDUG-CpyYj1Rc.js → classDiagram-VBA2DB6C-DIAo1m4U.js} +7 -6
- package/webapp/dist/assets/classDiagram-VBA2DB6C-DIAo1m4U.js.map +1 -0
- package/webapp/dist/assets/{classDiagram-v2-WZHVMYZB-CpyYj1Rc.js → classDiagram-v2-RAHNMMFH-DIAo1m4U.js} +7 -6
- package/webapp/dist/assets/classDiagram-v2-RAHNMMFH-DIAo1m4U.js.map +1 -0
- package/webapp/dist/assets/{clone-B_9AxWIU.js → clone-BzZfwhKW.js} +2 -2
- package/webapp/dist/assets/clone-BzZfwhKW.js.map +1 -0
- package/webapp/dist/assets/{cose-bilkent-S5V4N54A-17ECLfPR.js → cose-bilkent-S5V4N54A-DIRdD9UY.js} +2 -2
- package/webapp/dist/assets/cose-bilkent-S5V4N54A-DIRdD9UY.js.map +1 -0
- package/webapp/dist/assets/cytoscape.esm-Bm8DJGmZ.js.map +1 -1
- package/webapp/dist/assets/{dagre-6UL2VRFP-DhRqcF1o.js → dagre-KLK3FWXG-BCEgv7zL.js} +7 -7
- package/webapp/dist/assets/dagre-KLK3FWXG-BCEgv7zL.js.map +1 -0
- package/webapp/dist/assets/defaultLocale-B2RvLBDe.js.map +1 -1
- package/webapp/dist/assets/{diagram-PSM6KHXK-lpDT6Wwb.js → diagram-E7M64L7V-CwNdHUlg.js} +10 -10
- package/webapp/dist/assets/diagram-E7M64L7V-CwNdHUlg.js.map +1 -0
- package/webapp/dist/assets/{diagram-QEK2KX5R-ohxbPpDH.js → diagram-IFDJBPK2-DBtRFFBv.js} +9 -8
- package/webapp/dist/assets/diagram-IFDJBPK2-DBtRFFBv.js.map +1 -0
- package/webapp/dist/assets/{diagram-S2PKOQOG-DAFFGfch.js → diagram-P4PSJMXO-BJRf8VnU.js} +8 -8
- package/webapp/dist/assets/diagram-P4PSJMXO-BJRf8VnU.js.map +1 -0
- package/webapp/dist/assets/{erDiagram-Q2GNP2WA-BH-7rI43.js → erDiagram-INFDFZHY-BoZdWdP2.js} +96 -75
- package/webapp/dist/assets/erDiagram-INFDFZHY-BoZdWdP2.js.map +1 -0
- package/webapp/dist/assets/{flowDiagram-NV44I4VS-CdEjFbz4.js → flowDiagram-PKNHOUZH-Dptcw76l.js} +98 -81
- package/webapp/dist/assets/flowDiagram-PKNHOUZH-Dptcw76l.js.map +1 -0
- package/webapp/dist/assets/{ganttDiagram-JELNMOA3-Cju2t-lK.js → ganttDiagram-A5KZAMGK-qM3zsgxI.js} +28 -3
- package/webapp/dist/assets/ganttDiagram-A5KZAMGK-qM3zsgxI.js.map +1 -0
- package/webapp/dist/assets/{gitGraphDiagram-V2S2FVAM-CUJ5oxCg.js → gitGraphDiagram-K3NZZRJ6-125S1YW0.js} +38 -46
- package/webapp/dist/assets/gitGraphDiagram-K3NZZRJ6-125S1YW0.js.map +1 -0
- package/webapp/dist/assets/graph-PACfG8qk.js +782 -0
- package/webapp/dist/assets/graph-PACfG8qk.js.map +1 -0
- package/webapp/dist/assets/{index-BLnM-uON.js → index-D8Klc1n-.js} +1114 -1048
- package/webapp/dist/assets/{index-BLnM-uON.js.map → index-D8Klc1n-.js.map} +1 -1
- package/webapp/dist/assets/{index-xvYYeHuy.css → index-YaxF76or.css} +1 -1
- package/webapp/dist/assets/{infoDiagram-HS3SLOUP-Df8p0okQ.js → infoDiagram-LFFYTUFH-B9vrFy_9.js} +7 -7
- package/webapp/dist/assets/infoDiagram-LFFYTUFH-B9vrFy_9.js.map +1 -0
- package/webapp/dist/assets/init-ZxktEp_H.js.map +1 -1
- package/webapp/dist/assets/ishikawaDiagram-PHBUUO56-CzcXR0Tc.js +966 -0
- package/webapp/dist/assets/ishikawaDiagram-PHBUUO56-CzcXR0Tc.js.map +1 -0
- package/webapp/dist/assets/{journeyDiagram-XKPGCS4Q-BXMl8H-d.js → journeyDiagram-4ABVD52K-BzoWs6ft.js} +5 -5
- package/webapp/dist/assets/journeyDiagram-4ABVD52K-BzoWs6ft.js.map +1 -0
- package/webapp/dist/assets/{kanban-definition-3W4ZIXB7-Cc5RwCEh.js → kanban-definition-K7BYSVSG-TJm1UiSH.js} +5 -3
- package/webapp/dist/assets/kanban-definition-K7BYSVSG-TJm1UiSH.js.map +1 -0
- package/webapp/dist/assets/{layout-BB2SvQcE.js → layout-D-kg27bk.js} +5 -5
- package/webapp/dist/assets/layout-D-kg27bk.js.map +1 -0
- package/webapp/dist/assets/{linear-Cj50lA0a.js → linear-l0qAHpRW.js} +2 -2
- package/webapp/dist/assets/linear-l0qAHpRW.js.map +1 -0
- package/webapp/dist/assets/{mindmap-definition-VGOIOE7T-BLBklJVX.js → mindmap-definition-YRQLILUH-D4282T7u.js} +7 -5
- package/webapp/dist/assets/mindmap-definition-YRQLILUH-D4282T7u.js.map +1 -0
- package/webapp/dist/assets/ordinal-CxptdPJm.js.map +1 -1
- package/webapp/dist/assets/{pieDiagram-ADFJNKIX-BQrOgSc-.js → pieDiagram-SKSYHLDU-BKJYIUkU.js} +8 -8
- package/webapp/dist/assets/pieDiagram-SKSYHLDU-BKJYIUkU.js.map +1 -0
- package/webapp/dist/assets/{quadrantDiagram-AYHSOK5B-C62TxtsO.js → quadrantDiagram-337W2JSQ-yjekONzR.js} +3 -3
- package/webapp/dist/assets/quadrantDiagram-337W2JSQ-yjekONzR.js.map +1 -0
- package/webapp/dist/assets/{requirementDiagram-UZGBJVZJ-Is6Q3osM.js → requirementDiagram-Z7DCOOCP-DMH1wutn.js} +16 -6
- package/webapp/dist/assets/requirementDiagram-Z7DCOOCP-DMH1wutn.js.map +1 -0
- package/webapp/dist/assets/{sankeyDiagram-TZEHDZUN-CCAW8Dr2.js → sankeyDiagram-WA2Y5GQK-Cs4ACtdq.js} +2 -2
- package/webapp/dist/assets/sankeyDiagram-WA2Y5GQK-Cs4ACtdq.js.map +1 -0
- package/webapp/dist/assets/{sequenceDiagram-WL72ISMW-DpODpbl6.js → sequenceDiagram-2WXFIKYE-4sriOpV9.js} +601 -201
- package/webapp/dist/assets/sequenceDiagram-2WXFIKYE-4sriOpV9.js.map +1 -0
- package/webapp/dist/assets/{stateDiagram-FKZM4ZOC-B8LHaf5T.js → stateDiagram-RAJIS63D-BUMObt6W.js} +9 -9
- package/webapp/dist/assets/stateDiagram-RAJIS63D-BUMObt6W.js.map +1 -0
- package/webapp/dist/assets/{stateDiagram-v2-4FDKWEC3-4FzTkpgz.js → stateDiagram-v2-FVOUBMTO-DazzpfnH.js} +5 -5
- package/webapp/dist/assets/stateDiagram-v2-FVOUBMTO-DazzpfnH.js.map +1 -0
- package/webapp/dist/assets/{timeline-definition-IT6M3QCI-Ckxc4qZe.js → timeline-definition-YZTLITO2-CdChFPnp.js} +3 -3
- package/webapp/dist/assets/timeline-definition-YZTLITO2-CdChFPnp.js.map +1 -0
- package/webapp/dist/assets/{treemap-GDKQZRPO-QQa4vKMv.js → treemap-KZPCXAKY-DW9mBchB.js} +37 -24
- package/webapp/dist/assets/treemap-KZPCXAKY-DW9mBchB.js.map +1 -0
- package/webapp/dist/assets/vennDiagram-LZ73GAT5-Bu9N_8Cu.js +2487 -0
- package/webapp/dist/assets/vennDiagram-LZ73GAT5-Bu9N_8Cu.js.map +1 -0
- package/webapp/dist/assets/{xychartDiagram-PRI3JC2R-Ba2uJcs6.js → xychartDiagram-JWTSCODW-BI_N4JiZ.js} +4 -4
- package/webapp/dist/assets/xychartDiagram-JWTSCODW-BI_N4JiZ.js.map +1 -0
- package/webapp/dist/index.html +2 -2
- package/webapp/dist/assets/_basePickBy-BJWCmtUm.js.map +0 -1
- package/webapp/dist/assets/_baseUniq-D6k_lGfs.js.map +0 -1
- package/webapp/dist/assets/arc-DYDaESgj.js.map +0 -1
- package/webapp/dist/assets/architectureDiagram-VXUJARFQ-DZMT7dqc.js.map +0 -1
- package/webapp/dist/assets/blockDiagram-VD42YOAC-CABpgVAa.js.map +0 -1
- package/webapp/dist/assets/c4Diagram-YG6GDRKO-COLZS8Ul.js.map +0 -1
- package/webapp/dist/assets/channel-CYFm9Cri.js.map +0 -1
- package/webapp/dist/assets/chunk-4BX2VUAB-CX_-XbaN.js.map +0 -1
- package/webapp/dist/assets/chunk-55IACEB6-ByD-NdBC.js.map +0 -1
- package/webapp/dist/assets/chunk-B4BG7PRW-BANemsDD.js.map +0 -1
- package/webapp/dist/assets/chunk-DI55MBZ5-DV7sdJmQ.js.map +0 -1
- package/webapp/dist/assets/chunk-FMBD7UC4-DYGviJnf.js.map +0 -1
- package/webapp/dist/assets/chunk-QN33PNHL-B8DWRL9f.js.map +0 -1
- package/webapp/dist/assets/chunk-QZHKN3VN-B1aYIzMR.js.map +0 -1
- package/webapp/dist/assets/chunk-TZMSLE5B-_HISzxl3.js.map +0 -1
- package/webapp/dist/assets/classDiagram-2ON5EDUG-CpyYj1Rc.js.map +0 -1
- package/webapp/dist/assets/classDiagram-v2-WZHVMYZB-CpyYj1Rc.js.map +0 -1
- package/webapp/dist/assets/clone-B_9AxWIU.js.map +0 -1
- package/webapp/dist/assets/cose-bilkent-S5V4N54A-17ECLfPR.js.map +0 -1
- package/webapp/dist/assets/dagre-6UL2VRFP-DhRqcF1o.js.map +0 -1
- package/webapp/dist/assets/diagram-PSM6KHXK-lpDT6Wwb.js.map +0 -1
- package/webapp/dist/assets/diagram-QEK2KX5R-ohxbPpDH.js.map +0 -1
- package/webapp/dist/assets/diagram-S2PKOQOG-DAFFGfch.js.map +0 -1
- package/webapp/dist/assets/erDiagram-Q2GNP2WA-BH-7rI43.js.map +0 -1
- package/webapp/dist/assets/flowDiagram-NV44I4VS-CdEjFbz4.js.map +0 -1
- package/webapp/dist/assets/ganttDiagram-JELNMOA3-Cju2t-lK.js.map +0 -1
- package/webapp/dist/assets/gitGraphDiagram-V2S2FVAM-CUJ5oxCg.js.map +0 -1
- package/webapp/dist/assets/graph-mhcc7ldf.js +0 -425
- package/webapp/dist/assets/graph-mhcc7ldf.js.map +0 -1
- package/webapp/dist/assets/infoDiagram-HS3SLOUP-Df8p0okQ.js.map +0 -1
- package/webapp/dist/assets/journeyDiagram-XKPGCS4Q-BXMl8H-d.js.map +0 -1
- package/webapp/dist/assets/kanban-definition-3W4ZIXB7-Cc5RwCEh.js.map +0 -1
- package/webapp/dist/assets/layout-BB2SvQcE.js.map +0 -1
- package/webapp/dist/assets/linear-Cj50lA0a.js.map +0 -1
- package/webapp/dist/assets/mindmap-definition-VGOIOE7T-BLBklJVX.js.map +0 -1
- package/webapp/dist/assets/pieDiagram-ADFJNKIX-BQrOgSc-.js.map +0 -1
- package/webapp/dist/assets/quadrantDiagram-AYHSOK5B-C62TxtsO.js.map +0 -1
- package/webapp/dist/assets/requirementDiagram-UZGBJVZJ-Is6Q3osM.js.map +0 -1
- package/webapp/dist/assets/sankeyDiagram-TZEHDZUN-CCAW8Dr2.js.map +0 -1
- package/webapp/dist/assets/sequenceDiagram-WL72ISMW-DpODpbl6.js.map +0 -1
- package/webapp/dist/assets/stateDiagram-FKZM4ZOC-B8LHaf5T.js.map +0 -1
- package/webapp/dist/assets/stateDiagram-v2-4FDKWEC3-4FzTkpgz.js.map +0 -1
- package/webapp/dist/assets/timeline-definition-IT6M3QCI-Ckxc4qZe.js.map +0 -1
- package/webapp/dist/assets/treemap-GDKQZRPO-QQa4vKMv.js.map +0 -1
- package/webapp/dist/assets/xychartDiagram-PRI3JC2R-Ba2uJcs6.js.map +0 -1
|
@@ -27,6 +27,7 @@ const artifacts_1 = require("./artifacts");
|
|
|
27
27
|
const failure_classifier_1 = require("./failure-classifier");
|
|
28
28
|
const tool_call_context_1 = require("./tool-call-context");
|
|
29
29
|
const tool_output_limit_1 = require("./tool-output-limit");
|
|
30
|
+
const tool_result_image_ingest_1 = require("./tool-result-image-ingest");
|
|
30
31
|
const log = (0, log_1.createLogger)('llm/openai-compatible');
|
|
31
32
|
function limitOpenAiCompatibleToolOutputText(text, msg, limitChars) {
|
|
32
33
|
const limited = (0, tool_output_limit_1.truncateProviderToolOutputText)(text, limitChars);
|
|
@@ -43,6 +44,13 @@ function limitOpenAiCompatibleToolOutputText(text, msg, limitChars) {
|
|
|
43
44
|
function isRecord(value) {
|
|
44
45
|
return typeof value === 'object' && value !== null && !Array.isArray(value);
|
|
45
46
|
}
|
|
47
|
+
function isLlmRequestContext(value) {
|
|
48
|
+
return (isRecord(value) &&
|
|
49
|
+
typeof value.dialogSelfId === 'string' &&
|
|
50
|
+
typeof value.dialogRootId === 'string' &&
|
|
51
|
+
typeof value.providerKey === 'string' &&
|
|
52
|
+
typeof value.modelKey === 'string');
|
|
53
|
+
}
|
|
46
54
|
function tryExtractChatUsage(usage) {
|
|
47
55
|
// NOTE: External API payload; a runtime check is unavoidable.
|
|
48
56
|
if (!isRecord(usage))
|
|
@@ -177,7 +185,7 @@ function chatMessageToChatCompletionMessage(msg) {
|
|
|
177
185
|
}
|
|
178
186
|
}
|
|
179
187
|
}
|
|
180
|
-
async function funcResultToChatCompletionMessages(msg, limitChars) {
|
|
188
|
+
async function funcResultToChatCompletionMessages(msg, limitChars, requestContext, providerConfig, allowedImageKeys, onToolResultImageIngest) {
|
|
181
189
|
const items = msg.contentItems;
|
|
182
190
|
if (!Array.isArray(items) || items.length === 0) {
|
|
183
191
|
return [
|
|
@@ -201,31 +209,121 @@ async function funcResultToChatCompletionMessages(msg, limitChars) {
|
|
|
201
209
|
type: 'text',
|
|
202
210
|
text: `Tool output images (${msg.name}, call_id=${msg.id}):`,
|
|
203
211
|
});
|
|
212
|
+
const supportsImageInput = (() => {
|
|
213
|
+
if (!providerConfig)
|
|
214
|
+
return false;
|
|
215
|
+
const modelKey = typeof requestContext.modelKey === 'string' ? requestContext.modelKey.trim() : '';
|
|
216
|
+
const modelInfo = modelKey.length > 0 ? providerConfig.models[modelKey] : undefined;
|
|
217
|
+
return isRecord(modelInfo) && modelInfo['supports_image_input'] === true;
|
|
218
|
+
})();
|
|
204
219
|
for (const item of items) {
|
|
205
220
|
if (item.type === 'input_text')
|
|
206
221
|
continue;
|
|
207
222
|
if (item.type === 'input_image') {
|
|
208
223
|
sawAnyImage = true;
|
|
224
|
+
if (!supportsImageInput) {
|
|
225
|
+
if (onToolResultImageIngest) {
|
|
226
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
227
|
+
requestContext,
|
|
228
|
+
toolCallId: msg.id,
|
|
229
|
+
toolName: msg.name,
|
|
230
|
+
artifact: item.artifact,
|
|
231
|
+
disposition: 'filtered_provider_unsupported',
|
|
232
|
+
providerPathLabel: 'OpenAI-compatible path',
|
|
233
|
+
}));
|
|
234
|
+
}
|
|
235
|
+
parts.push({
|
|
236
|
+
type: 'text',
|
|
237
|
+
text: `[image not sent: current openai-compatible image input is disabled for model=${typeof requestContext.modelKey === 'string' && requestContext.modelKey.trim().length > 0 ? requestContext.modelKey.trim() : 'unknown'}]`,
|
|
238
|
+
});
|
|
239
|
+
continue;
|
|
240
|
+
}
|
|
209
241
|
if (!(0, artifacts_1.isVisionImageMimeType)(item.mimeType)) {
|
|
242
|
+
if (onToolResultImageIngest) {
|
|
243
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
244
|
+
requestContext,
|
|
245
|
+
toolCallId: msg.id,
|
|
246
|
+
toolName: msg.name,
|
|
247
|
+
artifact: item.artifact,
|
|
248
|
+
disposition: 'filtered_mime_unsupported',
|
|
249
|
+
mimeType: item.mimeType,
|
|
250
|
+
providerPathLabel: 'OpenAI-compatible path',
|
|
251
|
+
}));
|
|
252
|
+
}
|
|
210
253
|
parts.push({
|
|
211
254
|
type: 'text',
|
|
212
255
|
text: `[image omitted: unsupported mimeType=${item.mimeType}]`,
|
|
213
256
|
});
|
|
214
257
|
continue;
|
|
215
258
|
}
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
259
|
+
if (!allowedImageKeys.has((0, tool_result_image_ingest_1.buildToolResultImageBudgetKeyForMsg)(msg, item.artifact))) {
|
|
260
|
+
if (onToolResultImageIngest) {
|
|
261
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
262
|
+
requestContext,
|
|
263
|
+
toolCallId: msg.id,
|
|
264
|
+
toolName: msg.name,
|
|
265
|
+
artifact: item.artifact,
|
|
266
|
+
disposition: 'filtered_size_limit',
|
|
267
|
+
detail: (0, tool_result_image_ingest_1.buildToolResultImageBudgetLimitDetail)({
|
|
268
|
+
byteLength: item.byteLength,
|
|
269
|
+
budgetBytes: tool_result_image_ingest_1.OPENAI_COMPATIBLE_TOOL_RESULT_IMAGE_BUDGET_BYTES,
|
|
270
|
+
}),
|
|
271
|
+
providerPathLabel: 'OpenAI-compatible path',
|
|
272
|
+
}));
|
|
273
|
+
}
|
|
274
|
+
parts.push({
|
|
275
|
+
type: 'text',
|
|
276
|
+
text: `[image omitted: request image budget exceeded bytes=${String(item.byteLength)} budget=${String(tool_result_image_ingest_1.OPENAI_COMPATIBLE_TOOL_RESULT_IMAGE_BUDGET_BYTES)}]`,
|
|
277
|
+
});
|
|
278
|
+
continue;
|
|
279
|
+
}
|
|
280
|
+
const bytesResult = await (0, tool_result_image_ingest_1.readToolResultImageBytesSafe)(item.artifact);
|
|
281
|
+
if (bytesResult.kind === 'missing') {
|
|
282
|
+
if (onToolResultImageIngest) {
|
|
283
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
284
|
+
requestContext,
|
|
285
|
+
toolCallId: msg.id,
|
|
286
|
+
toolName: msg.name,
|
|
287
|
+
artifact: item.artifact,
|
|
288
|
+
disposition: 'filtered_missing',
|
|
289
|
+
providerPathLabel: 'OpenAI-compatible path',
|
|
290
|
+
}));
|
|
291
|
+
}
|
|
223
292
|
parts.push({
|
|
224
293
|
type: 'text',
|
|
225
294
|
text: `[image missing: ${item.artifact.relPath}]`,
|
|
226
295
|
});
|
|
227
296
|
continue;
|
|
228
297
|
}
|
|
298
|
+
if (bytesResult.kind === 'read_failed') {
|
|
299
|
+
if (onToolResultImageIngest) {
|
|
300
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
301
|
+
requestContext,
|
|
302
|
+
toolCallId: msg.id,
|
|
303
|
+
toolName: msg.name,
|
|
304
|
+
artifact: item.artifact,
|
|
305
|
+
disposition: 'filtered_read_failed',
|
|
306
|
+
detail: bytesResult.detail,
|
|
307
|
+
providerPathLabel: 'OpenAI-compatible path',
|
|
308
|
+
}));
|
|
309
|
+
}
|
|
310
|
+
parts.push({
|
|
311
|
+
type: 'text',
|
|
312
|
+
text: `[image unreadable: ${item.artifact.relPath}]`,
|
|
313
|
+
});
|
|
314
|
+
continue;
|
|
315
|
+
}
|
|
316
|
+
if (onToolResultImageIngest) {
|
|
317
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
318
|
+
requestContext,
|
|
319
|
+
toolCallId: msg.id,
|
|
320
|
+
toolName: msg.name,
|
|
321
|
+
artifact: item.artifact,
|
|
322
|
+
disposition: 'fed_provider_transformed',
|
|
323
|
+
providerPathLabel: 'OpenAI-compatible path',
|
|
324
|
+
}));
|
|
325
|
+
}
|
|
326
|
+
const bytes = bytesResult.bytes;
|
|
229
327
|
parts.push({
|
|
230
328
|
type: 'image_url',
|
|
231
329
|
image_url: {
|
|
@@ -358,7 +456,7 @@ function mergeAdjacentMessages(input) {
|
|
|
358
456
|
}
|
|
359
457
|
return merged;
|
|
360
458
|
}
|
|
361
|
-
async function buildChatCompletionMessages(systemPrompt, context, options) {
|
|
459
|
+
async function buildChatCompletionMessages(systemPrompt, context, requestContext, options) {
|
|
362
460
|
const normalized = (0, tool_call_context_1.normalizeToolCallPairs)(context);
|
|
363
461
|
const violation = (0, tool_call_context_1.findFirstToolCallAdjacencyViolation)(normalized);
|
|
364
462
|
if (violation) {
|
|
@@ -374,6 +472,7 @@ async function buildChatCompletionMessages(systemPrompt, context, options) {
|
|
|
374
472
|
const input = [];
|
|
375
473
|
const reasoningContentMode = options?.reasoningContentMode === true;
|
|
376
474
|
const toolResultMaxChars = (0, tool_output_limit_1.resolveProviderToolResultMaxChars)(options?.providerConfig);
|
|
475
|
+
const allowedImageKeys = (0, tool_result_image_ingest_1.selectLatestToolResultImagesWithinBudget)(normalized, tool_result_image_ingest_1.OPENAI_COMPATIBLE_TOOL_RESULT_IMAGE_BUDGET_BYTES);
|
|
377
476
|
let pendingReasoningContent;
|
|
378
477
|
const takePendingReasoningContent = () => {
|
|
379
478
|
const current = pendingReasoningContent;
|
|
@@ -412,7 +511,7 @@ async function buildChatCompletionMessages(systemPrompt, context, options) {
|
|
|
412
511
|
}
|
|
413
512
|
if (msg.type === 'func_result_msg') {
|
|
414
513
|
flushPendingReasoningAsAssistantMessage();
|
|
415
|
-
input.push(...(await funcResultToChatCompletionMessages(msg, toolResultMaxChars)));
|
|
514
|
+
input.push(...(await funcResultToChatCompletionMessages(msg, toolResultMaxChars, requestContext, options?.providerConfig, allowedImageKeys, options?.onToolResultImageIngest)));
|
|
416
515
|
continue;
|
|
417
516
|
}
|
|
418
517
|
const mapped = chatMessageToChatCompletionMessage(msg);
|
|
@@ -421,8 +520,19 @@ async function buildChatCompletionMessages(systemPrompt, context, options) {
|
|
|
421
520
|
flushPendingReasoningAsAssistantMessage();
|
|
422
521
|
return mergeAdjacentMessages(input);
|
|
423
522
|
}
|
|
424
|
-
async function buildOpenAiCompatibleRequestMessagesWrapper(systemPrompt, context,
|
|
425
|
-
|
|
523
|
+
async function buildOpenAiCompatibleRequestMessagesWrapper(systemPrompt, context, requestContextOrOptions, optionsMaybe) {
|
|
524
|
+
const requestContext = isLlmRequestContext(requestContextOrOptions)
|
|
525
|
+
? requestContextOrOptions
|
|
526
|
+
: {
|
|
527
|
+
dialogSelfId: '',
|
|
528
|
+
dialogRootId: '',
|
|
529
|
+
providerKey: 'openai-compatible',
|
|
530
|
+
modelKey: 'unknown',
|
|
531
|
+
};
|
|
532
|
+
const options = isLlmRequestContext(requestContextOrOptions)
|
|
533
|
+
? optionsMaybe
|
|
534
|
+
: requestContextOrOptions;
|
|
535
|
+
return await buildChatCompletionMessages(systemPrompt, context, requestContext, options);
|
|
426
536
|
}
|
|
427
537
|
function applyArgsDelta(state, chunk) {
|
|
428
538
|
if (chunk.length === 0)
|
|
@@ -507,7 +617,7 @@ class OpenAiCompatibleGen {
|
|
|
507
617
|
classifyFailure(error) {
|
|
508
618
|
return (0, failure_classifier_1.classifyOpenAiLikeFailure)(error);
|
|
509
619
|
}
|
|
510
|
-
async genToReceiver(providerConfig, agent, systemPrompt, funcTools,
|
|
620
|
+
async genToReceiver(providerConfig, agent, systemPrompt, funcTools, requestContext, context, receiver, genseq, abortSignal) {
|
|
511
621
|
const apiKey = process.env[providerConfig.apiKeyEnvVar];
|
|
512
622
|
if (!apiKey)
|
|
513
623
|
throw new Error(`Missing API key env var ${providerConfig.apiKeyEnvVar}`);
|
|
@@ -516,9 +626,10 @@ class OpenAiCompatibleGen {
|
|
|
516
626
|
}
|
|
517
627
|
const client = new openai_1.default({ apiKey, baseURL: providerConfig.baseUrl });
|
|
518
628
|
const reasoningContentMode = resolveOpenAiCompatibleReasoningContentMode(providerConfig, agent);
|
|
519
|
-
const messages = await buildChatCompletionMessages(systemPrompt, context, {
|
|
629
|
+
const messages = await buildChatCompletionMessages(systemPrompt, context, requestContext, {
|
|
520
630
|
reasoningContentMode,
|
|
521
631
|
providerConfig,
|
|
632
|
+
onToolResultImageIngest: receiver.toolResultImageIngest,
|
|
522
633
|
});
|
|
523
634
|
const openAiParams = agent.model_params?.openai || {};
|
|
524
635
|
const maxTokens = agent.model_params?.max_tokens;
|
|
@@ -707,7 +818,7 @@ class OpenAiCompatibleGen {
|
|
|
707
818
|
}
|
|
708
819
|
return { usage, ...(returnedModel ? { llmGenModel: returnedModel } : {}) };
|
|
709
820
|
}
|
|
710
|
-
async genMoreMessages(providerConfig, agent, systemPrompt, funcTools,
|
|
821
|
+
async genMoreMessages(providerConfig, agent, systemPrompt, funcTools, requestContext, context, genseq, abortSignal) {
|
|
711
822
|
const apiKey = process.env[providerConfig.apiKeyEnvVar];
|
|
712
823
|
if (!apiKey)
|
|
713
824
|
throw new Error(`Missing API key env var ${providerConfig.apiKeyEnvVar}`);
|
|
@@ -716,9 +827,13 @@ class OpenAiCompatibleGen {
|
|
|
716
827
|
}
|
|
717
828
|
const client = new openai_1.default({ apiKey, baseURL: providerConfig.baseUrl });
|
|
718
829
|
const reasoningContentMode = resolveOpenAiCompatibleReasoningContentMode(providerConfig, agent);
|
|
719
|
-
const
|
|
830
|
+
const outputs = [];
|
|
831
|
+
const messages = await buildChatCompletionMessages(systemPrompt, context, requestContext, {
|
|
720
832
|
reasoningContentMode,
|
|
721
833
|
providerConfig,
|
|
834
|
+
onToolResultImageIngest: async (ingest) => {
|
|
835
|
+
outputs.push({ kind: 'tool_result_image_ingest', ingest });
|
|
836
|
+
},
|
|
722
837
|
});
|
|
723
838
|
const openAiParams = agent.model_params?.openai || {};
|
|
724
839
|
const maxTokens = agent.model_params?.max_tokens;
|
|
@@ -754,6 +869,7 @@ class OpenAiCompatibleGen {
|
|
|
754
869
|
: undefined;
|
|
755
870
|
return {
|
|
756
871
|
messages: messagesOut,
|
|
872
|
+
...(outputs.length > 0 ? { outputs } : {}),
|
|
757
873
|
usage,
|
|
758
874
|
...(model ? { llmGenModel: model } : {}),
|
|
759
875
|
};
|
package/dist/llm/gen/openai.d.ts
CHANGED
|
@@ -14,6 +14,6 @@ export declare function buildOpenAiRequestInputWrapper(context: ChatMessage[], p
|
|
|
14
14
|
export declare class OpenAiGen implements LlmGenerator {
|
|
15
15
|
get apiType(): string;
|
|
16
16
|
classifyFailure(error: unknown): LlmFailureDisposition | undefined;
|
|
17
|
-
genToReceiver(providerConfig: ProviderConfig, agent: Team.Member, systemPrompt: string, funcTools: FuncTool[],
|
|
18
|
-
genMoreMessages(providerConfig: ProviderConfig, agent: Team.Member, systemPrompt: string, funcTools: FuncTool[],
|
|
17
|
+
genToReceiver(providerConfig: ProviderConfig, agent: Team.Member, systemPrompt: string, funcTools: FuncTool[], requestContext: LlmRequestContext, context: ChatMessage[], receiver: LlmStreamReceiver, _genseq: number, abortSignal?: AbortSignal): Promise<LlmStreamResult>;
|
|
18
|
+
genMoreMessages(providerConfig: ProviderConfig, agent: Team.Member, systemPrompt: string, funcTools: FuncTool[], requestContext: LlmRequestContext, context: ChatMessage[], genseq: number, abortSignal?: AbortSignal): Promise<LlmBatchResult>;
|
|
19
19
|
}
|
package/dist/llm/gen/openai.js
CHANGED
|
@@ -23,6 +23,7 @@ const artifacts_1 = require("./artifacts");
|
|
|
23
23
|
const failure_classifier_1 = require("./failure-classifier");
|
|
24
24
|
const tool_call_context_1 = require("./tool-call-context");
|
|
25
25
|
const tool_output_limit_1 = require("./tool-output-limit");
|
|
26
|
+
const tool_result_image_ingest_1 = require("./tool-result-image-ingest");
|
|
26
27
|
const log = (0, log_1.createLogger)('llm/openai');
|
|
27
28
|
const OPENAI_API_QUIRK_VENDOR_HEARTBEAT_EVENT_TYPES = {
|
|
28
29
|
'xcode.best': ['keepalive'],
|
|
@@ -237,7 +238,7 @@ function thinkingMessageToOpenAiReasoningItem(msg) {
|
|
|
237
238
|
}
|
|
238
239
|
return out;
|
|
239
240
|
}
|
|
240
|
-
async function funcResultToOpenAiInputItemWithLimit(msg, limitChars) {
|
|
241
|
+
async function funcResultToOpenAiInputItemWithLimit(msg, limitChars, requestContext, allowedImageKeys, onToolResultImageIngest) {
|
|
241
242
|
const items = msg.contentItems;
|
|
242
243
|
if (!Array.isArray(items) || items.length === 0) {
|
|
243
244
|
return {
|
|
@@ -254,25 +255,91 @@ async function funcResultToOpenAiInputItemWithLimit(msg, limitChars) {
|
|
|
254
255
|
}
|
|
255
256
|
if (item.type === 'input_image') {
|
|
256
257
|
if (!(0, artifacts_1.isVisionImageMimeType)(item.mimeType)) {
|
|
258
|
+
if (onToolResultImageIngest) {
|
|
259
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
260
|
+
requestContext,
|
|
261
|
+
toolCallId: msg.id,
|
|
262
|
+
toolName: msg.name,
|
|
263
|
+
artifact: item.artifact,
|
|
264
|
+
disposition: 'filtered_mime_unsupported',
|
|
265
|
+
mimeType: item.mimeType,
|
|
266
|
+
providerPathLabel: 'OpenAI Responses path',
|
|
267
|
+
}));
|
|
268
|
+
}
|
|
257
269
|
output.push({
|
|
258
270
|
type: 'input_text',
|
|
259
271
|
text: `[image omitted: unsupported mimeType=${item.mimeType}]`,
|
|
260
272
|
});
|
|
261
273
|
continue;
|
|
262
274
|
}
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
275
|
+
if (!allowedImageKeys.has((0, tool_result_image_ingest_1.buildToolResultImageBudgetKeyForMsg)(msg, item.artifact))) {
|
|
276
|
+
if (onToolResultImageIngest) {
|
|
277
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
278
|
+
requestContext,
|
|
279
|
+
toolCallId: msg.id,
|
|
280
|
+
toolName: msg.name,
|
|
281
|
+
artifact: item.artifact,
|
|
282
|
+
disposition: 'filtered_size_limit',
|
|
283
|
+
detail: (0, tool_result_image_ingest_1.buildToolResultImageBudgetLimitDetail)({
|
|
284
|
+
byteLength: item.byteLength,
|
|
285
|
+
budgetBytes: tool_result_image_ingest_1.OPENAI_TOOL_RESULT_IMAGE_BUDGET_BYTES,
|
|
286
|
+
}),
|
|
287
|
+
providerPathLabel: 'OpenAI Responses path',
|
|
288
|
+
}));
|
|
289
|
+
}
|
|
290
|
+
output.push({
|
|
291
|
+
type: 'input_text',
|
|
292
|
+
text: `[image omitted: request image budget exceeded bytes=${String(item.byteLength)} budget=${String(tool_result_image_ingest_1.OPENAI_TOOL_RESULT_IMAGE_BUDGET_BYTES)}]`,
|
|
293
|
+
});
|
|
294
|
+
continue;
|
|
295
|
+
}
|
|
296
|
+
const bytesResult = await (0, tool_result_image_ingest_1.readToolResultImageBytesSafe)(item.artifact);
|
|
297
|
+
if (bytesResult.kind === 'missing') {
|
|
298
|
+
if (onToolResultImageIngest) {
|
|
299
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
300
|
+
requestContext,
|
|
301
|
+
toolCallId: msg.id,
|
|
302
|
+
toolName: msg.name,
|
|
303
|
+
artifact: item.artifact,
|
|
304
|
+
disposition: 'filtered_missing',
|
|
305
|
+
providerPathLabel: 'OpenAI Responses path',
|
|
306
|
+
}));
|
|
307
|
+
}
|
|
270
308
|
output.push({
|
|
271
309
|
type: 'input_text',
|
|
272
310
|
text: `[image missing: ${item.artifact.relPath}]`,
|
|
273
311
|
});
|
|
274
312
|
continue;
|
|
275
313
|
}
|
|
314
|
+
if (bytesResult.kind === 'read_failed') {
|
|
315
|
+
if (onToolResultImageIngest) {
|
|
316
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
317
|
+
requestContext,
|
|
318
|
+
toolCallId: msg.id,
|
|
319
|
+
toolName: msg.name,
|
|
320
|
+
artifact: item.artifact,
|
|
321
|
+
disposition: 'filtered_read_failed',
|
|
322
|
+
detail: bytesResult.detail,
|
|
323
|
+
providerPathLabel: 'OpenAI Responses path',
|
|
324
|
+
}));
|
|
325
|
+
}
|
|
326
|
+
output.push({
|
|
327
|
+
type: 'input_text',
|
|
328
|
+
text: `[image unreadable: ${item.artifact.relPath}]`,
|
|
329
|
+
});
|
|
330
|
+
continue;
|
|
331
|
+
}
|
|
332
|
+
const bytes = bytesResult.bytes;
|
|
333
|
+
if (onToolResultImageIngest) {
|
|
334
|
+
await onToolResultImageIngest((0, tool_result_image_ingest_1.buildToolResultImageIngest)({
|
|
335
|
+
requestContext,
|
|
336
|
+
toolCallId: msg.id,
|
|
337
|
+
toolName: msg.name,
|
|
338
|
+
artifact: item.artifact,
|
|
339
|
+
disposition: 'fed_native',
|
|
340
|
+
providerPathLabel: 'OpenAI Responses path',
|
|
341
|
+
}));
|
|
342
|
+
}
|
|
276
343
|
output.push({
|
|
277
344
|
type: 'input_image',
|
|
278
345
|
detail: 'auto',
|
|
@@ -327,7 +394,7 @@ function shouldIncludeOpenAiEncryptedReasoning(input, reasoning) {
|
|
|
327
394
|
return true;
|
|
328
395
|
return input.some((item) => isRecord(item) && item.type === 'reasoning');
|
|
329
396
|
}
|
|
330
|
-
async function buildOpenAiRequestInput(context, providerConfig) {
|
|
397
|
+
async function buildOpenAiRequestInput(context, requestContext, providerConfig, onToolResultImageIngest) {
|
|
331
398
|
const normalized = (0, tool_call_context_1.normalizeToolCallPairs)(context);
|
|
332
399
|
const violation = (0, tool_call_context_1.findFirstToolCallAdjacencyViolation)(normalized);
|
|
333
400
|
if (violation) {
|
|
@@ -342,7 +409,12 @@ async function buildOpenAiRequestInput(context, providerConfig) {
|
|
|
342
409
|
}
|
|
343
410
|
const input = [];
|
|
344
411
|
const toolResultMaxChars = (0, tool_output_limit_1.resolveProviderToolResultMaxChars)(providerConfig);
|
|
412
|
+
const allowedImageKeys = (0, tool_result_image_ingest_1.selectLatestToolResultImagesWithinBudget)(normalized, tool_result_image_ingest_1.OPENAI_TOOL_RESULT_IMAGE_BUDGET_BYTES);
|
|
345
413
|
for (const msg of normalized) {
|
|
414
|
+
if (msg.type === 'func_result_msg') {
|
|
415
|
+
input.push(await funcResultToOpenAiInputItemWithLimit(msg, toolResultMaxChars, requestContext, allowedImageKeys, onToolResultImageIngest));
|
|
416
|
+
continue;
|
|
417
|
+
}
|
|
346
418
|
input.push(chatMessageToOpenAiInputItem(msg));
|
|
347
419
|
}
|
|
348
420
|
return mergeAdjacentOpenAiMessages(input);
|
|
@@ -806,7 +878,12 @@ function buildOpenAiNativeToolSeed(item, itemId) {
|
|
|
806
878
|
}
|
|
807
879
|
}
|
|
808
880
|
/**
 * Legacy-compatible wrapper around buildOpenAiRequestInput.
 *
 * Supplies a placeholder request context (empty dialog ids, generic
 * 'openai'/'unknown' provider/model keys) for call sites that do not carry a
 * real LlmRequestContext, and no image-ingest callback.
 */
async function buildOpenAiRequestInputWrapper(context, providerConfig) {
    // NOTE(review): these sentinel values feed ingest reporting only;
    // they never reach the provider request payload — confirm at call sites.
    const fallbackRequestContext = {
        dialogSelfId: '',
        dialogRootId: '',
        providerKey: 'openai',
        modelKey: 'unknown',
    };
    return await buildOpenAiRequestInput(context, fallbackRequestContext, providerConfig);
}
|
|
811
888
|
function extractOutputMessageText(item) {
|
|
812
889
|
if (!isRecord(item) || item.type !== 'message')
|
|
@@ -978,7 +1055,7 @@ class OpenAiGen {
|
|
|
978
1055
|
classifyFailure(error) {
|
|
979
1056
|
return (0, failure_classifier_1.classifyOpenAiLikeFailure)(error);
|
|
980
1057
|
}
|
|
981
|
-
async genToReceiver(providerConfig, agent, systemPrompt, funcTools,
|
|
1058
|
+
async genToReceiver(providerConfig, agent, systemPrompt, funcTools, requestContext, context, receiver, _genseq, abortSignal) {
|
|
982
1059
|
const apiKey = process.env[providerConfig.apiKeyEnvVar];
|
|
983
1060
|
if (!apiKey)
|
|
984
1061
|
throw new Error(`Missing API key env var ${providerConfig.apiKeyEnvVar}`);
|
|
@@ -986,7 +1063,7 @@ class OpenAiGen {
|
|
|
986
1063
|
throw new Error(`Internal error: Model is undefined for agent '${agent.id}'`);
|
|
987
1064
|
}
|
|
988
1065
|
const client = new openai_1.default({ apiKey, baseURL: providerConfig.baseUrl });
|
|
989
|
-
const requestInput = await buildOpenAiRequestInput(context, providerConfig);
|
|
1066
|
+
const requestInput = await buildOpenAiRequestInput(context, requestContext, providerConfig, receiver.toolResultImageIngest);
|
|
990
1067
|
const openAiParams = agent.model_params?.openai || {};
|
|
991
1068
|
const maxTokens = agent.model_params?.max_tokens;
|
|
992
1069
|
const modelInfo = providerConfig.models[agent.model];
|
|
@@ -1733,7 +1810,7 @@ class OpenAiGen {
|
|
|
1733
1810
|
}
|
|
1734
1811
|
return { usage, llmGenModel: returnedModel };
|
|
1735
1812
|
}
|
|
1736
|
-
async genMoreMessages(providerConfig, agent, systemPrompt, funcTools,
|
|
1813
|
+
async genMoreMessages(providerConfig, agent, systemPrompt, funcTools, requestContext, context, genseq, abortSignal) {
|
|
1737
1814
|
const apiKey = process.env[providerConfig.apiKeyEnvVar];
|
|
1738
1815
|
if (!apiKey)
|
|
1739
1816
|
throw new Error(`Missing API key env var ${providerConfig.apiKeyEnvVar}`);
|
|
@@ -1741,7 +1818,10 @@ class OpenAiGen {
|
|
|
1741
1818
|
throw new Error(`Internal error: Model is undefined for agent '${agent.id}'`);
|
|
1742
1819
|
}
|
|
1743
1820
|
const client = new openai_1.default({ apiKey, baseURL: providerConfig.baseUrl });
|
|
1744
|
-
const
|
|
1821
|
+
const outputs = [];
|
|
1822
|
+
const requestInput = await buildOpenAiRequestInput(context, requestContext, providerConfig, async (ingest) => {
|
|
1823
|
+
outputs.push({ kind: 'tool_result_image_ingest', ingest });
|
|
1824
|
+
});
|
|
1745
1825
|
const openAiParams = agent.model_params?.openai || {};
|
|
1746
1826
|
const maxTokens = agent.model_params?.max_tokens;
|
|
1747
1827
|
const modelInfo = providerConfig.models[agent.model];
|
|
@@ -1782,7 +1862,7 @@ class OpenAiGen {
|
|
|
1782
1862
|
}
|
|
1783
1863
|
const returnedModel = typeof response.model === 'string' ? response.model : undefined;
|
|
1784
1864
|
const usage = parseOpenAiUsage(response.usage);
|
|
1785
|
-
|
|
1865
|
+
outputs.push(...openAiResponseToBatchOutputs(response, genseq));
|
|
1786
1866
|
return {
|
|
1787
1867
|
messages: outputs
|
|
1788
1868
|
.filter((entry) => entry.kind === 'message')
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import type { ToolResultImageArtifact, ToolResultImageDisposition } from '@longrun-ai/kernel/types/storage';
import type { ChatMessage, FuncResultMsg } from '../client';
import type { LlmRequestContext, ToolResultImageIngest } from '../gen';
/** Outcome of reading a tool-result image artifact's bytes from storage. */
type ToolResultImageReadResult = {
    kind: 'ready';
    bytes: Buffer;
} | {
    kind: 'missing';
} | {
    kind: 'read_failed';
    detail: string;
};
/** Per-request tool-result image byte budget for the OpenAI Responses path. */
export declare const OPENAI_TOOL_RESULT_IMAGE_BUDGET_BYTES: number;
/** Fallback byte budget for OpenAI-compatible gateways (no cross-provider standard). */
export declare const OPENAI_COMPATIBLE_TOOL_RESULT_IMAGE_BUDGET_BYTES: number;
/** Byte budget for the Codex path (reuses the OpenAI Responses image path). */
export declare const CODEX_TOOL_RESULT_IMAGE_BUDGET_BYTES: number;
/** Byte budget for the Anthropic path. */
export declare const ANTHROPIC_TOOL_RESULT_IMAGE_BUDGET_BYTES: number;
/** Formats a "budget exceeded" detail string from the image size and the budget. */
export declare function buildToolResultImageBudgetLimitDetail(args: {
    byteLength: number;
    budgetBytes: number;
}): string;
/** Builds the stable dedupe/budget key for one image within one generation round. */
export declare function buildToolResultImageBudgetKey(args: {
    genseq: number;
    toolCallId: string;
    artifact: ToolResultImageArtifact;
}): string;
/** Convenience form of buildToolResultImageBudgetKey driven by a FuncResultMsg. */
export declare function buildToolResultImageBudgetKeyForMsg(msg: FuncResultMsg, artifact: ToolResultImageArtifact): string;
/**
 * Selects which tool-result images fit the byte budget, preferring the most
 * recent ones. Returns the set of budget keys that may be sent natively.
 */
export declare function selectLatestToolResultImagesWithinBudget(context: ChatMessage[], budgetBytes: number): Set<string>;
/** Reads artifact bytes, mapping absence and read errors into a tagged result. */
export declare function readToolResultImageBytesSafe(artifact: ToolResultImageArtifact): Promise<ToolResultImageReadResult>;
/**
 * Builds the ingest record (including a localized human-readable message)
 * describing how a tool-result image was handled for a provider request.
 */
export declare function buildToolResultImageIngest(args: {
    requestContext: LlmRequestContext;
    toolCallId: string;
    toolName: string;
    artifact: ToolResultImageArtifact;
    disposition: ToolResultImageDisposition;
    mimeType?: string;
    detail?: string;
    providerPathLabel?: string;
}): ToolResultImageIngest;
export {};
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ANTHROPIC_TOOL_RESULT_IMAGE_BUDGET_BYTES = exports.CODEX_TOOL_RESULT_IMAGE_BUDGET_BYTES = exports.OPENAI_COMPATIBLE_TOOL_RESULT_IMAGE_BUDGET_BYTES = exports.OPENAI_TOOL_RESULT_IMAGE_BUDGET_BYTES = void 0;
|
|
4
|
+
exports.buildToolResultImageBudgetLimitDetail = buildToolResultImageBudgetLimitDetail;
|
|
5
|
+
exports.buildToolResultImageBudgetKey = buildToolResultImageBudgetKey;
|
|
6
|
+
exports.buildToolResultImageBudgetKeyForMsg = buildToolResultImageBudgetKeyForMsg;
|
|
7
|
+
exports.selectLatestToolResultImagesWithinBudget = selectLatestToolResultImagesWithinBudget;
|
|
8
|
+
exports.readToolResultImageBytesSafe = readToolResultImageBytesSafe;
|
|
9
|
+
exports.buildToolResultImageIngest = buildToolResultImageIngest;
|
|
10
|
+
const work_language_1 = require("../../runtime/work-language");
|
|
11
|
+
const artifacts_1 = require("./artifacts");
|
|
12
|
+
// These are provider-path guardrails derived from publicly documented request payload limits.
|
|
13
|
+
// They are intentionally documented here as coarse transport caps for Dominds' inline image replay,
|
|
14
|
+
// not as claims about the provider's exact per-image validator.
|
|
15
|
+
//
|
|
16
|
+
// OpenAI official vision guide currently documents up to 50 MB total payload size per request.
|
|
17
|
+
// Codex here reuses the OpenAI Responses image path, so we follow the same cap by inference.
|
|
18
|
+
// Anthropic vision docs currently document 32 MB request size limits for standard endpoints.
|
|
19
|
+
//
|
|
20
|
+
// OpenAI-compatible has no cross-provider standard. We keep an OpenAI-like fallback cap here for
|
|
21
|
+
// future opt-in paths, but must not present it as an official guarantee for arbitrary gateways.
|
|
22
|
+
exports.OPENAI_TOOL_RESULT_IMAGE_BUDGET_BYTES = 50 * 1024 * 1024;
|
|
23
|
+
exports.OPENAI_COMPATIBLE_TOOL_RESULT_IMAGE_BUDGET_BYTES = 50 * 1024 * 1024;
|
|
24
|
+
exports.CODEX_TOOL_RESULT_IMAGE_BUDGET_BYTES = 50 * 1024 * 1024;
|
|
25
|
+
exports.ANTHROPIC_TOOL_RESULT_IMAGE_BUDGET_BYTES = 32 * 1024 * 1024;
|
|
26
|
+
/**
 * Formats the diagnostic detail string attached to a budget-limit filter:
 * the offending image's byte length and the active request budget.
 */
function buildToolResultImageBudgetLimitDetail(args) {
    const { byteLength, budgetBytes } = args;
    return `image_bytes=${String(byteLength)}, request_image_budget_bytes=${String(budgetBytes)}`;
}
|
|
29
|
+
/**
 * Builds the stable identity key used for budget selection: the generation
 * sequence, tool call id, and the artifact's root/self ids plus relative path,
 * joined with '::'.
 */
function buildToolResultImageBudgetKey(args) {
    const { genseq, toolCallId, artifact } = args;
    const segments = [
        String(genseq),
        toolCallId,
        artifact.rootId,
        artifact.selfId,
        artifact.relPath,
    ];
    return segments.join('::');
}
|
|
32
|
+
/**
 * Convenience wrapper: derives the budget key for an artifact directly from a
 * func-result message (its genseq and tool call id).
 */
function buildToolResultImageBudgetKeyForMsg(msg, artifact) {
    const keyArgs = { genseq: msg.genseq, toolCallId: msg.id, artifact };
    return buildToolResultImageBudgetKey(keyArgs);
}
|
|
39
|
+
/**
 * Chooses which tool-result images fit inside `budgetBytes`, preferring the
 * most recent ones, and returns the set of their budget keys.
 *
 * First collects every 'input_image' content item from func-result messages in
 * chronological order, then walks that list newest-first, admitting each image
 * that still fits the remaining budget. An oversized image is skipped (not a
 * hard stop), so smaller older images may still be admitted after it —
 * NOTE(review): this skip-don't-break behavior looks deliberate (maximize
 * images fed); confirm against the provider replay design.
 */
function selectLatestToolResultImagesWithinBudget(context, budgetBytes) {
    // Chronological list of { key, byteLength } for every image attachment.
    const found = [];
    for (const msg of context) {
        if (msg.type !== 'func_result_msg')
            continue;
        const contentItems = msg.contentItems;
        if (!Array.isArray(contentItems) || contentItems.length === 0)
            continue;
        for (const entry of contentItems) {
            if (entry.type !== 'input_image')
                continue;
            found.push({
                key: buildToolResultImageBudgetKeyForMsg(msg, entry.artifact),
                byteLength: entry.byteLength,
            });
        }
    }
    // Greedy newest-first admission under the byte budget.
    const allowed = new Set();
    let usedBytes = 0;
    for (const candidate of [...found].reverse()) {
        if (usedBytes + candidate.byteLength > budgetBytes)
            continue; // too big right now; keep trying older/smaller images
        allowed.add(candidate.key);
        usedBytes += candidate.byteLength;
    }
    return allowed;
}
|
|
68
|
+
/**
 * Reads an image artifact's bytes without throwing.
 *
 * Returns { kind: 'ready', bytes } on success, { kind: 'missing' } when the
 * underlying reader yields no bytes, and { kind: 'read_failed', detail } with
 * a "Name: message" (or stringified value) detail when the read throws.
 */
async function readToolResultImageBytesSafe(artifact) {
    let bytes;
    try {
        bytes = await (0, artifacts_1.readDialogArtifactBytes)(artifact);
    }
    catch (error) {
        const detail = error instanceof Error
            ? `${error.name}: ${error.message}`
            : String(error);
        return { kind: 'read_failed', detail };
    }
    if (!bytes) {
        return { kind: 'missing' };
    }
    return { kind: 'ready', bytes };
}
|
|
82
|
+
/**
 * Builds a ToolResultImageIngest record describing how one tool-result image
 * was handled for a provider request (fed to the model, or filtered out and
 * why).
 *
 * The human-readable `message` is localized: when getWorkLanguage() returns
 * 'zh' the Chinese table is used, otherwise the English table. Both switches
 * are exhaustive over the disposition; the default branch is a TypeScript
 * exhaustiveness guard that is unreachable for known dispositions.
 *
 * args.requestContext supplies providerKey/modelKey (blank or non-string
 * values fall back to 'unknown-provider'/'unknown-model'); optional
 * providerPathLabel, mimeType, and detail enrich the filtered-case messages.
 */
function buildToolResultImageIngest(args) {
    const language = (0, work_language_1.getWorkLanguage)();
    // Normalize provider/model identifiers; trim and substitute sentinels when absent.
    const providerKey = typeof args.requestContext.providerKey === 'string' &&
        args.requestContext.providerKey.trim().length > 0
        ? args.requestContext.providerKey.trim()
        : 'unknown-provider';
    const modelKey = typeof args.requestContext.modelKey === 'string' &&
        args.requestContext.modelKey.trim().length > 0
        ? args.requestContext.modelKey.trim()
        : 'unknown-model';
    // "provider/model" pair quoted in every message below.
    const providerModel = `${providerKey}/${modelKey}`;
    const pathLabel = typeof args.providerPathLabel === 'string' && args.providerPathLabel.trim().length > 0
        ? args.providerPathLabel.trim()
        : 'current provider path';
    const relPath = args.artifact.relPath;
    // Only mention the MIME type when a non-blank one was provided.
    const mimeText = typeof args.mimeType === 'string' && args.mimeType.trim().length > 0
        ? args.mimeType
        : undefined;
    // Localized disposition → message table (IIFE keeps `message` a const).
    const message = (() => {
        if (language === 'zh') {
            switch (args.disposition) {
                case 'fed_native':
                    return `本轮已将这张图片发送给 ${providerModel}。`;
                case 'fed_provider_transformed':
                    return `本轮已将这张图片发送给 ${providerModel}(按当前 provider 的原生图片消息方式投喂)。`;
                case 'filtered_provider_unsupported':
                    return `本轮未将这张图片发送给 ${providerModel}:当前 ${pathLabel} 不支持图片输入。对话仍会继续,但后续分析不会使用这张图片。`;
                case 'filtered_model_unsupported':
                    return `本轮未将这张图片发送给 ${providerModel}:当前模型不支持图片输入。对话仍会继续,但后续分析不会使用这张图片。`;
                case 'filtered_mime_unsupported':
                    return `本轮未将这张图片发送给 ${providerModel}:当前 ${pathLabel} 不接受该图片格式${mimeText ? `(${mimeText})` : ''}。对话仍会继续,但后续分析不会使用这张图片。`;
                case 'filtered_size_limit':
                    return `本轮未将这张图片发送给 ${providerModel}:图片超出当前 ${pathLabel} 的限制。对话仍会继续,但后续分析不会使用这张图片。`;
                case 'filtered_read_failed':
                    return `本轮未将这张图片发送给 ${providerModel}:读取图片 artifact 失败(${relPath})。对话仍会继续,但后续分析不会使用这张图片。`;
                case 'filtered_missing':
                    return `本轮未将这张图片发送给 ${providerModel}:图片 artifact 缺失(${relPath})。对话仍会继续,但后续分析不会使用这张图片。`;
                default: {
                    // Exhaustiveness guard: unreachable for known dispositions.
                    const _exhaustive = args.disposition;
                    return _exhaustive;
                }
            }
        }
        switch (args.disposition) {
            case 'fed_native':
                return `This round sent this image to ${providerModel}.`;
            case 'fed_provider_transformed':
                return `This round sent this image to ${providerModel} using the provider's native image-message projection.`;
            case 'filtered_provider_unsupported':
                return `This round did not send this image to ${providerModel}: the current ${pathLabel} does not support image input. The dialog will continue, but later analysis will not use this image.`;
            case 'filtered_model_unsupported':
                return `This round did not send this image to ${providerModel}: the current model does not support image input. The dialog will continue, but later analysis will not use this image.`;
            case 'filtered_mime_unsupported':
                return `This round did not send this image to ${providerModel}: the current ${pathLabel} does not accept this image format${mimeText ? ` (${mimeText})` : ''}. The dialog will continue, but later analysis will not use this image.`;
            case 'filtered_size_limit':
                return `This round did not send this image to ${providerModel}: the image exceeds the current ${pathLabel} limit. The dialog will continue, but later analysis will not use this image.`;
            case 'filtered_read_failed':
                return `This round did not send this image to ${providerModel}: failed to read the image artifact (${relPath}). The dialog will continue, but later analysis will not use this image.`;
            case 'filtered_missing':
                return `This round did not send this image to ${providerModel}: the image artifact is missing (${relPath}). The dialog will continue, but later analysis will not use this image.`;
            default: {
                // Exhaustiveness guard: unreachable for known dispositions.
                const _exhaustive = args.disposition;
                return _exhaustive;
            }
        }
    })();
    // `detail` is only included when explicitly supplied.
    return {
        toolCallId: args.toolCallId,
        toolName: args.toolName,
        artifact: args.artifact,
        provider: providerKey,
        model: modelKey,
        disposition: args.disposition,
        message,
        ...(args.detail !== undefined ? { detail: args.detail } : {}),
    };
}
|