phi-code-ai 0.56.4 → 0.74.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +258 -73
- package/dist/api-registry.d.ts.map +1 -1
- package/dist/api-registry.js.map +1 -1
- package/dist/bedrock-provider.d.ts.map +1 -1
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +1 -1
- package/dist/cli.js.map +1 -1
- package/dist/env-api-keys.d.ts +9 -0
- package/dist/env-api-keys.d.ts.map +1 -1
- package/dist/env-api-keys.js +96 -30
- package/dist/env-api-keys.js.map +1 -1
- package/dist/image-models.d.ts +10 -0
- package/dist/image-models.d.ts.map +1 -0
- package/dist/image-models.generated.d.ts +305 -0
- package/dist/image-models.generated.d.ts.map +1 -0
- package/dist/image-models.generated.js +307 -0
- package/dist/image-models.generated.js.map +1 -0
- package/dist/image-models.js +23 -0
- package/dist/image-models.js.map +1 -0
- package/dist/images-api-registry.d.ts +14 -0
- package/dist/images-api-registry.d.ts.map +1 -0
- package/dist/images-api-registry.js +22 -0
- package/dist/images-api-registry.js.map +1 -0
- package/dist/images.d.ts +4 -0
- package/dist/images.d.ts.map +1 -0
- package/dist/images.js +14 -0
- package/dist/images.js.map +1 -0
- package/dist/index.d.ts +20 -11
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -9
- package/dist/index.js.map +1 -1
- package/dist/models.d.ts +3 -9
- package/dist/models.d.ts.map +1 -1
- package/dist/models.generated.d.ts +6525 -2231
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +8992 -5524
- package/dist/models.generated.js.map +1 -1
- package/dist/models.js +28 -12
- package/dist/models.js.map +1 -1
- package/dist/oauth.d.ts.map +1 -1
- package/dist/providers/amazon-bedrock.d.ts +23 -0
- package/dist/providers/amazon-bedrock.d.ts.map +1 -1
- package/dist/providers/amazon-bedrock.js +206 -44
- package/dist/providers/amazon-bedrock.js.map +1 -1
- package/dist/providers/anthropic.d.ts +23 -2
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +294 -63
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/azure-openai-responses.d.ts.map +1 -1
- package/dist/providers/azure-openai-responses.js +47 -23
- package/dist/providers/azure-openai-responses.js.map +1 -1
- package/dist/providers/cloudflare.d.ts +13 -0
- package/dist/providers/cloudflare.d.ts.map +1 -0
- package/dist/providers/cloudflare.js +26 -0
- package/dist/providers/cloudflare.js.map +1 -0
- package/dist/providers/faux.d.ts +56 -0
- package/dist/providers/faux.d.ts.map +1 -0
- package/dist/providers/faux.js +368 -0
- package/dist/providers/faux.js.map +1 -0
- package/dist/providers/github-copilot-headers.d.ts.map +1 -1
- package/dist/providers/github-copilot-headers.js.map +1 -1
- package/dist/providers/google-shared.d.ts +7 -2
- package/dist/providers/google-shared.d.ts.map +1 -1
- package/dist/providers/google-shared.js +53 -24
- package/dist/providers/google-shared.js.map +1 -1
- package/dist/providers/google-vertex.d.ts +1 -1
- package/dist/providers/google-vertex.d.ts.map +1 -1
- package/dist/providers/google-vertex.js +87 -16
- package/dist/providers/google-vertex.js.map +1 -1
- package/dist/providers/google.d.ts +1 -1
- package/dist/providers/google.d.ts.map +1 -1
- package/dist/providers/google.js +57 -9
- package/dist/providers/google.js.map +1 -1
- package/dist/providers/images/openrouter.d.ts +3 -0
- package/dist/providers/images/openrouter.d.ts.map +1 -0
- package/dist/providers/images/openrouter.js +129 -0
- package/dist/providers/images/openrouter.js.map +1 -0
- package/dist/providers/images/register-builtins.d.ts +4 -0
- package/dist/providers/images/register-builtins.d.ts.map +1 -0
- package/dist/providers/images/register-builtins.js +34 -0
- package/dist/providers/images/register-builtins.js.map +1 -0
- package/dist/providers/mistral.d.ts +3 -0
- package/dist/providers/mistral.d.ts.map +1 -1
- package/dist/providers/mistral.js +49 -9
- package/dist/providers/mistral.js.map +1 -1
- package/dist/providers/openai-codex-responses.d.ts +21 -0
- package/dist/providers/openai-codex-responses.d.ts.map +1 -1
- package/dist/providers/openai-codex-responses.js +443 -86
- package/dist/providers/openai-codex-responses.js.map +1 -1
- package/dist/providers/openai-completions.d.ts +5 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +459 -225
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses-shared.d.ts +1 -0
- package/dist/providers/openai-responses-shared.d.ts.map +1 -1
- package/dist/providers/openai-responses-shared.js +95 -45
- package/dist/providers/openai-responses-shared.js.map +1 -1
- package/dist/providers/openai-responses.d.ts.map +1 -1
- package/dist/providers/openai-responses.js +66 -44
- package/dist/providers/openai-responses.js.map +1 -1
- package/dist/providers/register-builtins.d.ts +27 -2
- package/dist/providers/register-builtins.d.ts.map +1 -1
- package/dist/providers/register-builtins.js +157 -52
- package/dist/providers/register-builtins.js.map +1 -1
- package/dist/providers/simple-options.d.ts.map +1 -1
- package/dist/providers/simple-options.js +5 -1
- package/dist/providers/simple-options.js.map +1 -1
- package/dist/providers/transform-messages.d.ts.map +1 -1
- package/dist/providers/transform-messages.js +63 -34
- package/dist/providers/transform-messages.js.map +1 -1
- package/dist/session-resources.d.ts +4 -0
- package/dist/session-resources.d.ts.map +1 -0
- package/dist/session-resources.js +22 -0
- package/dist/session-resources.js.map +1 -0
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +219 -15
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/diagnostics.d.ts +19 -0
- package/dist/utils/diagnostics.d.ts.map +1 -0
- package/dist/utils/diagnostics.js +25 -0
- package/dist/utils/diagnostics.js.map +1 -0
- package/dist/utils/event-stream.d.ts.map +1 -1
- package/dist/utils/event-stream.js +7 -3
- package/dist/utils/event-stream.js.map +1 -1
- package/dist/utils/hash.d.ts.map +1 -1
- package/dist/utils/hash.js.map +1 -1
- package/dist/utils/headers.d.ts +2 -0
- package/dist/utils/headers.d.ts.map +1 -0
- package/dist/utils/headers.js +8 -0
- package/dist/utils/headers.js.map +1 -0
- package/dist/utils/json-parse.d.ts +8 -1
- package/dist/utils/json-parse.d.ts.map +1 -1
- package/dist/utils/json-parse.js +89 -5
- package/dist/utils/json-parse.js.map +1 -1
- package/dist/utils/oauth/anthropic.d.ts +14 -6
- package/dist/utils/oauth/anthropic.d.ts.map +1 -1
- package/dist/utils/oauth/anthropic.js +288 -57
- package/dist/utils/oauth/anthropic.js.map +1 -1
- package/dist/utils/oauth/github-copilot.d.ts.map +1 -1
- package/dist/utils/oauth/github-copilot.js +23 -12
- package/dist/utils/oauth/github-copilot.js.map +1 -1
- package/dist/utils/oauth/index.d.ts +0 -4
- package/dist/utils/oauth/index.d.ts.map +1 -1
- package/dist/utils/oauth/index.js +0 -10
- package/dist/utils/oauth/index.js.map +1 -1
- package/dist/utils/oauth/oauth-page.d.ts +3 -0
- package/dist/utils/oauth/oauth-page.d.ts.map +1 -0
- package/dist/utils/oauth/oauth-page.js +105 -0
- package/dist/utils/oauth/oauth-page.js.map +1 -0
- package/dist/utils/oauth/openai-codex.d.ts.map +1 -1
- package/dist/utils/oauth/openai-codex.js +51 -46
- package/dist/utils/oauth/openai-codex.js.map +1 -1
- package/dist/utils/oauth/pkce.d.ts.map +1 -1
- package/dist/utils/oauth/pkce.js.map +1 -1
- package/dist/utils/oauth/types.d.ts +10 -0
- package/dist/utils/oauth/types.d.ts.map +1 -1
- package/dist/utils/oauth/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +7 -3
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +46 -13
- package/dist/utils/overflow.js.map +1 -1
- package/dist/utils/sanitize-unicode.d.ts.map +1 -1
- package/dist/utils/sanitize-unicode.js.map +1 -1
- package/dist/utils/typebox-helpers.d.ts +1 -1
- package/dist/utils/typebox-helpers.d.ts.map +1 -1
- package/dist/utils/typebox-helpers.js +1 -1
- package/dist/utils/typebox-helpers.js.map +1 -1
- package/dist/utils/validation.d.ts.map +1 -1
- package/dist/utils/validation.js +247 -38
- package/dist/utils/validation.js.map +1 -1
- package/package.json +43 -13
- package/bedrock-provider.d.ts +0 -1
- package/bedrock-provider.js +0 -1
- package/dist/providers/google-gemini-cli.d.ts +0 -74
- package/dist/providers/google-gemini-cli.d.ts.map +0 -1
- package/dist/providers/google-gemini-cli.js +0 -754
- package/dist/providers/google-gemini-cli.js.map +0 -1
- package/dist/utils/oauth/google-antigravity.d.ts +0 -26
- package/dist/utils/oauth/google-antigravity.d.ts.map +0 -1
- package/dist/utils/oauth/google-antigravity.js +0 -373
- package/dist/utils/oauth/google-antigravity.js.map +0 -1
- package/dist/utils/oauth/google-gemini-cli.d.ts +0 -26
- package/dist/utils/oauth/google-gemini-cli.d.ts.map +0 -1
- package/dist/utils/oauth/google-gemini-cli.js +0 -478
- package/dist/utils/oauth/google-gemini-cli.js.map +0 -1
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
import OpenAI from "openai";
|
|
2
|
+
import { getEnvApiKey } from "../../env-api-keys.js";
|
|
3
|
+
import { headersToRecord } from "../../utils/headers.js";
|
|
4
|
+
import { sanitizeSurrogates } from "../../utils/sanitize-unicode.js";
|
|
5
|
+
/**
 * Generate images via OpenRouter's chat-completions endpoint.
 *
 * Always resolves with a result object: on failure the error text is stored
 * in `errorMessage` and `stopReason` becomes "aborted" (if the caller's
 * signal fired) or "error" — the promise never rejects.
 */
export const generateImagesOpenRouter = async (model, context, options) => {
    const result = {
        api: model.api,
        provider: model.provider,
        model: model.id,
        output: [],
        stopReason: "stop",
        timestamp: Date.now(),
    };
    try {
        const apiKey = options?.apiKey || getEnvApiKey(model.provider);
        if (!apiKey) {
            throw new Error(`No API key available for provider: ${model.provider}`);
        }
        const client = createClient(model, apiKey, options?.headers);
        let params = buildParams(model, context);
        // Give the caller a chance to inspect or replace the outgoing payload.
        const overridden = await options?.onPayload?.(params, model);
        if (overridden !== undefined) {
            params = overridden;
        }
        const requestOptions = {};
        if (options?.signal) {
            requestOptions.signal = options.signal;
        }
        if (options?.timeoutMs !== undefined) {
            requestOptions.timeout = options.timeoutMs;
        }
        if (options?.maxRetries !== undefined) {
            requestOptions.maxRetries = options.maxRetries;
        }
        const { data: completion, response: rawResponse } = await client.chat.completions
            .create(params, requestOptions)
            .withResponse();
        await options?.onResponse?.({ status: rawResponse.status, headers: headersToRecord(rawResponse.headers) }, model);
        result.responseId = completion.id;
        if (completion.usage) {
            result.usage = parseUsage(completion.usage, model);
        }
        const firstChoice = completion.choices[0];
        if (firstChoice) {
            const text = firstChoice.message.content;
            if (typeof text === "string" && text.length > 0) {
                result.output.push({ type: "text", text });
            }
            // OpenRouter returns generated images as data URLs on message.images.
            for (const image of firstChoice.message.images ?? []) {
                const url = typeof image.image_url === "string" ? image.image_url : image.image_url?.url;
                const parsed = url?.match(/^data:([^;]+);base64,(.+)$/);
                if (!parsed) {
                    continue;
                }
                result.output.push({
                    type: "image",
                    mimeType: parsed[1],
                    data: parsed[2],
                });
            }
        }
        return result;
    }
    catch (error) {
        result.stopReason = options?.signal?.aborted ? "aborted" : "error";
        result.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);
        return result;
    }
};
|
|
67
|
+
/**
 * Build an OpenAI SDK client pointed at the model's base URL.
 * Model-level headers are spread first so per-call headers can override them.
 */
function createClient(model, apiKey, optionsHeaders) {
    const defaultHeaders = {
        ...model.headers,
        ...optionsHeaders,
    };
    return new OpenAI({
        apiKey,
        baseURL: model.baseUrl,
        // The SDK blocks browser usage by default; explicitly opt in here.
        dangerouslyAllowBrowser: true,
        defaultHeaders,
    });
}
|
|
78
|
+
/**
 * Translate the images context into OpenRouter chat-completion parameters.
 * Text inputs are sanitized for lone surrogates; image inputs are inlined
 * as base64 data URLs. "text" is only requested as an output modality when
 * the model declares text output support.
 */
function buildParams(model, context) {
    const content = [];
    for (const item of context.input) {
        if (item.type === "text") {
            content.push({
                type: "text",
                text: sanitizeSurrogates(item.text),
            });
        }
        else {
            content.push({
                type: "image_url",
                image_url: {
                    url: `data:${item.mimeType};base64,${item.data}`,
                },
            });
        }
    }
    const wantsText = model.output.includes("text");
    return {
        model: model.id,
        messages: [
            {
                role: "user",
                content,
            },
        ],
        stream: false,
        modalities: wantsText ? ["image", "text"] : ["image"],
    };
}
|
|
105
|
+
/**
 * Convert a raw OpenRouter usage payload into the normalized usage shape,
 * with per-category token counts and dollar costs.
 *
 * Token accounting: when cache-write tokens are reported, the provider's
 * cached-token count includes them, so reads = reported - writes (clamped
 * at zero). Plain input tokens are whatever remains of prompt_tokens after
 * cache reads and writes are subtracted (also clamped at zero).
 */
function parseUsage(rawUsage, model) {
    const promptTokens = rawUsage.prompt_tokens || 0;
    const details = rawUsage.prompt_tokens_details;
    const reportedCached = details?.cached_tokens || 0;
    const cacheWrite = details?.cache_write_tokens || 0;
    const cacheRead = cacheWrite > 0 ? Math.max(0, reportedCached - cacheWrite) : reportedCached;
    const input = Math.max(0, promptTokens - cacheRead - cacheWrite);
    const output = rawUsage.completion_tokens || 0;
    // model.cost rates are USD per million tokens.
    const cost = {
        input: (model.cost.input / 1000000) * input,
        output: (model.cost.output / 1000000) * output,
        cacheRead: (model.cost.cacheRead / 1000000) * cacheRead,
        cacheWrite: (model.cost.cacheWrite / 1000000) * cacheWrite,
        total: 0,
    };
    cost.total = cost.input + cost.output + cost.cacheRead + cost.cacheWrite;
    return {
        input,
        output,
        cacheRead,
        cacheWrite,
        totalTokens: input + output + cacheRead + cacheWrite,
        cost,
    };
}
|
|
129
|
+
//# sourceMappingURL=openrouter.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openrouter.js","sourceRoot":"","sources":["../../../src/providers/images/openrouter.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAQ5B,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAUrD,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAC;AACzD,OAAO,EAAE,kBAAkB,EAAE,MAAM,iCAAiC,CAAC;AAkBrE,MAAM,CAAC,MAAM,wBAAwB,GAAuD,KAAK,EAChG,KAAuC,EACvC,OAAsB,EACtB,OAAuB,EACtB,EAAE,CAAC;IACJ,MAAM,MAAM,GAAoB;QAC/B,GAAG,EAAE,KAAK,CAAC,GAAG;QACd,QAAQ,EAAE,KAAK,CAAC,QAAQ;QACxB,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,MAAM,EAAE,EAAE;QACV,UAAU,EAAE,MAAM;QAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;IAEF,IAAI,CAAC;QACJ,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC/D,IAAI,CAAC,MAAM,EAAE,CAAC;YACb,MAAM,IAAI,KAAK,CAAC,sCAAsC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QACzE,CAAC;QACD,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAC7D,IAAI,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QACzC,MAAM,UAAU,GAAG,MAAM,OAAO,EAAE,SAAS,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QAC7D,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,GAAG,UAA2B,CAAC;QACtC,CAAC;QACD,MAAM,cAAc,GAAG;YACtB,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YACtD,GAAG,CAAC,OAAO,EAAE,SAAS,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YAC3E,GAAG,CAAC,OAAO,EAAE,UAAU,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;SAChF,CAAC;QACF,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW;aAC7E,MAAM,CAAC,MAA2D,EAAE,cAAc,CAAC;aACnF,YAAY,EAAE,CAAC;QACjB,MAAM,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,MAAM,EAAE,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC,OAAO,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;QAElH,MAAM,aAAa,GAAG,QAA6C,CAAC;QACpE,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,EAAE,CAAC;QACrC,IAAI,aAAa,CAAC,KAAK,EAAE,CAAC;YACzB,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;QACvD,CAAC;QAED,MAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACxC,IAAI,MAAM,EAAE,CAAC;YACZ,MAAM
,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC;YACvC,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAwB,CAAC,CAAC;YAC3E,CAAC;YAED,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,IAAI,EAAE,EAAE,CAAC;gBACjD,MAAM,QAAQ,GAAG,OAAO,KAAK,CAAC,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,EAAE,GAAG,CAAC;gBAC9F,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC,OAAO,CAAC;oBAAE,SAAS;gBAC7C,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;gBAC7D,IAAI,CAAC,OAAO;oBAAE,SAAS;gBACvB,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;oBAClB,IAAI,EAAE,OAAO;oBACb,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;oBACpB,IAAI,EAAE,OAAO,CAAC,CAAC,CAAC;iBACO,CAAC,CAAC;YAC3B,CAAC;QACF,CAAC;QAED,OAAO,MAAM,CAAC;IACf,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAChB,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;QACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;QACrF,OAAO,MAAM,CAAC;IACf,CAAC;AAAA,CACD,CAAC;AAEF,SAAS,YAAY,CACpB,KAAuC,EACvC,MAAc,EACd,cAAuC,EAC9B;IACT,OAAO,IAAI,MAAM,CAAC;QACjB,MAAM;QACN,OAAO,EAAE,KAAK,CAAC,OAAO;QACtB,uBAAuB,EAAE,IAAI;QAC7B,cAAc,EAAE;YACf,GAAG,KAAK,CAAC,OAAO;YAChB,GAAG,cAAc;SACjB;KACD,CAAC,CAAC;AAAA,CACH;AAMD,SAAS,WAAW,CAAC,KAAuC,EAAE,OAAsB,EAAgC;IACnH,MAAM,OAAO,GAAgC,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAA6B,EAAE,CAAC;QACnG,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC1B,OAAO;gBACN,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;aACK,CAAC;QAC3C,CAAC;QACD,OAAO;YACN,IAAI,EAAE,WAAW;YACjB,SAAS,EAAE;gBACV,GAAG,EAAE,QAAQ,IAAI,CAAC,QAAQ,WAAW,IAAI,CAAC,IAAI,EAAE;aAChD;SACwC,CAAC;IAAA,CAC3C,CAAC,CAAC;IAEH,OAAO;QACN,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ,EAAE;YACT;gBACC,IAAI,EAAE,MAAe;gBACrB,OAAO;aACP;SACD;QACD,MAAM,EAAE,KAAK;QACb,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;KACzE,CAAC;AAAA,CACF;AAED,SAAS,UAAU,CAClB,QAIC,EACD,KAAuC
,EACtC;IACD,MAAM,YAAY,GAAG,QAAQ,CAAC,aAAa,IAAI,CAAC,CAAC;IACjD,MAAM,oBAAoB,GAAG,QAAQ,CAAC,qBAAqB,EAAE,aAAa,IAAI,CAAC,CAAC;IAChF,MAAM,gBAAgB,GAAG,QAAQ,CAAC,qBAAqB,EAAE,kBAAkB,IAAI,CAAC,CAAC;IACjF,MAAM,eAAe,GACpB,gBAAgB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,oBAAoB,GAAG,gBAAgB,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC;IACpG,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,YAAY,GAAG,eAAe,GAAG,gBAAgB,CAAC,CAAC;IAC7E,MAAM,MAAM,GAAG,QAAQ,CAAC,iBAAiB,IAAI,CAAC,CAAC;IAC/C,MAAM,KAAK,GAAG;QACb,KAAK;QACL,MAAM;QACN,SAAS,EAAE,eAAe;QAC1B,UAAU,EAAE,gBAAgB;QAC5B,WAAW,EAAE,KAAK,GAAG,MAAM,GAAG,eAAe,GAAG,gBAAgB;QAChE,IAAI,EAAE;YACL,KAAK,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,KAAK;YAC3C,MAAM,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,MAAM;YAC9C,SAAS,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,eAAe;YAC7D,UAAU,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,GAAG,gBAAgB;YAChE,KAAK,EAAE,CAAC;SACR;KACD,CAAC;IACF,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;IACvG,OAAO,KAAK,CAAC;AAAA,CACb","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletion,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionCreateParamsNonStreaming,\n} from \"openai/resources/chat/completions.js\";\nimport { getEnvApiKey } from \"../../env-api-keys.js\";\nimport type {\n\tAssistantImages,\n\tImageContent,\n\tImagesContext,\n\tImagesFunction,\n\tImagesModel,\n\tImagesOptions,\n\tTextContent,\n} from \"../../types.js\";\nimport { headersToRecord } from \"../../utils/headers.js\";\nimport { sanitizeSurrogates } from \"../../utils/sanitize-unicode.js\";\n\ninterface OpenRouterGeneratedImage {\n\timage_url?: string | { url?: string };\n}\n\ntype OpenRouterImageGenerationMessage = ChatCompletion[\"choices\"][number][\"message\"] & {\n\timages?: OpenRouterGeneratedImage[];\n};\n\ntype 
OpenRouterImageGenerationChoice = ChatCompletion[\"choices\"][number] & {\n\tmessage: OpenRouterImageGenerationMessage;\n};\n\ntype OpenRouterImageGenerationResponse = ChatCompletion & {\n\tchoices: OpenRouterImageGenerationChoice[];\n};\n\nexport const generateImagesOpenRouter: ImagesFunction<\"openrouter-images\", ImagesOptions> = async (\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tcontext: ImagesContext,\n\toptions?: ImagesOptions,\n) => {\n\tconst output: AssistantImages = {\n\t\tapi: model.api,\n\t\tprovider: model.provider,\n\t\tmodel: model.id,\n\t\toutput: [],\n\t\tstopReason: \"stop\",\n\t\ttimestamp: Date.now(),\n\t};\n\n\ttry {\n\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\t\tif (!apiKey) {\n\t\t\tthrow new Error(`No API key available for provider: ${model.provider}`);\n\t\t}\n\t\tconst client = createClient(model, apiKey, options?.headers);\n\t\tlet params = buildParams(model, context);\n\t\tconst nextParams = await options?.onPayload?.(params, model);\n\t\tif (nextParams !== undefined) {\n\t\t\tparams = nextParams as typeof params;\n\t\t}\n\t\tconst requestOptions = {\n\t\t\t...(options?.signal ? { signal: options.signal } : {}),\n\t\t\t...(options?.timeoutMs !== undefined ? { timeout: options.timeoutMs } : {}),\n\t\t\t...(options?.maxRetries !== undefined ? 
{ maxRetries: options.maxRetries } : {}),\n\t\t};\n\t\tconst { data: response, response: rawResponse } = await client.chat.completions\n\t\t\t.create(params as unknown as ChatCompletionCreateParamsNonStreaming, requestOptions)\n\t\t\t.withResponse();\n\t\tawait options?.onResponse?.({ status: rawResponse.status, headers: headersToRecord(rawResponse.headers) }, model);\n\n\t\tconst imageResponse = response as OpenRouterImageGenerationResponse;\n\t\toutput.responseId = imageResponse.id;\n\t\tif (imageResponse.usage) {\n\t\t\toutput.usage = parseUsage(imageResponse.usage, model);\n\t\t}\n\n\t\tconst choice = imageResponse.choices[0];\n\t\tif (choice) {\n\t\t\tconst content = choice.message.content;\n\t\t\tif (typeof content === \"string\" && content.length > 0) {\n\t\t\t\toutput.output.push({ type: \"text\", text: content } satisfies TextContent);\n\t\t\t}\n\n\t\t\tfor (const image of choice.message.images ?? []) {\n\t\t\t\tconst imageUrl = typeof image.image_url === \"string\" ? image.image_url : image.image_url?.url;\n\t\t\t\tif (!imageUrl?.startsWith(\"data:\")) continue;\n\t\t\t\tconst matches = imageUrl.match(/^data:([^;]+);base64,(.+)$/);\n\t\t\t\tif (!matches) continue;\n\t\t\t\toutput.output.push({\n\t\t\t\t\ttype: \"image\",\n\t\t\t\t\tmimeType: matches[1],\n\t\t\t\t\tdata: matches[2],\n\t\t\t\t} satisfies ImageContent);\n\t\t\t}\n\t\t}\n\n\t\treturn output;\n\t} catch (error) {\n\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\toutput.errorMessage = error instanceof Error ? 
error.message : JSON.stringify(error);\n\t\treturn output;\n\t}\n};\n\nfunction createClient(\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tapiKey: string,\n\toptionsHeaders?: Record<string, string>,\n): OpenAI {\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: {\n\t\t\t...model.headers,\n\t\t\t...optionsHeaders,\n\t\t},\n\t});\n}\n\ntype OpenRouterImagesCreateParams = Omit<ChatCompletionCreateParamsNonStreaming, \"modalities\"> & {\n\tmodalities: Array<\"image\" | \"text\">;\n};\n\nfunction buildParams(model: ImagesModel<\"openrouter-images\">, context: ImagesContext): OpenRouterImagesCreateParams {\n\tconst content: ChatCompletionContentPart[] = context.input.map((item): ChatCompletionContentPart => {\n\t\tif (item.type === \"text\") {\n\t\t\treturn {\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t}\n\t\treturn {\n\t\t\ttype: \"image_url\",\n\t\t\timage_url: {\n\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t},\n\t\t} satisfies ChatCompletionContentPartImage;\n\t});\n\n\treturn {\n\t\tmodel: model.id,\n\t\tmessages: [\n\t\t\t{\n\t\t\t\trole: \"user\" as const,\n\t\t\t\tcontent,\n\t\t\t},\n\t\t],\n\t\tstream: false,\n\t\tmodalities: model.output.includes(\"text\") ? [\"image\", \"text\"] : [\"image\"],\n\t};\n}\n\nfunction parseUsage(\n\trawUsage: {\n\t\tprompt_tokens?: number;\n\t\tcompletion_tokens?: number;\n\t\tprompt_tokens_details?: { cached_tokens?: number; cache_write_tokens?: number };\n\t},\n\tmodel: ImagesModel<\"openrouter-images\">,\n) {\n\tconst promptTokens = rawUsage.prompt_tokens || 0;\n\tconst reportedCachedTokens = rawUsage.prompt_tokens_details?.cached_tokens || 0;\n\tconst cacheWriteTokens = rawUsage.prompt_tokens_details?.cache_write_tokens || 0;\n\tconst cacheReadTokens =\n\t\tcacheWriteTokens > 0 ? 
Math.max(0, reportedCachedTokens - cacheWriteTokens) : reportedCachedTokens;\n\tconst input = Math.max(0, promptTokens - cacheReadTokens - cacheWriteTokens);\n\tconst output = rawUsage.completion_tokens || 0;\n\tconst usage = {\n\t\tinput,\n\t\toutput,\n\t\tcacheRead: cacheReadTokens,\n\t\tcacheWrite: cacheWriteTokens,\n\t\ttotalTokens: input + output + cacheReadTokens + cacheWriteTokens,\n\t\tcost: {\n\t\t\tinput: (model.cost.input / 1000000) * input,\n\t\t\toutput: (model.cost.output / 1000000) * output,\n\t\t\tcacheRead: (model.cost.cacheRead / 1000000) * cacheReadTokens,\n\t\t\tcacheWrite: (model.cost.cacheWrite / 1000000) * cacheWriteTokens,\n\t\t\ttotal: 0,\n\t\t},\n\t};\n\tusage.cost.total = usage.cost.input + usage.cost.output + usage.cost.cacheRead + usage.cost.cacheWrite;\n\treturn usage;\n}\n"]}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { ImagesFunction, ImagesOptions } from "../../types.js";
|
|
2
|
+
export declare const generateImagesOpenRouter: ImagesFunction<"openrouter-images", ImagesOptions>;
|
|
3
|
+
export declare function registerBuiltInImagesApiProviders(): void;
|
|
4
|
+
//# sourceMappingURL=register-builtins.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"register-builtins.d.ts","sourceRoot":"","sources":["../../../src/providers/images/register-builtins.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAkC,cAAc,EAAe,aAAa,EAAE,MAAM,gBAAgB,CAAC;AA4BjH,eAAO,MAAM,wBAAwB,EAAE,cAAc,CAAC,mBAAmB,EAAE,aAAa,CAWvF,CAAC;AAEF,wBAAgB,iCAAiC,IAAI,IAAI,CAKxD","sourcesContent":["import { registerImagesApiProvider } from \"../../images-api-registry.js\";\nimport type { AssistantImages, ImagesContext, ImagesFunction, ImagesModel, ImagesOptions } from \"../../types.js\";\nimport type { generateImagesOpenRouter as generateImagesOpenRouterFunction } from \"./openrouter.js\";\n\ninterface OpenRouterImagesProviderModule {\n\tgenerateImagesOpenRouter: typeof generateImagesOpenRouterFunction;\n}\n\nlet openRouterImagesProviderModulePromise: Promise<OpenRouterImagesProviderModule> | undefined;\n\nfunction createLazyLoadErrorImages(model: ImagesModel<\"openrouter-images\">, error: unknown): AssistantImages {\n\treturn {\n\t\tapi: model.api,\n\t\tprovider: model.provider,\n\t\tmodel: model.id,\n\t\toutput: [],\n\t\tstopReason: \"error\",\n\t\terrorMessage: error instanceof Error ? 
error.message : String(error),\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction loadOpenRouterImagesProviderModule(): Promise<OpenRouterImagesProviderModule> {\n\topenRouterImagesProviderModulePromise ||= import(\"./openrouter.js\").then(\n\t\t(module) => module as OpenRouterImagesProviderModule,\n\t);\n\treturn openRouterImagesProviderModulePromise;\n}\n\nexport const generateImagesOpenRouter: ImagesFunction<\"openrouter-images\", ImagesOptions> = async (\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tcontext: ImagesContext,\n\toptions?: ImagesOptions,\n) => {\n\ttry {\n\t\tconst module = await loadOpenRouterImagesProviderModule();\n\t\treturn await module.generateImagesOpenRouter(model, context, options);\n\t} catch (error) {\n\t\treturn createLazyLoadErrorImages(model, error);\n\t}\n};\n\nexport function registerBuiltInImagesApiProviders(): void {\n\tregisterImagesApiProvider({\n\t\tapi: \"openrouter-images\",\n\t\tgenerateImages: generateImagesOpenRouter,\n\t});\n}\n\nregisterBuiltInImagesApiProviders();\n"]}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { registerImagesApiProvider } from "../../images-api-registry.js";
|
|
2
|
+
let openRouterImagesProviderModulePromise;
|
|
3
|
+
/**
 * Build an error-shaped images result for the case where the provider
 * module itself failed to load lazily. Mirrors the successful result shape
 * but with `stopReason: "error"` and the failure text in `errorMessage`.
 */
function createLazyLoadErrorImages(model, error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    return {
        api: model.api,
        provider: model.provider,
        model: model.id,
        output: [],
        stopReason: "error",
        errorMessage,
        timestamp: Date.now(),
    };
}
|
|
14
|
+
/**
 * Lazily import the real OpenRouter images provider, memoizing the promise
 * in module state so the dynamic import happens at most once.
 */
function loadOpenRouterImagesProviderModule() {
    if (!openRouterImagesProviderModulePromise) {
        openRouterImagesProviderModulePromise = import("./openrouter.js").then((module) => module);
    }
    return openRouterImagesProviderModulePromise;
}
|
|
18
|
+
/**
 * Public entry point for OpenRouter image generation. Loads the provider
 * implementation on first use; any failure (including a failed lazy import)
 * is converted into an error-shaped result rather than a rejection.
 */
export const generateImagesOpenRouter = async (model, context, options) => {
    try {
        const providerModule = await loadOpenRouterImagesProviderModule();
        return await providerModule.generateImagesOpenRouter(model, context, options);
    }
    catch (error) {
        return createLazyLoadErrorImages(model, error);
    }
};
|
|
27
|
+
/**
 * Register the built-in image-generation API providers with the registry.
 * Currently this is only the OpenRouter images API.
 */
export function registerBuiltInImagesApiProviders() {
    const openRouterProvider = {
        api: "openrouter-images",
        generateImages: generateImagesOpenRouter,
    };
    registerImagesApiProvider(openRouterProvider);
}
// Self-register at module load so importing this file is sufficient.
registerBuiltInImagesApiProviders();
|
|
34
|
+
//# sourceMappingURL=register-builtins.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"register-builtins.js","sourceRoot":"","sources":["../../../src/providers/images/register-builtins.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,yBAAyB,EAAE,MAAM,8BAA8B,CAAC;AAQzE,IAAI,qCAA0F,CAAC;AAE/F,SAAS,yBAAyB,CAAC,KAAuC,EAAE,KAAc,EAAmB;IAC5G,OAAO;QACN,GAAG,EAAE,KAAK,CAAC,GAAG;QACd,QAAQ,EAAE,KAAK,CAAC,QAAQ;QACxB,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,MAAM,EAAE,EAAE;QACV,UAAU,EAAE,OAAO;QACnB,YAAY,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACpE,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;AAAA,CACF;AAED,SAAS,kCAAkC,GAA4C;IACtF,qCAAqC,KAAK,MAAM,CAAC,iBAAiB,CAAC,CAAC,IAAI,CACvE,CAAC,MAAM,EAAE,EAAE,CAAC,MAAwC,CACpD,CAAC;IACF,OAAO,qCAAqC,CAAC;AAAA,CAC7C;AAED,MAAM,CAAC,MAAM,wBAAwB,GAAuD,KAAK,EAChG,KAAuC,EACvC,OAAsB,EACtB,OAAuB,EACtB,EAAE,CAAC;IACJ,IAAI,CAAC;QACJ,MAAM,MAAM,GAAG,MAAM,kCAAkC,EAAE,CAAC;QAC1D,OAAO,MAAM,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IACvE,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAChB,OAAO,yBAAyB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IAChD,CAAC;AAAA,CACD,CAAC;AAEF,MAAM,UAAU,iCAAiC,GAAS;IACzD,yBAAyB,CAAC;QACzB,GAAG,EAAE,mBAAmB;QACxB,cAAc,EAAE,wBAAwB;KACxC,CAAC,CAAC;AAAA,CACH;AAED,iCAAiC,EAAE,CAAC","sourcesContent":["import { registerImagesApiProvider } from \"../../images-api-registry.js\";\nimport type { AssistantImages, ImagesContext, ImagesFunction, ImagesModel, ImagesOptions } from \"../../types.js\";\nimport type { generateImagesOpenRouter as generateImagesOpenRouterFunction } from \"./openrouter.js\";\n\ninterface OpenRouterImagesProviderModule {\n\tgenerateImagesOpenRouter: typeof generateImagesOpenRouterFunction;\n}\n\nlet openRouterImagesProviderModulePromise: Promise<OpenRouterImagesProviderModule> | undefined;\n\nfunction createLazyLoadErrorImages(model: ImagesModel<\"openrouter-images\">, error: unknown): AssistantImages {\n\treturn {\n\t\tapi: model.api,\n\t\tprovider: model.provider,\n\t\tmodel: model.id,\n\t\toutput: [],\n\t\tstopReason: \"error\",\n\t\terrorMessage: error instanceof 
Error ? error.message : String(error),\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction loadOpenRouterImagesProviderModule(): Promise<OpenRouterImagesProviderModule> {\n\topenRouterImagesProviderModulePromise ||= import(\"./openrouter.js\").then(\n\t\t(module) => module as OpenRouterImagesProviderModule,\n\t);\n\treturn openRouterImagesProviderModulePromise;\n}\n\nexport const generateImagesOpenRouter: ImagesFunction<\"openrouter-images\", ImagesOptions> = async (\n\tmodel: ImagesModel<\"openrouter-images\">,\n\tcontext: ImagesContext,\n\toptions?: ImagesOptions,\n) => {\n\ttry {\n\t\tconst module = await loadOpenRouterImagesProviderModule();\n\t\treturn await module.generateImagesOpenRouter(model, context, options);\n\t} catch (error) {\n\t\treturn createLazyLoadErrorImages(model, error);\n\t}\n};\n\nexport function registerBuiltInImagesApiProviders(): void {\n\tregisterImagesApiProvider({\n\t\tapi: \"openrouter-images\",\n\t\tgenerateImages: generateImagesOpenRouter,\n\t});\n}\n\nregisterBuiltInImagesApiProviders();\n"]}
|
|
@@ -2,6 +2,7 @@ import type { SimpleStreamOptions, StreamFunction, StreamOptions } from "../type
|
|
|
2
2
|
/**
|
|
3
3
|
* Provider-specific options for the Mistral API.
|
|
4
4
|
*/
|
|
5
|
+
type MistralReasoningEffort = "none" | "high";
|
|
5
6
|
export interface MistralOptions extends StreamOptions {
|
|
6
7
|
toolChoice?: "auto" | "none" | "any" | "required" | {
|
|
7
8
|
type: "function";
|
|
@@ -10,6 +11,7 @@ export interface MistralOptions extends StreamOptions {
|
|
|
10
11
|
};
|
|
11
12
|
};
|
|
12
13
|
promptMode?: "reasoning";
|
|
14
|
+
reasoningEffort?: MistralReasoningEffort;
|
|
13
15
|
}
|
|
14
16
|
/**
|
|
15
17
|
* Stream responses from Mistral using `chat.stream`.
|
|
@@ -19,4 +21,5 @@ export declare const streamMistral: StreamFunction<"mistral-conversations", Mist
|
|
|
19
21
|
* Maps provider-agnostic `SimpleStreamOptions` to Mistral options.
|
|
20
22
|
*/
|
|
21
23
|
export declare const streamSimpleMistral: StreamFunction<"mistral-conversations", SimpleStreamOptions>;
|
|
24
|
+
export {};
|
|
22
25
|
//# sourceMappingURL=mistral.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"mistral.d.ts","sourceRoot":"","sources":["../../src/providers/mistral.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EAKX,mBAAmB,EAEnB,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAWrB;;GAEG;AACH,MAAM,WAAW,cAAe,SAAQ,aAAa;IACpD,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,UAAU,GAAG;QAAE,IAAI,EAAE,UAAU,CAAC;QAAC,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE,CAAC;IACrG,UAAU,CAAC,EAAE,WAAW,CAAC;CACzB;AAED;;GAEG;AACH,eAAO,MAAM,aAAa,EAAE,cAAc,CAAC,uBAAuB,EAAE,cAAc,CAkDjF,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,mBAAmB,EAAE,cAAc,CAAC,uBAAuB,EAAE,mBAAmB,CAiB5F,CAAC"}
|
|
1
|
+
{"version":3,"file":"mistral.d.ts","sourceRoot":"","sources":["../../src/providers/mistral.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAKX,mBAAmB,EAEnB,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAWrB;;GAEG;AACH,KAAK,sBAAsB,GAAG,MAAM,GAAG,MAAM,CAAC;AAE9C,MAAM,WAAW,cAAe,SAAQ,aAAa;IACpD,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,UAAU,GAAG;QAAE,IAAI,EAAE,UAAU,CAAC;QAAC,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE,CAAC;IACrG,UAAU,CAAC,EAAE,WAAW,CAAC;IACzB,eAAe,CAAC,EAAE,sBAAsB,CAAC;CACzC;AAED;;GAEG;AACH,eAAO,MAAM,aAAa,EAAE,cAAc,CAAC,uBAAuB,EAAE,cAAc,CAyDjF,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,mBAAmB,EAAE,cAAc,CAAC,uBAAuB,EAAE,mBAAmB,CAqB5F,CAAC","sourcesContent":["import { Mistral } from \"@mistralai/mistralai\";\nimport type {\n\tChatCompletionStreamRequest,\n\tChatCompletionStreamRequestMessage,\n\tCompletionEvent,\n\tContentChunk,\n\tFunctionTool,\n} from \"@mistralai/mistralai/models/components\";\nimport { getEnvApiKey } from \"../env-api-keys.js\";\nimport { calculateCost, clampThinkingLevel } from \"../models.js\";\nimport type {\n\tAssistantMessage,\n\tContext,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { shortHash } from \"../utils/hash.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { buildBaseOptions } from \"./simple-options.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\nconst MISTRAL_TOOL_CALL_ID_LENGTH = 9;\nconst MAX_MISTRAL_ERROR_BODY_CHARS = 4000;\n\n/**\n * Provider-specific options for the Mistral API.\n */\ntype MistralReasoningEffort = \"none\" | \"high\";\n\nexport interface MistralOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"any\" | \"required\" | { type: \"function\"; function: { name: 
string } };\n\tpromptMode?: \"reasoning\";\n\treasoningEffort?: MistralReasoningEffort;\n}\n\n/**\n * Stream responses from Mistral using `chat.stream`.\n */\nexport const streamMistral: StreamFunction<\"mistral-conversations\", MistralOptions> = (\n\tmodel: Model<\"mistral-conversations\">,\n\tcontext: Context,\n\toptions?: MistralOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output = createOutput(model);\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\t\t\tif (!apiKey) {\n\t\t\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t\t\t}\n\n\t\t\t// Intentionally per-request: avoids shared SDK mutable state across concurrent consumers.\n\t\t\tconst mistral = new Mistral({\n\t\t\t\tapiKey,\n\t\t\t\tserverURL: model.baseUrl,\n\t\t\t});\n\n\t\t\tconst normalizeMistralToolCallId = createMistralToolCallIdNormalizer();\n\t\t\tconst transformedMessages = transformMessages(context.messages, model, (id) => normalizeMistralToolCallId(id));\n\n\t\t\tlet payload = buildChatPayload(model, context, transformedMessages, options);\n\t\t\tconst nextPayload = await options?.onPayload?.(payload, model);\n\t\t\tif (nextPayload !== undefined) {\n\t\t\t\tpayload = nextPayload as ChatCompletionStreamRequest;\n\t\t\t}\n\t\t\tconst mistralStream = await mistral.chat.stream(payload, buildRequestOptions(model, options));\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tawait consumeChatStream(model, output, stream, mistralStream);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of 
output.content) {\n\t\t\t\t// partialArgs is only a streaming scratch buffer; never persist it.\n\t\t\t\tdelete (block as { partialArgs?: string }).partialArgs;\n\t\t\t}\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = formatMistralError(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\n/**\n * Maps provider-agnostic `SimpleStreamOptions` to Mistral options.\n */\nexport const streamSimpleMistral: StreamFunction<\"mistral-conversations\", SimpleStreamOptions> = (\n\tmodel: Model<\"mistral-conversations\">,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream => {\n\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst base = buildBaseOptions(model, options, apiKey);\n\tconst clampedReasoning = options?.reasoning ? clampThinkingLevel(model, options.reasoning) : undefined;\n\tconst reasoning = clampedReasoning === \"off\" ? undefined : clampedReasoning;\n\tconst shouldUseReasoning = model.reasoning && reasoning !== undefined;\n\n\treturn streamMistral(model, context, {\n\t\t...base,\n\t\tpromptMode: shouldUseReasoning && usesPromptModeReasoning(model) ? \"reasoning\" : undefined,\n\t\treasoningEffort:\n\t\t\tshouldUseReasoning && usesReasoningEffort(model) ? 
mapReasoningEffort(model, reasoning) : undefined,\n\t} satisfies MistralOptions);\n};\n\nfunction createOutput(model: Model<\"mistral-conversations\">): AssistantMessage {\n\treturn {\n\t\trole: \"assistant\",\n\t\tcontent: [],\n\t\tapi: model.api,\n\t\tprovider: model.provider,\n\t\tmodel: model.id,\n\t\tusage: {\n\t\t\tinput: 0,\n\t\t\toutput: 0,\n\t\t\tcacheRead: 0,\n\t\t\tcacheWrite: 0,\n\t\t\ttotalTokens: 0,\n\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t},\n\t\tstopReason: \"stop\",\n\t\ttimestamp: Date.now(),\n\t};\n}\n\nfunction createMistralToolCallIdNormalizer(): (id: string) => string {\n\tconst idMap = new Map<string, string>();\n\tconst reverseMap = new Map<string, string>();\n\n\treturn (id: string): string => {\n\t\tconst existing = idMap.get(id);\n\t\tif (existing) return existing;\n\n\t\tlet attempt = 0;\n\t\twhile (true) {\n\t\t\tconst candidate = deriveMistralToolCallId(id, attempt);\n\t\t\tconst owner = reverseMap.get(candidate);\n\t\t\tif (!owner || owner === id) {\n\t\t\t\tidMap.set(id, candidate);\n\t\t\t\treverseMap.set(candidate, id);\n\t\t\t\treturn candidate;\n\t\t\t}\n\t\t\tattempt++;\n\t\t}\n\t};\n}\n\nfunction deriveMistralToolCallId(id: string, attempt: number): string {\n\tconst normalized = id.replace(/[^a-zA-Z0-9]/g, \"\");\n\tif (attempt === 0 && normalized.length === MISTRAL_TOOL_CALL_ID_LENGTH) return normalized;\n\tconst seedBase = normalized || id;\n\tconst seed = attempt === 0 ? seedBase : `${seedBase}:${attempt}`;\n\treturn shortHash(seed)\n\t\t.replace(/[^a-zA-Z0-9]/g, \"\")\n\t\t.slice(0, MISTRAL_TOOL_CALL_ID_LENGTH);\n}\n\nfunction formatMistralError(error: unknown): string {\n\tif (error instanceof Error) {\n\t\tconst sdkError = error as Error & { statusCode?: unknown; body?: unknown };\n\t\tconst statusCode = typeof sdkError.statusCode === \"number\" ? sdkError.statusCode : undefined;\n\t\tconst bodyText = typeof sdkError.body === \"string\" ? 
sdkError.body.trim() : undefined;\n\t\tif (statusCode !== undefined && bodyText) {\n\t\t\treturn `Mistral API error (${statusCode}): ${truncateErrorText(bodyText, MAX_MISTRAL_ERROR_BODY_CHARS)}`;\n\t\t}\n\t\tif (statusCode !== undefined) return `Mistral API error (${statusCode}): ${error.message}`;\n\t\treturn error.message;\n\t}\n\treturn safeJsonStringify(error);\n}\n\nfunction truncateErrorText(text: string, maxChars: number): string {\n\tif (text.length <= maxChars) return text;\n\treturn `${text.slice(0, maxChars)}... [truncated ${text.length - maxChars} chars]`;\n}\n\nfunction safeJsonStringify(value: unknown): string {\n\ttry {\n\t\tconst serialized = JSON.stringify(value);\n\t\treturn serialized === undefined ? String(value) : serialized;\n\t} catch {\n\t\treturn String(value);\n\t}\n}\n\nfunction buildRequestOptions(model: Model<\"mistral-conversations\">, options?: MistralOptions) {\n\tconst requestOptions: {\n\t\tsignal?: AbortSignal;\n\t\tretries: { strategy: \"none\" };\n\t\theaders?: Record<string, string>;\n\t} = {\n\t\tretries: { strategy: \"none\" },\n\t};\n\tif (options?.signal) requestOptions.signal = options.signal;\n\n\tconst headers: Record<string, string> = {};\n\tif (model.headers) Object.assign(headers, model.headers);\n\tif (options?.headers) Object.assign(headers, options.headers);\n\n\t// Mistral infrastructure uses `x-affinity` for KV-cache reuse (prefix caching).\n\t// Respect explicit caller-provided header values.\n\tif (options?.sessionId && !headers[\"x-affinity\"]) {\n\t\theaders[\"x-affinity\"] = options.sessionId;\n\t}\n\n\tif (Object.keys(headers).length > 0) {\n\t\trequestOptions.headers = headers;\n\t}\n\n\treturn requestOptions;\n}\n\nfunction buildChatPayload(\n\tmodel: Model<\"mistral-conversations\">,\n\tcontext: Context,\n\tmessages: Message[],\n\toptions?: MistralOptions,\n): ChatCompletionStreamRequest {\n\tconst payload: ChatCompletionStreamRequest = {\n\t\tmodel: model.id,\n\t\tstream: true,\n\t\tmessages: 
toChatMessages(messages, model.input.includes(\"image\")),\n\t};\n\n\tif (context.tools?.length) payload.tools = toFunctionTools(context.tools);\n\tif (options?.temperature !== undefined) payload.temperature = options.temperature;\n\tif (options?.maxTokens !== undefined) payload.maxTokens = options.maxTokens;\n\tif (options?.toolChoice) payload.toolChoice = mapToolChoice(options.toolChoice);\n\tif (options?.promptMode) payload.promptMode = options.promptMode;\n\tif (options?.reasoningEffort) payload.reasoningEffort = options.reasoningEffort;\n\n\tif (context.systemPrompt) {\n\t\tpayload.messages.unshift({\n\t\t\trole: \"system\",\n\t\t\tcontent: sanitizeSurrogates(context.systemPrompt),\n\t\t});\n\t}\n\n\treturn payload;\n}\n\nasync function consumeChatStream(\n\tmodel: Model<\"mistral-conversations\">,\n\toutput: AssistantMessage,\n\tstream: AssistantMessageEventStream,\n\tmistralStream: AsyncIterable<CompletionEvent>,\n): Promise<void> {\n\tlet currentBlock: TextContent | ThinkingContent | null = null;\n\tconst blocks = output.content;\n\tconst blockIndex = () => blocks.length - 1;\n\tconst toolBlocksByKey = new Map<string, number>();\n\n\tconst finishCurrentBlock = (block?: typeof currentBlock) => {\n\t\tif (!block) return;\n\t\tif (block.type === \"text\") {\n\t\t\tstream.push({\n\t\t\t\ttype: \"text_end\",\n\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\tcontent: block.text,\n\t\t\t\tpartial: output,\n\t\t\t});\n\t\t\treturn;\n\t\t}\n\t\tif (block.type === \"thinking\") {\n\t\t\tstream.push({\n\t\t\t\ttype: \"thinking_end\",\n\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\tcontent: block.thinking,\n\t\t\t\tpartial: output,\n\t\t\t});\n\t\t}\n\t};\n\n\tfor await (const event of mistralStream) {\n\t\tconst chunk = event.data;\n\t\t// Mistral's streamed CompletionChunk carries an id field. 
Keep the first non-empty one,\n\t\t// mirroring how OpenAI-style streaming exposes a stable response identifier per stream.\n\t\toutput.responseId ||= chunk.id;\n\n\t\tif (chunk.usage) {\n\t\t\toutput.usage.input = chunk.usage.promptTokens || 0;\n\t\t\toutput.usage.output = chunk.usage.completionTokens || 0;\n\t\t\toutput.usage.cacheRead = 0;\n\t\t\toutput.usage.cacheWrite = 0;\n\t\t\toutput.usage.totalTokens = chunk.usage.totalTokens || output.usage.input + output.usage.output;\n\t\t\tcalculateCost(model, output.usage);\n\t\t}\n\n\t\tconst choice = chunk.choices[0];\n\t\tif (!choice) continue;\n\n\t\tif (choice.finishReason) {\n\t\t\toutput.stopReason = mapChatStopReason(choice.finishReason);\n\t\t}\n\n\t\tconst delta = choice.delta;\n\t\tif (delta.content !== null && delta.content !== undefined) {\n\t\t\tconst contentItems = typeof delta.content === \"string\" ? [delta.content] : delta.content;\n\t\t\tfor (const item of contentItems) {\n\t\t\t\tif (typeof item === \"string\") {\n\t\t\t\t\tconst textDelta = sanitizeSurrogates(item);\n\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"text\") {\n\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t}\n\t\t\t\t\tcurrentBlock.text += textDelta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: textDelta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tif (item.type === \"thinking\") {\n\t\t\t\t\tconst deltaText = item.thinking\n\t\t\t\t\t\t.map((part) => (\"text\" in part ? 
part.text : \"\"))\n\t\t\t\t\t\t.filter((text) => text.length > 0)\n\t\t\t\t\t\t.join(\"\");\n\t\t\t\t\tconst thinkingDelta = sanitizeSurrogates(deltaText);\n\t\t\t\t\tif (!thinkingDelta) continue;\n\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"thinking\") {\n\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t}\n\t\t\t\t\tcurrentBlock.thinking += thinkingDelta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: thinkingDelta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\tconst textDelta = sanitizeSurrogates(item.text);\n\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"text\") {\n\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t}\n\t\t\t\t\tcurrentBlock.text += textDelta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: textDelta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tconst toolCalls = delta.toolCalls || [];\n\t\tfor (const toolCall of toolCalls) {\n\t\t\tif (currentBlock) {\n\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\tcurrentBlock = null;\n\t\t\t}\n\t\t\tconst callId =\n\t\t\t\ttoolCall.id && toolCall.id !== \"null\"\n\t\t\t\t\t? toolCall.id\n\t\t\t\t\t: deriveMistralToolCallId(`toolcall:${toolCall.index ?? 
0}`, 0);\n\t\t\tconst key = `${callId}:${toolCall.index || 0}`;\n\t\t\tconst existingIndex = toolBlocksByKey.get(key);\n\t\t\tlet block: (ToolCall & { partialArgs?: string }) | undefined;\n\n\t\t\tif (existingIndex !== undefined) {\n\t\t\t\tconst existing = output.content[existingIndex];\n\t\t\t\tif (existing?.type === \"toolCall\") {\n\t\t\t\t\tblock = existing as ToolCall & { partialArgs?: string };\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (!block) {\n\t\t\t\tblock = {\n\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\tid: callId,\n\t\t\t\t\tname: toolCall.function.name,\n\t\t\t\t\targuments: {},\n\t\t\t\t\tpartialArgs: \"\",\n\t\t\t\t};\n\t\t\t\toutput.content.push(block);\n\t\t\t\ttoolBlocksByKey.set(key, output.content.length - 1);\n\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t}\n\n\t\t\tconst argsDelta =\n\t\t\t\ttypeof toolCall.function.arguments === \"string\"\n\t\t\t\t\t? toolCall.function.arguments\n\t\t\t\t\t: JSON.stringify(toolCall.function.arguments || {});\n\t\t\tblock.partialArgs = (block.partialArgs || \"\") + argsDelta;\n\t\t\tblock.arguments = parseStreamingJson<Record<string, unknown>>(block.partialArgs);\n\t\t\tstream.push({\n\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\tcontentIndex: toolBlocksByKey.get(key)!,\n\t\t\t\tdelta: argsDelta,\n\t\t\t\tpartial: output,\n\t\t\t});\n\t\t}\n\t}\n\n\tfinishCurrentBlock(currentBlock);\n\tfor (const index of toolBlocksByKey.values()) {\n\t\tconst block = output.content[index];\n\t\tif (block.type !== \"toolCall\") continue;\n\t\tconst toolBlock = block as ToolCall & { partialArgs?: string };\n\t\ttoolBlock.arguments = parseStreamingJson<Record<string, unknown>>(toolBlock.partialArgs);\n\t\t// Finalize in-place and strip the scratch buffer so replay only\n\t\t// carries parsed arguments.\n\t\tdelete toolBlock.partialArgs;\n\t\tstream.push({\n\t\t\ttype: \"toolcall_end\",\n\t\t\tcontentIndex: index,\n\t\t\ttoolCall: toolBlock,\n\t\t\tpartial: 
output,\n\t\t});\n\t}\n}\n\nfunction toFunctionTools(tools: Tool[]): Array<FunctionTool & { type: \"function\" }> {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: stripSymbolKeys(tool.parameters) as Record<string, unknown>,\n\t\t\tstrict: false,\n\t\t},\n\t}));\n}\n\nfunction stripSymbolKeys(value: unknown): unknown {\n\tif (Array.isArray(value)) {\n\t\treturn value.map((item) => stripSymbolKeys(item));\n\t}\n\n\tif (value && typeof value === \"object\") {\n\t\tconst result: Record<string, unknown> = {};\n\t\tfor (const [key, entry] of Object.entries(value)) {\n\t\t\tresult[key] = stripSymbolKeys(entry);\n\t\t}\n\t\treturn result;\n\t}\n\n\treturn value;\n}\n\nfunction toChatMessages(messages: Message[], supportsImages: boolean): ChatCompletionStreamRequestMessage[] {\n\tconst result: ChatCompletionStreamRequestMessage[] = [];\n\n\tfor (const msg of messages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tresult.push({ role: \"user\", content: sanitizeSurrogates(msg.content) });\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tconst hadImages = msg.content.some((item) => item.type === \"image\");\n\t\t\tconst content: ContentChunk[] = msg.content\n\t\t\t\t.filter((item) => item.type === \"text\" || supportsImages)\n\t\t\t\t.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") return { type: \"text\", text: sanitizeSurrogates(item.text) };\n\t\t\t\t\treturn { type: \"image_url\", imageUrl: `data:${item.mimeType};base64,${item.data}` };\n\t\t\t\t});\n\t\t\tif (content.length > 0) {\n\t\t\t\tresult.push({ role: \"user\", content });\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (hadImages && !supportsImages) {\n\t\t\t\tresult.push({ role: \"user\", content: \"(image omitted: model does not support images)\" });\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (msg.role === \"assistant\") {\n\t\t\tconst contentParts: ContentChunk[] = 
[];\n\t\t\tconst toolCalls: Array<{ id: string; type: \"function\"; function: { name: string; arguments: string } }> = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tif (block.text.trim().length > 0) {\n\t\t\t\t\t\tcontentParts.push({ type: \"text\", text: sanitizeSurrogates(block.text) });\n\t\t\t\t\t}\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (block.type === \"thinking\") {\n\t\t\t\t\tif (block.thinking.trim().length > 0) {\n\t\t\t\t\t\tcontentParts.push({\n\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\tthinking: [{ type: \"text\", text: sanitizeSurrogates(block.thinking) }],\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\ttoolCalls.push({\n\t\t\t\t\tid: block.id,\n\t\t\t\t\ttype: \"function\",\n\t\t\t\t\tfunction: { name: block.name, arguments: JSON.stringify(block.arguments || {}) },\n\t\t\t\t});\n\t\t\t}\n\n\t\t\tconst assistantMessage: ChatCompletionStreamRequestMessage = { role: \"assistant\" };\n\t\t\tif (contentParts.length > 0) assistantMessage.content = contentParts;\n\t\t\tif (toolCalls.length > 0) assistantMessage.toolCalls = toolCalls;\n\t\t\tif (contentParts.length > 0 || toolCalls.length > 0) result.push(assistantMessage);\n\t\t\tcontinue;\n\t\t}\n\n\t\tconst toolContent: ContentChunk[] = [];\n\t\tconst textResult = msg.content\n\t\t\t.filter((part) => part.type === \"text\")\n\t\t\t.map((part) => (part.type === \"text\" ? 
sanitizeSurrogates(part.text) : \"\"))\n\t\t\t.join(\"\\n\");\n\t\tconst hasImages = msg.content.some((part) => part.type === \"image\");\n\t\tconst toolText = buildToolResultText(textResult, hasImages, supportsImages, msg.isError);\n\t\ttoolContent.push({ type: \"text\", text: toolText });\n\t\tfor (const part of msg.content) {\n\t\t\tif (!supportsImages) continue;\n\t\t\tif (part.type !== \"image\") continue;\n\t\t\ttoolContent.push({\n\t\t\t\ttype: \"image_url\",\n\t\t\t\timageUrl: `data:${part.mimeType};base64,${part.data}`,\n\t\t\t});\n\t\t}\n\t\tresult.push({\n\t\t\trole: \"tool\",\n\t\t\ttoolCallId: msg.toolCallId,\n\t\t\tname: msg.toolName,\n\t\t\tcontent: toolContent,\n\t\t});\n\t}\n\n\treturn result;\n}\n\nfunction buildToolResultText(text: string, hasImages: boolean, supportsImages: boolean, isError: boolean): string {\n\tconst trimmed = text.trim();\n\tconst errorPrefix = isError ? \"[tool error] \" : \"\";\n\n\tif (trimmed.length > 0) {\n\t\tconst imageSuffix = hasImages && !supportsImages ? \"\\n[tool image omitted: model does not support images]\" : \"\";\n\t\treturn `${errorPrefix}${trimmed}${imageSuffix}`;\n\t}\n\n\tif (hasImages) {\n\t\tif (supportsImages) {\n\t\t\treturn isError ? \"[tool error] (see attached image)\" : \"(see attached image)\";\n\t\t}\n\t\treturn isError\n\t\t\t? \"[tool error] (image omitted: model does not support images)\"\n\t\t\t: \"(image omitted: model does not support images)\";\n\t}\n\n\treturn isError ? 
\"[tool error] (no tool output)\" : \"(no tool output)\";\n}\n\nfunction usesReasoningEffort(model: Model<\"mistral-conversations\">): boolean {\n\treturn model.id === \"mistral-small-2603\" || model.id === \"mistral-small-latest\" || model.id === \"mistral-medium-3.5\";\n}\n\nfunction usesPromptModeReasoning(model: Model<\"mistral-conversations\">): boolean {\n\treturn model.reasoning && !usesReasoningEffort(model);\n}\n\nfunction mapReasoningEffort(\n\tmodel: Model<\"mistral-conversations\">,\n\tlevel: Exclude<SimpleStreamOptions[\"reasoning\"], undefined>,\n): MistralReasoningEffort {\n\treturn (model.thinkingLevelMap?.[level] ?? \"high\") as MistralReasoningEffort;\n}\n\nfunction mapToolChoice(\n\tchoice: MistralOptions[\"toolChoice\"],\n): \"auto\" | \"none\" | \"any\" | \"required\" | { type: \"function\"; function: { name: string } } | undefined {\n\tif (!choice) return undefined;\n\tif (choice === \"auto\" || choice === \"none\" || choice === \"any\" || choice === \"required\") {\n\t\treturn choice as any;\n\t}\n\treturn {\n\t\ttype: \"function\",\n\t\tfunction: { name: choice.function.name },\n\t};\n}\n\nfunction mapChatStopReason(reason: string | null): StopReason {\n\tif (reason === null) return \"stop\";\n\tswitch (reason) {\n\t\tcase \"stop\":\n\t\t\treturn \"stop\";\n\t\tcase \"length\":\n\t\tcase \"model_length\":\n\t\t\treturn \"length\";\n\t\tcase \"tool_calls\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"error\":\n\t\t\treturn \"error\";\n\t\tdefault:\n\t\t\treturn \"stop\";\n\t}\n}\n"]}
|
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
import { Mistral } from "@mistralai/mistralai";
|
|
2
2
|
import { getEnvApiKey } from "../env-api-keys.js";
|
|
3
|
-
import { calculateCost } from "../models.js";
|
|
3
|
+
import { calculateCost, clampThinkingLevel } from "../models.js";
|
|
4
4
|
import { AssistantMessageEventStream } from "../utils/event-stream.js";
|
|
5
5
|
import { shortHash } from "../utils/hash.js";
|
|
6
6
|
import { parseStreamingJson } from "../utils/json-parse.js";
|
|
7
7
|
import { sanitizeSurrogates } from "../utils/sanitize-unicode.js";
|
|
8
|
-
import { buildBaseOptions
|
|
8
|
+
import { buildBaseOptions } from "./simple-options.js";
|
|
9
9
|
import { transformMessages } from "./transform-messages.js";
|
|
10
10
|
const MISTRAL_TOOL_CALL_ID_LENGTH = 9;
|
|
11
11
|
const MAX_MISTRAL_ERROR_BODY_CHARS = 4000;
|
|
@@ -28,8 +28,11 @@ export const streamMistral = (model, context, options) => {
|
|
|
28
28
|
});
|
|
29
29
|
const normalizeMistralToolCallId = createMistralToolCallIdNormalizer();
|
|
30
30
|
const transformedMessages = transformMessages(context.messages, model, (id) => normalizeMistralToolCallId(id));
|
|
31
|
-
|
|
32
|
-
options?.onPayload?.(payload);
|
|
31
|
+
let payload = buildChatPayload(model, context, transformedMessages, options);
|
|
32
|
+
const nextPayload = await options?.onPayload?.(payload, model);
|
|
33
|
+
if (nextPayload !== undefined) {
|
|
34
|
+
payload = nextPayload;
|
|
35
|
+
}
|
|
33
36
|
const mistralStream = await mistral.chat.stream(payload, buildRequestOptions(model, options));
|
|
34
37
|
stream.push({ type: "start", partial: output });
|
|
35
38
|
await consumeChatStream(model, output, stream, mistralStream);
|
|
@@ -43,6 +46,10 @@ export const streamMistral = (model, context, options) => {
|
|
|
43
46
|
stream.end();
|
|
44
47
|
}
|
|
45
48
|
catch (error) {
|
|
49
|
+
for (const block of output.content) {
|
|
50
|
+
// partialArgs is only a streaming scratch buffer; never persist it.
|
|
51
|
+
delete block.partialArgs;
|
|
52
|
+
}
|
|
46
53
|
output.stopReason = options?.signal?.aborted ? "aborted" : "error";
|
|
47
54
|
output.errorMessage = formatMistralError(error);
|
|
48
55
|
stream.push({ type: "error", reason: output.stopReason, error: output });
|
|
@@ -60,10 +67,13 @@ export const streamSimpleMistral = (model, context, options) => {
|
|
|
60
67
|
throw new Error(`No API key for provider: ${model.provider}`);
|
|
61
68
|
}
|
|
62
69
|
const base = buildBaseOptions(model, options, apiKey);
|
|
63
|
-
const
|
|
70
|
+
const clampedReasoning = options?.reasoning ? clampThinkingLevel(model, options.reasoning) : undefined;
|
|
71
|
+
const reasoning = clampedReasoning === "off" ? undefined : clampedReasoning;
|
|
72
|
+
const shouldUseReasoning = model.reasoning && reasoning !== undefined;
|
|
64
73
|
return streamMistral(model, context, {
|
|
65
74
|
...base,
|
|
66
|
-
promptMode:
|
|
75
|
+
promptMode: shouldUseReasoning && usesPromptModeReasoning(model) ? "reasoning" : undefined,
|
|
76
|
+
reasoningEffort: shouldUseReasoning && usesReasoningEffort(model) ? mapReasoningEffort(model, reasoning) : undefined,
|
|
67
77
|
});
|
|
68
78
|
};
|
|
69
79
|
function createOutput(model) {
|
|
@@ -144,10 +154,11 @@ function safeJsonStringify(value) {
|
|
|
144
154
|
}
|
|
145
155
|
}
|
|
146
156
|
function buildRequestOptions(model, options) {
|
|
147
|
-
const requestOptions = {
|
|
157
|
+
const requestOptions = {
|
|
158
|
+
retries: { strategy: "none" },
|
|
159
|
+
};
|
|
148
160
|
if (options?.signal)
|
|
149
161
|
requestOptions.signal = options.signal;
|
|
150
|
-
requestOptions.retries = { strategy: "none" };
|
|
151
162
|
const headers = {};
|
|
152
163
|
if (model.headers)
|
|
153
164
|
Object.assign(headers, model.headers);
|
|
@@ -179,6 +190,8 @@ function buildChatPayload(model, context, messages, options) {
|
|
|
179
190
|
payload.toolChoice = mapToolChoice(options.toolChoice);
|
|
180
191
|
if (options?.promptMode)
|
|
181
192
|
payload.promptMode = options.promptMode;
|
|
193
|
+
if (options?.reasoningEffort)
|
|
194
|
+
payload.reasoningEffort = options.reasoningEffort;
|
|
182
195
|
if (context.systemPrompt) {
|
|
183
196
|
payload.messages.unshift({
|
|
184
197
|
role: "system",
|
|
@@ -215,6 +228,9 @@ async function consumeChatStream(model, output, stream, mistralStream) {
|
|
|
215
228
|
};
|
|
216
229
|
for await (const event of mistralStream) {
|
|
217
230
|
const chunk = event.data;
|
|
231
|
+
// Mistral's streamed CompletionChunk carries an id field. Keep the first non-empty one,
|
|
232
|
+
// mirroring how OpenAI-style streaming exposes a stable response identifier per stream.
|
|
233
|
+
output.responseId ||= chunk.id;
|
|
218
234
|
if (chunk.usage) {
|
|
219
235
|
output.usage.input = chunk.usage.promptTokens || 0;
|
|
220
236
|
output.usage.output = chunk.usage.completionTokens || 0;
|
|
@@ -341,6 +357,8 @@ async function consumeChatStream(model, output, stream, mistralStream) {
|
|
|
341
357
|
continue;
|
|
342
358
|
const toolBlock = block;
|
|
343
359
|
toolBlock.arguments = parseStreamingJson(toolBlock.partialArgs);
|
|
360
|
+
// Finalize in-place and strip the scratch buffer so replay only
|
|
361
|
+
// carries parsed arguments.
|
|
344
362
|
delete toolBlock.partialArgs;
|
|
345
363
|
stream.push({
|
|
346
364
|
type: "toolcall_end",
|
|
@@ -356,11 +374,24 @@ function toFunctionTools(tools) {
|
|
|
356
374
|
function: {
|
|
357
375
|
name: tool.name,
|
|
358
376
|
description: tool.description,
|
|
359
|
-
parameters: tool.parameters,
|
|
377
|
+
parameters: stripSymbolKeys(tool.parameters),
|
|
360
378
|
strict: false,
|
|
361
379
|
},
|
|
362
380
|
}));
|
|
363
381
|
}
|
|
382
|
+
function stripSymbolKeys(value) {
|
|
383
|
+
if (Array.isArray(value)) {
|
|
384
|
+
return value.map((item) => stripSymbolKeys(item));
|
|
385
|
+
}
|
|
386
|
+
if (value && typeof value === "object") {
|
|
387
|
+
const result = {};
|
|
388
|
+
for (const [key, entry] of Object.entries(value)) {
|
|
389
|
+
result[key] = stripSymbolKeys(entry);
|
|
390
|
+
}
|
|
391
|
+
return result;
|
|
392
|
+
}
|
|
393
|
+
return value;
|
|
394
|
+
}
|
|
364
395
|
function toChatMessages(messages, supportsImages) {
|
|
365
396
|
const result = [];
|
|
366
397
|
for (const msg of messages) {
|
|
@@ -464,6 +495,15 @@ function buildToolResultText(text, hasImages, supportsImages, isError) {
|
|
|
464
495
|
}
|
|
465
496
|
return isError ? "[tool error] (no tool output)" : "(no tool output)";
|
|
466
497
|
}
|
|
498
|
+
function usesReasoningEffort(model) {
|
|
499
|
+
return model.id === "mistral-small-2603" || model.id === "mistral-small-latest" || model.id === "mistral-medium-3.5";
|
|
500
|
+
}
|
|
501
|
+
function usesPromptModeReasoning(model) {
|
|
502
|
+
return model.reasoning && !usesReasoningEffort(model);
|
|
503
|
+
}
|
|
504
|
+
function mapReasoningEffort(model, level) {
|
|
505
|
+
return (model.thinkingLevelMap?.[level] ?? "high");
|
|
506
|
+
}
|
|
467
507
|
function mapToolChoice(choice) {
|
|
468
508
|
if (!choice)
|
|
469
509
|
return undefined;
|