openmagic 0.8.1 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +16 -6
- package/dist/cli.js.map +1 -1
- package/dist/toolbar/index.global.js +129 -159
- package/dist/toolbar/index.global.js.map +1 -1
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -1135,16 +1135,26 @@ async function chatOpenAICompatible(provider, model, apiKey, messages, context,
|
|
|
1135
1135
|
});
|
|
1136
1136
|
}
|
|
1137
1137
|
}
|
|
1138
|
+
const usesCompletionTokens = provider === "openai" && (model.startsWith("gpt-5") || model.startsWith("o3") || model.startsWith("o4") || model.startsWith("codex"));
|
|
1138
1139
|
const body = {
|
|
1139
1140
|
model,
|
|
1140
1141
|
messages: apiMessages,
|
|
1141
|
-
stream: true
|
|
1142
|
-
max_tokens: 4096
|
|
1142
|
+
stream: true
|
|
1143
1143
|
};
|
|
1144
|
+
if (usesCompletionTokens) {
|
|
1145
|
+
body.max_completion_tokens = 4096;
|
|
1146
|
+
} else {
|
|
1147
|
+
body.max_tokens = 4096;
|
|
1148
|
+
}
|
|
1144
1149
|
const modelInfo = providerConfig.models.find((m) => m.id === model);
|
|
1145
1150
|
if (modelInfo?.thinking?.supported && modelInfo.thinking.paramType === "level") {
|
|
1146
1151
|
body.reasoning_effort = modelInfo.thinking.defaultLevel || "medium";
|
|
1147
|
-
|
|
1152
|
+
const limit = Math.min(modelInfo.maxOutput, 16384);
|
|
1153
|
+
if (usesCompletionTokens) {
|
|
1154
|
+
body.max_completion_tokens = limit;
|
|
1155
|
+
} else {
|
|
1156
|
+
body.max_tokens = limit;
|
|
1157
|
+
}
|
|
1148
1158
|
}
|
|
1149
1159
|
try {
|
|
1150
1160
|
const headers = {
|
|
@@ -1514,7 +1524,7 @@ function createOpenMagicServer(proxyPort, roots) {
|
|
|
1514
1524
|
"Content-Type": "application/json",
|
|
1515
1525
|
"Access-Control-Allow-Origin": "*"
|
|
1516
1526
|
});
|
|
1517
|
-
res.end(JSON.stringify({ status: "ok", version: "0.8.1" }));
|
|
1527
|
+
res.end(JSON.stringify({ status: "ok", version: "0.9.0" }));
|
|
1518
1528
|
return;
|
|
1519
1529
|
}
|
|
1520
1530
|
res.writeHead(404);
|
|
@@ -1572,7 +1582,7 @@ async function handleMessage(ws, msg, state, roots, _proxyPort) {
|
|
|
1572
1582
|
id: msg.id,
|
|
1573
1583
|
type: "handshake.ok",
|
|
1574
1584
|
payload: {
|
|
1575
|
-
version: "0.8.1",
|
|
1585
|
+
version: "0.9.0",
|
|
1576
1586
|
roots,
|
|
1577
1587
|
config: {
|
|
1578
1588
|
provider: config.provider,
|
|
@@ -1903,7 +1913,7 @@ process.on("uncaughtException", (err) => {
|
|
|
1903
1913
|
process.exit(1);
|
|
1904
1914
|
});
|
|
1905
1915
|
var childProcesses = [];
|
|
1906
|
-
var VERSION = "0.8.1";
|
|
1916
|
+
var VERSION = "0.9.0";
|
|
1907
1917
|
function ask(question) {
|
|
1908
1918
|
const rl = createInterface({ input: process.stdin, output: process.stdout });
|
|
1909
1919
|
return new Promise((resolve3) => {
|