@opencompress/openclaw 3.0.6 → 3.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +67 -41
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -6,7 +6,7 @@ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require
|
|
|
6
6
|
});
|
|
7
7
|
|
|
8
8
|
// src/config.ts
|
|
9
|
-
var VERSION = "3.0.6";
|
|
9
|
+
var VERSION = "3.0.8";
|
|
10
10
|
var PROXY_PORT = 8401;
|
|
11
11
|
var PROXY_HOST = "127.0.0.1";
|
|
12
12
|
var OCC_API = "https://www.opencompress.ai/api";
|
|
@@ -63,23 +63,33 @@ function resolveUpstream(modelId, providers) {
|
|
|
63
63
|
return null;
|
|
64
64
|
}
|
|
65
65
|
const slashIdx = stripped.indexOf("/");
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
66
|
+
let upstreamProvider;
|
|
67
|
+
let upstreamModel;
|
|
68
|
+
if (slashIdx !== -1) {
|
|
69
|
+
upstreamProvider = stripped.slice(0, slashIdx);
|
|
70
|
+
upstreamModel = stripped.slice(slashIdx + 1);
|
|
71
|
+
} else {
|
|
72
|
+
const knownProviders = ["anthropic", "openai", "google", "xai", "deepseek"];
|
|
73
|
+
const matched = knownProviders.find((p) => stripped.startsWith(p + "-"));
|
|
74
|
+
if (matched) {
|
|
75
|
+
upstreamProvider = matched;
|
|
76
|
+
upstreamModel = stripped.slice(matched.length + 1);
|
|
77
|
+
} else {
|
|
78
|
+
const config2 = providers[stripped];
|
|
79
|
+
if (config2) {
|
|
80
|
+
return {
|
|
81
|
+
upstreamProvider: stripped,
|
|
82
|
+
upstreamModel: config2.models?.[0]?.id || stripped,
|
|
83
|
+
upstreamKey: config2.apiKey,
|
|
84
|
+
upstreamBaseUrl: config2.baseUrl,
|
|
85
|
+
upstreamApi: config2.api || "openai-completions"
|
|
86
|
+
};
|
|
87
|
+
}
|
|
88
|
+
const builtin2 = resolveBuiltin(stripped);
|
|
89
|
+
if (builtin2) return builtin2;
|
|
90
|
+
return null;
|
|
76
91
|
}
|
|
77
|
-
const builtin2 = resolveBuiltin(stripped);
|
|
78
|
-
if (builtin2) return builtin2;
|
|
79
|
-
return null;
|
|
80
92
|
}
|
|
81
|
-
const upstreamProvider = stripped.slice(0, slashIdx);
|
|
82
|
-
const upstreamModel = stripped.slice(slashIdx + 1);
|
|
83
93
|
const config = providers[upstreamProvider];
|
|
84
94
|
if (config) {
|
|
85
95
|
return {
|
|
@@ -491,38 +501,54 @@ function injectConfig(api) {
|
|
|
491
501
|
if (!fs.existsSync(configPath)) return;
|
|
492
502
|
const cfg = JSON.parse(fs.readFileSync(configPath, "utf-8"));
|
|
493
503
|
let changed = false;
|
|
504
|
+
if (!cfg.agents) cfg.agents = {};
|
|
505
|
+
if (!cfg.agents.defaults) cfg.agents.defaults = {};
|
|
506
|
+
if (!cfg.agents.defaults.models) cfg.agents.defaults.models = {};
|
|
494
507
|
if (!cfg.models) cfg.models = {};
|
|
495
508
|
if (!cfg.models.providers) cfg.models.providers = {};
|
|
496
|
-
const
|
|
497
|
-
const models = generateModelCatalog(providers);
|
|
498
|
-
const firstProvider = Object.values(providers).find((p) => p.api);
|
|
499
|
-
const primaryApi = firstProvider?.api || "anthropic-messages";
|
|
509
|
+
const existingModels = Object.keys(cfg.agents.defaults.models).filter((id) => !id.startsWith("opencompress/"));
|
|
500
510
|
const occKey = getApiKey(api) || "auto-provision-pending";
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
511
|
+
const hasAnthropic = existingModels.some((m) => m.startsWith("anthropic/"));
|
|
512
|
+
const hasOpenAI = existingModels.some((m) => m.startsWith("openai/"));
|
|
513
|
+
const primaryApi = hasAnthropic ? "anthropic-messages" : "openai-completions";
|
|
514
|
+
const compressedModels = [
|
|
515
|
+
{
|
|
516
|
+
id: "opencompress/auto",
|
|
517
|
+
name: "OpenCompress Auto (compressed)",
|
|
504
518
|
api: primaryApi,
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
519
|
+
reasoning: false,
|
|
520
|
+
input: ["text", "image"],
|
|
521
|
+
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
|
522
|
+
contextWindow: 2e5,
|
|
523
|
+
maxTokens: 8192
|
|
524
|
+
}
|
|
525
|
+
];
|
|
526
|
+
for (const modelId of existingModels) {
|
|
527
|
+
const compressedId = `opencompress/${modelId.replace("/", "-")}`;
|
|
528
|
+
const provider = modelId.split("/")[0];
|
|
529
|
+
const modelApi = provider === "anthropic" ? "anthropic-messages" : "openai-completions";
|
|
530
|
+
const modelName = modelId.split("/").slice(1).join("/");
|
|
531
|
+
compressedModels.push({
|
|
532
|
+
id: compressedId,
|
|
533
|
+
name: `${modelName} (compressed)`,
|
|
534
|
+
api: modelApi,
|
|
535
|
+
reasoning: modelId.includes("opus") || modelId.includes("o1") || modelId.includes("o3"),
|
|
536
|
+
input: ["text", "image"],
|
|
537
|
+
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
|
538
|
+
contextWindow: 2e5,
|
|
539
|
+
maxTokens: 8192
|
|
540
|
+
});
|
|
518
541
|
}
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
542
|
+
cfg.models.providers.opencompress = {
|
|
543
|
+
baseUrl: `http://${PROXY_HOST}:${PROXY_PORT}/v1`,
|
|
544
|
+
api: primaryApi,
|
|
545
|
+
apiKey: occKey,
|
|
546
|
+
models: compressedModels
|
|
547
|
+
};
|
|
548
|
+
changed = true;
|
|
549
|
+
for (const m of compressedModels) {
|
|
523
550
|
if (!cfg.agents.defaults.models[m.id]) {
|
|
524
551
|
cfg.agents.defaults.models[m.id] = {};
|
|
525
|
-
changed = true;
|
|
526
552
|
}
|
|
527
553
|
}
|
|
528
554
|
if (!cfg.plugins) cfg.plugins = {};
|