clawmux 0.1.7 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -175,9 +175,35 @@ async function dispatch(req) {
175
175
  }
176
176
  return jsonResponse({ error: "not found" }, 404);
177
177
  }
178
+ function setRouteHandler(path, handler) {
179
+ customHandlers.set(path, handler);
180
+ }
181
+ function clearCustomHandlers() {
182
+ customHandlers.clear();
183
+ }
178
184
 
179
185
  // src/utils/runtime.ts
186
+ var import_promises = require("node:fs/promises");
180
187
  var isBun = typeof globalThis.Bun !== "undefined";
188
+ async function readFileText(path) {
189
+ if (isBun) {
190
+ const bun = globalThis.Bun;
191
+ return bun.file(path).text();
192
+ }
193
+ return import_promises.readFile(path, "utf-8");
194
+ }
195
+ async function fileExists(path) {
196
+ if (isBun) {
197
+ const bun = globalThis.Bun;
198
+ return bun.file(path).exists();
199
+ }
200
+ try {
201
+ await import_promises.access(path);
202
+ return true;
203
+ } catch {
204
+ return false;
205
+ }
206
+ }
181
207
 
182
208
  // src/proxy/server.ts
183
209
  function createServer(config) {
@@ -240,8 +266,3200 @@ function createNodeServer(config) {
240
266
  };
241
267
  }
242
268
 
243
- // src/index.ts
244
- var port = parseInt(process.env.CLAWMUX_PORT ?? "3456", 10);
245
- var server = createServer({ port, host: "127.0.0.1" });
246
- server.start();
247
- console.log(`[clawmux] Proxy server running on http://127.0.0.1:${port}`);
269
+ // src/config/loader.ts
270
+ var import_promises2 = require("node:fs/promises");
271
+ var import_node_path = require("node:path");
272
+
273
+ // src/config/defaults.ts
274
+ var DEFAULT_CONFIG = {
275
+ compression: {
276
+ threshold: 0.75,
277
+ model: "",
278
+ targetRatio: 0.6
279
+ },
280
+ routing: {
281
+ models: {
282
+ LIGHT: "",
283
+ MEDIUM: "",
284
+ HEAVY: ""
285
+ },
286
+ contextWindows: {}
287
+ },
288
+ server: {
289
+ port: 3456,
290
+ host: "127.0.0.1"
291
+ }
292
+ };
293
+ function applyDefaults(partial) {
294
+ const defaults = DEFAULT_CONFIG;
295
+ return {
296
+ compression: {
297
+ threshold: partial.compression.threshold ?? defaults.compression.threshold,
298
+ model: partial.compression.model ?? defaults.compression.model,
299
+ targetRatio: partial.compression.targetRatio ?? defaults.compression.targetRatio
300
+ },
301
+ routing: {
302
+ models: {
303
+ LIGHT: partial.routing.models.LIGHT ?? defaults.routing.models.LIGHT,
304
+ MEDIUM: partial.routing.models.MEDIUM ?? defaults.routing.models.MEDIUM,
305
+ HEAVY: partial.routing.models.HEAVY ?? defaults.routing.models.HEAVY
306
+ },
307
+ contextWindows: { ...defaults.routing.contextWindows, ...partial.routing.contextWindows }
308
+ },
309
+ server: {
310
+ port: partial.server?.port ?? defaults.server.port,
311
+ host: partial.server?.host ?? defaults.server.host
312
+ }
313
+ };
314
+ }
315
+
316
+ // src/config/validator.ts
317
+ function isObject(value) {
318
+ return typeof value === "object" && value !== null && !Array.isArray(value);
319
+ }
320
+ function requireNumber(errors, path, value) {
321
+ if (typeof value !== "number") {
322
+ errors.push(`${path}: must be a number, got ${typeof value}`);
323
+ return false;
324
+ }
325
+ return true;
326
+ }
327
+ function requireString(errors, path, value) {
328
+ if (typeof value !== "string") {
329
+ errors.push(`${path}: must be a string, got ${typeof value}`);
330
+ return false;
331
+ }
332
+ return true;
333
+ }
334
+ function requireInRange(errors, path, value, min, max) {
335
+ if (value < min || value > max) {
336
+ errors.push(`${path}: must be between ${min} and ${max}, got ${value}`);
337
+ }
338
+ }
339
+ function checkRequiredString(errors, errorPath, obj, key) {
340
+ if (!isObject(obj)) {
341
+ errors.push(`${errorPath}: is required`);
342
+ return;
343
+ }
344
+ const value = obj[key];
345
+ if (typeof value !== "string" || value === "") {
346
+ errors.push(`${errorPath}: is required`);
347
+ return;
348
+ }
349
+ return value;
350
+ }
351
+ function checkProviderModelFormat(errors, path, model) {
352
+ if (!model.includes("/")) {
353
+ errors.push(`${path} must be in 'provider/model' format (e.g., 'anthropic/claude-sonnet-4-20250514')`);
354
+ return;
355
+ }
356
+ const providerName = model.split("/", 2)[0];
357
+ if (providerName.toLowerCase().startsWith("clawmux-")) {
358
+ errors.push(`Self-referencing model detected: ${model}. This would cause an infinite routing loop.`);
359
+ }
360
+ }
361
+ function checkOptionalNumberRange(errors, path, value, min, max) {
362
+ if (requireNumber(errors, path, value)) {
363
+ requireInRange(errors, path, value, min, max);
364
+ }
365
+ }
366
+ function validateConfig(raw) {
367
+ const errors = [];
368
+ const obj = isObject(raw) ? raw : {};
369
+ const compression = isObject(obj.compression) ? obj.compression : {};
370
+ const threshold = compression.threshold;
371
+ if (threshold === undefined) {
372
+ errors.push("compression.threshold: is required");
373
+ } else {
374
+ checkOptionalNumberRange(errors, "compression.threshold", threshold, 0.1, 0.95);
375
+ }
376
+ if (compression.model === undefined || compression.model === "") {
377
+ errors.push("compression.model: is required");
378
+ } else if (requireString(errors, "compression.model", compression.model)) {
379
+ checkProviderModelFormat(errors, "compression.model", compression.model);
380
+ }
381
+ if (compression.targetRatio !== undefined) {
382
+ checkOptionalNumberRange(errors, "compression.targetRatio", compression.targetRatio, 0.2, 0.9);
383
+ }
384
+ const routing = isObject(obj.routing) ? obj.routing : {};
385
+ const models = isObject(routing.models) ? routing.models : {};
386
+ const light = checkRequiredString(errors, "routing.models.LIGHT", models, "LIGHT");
387
+ const medium = checkRequiredString(errors, "routing.models.MEDIUM", models, "MEDIUM");
388
+ const heavy = checkRequiredString(errors, "routing.models.HEAVY", models, "HEAVY");
389
+ if (light)
390
+ checkProviderModelFormat(errors, "routing.models.LIGHT", light);
391
+ if (medium)
392
+ checkProviderModelFormat(errors, "routing.models.MEDIUM", medium);
393
+ if (heavy)
394
+ checkProviderModelFormat(errors, "routing.models.HEAVY", heavy);
395
+ if (routing.contextWindows !== undefined) {
396
+ if (!isObject(routing.contextWindows)) {
397
+ errors.push("routing.contextWindows: must be an object");
398
+ } else {
399
+ for (const [key, value] of Object.entries(routing.contextWindows)) {
400
+ if (typeof key !== "string") {
401
+ errors.push(`routing.contextWindows: keys must be strings`);
402
+ }
403
+ if (typeof value !== "number" || value <= 0) {
404
+ errors.push(`routing.contextWindows["${key}"]: must be a positive number, got ${String(value)}`);
405
+ }
406
+ }
407
+ }
408
+ }
409
+ const server = obj.server !== undefined && isObject(obj.server) ? obj.server : null;
410
+ if (server !== null && server.port !== undefined) {
411
+ checkOptionalNumberRange(errors, "server.port", server.port, 1024, 65535);
412
+ }
413
+ if (errors.length > 0) {
414
+ return { valid: false, errors };
415
+ }
416
+ return { valid: true, config: applyDefaults(obj) };
417
+ }
418
+
419
+ // src/config/loader.ts
420
+ function findConfigPath() {
421
+ const envPath = process.env.CLAWMUX_CONFIG;
422
+ if (envPath) {
423
+ return import_node_path.resolve(envPath);
424
+ }
425
+ return import_node_path.resolve(process.cwd(), "clawmux.json");
426
+ }
427
+ async function loadConfig(configPath) {
428
+ const filePath = configPath ?? findConfigPath();
429
+ let raw;
430
+ try {
431
+ raw = await import_promises2.readFile(filePath, "utf-8");
432
+ } catch (err) {
433
+ const message = err instanceof Error ? err.message : String(err);
434
+ return {
435
+ valid: false,
436
+ errors: [`Failed to read config file at ${filePath}: ${message}`]
437
+ };
438
+ }
439
+ let parsed;
440
+ try {
441
+ parsed = JSON.parse(raw);
442
+ } catch (err) {
443
+ const message = err instanceof Error ? err.message : String(err);
444
+ return {
445
+ valid: false,
446
+ errors: [`Invalid JSON in config file ${filePath}: ${message}`]
447
+ };
448
+ }
449
+ return validateConfig(parsed);
450
+ }
451
+
452
+ // src/config/watcher.ts
453
+ var import_node_fs = require("node:fs");
454
+ var import_promises3 = require("node:fs/promises");
455
+ function createConfigWatcher(configPath, onReload, options) {
456
+ const debounceMs = options?.debounceMs ?? 2000;
457
+ let watcher = null;
458
+ let debounceTimer = null;
459
+ let reloading = false;
460
+ let pendingReload = false;
461
+ async function reloadConfig() {
462
+ if (reloading) {
463
+ pendingReload = true;
464
+ return;
465
+ }
466
+ reloading = true;
467
+ try {
468
+ let raw;
469
+ try {
470
+ raw = await import_promises3.readFile(configPath, "utf-8");
471
+ } catch {
472
+ console.warn(`[config] Config file ${configPath} not found, keeping old config`);
473
+ return;
474
+ }
475
+ let parsed;
476
+ try {
477
+ parsed = JSON.parse(raw);
478
+ } catch (err) {
479
+ const message = err instanceof Error ? err.message : String(err);
480
+ console.warn(`[config] Invalid JSON in config change, ignored: ${message}`);
481
+ return;
482
+ }
483
+ const result = validateConfig(parsed);
484
+ if (result.valid) {
485
+ onReload(result.config);
486
+ console.log("[config] Reloaded clawmux.json");
487
+ } else {
488
+ console.warn(`[config] Invalid config change ignored: ${result.errors.join(", ")}`);
489
+ }
490
+ } finally {
491
+ reloading = false;
492
+ if (pendingReload) {
493
+ pendingReload = false;
494
+ scheduleReload();
495
+ }
496
+ }
497
+ }
498
+ function scheduleReload() {
499
+ if (debounceTimer !== null) {
500
+ clearTimeout(debounceTimer);
501
+ }
502
+ debounceTimer = setTimeout(() => {
503
+ debounceTimer = null;
504
+ reloadConfig();
505
+ }, debounceMs);
506
+ }
507
+ return {
508
+ start() {
509
+ if (watcher !== null)
510
+ return;
511
+ watcher = import_node_fs.watch(configPath, (eventType) => {
512
+ if (eventType === "rename") {
513
+ console.warn(`[config] Config file ${configPath} was deleted, keeping old config`);
514
+ return;
515
+ }
516
+ scheduleReload();
517
+ });
518
+ watcher.on("error", (err) => {
519
+ console.warn(`[config] Watcher error: ${err instanceof Error ? err.message : String(err)}`);
520
+ });
521
+ },
522
+ stop() {
523
+ if (debounceTimer !== null) {
524
+ clearTimeout(debounceTimer);
525
+ debounceTimer = null;
526
+ }
527
+ if (watcher !== null) {
528
+ watcher.close();
529
+ watcher = null;
530
+ }
531
+ },
532
+ isWatching() {
533
+ return watcher !== null;
534
+ }
535
+ };
536
+ }
537
+
538
+ // src/openclaw/config-reader.ts
539
+ var import_promises4 = require("node:fs/promises");
540
+ var import_node_path2 = require("node:path");
541
+ var ENV_VAR_PATTERN = /^\$\{([^}]+)\}$/;
542
+ function resolveEnvVar(value) {
543
+ const match = value.match(ENV_VAR_PATTERN);
544
+ if (match) {
545
+ return process.env[match[1]] ?? "";
546
+ }
547
+ return value;
548
+ }
549
+ function getHomeDir() {
550
+ return process.env.HOME ?? "/root";
551
+ }
552
+ function getConfigPath(override) {
553
+ if (override)
554
+ return override;
555
+ if (process.env.OPENCLAW_CONFIG_PATH)
556
+ return process.env.OPENCLAW_CONFIG_PATH;
557
+ return import_node_path2.join(getHomeDir(), ".openclaw", "openclaw.json");
558
+ }
559
+ async function readOpenClawConfig(configPath) {
560
+ const path = getConfigPath(configPath);
561
+ let text;
562
+ try {
563
+ text = await import_promises4.readFile(path, "utf-8");
564
+ } catch (err) {
565
+ const code = err.code;
566
+ if (code === "ENOENT" || code === "ENOTDIR") {
567
+ throw new Error(`openclaw.json not found at ${path}. Ensure OpenClaw is installed.`);
568
+ }
569
+ throw err;
570
+ }
571
+ try {
572
+ return JSON.parse(text);
573
+ } catch (err) {
574
+ const message = err instanceof Error ? err.message : String(err);
575
+ throw new Error(`Failed to parse openclaw.json: ${message}`);
576
+ }
577
+ }
578
+ function getAuthProfilesPath(agentId) {
579
+ const id = agentId ?? "main";
580
+ return import_node_path2.join(getHomeDir(), ".openclaw", "agents", id, "agent", "auth-profiles.json");
581
+ }
582
+ async function readAuthProfiles(agentId, profilesPath) {
583
+ const path = profilesPath ?? getAuthProfilesPath(agentId);
584
+ let text;
585
+ try {
586
+ text = await import_promises4.readFile(path, "utf-8");
587
+ } catch {
588
+ return [];
589
+ }
590
+ try {
591
+ const parsed = JSON.parse(text);
592
+ if (Array.isArray(parsed))
593
+ return parsed;
594
+ if (parsed && typeof parsed === "object" && parsed.profiles) {
595
+ return Object.entries(parsed.profiles).map(([key, profile]) => ({
596
+ provider: profile.provider ?? key.split(":")[0],
597
+ apiKey: profile.access ?? profile.apiKey,
598
+ token: profile.token
599
+ }));
600
+ }
601
+ return [];
602
+ } catch (err) {
603
+ const message = err instanceof Error ? err.message : String(err);
604
+ throw new Error(`Failed to parse auth-profiles.json: ${message}`);
605
+ }
606
+ }
607
+ function getProviderConfig(provider, config) {
608
+ return config.models?.providers?.[provider];
609
+ }
610
+ function lookupContextWindowFromConfig(modelKey, config) {
611
+ const [provider, ...rest] = modelKey.split("/");
612
+ const modelId = rest.join("/");
613
+ if (!provider || !modelId)
614
+ return;
615
+ const providerConfig = config.models?.providers?.[provider];
616
+ if (!providerConfig?.models)
617
+ return;
618
+ const model = providerConfig.models.find((m) => m.id === modelId);
619
+ return model?.contextWindow;
620
+ }
621
+
622
+ // src/openclaw/model-limits.ts
623
+ var import_promises5 = require("node:fs/promises");
624
+ var import_node_path3 = require("node:path");
625
+ var DEFAULT_CONTEXT_TOKENS = 200000;
626
+ var cachedCatalog = null;
627
+ function resolveContextWindow(modelKey, clawmuxContextWindows, openclawConfig, piAiCatalog) {
628
+ const fromClawmux = clawmuxContextWindows[modelKey];
629
+ if (typeof fromClawmux === "number" && fromClawmux > 0) {
630
+ return fromClawmux;
631
+ }
632
+ const fromOpenclaw = lookupContextWindowFromConfig(modelKey, openclawConfig);
633
+ if (typeof fromOpenclaw === "number" && fromOpenclaw > 0) {
634
+ return fromOpenclaw;
635
+ }
636
+ if (piAiCatalog) {
637
+ const [provider, ...rest] = modelKey.split("/");
638
+ const modelId = rest.join("/");
639
+ if (provider && modelId) {
640
+ const providerModels = piAiCatalog[provider];
641
+ if (providerModels) {
642
+ const entry = providerModels[modelId];
643
+ if (entry && typeof entry.contextWindow === "number" && entry.contextWindow > 0) {
644
+ return entry.contextWindow;
645
+ }
646
+ }
647
+ }
648
+ }
649
+ return DEFAULT_CONTEXT_TOKENS;
650
+ }
651
+ function resolveCompressionContextWindow(routingModels, clawmuxContextWindows, openclawConfig, piAiCatalog) {
652
+ const modelKeys = [routingModels.LIGHT, routingModels.MEDIUM, routingModels.HEAVY];
653
+ const uniqueKeys = [...new Set(modelKeys.filter((k) => k !== ""))];
654
+ if (uniqueKeys.length === 0) {
655
+ return DEFAULT_CONTEXT_TOKENS;
656
+ }
657
+ let min = Infinity;
658
+ for (const key of uniqueKeys) {
659
+ const window = resolveContextWindow(key, clawmuxContextWindows, openclawConfig, piAiCatalog);
660
+ if (window < min) {
661
+ min = window;
662
+ }
663
+ }
664
+ return min === Infinity ? DEFAULT_CONTEXT_TOKENS : min;
665
+ }
666
+ async function findOpenClawNodeModulesPath() {
667
+ try {
668
+ const { execSync } = await import("node:child_process");
669
+ const whichResult = execSync("which openclaw", { encoding: "utf-8" }).trim();
670
+ if (!whichResult)
671
+ return;
672
+ const resolved = await import_promises5.realpath(whichResult);
673
+ let dir = import_node_path3.dirname(resolved);
674
+ for (let i = 0;i < 10; i++) {
675
+ const candidate = import_node_path3.join(dir, "node_modules", "@mariozechner", "pi-ai", "dist", "models.generated.js");
676
+ try {
677
+ if (await fileExists(candidate)) {
678
+ return candidate;
679
+ }
680
+ } catch {}
681
+ const parent = import_node_path3.dirname(dir);
682
+ if (parent === dir)
683
+ break;
684
+ dir = parent;
685
+ }
686
+ } catch {}
687
+ const homeDir = process.env.HOME ?? "/root";
688
+ const fallbackPaths = [
689
+ import_node_path3.join(homeDir, ".npm-global", "lib", "node_modules", "openclaw", "node_modules", "@mariozechner", "pi-ai", "dist", "models.generated.js"),
690
+ import_node_path3.join(homeDir, ".local", "lib", "node_modules", "openclaw", "node_modules", "@mariozechner", "pi-ai", "dist", "models.generated.js")
691
+ ];
692
+ for (const path of fallbackPaths) {
693
+ try {
694
+ if (await fileExists(path)) {
695
+ return path;
696
+ }
697
+ } catch {}
698
+ }
699
+ return;
700
+ }
701
+ function parseCatalogFromSource(source) {
702
+ const modelsMatch = source.match(/export\s+const\s+MODELS\s*=\s*(\{[\s\S]*\});?\s*$/m);
703
+ if (!modelsMatch)
704
+ return;
705
+ try {
706
+ const fn = new Function(`return (${modelsMatch[1]});`);
707
+ const result = fn();
708
+ if (typeof result === "object" && result !== null && !Array.isArray(result)) {
709
+ return result;
710
+ }
711
+ } catch (err) {
712
+ console.warn("[clawmux] Failed to parse pi-ai model catalog:", err instanceof Error ? err.message : String(err));
713
+ }
714
+ return;
715
+ }
716
+ async function loadPiAiCatalog() {
717
+ if (cachedCatalog !== null) {
718
+ return cachedCatalog;
719
+ }
720
+ const filePath = await findOpenClawNodeModulesPath();
721
+ if (!filePath) {
722
+ console.warn("[clawmux] pi-ai model catalog not found — using default context windows");
723
+ cachedCatalog = undefined;
724
+ return;
725
+ }
726
+ try {
727
+ const source = await readFileText(filePath);
728
+ const catalog = parseCatalogFromSource(source);
729
+ if (catalog) {
730
+ const providerCount = Object.keys(catalog).length;
731
+ const modelCount = Object.values(catalog).reduce((sum, models) => sum + Object.keys(models).length, 0);
732
+ console.log(`[clawmux] Loaded pi-ai model catalog: ${providerCount} providers, ${modelCount} models`);
733
+ cachedCatalog = catalog;
734
+ return catalog;
735
+ }
736
+ console.warn("[clawmux] pi-ai model catalog found but could not be parsed");
737
+ cachedCatalog = undefined;
738
+ return;
739
+ } catch (err) {
740
+ console.warn("[clawmux] Failed to load pi-ai model catalog:", err instanceof Error ? err.message : String(err));
741
+ cachedCatalog = undefined;
742
+ return;
743
+ }
744
+ }
745
+
746
+ // src/adapters/registry.ts
747
+ var adapters = new Map;
748
+ function registerAdapter(adapter) {
749
+ adapters.set(adapter.apiType, adapter);
750
+ }
751
+ function getAdapter(apiType) {
752
+ return adapters.get(apiType);
753
+ }
754
+
755
+ // src/adapters/anthropic.ts
756
+ class AnthropicAdapter {
757
+ apiType = "anthropic-messages";
758
+ parseRequest(body) {
759
+ const raw = body;
760
+ const model = String(raw.model ?? "");
761
+ const messages = raw.messages ?? [];
762
+ const stream = raw.stream !== false;
763
+ const maxTokens = typeof raw.max_tokens === "number" ? raw.max_tokens : undefined;
764
+ const system = raw.system;
765
+ return {
766
+ model,
767
+ messages,
768
+ system,
769
+ stream,
770
+ maxTokens,
771
+ rawBody: raw
772
+ };
773
+ }
774
+ buildUpstreamRequest(parsed, targetModel, baseUrl, auth) {
775
+ const url = `${baseUrl}/v1/messages`;
776
+ const headers = {
777
+ "x-api-key": auth.apiKey,
778
+ "anthropic-version": "2023-06-01",
779
+ "content-type": "application/json"
780
+ };
781
+ let bodyObj = {
782
+ ...parsed.rawBody,
783
+ model: targetModel
784
+ };
785
+ const isHaiku = targetModel.toLowerCase().includes("haiku");
786
+ const hasThinking = "thinking" in parsed.rawBody;
787
+ if (hasThinking && !isHaiku) {
788
+ headers["anthropic-beta"] = "interleaved-thinking-2025-05-14";
789
+ }
790
+ if (isHaiku && "thinking" in bodyObj) {
791
+ const { thinking: _, ...rest } = bodyObj;
792
+ bodyObj = rest;
793
+ }
794
+ return {
795
+ url,
796
+ method: "POST",
797
+ headers,
798
+ body: JSON.stringify(bodyObj)
799
+ };
800
+ }
801
+ modifyMessages(rawBody, compressedMessages) {
802
+ return {
803
+ ...rawBody,
804
+ messages: compressedMessages
805
+ };
806
+ }
807
+ parseResponse(body) {
808
+ const raw = body;
809
+ const id = String(raw.id ?? "");
810
+ const model = String(raw.model ?? "");
811
+ let content = "";
812
+ const contentBlocks = raw.content;
813
+ if (Array.isArray(contentBlocks)) {
814
+ const textParts = [];
815
+ for (const block of contentBlocks) {
816
+ if (typeof block === "object" && block !== null && block.type === "text" && typeof block.text === "string") {
817
+ textParts.push(block.text);
818
+ }
819
+ }
820
+ content = textParts.join("");
821
+ }
822
+ const stopReason = typeof raw.stop_reason === "string" ? raw.stop_reason : null;
823
+ let usage;
824
+ const rawUsage = raw.usage;
825
+ if (rawUsage) {
826
+ usage = {
827
+ inputTokens: typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : 0,
828
+ outputTokens: typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : 0
829
+ };
830
+ }
831
+ return { id, model, content, role: "assistant", stopReason, usage };
832
+ }
833
+ buildResponse(parsed) {
834
+ const result = {
835
+ id: parsed.id,
836
+ type: "message",
837
+ role: "assistant",
838
+ model: parsed.model,
839
+ content: [{ type: "text", text: parsed.content }],
840
+ stop_reason: parsed.stopReason
841
+ };
842
+ if (parsed.usage) {
843
+ result.usage = {
844
+ input_tokens: parsed.usage.inputTokens,
845
+ output_tokens: parsed.usage.outputTokens
846
+ };
847
+ }
848
+ return result;
849
+ }
850
+ parseStreamChunk(chunk) {
851
+ const events = [];
852
+ let eventType = "";
853
+ let dataStr = "";
854
+ for (const line of chunk.split(`
855
+ `)) {
856
+ if (line.startsWith("event: ")) {
857
+ eventType = line.slice(7).trim();
858
+ } else if (line.startsWith("data: ")) {
859
+ dataStr = line.slice(6);
860
+ }
861
+ }
862
+ if (!eventType && !dataStr)
863
+ return events;
864
+ if (eventType === "ping" || eventType === "content_block_start") {
865
+ return events;
866
+ }
867
+ let data = {};
868
+ if (dataStr) {
869
+ try {
870
+ data = JSON.parse(dataStr);
871
+ } catch {
872
+ return events;
873
+ }
874
+ }
875
+ switch (eventType) {
876
+ case "message_start": {
877
+ const message = data.message;
878
+ events.push({
879
+ type: "message_start",
880
+ id: String(message?.id ?? data.id ?? ""),
881
+ model: String(message?.model ?? data.model ?? "")
882
+ });
883
+ break;
884
+ }
885
+ case "content_block_delta": {
886
+ const delta = data.delta;
887
+ if (delta?.type === "text_delta" && typeof delta.text === "string") {
888
+ events.push({
889
+ type: "content_delta",
890
+ text: delta.text,
891
+ index: typeof data.index === "number" ? data.index : 0
892
+ });
893
+ }
894
+ break;
895
+ }
896
+ case "content_block_stop": {
897
+ events.push({
898
+ type: "content_stop",
899
+ index: typeof data.index === "number" ? data.index : 0
900
+ });
901
+ break;
902
+ }
903
+ case "message_delta": {
904
+ const rawUsage = data.usage;
905
+ let usage;
906
+ if (rawUsage) {
907
+ usage = {
908
+ inputTokens: typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : 0,
909
+ outputTokens: typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : 0
910
+ };
911
+ }
912
+ events.push({ type: "message_stop", usage });
913
+ break;
914
+ }
915
+ case "message_stop": {
916
+ events.push({ type: "message_stop" });
917
+ break;
918
+ }
919
+ }
920
+ return events;
921
+ }
922
+ buildStreamChunk(event) {
923
+ switch (event.type) {
924
+ case "message_start":
925
+ return `event: message_start
926
+ data: ${JSON.stringify({
927
+ type: "message_start",
928
+ message: {
929
+ id: event.id,
930
+ type: "message",
931
+ role: "assistant",
932
+ model: event.model
933
+ }
934
+ })}
935
+
936
+ `;
937
+ case "content_delta":
938
+ return `event: content_block_delta
939
+ data: ${JSON.stringify({
940
+ type: "content_block_delta",
941
+ index: event.index,
942
+ delta: { type: "text_delta", text: event.text }
943
+ })}
944
+
945
+ `;
946
+ case "content_stop":
947
+ return `event: content_block_stop
948
+ data: ${JSON.stringify({
949
+ type: "content_block_stop",
950
+ index: event.index
951
+ })}
952
+
953
+ `;
954
+ case "message_stop":
955
+ if (event.usage) {
956
+ return `event: message_delta
957
+ data: ${JSON.stringify({
958
+ type: "message_delta",
959
+ usage: {
960
+ input_tokens: event.usage.inputTokens,
961
+ output_tokens: event.usage.outputTokens
962
+ }
963
+ })}
964
+
965
+ ` + `event: message_stop
966
+ data: ${JSON.stringify({
967
+ type: "message_stop"
968
+ })}
969
+
970
+ `;
971
+ }
972
+ return `event: message_stop
973
+ data: ${JSON.stringify({
974
+ type: "message_stop"
975
+ })}
976
+
977
+ `;
978
+ case "error":
979
+ return `event: error
980
+ data: ${JSON.stringify({
981
+ type: "error",
982
+ error: { message: event.message }
983
+ })}
984
+
985
+ `;
986
+ default:
987
+ return "";
988
+ }
989
+ }
990
+ }
991
+
992
+ // src/adapters/openai-shared.ts
993
+ function isRecord(value) {
994
+ return typeof value === "object" && value !== null && !Array.isArray(value);
995
+ }
996
+ function isMessageArray(value) {
997
+ if (!Array.isArray(value))
998
+ return false;
999
+ return value.every((item) => isRecord(item) && typeof item.role === "string" && ("content" in item));
1000
+ }
1001
+ function extractSystemMessage(messages) {
1002
+ const systemMessages = [];
1003
+ const filtered = [];
1004
+ for (const msg of messages) {
1005
+ if (msg.role === "system" || msg.role === "developer") {
1006
+ if (typeof msg.content === "string") {
1007
+ systemMessages.push(msg.content);
1008
+ } else if (Array.isArray(msg.content)) {
1009
+ for (const part of msg.content) {
1010
+ if (isRecord(part) && part.type === "text" && typeof part.text === "string") {
1011
+ systemMessages.push(part.text);
1012
+ }
1013
+ }
1014
+ }
1015
+ } else {
1016
+ filtered.push(msg);
1017
+ }
1018
+ }
1019
+ return {
1020
+ system: systemMessages.length > 0 ? systemMessages.join(`
1021
+ `) : undefined,
1022
+ filtered
1023
+ };
1024
+ }
1025
+ function parseOpenAIBody(body) {
1026
+ if (!isRecord(body)) {
1027
+ throw new Error("Request body must be a JSON object");
1028
+ }
1029
+ const model = typeof body.model === "string" ? body.model : "";
1030
+ if (!model) {
1031
+ throw new Error("Missing required field: model");
1032
+ }
1033
+ const rawMessages = body.messages ?? body.input;
1034
+ if (!isMessageArray(rawMessages)) {
1035
+ throw new Error("Request must contain a valid 'messages' or 'input' array");
1036
+ }
1037
+ const { system, filtered } = extractSystemMessage(rawMessages);
1038
+ const stream = body.stream === true;
1039
+ const rawMax = body.max_tokens ?? body.max_output_tokens;
1040
+ const maxTokens = typeof rawMax === "number" ? rawMax : undefined;
1041
+ const rawBody = { ...body };
1042
+ return {
1043
+ model,
1044
+ messages: filtered,
1045
+ system,
1046
+ stream,
1047
+ maxTokens,
1048
+ rawBody
1049
+ };
1050
+ }
1051
+
1052
+ // src/adapters/openai-completions.ts
1053
+ class OpenAICompletionsAdapter {
1054
+ apiType = "openai-completions";
1055
+ parseRequest(body) {
1056
+ return parseOpenAIBody(body);
1057
+ }
1058
+ buildUpstreamRequest(parsed, targetModel, baseUrl, auth) {
1059
+ const { rawBody } = parsed;
1060
+ const upstreamBody = {
1061
+ ...rawBody,
1062
+ model: targetModel
1063
+ };
1064
+ return {
1065
+ url: /\/v\d+\/?$/.test(baseUrl) ? `${baseUrl.replace(/\/$/, "")}/chat/completions` : `${baseUrl}/v1/chat/completions`,
1066
+ method: "POST",
1067
+ headers: {
1068
+ "Content-Type": "application/json",
1069
+ Authorization: `Bearer ${auth.apiKey}`
1070
+ },
1071
+ body: JSON.stringify(upstreamBody)
1072
+ };
1073
+ }
1074
+ modifyMessages(rawBody, compressedMessages) {
1075
+ return {
1076
+ ...rawBody,
1077
+ messages: compressedMessages
1078
+ };
1079
+ }
1080
+ parseResponse(body) {
1081
+ const raw = body;
1082
+ const id = String(raw.id ?? "");
1083
+ const model = String(raw.model ?? "");
1084
+ let content = "";
1085
+ let stopReason = null;
1086
+ const choices = raw.choices;
1087
+ if (Array.isArray(choices) && choices.length > 0) {
1088
+ const choice = choices[0];
1089
+ const message = choice.message;
1090
+ if (message && typeof message.content === "string") {
1091
+ content = message.content;
1092
+ }
1093
+ if (typeof choice.finish_reason === "string") {
1094
+ stopReason = choice.finish_reason;
1095
+ }
1096
+ }
1097
+ let usage;
1098
+ const rawUsage = raw.usage;
1099
+ if (rawUsage) {
1100
+ usage = {
1101
+ inputTokens: typeof rawUsage.prompt_tokens === "number" ? rawUsage.prompt_tokens : 0,
1102
+ outputTokens: typeof rawUsage.completion_tokens === "number" ? rawUsage.completion_tokens : 0
1103
+ };
1104
+ }
1105
+ return { id, model, content, role: "assistant", stopReason, usage };
1106
+ }
1107
+ buildResponse(parsed) {
1108
+ const result = {
1109
+ id: parsed.id,
1110
+ object: "chat.completion",
1111
+ model: parsed.model,
1112
+ choices: [
1113
+ {
1114
+ index: 0,
1115
+ message: { role: "assistant", content: parsed.content },
1116
+ finish_reason: parsed.stopReason
1117
+ }
1118
+ ]
1119
+ };
1120
+ if (parsed.usage) {
1121
+ result.usage = {
1122
+ prompt_tokens: parsed.usage.inputTokens,
1123
+ completion_tokens: parsed.usage.outputTokens,
1124
+ total_tokens: parsed.usage.inputTokens + parsed.usage.outputTokens
1125
+ };
1126
+ }
1127
+ return result;
1128
+ }
1129
+ parseStreamChunk(chunk) {
1130
+ const events = [];
1131
+ for (const line of chunk.split(`
1132
+ `)) {
1133
+ const trimmed = line.trim();
1134
+ if (!trimmed.startsWith("data: "))
1135
+ continue;
1136
+ const payload = trimmed.slice(6);
1137
+ if (payload === "[DONE]") {
1138
+ events.push({ type: "message_stop" });
1139
+ continue;
1140
+ }
1141
+ let data;
1142
+ try {
1143
+ data = JSON.parse(payload);
1144
+ } catch {
1145
+ continue;
1146
+ }
1147
+ const choices = data.choices;
1148
+ if (!Array.isArray(choices) || choices.length === 0) {
1149
+ if (data.id && data.model) {
1150
+ events.push({
1151
+ type: "message_start",
1152
+ id: String(data.id),
1153
+ model: String(data.model)
1154
+ });
1155
+ }
1156
+ continue;
1157
+ }
1158
+ const choice = choices[0];
1159
+ const delta = choice.delta;
1160
+ const finishReason = choice.finish_reason;
1161
+ if (delta?.role === "assistant" && !delta.content) {
1162
+ events.push({
1163
+ type: "message_start",
1164
+ id: String(data.id ?? ""),
1165
+ model: String(data.model ?? "")
1166
+ });
1167
+ } else if (typeof delta?.content === "string") {
1168
+ events.push({
1169
+ type: "content_delta",
1170
+ text: delta.content,
1171
+ index: typeof choice.index === "number" ? choice.index : 0
1172
+ });
1173
+ }
1174
+ if (typeof finishReason === "string" && finishReason !== "") {
1175
+ events.push({
1176
+ type: "content_stop",
1177
+ index: typeof choice.index === "number" ? choice.index : 0
1178
+ });
1179
+ let usage;
1180
+ const rawUsage = data.usage;
1181
+ if (rawUsage) {
1182
+ usage = {
1183
+ inputTokens: typeof rawUsage.prompt_tokens === "number" ? rawUsage.prompt_tokens : 0,
1184
+ outputTokens: typeof rawUsage.completion_tokens === "number" ? rawUsage.completion_tokens : 0
1185
+ };
1186
+ }
1187
+ events.push({ type: "message_stop", usage });
1188
+ }
1189
+ }
1190
+ return events;
1191
+ }
1192
  // Serializes one normalized stream event back into an OpenAI
  // chat-completions SSE chunk ("data: {...}\n\n"); "message_stop"
  // becomes the terminal "data: [DONE]\n\n" sentinel.
  buildStreamChunk(event) {
    switch (event.type) {
      case "message_start":
        // First chunk carries the assistant role with empty content.
        return `data: ${JSON.stringify({
          id: event.id,
          object: "chat.completion.chunk",
          model: event.model,
          choices: [
            {
              index: 0,
              delta: { role: "assistant", content: "" },
              finish_reason: null
            }
          ]
        })}

`;
      case "content_delta":
        // NOTE(review): delta chunks are emitted with an empty id —
        // presumably clients key off the message_start id; confirm.
        return `data: ${JSON.stringify({
          id: "",
          object: "chat.completion.chunk",
          choices: [
            {
              index: event.index,
              delta: { content: event.text },
              finish_reason: null
            }
          ]
        })}

`;
      case "content_stop":
        // Final content chunk: empty delta, finish_reason "stop".
        return `data: ${JSON.stringify({
          id: "",
          object: "chat.completion.chunk",
          choices: [
            {
              index: event.index,
              delta: {},
              finish_reason: "stop"
            }
          ]
        })}

`;
      case "message_stop":
        return `data: [DONE]

`;
      case "error":
        return `data: ${JSON.stringify({
          error: { message: event.message }
        })}

`;
      default:
        // Unknown event types are silently dropped from the stream.
        return "";
    }
  }
1251
+ }
1252
// Module-level singleton; registered with the adapter registry at load time.
var openaiCompletionsAdapter = new OpenAICompletionsAdapter;
registerAdapter(openaiCompletionsAdapter);
1254
+
1255
+ // src/adapters/openai-responses.ts
1256
// Adapter that translates between the proxy's normalized request/response
// shapes and the OpenAI Responses API (/v1/responses), including its
// typed SSE stream events (response.created / response.output_text.* /
// response.completed).
class OpenAIResponsesAdapter {
  apiType = "openai-responses";
  // Delegates to the shared OpenAI body parser (defined elsewhere in this bundle).
  parseRequest(body) {
    return parseOpenAIBody(body);
  }
  // Forwards the client body mostly as-is, swapping in the routed model and
  // attaching bearer auth.
  buildUpstreamRequest(parsed, targetModel, baseUrl, auth) {
    const { rawBody } = parsed;
    const upstreamBody = {
      ...rawBody,
      model: targetModel
    };
    return {
      url: `${baseUrl}/v1/responses`,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${auth.apiKey}`
      },
      body: JSON.stringify(upstreamBody)
    };
  }
  // Replaces the conversation with compressed messages; the Responses API
  // accepts either "input" or "messages", so write back whichever the
  // original body used.
  modifyMessages(rawBody, compressedMessages) {
    const hasInput = "input" in rawBody;
    const fieldName = hasInput ? "input" : "messages";
    return {
      ...rawBody,
      [fieldName]: compressedMessages
    };
  }
  // Flattens a non-streaming Responses API body into the normalized shape:
  // concatenates all output_text parts of "message" output items.
  parseResponse(body) {
    const raw = body;
    const id = String(raw.id ?? "");
    const model = String(raw.model ?? "");
    let content = "";
    let stopReason = null;
    const output = raw.output;
    if (Array.isArray(output)) {
      const textParts = [];
      for (const item of output) {
        if (item.type === "message") {
          const msgContent = item.content;
          if (Array.isArray(msgContent)) {
            for (const part of msgContent) {
              if (part.type === "output_text" && typeof part.text === "string") {
                textParts.push(part.text);
              }
            }
          }
        }
      }
      content = textParts.join("");
    }
    // The response "status" string (e.g. "completed") is carried verbatim
    // as the normalized stopReason.
    if (typeof raw.status === "string") {
      stopReason = raw.status;
    }
    let usage;
    const rawUsage = raw.usage;
    if (rawUsage) {
      usage = {
        inputTokens: typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : 0,
        outputTokens: typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : 0
      };
    }
    return { id, model, content, role: "assistant", stopReason, usage };
  }
  // Reconstructs a Responses-API-shaped JSON body from a normalized response.
  buildResponse(parsed) {
    const result = {
      id: parsed.id,
      object: "response",
      model: parsed.model,
      status: parsed.stopReason ?? "completed",
      output: [
        {
          type: "message",
          role: "assistant",
          content: [
            { type: "output_text", text: parsed.content }
          ]
        }
      ]
    };
    if (parsed.usage) {
      result.usage = {
        input_tokens: parsed.usage.inputTokens,
        output_tokens: parsed.usage.outputTokens,
        total_tokens: parsed.usage.inputTokens + parsed.usage.outputTokens
      };
    }
    return result;
  }
  // Parses one SSE chunk (possibly several "data: ..." lines) into
  // normalized stream events; malformed JSON payloads are skipped.
  parseStreamChunk(chunk) {
    const events = [];
    for (const line of chunk.split(`
`)) {
      const trimmed = line.trim();
      if (!trimmed.startsWith("data: "))
        continue;
      const payload = trimmed.slice(6);
      if (payload === "[DONE]") {
        events.push({ type: "message_stop" });
        continue;
      }
      let data;
      try {
        data = JSON.parse(payload);
      } catch {
        continue;
      }
      const eventType = String(data.type ?? "");
      switch (eventType) {
        case "response.created": {
          const response = data.response;
          events.push({
            type: "message_start",
            id: String(response?.id ?? data.id ?? ""),
            model: String(response?.model ?? data.model ?? "")
          });
          break;
        }
        case "response.output_text.delta": {
          events.push({
            type: "content_delta",
            text: typeof data.delta === "string" ? data.delta : "",
            index: typeof data.output_index === "number" ? data.output_index : 0
          });
          break;
        }
        case "response.output_text.done": {
          events.push({
            type: "content_stop",
            index: typeof data.output_index === "number" ? data.output_index : 0
          });
          break;
        }
        case "response.completed": {
          let usage;
          const response = data.response;
          const rawUsage = response?.usage;
          if (rawUsage) {
            usage = {
              inputTokens: typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : 0,
              outputTokens: typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : 0
            };
          }
          events.push({ type: "message_stop", usage });
          break;
        }
      }
    }
    return events;
  }
  // Serializes a normalized stream event back into a Responses API SSE chunk.
  buildStreamChunk(event) {
    switch (event.type) {
      case "message_start":
        return `data: ${JSON.stringify({
          type: "response.created",
          response: {
            id: event.id,
            object: "response",
            model: event.model,
            status: "in_progress"
          }
        })}

`;
      case "content_delta":
        return `data: ${JSON.stringify({
          type: "response.output_text.delta",
          output_index: event.index,
          delta: event.text
        })}

`;
      case "content_stop":
        return `data: ${JSON.stringify({
          type: "response.output_text.done",
          output_index: event.index
        })}

`;
      case "message_stop":
        // Attach usage to the completed event when available.
        if (event.usage) {
          return `data: ${JSON.stringify({
            type: "response.completed",
            response: {
              usage: {
                input_tokens: event.usage.inputTokens,
                output_tokens: event.usage.outputTokens
              }
            }
          })}

`;
        }
        return `data: ${JSON.stringify({
          type: "response.completed"
        })}

`;
      case "error":
        return `data: ${JSON.stringify({
          type: "error",
          error: { message: event.message }
        })}

`;
      default:
        return "";
    }
  }
}
1467
// Module-level singleton; registered with the adapter registry at load time.
var openaiResponsesAdapter = new OpenAIResponsesAdapter;
registerAdapter(openaiResponsesAdapter);
1469
+
1470
+ // src/adapters/google.ts
1471
// Maps a Gemini content role to the normalized role vocabulary:
// "model" becomes "assistant"; a missing role defaults to "user".
function googleRoleToStandard(role) {
  return role === "model" ? "assistant" : (role ?? "user");
}
1476
// Maps a normalized role back to Gemini's vocabulary: "assistant"
// becomes "model"; every other role collapses to "user".
function standardRoleToGoogle(role) {
  return role === "assistant" ? "model" : "user";
}
1481
// Converts Gemini "contents" entries into normalized messages by joining
// their text parts; entries with no text keep the raw parts array.
function contentsToMessages(contents) {
  const messages = [];
  for (const entry of contents) {
    const joined = entry.parts
      .filter((part) => part.text !== undefined)
      .map((part) => part.text)
      .join("");
    messages.push({
      role: googleRoleToStandard(entry.role),
      // "" is falsy, so text-less entries fall back to the parts array.
      content: joined || entry.parts
    });
  }
  return messages;
}
1490
// Converts normalized messages back into Gemini "contents": string content
// becomes one text part, arrays pass through, anything else is stringified.
function messagesToContents(messages) {
  return messages.map((msg) => {
    let parts;
    if (typeof msg.content === "string") {
      parts = [{ text: msg.content }];
    } else if (Array.isArray(msg.content)) {
      parts = msg.content;
    } else {
      parts = [{ text: String(msg.content) }];
    }
    return { role: standardRoleToGoogle(msg.role), parts };
  });
}
1496
// Maps a Gemini finishReason to the normalized stop-reason vocabulary;
// unknown reasons are passed through lower-cased, null stays null.
function mapGoogleFinishReason(reason) {
  if (reason === null)
    return null;
  const known = new Map([
    ["STOP", "stop"],
    ["MAX_TOKENS", "max_tokens"],
    ["SAFETY", "content_filter"]
  ]);
  return known.get(reason) ?? reason.toLowerCase();
}
1510
// Inverse of mapGoogleFinishReason: normalized stop reason back to the
// Gemini enum; unknown reasons are passed through upper-cased, null → "STOP".
function mapStopReasonToGoogle(reason) {
  if (reason === null)
    return "STOP";
  const known = new Map([
    ["stop", "STOP"],
    ["max_tokens", "MAX_TOKENS"],
    ["content_filter", "SAFETY"]
  ]);
  return known.get(reason) ?? reason.toUpperCase();
}
1524
+
1525
// Adapter translating between the proxy's normalized shapes and the Google
// Generative AI (Gemini) v1beta API, including its SSE streaming variant
// (:streamGenerateContent?alt=sse).
class GoogleGenerativeAIAdapter {
  apiType = "google-generative-ai";
  // Normalizes a Gemini request body: contents → messages, systemInstruction
  // text → system string. Streaming defaults to true unless explicitly false.
  parseRequest(body) {
    const raw = body;
    const model = raw.model ?? "";
    const contents = raw.contents ?? [];
    const messages = contentsToMessages(contents);
    let system;
    if (raw.systemInstruction?.parts) {
      system = raw.systemInstruction.parts.filter((p) => p.text !== undefined).map((p) => p.text).join("");
    }
    return {
      model,
      messages,
      system,
      stream: raw.stream !== false,
      maxTokens: raw.generationConfig?.maxOutputTokens,
      rawBody: raw
    };
  }
  // Builds the upstream Gemini request; the model is part of the URL, so the
  // "model"/"stream" fields are stripped from the JSON body.
  buildUpstreamRequest(parsed, targetModel, baseUrl, auth) {
    const endpoint = parsed.stream ? `${baseUrl}/v1beta/models/${targetModel}:streamGenerateContent?alt=sse` : `${baseUrl}/v1beta/models/${targetModel}:generateContent`;
    const contents = messagesToContents(parsed.messages);
    const requestBody = {
      ...parsed.rawBody,
      contents
    };
    delete requestBody.model;
    delete requestBody.stream;
    if (parsed.system) {
      requestBody.systemInstruction = {
        parts: [{ text: parsed.system }]
      };
    }
    if (parsed.maxTokens !== undefined) {
      requestBody.generationConfig = {
        ...requestBody.generationConfig,
        maxOutputTokens: parsed.maxTokens
      };
    }
    return {
      url: endpoint,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        // Default Gemini auth header; configurable header name/value wins.
        [auth.headerName || "x-goog-api-key"]: auth.headerValue || auth.apiKey
      },
      body: JSON.stringify(requestBody)
    };
  }
  // Swaps the conversation for its compressed form.
  modifyMessages(rawBody, compressedMessages) {
    return {
      ...rawBody,
      contents: messagesToContents(compressedMessages)
    };
  }
  // Flattens a non-streaming Gemini response (first candidate only) into the
  // normalized shape.
  parseResponse(body) {
    const raw = body;
    const candidates = raw.candidates;
    const candidate = candidates?.[0];
    const content = candidate?.content;
    const text = content?.parts?.filter((p) => p.text !== undefined).map((p) => p.text).join("") ?? "";
    const finishReason = candidate?.finishReason;
    const usageMeta = raw.usageMetadata;
    return {
      // Gemini bodies often lack an id; synthesize a time-based one.
      id: raw.id ?? `google-${Date.now()}`,
      model: raw.modelVersion ?? "",
      content: text,
      role: "assistant",
      stopReason: mapGoogleFinishReason(finishReason ?? null),
      usage: usageMeta ? {
        inputTokens: usageMeta.promptTokenCount ?? 0,
        outputTokens: usageMeta.candidatesTokenCount ?? 0
      } : undefined
    };
  }
  // Builds a Gemini-shaped body from a normalized response.
  buildResponse(parsed) {
    const result = {
      candidates: [
        {
          content: {
            parts: [{ text: parsed.content }],
            role: "model"
          },
          finishReason: mapStopReasonToGoogle(parsed.stopReason)
        }
      ]
    };
    if (parsed.usage) {
      result.usageMetadata = {
        promptTokenCount: parsed.usage.inputTokens,
        candidatesTokenCount: parsed.usage.outputTokens
      };
    }
    return result;
  }
  // Parses an SSE chunk of streamGenerateContent output into normalized events.
  parseStreamChunk(chunk) {
    const events = [];
    const lines = chunk.split(`
`);
    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed.startsWith("data:"))
        continue;
      const jsonStr = trimmed.slice(5).trim();
      if (jsonStr === "" || jsonStr === "[DONE]")
        continue;
      let parsed;
      try {
        parsed = JSON.parse(jsonStr);
      } catch {
        continue;
      }
      const candidates = parsed.candidates;
      const candidate = candidates?.[0];
      if (!candidate)
        continue;
      const content = candidate.content;
      const text = content?.parts?.filter((p) => p.text !== undefined).map((p) => p.text).join("") ?? "";
      const finishReason = candidate.finishReason;
      // NOTE(review): every chunk whose content carries role "model" and
      // non-empty text emits a message_start — downstream presumably
      // tolerates repeated message_start events; confirm.
      if (content?.role === "model" && text !== "") {
        events.push({
          type: "message_start",
          id: parsed.id ?? `google-${Date.now()}`,
          model: parsed.modelVersion ?? ""
        });
        events.push({ type: "content_delta", text, index: 0 });
      } else if (text !== "") {
        events.push({ type: "content_delta", text, index: 0 });
      }
      if (finishReason && finishReason !== "FINISH_REASON_UNSPECIFIED") {
        const usageMeta = parsed.usageMetadata;
        events.push({ type: "content_stop", index: 0 });
        events.push({
          type: "message_stop",
          usage: usageMeta ? {
            inputTokens: usageMeta.promptTokenCount ?? 0,
            outputTokens: usageMeta.candidatesTokenCount ?? 0
          } : undefined
        });
      }
    }
    return events;
  }
  // Serializes a normalized stream event into a Gemini SSE chunk; there is
  // no wire-level equivalent of content_stop, so it emits nothing.
  buildStreamChunk(event) {
    switch (event.type) {
      case "message_start":
        return `data: ${JSON.stringify({
          candidates: [
            {
              content: { parts: [{ text: "" }], role: "model" }
            }
          ]
        })}

`;
      case "content_delta":
        return `data: ${JSON.stringify({
          candidates: [
            {
              content: { parts: [{ text: event.text }], role: "model" }
            }
          ]
        })}

`;
      case "content_stop":
        return "";
      case "message_stop":
        return `data: ${JSON.stringify({
          candidates: [
            {
              content: { parts: [{ text: "" }], role: "model" },
              finishReason: "STOP"
            }
          ],
          ...event.usage ? {
            usageMetadata: {
              promptTokenCount: event.usage.inputTokens,
              candidatesTokenCount: event.usage.outputTokens
            }
          } : {}
        })}

`;
      case "error":
        return `data: ${JSON.stringify({
          error: { message: event.message }
        })}

`;
    }
  }
}
1719
// Module-level singleton; registered with the adapter registry at load time.
var googleAdapter = new GoogleGenerativeAIAdapter;
registerAdapter(googleAdapter);
1721
+
1722
+ // src/adapters/ollama.ts
1723
// Adapter translating between the proxy's normalized shapes and the local
// Ollama /api/chat API, whose stream is newline-delimited JSON (NDJSON),
// not SSE. Ollama needs no auth, so the auth argument is ignored.
class OllamaAdapter {
  apiType = "ollama";
  // Normalizes an Ollama chat body: pulls string-content system messages out
  // of the message list into "system" (last one wins if several).
  parseRequest(body) {
    const raw = body;
    const model = raw.model ?? "";
    const messages = raw.messages ?? [];
    const stream = raw.stream !== false;
    let system;
    const filteredMessages = [];
    for (const msg of messages) {
      if (msg.role === "system" && typeof msg.content === "string") {
        system = msg.content;
      } else {
        filteredMessages.push(msg);
      }
    }
    return {
      model,
      messages: filteredMessages,
      system,
      stream,
      maxTokens: raw.options?.num_predict,
      rawBody: raw
    };
  }
  // Rebuilds the /api/chat body, re-inserting the system message at the front
  // and mapping maxTokens onto options.num_predict.
  buildUpstreamRequest(parsed, targetModel, baseUrl, _auth) {
    const messages = [];
    if (parsed.system) {
      messages.push({ role: "system", content: parsed.system });
    }
    messages.push(...parsed.messages);
    const requestBody = {
      ...parsed.rawBody,
      model: targetModel,
      messages,
      stream: parsed.stream
    };
    if (parsed.maxTokens !== undefined) {
      requestBody.options = {
        ...requestBody.options,
        num_predict: parsed.maxTokens
      };
    }
    return {
      url: `${baseUrl}/api/chat`,
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify(requestBody)
    };
  }
  // Swaps the conversation for its compressed form.
  modifyMessages(rawBody, compressedMessages) {
    return {
      ...rawBody,
      messages: compressedMessages
    };
  }
  // Flattens a non-streaming Ollama response into the normalized shape;
  // Ollama has no response id, so one is synthesized from the clock.
  parseResponse(body) {
    const raw = body;
    const message = raw.message;
    const model = raw.model ?? "";
    return {
      id: `ollama-${Date.now()}`,
      model,
      content: message?.content ?? "",
      role: "assistant",
      stopReason: raw.done === true ? "stop" : null,
      usage: raw.prompt_eval_count !== undefined || raw.eval_count !== undefined ? {
        inputTokens: raw.prompt_eval_count ?? 0,
        outputTokens: raw.eval_count ?? 0
      } : undefined
    };
  }
  // Builds an Ollama-shaped body from a normalized response.
  buildResponse(parsed) {
    const result = {
      model: parsed.model,
      message: {
        role: "assistant",
        content: parsed.content
      },
      done: parsed.stopReason === "stop"
    };
    if (parsed.usage) {
      result.prompt_eval_count = parsed.usage.inputTokens;
      result.eval_count = parsed.usage.outputTokens;
    }
    return result;
  }
  // Parses an NDJSON chunk into normalized events. message_start is only
  // emitted for the first event of THIS chunk (events.length === 0), so a
  // stream split across chunks yields exactly one start per chunk.
  parseStreamChunk(chunk) {
    const events = [];
    const lines = chunk.split(`
`);
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed === "")
        continue;
      let parsed;
      try {
        parsed = JSON.parse(trimmed);
      } catch {
        continue;
      }
      const message = parsed.message;
      const done = parsed.done === true;
      if (message?.role === "assistant" && !done) {
        if (events.length === 0 && message.content !== undefined) {
          events.push({
            type: "message_start",
            id: `ollama-${Date.now()}`,
            model: parsed.model ?? ""
          });
        }
        if (message.content !== undefined) {
          events.push({
            type: "content_delta",
            text: message.content,
            index: 0
          });
        }
      }
      if (done) {
        events.push({ type: "content_stop", index: 0 });
        events.push({
          type: "message_stop",
          usage: parsed.prompt_eval_count !== undefined || parsed.eval_count !== undefined ? {
            inputTokens: parsed.prompt_eval_count ?? 0,
            outputTokens: parsed.eval_count ?? 0
          } : undefined
        });
      }
    }
    return events;
  }
  // Serializes a normalized event into one NDJSON line (no "data:" prefix);
  // content_stop has no Ollama equivalent and emits nothing.
  buildStreamChunk(event) {
    switch (event.type) {
      case "message_start":
        return `${JSON.stringify({
          model: event.model,
          message: { role: "assistant", content: "" },
          done: false
        })}
`;
      case "content_delta":
        return `${JSON.stringify({
          message: { role: "assistant", content: event.text },
          done: false
        })}
`;
      case "content_stop":
        return "";
      case "message_stop":
        return `${JSON.stringify({
          done: true,
          ...event.usage ? {
            prompt_eval_count: event.usage.inputTokens,
            eval_count: event.usage.outputTokens
          } : {}
        })}
`;
      case "error":
        return `${JSON.stringify({
          error: event.message,
          done: true
        })}
`;
    }
  }
}
1892
// Module-level singleton; registered with the adapter registry at load time.
var ollamaAdapter = new OllamaAdapter;
registerAdapter(ollamaAdapter);
1894
+
1895
+ // src/adapters/bedrock.ts
1896
// Converts Bedrock Converse messages to normalized ones: a content array
// holding exactly one text block collapses to a plain string; everything
// else (string content, multi-block arrays) passes through unchanged.
function bedrockMessagesToStandard(messages) {
  const result = [];
  for (const msg of messages) {
    if (typeof msg.content === "string") {
      result.push({ role: msg.role, content: msg.content });
      continue;
    }
    const textBlocks = msg.content.filter((block) => block.text !== undefined);
    result.push(
      textBlocks.length === 1
        ? { role: msg.role, content: textBlocks[0].text }
        : { role: msg.role, content: msg.content }
    );
  }
  return result;
}
1907
+ }
1908
// Converts normalized messages to Bedrock Converse shape: string content is
// wrapped in a single text block, arrays pass through, anything else is
// stringified into a text block.
function standardMessagesToBedrock(messages) {
  return messages.map(({ role, content }) => {
    if (typeof content === "string") {
      return { role, content: [{ text: content }] };
    }
    if (Array.isArray(content)) {
      return { role, content };
    }
    return { role, content: [{ text: String(content) }] };
  });
}
1919
// Maps a Bedrock stopReason to the normalized vocabulary; unknown reasons
// pass through unchanged, null stays null.
function mapBedrockStopReason(reason) {
  if (reason === null)
    return null;
  const known = new Map([
    ["end_turn", "stop"],
    ["max_tokens", "max_tokens"],
    ["content_filtered", "content_filter"]
  ]);
  return known.get(reason) ?? reason;
}
1933
// Inverse of mapBedrockStopReason: normalized stop reason back to the
// Bedrock enum; unknown reasons pass through, null → "end_turn".
function mapStopReasonToBedrock(reason) {
  if (reason === null)
    return "end_turn";
  const known = new Map([
    ["stop", "end_turn"],
    ["max_tokens", "max_tokens"],
    ["content_filter", "content_filtered"]
  ]);
  return known.get(reason) ?? reason;
}
1947
+
1948
// Adapter translating between the proxy's normalized shapes and the AWS
// Bedrock Converse / ConverseStream API. The stream wire format here is
// newline-delimited JSON objects keyed by event name (messageStart,
// contentBlockDelta, contentBlockStop, messageStop, metadata).
class BedrockAdapter {
  apiType = "bedrock-converse-stream";
  // Normalizes a Converse request body. Bedrock requests are always treated
  // as streaming. A single system block collapses to a string; multiple
  // blocks become an array of { type: "text", text } entries.
  parseRequest(body) {
    const raw = body;
    const model = raw.modelId ?? "";
    const messages = raw.messages ? bedrockMessagesToStandard(raw.messages) : [];
    let system;
    if (raw.system && raw.system.length > 0) {
      if (raw.system.length === 1) {
        system = raw.system[0].text;
      } else {
        system = raw.system.map((s) => ({ type: "text", text: s.text }));
      }
    }
    return {
      model,
      messages,
      system,
      stream: true,
      maxTokens: raw.inferenceConfig?.maxTokens,
      rawBody: raw
    };
  }
  // Builds the converse-stream request; the model lives in the URL, so
  // modelId is stripped from the body. Auth header is only attached when an
  // apiKey is configured (e.g. a gateway in front of Bedrock).
  buildUpstreamRequest(parsed, targetModel, baseUrl, auth) {
    const bedrockMessages = standardMessagesToBedrock(parsed.messages);
    const requestBody = {
      ...parsed.rawBody,
      messages: bedrockMessages
    };
    delete requestBody.modelId;
    if (parsed.system) {
      if (typeof parsed.system === "string") {
        requestBody.system = [{ text: parsed.system }];
      } else {
        requestBody.system = parsed.system.map((s) => ({ text: s.text }));
      }
    }
    if (parsed.maxTokens !== undefined) {
      requestBody.inferenceConfig = {
        ...requestBody.inferenceConfig,
        maxTokens: parsed.maxTokens
      };
    }
    const headers = {
      "Content-Type": "application/json"
    };
    if (auth.apiKey) {
      headers[auth.headerName || "Authorization"] = auth.headerValue || auth.apiKey;
    }
    return {
      url: `${baseUrl}/model/${targetModel}/converse-stream`,
      method: "POST",
      headers,
      body: JSON.stringify(requestBody)
    };
  }
  // Swaps the conversation for its compressed form.
  modifyMessages(rawBody, compressedMessages) {
    return {
      ...rawBody,
      messages: standardMessagesToBedrock(compressedMessages)
    };
  }
  // Flattens a non-streaming Converse response into the normalized shape.
  parseResponse(body) {
    const raw = body;
    const output = raw.output;
    const message = output?.message;
    const text = message?.content?.filter((b) => b.text !== undefined).map((b) => b.text).join("") ?? "";
    const stopReason = raw.stopReason;
    const usage = raw.usage;
    return {
      id: raw.requestId ?? `bedrock-${Date.now()}`,
      model: raw.modelId ?? "",
      content: text,
      role: "assistant",
      stopReason: mapBedrockStopReason(stopReason ?? null),
      usage: usage ? {
        inputTokens: usage.inputTokens ?? 0,
        outputTokens: usage.outputTokens ?? 0
      } : undefined
    };
  }
  // Builds a Converse-shaped body from a normalized response.
  buildResponse(parsed) {
    const result = {
      output: {
        message: {
          role: "assistant",
          content: [{ text: parsed.content }]
        }
      },
      stopReason: mapStopReasonToBedrock(parsed.stopReason)
    };
    if (parsed.usage) {
      result.usage = {
        inputTokens: parsed.usage.inputTokens,
        outputTokens: parsed.usage.outputTokens
      };
    }
    return result;
  }
  // Parses an NDJSON chunk of converse-stream events into normalized events.
  // NOTE(review): a chunk containing both messageStop and metadata lines
  // yields two message_stop events (one without usage, one with) —
  // presumably consumers take the last one; confirm.
  parseStreamChunk(chunk) {
    const events = [];
    const lines = chunk.split(`
`);
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed === "")
        continue;
      let parsed;
      try {
        parsed = JSON.parse(trimmed);
      } catch {
        continue;
      }
      if (parsed.messageStart !== undefined) {
        // messageStart carries no id/model, so both are synthesized/blank.
        const start = parsed.messageStart;
        events.push({
          type: "message_start",
          id: `bedrock-${Date.now()}`,
          model: ""
        });
      }
      if (parsed.contentBlockDelta !== undefined) {
        const delta = parsed.contentBlockDelta;
        events.push({
          type: "content_delta",
          text: delta.delta?.text ?? "",
          index: delta.contentBlockIndex ?? 0
        });
      }
      if (parsed.contentBlockStop !== undefined) {
        const stop = parsed.contentBlockStop;
        events.push({
          type: "content_stop",
          index: stop.contentBlockIndex ?? 0
        });
      }
      if (parsed.messageStop !== undefined) {
        const stop = parsed.messageStop;
        events.push({
          type: "message_stop",
          usage: undefined
        });
      }
      if (parsed.metadata !== undefined) {
        // Usage arrives in a trailing metadata event.
        const meta = parsed.metadata;
        if (meta.usage) {
          events.push({
            type: "message_stop",
            usage: {
              inputTokens: meta.usage.inputTokens ?? 0,
              outputTokens: meta.usage.outputTokens ?? 0
            }
          });
        }
      }
    }
    return events;
  }
  // Serializes a normalized event into converse-stream NDJSON; a
  // message_stop with usage expands to two lines (messageStop + metadata).
  buildStreamChunk(event) {
    switch (event.type) {
      case "message_start":
        return `${JSON.stringify({
          messageStart: { role: "assistant" }
        })}
`;
      case "content_delta":
        return `${JSON.stringify({
          contentBlockDelta: {
            delta: { text: event.text },
            contentBlockIndex: event.index
          }
        })}
`;
      case "content_stop":
        return `${JSON.stringify({
          contentBlockStop: { contentBlockIndex: event.index }
        })}
`;
      case "message_stop":
        if (event.usage) {
          return `${JSON.stringify({
            messageStop: { stopReason: "end_turn" }
          })}
${JSON.stringify({
            metadata: {
              usage: {
                inputTokens: event.usage.inputTokens,
                outputTokens: event.usage.outputTokens
              }
            }
          })}
`;
        }
        return `${JSON.stringify({
          messageStop: { stopReason: "end_turn" }
        })}
`;
      case "error":
        return `${JSON.stringify({
          error: { message: event.message }
        })}
`;
    }
  }
}
2153
// Module-level singleton; registered with the adapter registry at load time.
var bedrockAdapter = new BedrockAdapter;
registerAdapter(bedrockAdapter);
2155
+
2156
+ // src/compression/compaction-detector.ts
2157
// Lower-cased substrings that mark a client-issued conversation-compaction
// prompt; matched against the last user message in detectCompaction().
var COMPACTION_PATTERNS = [
  "merge these partial summaries into a single cohesive summary",
  "preserve all opaque identifiers exactly as written",
  "your task is to create a detailed summary of the conversation so far",
  "do not use any tools. you must respond with only the <summary>",
  "important: do not use any tools",
  "summarize the conversation",
  "create a summary of our conversation",
  "compact the conversation"
];
2167
// Collects the text of a message's content: strings pass through, arrays
// yield their "text"-typed blocks joined with newlines, anything else → "".
function extractTextFromContent(content) {
  if (typeof content === "string")
    return content;
  if (!Array.isArray(content))
    return "";
  const parts = [];
  for (const block of content) {
    if (block.type === "text" && typeof block.text === "string") {
      parts.push(block.text);
    }
  }
  return parts.join("\n");
}
2176
// Decides whether a request is a conversation-compaction request.
// An explicit x-request-compaction: "true" header wins outright; otherwise
// the last user message is scanned for known compaction prompt phrases.
function detectCompaction(headers, messages) {
  const headerHit = Object.entries(headers).some(
    ([name, value]) => name.toLowerCase() === "x-request-compaction" && value === "true"
  );
  if (headerHit) {
    return { isCompaction: true, detectedBy: "header", confidence: 1 };
  }
  // Walk backwards to find the most recent user turn.
  let lastUserMessage;
  for (let pos = messages.length - 1; pos >= 0; pos--) {
    if (messages[pos].role === "user") {
      lastUserMessage = messages[pos];
      break;
    }
  }
  if (lastUserMessage) {
    const haystack = extractTextFromContent(lastUserMessage.content).toLowerCase();
    if (COMPACTION_PATTERNS.some((needle) => haystack.includes(needle))) {
      return { isCompaction: true, detectedBy: "prompt_pattern", confidence: 0.95 };
    }
  }
  return { isCompaction: false, detectedBy: "none", confidence: 0 };
}
2199
+
2200
+ // src/utils/token-estimator.ts
2201
// Heuristic token-estimation constants (no tokenizer dependency).
var TOKENS_PER_CJK = 2.5;        // each CJK code unit counted as ~2.5 tokens
var CHARS_PER_ASCII_TOKEN = 4;   // ~4 non-CJK chars per token
var MESSAGE_OVERHEAD = 4;        // fixed per-message framing cost
2204
// True when a UTF-16 code unit falls in the CJK symbol/ideograph range
// (U+3000–U+9FFF), Hangul syllables (U+AC00–U+D7AF), or CJK compatibility
// ideographs (U+F900–U+FAFF).
function isCJK(charCode) {
  if (charCode >= 12288 && charCode <= 40959)
    return true;
  if (charCode >= 44032 && charCode <= 55215)
    return true;
  return charCode >= 63744 && charCode <= 64255;
}
2207
// Estimates the token count of a string: CJK code units cost TOKENS_PER_CJK
// each; runs of non-CJK characters cost length / CHARS_PER_ASCII_TOKEN.
// The result is rounded up to a whole token.
function estimateTokens(text) {
  if (text.length === 0)
    return 0;
  let total = 0;
  let asciiRun = 0;
  // Converts the pending non-CJK run into fractional tokens.
  const flushAscii = () => {
    total += asciiRun / CHARS_PER_ASCII_TOKEN;
    asciiRun = 0;
  };
  for (let pos = 0; pos < text.length; pos++) {
    if (isCJK(text.charCodeAt(pos))) {
      flushAscii();
      total += TOKENS_PER_CJK;
    } else {
      asciiRun++;
    }
  }
  flushAscii();
  return Math.ceil(total);
}
2225
// Sums estimated tokens across a message list, adding a fixed per-message
// overhead. Only string content and "text"-typed blocks contribute text.
function estimateMessagesTokens(messages) {
  let sum = 0;
  for (const msg of messages) {
    sum += MESSAGE_OVERHEAD;
    const { content } = msg;
    if (typeof content === "string") {
      sum += estimateTokens(content);
      continue;
    }
    if (!Array.isArray(content))
      continue;
    for (const block of content) {
      if (block.type === "text" && block.text !== undefined) {
        sum += estimateTokens(block.text);
      }
    }
  }
  return sum;
}
2241
+
2242
+ // src/compression/synthetic-response.ts
2243
// Renders arbitrary message content as a string: strings pass through,
// arrays yield newline-joined text blocks, anything else is JSON-encoded.
function messageContentToString(content) {
  if (typeof content === "string")
    return content;
  if (!Array.isArray(content))
    return JSON.stringify(content);
  const texts = [];
  for (const block of content) {
    if (block.type === "text" && typeof block.text === "string") {
      texts.push(block.text);
    }
  }
  return texts.join("\n");
}
2252
// Renders messages as "[role]: text" lines separated by blank lines.
function formatRecentMessages(messages) {
  const rendered = [];
  for (const msg of messages) {
    rendered.push(`[${msg.role}]: ${messageContentToString(msg.content)}`);
  }
  return rendered.join("\n\n");
}
2257
// Builds a normalized assistant response carrying a precomputed summary plus
// the verbatim recent messages, wrapped in <summary>/<recent_messages> tags.
// Input tokens are reported as 0; output tokens are estimated from content.
function buildSyntheticSummaryResponse(summary, recentMessages, model) {
  const recent = formatRecentMessages(recentMessages);
  const content = [
    "<summary>",
    summary,
    "</summary>",
    "",
    "<recent_messages>",
    recent,
    "</recent_messages>"
  ].join("\n");
  return {
    id: `msg_precomputed_${Date.now()}`,
    model,
    content,
    role: "assistant",
    stopReason: "end_turn",
    usage: { inputTokens: 0, outputTokens: estimateTokens(content) }
  };
}
2275
// Wraps a normalized response in an HTTP 200 Response, preferring the
// adapter's own body builder and falling back to an Anthropic-style message
// body. The x-synthetic-response header marks it as locally generated.
function buildSyntheticHttpResponse(parsed, adapter) {
  let body;
  if (adapter.buildResponse) {
    body = adapter.buildResponse(parsed);
  } else {
    body = {
      id: parsed.id,
      type: "message",
      role: "assistant",
      model: parsed.model,
      content: [{ type: "text", text: parsed.content }],
      stop_reason: parsed.stopReason,
      usage: parsed.usage ? {
        input_tokens: parsed.usage.inputTokens,
        output_tokens: parsed.usage.outputTokens
      } : undefined
    };
  }
  const headers = {
    "content-type": "application/json",
    "x-synthetic-response": "true"
  };
  return new Response(JSON.stringify(body), { status: 200, headers });
}
2296
+
2297
// src/routing/local-classifier.ts
var import_transformers = require("@huggingface/transformers");
// Single-letter complexity categories used by the classifier.
var CAT_L = "L";
var CAT_M = "M";
var CAT_H = "H";
var CAT_Q = "Q";
// Maps category letters to routing tiers; Q intentionally has no entry here.
var TIER_MAP = {
  L: "LIGHT",
  M: "MEDIUM",
  H: "HEAVY"
};
// Embedding model used for classification.
var MODEL_ID = "Xenova/multilingual-e5-small";
// E5-family models expect a "query: " prefix on inputs.
var E5_PREFIX = "query: ";
var BATCH_SIZE = 32;
2311
// Training phrases (Korean and English) exemplifying the LIGHT tier:
// greetings, acknowledgements, and trivial one-line factual questions.
var TRAINING_LIGHT = [
  "안녕하세요",
  "안녕",
  "안녕히 가세요",
  "안녕히 계세요",
  "반갑습니다",
  "잘 지내시죠",
  "오랜만이에요",
  "고마워",
  "감사합니다",
  "고맙습니다",
  "네 고마워요",
  "정말 감사합니다",
  "도와줘서 고마워",
  "네",
  "예",
  "아니요",
  "좋아요",
  "알겠습니다",
  "확인했습니다",
  "그래요",
  "맞아요",
  "아 네",
  "Python이 뭐야?",
  "JavaScript가 뭐야?",
  "오늘 날씨 어때?",
  "지금 몇 시야?",
  "이거 뭐야?",
  "TypeScript가 뭐예요?",
  "API가 뭐야?",
  "HTML이 뭐야?",
  "CSS가 뭐야?",
  "Hello",
  "Hi",
  "Hey there",
  "Good morning",
  "Good afternoon",
  "How are you",
  "What's up",
  "Thanks",
  "Thank you",
  "Got it",
  "OK",
  "Sounds good",
  "I see",
  "Understood",
  "Great thanks",
  "What is Python?",
  "What time is it?",
  "What's the weather?",
  "Who is Einstein?",
  "Where is Seoul?",
  "How old are you?",
  "yes",
  "no",
  "maybe",
  "sure",
  "please",
  "done",
  "ok",
  "cool",
  "nice",
  "awesome"
];
2375
+ var TRAINING_MEDIUM = [
2376
+ "Write a quicksort function in TypeScript",
2377
+ "Implement a binary search tree with insert and delete",
2378
+ "Create a REST API endpoint for user authentication",
2379
+ "Write a function to merge two sorted arrays",
2380
+ "Implement a linked list in Python",
2381
+ "Write a unit test for the calculator module",
2382
+ "Create a simple Express.js middleware for logging",
2383
+ "Write a regex to validate email addresses",
2384
+ "Implement a LRU cache with get and put operations",
2385
+ "Create a React component for a todo list",
2386
+ "Write a SQL query to join two tables",
2387
+ "Implement a basic JWT authentication flow",
2388
+ "Write a function to parse CSV files",
2389
+ "Create a simple WebSocket server",
2390
+ "Implement bubble sort in Java",
2391
+ "Write a Python script to read a JSON file",
2392
+ "Create a Docker compose file for a web app",
2393
+ "Write a Git pre-commit hook",
2394
+ "REST API에 로그인 엔드포인트 추가해줘",
2395
+ "이 함수에 에러 핸들링 추가해줘",
2396
+ "TypeScript로 이벤트 이미터 만들어줘",
2397
+ "데이터베이스 마이그레이션 스크립트 작성해줘",
2398
+ "React 컴포넌트에 상태 관리 추가해줘",
2399
+ "Express 라우터에 CORS 미들웨어 추가해줘",
2400
+ "테스트 코드 작성해줘",
2401
+ "이 코드 리팩토링해줘",
2402
+ "Explain the difference between let and const in JavaScript",
2403
+ "What's the difference between SQL and NoSQL databases",
2404
+ "Explain how async await works in Python",
2405
+ "Describe the MVC architecture pattern",
2406
+ "Explain what Docker containers are",
2407
+ "REST와 GraphQL의 차이점을 설명해줘",
2408
+ "이벤트 루프가 어떻게 동작하는지 설명해줘",
2409
+ "클로저가 뭐야? 설명해줘",
2410
+ "Set up a Node.js project with TypeScript and ESLint",
2411
+ "Create a basic CI/CD pipeline using GitHub Actions",
2412
+ "Configure Nginx as a reverse proxy for a Node.js app",
2413
+ `이 함수를 리팩토링해줘:
2414
+ function processUsers(data) {
2415
+ var result = [];
2416
+ for (var i = 0; i < data.length; i++) {
2417
+ if (data[i].active == true && data[i].age > 18) {
2418
+ var name = data[i].firstName + ' ' + data[i].lastName;
2419
+ var obj = { name: name, email: data[i].email, role: data[i].isAdmin ? 'admin' : 'user' };
2420
+ if (data[i].department !== null && data[i].department !== undefined) {
2421
+ obj.department = data[i].department.name;
2422
+ obj.manager = data[i].department.manager ? data[i].department.manager.name : 'N/A';
2423
+ }
2424
+ result.push(obj);
2425
+ }
2426
+ }
2427
+ result.sort(function(a, b) { return a.name > b.name ? 1 : -1; });
2428
+ return result;
2429
+ }`,
2430
+ `Refactor this code to use modern JavaScript:
2431
+ function getItems(list) {
2432
+ var items = [];
2433
+ for (var i = 0; i < list.length; i++) {
2434
+ if (list[i].active === true) {
2435
+ items.push(list[i].name);
2436
+ }
2437
+ }
2438
+ return items;
2439
+ }`
2440
+ ];
2441
+ var TRAINING_HEAVY = [
2442
+ "Design a distributed consensus algorithm for a multi-region database with strong consistency and Byzantine fault tolerance",
2443
+ "Explain the theoretical foundations of quantum computing and how quantum entanglement can be used for cryptographic key distribution",
2444
+ "Analyze the trade-offs between eventual consistency and strong consistency in distributed systems, including CAP theorem implications",
2445
+ "Design a fault-tolerant microservices architecture for a real-time trading platform handling millions of transactions per second",
2446
+ "Propose a novel approach to solving the traveling salesman problem that improves upon current approximation algorithms",
2447
+ "Design a machine learning pipeline for real-time fraud detection in financial transactions with sub-millisecond latency requirements",
2448
+ "Compare and contrast different consensus protocols (Paxos, Raft, PBFT) and recommend the best one for a blockchain-based supply chain system",
2449
+ "Architect a system that can handle 10 million concurrent WebSocket connections with horizontal scaling",
2450
+ "Design a real-time data streaming architecture combining Kafka, Flink, and a time-series database for IoT sensor data",
2451
+ "메모리 릭이 발생하는데 프로파일러에서 이벤트 루프 블로킹과 GC 지연이 동시에 나타나. 마이크로서비스 간 gRPC 연결 풀링도 의심되는 상황인데 원인 분석 방법을 단계별로 설명해줘",
2452
+ "대규모 분산 시스템에서 파티션 톨런스와 일관성을 동시에 보장하는 방법을 설계해줘",
2453
+ "실시간 추천 시스템을 위한 아키텍처를 설계해줘. 1초 이내에 개인화된 추천을 제공해야 해",
2454
+ "카프카 기반 이벤트 드리븐 아키텍처에서 순서 보장과 정확히 한 번 처리를 어떻게 보장할 수 있을까?",
2455
+ "마이크로서비스 간의 분산 트랜잭션을 사가 패턴으로 구현하는 방법을 단계별로 설명해줘",
2456
+ "Debug a memory leak in a production Node.js application where the heap grows indefinitely but garbage collection logs show normal behavior",
2457
+ "Investigate why our Kubernetes pods are being OOMKilled despite having memory limits set to 4GB and actual usage reported as 2GB",
2458
+ "Find the root cause of intermittent 500ms latency spikes in our PostgreSQL queries that happen every 15 minutes",
2459
+ "Design a multi-tenant SaaS platform with shared infrastructure but isolated data, supporting custom domains and white-labeling",
2460
+ "Implement a distributed task scheduler that guarantees at-least-once execution with idempotency support across multiple data centers"
2461
+ ];
2462
+ var TRAINING_Q = [
2463
+ "아까 그거 다시 해줘",
2464
+ "그거 좀 더 자세히 설명해줘",
2465
+ "아까 말한 거 그대로 해줘",
2466
+ "이거 수정해줘",
2467
+ "저거 어디 있지",
2468
+ "그거 어떻게 됐어",
2469
+ "위에꺼 다시 한번",
2470
+ "그거 그대로 해줘",
2471
+ "아까 한 거 다시",
2472
+ "그 코드 다시 보여줘",
2473
+ "저번에 한 거 기억나?",
2474
+ "그 부분 수정해줘",
2475
+ "Do that again",
2476
+ "What about the thing we discussed earlier",
2477
+ "Show me that again",
2478
+ "Can you fix that",
2479
+ "Change it like I said before",
2480
+ "Continue from where we left off",
2481
+ "That thing from earlier, do it again",
2482
+ "Remember what we were working on",
2483
+ "Go back to the previous one",
2484
+ "Make it like the other one",
2485
+ "The same thing but different",
2486
+ "Update the one from before",
2487
+ "그거 해줘",
2488
+ "이거 해줘",
2489
+ "저거 어때",
2490
+ "How about this one",
2491
+ "What about that",
2492
+ "Try the other approach",
2493
+ "Use the one I mentioned",
2494
+ "Fix the issue",
2495
+ "그냥 그거",
2496
+ "이건 어때",
2497
+ "Make it better",
2498
+ "Change it",
2499
+ "이거 수정해"
2500
+ ];
2501
// Regexes detecting Korean deictic / follow-up phrasings ("do that again",
// "fix that one", bare "this/that").
var Q_PATTERNS = [
  /^(아까|그거|저거|이거|그|위에|아래|저번|이전|전에).*(다시|해줘|해|보여|설명|수정|변경|삭제|추가|해봐)/,
  /^(그거|저거|이거|그|이|저)(만|만큼|대로|처럼|같이)?\s*(해줘|해|놔|둬|봐|어때|어떻게)/,
  /^(그거|저거|이거)\s*$/,
  /(아까|저번에|전에|위에서|앞에서|이전에).*(그|그거|그것|그때|했던|말한)/,
  /^(이거|저거|그거)(\s*.*)?$/
];
// Korean demonstrative / temporal words that imply a prior referent.
var DEICTIC_WORDS = new Set(["그거", "저거", "이거", "그것", "이것", "저것", "아까", "저번"]);
/**
 * Heuristic: true when the text looks like a context-dependent follow-up
 * request rather than a standalone prompt. Matches any Q pattern, or — for
 * very short inputs (< 20 chars) — the mere presence of a deictic word.
 */
function matchesQPattern(text) {
  const candidate = text.trim();
  if (Q_PATTERNS.some((pattern) => pattern.test(candidate))) {
    return true;
  }
  if (candidate.length >= 20) {
    return false;
  }
  for (const word of DEICTIC_WORDS) {
    if (candidate.includes(word)) {
      return true;
    }
  }
  return false;
}
2523
// Syntax fragments suggesting the text contains source code.
var CODE_PATTERN = /[{}();]|function |const |let |var |class |import |export |=>|\bdef \b|\bfn\b/;
// Verbs that usually introduce a technical task.
var TECH_TERMS = /\b(implement|create|design|architect|debug|refactor|migrate|deploy|build|write|develop)\b/i;
/**
 * Heuristic: very short text with no code fragments and no task verbs is
 * almost certainly a LIGHT request (greeting, acknowledgement, tiny question).
 */
function isLikelyLight(text) {
  const candidate = text.trim();
  return candidate.length <= 20 && !CODE_PATTERN.test(candidate) && !TECH_TERMS.test(candidate);
}
2532
// Cached in-flight/resolved promise for the feature-extraction pipeline, so
// concurrent callers share one model download.
var extractorPromise = null;
/**
 * Lazily loads the embedding pipeline for MODEL_ID and caches the promise.
 *
 * Fix: a failed load no longer poisons the cache. Previously a rejected
 * promise stayed in extractorPromise forever, so one transient network error
 * permanently disabled the local classifier; we now clear the cache on
 * rejection so the next call retries.
 *
 * @returns {Promise<Function>} the feature-extraction pipeline.
 */
function getExtractor() {
  if (!extractorPromise) {
    console.log("[clawmux] Loading embedding model...");
    extractorPromise = import_transformers.pipeline("feature-extraction", MODEL_ID).then((pipe) => {
      console.log("[clawmux] Embedding model loaded");
      return pipe;
    }).catch((error) => {
      // Allow a retry on the next call instead of caching the failure.
      extractorPromise = null;
      throw error;
    });
  }
  return extractorPromise;
}
2543
// Cached promise for the lazily-computed category centroids (see getCentroids).
var centroidsPromise = null;
/**
 * Embeds every text (E5 "query: " prefix, mean pooling, normalized) in batches
 * of BATCH_SIZE and returns the L2-normalized mean embedding — the centroid
 * used for nearest-centroid classification. Returns [] for an empty input.
 */
async function computeMeanEmbedding(texts) {
  const extractor = await getExtractor();
  const allEmbeddings = [];
  // Batch to bound per-call memory in the tokenizer/model.
  for (let i = 0; i < texts.length; i += BATCH_SIZE) {
    const batch = texts.slice(i, i + BATCH_SIZE).map((t) => E5_PREFIX + t);
    const output = await extractor(batch, { pooling: "mean", normalize: true });
    const list = output.tolist();
    for (const emb of list) {
      allEmbeddings.push(emb);
    }
  }
  if (allEmbeddings.length === 0)
    return [];
  const dim = allEmbeddings[0].length;
  const mean = new Array(dim).fill(0);
  // Accumulate each embedding's contribution (divided up front to keep the
  // running sums small).
  for (const emb of allEmbeddings) {
    for (let j = 0; j < dim; j++) {
      mean[j] += emb[j] / allEmbeddings.length;
    }
  }
  // Re-normalize: the mean of unit vectors is generally not unit length.
  const magnitude = Math.sqrt(mean.reduce((sum, v) => sum + v * v, 0));
  if (magnitude > 0) {
    for (let j = 0; j < dim; j++)
      mean[j] /= magnitude;
  }
  return mean;
}
2571
/**
 * Lazily computes the four category centroids (L/M/H/Q) from the training
 * texts, caching the promise so the work happens at most once.
 *
 * Fix: the cache is cleared when centroid computation fails. Previously a
 * rejected promise was cached forever, so a single transient model error
 * permanently broke classification; now the next call retries.
 *
 * @returns {Promise<Record<string, number[]>>} map of category code → centroid.
 */
function getCentroids() {
  if (!centroidsPromise) {
    centroidsPromise = (async () => {
      console.log("[clawmux] Computing category centroids...");
      const [cL, cM, cH, cQ] = await Promise.all([
        computeMeanEmbedding(TRAINING_LIGHT),
        computeMeanEmbedding(TRAINING_MEDIUM),
        computeMeanEmbedding(TRAINING_HEAVY),
        computeMeanEmbedding(TRAINING_Q)
      ]);
      console.log(`[clawmux] Centroids ready: L=${TRAINING_LIGHT.length} M=${TRAINING_MEDIUM.length} ` + `H=${TRAINING_HEAVY.length} Q=${TRAINING_Q.length} samples`);
      return { [CAT_L]: cL, [CAT_M]: cM, [CAT_H]: cH, [CAT_Q]: cQ };
    })().catch((error) => {
      // Allow a retry on the next call instead of caching the failure.
      centroidsPromise = null;
      throw error;
    });
  }
  return centroidsPromise;
}
2587
/**
 * Cosine similarity of two equal-length numeric vectors.
 * Returns 0 when either vector has zero magnitude.
 */
function cosineSimilarity(a, b) {
  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  const length = a.length;
  for (let idx = 0; idx < length; idx++) {
    const x = a[idx];
    const y = b[idx];
    dot += x * y;
    sumSqA += x * x;
    sumSqB += y * y;
  }
  const denom = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
  return denom > 0 ? dot / denom : 0;
}
2599
/**
 * Classifies the latest user message into a routing tier (LIGHT/MEDIUM/HEAVY)
 * via nearest-centroid search over E5 embeddings.
 *
 * Flow: embed the last user text → pick the closest of the L/M/H/Q centroids →
 * apply a short-text LIGHT heuristic → if the result (or a regex heuristic)
 * says the message is context-dependent ("Q"), re-embed with recent
 * conversation context and classify again without the Q centroid.
 *
 * Returns { tier, confidence, reasoning?, error? }; confidence is the winning
 * cosine similarity.
 */
async function classifyLocal(messages, config) {
  const userText = extractLastUserText(messages);
  if (!userText) {
    // Nothing to classify: fall back to the middle tier with zero confidence.
    return {
      tier: "MEDIUM",
      confidence: 0,
      reasoning: "No user message found",
      error: "No user message found in request"
    };
  }
  const centroids = await getCentroids();
  const extractor = await getExtractor();
  const output = await extractor([E5_PREFIX + userText], { pooling: "mean", normalize: true });
  const inputEmb = output.tolist()[0];
  // Nearest-centroid search across all four categories.
  let bestCat = CAT_M;
  let bestSim = -Infinity;
  for (const [cat, centroid] of Object.entries(centroids)) {
    const sim = cosineSimilarity(inputEmb, centroid);
    if (sim > bestSim) {
      bestSim = sim;
      bestCat = cat;
    }
  }
  // Override: very short non-technical text is LIGHT — unless it looks like a
  // context-dependent (Q) request, which still needs re-classification.
  if (isLikelyLight(userText) && bestCat !== CAT_Q) {
    bestCat = CAT_L;
    bestSim = Math.max(bestSim, 0.7);
  }
  const heuristicQ = matchesQPattern(userText);
  if (bestCat === CAT_Q || heuristicQ) {
    // Context-dependent request: embed the message together with the last N
    // conversation turns, then classify again excluding the Q centroid.
    const contextText = buildContextText(messages, userText, config?.contextMessages ?? 10);
    const ctxOutput = await extractor([E5_PREFIX + contextText], { pooling: "mean", normalize: true });
    const contextEmb = ctxOutput.tolist()[0];
    let reBestCat = CAT_M;
    let reBestSim = -Infinity;
    for (const [cat, centroid] of Object.entries(centroids)) {
      if (cat === CAT_Q)
        continue;
      const sim = cosineSimilarity(contextEmb, centroid);
      if (sim > reBestSim) {
        reBestSim = sim;
        reBestCat = cat;
      }
    }
    const tier2 = TIER_MAP[reBestCat] ?? "MEDIUM";
    return {
      tier: tier2,
      confidence: reBestSim,
      reasoning: `Re-classified with context (initial: Q, heuristic: ${heuristicQ})`
    };
  }
  const tier = TIER_MAP[bestCat] ?? "MEDIUM";
  return { tier, confidence: bestSim };
}
2652
/**
 * Returns the text of the most recent user message: the string content as-is,
 * or the space-joined text blocks of array content. A user message whose
 * array content has no text blocks is skipped and the scan continues to
 * earlier messages. Returns undefined when no user text exists.
 */
function extractLastUserText(messages) {
  for (let i = messages.length - 1; i >= 0; i--) {
    const msg = messages[i];
    if (msg.role !== "user") {
      continue;
    }
    const { content } = msg;
    if (typeof content === "string") {
      return content;
    }
    if (Array.isArray(content)) {
      const texts = content
        .filter((block) => block.type === "text" && block.text)
        .map((block) => block.text);
      if (texts.length > 0) {
        return texts.join(" ");
      }
    }
    // No usable text in this user message — keep scanning backwards.
  }
  return undefined;
}
2673
/**
 * Builds a newline-joined "[role]: text" transcript from the last
 * contextCount user/assistant messages. Messages whose content is neither a
 * string nor an array are skipped. Appends "[user]: currentText" when the
 * final transcript line does not already contain the current text.
 */
function buildContextText(allMessages, currentText, contextCount) {
  const recent = allMessages
    .filter((m) => m.role === "user" || m.role === "assistant")
    .slice(-contextCount);
  const lines = [];
  for (const msg of recent) {
    let text;
    if (typeof msg.content === "string") {
      text = msg.content;
    } else if (Array.isArray(msg.content)) {
      text = msg.content.filter((b) => b.type === "text" && b.text).map((b) => b.text).join(" ");
    } else {
      continue;
    }
    lines.push(`[${msg.role}]: ${text}`);
  }
  const tail = lines[lines.length - 1];
  if (!tail || !tail.includes(currentText)) {
    lines.push(`[user]: ${currentText}`);
  }
  return lines.join("\n");
}
2695
+
2696
+ // src/openclaw/auth-resolver.ts
2697
// Well-known environment variable per provider id.
var PROVIDER_ENV_VARS = {
  anthropic: "ANTHROPIC_API_KEY",
  openai: "OPENAI_API_KEY",
  google: "GEMINI_API_KEY",
  gemini: "GEMINI_API_KEY",
  zai: "ZAI_API_KEY",
  aws: "AWS_ACCESS_KEY_ID",
  bedrock: "AWS_ACCESS_KEY_ID"
};
/**
 * Looks up a provider's API key from the conventional environment variable.
 * Tries an exact provider-id match first, then the first known provider id
 * the given id starts with (e.g. "openai-custom" → OPENAI_API_KEY).
 * Returns undefined when no mapping (or no env value) exists.
 */
function getEnvFallback(provider) {
  const direct = PROVIDER_ENV_VARS[provider];
  if (direct) {
    return process.env[direct];
  }
  const prefixed = Object.entries(PROVIDER_ENV_VARS).find(([key]) => provider.startsWith(key));
  return prefixed ? process.env[prefixed[1]] : undefined;
}
2717
/**
 * Produces the auth header for an API key based on the provider's wire API.
 * Unknown/missing APIs default to a Bearer Authorization header.
 * Bedrock gets a placeholder SigV4 header (real signing happens elsewhere).
 */
function formatAuth(apiKey, providerConfig) {
  const api = providerConfig?.api ?? "";
  switch (api) {
    case "anthropic-messages":
      return { apiKey, headerName: "x-api-key", headerValue: apiKey };
    case "google-generative-ai":
      return { apiKey, headerName: "x-goog-api-key", headerValue: apiKey };
    case "bedrock-converse-stream":
      return {
        apiKey,
        headerName: "Authorization",
        headerValue: "AWS4-HMAC-SHA256 Credential=placeholder"
      };
    case "openai-completions":
    case "openai-responses":
    default:
      return { apiKey, headerName: "Authorization", headerValue: `Bearer ${apiKey}` };
  }
}
2737
// APIs that need no credentials (local runtimes).
var NO_AUTH_APIS = new Set(["ollama"]);
/**
 * Resolves the API key and auth header for a provider.
 * Lookup order: matching auth profile (apiKey or token, possibly an env-var
 * reference) → provider config's own apiKey → well-known env var fallback.
 * Returns undefined when no credential can be found.
 */
function resolveApiKey(provider, openclawConfig, authProfiles) {
  const providerConfig = getProviderConfig(provider, openclawConfig);
  const api = providerConfig?.api ?? "";
  if (NO_AUTH_APIS.has(api)) {
    // Local runtime: placeholder key, no auth header sent.
    return { apiKey: "ollama-local", headerName: "", headerValue: "" };
  }
  // 1) Explicit auth profiles for this provider.
  for (const profile of authProfiles) {
    if (profile.provider === provider) {
      const key = profile.apiKey ?? profile.token;
      if (key) {
        // Key may be a "${VAR}"-style env reference; skip if it resolves empty.
        const resolved = resolveEnvVar(key);
        if (resolved) {
          return formatAuth(resolved, providerConfig);
        }
      }
    }
  }
  // 2) apiKey configured directly on the provider entry.
  if (providerConfig?.apiKey) {
    const resolved = resolveEnvVar(providerConfig.apiKey);
    if (resolved) {
      return formatAuth(resolved, providerConfig);
    }
  }
  // 3) Conventional environment variable (e.g. OPENAI_API_KEY).
  const envKey = getEnvFallback(provider);
  if (envKey) {
    return formatAuth(envKey, providerConfig);
  }
  return;
}
2767
+
2768
+ // src/adapters/stream-transformer.ts
2769
// Shared codecs for translating streamed bytes to/from text.
var encoder = new TextEncoder;
var decoder = new TextDecoder;
/**
 * TransformStream translating streamed frames between two adapter wire
 * formats. Identity pass-through when both adapters speak the same API, or
 * when either adapter lacks the stream hooks. Frames are delimited by a blank
 * line; a partial trailing frame is buffered across chunks and translated at
 * stream end.
 */
function createStreamTranslator(sourceAdapter, targetAdapter) {
  if (sourceAdapter.apiType === targetAdapter.apiType) {
    return new TransformStream;
  }
  let buffer = "";
  return new TransformStream({
    transform(chunk, controller) {
      // Without both parse + build hooks, forward bytes untouched.
      if (!sourceAdapter.parseStreamChunk || !targetAdapter.buildStreamChunk) {
        controller.enqueue(chunk);
        return;
      }
      // stream:true keeps multi-byte sequences split across chunks intact.
      buffer += decoder.decode(chunk, { stream: true });
      let delimiterIndex;
      // Translate every complete frame currently in the buffer.
      while ((delimiterIndex = buffer.indexOf(`

`)) !== -1) {
        const frame = buffer.slice(0, delimiterIndex);
        buffer = buffer.slice(delimiterIndex + 2);
        if (frame.trim() === "")
          continue;
        const events = sourceAdapter.parseStreamChunk(frame);
        for (const event of events) {
          const translated = targetAdapter.buildStreamChunk(event);
          controller.enqueue(encoder.encode(translated));
        }
      }
    },
    flush(controller) {
      // Translate any partial frame left when the upstream stream ends.
      if (buffer.trim() !== "" && sourceAdapter.parseStreamChunk && targetAdapter.buildStreamChunk) {
        const events = sourceAdapter.parseStreamChunk(buffer);
        for (const event of events) {
          const translated = targetAdapter.buildStreamChunk(event);
          controller.enqueue(encoder.encode(translated));
        }
      }
    }
  });
}
2809
/**
 * Content-Type for a streaming response in the target adapter's wire format.
 * Unknown API types default to SSE ("text/event-stream").
 */
function getStreamContentType(adapter) {
  const contentTypes = {
    "anthropic-messages": "text/event-stream",
    "openai-completions": "text/event-stream",
    "openai-responses": "text/event-stream",
    "google-generative-ai": "application/json",
    "ollama": "application/x-ndjson",
    "bedrock-converse-stream": "application/vnd.amazon.eventstream"
  };
  return contentTypes[adapter.apiType] ?? "text/event-stream";
}
2825
/**
 * Translates an upstream response from the source adapter's format to the
 * target adapter's. Same API type → pass through unchanged; otherwise
 * dispatches to the streaming or non-streaming translator.
 */
async function translateResponse(sourceAdapter, targetAdapter, upstreamResponse, streaming) {
  if (sourceAdapter.apiType === targetAdapter.apiType) {
    return upstreamResponse;
  }
  return streaming
    ? translateStreamingResponse(sourceAdapter, targetAdapter, upstreamResponse)
    : translateNonStreamingResponse(sourceAdapter, targetAdapter, upstreamResponse);
}
2834
/**
 * Translates a buffered JSON response: parse with the source adapter,
 * rebuild with the target adapter, preserving status and pass-through
 * headers. Falls back to the raw response when either hook is missing.
 */
async function translateNonStreamingResponse(sourceAdapter, targetAdapter, upstreamResponse) {
  if (!sourceAdapter.parseResponse || !targetAdapter.buildResponse) {
    return upstreamResponse;
  }
  const upstreamBody = await upstreamResponse.json();
  const canonical = sourceAdapter.parseResponse(upstreamBody);
  const rebuilt = targetAdapter.buildResponse(canonical);
  const headers = copyRelevantHeaders(upstreamResponse.headers);
  headers.set("content-type", "application/json");
  return new Response(JSON.stringify(rebuilt), {
    status: upstreamResponse.status,
    headers
  });
}
2848
/**
 * Translates a streaming response by piping its body through a stream
 * translator. Falls back to the raw response when there is no body or either
 * adapter lacks the stream hooks.
 */
function translateStreamingResponse(sourceAdapter, targetAdapter, upstreamResponse) {
  const canTranslate =
    upstreamResponse.body &&
    sourceAdapter.parseStreamChunk &&
    targetAdapter.buildStreamChunk;
  if (!canTranslate) {
    return upstreamResponse;
  }
  const translatedBody = upstreamResponse.body.pipeThrough(
    createStreamTranslator(sourceAdapter, targetAdapter)
  );
  const headers = copyRelevantHeaders(upstreamResponse.headers);
  headers.set("content-type", getStreamContentType(targetAdapter));
  return new Response(translatedBody, {
    status: upstreamResponse.status,
    headers
  });
}
2864
/**
 * Copies only safe pass-through headers (cache control, request id, rate
 * limits) from an upstream response into a fresh Headers object; everything
 * else is dropped.
 */
function copyRelevantHeaders(source) {
  const PASSTHROUGH_NAMES = [
    "cache-control",
    "x-request-id",
    "x-ratelimit-limit",
    "x-ratelimit-remaining",
    "x-ratelimit-reset"
  ];
  const result = new Headers;
  for (const name of PASSTHROUGH_NAMES) {
    const value = source.get(name);
    if (value !== null) {
      result.set(name, value);
    }
  }
  return result;
}
2881
+
2882
+ // src/compression/session-store.ts
2883
/**
 * djb2 string hash over UTF-16 code units, returned as an unsigned 32-bit
 * integer.
 */
function djb2Hash(str) {
  let hash = 5381;
  for (let index = 0; index < str.length; index++) {
    hash = ((hash << 5) + hash + str.charCodeAt(index)) | 0;
  }
  return hash >>> 0;
}
/**
 * Derives a stable session id from the first user message's content
 * (stringifying non-string content). Returns "empty-session" when there is
 * no user message at all.
 */
function generateSessionId(messages) {
  const firstUser = messages.find((m) => m.role === "user");
  if (!firstUser) {
    return "empty-session";
  }
  const key = typeof firstUser.content === "string"
    ? firstUser.content
    : JSON.stringify(firstUser.content);
  return `session-${djb2Hash(key)}`;
}
2897
/**
 * In-memory session store with LRU eviction, capped at maxSessions entries.
 * Every read/write refreshes the session's lastAccess timestamp; when the
 * store is full, the least-recently-accessed session is evicted before a new
 * one is inserted.
 */
function createSessionStore(maxSessions = 500) {
  const sessions = new Map;
  // Evict the least-recently-accessed session once the cap is reached.
  const evictIfFull = () => {
    if (sessions.size < maxSessions)
      return;
    let victimId = "";
    let victimAccess = Infinity;
    for (const [id, session] of sessions) {
      if (session.lastAccess < victimAccess) {
        victimAccess = session.lastAccess;
        victimId = id;
      }
    }
    if (victimId)
      sessions.delete(victimId);
  };
  // Refresh the LRU timestamp and hand the session back.
  const touch = (session) => {
    session.lastAccess = Date.now();
    return session;
  };
  return {
    get(id) {
      const session = sessions.get(id);
      return session ? touch(session) : undefined;
    },
    getOrCreate(id, messages) {
      const existing = sessions.get(id);
      if (existing) {
        return touch(existing);
      }
      evictIfFull();
      const fresh = {
        id,
        messages,
        tokenCount: 0,
        compressionState: "idle",
        lastAccess: Date.now()
      };
      sessions.set(id, fresh);
      return fresh;
    },
    set(id, session) {
      if (!sessions.has(id)) {
        evictIfFull();
      }
      if (session.lastAccess === 0) {
        session.lastAccess = Date.now();
      }
      sessions.set(id, session);
    },
    update(id, updates) {
      const session = sessions.get(id);
      if (!session)
        return;
      Object.assign(session, updates);
      return touch(session);
    },
    delete(id) {
      return sessions.delete(id);
    },
    size() {
      return sessions.size;
    },
    has(id) {
      return sessions.has(id);
    }
  };
}
2966
+
2967
+ // src/compression/worker.ts
2968
// Marker prepended to the synthetic user message that carries a summary.
var SUMMARY_PREFIX = "[Summary of previous conversation]";
/**
 * Flattens message content to plain text: strings pass through, arrays of
 * blocks yield their text blocks joined by newlines, anything else is
 * JSON-stringified.
 */
function messageContentToString2(content) {
  if (typeof content === "string") {
    return content;
  }
  if (!Array.isArray(content)) {
    return JSON.stringify(content);
  }
  const texts = [];
  for (const block of content) {
    if (block.type === "text" && typeof block.text === "string") {
      texts.push(block.text);
    }
  }
  return texts.join("\n");
}
2978
/**
 * Estimated token cost of one message: a fixed per-message framing overhead
 * plus the estimated tokens of its flattened content.
 */
function estimateMessageTokens(msg) {
  // Fixed overhead covering role/formatting tokens per message.
  const MESSAGE_OVERHEAD2 = 4;
  const contentTokens = estimateTokens(messageContentToString2(msg.content));
  return MESSAGE_OVERHEAD2 + contentTokens;
}
2982
/**
 * Builds the two-message prompt (system instructions + flattened transcript)
 * sent to the compression model to summarize a conversation down to roughly
 * targetTokens tokens.
 */
function buildCompressionPrompt(messages, targetTokens) {
  const transcript = messages
    .map((m) => `${m.role}: ${messageContentToString2(m.content)}`)
    .join("\n\n");
  const instructions = [
    "You are a conversation summarizer. Produce a concise summary of the conversation below.",
    `Target length: approximately ${targetTokens} tokens.`,
    "Preserve: key decisions, code snippets, technical details, and action items.",
    "Format: plain text paragraphs. Start with the most important context."
  ].join("\n");
  return [
    { role: "system", content: instructions },
    { role: "user", content: transcript }
  ];
}
3003
/**
 * Produces the two-message replacement prefix for a compressed conversation:
 * a user message carrying the tagged summary, and a fixed assistant
 * acknowledgement so the transcript stays well-formed.
 */
function buildCompressedMessages(summary) {
  const summaryMessage = {
    role: "user",
    content: `${SUMMARY_PREFIX}\n${summary}`
  };
  const acknowledgement = {
    role: "assistant",
    content: "Understood. I have the context from our previous conversation. How can I help you continue?"
  };
  return [summaryMessage, acknowledgement];
}
3016
/**
 * Drops the oldest messages until the conversation fits within targetTokens,
 * preserving chronological order. A leading summary message (a user message
 * starting with SUMMARY_PREFIX, left by a prior compression) is always kept.
 */
function truncateToFit(messages, targetTokens) {
  const result = [];
  let usedTokens = 0;
  const firstMsg = messages[0];
  const firstContent = firstMsg ? messageContentToString2(firstMsg.content) : "";
  const hasSystemPrefix = firstMsg?.role === "user" && firstContent.startsWith(SUMMARY_PREFIX);
  if (hasSystemPrefix && firstMsg) {
    // Retain the summary message and charge its tokens up front.
    const tokens = estimateMessageTokens(firstMsg);
    result.push(firstMsg);
    usedTokens += tokens;
  }
  // Walk backwards from the newest message, keeping messages while they fit.
  const tail = [];
  const startIdx = hasSystemPrefix ? 1 : 0;
  for (let i = messages.length - 1; i >= startIdx; i--) {
    const msg = messages[i];
    const tokens = estimateMessageTokens(msg);
    if (usedTokens + tokens > targetTokens)
      break;
    // unshift keeps the kept suffix in original chronological order.
    tail.unshift(msg);
    usedTokens += tokens;
  }
  return [...result, ...tail];
}
3039
/**
 * Background conversation-compression worker.
 *
 * Lifecycle per session: shouldCompress() detects when a session crosses the
 * token threshold → triggerCompression() fires an async summarization job
 * (bounded by maxConcurrent, raced against timeoutMs) → applyCompression()
 * later swaps the precomputed summary in for the pre-snapshot messages.
 * On timeout the worker falls back to simple truncation; on other errors the
 * session returns to "idle" so a later request can retry.
 */
function createCompressionWorker(config) {
  let activeJobs = 0;
  let completedJobs = 0;
  let failedJobs = 0;
  // True when the session exceeds the token threshold and no job is running.
  function shouldCompress(session) {
    const thresholdTokens = config.threshold * config.contextWindow;
    return session.tokenCount >= thresholdTokens && session.compressionState === "idle";
  }
  // Fire-and-forget: start a summarization job for this session.
  function triggerCompression(session, sessionStore, makeApiCall) {
    if (activeJobs >= config.maxConcurrent)
      return;
    // Mark the session busy and remember how many messages the summary covers.
    session.compressionState = "computing";
    session.snapshotIndex = session.messages.length;
    sessionStore.update(session.id, {
      compressionState: "computing",
      snapshotIndex: session.messages.length
    });
    activeJobs++;
    const targetTokens = config.targetRatio * config.contextWindow;
    const promptMessages = buildCompressionPrompt(session.messages, targetTokens);
    const sessionId = session.id;
    // Snapshot for the timeout fallback; session.messages may change meanwhile.
    const originalMessages = [...session.messages];
    // Race the summarization call against a timeout.
    const jobPromise = Promise.race([
      makeApiCall(config.compressionModel, promptMessages),
      new Promise((_resolve, reject) => {
        setTimeout(() => reject(new Error("compression_timeout")), config.timeoutMs);
      })
    ]);
    jobPromise.then((summaryText) => {
      const compressed = buildCompressedMessages(summaryText);
      sessionStore.update(sessionId, {
        compressionState: "ready",
        compressedSummary: summaryText,
        compressedMessages: compressed
      });
      // Mirror the store's state back onto the caller-held session object.
      const current = sessionStore.get(sessionId);
      if (current) {
        session.compressionState = current.compressionState;
        session.compressedSummary = current.compressedSummary;
        session.compressedMessages = current.compressedMessages;
      }
      activeJobs--;
      completedJobs++;
    }).catch((error) => {
      if (error.message === "compression_timeout") {
        // Timeout fallback: truncate the snapshot instead of summarizing.
        // Counted as completed because a usable result was still produced.
        const truncated = truncateToFit(originalMessages, targetTokens);
        sessionStore.update(sessionId, {
          compressionState: "ready",
          compressedMessages: truncated
        });
        const current = sessionStore.get(sessionId);
        if (current) {
          session.compressionState = current.compressionState;
          session.compressedMessages = current.compressedMessages;
        }
        activeJobs--;
        completedJobs++;
      } else {
        // Any other failure: reset to idle so a later request can retry.
        sessionStore.update(sessionId, { compressionState: "idle" });
        session.compressionState = "idle";
        activeJobs--;
        failedJobs++;
        console.error(`[CompressionWorker] Job failed for session ${sessionId}:`, error.message);
      }
    });
  }
  // Swap in the compressed prefix, keeping messages that arrived after the
  // snapshot, and reset the session's compression bookkeeping.
  function applyCompression(session) {
    if (session.compressionState !== "ready" || !session.compressedMessages) {
      return;
    }
    const compressed = session.compressedMessages;
    // Fallback of length-3 if the snapshot index was lost — TODO confirm this
    // heuristic matches how many messages typically arrive mid-compression.
    const snapshotIdx = session.snapshotIndex ?? session.messages.length - 3;
    const postSnapshotMessages = session.messages.slice(snapshotIdx);
    const combined = [...compressed, ...postSnapshotMessages];
    session.compressionState = "idle";
    session.compressedMessages = undefined;
    session.compressedSummary = undefined;
    session.snapshotIndex = undefined;
    return combined;
  }
  // Job counters for monitoring endpoints.
  function getStats() {
    return { activeJobs, completedJobs, failedJobs };
  }
  return {
    shouldCompress,
    triggerCompression,
    applyCompression,
    getStats
  };
}
3129
+
3130
+ // src/proxy/compression-integration.ts
3131
// Requests at or above this fraction of the context window are truncated
// immediately instead of waiting for background compression.
var HARD_CEILING_RATIO = 0.9;
/**
 * Projects proxy messages down to the minimal { role, content } shape used by
 * the token estimator.
 */
function messagesToTokenMessages(messages) {
  return messages.map(({ role, content }) => ({ role, content }));
}
3138
/**
 * Pulls the assistant text out of a JSON response body. Supports Anthropic
 * shape (content: text blocks, joined by newlines) and OpenAI shape
 * (choices[0].message.content). Falls back to re-serialized JSON for other
 * shapes, and to the raw body when it isn't valid JSON (or access throws).
 */
function extractResponseText(responseBody) {
  try {
    const parsed = JSON.parse(responseBody);
    if (Array.isArray(parsed.content)) {
      const texts = parsed.content
        .filter((b) => b.type === "text" && typeof b.text === "string")
        .map((b) => b.text);
      if (texts.length > 0) {
        return texts.join("\n");
      }
    }
    if (Array.isArray(parsed.choices)) {
      const message = parsed.choices[0]?.message;
      if (message && typeof message.content === "string") {
        return message.content;
      }
    }
    return JSON.stringify(parsed);
  } catch {
    return responseBody;
  }
}
3162
/**
 * Returns an async (model, messages) → summary-text function used by the
 * compression worker. Builds a non-streaming upstream request via the
 * adapter, posts it with fetch, and extracts the assistant text from the
 * response. Throws on a non-OK HTTP status.
 */
function createMakeApiCall(adapter, compressionModel, baseUrl, auth) {
  // Strip a leading "provider/" prefix from the configured model id, if any.
  const slashIndex = compressionModel.indexOf("/");
  const actualModelId = slashIndex === -1
    ? compressionModel
    : compressionModel.slice(slashIndex + 1);
  return async (model, messages) => {
    const syntheticParsed = {
      model,
      messages: messages.map(({ role, content }) => ({ role, content })),
      stream: false,
      maxTokens: 4096,
      rawBody: {
        model,
        messages,
        stream: false,
        max_tokens: 4096
      }
    };
    const upstream = adapter.buildUpstreamRequest(syntheticParsed, actualModelId, baseUrl, auth);
    const response = await fetch(upstream.url, {
      method: upstream.method,
      headers: upstream.headers,
      body: upstream.body
    });
    const body = await response.text();
    if (!response.ok) {
      throw new Error(`Compression API call failed: ${response.status} ${body}`);
    }
    return extractResponseText(body);
  };
}
3190
// Creates the compression middleware. It keeps a bounded per-session store of
// conversation messages, swaps in background-produced compressed transcripts
// before a request is forwarded upstream, and schedules new background
// compressions after successful responses.
//
// config: { threshold, targetRatio, compressionModel, resolvedContextWindow,
//           maxSessions?, statsTracker? }
function createCompressionMiddleware(config) {
  const contextWindow = config.resolvedContextWindow;
  // Session store is bounded at maxSessions (default 500) entries.
  const sessionStore = createSessionStore(config.maxSessions ?? 500);
  const worker = createCompressionWorker({
    threshold: config.threshold,
    targetRatio: config.targetRatio,
    compressionModel: config.compressionModel,
    contextWindow,
    maxConcurrent: 2,  // at most 2 background compressions in flight
    timeoutMs: 60000   // per-compression timeout: 60s
  });
  // Called before forwarding a request upstream. Returns the (possibly
  // compressed or truncated) message list plus a wasCompressed flag so the
  // caller knows to rebuild the raw request body.
  // NOTE(review): `adapter` is accepted but unused here — possibly kept for
  // interface symmetry with afterResponse; confirm before removing.
  function beforeForward(parsed, adapter) {
    const messages = parsed.messages;
    if (messages.length <= 1) {
      // Nothing worth compressing in a 0/1-message conversation.
      return { messages, wasCompressed: false };
    }
    const sessionId = generateSessionId(messages);
    const session = sessionStore.getOrCreate(sessionId, messages);
    // If a background compression finished, applyCompression yields the
    // compressed replacement transcript; otherwise it is falsy.
    const compressed = worker.applyCompression(session);
    if (compressed) {
      // Consume the compression result: store the compressed transcript and
      // reset all compression bookkeeping back to idle.
      sessionStore.update(sessionId, {
        messages: compressed,
        compressionState: "idle",
        compressedMessages: undefined,
        compressedSummary: undefined,
        snapshotIndex: undefined
      });
      const originalTokens = estimateMessagesTokens(messagesToTokenMessages(messages));
      const compressedTokens = estimateMessagesTokens(messagesToTokenMessages(compressed));
      if (config.statsTracker) {
        config.statsTracker.recordCompression(originalTokens, compressedTokens);
      }
      console.log(`[compression] Applied compression: ${originalTokens} → ${compressedTokens} tokens (${((1 - compressedTokens / originalTokens) * 100).toFixed(0)}% reduction)`);
      return { messages: compressed, wasCompressed: true };
    }
    // No compression ready: enforce the hard ceiling by lossy truncation so a
    // runaway conversation cannot blow past the model's context window.
    const tokenCount = estimateMessagesTokens(messagesToTokenMessages(messages));
    const hardCeilingTokens = HARD_CEILING_RATIO * contextWindow;
    if (tokenCount >= hardCeilingTokens) {
      const targetTokens = config.targetRatio * contextWindow;
      const truncated = truncateToFit(messages, targetTokens);
      console.log(`[compression] Hard ceiling hit (${tokenCount} tokens >= ${Math.round(hardCeilingTokens)}), truncating to ${Math.round(targetTokens)} tokens`);
      return { messages: truncated, wasCompressed: true };
    }
    return { messages, wasCompressed: false };
  }
  // Called after a successful upstream response: records the latest message
  // list and token count for the session, then kicks off a background
  // compression if the worker's threshold is crossed.
  function afterResponse(parsed, adapter, baseUrl, auth) {
    const messages = parsed.messages;
    if (messages.length <= 1)
      return;
    const sessionId = generateSessionId(messages);
    // NOTE(review): return value unused; getOrCreate is called for its
    // ensure-the-session-exists side effect before update() below.
    const session = sessionStore.getOrCreate(sessionId, messages);
    const tokenCount = estimateMessagesTokens(messagesToTokenMessages(messages));
    sessionStore.update(sessionId, {
      messages: [...messages],  // defensive copy; caller may mutate its array
      tokenCount
    });
    const updatedSession = sessionStore.get(sessionId);
    if (!updatedSession)
      return;
    if (worker.shouldCompress(updatedSession)) {
      const makeApiCall = createMakeApiCall(adapter, config.compressionModel, baseUrl, auth);
      // Fire-and-forget: the worker runs the compression in the background and
      // writes its result back into sessionStore when done.
      worker.triggerCompression(updatedSession, sessionStore, makeApiCall);
      console.log(`[compression] Triggered background compression for session ${sessionId} (${tokenCount} tokens)`);
    }
  }
  // One-shot consumer of a finished compression summary. If the session has a
  // summary in "ready" state, returns { summary, recentMessages (last 3) } and
  // resets the session's compression state; otherwise returns undefined.
  function getSummaryForSession(messages) {
    if (messages.length <= 1)
      return;
    const sessionId = generateSessionId(messages);
    const session = sessionStore.get(sessionId);
    if (!session)
      return;
    if (session.compressionState === "ready" && session.compressedSummary) {
      const recentMessages = session.messages.slice(-3);
      const summary = session.compressedSummary;
      // Clear the consumed summary so it is only served once.
      sessionStore.update(sessionId, {
        compressionState: "idle",
        compressedMessages: undefined,
        compressedSummary: undefined
      });
      return { summary, recentMessages };
    }
    return;
  }
  return {
    beforeForward,
    afterResponse,
    getSessionStore: () => sessionStore,
    getWorker: () => worker,
    getSummaryForSession
  };
}
3282
+
3283
// src/proxy/pipeline.ts
// Register the built-in Anthropic adapter at module load time.
registerAdapter(new AnthropicAdapter());
3285
// Resolves a "provider/model-id" string against openclawConfig.models.providers.
// Returns { providerName, baseUrl, apiType } for a known provider, or
// undefined when the string is malformed or the provider is not configured.
function findProviderForModel(modelString, openclawConfig) {
  const providers = openclawConfig.models?.providers;
  if (!providers)
    return undefined;
  const slash = modelString.indexOf("/");
  // Require a non-empty provider prefix AND a non-empty model id.
  if (slash <= 0 || slash === modelString.length - 1)
    return undefined;
  const providerName = modelString.slice(0, slash);
  const providerConfig = providers[providerName];
  if (!providerConfig)
    return undefined;
  return {
    providerName,
    baseUrl: providerConfig.baseUrl ?? "",
    apiType: providerConfig.api ?? ""
  };
}
3301
// Wraps an error message in a JSON HTTP Response with the given status code.
function jsonErrorResponse(message, status) {
  const payload = JSON.stringify({ error: message });
  const headers = { "content-type": "application/json" };
  return new Response(payload, { status, headers });
}
3307
// Core request pipeline for one LLM API call:
//   parse → compaction check → compression → local classify/route →
//   auth resolve → upstream fetch → optional cross-provider translation.
// Returns a Response in the caller's API dialect, or a JSON error response.
async function handleApiRequest(req, body, apiType, config, openclawConfig, authProfiles, compressionMiddleware) {
  const adapter = getAdapter(apiType);
  if (!adapter) {
    return jsonErrorResponse(`Unknown API type: ${apiType}`, 500);
  }
  const parsed = adapter.parseRequest(body);
  // Flatten request headers into a plain object for compaction detection.
  const compactionHeaders = {};
  req.headers.forEach((value, key) => {
    compactionHeaders[key] = value;
  });
  const compaction = detectCompaction(compactionHeaders, parsed.messages);
  if (compaction.isCompaction && compressionMiddleware) {
    // If a pre-computed summary is ready, answer the compaction request
    // locally instead of paying for an upstream call.
    const summaryData = compressionMiddleware.getSummaryForSession(parsed.messages);
    if (summaryData) {
      const syntheticParsed = buildSyntheticSummaryResponse(summaryData.summary, summaryData.recentMessages, parsed.model);
      console.log(`[clawmux] Compaction detected (${compaction.detectedBy}) → returning synthetic response`);
      return buildSyntheticHttpResponse(syntheticParsed, adapter);
    }
    console.log(`[clawmux] Compaction detected but no summary available, forwarding to upstream`);
  }
  // Apply any ready compression (or hard-ceiling truncation) before forwarding;
  // the raw body must be rebuilt to match the replaced message list.
  let effectiveParsed = parsed;
  if (compressionMiddleware) {
    const { messages: compressedMessages, wasCompressed } = compressionMiddleware.beforeForward(parsed, adapter);
    if (wasCompressed) {
      const modifiedRawBody = adapter.modifyMessages(parsed.rawBody, compressedMessages);
      effectiveParsed = {
        ...parsed,
        messages: compressedMessages,
        rawBody: modifiedRawBody
      };
    }
  }
  // Classify the conversation locally and pick the configured model for that tier.
  const messages = effectiveParsed.messages;
  const classification = await classifyLocal(messages);
  const decision = {
    tier: classification.tier,
    model: config.routing.models[classification.tier],
    confidence: classification.confidence,
    overrideReason: classification.reasoning
  };
  // Resolve the provider for the chosen model; if unknown, fall back to a
  // same-host passthrough using the incoming request's API type.
  const lookup = findProviderForModel(decision.model, openclawConfig);
  let providerName;
  let baseUrl;
  let targetApiType;
  if (lookup) {
    providerName = lookup.providerName;
    baseUrl = lookup.baseUrl;
    targetApiType = lookup.apiType;
  } else {
    const reqUrl = new URL(req.url);
    baseUrl = `${reqUrl.protocol}//${reqUrl.host}`;
    providerName = apiType.split("-")[0];
    targetApiType = apiType;
  }
  const auth = resolveApiKey(providerName, openclawConfig, authProfiles);
  if (!auth) {
    return jsonErrorResponse(`No auth credentials found for provider: ${providerName}`, 502);
  }
  const authInfo = {
    apiKey: auth.apiKey,
    headerName: auth.headerName,
    headerValue: auth.headerValue
  };
  // Strip the "provider/" prefix; adapters expect the bare model id.
  const actualModelId = decision.model.split("/").slice(1).join("/");
  // Cross-provider routing: build the request with the target API's adapter and
  // translate the response back into the caller's dialect afterwards.
  const isCrossProvider = targetApiType !== "" && targetApiType !== apiType;
  const targetAdapter = isCrossProvider ? getAdapter(targetApiType) : undefined;
  const requestAdapter = targetAdapter ?? adapter;
  const upstream = requestAdapter.buildUpstreamRequest(effectiveParsed, actualModelId, baseUrl, authInfo);
  let upstreamResponse;
  try {
    upstreamResponse = await fetch(upstream.url, {
      method: upstream.method,
      headers: upstream.headers,
      body: upstream.body
    });
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    return jsonErrorResponse(`Upstream request failed: ${message}`, 502);
  }
  console.log(`[clawmux] [llm] ${decision.tier} → ${decision.model} | conf=${classification.confidence.toFixed(2)}${classification.reasoning ? ` | ${classification.reasoning}` : ""}`);
  if (compressionMiddleware && upstreamResponse.ok) {
    // NOTE(review): the ORIGINAL parsed request (not effectiveParsed) is
    // recorded here — presumably so session state tracks the client's full
    // history rather than the compressed transcript; confirm intent.
    compressionMiddleware.afterResponse(parsed, adapter, baseUrl, authInfo);
  }
  if (targetAdapter && upstreamResponse.ok) {
    return translateResponse(targetAdapter, adapter, upstreamResponse, effectiveParsed.stream);
  }
  // BUGFIX: fetch() transparently decompresses the upstream body but leaves
  // the original content-encoding/content-length headers intact. Forwarding
  // them verbatim hands the client a decoded body labelled as compressed
  // (and with the wrong length). Strip those headers — and transfer-encoding,
  // which the local server manages itself — before passing the body through.
  const responseHeaders = new Headers(upstreamResponse.headers);
  responseHeaders.delete("content-encoding");
  responseHeaders.delete("content-length");
  responseHeaders.delete("transfer-encoding");
  return new Response(upstreamResponse.body, {
    status: upstreamResponse.status,
    statusText: upstreamResponse.statusText,
    headers: responseHeaders
  });
}
3399
// Resolves the effective compression context window (the minimum across the
// routing tiers), logs each tier's model and window for operator visibility,
// and constructs the compression middleware from the loaded config.
function createResolvedCompressionMiddleware(config, openclawConfig, piAiCatalog, statsTracker) {
  const contextWindows = config.routing.contextWindows ?? {};
  const resolvedContextWindow = resolveCompressionContextWindow(config.routing.models, contextWindows, openclawConfig, piAiCatalog);
  for (const tier of ["LIGHT", "MEDIUM", "HEAVY"]) {
    const modelKey = config.routing.models[tier];
    if (!modelKey)
      continue;
    const window = resolveContextWindow(modelKey, contextWindows, openclawConfig, piAiCatalog);
    console.log(`[clawmux] ${tier} → ${modelKey} contextWindow=${window}`);
  }
  console.log(`[clawmux] Compression contextWindow=${resolvedContextWindow} (minimum across tiers)`);
  const { threshold, targetRatio, model } = config.compression;
  return createCompressionMiddleware({
    threshold,
    targetRatio: targetRatio ?? 0.6,  // default target: 60% of the window
    compressionModel: model,
    resolvedContextWindow,
    statsTracker
  });
}
3419
// Proxy route table: request path (or glob pattern) → the API dialect spoken
// on that route. Consumed by setupPipelineRoutes.
var ROUTE_MAPPINGS = Object.entries({
  "/v1/messages": "anthropic-messages",
  "/v1/chat/completions": "openai-completions",
  "/v1/responses": "openai-responses",
  "/v1beta/models/*": "google-generative-ai",
  "/api/chat": "ollama",
  "/model/*/converse-stream": "bedrock-converse-stream"
}).map(([key, apiType]) => ({ apiType, key }));
3427
// Registers one route handler per ROUTE_MAPPINGS entry. Each handler closes
// over the current config, auth profiles, and compression middleware, so a
// config reload re-registers fresh closures.
function setupPipelineRoutes(config, openclawConfig, authProfiles, compressionMiddleware) {
  const makeHandler = (apiType) => (req, body) =>
    handleApiRequest(req, body, apiType, config, openclawConfig, authProfiles, compressionMiddleware);
  for (const { apiType, key } of ROUTE_MAPPINGS) {
    setRouteHandler(key, makeHandler(apiType));
  }
}
3432
+
3433
+ // src/index.ts
3434
+ var import_node_path4 = require("node:path");
3435
// Process entry point: load and validate the clawmux config, read OpenClaw
// configuration/auth and the model catalog, wire the routing pipeline, start
// the proxy server, and hot-reload routes when the config file changes.
async function bootstrap() {
  // CLAWMUX_CONFIG overrides the default ./clawmux.json location.
  const configPath = process.env.CLAWMUX_CONFIG ? import_node_path4.resolve(process.env.CLAWMUX_CONFIG) : import_node_path4.resolve(process.cwd(), "clawmux.json");
  const result = await loadConfig(configPath);
  if (!result.valid) {
    console.error("[clawmux] Config errors:");
    for (const err of result.errors)
      console.error(` - ${err}`);
    process.exit(1);
  }
  const config = result.config;
  // These three reads are independent of one another — load them in parallel.
  const [openclawConfig, authProfiles, piAiCatalog] = await Promise.all([
    readOpenClawConfig(),
    readAuthProfiles(),
    loadPiAiCatalog()
  ]);
  // NOTE(review): no statsTracker is passed here, so compression stats are not
  // recorded — confirm whether that is intentional.
  const compressionMiddleware = createResolvedCompressionMiddleware(config, openclawConfig, piAiCatalog);
  setupPipelineRoutes(config, openclawConfig, authProfiles, compressionMiddleware);
  const port = parseInt(process.env.CLAWMUX_PORT ?? "3456", 10);
  const server = createServer({ port, host: "127.0.0.1" });
  server.start();
  console.log(`[clawmux] Proxy server running on http://127.0.0.1:${port}`);
  // On config change: drop all custom routes and re-register against the new
  // config. A fresh compression middleware is created, so in-flight session
  // state from the old middleware is discarded.
  const watcher = createConfigWatcher(configPath, (newConfig) => {
    console.log("[clawmux] Config reloaded, updating routes...");
    clearCustomHandlers();
    const newCompression = createResolvedCompressionMiddleware(newConfig, openclawConfig, piAiCatalog);
    setupPipelineRoutes(newConfig, openclawConfig, authProfiles, newCompression);
  });
  watcher.start();
}
3462
// Top-level failure handler. `err` is not guaranteed to be an Error instance
// (a rejected promise can carry any value), so normalize the message the same
// way handleApiRequest does before logging and exiting non-zero.
bootstrap().catch((err) => {
  const message = err instanceof Error ? err.message : String(err);
  console.error(`[clawmux] Fatal: ${message}`);
  process.exit(1);
});