@cognisos/liminal 2.3.0 → 2.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/dist/bin.js +1216 -223
  2. package/dist/bin.js.map +1 -1
  3. package/package.json +1 -1
package/dist/bin.js CHANGED
@@ -1,7 +1,7 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  // src/version.ts
4
- var VERSION = true ? "2.3.0" : "0.2.1";
4
+ var VERSION = true ? "2.4.0" : "0.2.1";
5
5
  var BANNER_LINES = [
6
6
  " ___ ___ _____ ______ ___ ________ ________ ___",
7
7
  "|\\ \\ |\\ \\|\\ _ \\ _ \\|\\ \\|\\ ___ \\|\\ __ \\|\\ \\",
@@ -21,15 +21,8 @@ function printBanner() {
21
21
  console.log();
22
22
  }
23
23
 
24
- // src/commands/init.ts
25
- import { createInterface as createInterface2 } from "readline/promises";
26
- import { stdin as stdin2, stdout as stdout2 } from "process";
27
- import { existsSync as existsSync2, readFileSync as readFileSync2, appendFileSync } from "fs";
28
- import { join as join2 } from "path";
29
- import { homedir as homedir2 } from "os";
30
-
31
24
  // src/config/loader.ts
32
- import { readFileSync, writeFileSync, mkdirSync, existsSync } from "fs";
25
+ import { readFileSync, writeFileSync, mkdirSync, existsSync, chmodSync } from "fs";
33
26
  import { dirname } from "path";
34
27
 
35
28
  // src/config/paths.ts
@@ -43,7 +36,7 @@ var LOG_FILE = join(LOG_DIR, "liminal.log");
43
36
 
44
37
  // src/config/schema.ts
45
38
  var DEFAULTS = {
46
- apiBaseUrl: "https://rsc-platform-production.up.railway.app",
39
+ apiBaseUrl: "https://api.cognisos.ai",
47
40
  upstreamBaseUrl: "https://api.openai.com",
48
41
  anthropicUpstreamUrl: "https://api.anthropic.com",
49
42
  port: 3141,
@@ -111,11 +104,15 @@ function saveConfig(config) {
111
104
  }
112
105
  }
113
106
  const merged = { ...existing, ...config };
114
- writeFileSync(CONFIG_FILE, JSON.stringify(merged, null, 2) + "\n", "utf-8");
107
+ writeFileSync(CONFIG_FILE, JSON.stringify(merged, null, 2) + "\n", { encoding: "utf-8", mode: 384 });
108
+ try {
109
+ chmodSync(CONFIG_FILE, 384);
110
+ } catch {
111
+ }
115
112
  }
116
113
  function ensureDirectories() {
117
- if (!existsSync(LIMINAL_DIR)) mkdirSync(LIMINAL_DIR, { recursive: true });
118
- if (!existsSync(LOG_DIR)) mkdirSync(LOG_DIR, { recursive: true });
114
+ if (!existsSync(LIMINAL_DIR)) mkdirSync(LIMINAL_DIR, { recursive: true, mode: 448 });
115
+ if (!existsSync(LOG_DIR)) mkdirSync(LOG_DIR, { recursive: true, mode: 448 });
119
116
  const configDir = dirname(CONFIG_FILE);
120
117
  if (!existsSync(configDir)) mkdirSync(configDir, { recursive: true });
121
118
  }
@@ -132,6 +129,99 @@ function maskApiKey(key) {
132
129
  return key.slice(0, 8) + "..." + key.slice(-4);
133
130
  }
134
131
 
132
+ // src/config/shell.ts
133
+ import { existsSync as existsSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2, appendFileSync } from "fs";
134
+ import { join as join2 } from "path";
135
+ import { homedir as homedir2 } from "os";
136
+ var LIMINAL_BLOCK_HEADER = "# Liminal \u2014 route AI tools through compression proxy";
137
+ function detectShellProfile() {
138
+ const shell = process.env.SHELL || "";
139
+ const home = homedir2();
140
+ if (shell.endsWith("/zsh")) {
141
+ return { name: "~/.zshrc", path: join2(home, ".zshrc") };
142
+ }
143
+ if (shell.endsWith("/bash")) {
144
+ const bashProfile = join2(home, ".bash_profile");
145
+ if (existsSync2(bashProfile)) {
146
+ return { name: "~/.bash_profile", path: bashProfile };
147
+ }
148
+ return { name: "~/.bashrc", path: join2(home, ".bashrc") };
149
+ }
150
+ const candidates = [
151
+ { name: "~/.zshrc", path: join2(home, ".zshrc") },
152
+ { name: "~/.bashrc", path: join2(home, ".bashrc") },
153
+ { name: "~/.profile", path: join2(home, ".profile") }
154
+ ];
155
+ for (const c of candidates) {
156
+ if (existsSync2(c.path)) return c;
157
+ }
158
+ return null;
159
+ }
160
+ function lineExistsInFile(filePath, line) {
161
+ if (!existsSync2(filePath)) return false;
162
+ try {
163
+ const content = readFileSync2(filePath, "utf-8");
164
+ return content.includes(line);
165
+ } catch {
166
+ return false;
167
+ }
168
+ }
169
+ function appendToShellProfile(profile, lines) {
170
+ const newLines = lines.filter((line) => !lineExistsInFile(profile.path, line));
171
+ if (newLines.length === 0) return [];
172
+ const block = [
173
+ "",
174
+ LIMINAL_BLOCK_HEADER,
175
+ ...newLines
176
+ ].join("\n") + "\n";
177
+ appendFileSync(profile.path, block, "utf-8");
178
+ return newLines;
179
+ }
180
+ function removeLiminalFromShellProfile(profile) {
181
+ if (!existsSync2(profile.path)) return [];
182
+ let content;
183
+ try {
184
+ content = readFileSync2(profile.path, "utf-8");
185
+ } catch {
186
+ return [];
187
+ }
188
+ const lines = content.split("\n");
189
+ const removed = [];
190
+ const kept = [];
191
+ for (const line of lines) {
192
+ if (line.trim() === LIMINAL_BLOCK_HEADER) {
193
+ removed.push(line);
194
+ continue;
195
+ }
196
+ if (isLiminalExportLine(line)) {
197
+ removed.push(line);
198
+ continue;
199
+ }
200
+ kept.push(line);
201
+ }
202
+ if (removed.length > 0) {
203
+ const cleaned = kept.join("\n").replace(/\n{3,}/g, "\n\n");
204
+ writeFileSync2(profile.path, cleaned, "utf-8");
205
+ }
206
+ return removed;
207
+ }
208
+ function isLiminalExportLine(line) {
209
+ const trimmed = line.trim();
210
+ if (!trimmed.startsWith("export ")) return false;
211
+ if (trimmed.includes("ANTHROPIC_BASE_URL=http://127.0.0.1:")) return true;
212
+ if (trimmed.includes("OPENAI_BASE_URL=http://127.0.0.1:")) return true;
213
+ return false;
214
+ }
215
+ function findLiminalExportsInProfile(profile) {
216
+ if (!existsSync2(profile.path)) return [];
217
+ try {
218
+ const content = readFileSync2(profile.path, "utf-8");
219
+ return content.split("\n").filter(isLiminalExportLine);
220
+ } catch {
221
+ return [];
222
+ }
223
+ }
224
+
135
225
  // src/ui/prompts.ts
136
226
  var ANSI = {
137
227
  HIDE_CURSOR: "\x1B[?25l",
@@ -190,23 +280,23 @@ function renderMultiSelect(options, cursorIndex, selected, message) {
190
280
  lines.push(` ${pointer} ${box} ${label}${desc}`);
191
281
  }
192
282
  lines.push("");
193
- lines.push(` ${ANSI.DIM}Space to toggle, Enter to confirm${ANSI.RESET}`);
283
+ lines.push(` ${ANSI.DIM}\u2191/\u2193 Navigate ${ANSI.RESET}${ANSI.CYAN}Space${ANSI.RESET}${ANSI.DIM} Select ${ANSI.RESET}${ANSI.CYAN}Enter${ANSI.RESET}${ANSI.DIM} Confirm${ANSI.RESET}`);
194
284
  lines.push("");
195
285
  return { text: lines.join("\n"), lineCount: lines.length };
196
286
  }
197
287
  function withRawMode(streams, handler) {
198
- const { stdin: stdin3, stdout: stdout3 } = streams;
288
+ const { stdin: stdin2, stdout: stdout2 } = streams;
199
289
  return new Promise((resolve, reject) => {
200
290
  let cleaned = false;
201
291
  function cleanup() {
202
292
  if (cleaned) return;
203
293
  cleaned = true;
204
- stdin3.removeListener("data", onData);
205
- if (stdin3.setRawMode) stdin3.setRawMode(false);
206
- if ("pause" in stdin3 && typeof stdin3.pause === "function") {
207
- stdin3.pause();
294
+ stdin2.removeListener("data", onData);
295
+ if (stdin2.setRawMode) stdin2.setRawMode(false);
296
+ if ("pause" in stdin2 && typeof stdin2.pause === "function") {
297
+ stdin2.pause();
208
298
  }
209
- stdout3.write(ANSI.SHOW_CURSOR);
299
+ stdout2.write(ANSI.SHOW_CURSOR);
210
300
  process.removeListener("exit", cleanup);
211
301
  }
212
302
  function onData(data) {
@@ -223,11 +313,11 @@ function withRawMode(streams, handler) {
223
313
  });
224
314
  process.on("exit", cleanup);
225
315
  try {
226
- if (stdin3.setRawMode) stdin3.setRawMode(true);
227
- stdout3.write(ANSI.HIDE_CURSOR);
228
- stdin3.on("data", onData);
229
- if ("resume" in stdin3 && typeof stdin3.resume === "function") {
230
- stdin3.resume();
316
+ if (stdin2.setRawMode) stdin2.setRawMode(true);
317
+ stdout2.write(ANSI.HIDE_CURSOR);
318
+ stdin2.on("data", onData);
319
+ if ("resume" in stdin2 && typeof stdin2.resume === "function") {
320
+ stdin2.resume();
231
321
  }
232
322
  } catch (err) {
233
323
  cleanup();
@@ -575,59 +665,322 @@ async function runAuthFlow() {
575
665
  }
576
666
  }
577
667
 
578
- // src/commands/init.ts
579
- function detectShellProfile() {
580
- const shell = process.env.SHELL || "";
581
- const home = homedir2();
582
- if (shell.endsWith("/zsh")) {
583
- const zshrc = join2(home, ".zshrc");
584
- return { name: "~/.zshrc", path: zshrc };
668
+ // src/connectors/claude-code.ts
669
+ import { execSync } from "child_process";
670
+ var ENV_VAR = "ANTHROPIC_BASE_URL";
671
+ var INFO = {
672
+ id: "claude-code",
673
+ label: "Claude Code",
674
+ description: "Anthropic CLI for coding with Claude",
675
+ protocol: "anthropic-messages",
676
+ automatable: true
677
+ };
678
+ function isClaudeInstalled() {
679
+ try {
680
+ execSync("which claude", { stdio: "ignore" });
681
+ return true;
682
+ } catch {
683
+ return false;
585
684
  }
586
- if (shell.endsWith("/bash")) {
587
- const bashProfile = join2(home, ".bash_profile");
588
- if (existsSync2(bashProfile)) {
589
- return { name: "~/.bash_profile", path: bashProfile };
685
+ }
686
+ function getCurrentBaseUrl() {
687
+ return process.env[ENV_VAR] || void 0;
688
+ }
689
+ var claudeCodeConnector = {
690
+ info: INFO,
691
+ async detect() {
692
+ const installed = isClaudeInstalled();
693
+ const currentUrl = getCurrentBaseUrl();
694
+ const configured = currentUrl?.includes("127.0.0.1") ?? false;
695
+ if (!installed) {
696
+ return { installed, configured: false, detail: "Claude Code not found in PATH" };
590
697
  }
591
- return { name: "~/.bashrc", path: join2(home, ".bashrc") };
698
+ if (configured) {
699
+ return { installed, configured, detail: `Routing through ${currentUrl}` };
700
+ }
701
+ return { installed, configured, detail: "Installed but not routing through Liminal" };
702
+ },
703
+ getShellExports(port) {
704
+ return [`export ${ENV_VAR}=http://127.0.0.1:${port}`];
705
+ },
706
+ async setup(port) {
707
+ const exports = this.getShellExports(port);
708
+ return {
709
+ success: true,
710
+ shellExports: exports,
711
+ postSetupInstructions: [
712
+ "Claude Code will automatically route through Liminal.",
713
+ "Make sure to source your shell profile or restart your terminal."
714
+ ]
715
+ };
716
+ },
717
+ async teardown() {
718
+ return {
719
+ success: true,
720
+ manualSteps: [
721
+ `Remove the line \`export ${ENV_VAR}=...\` from your shell profile (~/.zshrc or ~/.bashrc).`,
722
+ "Restart your terminal or run: unset ANTHROPIC_BASE_URL"
723
+ ]
724
+ };
592
725
  }
593
- const candidates = [
594
- { name: "~/.zshrc", path: join2(home, ".zshrc") },
595
- { name: "~/.bashrc", path: join2(home, ".bashrc") },
596
- { name: "~/.profile", path: join2(home, ".profile") }
597
- ];
598
- for (const c of candidates) {
599
- if (existsSync2(c.path)) return c;
726
+ };
727
+
728
+ // src/connectors/codex.ts
729
+ import { execSync as execSync2 } from "child_process";
730
+ import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
731
+ import { join as join3 } from "path";
732
+ import { homedir as homedir3 } from "os";
733
+ var ENV_VAR2 = "OPENAI_BASE_URL";
734
+ var CODEX_CONFIG_DIR = join3(homedir3(), ".codex");
735
+ var CODEX_CONFIG_FILE = join3(CODEX_CONFIG_DIR, "config.toml");
736
+ var INFO2 = {
737
+ id: "codex",
738
+ label: "Codex CLI",
739
+ description: "OpenAI CLI agent for coding (Responses API)",
740
+ protocol: "openai-responses",
741
+ automatable: true
742
+ };
743
+ function isCodexInstalled() {
744
+ try {
745
+ execSync2("which codex", { stdio: "ignore" });
746
+ return true;
747
+ } catch {
748
+ return false;
600
749
  }
601
- return null;
602
750
  }
603
- function getExportLines(tools, port) {
604
- const base = `http://127.0.0.1:${port}`;
605
- const lines = [];
606
- if (tools.includes("claude-code")) {
607
- lines.push(`export ANTHROPIC_BASE_URL=${base}`);
608
- }
609
- if (tools.includes("codex") || tools.includes("openai-compatible")) {
610
- lines.push(`export OPENAI_BASE_URL=${base}/v1`);
611
- }
612
- return lines;
751
+ function getCurrentBaseUrl2() {
752
+ return process.env[ENV_VAR2] || void 0;
613
753
  }
614
- function lineExistsInFile(filePath, line) {
615
- if (!existsSync2(filePath)) return false;
754
+ function hasCodexConfig() {
755
+ return existsSync3(CODEX_CONFIG_FILE);
756
+ }
757
+ function codexConfigMentionsLiminal() {
758
+ if (!hasCodexConfig()) return false;
616
759
  try {
617
- const content = readFileSync2(filePath, "utf-8");
618
- return content.includes(line);
760
+ const content = readFileSync3(CODEX_CONFIG_FILE, "utf-8");
761
+ return content.includes("127.0.0.1") || content.includes("liminal");
619
762
  } catch {
620
763
  return false;
621
764
  }
622
765
  }
623
- function appendToShellProfile(profile, lines) {
624
- const block = [
625
- "",
626
- "# Liminal \u2014 route AI tools through compression proxy",
627
- ...lines
628
- ].join("\n") + "\n";
629
- appendFileSync(profile.path, block, "utf-8");
766
+ var codexConnector = {
767
+ info: INFO2,
768
+ async detect() {
769
+ const installed = isCodexInstalled();
770
+ const currentUrl = getCurrentBaseUrl2();
771
+ const envConfigured = currentUrl?.includes("127.0.0.1") ?? false;
772
+ const tomlConfigured = codexConfigMentionsLiminal();
773
+ const configured = envConfigured || tomlConfigured;
774
+ if (!installed) {
775
+ return { installed, configured: false, detail: "Codex CLI not found in PATH" };
776
+ }
777
+ if (configured) {
778
+ const via = envConfigured ? ENV_VAR2 : "config.toml";
779
+ return { installed, configured, detail: `Routing through Liminal (via ${via})` };
780
+ }
781
+ return { installed, configured, detail: "Installed but not routing through Liminal" };
782
+ },
783
+ getShellExports(port) {
784
+ return [`export ${ENV_VAR2}=http://127.0.0.1:${port}/v1`];
785
+ },
786
+ async setup(port) {
787
+ const exports = this.getShellExports(port);
788
+ const instructions = [
789
+ "Codex CLI will automatically route through Liminal.",
790
+ "Make sure to source your shell profile or restart your terminal."
791
+ ];
792
+ instructions.push(
793
+ "Codex uses the OpenAI Responses API (/v1/responses) by default."
794
+ );
795
+ return {
796
+ success: true,
797
+ shellExports: exports,
798
+ postSetupInstructions: instructions
799
+ };
800
+ },
801
+ async teardown() {
802
+ const steps = [
803
+ `Remove the line \`export ${ENV_VAR2}=...\` from your shell profile (~/.zshrc or ~/.bashrc).`,
804
+ "Restart your terminal or run: unset OPENAI_BASE_URL"
805
+ ];
806
+ if (codexConfigMentionsLiminal()) {
807
+ steps.push(
808
+ `Remove the Liminal provider block from ${CODEX_CONFIG_FILE}.`
809
+ );
810
+ }
811
+ return { success: true, manualSteps: steps };
812
+ }
813
+ };
814
+
815
+ // src/connectors/cursor.ts
816
+ import { existsSync as existsSync4 } from "fs";
817
+ import { join as join4 } from "path";
818
+ import { homedir as homedir4 } from "os";
819
+ var INFO3 = {
820
+ id: "cursor",
821
+ label: "Cursor",
822
+ description: "AI-first code editor (GUI config required)",
823
+ protocol: "openai-chat",
824
+ automatable: false
825
+ };
826
+ function getCursorPaths() {
827
+ const platform = process.platform;
828
+ const home = homedir4();
829
+ if (platform === "darwin") {
830
+ return {
831
+ app: "/Applications/Cursor.app",
832
+ data: join4(home, "Library", "Application Support", "Cursor")
833
+ };
834
+ }
835
+ if (platform === "win32") {
836
+ const appData = process.env.APPDATA || join4(home, "AppData", "Roaming");
837
+ const localAppData = process.env.LOCALAPPDATA || join4(home, "AppData", "Local");
838
+ return {
839
+ app: join4(localAppData, "Programs", "Cursor", "Cursor.exe"),
840
+ data: join4(appData, "Cursor")
841
+ };
842
+ }
843
+ return {
844
+ app: "/usr/bin/cursor",
845
+ data: join4(home, ".config", "Cursor")
846
+ };
847
+ }
848
+ function isCursorInstalled() {
849
+ const { app, data } = getCursorPaths();
850
+ return existsSync4(app) || existsSync4(data);
851
+ }
852
+ function getSettingsDbPath() {
853
+ const { data } = getCursorPaths();
854
+ return join4(data, "User", "globalStorage", "state.vscdb");
855
+ }
856
+ var cursorConnector = {
857
+ info: INFO3,
858
+ async detect() {
859
+ const installed = isCursorInstalled();
860
+ const dbExists = existsSync4(getSettingsDbPath());
861
+ if (!installed) {
862
+ return { installed, configured: false, detail: "Cursor not found on this system" };
863
+ }
864
+ return {
865
+ installed,
866
+ configured: false,
867
+ detail: dbExists ? "Installed \u2014 configuration requires Cursor Settings GUI" : "Installed but settings database not found"
868
+ };
869
+ },
870
+ getShellExports(_port) {
871
+ return [];
872
+ },
873
+ async setup(port) {
874
+ const baseUrl = `http://127.0.0.1:${port}/v1`;
875
+ return {
876
+ success: true,
877
+ shellExports: [],
878
+ // No env vars — GUI only
879
+ postSetupInstructions: [
880
+ "Cursor requires manual configuration:",
881
+ "",
882
+ " 1. Open Cursor Settings (not VS Code settings)",
883
+ " 2. Go to Models",
884
+ ' 3. Enable "Override OpenAI Base URL (when using key)"',
885
+ ` 4. Set the base URL to: ${baseUrl}`,
886
+ ' 5. Enter any string as the API key (e.g., "liminal")',
887
+ " 6. Restart Cursor",
888
+ "",
889
+ "Cursor uses OpenAI format for all models, including Claude.",
890
+ "Both Chat Completions and Agent mode (Responses API) are supported."
891
+ ]
892
+ };
893
+ },
894
+ async teardown() {
895
+ return {
896
+ success: true,
897
+ manualSteps: [
898
+ "In Cursor Settings > Models:",
899
+ ' 1. Disable "Override OpenAI Base URL (when using key)"',
900
+ " 2. Clear the base URL field",
901
+ " 3. Restart Cursor"
902
+ ]
903
+ };
904
+ }
905
+ };
906
+
907
+ // src/connectors/openai-compatible.ts
908
+ var ENV_VAR3 = "OPENAI_BASE_URL";
909
+ var INFO4 = {
910
+ id: "openai-compatible",
911
+ label: "Other / OpenAI-compatible",
912
+ description: "Any tool that reads OPENAI_BASE_URL",
913
+ protocol: "openai-chat",
914
+ automatable: true
915
+ };
916
+ function getCurrentBaseUrl3() {
917
+ return process.env[ENV_VAR3] || void 0;
918
+ }
919
+ var openaiCompatibleConnector = {
920
+ info: INFO4,
921
+ async detect() {
922
+ const currentUrl = getCurrentBaseUrl3();
923
+ const configured = currentUrl?.includes("127.0.0.1") ?? false;
924
+ return {
925
+ installed: true,
926
+ // Generic — always "available"
927
+ configured,
928
+ detail: configured ? `OPENAI_BASE_URL \u2192 ${currentUrl}` : "OPENAI_BASE_URL not set to Liminal"
929
+ };
930
+ },
931
+ getShellExports(port) {
932
+ return [`export ${ENV_VAR3}=http://127.0.0.1:${port}/v1`];
933
+ },
934
+ async setup(port) {
935
+ const exports = this.getShellExports(port);
936
+ return {
937
+ success: true,
938
+ shellExports: exports,
939
+ postSetupInstructions: [
940
+ "Any tool that reads OPENAI_BASE_URL will route through Liminal.",
941
+ "Make sure to source your shell profile or restart your terminal.",
942
+ "",
943
+ "If your tool uses a different env var (e.g., OPENAI_API_BASE),",
944
+ `set it to: http://127.0.0.1:${port}/v1`
945
+ ]
946
+ };
947
+ },
948
+ async teardown() {
949
+ return {
950
+ success: true,
951
+ manualSteps: [
952
+ `Remove the line \`export ${ENV_VAR3}=...\` from your shell profile (~/.zshrc or ~/.bashrc).`,
953
+ "Restart your terminal or run: unset OPENAI_BASE_URL"
954
+ ]
955
+ };
956
+ }
957
+ };
958
+
959
+ // src/connectors/index.ts
960
+ var CONNECTORS = [
961
+ claudeCodeConnector,
962
+ codexConnector,
963
+ cursorConnector,
964
+ openaiCompatibleConnector
965
+ ];
966
+ function getConnector(id) {
967
+ const connector = CONNECTORS.find((c) => c.info.id === id);
968
+ if (!connector) {
969
+ throw new Error(`Unknown connector: ${id}`);
970
+ }
971
+ return connector;
972
+ }
973
+ function getConnectors(ids) {
974
+ return ids.map(getConnector);
630
975
  }
976
+
977
+ // src/commands/init.ts
978
+ var BOLD = "\x1B[1m";
979
+ var DIM = "\x1B[2m";
980
+ var CYAN = "\x1B[36m";
981
+ var GREEN = "\x1B[32m";
982
+ var YELLOW = "\x1B[33m";
983
+ var RESET = "\x1B[0m";
631
984
  async function initCommand() {
632
985
  printBanner();
633
986
  console.log(" Welcome to Liminal -- Your Transparency & Context Partner");
@@ -636,29 +989,38 @@ async function initCommand() {
636
989
  console.log();
637
990
  const apiKey = await runAuthFlow();
638
991
  console.log();
639
- const rl = createInterface2({ input: stdin2, output: stdout2 });
640
- let port;
641
- try {
642
- const portInput = await rl.question(` Proxy port [${DEFAULTS.port}]: `);
643
- port = portInput.trim() ? parseInt(portInput.trim(), 10) : DEFAULTS.port;
644
- if (isNaN(port) || port < 1 || port > 65535) {
645
- console.error("\n Error: Invalid port number.");
646
- process.exit(1);
647
- }
648
- } finally {
649
- rl.close();
992
+ const port = DEFAULTS.port;
993
+ console.log(` ${BOLD}Detecting installed tools...${RESET}`);
994
+ console.log();
995
+ const detectionResults = await Promise.all(
996
+ CONNECTORS.map(async (c) => {
997
+ const status = await c.detect();
998
+ return { connector: c, status };
999
+ })
1000
+ );
1001
+ for (const { connector, status } of detectionResults) {
1002
+ const icon = status.installed ? `${GREEN}\u2713${RESET}` : `${DIM}\xB7${RESET}`;
1003
+ console.log(` ${icon} ${connector.info.label} ${DIM}${status.detail}${RESET}`);
650
1004
  }
651
1005
  console.log();
1006
+ const toolOptions = CONNECTORS.map((c) => {
1007
+ const detected = detectionResults.find((r) => r.connector.info.id === c.info.id);
1008
+ const installed = detected?.status.installed ?? false;
1009
+ let description = c.info.description;
1010
+ if (!installed) description += ` ${DIM}(not detected)${RESET}`;
1011
+ if (!c.info.automatable) description += ` ${DIM}(manual setup)${RESET}`;
1012
+ return {
1013
+ label: c.info.label,
1014
+ value: c.info.id,
1015
+ description,
1016
+ default: c.info.id === "claude-code" && installed
1017
+ };
1018
+ });
652
1019
  const toolsResult = await multiSelectPrompt({
653
1020
  message: "Which AI tools will you use with Liminal?",
654
- options: [
655
- { label: "Claude Code", value: "claude-code", default: true },
656
- { label: "Codex", value: "codex" },
657
- { label: "Cursor", value: "cursor" },
658
- { label: "Other / OpenAI", value: "openai-compatible" }
659
- ]
1021
+ options: toolOptions
660
1022
  });
661
- const tools = toolsResult ?? ["claude-code"];
1023
+ const selectedIds = toolsResult ?? ["claude-code"];
662
1024
  console.log();
663
1025
  const learnResult = await selectPrompt({
664
1026
  message: "Learn from LLM responses?",
@@ -670,78 +1032,100 @@ async function initCommand() {
670
1032
  });
671
1033
  const learnFromResponses = learnResult ?? true;
672
1034
  console.log();
673
- const apiBaseUrl = DEFAULTS.apiBaseUrl;
674
1035
  ensureDirectories();
675
1036
  saveConfig({
676
1037
  apiKey,
677
- apiBaseUrl,
1038
+ apiBaseUrl: DEFAULTS.apiBaseUrl,
678
1039
  upstreamBaseUrl: DEFAULTS.upstreamBaseUrl,
679
1040
  anthropicUpstreamUrl: DEFAULTS.anthropicUpstreamUrl,
680
1041
  port,
681
1042
  learnFromResponses,
682
- tools,
1043
+ tools: selectedIds,
683
1044
  compressionThreshold: DEFAULTS.compressionThreshold,
684
1045
  compressRoles: DEFAULTS.compressRoles,
685
1046
  latencyBudgetMs: DEFAULTS.latencyBudgetMs,
686
1047
  enabled: DEFAULTS.enabled
687
1048
  });
1049
+ console.log(` ${GREEN}\u2713${RESET} Configuration saved to ${DIM}${CONFIG_FILE}${RESET}`);
688
1050
  console.log();
689
- console.log(` Configuration saved to ${CONFIG_FILE}`);
690
- console.log();
691
- const exportLines = getExportLines(tools, port);
692
- const hasCursor = tools.includes("cursor");
693
- if (exportLines.length > 0) {
694
- const profile = detectShellProfile();
695
- if (profile) {
696
- const allExist = exportLines.every((line) => lineExistsInFile(profile.path, line));
697
- if (allExist) {
698
- console.log(` Shell already configured in ${profile.name}`);
1051
+ const connectors = getConnectors(selectedIds);
1052
+ const allShellExports = [];
1053
+ const profile = detectShellProfile();
1054
+ console.log(` ${BOLD}Configuring ${connectors.length} tool${connectors.length > 1 ? "s" : ""}...${RESET}`);
1055
+ for (const connector of connectors) {
1056
+ const result = await connector.setup(port);
1057
+ const protocol = connector.info.protocol === "anthropic-messages" ? "Anthropic Messages API" : connector.info.protocol === "openai-responses" ? "Responses API" : "Chat Completions API";
1058
+ console.log();
1059
+ console.log(` ${CYAN}\u2500\u2500 ${connector.info.label} ${RESET}${DIM}(${protocol})${RESET}`);
1060
+ if (connector.info.automatable && result.shellExports.length > 0) {
1061
+ for (const line of result.shellExports) {
1062
+ console.log(` ${GREEN}\u2713${RESET} ${line}`);
1063
+ }
1064
+ allShellExports.push(...result.shellExports);
1065
+ }
1066
+ for (const line of result.postSetupInstructions) {
1067
+ if (line === "") {
1068
+ console.log();
699
1069
  } else {
700
- const autoResult = await selectPrompt({
701
- message: "Configure shell automatically?",
702
- options: [
703
- { label: "Yes", value: true, description: `Add to ${profile.name}` },
704
- { label: "No", value: false, description: "I'll set it up manually" }
705
- ],
706
- defaultIndex: 0
707
- });
708
- if (autoResult === true) {
709
- const newLines = exportLines.filter((line) => !lineExistsInFile(profile.path, line));
710
- if (newLines.length > 0) {
711
- appendToShellProfile(profile, newLines);
712
- }
713
- console.log();
714
- console.log(` Added to ${profile.name}:`);
715
- for (const line of exportLines) {
716
- console.log(` ${line}`);
717
- }
718
- console.log();
719
- console.log(` Run \x1B[1msource ${profile.name}\x1B[0m or restart your terminal to apply.`);
1070
+ if (line.includes("source your shell profile") || line.includes("restart your terminal")) continue;
1071
+ if (line.includes("will automatically route through Liminal")) {
1072
+ console.log(` ${DIM}${line}${RESET}`);
1073
+ } else if (line.startsWith(" ")) {
1074
+ console.log(` ${line}`);
720
1075
  } else {
721
- console.log();
722
- console.log(" Add these to your shell profile:");
723
- console.log();
724
- for (const line of exportLines) {
725
- console.log(` ${line}`);
726
- }
1076
+ console.log(` ${line}`);
727
1077
  }
728
1078
  }
1079
+ }
1080
+ if (!connector.info.automatable) {
1081
+ console.log(` ${YELLOW}\u26A0 Requires manual configuration (see steps above)${RESET}`);
1082
+ }
1083
+ }
1084
+ const uniqueExports = [...new Set(allShellExports)];
1085
+ if (uniqueExports.length > 0 && profile) {
1086
+ const allExist = uniqueExports.every((line) => lineExistsInFile(profile.path, line));
1087
+ console.log();
1088
+ if (allExist) {
1089
+ console.log(` ${GREEN}\u2713${RESET} Shell already configured in ${profile.name}`);
729
1090
  } else {
730
- console.log(" Add these to your shell profile:");
731
- console.log();
732
- for (const line of exportLines) {
733
- console.log(` ${line}`);
1091
+ const autoResult = await selectPrompt({
1092
+ message: `Add proxy exports to ${profile.name}?`,
1093
+ options: [
1094
+ { label: "Yes", value: true, description: "Automatic shell configuration" },
1095
+ { label: "No", value: false, description: "I'll set it up manually" }
1096
+ ],
1097
+ defaultIndex: 0
1098
+ });
1099
+ if (autoResult === true) {
1100
+ const added = appendToShellProfile(profile, uniqueExports);
1101
+ if (added.length > 0) {
1102
+ console.log();
1103
+ console.log(` ${GREEN}\u2713${RESET} Added to ${profile.name}`);
1104
+ console.log();
1105
+ console.log(` Run ${BOLD}source ${profile.name}${RESET} or restart your terminal.`);
1106
+ }
1107
+ } else {
1108
+ console.log();
1109
+ console.log(" Add these to your shell profile:");
1110
+ console.log();
1111
+ for (const line of uniqueExports) {
1112
+ console.log(` ${CYAN}${line}${RESET}`);
1113
+ }
734
1114
  }
735
1115
  }
736
- }
737
- if (hasCursor) {
1116
+ } else if (uniqueExports.length > 0) {
1117
+ console.log();
1118
+ console.log(" Add these to your shell profile:");
738
1119
  console.log();
739
- console.log(" Cursor setup (manual):");
740
- console.log(` Settings > Models > OpenAI API Base URL: http://127.0.0.1:${port}/v1`);
1120
+ for (const line of uniqueExports) {
1121
+ console.log(` ${CYAN}${line}${RESET}`);
1122
+ }
741
1123
  }
742
1124
  console.log();
1125
+ console.log(` ${BOLD}Setup complete!${RESET}`);
1126
+ console.log();
743
1127
  console.log(" Next step:");
744
- console.log(" liminal start");
1128
+ console.log(` ${BOLD}liminal start${RESET}`);
745
1129
  console.log();
746
1130
  }
747
1131
 
@@ -1061,7 +1445,7 @@ function createStreamLearningBuffer(pipeline) {
1061
1445
  }
1062
1446
 
1063
1447
  // src/proxy/streaming.ts
1064
- async function pipeSSEResponse(upstreamResponse, clientRes, onContentDelta, onComplete) {
1448
+ async function pipeSSEResponse(upstreamResponse, clientRes, onContentDelta, onComplete, totalTokensSaved = 0) {
1065
1449
  clientRes.writeHead(200, {
1066
1450
  "Content-Type": "text/event-stream",
1067
1451
  "Cache-Control": "no-cache",
@@ -1071,27 +1455,67 @@ async function pipeSSEResponse(upstreamResponse, clientRes, onContentDelta, onCo
1071
1455
  const reader = upstreamResponse.body.getReader();
1072
1456
  const decoder = new TextDecoder();
1073
1457
  let lineBuf = "";
1458
+ const needsAdjustment = totalTokensSaved > 0;
1074
1459
  try {
1075
1460
  while (true) {
1076
1461
  const { done, value } = await reader.read();
1077
1462
  if (done) break;
1078
1463
  const chunk = decoder.decode(value, { stream: true });
1079
- clientRes.write(chunk);
1464
+ if (!needsAdjustment) {
1465
+ clientRes.write(chunk);
1466
+ lineBuf += chunk;
1467
+ const lines2 = lineBuf.split("\n");
1468
+ lineBuf = lines2.pop() || "";
1469
+ for (const line of lines2) {
1470
+ if (line.startsWith("data: ") && line !== "data: [DONE]") {
1471
+ try {
1472
+ const json = JSON.parse(line.slice(6));
1473
+ const content = json?.choices?.[0]?.delta?.content;
1474
+ if (typeof content === "string") {
1475
+ onContentDelta(content);
1476
+ }
1477
+ } catch {
1478
+ }
1479
+ }
1480
+ }
1481
+ continue;
1482
+ }
1080
1483
  lineBuf += chunk;
1081
1484
  const lines = lineBuf.split("\n");
1082
1485
  lineBuf = lines.pop() || "";
1486
+ let adjusted = false;
1487
+ const outputLines = [];
1083
1488
  for (const line of lines) {
1084
1489
  if (line.startsWith("data: ") && line !== "data: [DONE]") {
1085
1490
  try {
1086
1491
  const json = JSON.parse(line.slice(6));
1492
+ if (json?.usage?.prompt_tokens != null) {
1493
+ json.usage.prompt_tokens += totalTokensSaved;
1494
+ if (json.usage.total_tokens != null) {
1495
+ json.usage.total_tokens += totalTokensSaved;
1496
+ }
1497
+ outputLines.push(`data: ${JSON.stringify(json)}`);
1498
+ adjusted = true;
1499
+ } else {
1500
+ outputLines.push(line);
1501
+ }
1087
1502
  const content = json?.choices?.[0]?.delta?.content;
1088
1503
  if (typeof content === "string") {
1089
1504
  onContentDelta(content);
1090
1505
  }
1091
1506
  } catch {
1507
+ outputLines.push(line);
1092
1508
  }
1509
+ } else {
1510
+ outputLines.push(line);
1093
1511
  }
1094
1512
  }
1513
+ if (adjusted) {
1514
+ const reconstructed = outputLines.join("\n") + "\n";
1515
+ clientRes.write(reconstructed);
1516
+ } else {
1517
+ clientRes.write(chunk);
1518
+ }
1095
1519
  }
1096
1520
  } finally {
1097
1521
  clientRes.end();
@@ -1132,6 +1556,7 @@ async function handleChatCompletions(req, res, body, pipeline, config, logger) {
1132
1556
  }
1133
1557
  let messages = request.messages;
1134
1558
  let anyCompressed = false;
1559
+ let totalTokensSaved = 0;
1135
1560
  if (config.enabled && !pipeline.isCircuitOpen()) {
1136
1561
  try {
1137
1562
  const compressRoles = new Set(config.compressRoles);
@@ -1143,6 +1568,7 @@ async function handleChatCompletions(req, res, body, pipeline, config, logger) {
1143
1568
  );
1144
1569
  messages = result.messages;
1145
1570
  anyCompressed = result.anyCompressed;
1571
+ totalTokensSaved = result.totalTokensSaved;
1146
1572
  if (result.totalTokensSaved > 0) {
1147
1573
  logger.log(`[COMPRESS] Saved ${result.totalTokensSaved} tokens`);
1148
1574
  }
@@ -1185,14 +1611,30 @@ async function handleChatCompletions(req, res, body, pipeline, config, logger) {
1185
1611
  upstreamResponse,
1186
1612
  res,
1187
1613
  (text) => learningBuffer?.append(text),
1188
- () => learningBuffer?.flush()
1614
+ () => learningBuffer?.flush(),
1615
+ totalTokensSaved
1189
1616
  );
1190
1617
  return;
1191
1618
  }
1192
1619
  const responseBody = await upstreamResponse.text();
1620
+ let finalBody = responseBody;
1621
+ if (totalTokensSaved > 0) {
1622
+ try {
1623
+ const parsed = JSON.parse(responseBody);
1624
+ if (parsed?.usage?.prompt_tokens != null) {
1625
+ parsed.usage.prompt_tokens += totalTokensSaved;
1626
+ if (parsed.usage.total_tokens != null) {
1627
+ parsed.usage.total_tokens += totalTokensSaved;
1628
+ }
1629
+ finalBody = JSON.stringify(parsed);
1630
+ logger.log(`[TOKENS] Adjusted prompt_tokens by +${totalTokensSaved}`);
1631
+ }
1632
+ } catch {
1633
+ }
1634
+ }
1193
1635
  setCORSHeaders(res);
1194
1636
  res.writeHead(200, { "Content-Type": "application/json" });
1195
- res.end(responseBody);
1637
+ res.end(finalBody);
1196
1638
  if (anyCompressed) {
1197
1639
  try {
1198
1640
  const parsed = JSON.parse(responseBody);
@@ -1218,7 +1660,18 @@ async function handleChatCompletions(req, res, body, pipeline, config, logger) {
1218
1660
  import { RSCCircuitOpenError as RSCCircuitOpenError3 } from "@cognisos/rsc-sdk";
1219
1661
 
1220
1662
  // src/proxy/anthropic-streaming.ts
1221
- async function pipeAnthropicSSEResponse(upstreamResponse, clientRes, onContentDelta, onComplete) {
1663
+ function adjustMessageStartLine(dataLine, tokensSaved) {
1664
+ try {
1665
+ const json = JSON.parse(dataLine.slice(6));
1666
+ if (json?.message?.usage?.input_tokens != null) {
1667
+ json.message.usage.input_tokens += tokensSaved;
1668
+ return `data: ${JSON.stringify(json)}`;
1669
+ }
1670
+ } catch {
1671
+ }
1672
+ return null;
1673
+ }
1674
+ async function pipeAnthropicSSEResponse(upstreamResponse, clientRes, onContentDelta, onComplete, totalTokensSaved = 0) {
1222
1675
  clientRes.writeHead(200, {
1223
1676
  "Content-Type": "text/event-stream",
1224
1677
  "Cache-Control": "no-cache",
@@ -1229,28 +1682,70 @@ async function pipeAnthropicSSEResponse(upstreamResponse, clientRes, onContentDe
1229
1682
  const decoder = new TextDecoder();
1230
1683
  let lineBuf = "";
1231
1684
  let currentEvent = "";
1685
+ let usageAdjusted = false;
1686
+ const needsAdjustment = totalTokensSaved > 0;
1232
1687
  try {
1233
1688
  while (true) {
1234
1689
  const { done, value } = await reader.read();
1235
1690
  if (done) break;
1236
1691
  const chunk = decoder.decode(value, { stream: true });
1237
- clientRes.write(chunk);
1692
+ if (!needsAdjustment || usageAdjusted) {
1693
+ clientRes.write(chunk);
1694
+ lineBuf += chunk;
1695
+ const lines2 = lineBuf.split("\n");
1696
+ lineBuf = lines2.pop() || "";
1697
+ for (const line of lines2) {
1698
+ if (line.startsWith("event: ")) {
1699
+ currentEvent = line.slice(7).trim();
1700
+ } else if (line.startsWith("data: ") && currentEvent === "content_block_delta") {
1701
+ try {
1702
+ const json = JSON.parse(line.slice(6));
1703
+ if (json?.delta?.type === "text_delta" && typeof json.delta.text === "string") {
1704
+ onContentDelta(json.delta.text);
1705
+ }
1706
+ } catch {
1707
+ }
1708
+ }
1709
+ }
1710
+ continue;
1711
+ }
1238
1712
  lineBuf += chunk;
1239
1713
  const lines = lineBuf.split("\n");
1240
1714
  lineBuf = lines.pop() || "";
1715
+ let adjusted = false;
1716
+ const outputLines = [];
1241
1717
  for (const line of lines) {
1242
1718
  if (line.startsWith("event: ")) {
1243
1719
  currentEvent = line.slice(7).trim();
1244
- } else if (line.startsWith("data: ") && currentEvent === "content_block_delta") {
1245
- try {
1246
- const json = JSON.parse(line.slice(6));
1247
- if (json?.delta?.type === "text_delta" && typeof json.delta.text === "string") {
1248
- onContentDelta(json.delta.text);
1720
+ outputLines.push(line);
1721
+ } else if (line.startsWith("data: ") && currentEvent === "message_start" && !usageAdjusted) {
1722
+ const adjustedLine = adjustMessageStartLine(line, totalTokensSaved);
1723
+ if (adjustedLine) {
1724
+ outputLines.push(adjustedLine);
1725
+ usageAdjusted = true;
1726
+ adjusted = true;
1727
+ } else {
1728
+ outputLines.push(line);
1729
+ }
1730
+ } else {
1731
+ outputLines.push(line);
1732
+ if (line.startsWith("data: ") && currentEvent === "content_block_delta") {
1733
+ try {
1734
+ const json = JSON.parse(line.slice(6));
1735
+ if (json?.delta?.type === "text_delta" && typeof json.delta.text === "string") {
1736
+ onContentDelta(json.delta.text);
1737
+ }
1738
+ } catch {
1249
1739
  }
1250
- } catch {
1251
1740
  }
1252
1741
  }
1253
1742
  }
1743
+ if (adjusted) {
1744
+ const reconstructed = outputLines.join("\n") + "\n" + (lineBuf ? "" : "");
1745
+ clientRes.write(reconstructed);
1746
+ } else {
1747
+ clientRes.write(chunk);
1748
+ }
1254
1749
  }
1255
1750
  } finally {
1256
1751
  clientRes.end();
@@ -1310,6 +1805,7 @@ async function handleAnthropicMessages(req, res, body, pipeline, config, logger)
1310
1805
  }
1311
1806
  let messages = request.messages;
1312
1807
  let anyCompressed = false;
1808
+ let totalTokensSaved = 0;
1313
1809
  if (config.enabled && !pipeline.isCircuitOpen()) {
1314
1810
  try {
1315
1811
  const compressRoles = new Set(config.compressRoles);
@@ -1322,6 +1818,7 @@ async function handleAnthropicMessages(req, res, body, pipeline, config, logger)
1322
1818
  );
1323
1819
  messages = convertCompressedToAnthropic(result.messages);
1324
1820
  anyCompressed = result.anyCompressed;
1821
+ totalTokensSaved = result.totalTokensSaved;
1325
1822
  if (result.totalTokensSaved > 0) {
1326
1823
  logger.log(`[COMPRESS] Saved ${result.totalTokensSaved} tokens`);
1327
1824
  }
@@ -1369,14 +1866,27 @@ async function handleAnthropicMessages(req, res, body, pipeline, config, logger)
1369
1866
  upstreamResponse,
1370
1867
  res,
1371
1868
  (text) => learningBuffer?.append(text),
1372
- () => learningBuffer?.flush()
1869
+ () => learningBuffer?.flush(),
1870
+ totalTokensSaved
1373
1871
  );
1374
1872
  return;
1375
1873
  }
1376
1874
  const responseBody = await upstreamResponse.text();
1875
+ let finalBody = responseBody;
1876
+ if (totalTokensSaved > 0) {
1877
+ try {
1878
+ const parsed = JSON.parse(responseBody);
1879
+ if (parsed?.usage?.input_tokens != null) {
1880
+ parsed.usage.input_tokens += totalTokensSaved;
1881
+ finalBody = JSON.stringify(parsed);
1882
+ logger.log(`[TOKENS] Adjusted input_tokens by +${totalTokensSaved}`);
1883
+ }
1884
+ } catch {
1885
+ }
1886
+ }
1377
1887
  setCORSHeaders2(res);
1378
1888
  res.writeHead(200, { "Content-Type": "application/json" });
1379
- res.end(responseBody);
1889
+ res.end(finalBody);
1380
1890
  if (anyCompressed) {
1381
1891
  try {
1382
1892
  const parsed = JSON.parse(responseBody);
@@ -1399,58 +1909,426 @@ async function handleAnthropicMessages(req, res, body, pipeline, config, logger)
1399
1909
  }
1400
1910
  }
1401
1911
 
1402
- // src/proxy/handler.ts
1912
+ // src/proxy/responses.ts
1913
+ import { RSCCircuitOpenError as RSCCircuitOpenError4 } from "@cognisos/rsc-sdk";
1914
+
1915
+ // src/proxy/responses-streaming.ts
1916
+ async function pipeResponsesSSE(upstreamResponse, clientRes, onContentDelta, onComplete, totalTokensSaved = 0) {
1917
+ clientRes.writeHead(200, {
1918
+ "Content-Type": "text/event-stream",
1919
+ "Cache-Control": "no-cache",
1920
+ "Connection": "keep-alive",
1921
+ "Access-Control-Allow-Origin": "*"
1922
+ });
1923
+ const reader = upstreamResponse.body.getReader();
1924
+ const decoder = new TextDecoder();
1925
+ let lineBuf = "";
1926
+ let currentEvent = "";
1927
+ let usageAdjusted = false;
1928
+ const needsAdjustment = totalTokensSaved > 0;
1929
+ try {
1930
+ while (true) {
1931
+ const { done, value } = await reader.read();
1932
+ if (done) break;
1933
+ const chunk = decoder.decode(value, { stream: true });
1934
+ if (!needsAdjustment || usageAdjusted) {
1935
+ clientRes.write(chunk);
1936
+ lineBuf += chunk;
1937
+ const lines2 = lineBuf.split("\n");
1938
+ lineBuf = lines2.pop() || "";
1939
+ for (const line of lines2) {
1940
+ if (line.startsWith("event: ")) {
1941
+ currentEvent = line.slice(7).trim();
1942
+ } else if (line.startsWith("data: ") && currentEvent === "response.output_text.delta") {
1943
+ try {
1944
+ const json = JSON.parse(line.slice(6));
1945
+ if (typeof json?.delta === "string") {
1946
+ onContentDelta(json.delta);
1947
+ }
1948
+ } catch {
1949
+ }
1950
+ }
1951
+ }
1952
+ continue;
1953
+ }
1954
+ lineBuf += chunk;
1955
+ const lines = lineBuf.split("\n");
1956
+ lineBuf = lines.pop() || "";
1957
+ let adjusted = false;
1958
+ const outputLines = [];
1959
+ for (const line of lines) {
1960
+ if (line.startsWith("event: ")) {
1961
+ currentEvent = line.slice(7).trim();
1962
+ outputLines.push(line);
1963
+ } else if (line.startsWith("data: ") && currentEvent === "response.completed" && !usageAdjusted) {
1964
+ try {
1965
+ const json = JSON.parse(line.slice(6));
1966
+ if (json?.response?.usage?.input_tokens != null) {
1967
+ json.response.usage.input_tokens += totalTokensSaved;
1968
+ if (json.response.usage.total_tokens != null) {
1969
+ json.response.usage.total_tokens += totalTokensSaved;
1970
+ }
1971
+ outputLines.push(`data: ${JSON.stringify(json)}`);
1972
+ usageAdjusted = true;
1973
+ adjusted = true;
1974
+ } else {
1975
+ outputLines.push(line);
1976
+ }
1977
+ } catch {
1978
+ outputLines.push(line);
1979
+ }
1980
+ } else {
1981
+ outputLines.push(line);
1982
+ if (line.startsWith("data: ") && currentEvent === "response.output_text.delta") {
1983
+ try {
1984
+ const json = JSON.parse(line.slice(6));
1985
+ if (typeof json?.delta === "string") {
1986
+ onContentDelta(json.delta);
1987
+ }
1988
+ } catch {
1989
+ }
1990
+ }
1991
+ }
1992
+ }
1993
+ if (adjusted) {
1994
+ const reconstructed = outputLines.join("\n") + "\n";
1995
+ clientRes.write(reconstructed);
1996
+ } else {
1997
+ clientRes.write(chunk);
1998
+ }
1999
+ }
2000
+ } finally {
2001
+ clientRes.end();
2002
+ onComplete();
2003
+ }
2004
+ }
2005
+
2006
+ // src/proxy/responses.ts
1403
2007
  function setCORSHeaders3(res) {
1404
2008
  res.setHeader("Access-Control-Allow-Origin", "*");
1405
2009
  res.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS");
1406
- res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization, x-api-key, anthropic-version, anthropic-beta");
1407
- res.setHeader("Access-Control-Max-Age", "86400");
2010
+ res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization");
1408
2011
  }
1409
2012
  function sendJSON2(res, status, body) {
1410
2013
  setCORSHeaders3(res);
1411
2014
  res.writeHead(status, { "Content-Type": "application/json" });
1412
2015
  res.end(JSON.stringify(body));
1413
2016
  }
2017
+ function extractBearerToken2(req) {
2018
+ const auth = req.headers.authorization;
2019
+ if (!auth || !auth.startsWith("Bearer ")) return null;
2020
+ return auth.slice(7);
2021
+ }
2022
+ function isMessageItem(item) {
2023
+ return item.type === "message";
2024
+ }
2025
+ function inputToCompressibleMessages(input) {
2026
+ if (typeof input === "string") {
2027
+ return [{ role: "user", content: input }];
2028
+ }
2029
+ const messages = [];
2030
+ for (const item of input) {
2031
+ if (!isMessageItem(item)) continue;
2032
+ if (typeof item.content === "string") {
2033
+ const role = item.role === "developer" ? "system" : item.role;
2034
+ messages.push({ role, content: item.content });
2035
+ } else if (Array.isArray(item.content)) {
2036
+ const role = item.role === "developer" ? "system" : item.role;
2037
+ const parts = item.content.map((c) => {
2038
+ if (c.type === "input_text") {
2039
+ return { type: "text", text: c.text };
2040
+ }
2041
+ return c;
2042
+ });
2043
+ messages.push({ role, content: parts });
2044
+ }
2045
+ }
2046
+ return messages;
2047
+ }
2048
+ function applyCompressedToInput(originalInput, compressedMessages) {
2049
+ if (typeof originalInput === "string") {
2050
+ const first = compressedMessages[0];
2051
+ if (first && typeof first.content === "string") {
2052
+ return first.content;
2053
+ }
2054
+ return originalInput;
2055
+ }
2056
+ let msgIdx = 0;
2057
+ const result = [];
2058
+ for (const item of originalInput) {
2059
+ if (!isMessageItem(item)) {
2060
+ result.push(item);
2061
+ continue;
2062
+ }
2063
+ const compressed = compressedMessages[msgIdx];
2064
+ msgIdx++;
2065
+ if (!compressed) {
2066
+ result.push(item);
2067
+ continue;
2068
+ }
2069
+ if (typeof compressed.content === "string") {
2070
+ result.push({
2071
+ ...item,
2072
+ content: compressed.content
2073
+ });
2074
+ } else if (Array.isArray(compressed.content)) {
2075
+ const content = compressed.content.map((part) => {
2076
+ if (part.type === "text" && "text" in part) {
2077
+ return { type: "input_text", text: part.text };
2078
+ }
2079
+ return part;
2080
+ });
2081
+ result.push({
2082
+ ...item,
2083
+ content
2084
+ });
2085
+ } else {
2086
+ result.push(item);
2087
+ }
2088
+ }
2089
+ return result;
2090
+ }
2091
+ function extractOutputText(output) {
2092
+ const texts = [];
2093
+ for (const item of output) {
2094
+ if (item.type === "message") {
2095
+ const msg = item;
2096
+ for (const block of msg.content) {
2097
+ if (block.type === "output_text" && typeof block.text === "string") {
2098
+ texts.push(block.text);
2099
+ }
2100
+ }
2101
+ }
2102
+ }
2103
+ return texts.join("");
2104
+ }
2105
+ async function handleResponses(req, res, body, pipeline, config, logger) {
2106
+ const request = body;
2107
+ if (request.input === void 0 || request.input === null) {
2108
+ sendJSON2(res, 400, {
2109
+ error: { message: "input is required", type: "invalid_request_error" }
2110
+ });
2111
+ return;
2112
+ }
2113
+ const llmApiKey = extractBearerToken2(req);
2114
+ if (!llmApiKey) {
2115
+ sendJSON2(res, 401, {
2116
+ error: { message: "Authorization header with Bearer token is required", type: "authentication_error" }
2117
+ });
2118
+ return;
2119
+ }
2120
+ let compressedInput = request.input;
2121
+ let anyCompressed = false;
2122
+ let totalTokensSaved = 0;
2123
+ if (config.enabled && !pipeline.isCircuitOpen()) {
2124
+ try {
2125
+ const compressRoles = new Set(config.compressRoles);
2126
+ const compressible = inputToCompressibleMessages(request.input);
2127
+ if (compressible.length > 0) {
2128
+ const result = await compressMessages(
2129
+ compressible,
2130
+ pipeline.pipeline,
2131
+ pipeline.session,
2132
+ compressRoles
2133
+ );
2134
+ compressedInput = applyCompressedToInput(request.input, result.messages);
2135
+ anyCompressed = result.anyCompressed;
2136
+ totalTokensSaved = result.totalTokensSaved;
2137
+ if (result.totalTokensSaved > 0) {
2138
+ logger.log(`[COMPRESS] Responses API: saved ${result.totalTokensSaved} tokens`);
2139
+ }
2140
+ }
2141
+ } catch (err) {
2142
+ if (err instanceof RSCCircuitOpenError4) {
2143
+ logger.log("[DEGRADE] Circuit breaker open \u2014 passing through directly");
2144
+ } else {
2145
+ logger.log(`[ERROR] Compression failed: ${err instanceof Error ? err.message : String(err)}`);
2146
+ }
2147
+ compressedInput = request.input;
2148
+ }
2149
+ }
2150
+ const upstreamUrl = `${config.upstreamBaseUrl}/v1/responses`;
2151
+ const upstreamBody = { ...request, input: compressedInput };
2152
+ const upstreamHeaders = {
2153
+ "Authorization": `Bearer ${llmApiKey}`,
2154
+ "Content-Type": "application/json"
2155
+ };
2156
+ if (request.stream) {
2157
+ upstreamHeaders["Accept"] = "text/event-stream";
2158
+ }
2159
+ logger.log(`[RESPONSES] ${request.model} \u2192 ${upstreamUrl}`);
2160
+ try {
2161
+ const upstreamResponse = await fetch(upstreamUrl, {
2162
+ method: "POST",
2163
+ headers: upstreamHeaders,
2164
+ body: JSON.stringify(upstreamBody)
2165
+ });
2166
+ if (!upstreamResponse.ok) {
2167
+ const errorBody = await upstreamResponse.text();
2168
+ logger.log(`[RESPONSES] Upstream error ${upstreamResponse.status}: ${errorBody.slice(0, 500)}`);
2169
+ setCORSHeaders3(res);
2170
+ res.writeHead(upstreamResponse.status, {
2171
+ "Content-Type": upstreamResponse.headers.get("Content-Type") || "application/json"
2172
+ });
2173
+ res.end(errorBody);
2174
+ return;
2175
+ }
2176
+ if (request.stream && upstreamResponse.body) {
2177
+ const learningBuffer = anyCompressed ? createStreamLearningBuffer(pipeline.pipeline) : null;
2178
+ await pipeResponsesSSE(
2179
+ upstreamResponse,
2180
+ res,
2181
+ (text) => learningBuffer?.append(text),
2182
+ () => learningBuffer?.flush(),
2183
+ totalTokensSaved
2184
+ );
2185
+ return;
2186
+ }
2187
+ const responseBody = await upstreamResponse.text();
2188
+ let finalBody = responseBody;
2189
+ if (totalTokensSaved > 0) {
2190
+ try {
2191
+ const parsed = JSON.parse(responseBody);
2192
+ if (parsed?.usage?.input_tokens != null) {
2193
+ parsed.usage.input_tokens += totalTokensSaved;
2194
+ if (parsed.usage.total_tokens != null) {
2195
+ parsed.usage.total_tokens += totalTokensSaved;
2196
+ }
2197
+ finalBody = JSON.stringify(parsed);
2198
+ logger.log(`[TOKENS] Adjusted input_tokens by +${totalTokensSaved}`);
2199
+ }
2200
+ } catch {
2201
+ }
2202
+ }
2203
+ setCORSHeaders3(res);
2204
+ res.writeHead(200, { "Content-Type": "application/json" });
2205
+ res.end(finalBody);
2206
+ if (anyCompressed) {
2207
+ try {
2208
+ const parsed = JSON.parse(responseBody);
2209
+ if (parsed?.output) {
2210
+ const text = extractOutputText(parsed.output);
2211
+ if (text.length > 0) {
2212
+ pipeline.pipeline.triggerLearning(text);
2213
+ }
2214
+ }
2215
+ } catch {
2216
+ }
2217
+ }
2218
+ } catch (err) {
2219
+ const message = err instanceof Error ? err.message : String(err);
2220
+ logger.log(`[ERROR] Upstream request failed: ${message}`);
2221
+ if (!res.headersSent) {
2222
+ sendJSON2(res, 502, {
2223
+ error: { message: `Failed to reach upstream LLM: ${message}`, type: "server_error" }
2224
+ });
2225
+ }
2226
+ }
2227
+ }
2228
+
2229
+ // src/proxy/handler.ts
2230
+ function setCORSHeaders4(res) {
2231
+ res.setHeader("Access-Control-Allow-Origin", "*");
2232
+ res.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, PATCH");
2233
+ res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization, x-api-key, anthropic-version, anthropic-beta, anthropic-dangerous-direct-browser-access");
2234
+ res.setHeader("Access-Control-Max-Age", "86400");
2235
+ }
2236
+ function sendJSON3(res, status, body) {
2237
+ setCORSHeaders4(res);
2238
+ res.writeHead(status, { "Content-Type": "application/json" });
2239
+ res.end(JSON.stringify(body));
2240
+ }
1414
2241
  function readBody(req) {
1415
2242
  return new Promise((resolve, reject) => {
1416
2243
  const chunks = [];
1417
2244
  req.on("data", (chunk) => chunks.push(chunk));
1418
- req.on("end", () => resolve(Buffer.concat(chunks).toString("utf-8")));
2245
+ req.on("end", () => resolve(Buffer.concat(chunks)));
1419
2246
  req.on("error", reject);
1420
2247
  });
1421
2248
  }
1422
- async function passthroughAnthropic(req, res, fullUrl, config, logger) {
1423
- const upstreamUrl = `${config.anthropicUpstreamUrl}${fullUrl}`;
1424
- const headers = {
1425
- "Content-Type": "application/json"
1426
- };
1427
- const xApiKey = req.headers["x-api-key"];
1428
- if (typeof xApiKey === "string") headers["x-api-key"] = xApiKey;
1429
- const auth = req.headers["authorization"];
1430
- if (typeof auth === "string") headers["Authorization"] = auth;
1431
- const version = req.headers["anthropic-version"];
1432
- if (typeof version === "string") headers["anthropic-version"] = version;
1433
- const beta = req.headers["anthropic-beta"];
1434
- if (typeof beta === "string") headers["anthropic-beta"] = beta;
2249
+ function detectUpstream(req, url) {
2250
+ if (req.headers["x-api-key"]) return "anthropic";
2251
+ if (req.headers["anthropic-version"]) return "anthropic";
2252
+ if (url.startsWith("/v1/messages") || url.startsWith("/messages")) return "anthropic";
2253
+ return "anthropic";
2254
+ }
2255
+ function getUpstreamBaseUrl(target, config) {
2256
+ return target === "anthropic" ? config.anthropicUpstreamUrl : config.upstreamBaseUrl;
2257
+ }
2258
+ var HOP_BY_HOP = /* @__PURE__ */ new Set([
2259
+ "host",
2260
+ "connection",
2261
+ "keep-alive",
2262
+ "transfer-encoding",
2263
+ "te",
2264
+ "trailer",
2265
+ "upgrade",
2266
+ "proxy-authorization",
2267
+ "proxy-authenticate"
2268
+ ]);
2269
+ function buildUpstreamHeaders(req) {
2270
+ const headers = {};
2271
+ for (const [key, value] of Object.entries(req.headers)) {
2272
+ if (HOP_BY_HOP.has(key)) continue;
2273
+ if (value === void 0) continue;
2274
+ headers[key] = Array.isArray(value) ? value.join(", ") : value;
2275
+ }
2276
+ return headers;
2277
+ }
2278
+ async function passthroughToUpstream(req, res, fullUrl, config, logger) {
2279
+ const target = detectUpstream(req, fullUrl);
2280
+ const upstreamBase = getUpstreamBaseUrl(target, config);
2281
+ const upstreamUrl = `${upstreamBase}${fullUrl}`;
2282
+ const method = req.method?.toUpperCase() ?? "GET";
2283
+ logger.log(`[PASSTHROUGH] ${method} ${fullUrl} \u2192 ${target} (${upstreamUrl})`);
2284
+ const headers = buildUpstreamHeaders(req);
1435
2285
  try {
1436
- const body = await readBody(req);
2286
+ const hasBody = method !== "GET" && method !== "HEAD";
2287
+ const body = hasBody ? await readBody(req) : void 0;
1437
2288
  const upstreamRes = await fetch(upstreamUrl, {
1438
- method: "POST",
2289
+ method,
1439
2290
  headers,
1440
2291
  body
1441
2292
  });
1442
- const responseBody = await upstreamRes.text();
1443
- setCORSHeaders3(res);
1444
- res.writeHead(upstreamRes.status, {
1445
- "Content-Type": upstreamRes.headers.get("Content-Type") || "application/json"
1446
- });
1447
- res.end(responseBody);
2293
+ const contentType = upstreamRes.headers.get("Content-Type") || "application/json";
2294
+ const isStreaming = contentType.includes("text/event-stream");
2295
+ if (isStreaming && upstreamRes.body) {
2296
+ setCORSHeaders4(res);
2297
+ res.writeHead(upstreamRes.status, {
2298
+ "Content-Type": contentType,
2299
+ "Cache-Control": "no-cache",
2300
+ "Connection": "keep-alive"
2301
+ });
2302
+ const reader = upstreamRes.body.getReader();
2303
+ try {
2304
+ while (true) {
2305
+ const { done, value } = await reader.read();
2306
+ if (done) break;
2307
+ res.write(value);
2308
+ }
2309
+ } finally {
2310
+ res.end();
2311
+ }
2312
+ } else {
2313
+ const responseBody = await upstreamRes.arrayBuffer();
2314
+ setCORSHeaders4(res);
2315
+ const responseHeaders = { "Content-Type": contentType };
2316
+ const reqId = upstreamRes.headers.get("request-id");
2317
+ if (reqId) responseHeaders["request-id"] = reqId;
2318
+ res.writeHead(upstreamRes.status, responseHeaders);
2319
+ res.end(Buffer.from(responseBody));
2320
+ }
1448
2321
  } catch (err) {
1449
2322
  const message = err instanceof Error ? err.message : String(err);
1450
- logger.log(`[ERROR] Passthrough failed: ${message}`);
1451
- setCORSHeaders3(res);
1452
- res.writeHead(502, { "Content-Type": "application/json" });
1453
- res.end(JSON.stringify({ type: "error", error: { type: "api_error", message: `Failed to reach upstream: ${message}` } }));
2323
+ logger.log(`[ERROR] Passthrough to ${target} failed: ${message}`);
2324
+ if (!res.headersSent) {
2325
+ setCORSHeaders4(res);
2326
+ res.writeHead(502, { "Content-Type": "application/json" });
2327
+ res.end(JSON.stringify({
2328
+ type: "error",
2329
+ error: { type: "api_error", message: `Liminal proxy: failed to reach ${target} upstream: ${message}` }
2330
+ }));
2331
+ }
1454
2332
  }
1455
2333
  }
1456
2334
  function createRequestHandler(pipeline, config, logger) {
@@ -1463,14 +2341,14 @@ function createRequestHandler(pipeline, config, logger) {
1463
2341
  const authType = req.headers["x-api-key"] ? "x-api-key" : req.headers["authorization"] ? "bearer" : "none";
1464
2342
  logger.log(`[REQUEST] ${method} ${fullUrl} (auth: ${authType})`);
1465
2343
  if (method === "OPTIONS") {
1466
- setCORSHeaders3(res);
2344
+ setCORSHeaders4(res);
1467
2345
  res.writeHead(204);
1468
2346
  res.end();
1469
2347
  return;
1470
2348
  }
1471
2349
  if (method === "GET" && (url === "/health" || url === "/")) {
1472
2350
  const summary = pipeline.getSessionSummary();
1473
- sendJSON2(res, 200, {
2351
+ sendJSON3(res, 200, {
1474
2352
  status: "ok",
1475
2353
  version: config.rscApiKey ? "connected" : "no-api-key",
1476
2354
  rsc_connected: !pipeline.isCircuitOpen(),
@@ -1490,53 +2368,41 @@ function createRequestHandler(pipeline, config, logger) {
1490
2368
  });
1491
2369
  return;
1492
2370
  }
1493
- if (method === "GET" && (url === "/v1/models" || url === "/models")) {
1494
- const llmApiKey = req.headers.authorization?.slice(7);
1495
- if (!llmApiKey) {
1496
- sendJSON2(res, 401, {
1497
- error: { message: "Authorization header with Bearer token is required", type: "authentication_error" }
1498
- });
1499
- return;
1500
- }
2371
+ if (method === "POST" && (url === "/v1/chat/completions" || url === "/chat/completions")) {
2372
+ const body = await readBody(req);
2373
+ let parsed;
1501
2374
  try {
1502
- const upstreamRes = await fetch(`${config.upstreamBaseUrl}/v1/models`, {
1503
- headers: { "Authorization": `Bearer ${llmApiKey}` }
1504
- });
1505
- const body = await upstreamRes.text();
1506
- setCORSHeaders3(res);
1507
- res.writeHead(upstreamRes.status, {
1508
- "Content-Type": upstreamRes.headers.get("Content-Type") || "application/json"
1509
- });
1510
- res.end(body);
1511
- } catch (err) {
1512
- const message = err instanceof Error ? err.message : String(err);
1513
- sendJSON2(res, 502, {
1514
- error: { message: `Failed to reach upstream: ${message}`, type: "server_error" }
2375
+ parsed = JSON.parse(body.toString("utf-8"));
2376
+ } catch {
2377
+ sendJSON3(res, 400, {
2378
+ error: { message: "Invalid JSON body", type: "invalid_request_error" }
1515
2379
  });
2380
+ return;
1516
2381
  }
2382
+ await handleChatCompletions(req, res, parsed, pipeline, config, logger);
1517
2383
  return;
1518
2384
  }
1519
- if (method === "POST" && (url === "/v1/chat/completions" || url === "/chat/completions")) {
2385
+ if (method === "POST" && (url === "/v1/responses" || url === "/responses")) {
1520
2386
  const body = await readBody(req);
1521
2387
  let parsed;
1522
2388
  try {
1523
- parsed = JSON.parse(body);
2389
+ parsed = JSON.parse(body.toString("utf-8"));
1524
2390
  } catch {
1525
- sendJSON2(res, 400, {
2391
+ sendJSON3(res, 400, {
1526
2392
  error: { message: "Invalid JSON body", type: "invalid_request_error" }
1527
2393
  });
1528
2394
  return;
1529
2395
  }
1530
- await handleChatCompletions(req, res, parsed, pipeline, config, logger);
2396
+ await handleResponses(req, res, parsed, pipeline, config, logger);
1531
2397
  return;
1532
2398
  }
1533
2399
  if (method === "POST" && (url === "/v1/messages" || url === "/messages")) {
1534
2400
  const body = await readBody(req);
1535
2401
  let parsed;
1536
2402
  try {
1537
- parsed = JSON.parse(body);
2403
+ parsed = JSON.parse(body.toString("utf-8"));
1538
2404
  } catch {
1539
- sendJSON2(res, 400, {
2405
+ sendJSON3(res, 400, {
1540
2406
  type: "error",
1541
2407
  error: { type: "invalid_request_error", message: "Invalid JSON body" }
1542
2408
  });
@@ -1545,18 +2411,12 @@ function createRequestHandler(pipeline, config, logger) {
1545
2411
  await handleAnthropicMessages(req, res, parsed, pipeline, config, logger);
1546
2412
  return;
1547
2413
  }
1548
- if (method === "POST" && url.startsWith("/v1/messages/")) {
1549
- await passthroughAnthropic(req, res, fullUrl, config, logger);
1550
- return;
1551
- }
1552
- sendJSON2(res, 404, {
1553
- error: { message: `Not found: ${method} ${url}`, type: "invalid_request_error" }
1554
- });
2414
+ await passthroughToUpstream(req, res, fullUrl, config, logger);
1555
2415
  } catch (err) {
1556
2416
  const message = err instanceof Error ? err.message : String(err);
1557
2417
  logger.log(`[ERROR] Proxy handler error: ${message}`);
1558
2418
  if (!res.headersSent) {
1559
- sendJSON2(res, 500, {
2419
+ sendJSON3(res, 500, {
1560
2420
  error: { message: "Internal proxy error", type: "server_error" }
1561
2421
  });
1562
2422
  }
@@ -1623,7 +2483,7 @@ var ProxyServer = class {
1623
2483
  };
1624
2484
 
1625
2485
  // src/daemon/logger.ts
1626
- import { appendFileSync as appendFileSync2, statSync, renameSync, mkdirSync as mkdirSync2, existsSync as existsSync3 } from "fs";
2486
+ import { appendFileSync as appendFileSync2, statSync, renameSync, mkdirSync as mkdirSync2, existsSync as existsSync5 } from "fs";
1627
2487
  import { dirname as dirname2 } from "path";
1628
2488
  var MAX_LOG_SIZE = 10 * 1024 * 1024;
1629
2489
  var MAX_BACKUPS = 2;
@@ -1634,7 +2494,7 @@ var FileLogger = class {
1634
2494
  this.logFile = options?.logFile ?? LOG_FILE;
1635
2495
  this.mirrorStdout = options?.mirrorStdout ?? false;
1636
2496
  const logDir = dirname2(this.logFile);
1637
- if (!existsSync3(logDir)) {
2497
+ if (!existsSync5(logDir)) {
1638
2498
  mkdirSync2(logDir, { recursive: true });
1639
2499
  }
1640
2500
  }
@@ -1659,7 +2519,7 @@ var FileLogger = class {
1659
2519
  for (let i = MAX_BACKUPS - 1; i >= 1; i--) {
1660
2520
  const from = `${this.logFile}.${i}`;
1661
2521
  const to = `${this.logFile}.${i + 1}`;
1662
- if (existsSync3(from)) renameSync(from, to);
2522
+ if (existsSync5(from)) renameSync(from, to);
1663
2523
  }
1664
2524
  renameSync(this.logFile, `${this.logFile}.1`);
1665
2525
  } catch {
@@ -1671,16 +2531,16 @@ var FileLogger = class {
1671
2531
  };
1672
2532
 
1673
2533
  // src/daemon/lifecycle.ts
1674
- import { readFileSync as readFileSync3, writeFileSync as writeFileSync2, unlinkSync, existsSync as existsSync4 } from "fs";
2534
+ import { readFileSync as readFileSync4, writeFileSync as writeFileSync3, unlinkSync, existsSync as existsSync6 } from "fs";
1675
2535
  import { fork } from "child_process";
1676
2536
  import { fileURLToPath } from "url";
1677
2537
  function writePidFile(pid) {
1678
- writeFileSync2(PID_FILE, String(pid), "utf-8");
2538
+ writeFileSync3(PID_FILE, String(pid), "utf-8");
1679
2539
  }
1680
2540
  function readPidFile() {
1681
- if (!existsSync4(PID_FILE)) return null;
2541
+ if (!existsSync6(PID_FILE)) return null;
1682
2542
  try {
1683
- const content = readFileSync3(PID_FILE, "utf-8").trim();
2543
+ const content = readFileSync4(PID_FILE, "utf-8").trim();
1684
2544
  const pid = parseInt(content, 10);
1685
2545
  return isNaN(pid) ? null : pid;
1686
2546
  } catch {
@@ -1689,7 +2549,7 @@ function readPidFile() {
1689
2549
  }
1690
2550
  function removePidFile() {
1691
2551
  try {
1692
- if (existsSync4(PID_FILE)) unlinkSync(PID_FILE);
2552
+ if (existsSync6(PID_FILE)) unlinkSync(PID_FILE);
1693
2553
  } catch {
1694
2554
  }
1695
2555
  }
@@ -2070,17 +2930,17 @@ async function configCommand(flags) {
2070
2930
  }
2071
2931
 
2072
2932
  // src/commands/logs.ts
2073
- import { readFileSync as readFileSync4, existsSync as existsSync5, statSync as statSync2, createReadStream } from "fs";
2933
+ import { readFileSync as readFileSync5, existsSync as existsSync7, statSync as statSync2, createReadStream } from "fs";
2074
2934
  import { watchFile, unwatchFile } from "fs";
2075
2935
  async function logsCommand(flags) {
2076
2936
  const follow = flags.has("follow") || flags.has("f");
2077
2937
  const linesFlag = flags.get("lines") ?? flags.get("n");
2078
2938
  const lines = typeof linesFlag === "string" ? parseInt(linesFlag, 10) : 50;
2079
- if (!existsSync5(LOG_FILE)) {
2939
+ if (!existsSync7(LOG_FILE)) {
2080
2940
  console.log('No log file found. Start the daemon with "liminal start" to generate logs.');
2081
2941
  return;
2082
2942
  }
2083
- const content = readFileSync4(LOG_FILE, "utf-8");
2943
+ const content = readFileSync5(LOG_FILE, "utf-8");
2084
2944
  const allLines = content.split("\n");
2085
2945
  const tail = allLines.slice(-lines - 1);
2086
2946
  process.stdout.write(tail.join("\n"));
@@ -2105,6 +2965,135 @@ async function logsCommand(flags) {
2105
2965
  });
2106
2966
  }
2107
2967
 
2968
+ // src/commands/uninstall.ts
2969
+ import { existsSync as existsSync8, rmSync, readFileSync as readFileSync6 } from "fs";
2970
+ var BOLD2 = "\x1B[1m";
2971
+ var DIM2 = "\x1B[2m";
2972
+ var GREEN2 = "\x1B[32m";
2973
+ var YELLOW2 = "\x1B[33m";
2974
+ var RESET2 = "\x1B[0m";
2975
// Read the persisted tool list from the Liminal config file.
// Any failure mode (missing file, unreadable file, invalid JSON,
// or a `tools` field that is not an array) yields an empty list.
function loadConfiguredTools() {
  if (!existsSync8(CONFIG_FILE)) return [];
  try {
    const parsed = JSON.parse(readFileSync6(CONFIG_FILE, "utf-8"));
    return Array.isArray(parsed.tools) ? parsed.tools : [];
  } catch {
    return [];
  }
}
2985
// Interactively undo everything "liminal init" set up:
//   1. confirm with the user (defaults to "No"),
//   2. stop the running daemon (SIGTERM, then SIGKILL after ~3s),
//   3. remove Liminal export lines from the detected shell profile,
//   4. run each configured tool connector's teardown and collect manual steps,
//   5. optionally delete the ~/.liminal data directory.
async function uninstallCommand() {
  console.log();
  console.log(` ${BOLD2}Liminal Uninstall${RESET2}`);
  console.log();
  const confirm = await selectPrompt({
    message: "Remove Liminal configuration and restore tool settings?",
    options: [
      { label: "Yes", value: true, description: "Undo all Liminal setup" },
      { label: "No", value: false, description: "Cancel" }
    ],
    defaultIndex: 1
    // Default to No for safety
  });
  // Strict !== true also treats an aborted/undefined prompt result as "No".
  if (confirm !== true) {
    console.log();
    console.log(" Cancelled.");
    console.log();
    return;
  }
  console.log();
  // NOTE(review): isDaemonRunning is assumed to return { running, pid } based
  // on usage here — defined elsewhere in this bundle.
  const state = isDaemonRunning();
  if (state.running && state.pid) {
    console.log(` Stopping Liminal daemon (PID ${state.pid})...`);
    try {
      // Graceful shutdown first; poll up to 15 * 200ms = 3s for exit.
      process.kill(state.pid, "SIGTERM");
      for (let i = 0; i < 15; i++) {
        await sleep(200);
        if (!isProcessAlive(state.pid)) break;
      }
      // Force-kill if the daemon ignored SIGTERM.
      if (isProcessAlive(state.pid)) {
        process.kill(state.pid, "SIGKILL");
      }
    } catch {
      // Ignore kill errors (process already gone, or not permitted).
    }
    removePidFile();
    console.log(` ${GREEN2}\u2713${RESET2} Daemon stopped`);
  } else {
    console.log(` ${DIM2}\xB7${RESET2} Daemon not running`);
  }
  // Strip the export lines that init appended to the user's shell profile.
  const profile = detectShellProfile();
  if (profile) {
    const existing = findLiminalExportsInProfile(profile);
    if (existing.length > 0) {
      const removed = removeLiminalFromShellProfile(profile);
      if (removed.length > 0) {
        console.log(` ${GREEN2}\u2713${RESET2} Removed ${removed.length} line${removed.length > 1 ? "s" : ""} from ${profile.name}:`);
        for (const line of removed) {
          const trimmed = line.trim();
          // Echo removed lines, skipping blanks and the marker comment itself.
          if (trimmed && trimmed !== "# Liminal \u2014 route AI tools through compression proxy") {
            console.log(` ${DIM2}${trimmed}${RESET2}`);
          }
        }
      }
    } else {
      console.log(` ${DIM2}\xB7${RESET2} No Liminal exports found in ${profile.name}`);
    }
  }
  // Tear down per-tool integrations. If no tools were recorded in config,
  // fall back to tearing down every known connector.
  const configuredTools = loadConfiguredTools();
  const allTools = configuredTools.length > 0 ? configuredTools : CONNECTORS.map((c) => c.info.id);
  const connectors = getConnectors(allTools);
  const manualSteps = [];
  for (const connector of connectors) {
    const result = await connector.teardown();
    // Only surface manual steps for connectors that cannot be automated;
    // automatable connectors are assumed fully handled by teardown().
    if (result.manualSteps.length > 0 && !connector.info.automatable) {
      manualSteps.push({
        label: connector.info.label,
        steps: result.manualSteps
      });
    }
  }
  if (manualSteps.length > 0) {
    console.log();
    console.log(` ${YELLOW2}Manual steps needed:${RESET2}`);
    for (const { label, steps } of manualSteps) {
      console.log();
      console.log(` ${BOLD2}${label}:${RESET2}`);
      for (const step of steps) {
        console.log(` ${step}`);
      }
    }
  }
  // Second, separate confirmation before destroying user data.
  if (existsSync8(LIMINAL_DIR)) {
    console.log();
    const removeData = await selectPrompt({
      message: "Remove ~/.liminal/ directory? (config, logs, PID file)",
      options: [
        { label: "Yes", value: true, description: "Delete all Liminal data" },
        { label: "No", value: false, description: "Keep config and logs" }
      ],
      defaultIndex: 1
      // Default to keep
    });
    if (removeData === true) {
      rmSync(LIMINAL_DIR, { recursive: true, force: true });
      console.log(` ${GREEN2}\u2713${RESET2} Removed ~/.liminal/`);
    } else {
      console.log(` ${DIM2}\xB7${RESET2} Kept ~/.liminal/`);
    }
  }
  // Final summary for the user.
  console.log();
  console.log(` ${GREEN2}Liminal has been uninstalled.${RESET2}`);
  console.log();
  console.log(` ${DIM2}Your AI tools will connect directly to their APIs.${RESET2}`);
  console.log(` ${DIM2}Restart your terminal for shell changes to take effect.${RESET2}`);
  if (manualSteps.length > 0) {
    console.log(` ${YELLOW2}Don't forget the manual steps above for ${manualSteps.map((s) => s.label).join(", ")}.${RESET2}`);
  }
  console.log();
  console.log(` ${DIM2}To reinstall: npx @cognisos/liminal init${RESET2}`);
  console.log();
}
3096
+
2108
3097
  // src/bin.ts
2109
3098
  var USAGE = `
2110
3099
  liminal v${VERSION} \u2014 Transparent LLM context compression proxy
@@ -2119,6 +3108,7 @@ var USAGE = `
2119
3108
  liminal summary Detailed session metrics
2120
3109
  liminal config [--set k=v] [--get k] View or edit configuration
2121
3110
  liminal logs [--follow] [--lines N] View proxy logs
3111
+ liminal uninstall Remove Liminal configuration
2122
3112
 
2123
3113
  Options:
2124
3114
  -h, --help Show this help message
@@ -2196,6 +3186,9 @@ async function main() {
2196
3186
  case "logs":
2197
3187
  await logsCommand(flags);
2198
3188
  break;
3189
+ case "uninstall":
3190
+ await uninstallCommand();
3191
+ break;
2199
3192
  case "":
2200
3193
  console.log(USAGE);
2201
3194
  process.exit(0);