smartcontext-proxy 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/PLAN-v2.md +390 -0
  2. package/dist/src/context/ab-test.d.ts +32 -0
  3. package/dist/src/context/ab-test.js +133 -0
  4. package/dist/src/index.js +99 -78
  5. package/dist/src/proxy/classifier.d.ts +14 -0
  6. package/dist/src/proxy/classifier.js +63 -0
  7. package/dist/src/proxy/connect-proxy.d.ts +34 -0
  8. package/dist/src/proxy/connect-proxy.js +167 -0
  9. package/dist/src/proxy/tls-interceptor.d.ts +23 -0
  10. package/dist/src/proxy/tls-interceptor.js +211 -0
  11. package/dist/src/proxy/tunnel.d.ts +7 -0
  12. package/dist/src/proxy/tunnel.js +33 -0
  13. package/dist/src/system/installer.d.ts +25 -0
  14. package/dist/src/system/installer.js +180 -0
  15. package/dist/src/system/linux.d.ts +11 -0
  16. package/dist/src/system/linux.js +60 -0
  17. package/dist/src/system/macos.d.ts +24 -0
  18. package/dist/src/system/macos.js +98 -0
  19. package/dist/src/system/watchdog.d.ts +7 -0
  20. package/dist/src/system/watchdog.js +115 -0
  21. package/dist/src/test/connect-proxy.test.d.ts +1 -0
  22. package/dist/src/test/connect-proxy.test.js +147 -0
  23. package/dist/src/tls/ca-manager.d.ts +9 -0
  24. package/dist/src/tls/ca-manager.js +117 -0
  25. package/dist/src/tls/trust-store.d.ts +11 -0
  26. package/dist/src/tls/trust-store.js +121 -0
  27. package/dist/src/tray/bridge.d.ts +8 -0
  28. package/dist/src/tray/bridge.js +66 -0
  29. package/dist/src/ui/ws-feed.d.ts +8 -0
  30. package/dist/src/ui/ws-feed.js +30 -0
  31. package/native/macos/SmartContextTray/Package.swift +13 -0
  32. package/native/macos/SmartContextTray/Sources/main.swift +206 -0
  33. package/package.json +6 -2
  34. package/src/context/ab-test.ts +172 -0
  35. package/src/index.ts +104 -74
  36. package/src/proxy/classifier.ts +71 -0
  37. package/src/proxy/connect-proxy.ts +187 -0
  38. package/src/proxy/tls-interceptor.ts +261 -0
  39. package/src/proxy/tunnel.ts +32 -0
  40. package/src/system/installer.ts +148 -0
  41. package/src/system/linux.ts +57 -0
  42. package/src/system/macos.ts +89 -0
  43. package/src/system/watchdog.ts +76 -0
  44. package/src/test/connect-proxy.test.ts +170 -0
  45. package/src/tls/ca-manager.ts +140 -0
  46. package/src/tls/trust-store.ts +123 -0
  47. package/src/tray/bridge.ts +61 -0
  48. package/src/ui/ws-feed.ts +32 -0
package/dist/src/index.js CHANGED
@@ -6,26 +6,35 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  Object.defineProperty(exports, "__esModule", { value: true });
  const auto_detect_js_1 = require("./config/auto-detect.js");
  const server_js_1 = require("./proxy/server.js");
+ const connect_proxy_js_1 = require("./proxy/connect-proxy.js");
  const ollama_js_1 = require("./embedding/ollama.js");
  const lancedb_js_1 = require("./storage/lancedb.js");
+ const optimizer_js_1 = require("./context/optimizer.js");
+ const anthropic_js_1 = require("./providers/anthropic.js");
+ const openai_js_1 = require("./providers/openai.js");
+ const ollama_js_2 = require("./providers/ollama.js");
+ const google_js_1 = require("./providers/google.js");
+ const ca_manager_js_1 = require("./tls/ca-manager.js");
  const process_js_1 = require("./daemon/process.js");
- const service_js_1 = require("./daemon/service.js");
+ const installer_js_1 = require("./system/installer.js");
  const node_http_1 = __importDefault(require("node:http"));
- const VERSION = '0.1.0';
+ const VERSION = '0.2.0';
  function parseArgs(args) {
      const result = {};
      for (let i = 0; i < args.length; i++) {
          const arg = args[i];
          if (arg === '--port' || arg === '-p')
              result.port = args[++i];
-         else if (arg === '--config' || arg === '-c')
-             result.config = args[++i];
          else if (arg === '--help' || arg === '-h')
              result.help = true;
          else if (arg === '--version' || arg === '-v')
              result.version = true;
          else if (arg === '--no-optimize')
              result.noOptimize = true;
+         else if (arg === '--legacy')
+             result.legacy = true;
+         else if (arg === '--purge')
+             result.purge = true;
          else if (arg === '--embedding-url')
              result.embeddingUrl = args[++i];
          else if (arg === '--embedding-model')
@@ -33,63 +42,60 @@ function parseArgs(args) {
          else if (arg === '--data-dir')
              result.dataDir = args[++i];
          else if (!arg.startsWith('-'))
-             result.command = arg;
+             result.command = result.command ? result.command : arg;
      }
      return result;
  }
  function printHelp() {
      console.log(`
  SmartContext Proxy v${VERSION}
- Intelligent context window optimization for LLM APIs
+ Transparent LLM context optimization proxy
 
- Usage:
-   smartcontext-proxy [options]
-   smartcontext-proxy status   Show proxy status
+ Commands:
+   (default)   Start proxy in foreground
+   install     Install: CA cert + system proxy + auto-start service
+   uninstall   Remove all: CA, proxy config, service (--purge for data)
+   status      Show installation and proxy status
+   start       Start as background daemon
+   stop        Stop daemon
+   restart     Restart daemon
 
  Options:
-   --port, -p <port>           Proxy port (default: 4800)
-   --config, -c <file>         Config file path
-   --no-optimize               Run in transparent proxy mode (no context optimization)
-   --embedding-url <url>       Ollama URL for embeddings (default: http://localhost:11434)
-   --embedding-model <model>   Embedding model (default: nomic-embed-text)
-   --data-dir <path>           Data directory (default: ~/.smartcontext/data)
-   --help, -h                  Show help
-   --version, -v               Show version
+   --port, -p <port>   Proxy port (default: 4800)
+   --no-optimize       Disable context optimization (transparent proxy only)
+   --legacy            Use legacy explicit-route proxy instead of CONNECT proxy
+   --purge             With uninstall: also delete all data
+   --help, -h          Show help
+   --version, -v       Show version
 
- Client Integration:
-   ANTHROPIC_API_URL=http://localhost:4800/v1/anthropic
-   OPENAI_BASE_URL=http://localhost:4800/v1/openai
-   OLLAMA_HOST=http://localhost:4800/v1/ollama
-
- API:
-   GET  /health       Health check
-   GET  /_sc/status   Proxy status
-   GET  /_sc/stats    Aggregate metrics
-   GET  /_sc/feed     Recent requests
-   POST /_sc/pause    Pause optimization
-   POST /_sc/resume   Resume optimization
+ After install, all LLM API traffic is automatically intercepted.
+ Dashboard: http://localhost:4800
  `);
  }
  async function showStatus(port) {
+     const inst = (0, installer_js_1.status)(port);
+     console.log('Installation:');
+     for (const [key, val] of Object.entries(inst)) {
+         console.log(`  ${key}: ${val}`);
+     }
      return new Promise((resolve) => {
          node_http_1.default.get(`http://127.0.0.1:${port}/_sc/status`, (res) => {
              let data = '';
              res.on('data', (chunk) => (data += chunk));
              res.on('end', () => {
                  try {
-                     const status = JSON.parse(data);
-                     console.log(`SmartContext Proxy: ${status.state}`);
-                     console.log(`  Uptime: ${Math.round(status.uptime / 1000)}s`);
-                     console.log(`  Requests: ${status.requests}`);
-                     console.log(`  Mode: ${status.mode}`);
+                     const s = JSON.parse(data);
+                     console.log(`\nProxy: ${s.state} (${s.mode})`);
+                     console.log(`  Uptime: ${Math.round(s.uptime / 1000)}s`);
+                     console.log(`  Requests: ${s.requests}`);
                  }
                  catch {
-                     console.log('Could not parse status response');
+                     console.log('\nProxy: response parse error');
                  }
                  resolve();
              });
          }).on('error', () => {
-             console.log(`SmartContext Proxy: not running on port ${port}`);
+             console.log(`\nProxy: not running on port ${port}`);
              resolve();
          });
      });
@@ -105,6 +111,7 @@ async function main() {
          return;
      }
      const port = args.port ? parseInt(args.port, 10) : 4800;
+     // Commands
      if (args.command === 'status') {
          await showStatus(port);
          return;
@@ -113,69 +120,83 @@ async function main() {
          (0, process_js_1.stopDaemon)();
          return;
      }
-     if (args.command === 'start') {
-         (0, process_js_1.startDaemon)(process.argv.slice(3));
-         return;
-     }
-     if (args.command === 'restart') {
-         (0, process_js_1.stopDaemon)();
-         await new Promise(r => setTimeout(r, 1000));
-         (0, process_js_1.startDaemon)(process.argv.slice(3));
-         return;
-     }
-     if (args.command === 'install-service') {
-         const path = (0, service_js_1.installService)(port);
-         console.log(`Service installed: ${path}`);
+     if (args.command === 'install') {
+         console.log('Installing SmartContext Proxy...\n');
+         (0, ca_manager_js_1.ensureCA)();
+         const result = (0, installer_js_1.install)(port);
+         for (const step of result.steps) {
+             console.log(`  ${step.success ? '✓' : '✗'} ${step.step}: ${step.message}`);
+         }
+         console.log(result.success ? '\nInstalled successfully.' : '\nInstallation failed (rolled back).');
          return;
      }
-     if (args.command === 'uninstall-service') {
-         console.log((0, service_js_1.uninstallService)());
+     if (args.command === 'uninstall') {
+         console.log('Uninstalling SmartContext Proxy...\n');
+         const result = (0, installer_js_1.uninstall)(!!args.purge);
+         for (const step of result.steps) {
+             console.log(`  ${step.success ? '✓' : '✗'} ${step.step}: ${step.message}`);
+         }
+         console.log(result.success ? '\nUninstalled.' : '\nSome steps failed.');
          return;
      }
-     const config = (0, auto_detect_js_1.buildConfig)({
-         proxy: { port, host: '127.0.0.1' },
-     });
-     // Initialize embedding and storage (unless --no-optimize)
+     // Build config
+     const config = (0, auto_detect_js_1.buildConfig)({ proxy: { port, host: '127.0.0.1' } });
+     // Initialize embedding and storage
      let embedding;
      let storage;
+     let optimizer;
      if (!args.noOptimize) {
          try {
-             const embeddingUrl = args.embeddingUrl || process.env['OLLAMA_HOST'] || 'http://localhost:11434';
-             const embeddingModel = args.embeddingModel || 'nomic-embed-text';
-             const dataDir = args.dataDir;
-             embedding = new ollama_js_1.OllamaEmbeddingAdapter(embeddingUrl, embeddingModel);
+             const embUrl = args.embeddingUrl || process.env['OLLAMA_HOST'] || 'http://localhost:11434';
+             const embModel = args.embeddingModel || 'nomic-embed-text';
+             embedding = new ollama_js_1.OllamaEmbeddingAdapter(embUrl, embModel);
              await embedding.initialize();
-             storage = new lancedb_js_1.LanceDBAdapter(dataDir);
+             storage = new lancedb_js_1.LanceDBAdapter(args.dataDir);
              await storage.initialize();
-             console.log(`  Embedding: ${embeddingModel} @ ${embeddingUrl}`);
-             console.log(`  Storage: LanceDB`);
+             optimizer = new optimizer_js_1.ContextOptimizer(embedding, storage, config.context);
          }
          catch (err) {
              console.log(`  Optimization unavailable: ${err}`);
             console.log(`  Running in transparent proxy mode`);
-             embedding = undefined;
-             storage = undefined;
          }
      }
-     const server = new server_js_1.ProxyServer(config, embedding, storage);
-     const providers = server.getProviderNames();
-     const mode = embedding && storage ? 'optimizing' : 'transparent';
-     await server.start();
+     // Build provider adapters map
+     const adapters = new Map();
+     adapters.set('anthropic', new anthropic_js_1.AnthropicAdapter());
+     adapters.set('openai', new openai_js_1.OpenAIAdapter());
+     adapters.set('ollama', new ollama_js_2.OllamaAdapter());
+     adapters.set('google', new google_js_1.GoogleAdapter());
+     adapters.set('openrouter', new openai_js_1.OpenAIAdapter('https://openrouter.ai/api'));
+     adapters.set('groq', new openai_js_1.OpenAIAdapter('https://api.groq.com'));
+     adapters.set('together', new openai_js_1.OpenAIAdapter('https://api.together.xyz'));
+     adapters.set('deepseek', new openai_js_1.OpenAIAdapter('https://api.deepseek.com'));
+     const mode = optimizer ? 'optimizing' : 'transparent';
+     if (args.legacy) {
+         // Legacy mode: explicit /v1/{provider}/* routing
+         const server = new server_js_1.ProxyServer(config, embedding, storage);
+         await server.start();
+         console.log(`  Mode: legacy (explicit routing)`);
+     }
+     else {
+         // Default: CONNECT proxy with transparent interception
+         (0, ca_manager_js_1.ensureCA)();
+         const proxy = new connect_proxy_js_1.ConnectProxy(config, optimizer, adapters);
+         await proxy.start();
+     }
+     (0, process_js_1.writePid)();
      console.log(`
- ┌─────────────────────────────────────────────┐
- │  SmartContext Proxy v${VERSION}
- │  http://${config.proxy.host}:${config.proxy.port}
- │
- │  Providers: ${providers.join(', ').padEnd(31)}│
- │  Mode: ${mode.padEnd(36)}│
- └─────────────────────────────────────────────┘
+ ┌──────────────────────────────────────────────────┐
+ │  SmartContext Proxy v${VERSION}
+ │  http://127.0.0.1:${port}
+ │
+ │  Mode: ${(args.legacy ? 'legacy (explicit)' : 'transparent (CONNECT)').padEnd(40)}│
+ │  Optimization: ${mode.padEnd(33)}│
+ │  Dashboard: http://localhost:${port}/ │
+ └──────────────────────────────────────────────────┘
  `);
-     // Write PID file
-     (0, process_js_1.writePid)();
      const shutdown = async () => {
          console.log('\nShutting down...');
          (0, process_js_1.removePid)();
-         await server.stop();
          if (storage)
              await storage.close();
          process.exit(0);
package/dist/src/proxy/classifier.d.ts ADDED
@@ -0,0 +1,14 @@
+ /** Known LLM provider hostnames and their API patterns */
+ export interface ProviderMatch {
+     provider: string;
+     hostname: string;
+     isLLM: true;
+ }
+ export declare function addCustomHost(hostname: string, provider: string): void;
+ export declare function removeCustomHost(hostname: string): void;
+ /** Check if a hostname:port is an LLM provider that should be intercepted */
+ export declare function classifyHost(hostname: string, port: number): ProviderMatch | null;
+ /** Get all known LLM hostnames (for PAC file generation) */
+ export declare function getLLMHostnames(): string[];
+ /** Check if a request path looks like an LLM API call */
+ export declare function isLLMPath(path: string): boolean;
package/dist/src/proxy/classifier.js ADDED
@@ -0,0 +1,63 @@
+ "use strict";
+ /** Known LLM provider hostnames and their API patterns */
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.addCustomHost = addCustomHost;
+ exports.removeCustomHost = removeCustomHost;
+ exports.classifyHost = classifyHost;
+ exports.getLLMHostnames = getLLMHostnames;
+ exports.isLLMPath = isLLMPath;
+ const LLM_HOSTS = {
+     'api.anthropic.com': 'anthropic',
+     'api.openai.com': 'openai',
+     'generativelanguage.googleapis.com': 'google',
+     'openrouter.ai': 'openrouter',
+     'api.together.xyz': 'together',
+     'api.fireworks.ai': 'fireworks',
+     'api.mistral.ai': 'mistral',
+     'api.cohere.com': 'cohere',
+     'api.groq.com': 'groq',
+     'api.deepseek.com': 'deepseek',
+ };
+ /** Ollama ports to intercept (HTTP, no TLS) */
+ const OLLAMA_PORTS = new Set([11434]);
+ /** Custom hosts added via config */
+ let customHosts = {};
+ function addCustomHost(hostname, provider) {
+     customHosts[hostname] = provider;
+ }
+ function removeCustomHost(hostname) {
+     delete customHosts[hostname];
+ }
+ /** Check if a hostname:port is an LLM provider that should be intercepted */
+ function classifyHost(hostname, port) {
+     // Check known LLM hosts
+     const provider = LLM_HOSTS[hostname] || customHosts[hostname];
+     if (provider) {
+         return { provider, hostname, isLLM: true };
+     }
+     // Check Ollama local
+     if ((hostname === 'localhost' || hostname === '127.0.0.1') && OLLAMA_PORTS.has(port)) {
+         return { provider: 'ollama', hostname, isLLM: true };
+     }
+     return null;
+ }
+ /** Get all known LLM hostnames (for PAC file generation) */
+ function getLLMHostnames() {
+     return [
+         ...Object.keys(LLM_HOSTS),
+         ...Object.keys(customHosts),
+     ];
+ }
+ /** Check if a request path looks like an LLM API call */
+ function isLLMPath(path) {
+     const llmPaths = [
+         '/v1/messages', // Anthropic
+         '/v1/chat/completions', // OpenAI
+         '/v1/completions', // OpenAI legacy
+         '/api/chat', // Ollama
+         '/api/generate', // Ollama
+         '/v1beta/models', // Google
+     ];
+     return llmPaths.some((p) => path.startsWith(p));
+ }
+ //# sourceMappingURL=classifier.js.map
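
The classifier is a plain hostname/port lookup plus an Ollama port check. A minimal usage sketch follows; the deep dist/ import path is an assumption for illustration, not a documented entry point of the package:

// Illustrative only — smartcontext-proxy does not document this deep import.
import { classifyHost, addCustomHost, isLLMPath } from 'smartcontext-proxy/dist/src/proxy/classifier.js';

classifyHost('api.anthropic.com', 443);
// → { provider: 'anthropic', hostname: 'api.anthropic.com', isLLM: true }
classifyHost('example.com', 443);  // → null (CONNECT traffic gets a blind tunnel)
classifyHost('127.0.0.1', 11434);  // → { provider: 'ollama', ... }

// Self-hosted gateways can be registered at runtime (hostname is hypothetical):
addCustomHost('llm.internal.example', 'openai');
classifyHost('llm.internal.example', 443); // → { provider: 'openai', ... }

isLLMPath('/v1/chat/completions'); // → true
isLLMPath('/v1/embeddings');       // → false
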
package/dist/src/proxy/connect-proxy.d.ts ADDED
@@ -0,0 +1,34 @@
+ import type { SmartContextConfig } from '../config/schema.js';
+ import type { ProviderAdapter } from '../providers/types.js';
+ import { ContextOptimizer } from '../context/optimizer.js';
+ import { MetricsCollector } from '../metrics/collector.js';
+ /**
+  * HTTP CONNECT proxy that transparently intercepts LLM traffic.
+  *
+  * - Non-LLM HTTPS: blind TCP tunnel (zero overhead)
+  * - LLM HTTPS: TLS intercept → optimize → forward
+  * - HTTP requests: direct handling (dashboard, API, Ollama)
+  */
+ export declare class ConnectProxy {
+     private server;
+     private metrics;
+     private optimizer;
+     private adapters;
+     private paused;
+     private requestCounter;
+     private config;
+     constructor(config: SmartContextConfig, optimizer?: ContextOptimizer | null, adapters?: Map<string, ProviderAdapter>);
+     start(): Promise<void>;
+     stop(): Promise<void>;
+     getMetrics(): MetricsCollector;
+     isPaused(): boolean;
+     setPaused(v: boolean): void;
+     /** Handle HTTP CONNECT requests (HTTPS tunnel establishment) */
+     private handleConnect;
+     /** Handle plain HTTP requests (dashboard, API, Ollama interception) */
+     private handleHTTP;
+     private handleAPI;
+     private generatePAC;
+     private interceptorOptions;
+     private log;
+ }
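
Per the doc comment, routing is decided at CONNECT time: hosts the classifier knows get TLS interception, everything else gets a blind tunnel. A minimal sketch of the client side of that handshake, assuming the default 127.0.0.1:4800 (for an intercepted LLM host the client would additionally need to trust the generated CA):

import http from 'node:http';
import tls from 'node:tls';

const connectReq = http.request({
    host: '127.0.0.1',
    port: 4800,
    method: 'CONNECT',
    path: 'example.com:443', // the CONNECT target, i.e. what handleConnect parses
});
connectReq.on('connect', (_res, socket) => {
    // Tunnel established; speak TLS to the origin through it.
    const secure = tls.connect({ socket, servername: 'example.com' }, () => {
        secure.write('GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n');
    });
    secure.on('data', (chunk) => process.stdout.write(chunk));
});
connectReq.end();
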
package/dist/src/proxy/connect-proxy.js ADDED
@@ -0,0 +1,167 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ConnectProxy = void 0;
+ const node_http_1 = __importDefault(require("node:http"));
+ const classifier_js_1 = require("./classifier.js");
+ const tunnel_js_1 = require("./tunnel.js");
+ const tls_interceptor_js_1 = require("./tls-interceptor.js");
+ const collector_js_1 = require("../metrics/collector.js");
+ const dashboard_js_1 = require("../ui/dashboard.js");
+ /**
+  * HTTP CONNECT proxy that transparently intercepts LLM traffic.
+  *
+  * - Non-LLM HTTPS: blind TCP tunnel (zero overhead)
+  * - LLM HTTPS: TLS intercept → optimize → forward
+  * - HTTP requests: direct handling (dashboard, API, Ollama)
+  */
+ class ConnectProxy {
+     server;
+     metrics = new collector_js_1.MetricsCollector();
+     optimizer = null;
+     adapters = new Map();
+     paused = false;
+     requestCounter = { value: 0 };
+     config;
+     constructor(config, optimizer, adapters) {
+         this.config = config;
+         this.optimizer = optimizer || null;
+         if (adapters)
+             this.adapters = adapters;
+         this.server = node_http_1.default.createServer((req, res) => this.handleHTTP(req, res));
+         this.server.on('connect', (req, clientSocket, head) => this.handleConnect(req, clientSocket, head));
+     }
+     async start() {
+         const { port, host } = this.config.proxy;
+         return new Promise((resolve) => {
+             this.server.listen(port, host, () => resolve());
+         });
+     }
+     async stop() {
+         return new Promise((resolve) => {
+             this.server.close(() => resolve());
+         });
+     }
+     getMetrics() { return this.metrics; }
+     isPaused() { return this.paused; }
+     setPaused(v) { this.paused = v; }
+     /** Handle HTTP CONNECT requests (HTTPS tunnel establishment) */
+     handleConnect(req, clientSocket, head) {
+         const [hostname, portStr] = (req.url || '').split(':');
+         const port = parseInt(portStr || '443', 10);
+         const match = (0, classifier_js_1.classifyHost)(hostname, port);
+         if (match) {
+             // LLM provider → intercept TLS
+             this.log('info', `INTERCEPT ${hostname}:${port} (${match.provider})`);
+             (0, tls_interceptor_js_1.interceptTLS)(clientSocket, hostname, port, match, this.interceptorOptions());
+         }
+         else {
+             // Non-LLM → blind tunnel
+             (0, tunnel_js_1.createTunnel)(clientSocket, hostname, port);
+         }
+     }
+     /** Handle plain HTTP requests (dashboard, API, Ollama interception) */
+     async handleHTTP(req, res) {
+         const path = req.url || '/';
+         const method = req.method || 'GET';
+         // Dashboard
+         if (path === '/' && method === 'GET') {
+             res.writeHead(200, { 'Content-Type': 'text/html; charset=utf-8' });
+             res.end((0, dashboard_js_1.renderDashboard)(this.metrics, this.paused));
+             return;
+         }
+         // Health
+         if (path === '/health') {
+             res.writeHead(200, { 'Content-Type': 'application/json' });
+             res.end(JSON.stringify({
+                 ok: true,
+                 requests: this.requestCounter.value,
+                 paused: this.paused,
+                 mode: this.optimizer ? 'optimizing' : 'transparent',
+                 type: 'connect-proxy',
+             }));
+             return;
+         }
+         // PAC file
+         if (path === '/proxy.pac') {
+             res.writeHead(200, { 'Content-Type': 'application/x-ns-proxy-autoconfig' });
+             res.end(this.generatePAC());
+             return;
+         }
+         // API endpoints
+         if (path.startsWith('/_sc/')) {
+             this.handleAPI(path, method, req, res);
+             return;
+         }
+         // Everything else: 404
+         res.writeHead(404, { 'Content-Type': 'application/json' });
+         res.end(JSON.stringify({ error: 'Not found' }));
+     }
+     handleAPI(path, method, req, res) {
+         res.setHeader('Content-Type', 'application/json');
+         switch (path) {
+             case '/_sc/status':
+                 res.end(JSON.stringify({
+                     state: this.paused ? 'paused' : 'running',
+                     uptime: this.metrics.getUptime(),
+                     requests: this.requestCounter.value,
+                     mode: this.optimizer ? 'optimizing' : 'transparent',
+                 }));
+                 break;
+             case '/_sc/stats':
+                 res.end(JSON.stringify(this.metrics.getStats()));
+                 break;
+             case '/_sc/feed':
+                 res.end(JSON.stringify(this.metrics.getRecent(50)));
+                 break;
+             case '/_sc/pause':
+                 this.paused = true;
+                 res.end(JSON.stringify({ ok: true, state: 'paused' }));
+                 break;
+             case '/_sc/resume':
+                 this.paused = false;
+                 res.end(JSON.stringify({ ok: true, state: 'running' }));
+                 break;
+             default:
+                 res.writeHead(404);
+                 res.end(JSON.stringify({ error: `Unknown: ${path}` }));
+         }
+     }
+     generatePAC() {
+         const { getLLMHostnames } = require('./classifier.js');
+         const hosts = getLLMHostnames();
+         const { port, host } = this.config.proxy;
+         const conditions = hosts
+             .map((h) => `  if (dnsDomainIs(host, "${h}")) return proxy;`)
+             .join('\n');
+         return `function FindProxyForURL(url, host) {
+   var proxy = "PROXY ${host}:${port}";
+ ${conditions}
+   // Ollama local
+   if (host === "localhost" && url.indexOf(":11434") !== -1) return proxy;
+   return "DIRECT";
+ }`;
+     }
+     interceptorOptions() {
+         return {
+             config: this.config,
+             optimizer: this.optimizer,
+             metrics: this.metrics,
+             adapters: this.adapters,
+             paused: this.paused,
+             requestCounter: this.requestCounter,
+             log: this.log.bind(this),
+         };
+     }
+     log(level, message) {
+         const timestamp = new Date().toISOString().slice(11, 23);
+         const prefix = level === 'error' ? '✗' : '→';
+         if (level === 'error' || this.config.logging.level !== 'error') {
+             console.log(`[${timestamp}] ${prefix} ${message}`);
+         }
+     }
+ }
+ exports.ConnectProxy = ConnectProxy;
+ //# sourceMappingURL=connect-proxy.js.map
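
For reference, generatePAC() above emits a PAC file of this shape for the default host list on 127.0.0.1:4800 (abridged here to three of the ten built-in hosts):

function FindProxyForURL(url, host) {
  var proxy = "PROXY 127.0.0.1:4800";
  if (dnsDomainIs(host, "api.anthropic.com")) return proxy;
  if (dnsDomainIs(host, "api.openai.com")) return proxy;
  if (dnsDomainIs(host, "generativelanguage.googleapis.com")) return proxy;
  // Ollama local
  if (host === "localhost" && url.indexOf(":11434") !== -1) return proxy;
  return "DIRECT";
}
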
package/dist/src/proxy/tls-interceptor.d.ts ADDED
@@ -0,0 +1,23 @@
+ import type { Socket } from 'node:net';
+ import { type ProviderMatch } from './classifier.js';
+ import type { ProviderAdapter } from '../providers/types.js';
+ import type { SmartContextConfig } from '../config/schema.js';
+ import { ContextOptimizer } from '../context/optimizer.js';
+ import { MetricsCollector } from '../metrics/collector.js';
+ export interface InterceptorOptions {
+     config: SmartContextConfig;
+     optimizer: ContextOptimizer | null;
+     metrics: MetricsCollector;
+     adapters: Map<string, ProviderAdapter>;
+     paused: boolean;
+     requestCounter: {
+         value: number;
+     };
+     log: (level: string, message: string) => void;
+ }
+ /**
+  * Intercept TLS connection to an LLM provider.
+  * Terminates TLS with a generated cert, parses the HTTP request inside,
+  * optionally optimizes context, then forwards to the real provider.
+  */
+ export declare function interceptTLS(clientSocket: Socket, hostname: string, port: number, match: ProviderMatch, options: InterceptorOptions): void;
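
The implementation behind this declaration ships as dist/src/proxy/tls-interceptor.js (211 lines, not shown in this excerpt). As a rough sketch of the technique the doc comment describes — terminate TLS on the tunneled client socket with a certificate for the target hostname, then handle the decrypted request — with a hypothetical certForHost() standing in for whatever ca-manager.js actually exposes:

import tls from 'node:tls';
import type { Socket } from 'node:net';

// Hypothetical helper: returns a CA-signed leaf cert for the hostname.
declare function certForHost(hostname: string): { key: string; cert: string };

function terminateTLS(clientSocket: Socket, hostname: string): void {
    const { key, cert } = certForHost(hostname);
    // Assumes the CONNECT request has already been answered with
    // "200 Connection Established"; the client then starts a TLS handshake,
    // which we complete with the generated leaf cert (the client must trust
    // the local CA for this to succeed).
    const plaintext = new tls.TLSSocket(clientSocket, { isServer: true, key, cert });
    plaintext.on('data', (chunk: Buffer) => {
        // Decrypted HTTP bytes: parse the request, optionally rewrite the
        // context, then replay it to the real provider over tls.connect().
    });
}
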