smartcontext-proxy 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. package/PLAN.md +406 -0
  2. package/PROGRESS.md +60 -0
  3. package/README.md +99 -0
  4. package/SPEC.md +915 -0
  5. package/adapters/openclaw/embedding.d.ts +8 -0
  6. package/adapters/openclaw/embedding.js +16 -0
  7. package/adapters/openclaw/embedding.ts +15 -0
  8. package/adapters/openclaw/index.d.ts +18 -0
  9. package/adapters/openclaw/index.js +42 -0
  10. package/adapters/openclaw/index.ts +43 -0
  11. package/adapters/openclaw/session-importer.d.ts +22 -0
  12. package/adapters/openclaw/session-importer.js +99 -0
  13. package/adapters/openclaw/session-importer.ts +105 -0
  14. package/adapters/openclaw/storage.d.ts +26 -0
  15. package/adapters/openclaw/storage.js +177 -0
  16. package/adapters/openclaw/storage.ts +183 -0
  17. package/dist/adapters/openclaw/embedding.d.ts +8 -0
  18. package/dist/adapters/openclaw/embedding.js +16 -0
  19. package/dist/adapters/openclaw/index.d.ts +18 -0
  20. package/dist/adapters/openclaw/index.js +42 -0
  21. package/dist/adapters/openclaw/session-importer.d.ts +22 -0
  22. package/dist/adapters/openclaw/session-importer.js +99 -0
  23. package/dist/adapters/openclaw/storage.d.ts +26 -0
  24. package/dist/adapters/openclaw/storage.js +177 -0
  25. package/dist/config/auto-detect.d.ts +3 -0
  26. package/dist/config/auto-detect.js +48 -0
  27. package/dist/config/defaults.d.ts +2 -0
  28. package/dist/config/defaults.js +28 -0
  29. package/dist/config/schema.d.ts +30 -0
  30. package/dist/config/schema.js +3 -0
  31. package/dist/context/budget.d.ts +25 -0
  32. package/dist/context/budget.js +85 -0
  33. package/dist/context/canonical.d.ts +39 -0
  34. package/dist/context/canonical.js +12 -0
  35. package/dist/context/chunker.d.ts +9 -0
  36. package/dist/context/chunker.js +148 -0
  37. package/dist/context/optimizer.d.ts +31 -0
  38. package/dist/context/optimizer.js +163 -0
  39. package/dist/context/retriever.d.ts +29 -0
  40. package/dist/context/retriever.js +103 -0
  41. package/dist/daemon/process.d.ts +6 -0
  42. package/dist/daemon/process.js +76 -0
  43. package/dist/daemon/service.d.ts +2 -0
  44. package/dist/daemon/service.js +99 -0
  45. package/dist/embedding/ollama.d.ts +11 -0
  46. package/dist/embedding/ollama.js +72 -0
  47. package/dist/embedding/types.d.ts +6 -0
  48. package/dist/embedding/types.js +3 -0
  49. package/dist/index.d.ts +2 -0
  50. package/dist/index.js +190 -0
  51. package/dist/metrics/collector.d.ts +43 -0
  52. package/dist/metrics/collector.js +72 -0
  53. package/dist/providers/anthropic.d.ts +15 -0
  54. package/dist/providers/anthropic.js +109 -0
  55. package/dist/providers/google.d.ts +13 -0
  56. package/dist/providers/google.js +40 -0
  57. package/dist/providers/ollama.d.ts +13 -0
  58. package/dist/providers/ollama.js +82 -0
  59. package/dist/providers/openai.d.ts +15 -0
  60. package/dist/providers/openai.js +115 -0
  61. package/dist/providers/types.d.ts +18 -0
  62. package/dist/providers/types.js +3 -0
  63. package/dist/proxy/router.d.ts +12 -0
  64. package/dist/proxy/router.js +46 -0
  65. package/dist/proxy/server.d.ts +25 -0
  66. package/dist/proxy/server.js +265 -0
  67. package/dist/proxy/stream.d.ts +8 -0
  68. package/dist/proxy/stream.js +32 -0
  69. package/dist/src/config/auto-detect.d.ts +3 -0
  70. package/dist/src/config/auto-detect.js +48 -0
  71. package/dist/src/config/defaults.d.ts +2 -0
  72. package/dist/src/config/defaults.js +28 -0
  73. package/dist/src/config/schema.d.ts +30 -0
  74. package/dist/src/config/schema.js +3 -0
  75. package/dist/src/context/budget.d.ts +25 -0
  76. package/dist/src/context/budget.js +85 -0
  77. package/dist/src/context/canonical.d.ts +39 -0
  78. package/dist/src/context/canonical.js +12 -0
  79. package/dist/src/context/chunker.d.ts +9 -0
  80. package/dist/src/context/chunker.js +148 -0
  81. package/dist/src/context/optimizer.d.ts +31 -0
  82. package/dist/src/context/optimizer.js +163 -0
  83. package/dist/src/context/retriever.d.ts +29 -0
  84. package/dist/src/context/retriever.js +103 -0
  85. package/dist/src/daemon/process.d.ts +6 -0
  86. package/dist/src/daemon/process.js +76 -0
  87. package/dist/src/daemon/service.d.ts +2 -0
  88. package/dist/src/daemon/service.js +99 -0
  89. package/dist/src/embedding/ollama.d.ts +11 -0
  90. package/dist/src/embedding/ollama.js +72 -0
  91. package/dist/src/embedding/types.d.ts +6 -0
  92. package/dist/src/embedding/types.js +3 -0
  93. package/dist/src/index.d.ts +2 -0
  94. package/dist/src/index.js +190 -0
  95. package/dist/src/metrics/collector.d.ts +43 -0
  96. package/dist/src/metrics/collector.js +72 -0
  97. package/dist/src/providers/anthropic.d.ts +15 -0
  98. package/dist/src/providers/anthropic.js +109 -0
  99. package/dist/src/providers/google.d.ts +13 -0
  100. package/dist/src/providers/google.js +40 -0
  101. package/dist/src/providers/ollama.d.ts +13 -0
  102. package/dist/src/providers/ollama.js +82 -0
  103. package/dist/src/providers/openai.d.ts +15 -0
  104. package/dist/src/providers/openai.js +115 -0
  105. package/dist/src/providers/types.d.ts +18 -0
  106. package/dist/src/providers/types.js +3 -0
  107. package/dist/src/proxy/router.d.ts +12 -0
  108. package/dist/src/proxy/router.js +46 -0
  109. package/dist/src/proxy/server.d.ts +25 -0
  110. package/dist/src/proxy/server.js +265 -0
  111. package/dist/src/proxy/stream.d.ts +8 -0
  112. package/dist/src/proxy/stream.js +32 -0
  113. package/dist/src/storage/lancedb.d.ts +21 -0
  114. package/dist/src/storage/lancedb.js +158 -0
  115. package/dist/src/storage/types.d.ts +52 -0
  116. package/dist/src/storage/types.js +3 -0
  117. package/dist/src/test/context.test.d.ts +1 -0
  118. package/dist/src/test/context.test.js +141 -0
  119. package/dist/src/test/dashboard.test.d.ts +1 -0
  120. package/dist/src/test/dashboard.test.js +85 -0
  121. package/dist/src/test/proxy.test.d.ts +1 -0
  122. package/dist/src/test/proxy.test.js +188 -0
  123. package/dist/src/ui/dashboard.d.ts +2 -0
  124. package/dist/src/ui/dashboard.js +183 -0
  125. package/dist/storage/lancedb.d.ts +21 -0
  126. package/dist/storage/lancedb.js +158 -0
  127. package/dist/storage/types.d.ts +52 -0
  128. package/dist/storage/types.js +3 -0
  129. package/dist/test/context.test.d.ts +1 -0
  130. package/dist/test/context.test.js +141 -0
  131. package/dist/test/dashboard.test.d.ts +1 -0
  132. package/dist/test/dashboard.test.js +85 -0
  133. package/dist/test/proxy.test.d.ts +1 -0
  134. package/dist/test/proxy.test.js +188 -0
  135. package/dist/ui/dashboard.d.ts +2 -0
  136. package/dist/ui/dashboard.js +183 -0
  137. package/package.json +38 -0
  138. package/src/config/auto-detect.ts +51 -0
  139. package/src/config/defaults.ts +26 -0
  140. package/src/config/schema.ts +33 -0
  141. package/src/context/budget.ts +126 -0
  142. package/src/context/canonical.ts +50 -0
  143. package/src/context/chunker.ts +165 -0
  144. package/src/context/optimizer.ts +201 -0
  145. package/src/context/retriever.ts +123 -0
  146. package/src/daemon/process.ts +70 -0
  147. package/src/daemon/service.ts +103 -0
  148. package/src/embedding/ollama.ts +68 -0
  149. package/src/embedding/types.ts +6 -0
  150. package/src/index.ts +176 -0
  151. package/src/metrics/collector.ts +114 -0
  152. package/src/providers/anthropic.ts +117 -0
  153. package/src/providers/google.ts +42 -0
  154. package/src/providers/ollama.ts +87 -0
  155. package/src/providers/openai.ts +127 -0
  156. package/src/providers/types.ts +20 -0
  157. package/src/proxy/router.ts +48 -0
  158. package/src/proxy/server.ts +315 -0
  159. package/src/proxy/stream.ts +39 -0
  160. package/src/storage/lancedb.ts +169 -0
  161. package/src/storage/types.ts +47 -0
  162. package/src/test/context.test.ts +165 -0
  163. package/src/test/dashboard.test.ts +94 -0
  164. package/src/test/proxy.test.ts +218 -0
  165. package/src/ui/dashboard.ts +184 -0
  166. package/tsconfig.json +18 -0
@@ -0,0 +1,169 @@
1
+ import { connect, type Connection, type Table } from '@lancedb/lancedb';
2
+ import fs from 'node:fs';
3
+ import path from 'node:path';
4
+ import type { StorageAdapter, Chunk, ScoredChunk, SearchOptions, Exchange } from './types.js';
5
+
6
+ export class LanceDBAdapter implements StorageAdapter {
7
+ name = 'lancedb';
8
+ private db!: Connection;
9
+ private chunksTable!: Table;
10
+ private logsDir: string;
11
+ private dbPath: string;
12
+
13
+ constructor(private basePath: string = path.join(process.env['HOME'] || '.', '.smartcontext', 'data')) {
14
+ this.dbPath = path.join(basePath, 'vectors');
15
+ this.logsDir = path.join(basePath, 'logs');
16
+ }
17
+
18
+ async initialize(): Promise<void> {
19
+ fs.mkdirSync(this.dbPath, { recursive: true });
20
+ fs.mkdirSync(this.logsDir, { recursive: true });
21
+
22
+ this.db = await connect(this.dbPath);
23
+
24
+ // Create or open chunks table
25
+ const tableNames = await this.db.tableNames();
26
+ if (tableNames.includes('chunks')) {
27
+ this.chunksTable = await this.db.openTable('chunks');
28
+ }
29
+ // Table will be created lazily on first upsert with actual data
30
+ }
31
+
32
+ async upsertChunks(chunks: Chunk[]): Promise<void> {
33
+ if (chunks.length === 0) return;
34
+
35
+ const records = chunks.map((c) => ({
36
+ id: c.id,
37
+ text: c.text,
38
+ vector: c.embedding,
39
+ sessionId: c.sessionId,
40
+ timestamp: c.timestamp,
41
+ summary: c.metadata.summary,
42
+ tokenCount: c.metadata.tokenCount,
43
+ exchangeIndex: c.metadata.exchangeIndex,
44
+ files: JSON.stringify(c.metadata.files || []),
45
+ tools: JSON.stringify(c.metadata.tools || []),
46
+ }));
47
+
48
+ const tableNames = await this.db.tableNames();
49
+ if (!tableNames.includes('chunks')) {
50
+ this.chunksTable = await this.db.createTable('chunks', records);
51
+ } else {
52
+ if (!this.chunksTable) {
53
+ this.chunksTable = await this.db.openTable('chunks');
54
+ }
55
+ await this.chunksTable.add(records);
56
+ }
57
+ }
58
+
59
+ async search(embedding: number[], options: SearchOptions): Promise<ScoredChunk[]> {
60
+ const tableNames = await this.db.tableNames();
61
+ if (!tableNames.includes('chunks')) return [];
62
+
63
+ if (!this.chunksTable) {
64
+ this.chunksTable = await this.db.openTable('chunks');
65
+ }
66
+
67
+ const results = await this.chunksTable
68
+ .vectorSearch(embedding)
69
+ .limit(options.topK * 2) // Fetch extra for filtering
70
+ .toArray();
71
+
72
+ const scored: ScoredChunk[] = results.map((r) => {
73
+ // LanceDB returns _distance (L2), convert to similarity score
74
+ const distance = (r as Record<string, unknown>)['_distance'] as number;
75
+ let score = 1 / (1 + distance); // Convert L2 distance to similarity
76
+
77
+ const boosts: Record<string, number> = {};
78
+
79
+ // Session recency boost
80
+ if (options.sessionBoost && r.sessionId === options.sessionBoost.sessionId) {
81
+ boosts.session = options.sessionBoost.boost;
82
+ score += options.sessionBoost.boost;
83
+ }
84
+
85
+ // File-path boost
86
+ if (options.fileBoost && options.fileBoost.patterns.length > 0) {
87
+ const files: string[] = JSON.parse(r.files || '[]');
88
+ const hasMatch = options.fileBoost.patterns.some((p) =>
89
+ files.some((f: string) => f.includes(p)),
90
+ );
91
+ if (hasMatch) {
92
+ boosts.filepath = options.fileBoost.boost;
93
+ score += options.fileBoost.boost;
94
+ }
95
+ }
96
+
97
+ return {
98
+ id: r.id,
99
+ text: r.text,
100
+ embedding: [], // Don't return embedding in search results
101
+ sessionId: r.sessionId,
102
+ timestamp: r.timestamp,
103
+ metadata: {
104
+ summary: r.summary,
105
+ tokenCount: r.tokenCount,
106
+ exchangeIndex: r.exchangeIndex,
107
+ files: JSON.parse(r.files || '[]'),
108
+ tools: JSON.parse(r.tools || '[]'),
109
+ },
110
+ score,
111
+ boosts,
112
+ };
113
+ });
114
+
115
+ // Filter by min score and limit
116
+ return scored
117
+ .filter((c) => c.score >= options.minScore)
118
+ .sort((a, b) => b.score - a.score)
119
+ .slice(0, options.topK);
120
+ }
121
+
122
+ async appendLog(sessionId: string, exchange: Exchange): Promise<void> {
123
+ const logFile = path.join(this.logsDir, `${sessionId}.jsonl`);
124
+ const line = JSON.stringify(exchange) + '\n';
125
+ fs.appendFileSync(logFile, line);
126
+ }
127
+
128
+ async getSessionLog(sessionId: string): Promise<Exchange[]> {
129
+ const logFile = path.join(this.logsDir, `${sessionId}.jsonl`);
130
+ if (!fs.existsSync(logFile)) return [];
131
+
132
+ return fs.readFileSync(logFile, 'utf-8')
133
+ .trim()
134
+ .split('\n')
135
+ .filter(Boolean)
136
+ .map((line) => JSON.parse(line));
137
+ }
138
+
139
+ async getStats(): Promise<{ chunks: number; sessions: number; diskBytes: number }> {
140
+ let chunks = 0;
141
+ const tableNames = await this.db.tableNames();
142
+ if (tableNames.includes('chunks')) {
143
+ if (!this.chunksTable) {
144
+ this.chunksTable = await this.db.openTable('chunks');
145
+ }
146
+ chunks = await this.chunksTable.countRows();
147
+ }
148
+
149
+ const sessions = fs.readdirSync(this.logsDir).filter((f) => f.endsWith('.jsonl')).length;
150
+
151
+ // Rough disk usage
152
+ let diskBytes = 0;
153
+ const walk = (dir: string) => {
154
+ if (!fs.existsSync(dir)) return;
155
+ for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
156
+ const full = path.join(dir, entry.name);
157
+ if (entry.isDirectory()) walk(full);
158
+ else diskBytes += fs.statSync(full).size;
159
+ }
160
+ };
161
+ walk(this.basePath);
162
+
163
+ return { chunks, sessions, diskBytes };
164
+ }
165
+
166
+ async close(): Promise<void> {
167
+ // LanceDB doesn't require explicit close
168
+ }
169
+ }
@@ -0,0 +1,47 @@
1
/** One embedded slice of conversation stored in the vector index. */
export interface Chunk {
  /** Unique chunk id. */
  id: string;
  /** Raw text that was embedded. */
  text: string;
  /** Embedding vector; adapters may return it empty in search results. */
  embedding: number[];
  /** Session this chunk was extracted from. */
  sessionId: string;
  /** Creation time (epoch milliseconds — assumed from Date.now() usage; confirm). */
  timestamp: number;
  metadata: ChunkMetadata;
}

/** Side information carried with each chunk. */
export interface ChunkMetadata {
  /** File paths mentioned in the chunk, if any were extracted. */
  files?: string[];
  /** Tool names used in the chunk, if any. */
  tools?: string[];
  /** Short human-readable summary of the chunk. */
  summary: string;
  /** Estimated token count of `text`. */
  tokenCount: number;
  /** Index of the user/assistant exchange this chunk came from. */
  exchangeIndex: number;
}

/** A chunk returned from search, annotated with its relevance score. */
export interface ScoredChunk extends Chunk {
  /** Final score after any boosts have been applied. */
  score: number;
  /** Per-boost contributions (e.g. 'session', 'filepath'), when applied. */
  boosts?: Record<string, number>;
}

/** Parameters controlling a vector search. */
export interface SearchOptions {
  /** Maximum number of chunks to return. */
  topK: number;
  /** Chunks scoring below this threshold are dropped. */
  minScore: number;
  /** Additive score boost for chunks from a specific session. */
  sessionBoost?: { sessionId: string; boost: number };
  /** Additive score boost for chunks whose file paths match any pattern. */
  fileBoost?: { patterns: string[]; boost: number };
}

/** One user/assistant turn as appended to the session log. */
export interface Exchange {
  /** Position of this exchange within the session. */
  index: number;
  userMessage: string;
  assistantMessage: string;
  /** Time of the exchange (epoch milliseconds — assumed; confirm with callers). */
  timestamp: number;
  metadata?: Record<string, unknown>;
}

/** Contract implemented by storage backends (e.g. the LanceDB adapter). */
export interface StorageAdapter {
  /** Backend identifier, e.g. 'lancedb'. */
  name: string;
  /** Prepare directories/connections; call before any other method. */
  initialize(): Promise<void>;
  /** Insert or replace chunks by id. */
  upsertChunks(chunks: Chunk[]): Promise<void>;
  /** Vector search returning scored, filtered, ranked chunks. */
  search(embedding: number[], options: SearchOptions): Promise<ScoredChunk[]>;
  /** Append one exchange to a session's log. */
  appendLog(sessionId: string, exchange: Exchange): Promise<void>;
  /** Read back a session's full log; empty if none. */
  getSessionLog(sessionId: string): Promise<Exchange[]>;
  /** Aggregate counters for dashboards/monitoring. */
  getStats(): Promise<{ chunks: number; sessions: number; diskBytes: number }>;
  /** Release resources; may be a no-op. */
  close(): Promise<void>;
}
@@ -0,0 +1,165 @@
1
+ import { describe, it } from 'node:test';
2
+ import assert from 'node:assert';
3
+ import { chunkConversation, estimateTokens } from '../context/chunker.js';
4
+ import { packContext, getModelContextLimit } from '../context/budget.js';
5
+ import type { CanonicalMessage } from '../context/canonical.js';
6
+ import type { ScoredChunk } from '../storage/types.js';
7
+
8
+ describe('Chunker', () => {
9
+ it('estimates tokens roughly correctly', () => {
10
+ const text = 'Hello world, this is a test message.';
11
+ const tokens = estimateTokens(text);
12
+ assert.ok(tokens > 5 && tokens < 20, `Expected 5-20 tokens, got ${tokens}`);
13
+ });
14
+
15
+ it('chunks a simple conversation into exchange pairs', () => {
16
+ const messages: CanonicalMessage[] = [
17
+ { role: 'user', content: 'What is TypeScript?' },
18
+ { role: 'assistant', content: 'TypeScript is a superset of JavaScript that adds static types.' },
19
+ { role: 'user', content: 'How do I install it?' },
20
+ { role: 'assistant', content: 'Run npm install -g typescript' },
21
+ ];
22
+
23
+ const chunks = chunkConversation(messages, 'test-session');
24
+ assert.strictEqual(chunks.length, 2);
25
+ assert.ok(chunks[0].text.includes('What is TypeScript'));
26
+ assert.ok(chunks[0].text.includes('superset of JavaScript'));
27
+ assert.ok(chunks[1].text.includes('How do I install'));
28
+ });
29
+
30
+ it('splits long exchanges at paragraph boundaries', () => {
31
+ const longResponse = Array(100).fill('This is a paragraph with some detailed content about various programming concepts, design patterns, and software architecture principles that spans multiple lines.\n\n').join('');
32
+ const messages: CanonicalMessage[] = [
33
+ { role: 'user', content: 'Tell me everything about programming.' },
34
+ { role: 'assistant', content: longResponse },
35
+ ];
36
+
37
+ const chunks = chunkConversation(messages, 'test-session');
38
+ assert.ok(chunks.length > 1, `Expected >1 chunks for long response, got ${chunks.length}`);
39
+ for (const chunk of chunks) {
40
+ assert.ok(chunk.metadata.tokenCount <= 2500, `Chunk too large: ${chunk.metadata.tokenCount} tokens`);
41
+ }
42
+ });
43
+
44
+ it('keeps code blocks atomic', () => {
45
+ const response = 'Here is the code:\n\n```typescript\nfunction hello() {\n console.log("hello");\n console.log("world");\n}\n```\n\nThat is the function.';
46
+ const messages: CanonicalMessage[] = [
47
+ { role: 'user', content: 'Show me code' },
48
+ { role: 'assistant', content: response },
49
+ ];
50
+
51
+ const chunks = chunkConversation(messages, 'test-session');
52
+ // Code block should be in one chunk
53
+ const codeChunk = chunks.find((c) => c.text.includes('```typescript'));
54
+ assert.ok(codeChunk, 'Should have a chunk with the code block');
55
+ assert.ok(codeChunk.text.includes('console.log("world")'), 'Code block should be complete');
56
+ });
57
+
58
+ it('extracts file paths from text', () => {
59
+ const messages: CanonicalMessage[] = [
60
+ { role: 'user', content: 'Fix the bug in `src/proxy/server.ts`' },
61
+ { role: 'assistant', content: 'I found the issue in /Users/vt/projects/test/index.ts' },
62
+ ];
63
+
64
+ const chunks = chunkConversation(messages, 'test-session');
65
+ assert.ok(chunks[0].metadata.files!.length > 0, 'Should extract file paths');
66
+ });
67
+
68
+ it('assigns unique IDs to chunks', () => {
69
+ const messages: CanonicalMessage[] = [
70
+ { role: 'user', content: 'Hello' },
71
+ { role: 'assistant', content: 'Hi' },
72
+ { role: 'user', content: 'Bye' },
73
+ { role: 'assistant', content: 'Goodbye' },
74
+ ];
75
+
76
+ const chunks = chunkConversation(messages, 'test-session');
77
+ const ids = new Set(chunks.map((c) => c.id));
78
+ assert.strictEqual(ids.size, chunks.length, 'All chunk IDs should be unique');
79
+ });
80
+ });
81
+
82
+ describe('Budget', () => {
83
+ it('knows context limits for common models', () => {
84
+ assert.strictEqual(getModelContextLimit('claude-opus-4-6'), 200000);
85
+ assert.strictEqual(getModelContextLimit('gpt-4o'), 128000);
86
+ assert.strictEqual(getModelContextLimit('unknown-model'), 128000);
87
+ });
88
+
89
+ it('packs context within budget', () => {
90
+ const messages: CanonicalMessage[] = Array(20).fill(null).map((_, i) => ({
91
+ role: i % 2 === 0 ? 'user' as const : 'assistant' as const,
92
+ content: `Message ${i}: `.padEnd(200, 'x'),
93
+ }));
94
+
95
+ const mockChunks: ScoredChunk[] = [
96
+ {
97
+ id: '1', text: 'chunk 1 text', embedding: [], sessionId: 's1',
98
+ timestamp: Date.now(), score: 0.9, metadata: {
99
+ summary: 'chunk 1', tokenCount: 100, exchangeIndex: 0,
100
+ },
101
+ },
102
+ {
103
+ id: '2', text: 'chunk 2 text', embedding: [], sessionId: 's1',
104
+ timestamp: Date.now(), score: 0.8, metadata: {
105
+ summary: 'chunk 2', tokenCount: 150, exchangeIndex: 1,
106
+ },
107
+ },
108
+ ];
109
+
110
+ const packed = packContext(
111
+ 'You are a helpful assistant.',
112
+ messages,
113
+ mockChunks,
114
+ 'claude-opus-4-6',
115
+ 3, // tier1_exchanges
116
+ 500, // tier3_reserve
117
+ 8192, // response_reserve
118
+ );
119
+
120
+ assert.ok(packed.tier1Messages.length > 0, 'Should have tier 1 messages');
121
+ assert.ok(packed.tier2Chunks.length > 0, 'Should have tier 2 chunks');
122
+ assert.ok(packed.optimizedTokens < packed.originalTokens, 'Should save tokens');
123
+ assert.ok(packed.savingsPercent > 0, 'Should have positive savings');
124
+ });
125
+
126
+ it('handles empty retrieval', () => {
127
+ const messages: CanonicalMessage[] = [
128
+ { role: 'user', content: 'Hello' },
129
+ { role: 'assistant', content: 'Hi there' },
130
+ ];
131
+
132
+ const packed = packContext('System prompt', messages, [], 'gpt-4o', 3, 500, 8192);
133
+ assert.strictEqual(packed.tier2Chunks.length, 0);
134
+ assert.strictEqual(packed.savingsPercent, 0);
135
+ });
136
+ });
137
+
138
+ describe('Metrics', () => {
139
+ it('records and aggregates correctly', async () => {
140
+ const { MetricsCollector } = await import('../metrics/collector.js');
141
+ const metrics = new MetricsCollector();
142
+
143
+ metrics.record({
144
+ id: 1, timestamp: Date.now(), provider: 'anthropic', model: 'claude-opus-4-6',
145
+ streaming: false, originalTokens: 1000, optimizedTokens: 300,
146
+ savingsPercent: 70, latencyOverheadMs: 12, chunksRetrieved: 5,
147
+ topScore: 0.89, passThrough: false,
148
+ });
149
+
150
+ metrics.record({
151
+ id: 2, timestamp: Date.now(), provider: 'openai', model: 'gpt-4o',
152
+ streaming: true, originalTokens: 500, optimizedTokens: 200,
153
+ savingsPercent: 60, latencyOverheadMs: 8, chunksRetrieved: 3,
154
+ topScore: 0.82, passThrough: false,
155
+ });
156
+
157
+ const stats = metrics.getStats();
158
+ assert.strictEqual(stats.totalRequests, 2);
159
+ assert.strictEqual(stats.totalOriginalTokens, 1500);
160
+ assert.strictEqual(stats.totalOptimizedTokens, 500);
161
+ assert.ok(stats.totalSavingsPercent > 50);
162
+ assert.ok(stats.byProvider['anthropic']);
163
+ assert.ok(stats.byModel['gpt-4o']);
164
+ });
165
+ });
@@ -0,0 +1,94 @@
1
+ import { describe, it, before, after } from 'node:test';
2
+ import assert from 'node:assert';
3
+ import http from 'node:http';
4
+ import { ProxyServer } from '../proxy/server.js';
5
+ import { buildConfig } from '../config/auto-detect.js';
6
+
7
+ function httpRequest(
8
+ url: string,
9
+ options: http.RequestOptions,
10
+ body?: string,
11
+ ): Promise<{ status: number; headers: http.IncomingHttpHeaders; body: string }> {
12
+ return new Promise((resolve, reject) => {
13
+ const req = http.request(url, options, (res) => {
14
+ let data = '';
15
+ res.on('data', (chunk) => (data += chunk));
16
+ res.on('end', () => resolve({ status: res.statusCode || 0, headers: res.headers, body: data }));
17
+ });
18
+ req.on('error', reject);
19
+ if (body) req.write(body);
20
+ req.end();
21
+ });
22
+ }
23
+
24
+ describe('Dashboard & API', () => {
25
+ let proxy: ProxyServer;
26
+ const PORT = 14810;
27
+
28
+ before(async () => {
29
+ const config = buildConfig({ proxy: { port: PORT, host: '127.0.0.1' } });
30
+ config.providers['anthropic'] = { apiKey: 'test', baseUrl: 'http://127.0.0.1:1' };
31
+ config.logging.level = 'error';
32
+ proxy = new ProxyServer(config);
33
+ await proxy.start();
34
+ });
35
+
36
+ after(async () => {
37
+ await proxy.stop();
38
+ });
39
+
40
+ it('serves dashboard at root path', async () => {
41
+ const res = await httpRequest(`http://127.0.0.1:${PORT}/`, { method: 'GET' });
42
+ assert.strictEqual(res.status, 200);
43
+ assert.ok(res.headers['content-type']?.includes('text/html'));
44
+ assert.ok(res.body.includes('SmartContext Proxy'));
45
+ assert.ok(res.body.includes('Total Requests'));
46
+ });
47
+
48
+ it('returns status via /_sc/status', async () => {
49
+ const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/status`, { method: 'GET' });
50
+ assert.strictEqual(res.status, 200);
51
+ const data = JSON.parse(res.body);
52
+ assert.strictEqual(data.state, 'running');
53
+ assert.ok(data.uptime >= 0);
54
+ });
55
+
56
+ it('returns stats via /_sc/stats', async () => {
57
+ const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/stats`, { method: 'GET' });
58
+ assert.strictEqual(res.status, 200);
59
+ const data = JSON.parse(res.body);
60
+ assert.strictEqual(data.totalRequests, 0);
61
+ assert.ok('byProvider' in data);
62
+ assert.ok('byModel' in data);
63
+ });
64
+
65
+ it('pause/resume works', async () => {
66
+ // Pause
67
+ let res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/pause`, { method: 'POST' });
68
+ assert.strictEqual(res.status, 200);
69
+ let data = JSON.parse(res.body);
70
+ assert.strictEqual(data.state, 'paused');
71
+
72
+ // Check status
73
+ res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/status`, { method: 'GET' });
74
+ data = JSON.parse(res.body);
75
+ assert.strictEqual(data.state, 'paused');
76
+
77
+ // Resume
78
+ res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/resume`, { method: 'POST' });
79
+ data = JSON.parse(res.body);
80
+ assert.strictEqual(data.state, 'running');
81
+ });
82
+
83
+ it('returns feed via /_sc/feed', async () => {
84
+ const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/feed`, { method: 'GET' });
85
+ assert.strictEqual(res.status, 200);
86
+ const data = JSON.parse(res.body);
87
+ assert.ok(Array.isArray(data));
88
+ });
89
+
90
+ it('returns 404 for unknown API path', async () => {
91
+ const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/nonexistent`, { method: 'GET' });
92
+ assert.strictEqual(res.status, 404);
93
+ });
94
+ });
@@ -0,0 +1,218 @@
1
+ import { describe, it, before, after } from 'node:test';
2
+ import assert from 'node:assert';
3
+ import http from 'node:http';
4
+ import { ProxyServer } from '../proxy/server.js';
5
+ import { buildConfig } from '../config/auto-detect.js';
6
+
7
+ /** Create a mock LLM provider server */
8
+ function createMockProvider(port: number): Promise<http.Server> {
9
+ return new Promise((resolve) => {
10
+ const server = http.createServer((req, res) => {
11
+ let body = '';
12
+ req.on('data', (chunk) => (body += chunk));
13
+ req.on('end', () => {
14
+ const parsed = JSON.parse(body);
15
+
16
+ if (parsed.stream) {
17
+ // SSE streaming response
18
+ res.writeHead(200, {
19
+ 'Content-Type': 'text/event-stream',
20
+ 'Cache-Control': 'no-cache',
21
+ });
22
+
23
+ const events = [
24
+ 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}\n\n',
25
+ 'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello"}}\n\n',
26
+ 'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" there"}}\n\n',
27
+ 'data: {"type":"content_block_stop","index":0}\n\n',
28
+ 'data: {"type":"message_stop"}\n\n',
29
+ ];
30
+
31
+ let i = 0;
32
+ const interval = setInterval(() => {
33
+ if (i < events.length) {
34
+ res.write(events[i]);
35
+ i++;
36
+ } else {
37
+ clearInterval(interval);
38
+ res.end();
39
+ }
40
+ }, 10);
41
+ } else {
42
+ // Non-streaming response
43
+ res.writeHead(200, { 'Content-Type': 'application/json' });
44
+ res.end(JSON.stringify({
45
+ id: 'msg_test',
46
+ type: 'message',
47
+ role: 'assistant',
48
+ content: [{ type: 'text', text: 'Hello from mock provider!' }],
49
+ model: parsed.model || 'test-model',
50
+ stop_reason: 'end_turn',
51
+ usage: { input_tokens: 10, output_tokens: 5 },
52
+ }));
53
+ }
54
+ });
55
+ });
56
+ server.listen(port, '127.0.0.1', () => resolve(server));
57
+ });
58
+ }
59
+
60
+ function httpRequest(
61
+ url: string,
62
+ options: http.RequestOptions,
63
+ body?: string,
64
+ ): Promise<{ status: number; headers: http.IncomingHttpHeaders; body: string }> {
65
+ return new Promise((resolve, reject) => {
66
+ const req = http.request(url, options, (res) => {
67
+ let data = '';
68
+ res.on('data', (chunk) => (data += chunk));
69
+ res.on('end', () => resolve({ status: res.statusCode || 0, headers: res.headers, body: data }));
70
+ });
71
+ req.on('error', reject);
72
+ if (body) req.write(body);
73
+ req.end();
74
+ });
75
+ }
76
+
77
+ describe('SmartContext Proxy', () => {
78
+ let proxy: ProxyServer;
79
+ let mockServer: http.Server;
80
+ const PROXY_PORT = 14800;
81
+ const MOCK_PORT = 14801;
82
+
83
+ before(async () => {
84
+ // Start mock provider
85
+ mockServer = await createMockProvider(MOCK_PORT);
86
+
87
+ // Start proxy pointing to mock provider
88
+ const config = buildConfig({
89
+ proxy: { port: PROXY_PORT, host: '127.0.0.1' },
90
+ });
91
+ // Override anthropic baseUrl to point to mock
92
+ config.providers['anthropic'] = {
93
+ apiKey: 'test-key',
94
+ baseUrl: `http://127.0.0.1:${MOCK_PORT}`,
95
+ };
96
+ config.providers['openai'] = {
97
+ apiKey: 'test-key',
98
+ baseUrl: `http://127.0.0.1:${MOCK_PORT}`,
99
+ };
100
+ config.logging.level = 'error'; // quiet during tests
101
+
102
+ proxy = new ProxyServer(config);
103
+ await proxy.start();
104
+ });
105
+
106
+ after(async () => {
107
+ await proxy.stop();
108
+ mockServer.close();
109
+ });
110
+
111
+ it('health endpoint returns ok', async () => {
112
+ const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/health`, { method: 'GET' });
113
+ assert.strictEqual(res.status, 200);
114
+ const data = JSON.parse(res.body);
115
+ assert.strictEqual(data.ok, true);
116
+ });
117
+
118
+ it('returns 404 for unknown provider', async () => {
119
+ const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/unknown/test`, {
120
+ method: 'POST',
121
+ headers: { 'Content-Type': 'application/json' },
122
+ }, '{}');
123
+ assert.strictEqual(res.status, 404);
124
+ });
125
+
126
+ it('returns 405 for GET on provider path', async () => {
127
+ const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/anthropic/v1/messages`, {
128
+ method: 'GET',
129
+ });
130
+ assert.strictEqual(res.status, 405);
131
+ });
132
+
133
+ it('forwards Anthropic non-streaming request correctly', async () => {
134
+ const body = JSON.stringify({
135
+ model: 'claude-haiku-4-5-20251001',
136
+ max_tokens: 50,
137
+ messages: [{ role: 'user', content: 'Hello' }],
138
+ });
139
+
140
+ const res = await httpRequest(
141
+ `http://127.0.0.1:${PROXY_PORT}/v1/anthropic/v1/messages`,
142
+ {
143
+ method: 'POST',
144
+ headers: {
145
+ 'Content-Type': 'application/json',
146
+ 'x-api-key': 'test-key',
147
+ 'anthropic-version': '2023-06-01',
148
+ },
149
+ },
150
+ body,
151
+ );
152
+
153
+ assert.strictEqual(res.status, 200);
154
+ const data = JSON.parse(res.body);
155
+ assert.strictEqual(data.content[0].text, 'Hello from mock provider!');
156
+ assert.strictEqual(data.role, 'assistant');
157
+ });
158
+
159
+ it('forwards Anthropic streaming request correctly', async () => {
160
+ const body = JSON.stringify({
161
+ model: 'claude-haiku-4-5-20251001',
162
+ max_tokens: 50,
163
+ stream: true,
164
+ messages: [{ role: 'user', content: 'Hello' }],
165
+ });
166
+
167
+ const res = await httpRequest(
168
+ `http://127.0.0.1:${PROXY_PORT}/v1/anthropic/v1/messages`,
169
+ {
170
+ method: 'POST',
171
+ headers: {
172
+ 'Content-Type': 'application/json',
173
+ 'x-api-key': 'test-key',
174
+ 'anthropic-version': '2023-06-01',
175
+ },
176
+ },
177
+ body,
178
+ );
179
+
180
+ assert.strictEqual(res.status, 200);
181
+ assert.ok(res.body.includes('Hello'));
182
+ assert.ok(res.body.includes('there'));
183
+ assert.ok(res.body.includes('message_stop'));
184
+ });
185
+
186
+ it('forwards OpenAI request correctly', async () => {
187
+ const body = JSON.stringify({
188
+ model: 'gpt-4o',
189
+ messages: [
190
+ { role: 'system', content: 'You are helpful.' },
191
+ { role: 'user', content: 'Hello' },
192
+ ],
193
+ });
194
+
195
+ const res = await httpRequest(
196
+ `http://127.0.0.1:${PROXY_PORT}/v1/openai/v1/chat/completions`,
197
+ {
198
+ method: 'POST',
199
+ headers: {
200
+ 'Content-Type': 'application/json',
201
+ 'Authorization': 'Bearer test-key',
202
+ },
203
+ },
204
+ body,
205
+ );
206
+
207
+ assert.strictEqual(res.status, 200);
208
+ const data = JSON.parse(res.body);
209
+ assert.ok(data.content);
210
+ });
211
+
212
+ it('auto-detect finds providers from config', () => {
213
+ const providers = proxy.getProviderNames();
214
+ assert.ok(providers.includes('anthropic'));
215
+ assert.ok(providers.includes('openai'));
216
+ assert.ok(providers.includes('ollama'));
217
+ });
218
+ });