smartcontext-proxy 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. package/PLAN.md +406 -0
  2. package/PROGRESS.md +60 -0
  3. package/README.md +99 -0
  4. package/SPEC.md +915 -0
  5. package/adapters/openclaw/embedding.d.ts +8 -0
  6. package/adapters/openclaw/embedding.js +16 -0
  7. package/adapters/openclaw/embedding.ts +15 -0
  8. package/adapters/openclaw/index.d.ts +18 -0
  9. package/adapters/openclaw/index.js +42 -0
  10. package/adapters/openclaw/index.ts +43 -0
  11. package/adapters/openclaw/session-importer.d.ts +22 -0
  12. package/adapters/openclaw/session-importer.js +99 -0
  13. package/adapters/openclaw/session-importer.ts +105 -0
  14. package/adapters/openclaw/storage.d.ts +26 -0
  15. package/adapters/openclaw/storage.js +177 -0
  16. package/adapters/openclaw/storage.ts +183 -0
  17. package/dist/adapters/openclaw/embedding.d.ts +8 -0
  18. package/dist/adapters/openclaw/embedding.js +16 -0
  19. package/dist/adapters/openclaw/index.d.ts +18 -0
  20. package/dist/adapters/openclaw/index.js +42 -0
  21. package/dist/adapters/openclaw/session-importer.d.ts +22 -0
  22. package/dist/adapters/openclaw/session-importer.js +99 -0
  23. package/dist/adapters/openclaw/storage.d.ts +26 -0
  24. package/dist/adapters/openclaw/storage.js +177 -0
  25. package/dist/config/auto-detect.d.ts +3 -0
  26. package/dist/config/auto-detect.js +48 -0
  27. package/dist/config/defaults.d.ts +2 -0
  28. package/dist/config/defaults.js +28 -0
  29. package/dist/config/schema.d.ts +30 -0
  30. package/dist/config/schema.js +3 -0
  31. package/dist/context/budget.d.ts +25 -0
  32. package/dist/context/budget.js +85 -0
  33. package/dist/context/canonical.d.ts +39 -0
  34. package/dist/context/canonical.js +12 -0
  35. package/dist/context/chunker.d.ts +9 -0
  36. package/dist/context/chunker.js +148 -0
  37. package/dist/context/optimizer.d.ts +31 -0
  38. package/dist/context/optimizer.js +163 -0
  39. package/dist/context/retriever.d.ts +29 -0
  40. package/dist/context/retriever.js +103 -0
  41. package/dist/daemon/process.d.ts +6 -0
  42. package/dist/daemon/process.js +76 -0
  43. package/dist/daemon/service.d.ts +2 -0
  44. package/dist/daemon/service.js +99 -0
  45. package/dist/embedding/ollama.d.ts +11 -0
  46. package/dist/embedding/ollama.js +72 -0
  47. package/dist/embedding/types.d.ts +6 -0
  48. package/dist/embedding/types.js +3 -0
  49. package/dist/index.d.ts +2 -0
  50. package/dist/index.js +190 -0
  51. package/dist/metrics/collector.d.ts +43 -0
  52. package/dist/metrics/collector.js +72 -0
  53. package/dist/providers/anthropic.d.ts +15 -0
  54. package/dist/providers/anthropic.js +109 -0
  55. package/dist/providers/google.d.ts +13 -0
  56. package/dist/providers/google.js +40 -0
  57. package/dist/providers/ollama.d.ts +13 -0
  58. package/dist/providers/ollama.js +82 -0
  59. package/dist/providers/openai.d.ts +15 -0
  60. package/dist/providers/openai.js +115 -0
  61. package/dist/providers/types.d.ts +18 -0
  62. package/dist/providers/types.js +3 -0
  63. package/dist/proxy/router.d.ts +12 -0
  64. package/dist/proxy/router.js +46 -0
  65. package/dist/proxy/server.d.ts +25 -0
  66. package/dist/proxy/server.js +265 -0
  67. package/dist/proxy/stream.d.ts +8 -0
  68. package/dist/proxy/stream.js +32 -0
  69. package/dist/src/config/auto-detect.d.ts +3 -0
  70. package/dist/src/config/auto-detect.js +48 -0
  71. package/dist/src/config/defaults.d.ts +2 -0
  72. package/dist/src/config/defaults.js +28 -0
  73. package/dist/src/config/schema.d.ts +30 -0
  74. package/dist/src/config/schema.js +3 -0
  75. package/dist/src/context/budget.d.ts +25 -0
  76. package/dist/src/context/budget.js +85 -0
  77. package/dist/src/context/canonical.d.ts +39 -0
  78. package/dist/src/context/canonical.js +12 -0
  79. package/dist/src/context/chunker.d.ts +9 -0
  80. package/dist/src/context/chunker.js +148 -0
  81. package/dist/src/context/optimizer.d.ts +31 -0
  82. package/dist/src/context/optimizer.js +163 -0
  83. package/dist/src/context/retriever.d.ts +29 -0
  84. package/dist/src/context/retriever.js +103 -0
  85. package/dist/src/daemon/process.d.ts +6 -0
  86. package/dist/src/daemon/process.js +76 -0
  87. package/dist/src/daemon/service.d.ts +2 -0
  88. package/dist/src/daemon/service.js +99 -0
  89. package/dist/src/embedding/ollama.d.ts +11 -0
  90. package/dist/src/embedding/ollama.js +72 -0
  91. package/dist/src/embedding/types.d.ts +6 -0
  92. package/dist/src/embedding/types.js +3 -0
  93. package/dist/src/index.d.ts +2 -0
  94. package/dist/src/index.js +190 -0
  95. package/dist/src/metrics/collector.d.ts +43 -0
  96. package/dist/src/metrics/collector.js +72 -0
  97. package/dist/src/providers/anthropic.d.ts +15 -0
  98. package/dist/src/providers/anthropic.js +109 -0
  99. package/dist/src/providers/google.d.ts +13 -0
  100. package/dist/src/providers/google.js +40 -0
  101. package/dist/src/providers/ollama.d.ts +13 -0
  102. package/dist/src/providers/ollama.js +82 -0
  103. package/dist/src/providers/openai.d.ts +15 -0
  104. package/dist/src/providers/openai.js +115 -0
  105. package/dist/src/providers/types.d.ts +18 -0
  106. package/dist/src/providers/types.js +3 -0
  107. package/dist/src/proxy/router.d.ts +12 -0
  108. package/dist/src/proxy/router.js +46 -0
  109. package/dist/src/proxy/server.d.ts +25 -0
  110. package/dist/src/proxy/server.js +265 -0
  111. package/dist/src/proxy/stream.d.ts +8 -0
  112. package/dist/src/proxy/stream.js +32 -0
  113. package/dist/src/storage/lancedb.d.ts +21 -0
  114. package/dist/src/storage/lancedb.js +158 -0
  115. package/dist/src/storage/types.d.ts +52 -0
  116. package/dist/src/storage/types.js +3 -0
  117. package/dist/src/test/context.test.d.ts +1 -0
  118. package/dist/src/test/context.test.js +141 -0
  119. package/dist/src/test/dashboard.test.d.ts +1 -0
  120. package/dist/src/test/dashboard.test.js +85 -0
  121. package/dist/src/test/proxy.test.d.ts +1 -0
  122. package/dist/src/test/proxy.test.js +188 -0
  123. package/dist/src/ui/dashboard.d.ts +2 -0
  124. package/dist/src/ui/dashboard.js +183 -0
  125. package/dist/storage/lancedb.d.ts +21 -0
  126. package/dist/storage/lancedb.js +158 -0
  127. package/dist/storage/types.d.ts +52 -0
  128. package/dist/storage/types.js +3 -0
  129. package/dist/test/context.test.d.ts +1 -0
  130. package/dist/test/context.test.js +141 -0
  131. package/dist/test/dashboard.test.d.ts +1 -0
  132. package/dist/test/dashboard.test.js +85 -0
  133. package/dist/test/proxy.test.d.ts +1 -0
  134. package/dist/test/proxy.test.js +188 -0
  135. package/dist/ui/dashboard.d.ts +2 -0
  136. package/dist/ui/dashboard.js +183 -0
  137. package/package.json +38 -0
  138. package/src/config/auto-detect.ts +51 -0
  139. package/src/config/defaults.ts +26 -0
  140. package/src/config/schema.ts +33 -0
  141. package/src/context/budget.ts +126 -0
  142. package/src/context/canonical.ts +50 -0
  143. package/src/context/chunker.ts +165 -0
  144. package/src/context/optimizer.ts +201 -0
  145. package/src/context/retriever.ts +123 -0
  146. package/src/daemon/process.ts +70 -0
  147. package/src/daemon/service.ts +103 -0
  148. package/src/embedding/ollama.ts +68 -0
  149. package/src/embedding/types.ts +6 -0
  150. package/src/index.ts +176 -0
  151. package/src/metrics/collector.ts +114 -0
  152. package/src/providers/anthropic.ts +117 -0
  153. package/src/providers/google.ts +42 -0
  154. package/src/providers/ollama.ts +87 -0
  155. package/src/providers/openai.ts +127 -0
  156. package/src/providers/types.ts +20 -0
  157. package/src/proxy/router.ts +48 -0
  158. package/src/proxy/server.ts +315 -0
  159. package/src/proxy/stream.ts +39 -0
  160. package/src/storage/lancedb.ts +169 -0
  161. package/src/storage/types.ts +47 -0
  162. package/src/test/context.test.ts +165 -0
  163. package/src/test/dashboard.test.ts +94 -0
  164. package/src/test/proxy.test.ts +218 -0
  165. package/src/ui/dashboard.ts +184 -0
  166. package/tsconfig.json +18 -0
@@ -0,0 +1,158 @@
1
"use strict";
// Compiled CommonJS output for src/storage/lancedb.ts.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LanceDBAdapter = void 0;
const lancedb_1 = require("@lancedb/lancedb");
const node_fs_1 = __importDefault(require("node:fs"));
const node_path_1 = __importDefault(require("node:path"));
/**
 * StorageAdapter backed by LanceDB for vector search, plus plain JSONL files
 * for raw session logs. Data layout under `basePath`:
 *   vectors/  LanceDB database containing the 'chunks' table
 *   logs/     one <sessionId>.jsonl file per session
 */
class LanceDBAdapter {
    basePath;
    name = 'lancedb';
    db;
    chunksTable;
    logsDir;
    dbPath;
    constructor(basePath = node_path_1.default.join(process.env['HOME'] || '.', '.smartcontext', 'data')) {
        this.basePath = basePath;
        this.dbPath = node_path_1.default.join(basePath, 'vectors');
        this.logsDir = node_path_1.default.join(basePath, 'logs');
    }
    /** Create data directories, connect, and open the chunks table if it already exists. */
    async initialize() {
        node_fs_1.default.mkdirSync(this.dbPath, { recursive: true });
        node_fs_1.default.mkdirSync(this.logsDir, { recursive: true });
        this.db = await (0, lancedb_1.connect)(this.dbPath);
        const tableNames = await this.db.tableNames();
        if (tableNames.includes('chunks')) {
            this.chunksTable = await this.db.openTable('chunks');
        }
        // Otherwise the table is created lazily on the first upsert with real
        // data, letting LanceDB infer the schema (incl. vector dimension).
    }
    /**
     * Insert or replace chunks keyed by `id`.
     *
     * FIX: the previous implementation only ever called `add`, so re-ingesting
     * the same session appended duplicate rows for identical chunk ids. We now
     * delete any existing rows with matching ids first, giving true upsert
     * semantics as the method name promises.
     */
    async upsertChunks(chunks) {
        if (chunks.length === 0)
            return;
        const records = chunks.map((c) => ({
            id: c.id,
            text: c.text,
            vector: c.embedding,
            sessionId: c.sessionId,
            timestamp: c.timestamp,
            summary: c.metadata.summary,
            tokenCount: c.metadata.tokenCount,
            exchangeIndex: c.metadata.exchangeIndex,
            // Arrays are stored as JSON strings so the columns stay scalar.
            files: JSON.stringify(c.metadata.files || []),
            tools: JSON.stringify(c.metadata.tools || []),
        }));
        const tableNames = await this.db.tableNames();
        if (!tableNames.includes('chunks')) {
            this.chunksTable = await this.db.createTable('chunks', records);
        }
        else {
            if (!this.chunksTable) {
                this.chunksTable = await this.db.openTable('chunks');
            }
            // Remove prior versions of these chunks before re-adding.
            // Single quotes in ids are escaped by doubling (SQL-style literal).
            const idList = records
                .map((r) => `'${String(r.id).replace(/'/g, "''")}'`)
                .join(', ');
            await this.chunksTable.delete(`id IN (${idList})`);
            await this.chunksTable.add(records);
        }
    }
    /**
     * Vector-similarity search over stored chunks.
     * Applies optional session and file-path boosts, then filters by
     * `options.minScore` and returns at most `options.topK` results.
     */
    async search(embedding, options) {
        const tableNames = await this.db.tableNames();
        if (!tableNames.includes('chunks'))
            return [];
        if (!this.chunksTable) {
            this.chunksTable = await this.db.openTable('chunks');
        }
        const results = await this.chunksTable
            .vectorSearch(embedding)
            .limit(options.topK * 2) // Fetch extra so post-filtering can still fill topK
            .toArray();
        const scored = results.map((r) => {
            // LanceDB returns _distance (L2); map to a (0, 1] similarity score.
            const distance = r['_distance'];
            let score = 1 / (1 + distance);
            const boosts = {};
            // Session recency boost: favor chunks from the active session.
            if (options.sessionBoost && r.sessionId === options.sessionBoost.sessionId) {
                boosts.session = options.sessionBoost.boost;
                score += options.sessionBoost.boost;
            }
            // File-path boost: favor chunks that touched files matching any pattern.
            if (options.fileBoost && options.fileBoost.patterns.length > 0) {
                const files = JSON.parse(r.files || '[]');
                const hasMatch = options.fileBoost.patterns.some((p) => files.some((f) => f.includes(p)));
                if (hasMatch) {
                    boosts.filepath = options.fileBoost.boost;
                    score += options.fileBoost.boost;
                }
            }
            return {
                id: r.id,
                text: r.text,
                embedding: [], // Don't return embedding in search results
                sessionId: r.sessionId,
                timestamp: r.timestamp,
                metadata: {
                    summary: r.summary,
                    tokenCount: r.tokenCount,
                    exchangeIndex: r.exchangeIndex,
                    files: JSON.parse(r.files || '[]'),
                    tools: JSON.parse(r.tools || '[]'),
                },
                score,
                boosts,
            };
        });
        // Filter by min score, rank by score, and cap at topK.
        return scored
            .filter((c) => c.score >= options.minScore)
            .sort((a, b) => b.score - a.score)
            .slice(0, options.topK);
    }
    /** Append one exchange to the session's JSONL log (one JSON object per line). */
    async appendLog(sessionId, exchange) {
        const logFile = node_path_1.default.join(this.logsDir, `${sessionId}.jsonl`);
        const line = JSON.stringify(exchange) + '\n';
        node_fs_1.default.appendFileSync(logFile, line);
    }
    /** Read back a session's full exchange log; empty array if no log exists. */
    async getSessionLog(sessionId) {
        const logFile = node_path_1.default.join(this.logsDir, `${sessionId}.jsonl`);
        if (!node_fs_1.default.existsSync(logFile))
            return [];
        return node_fs_1.default.readFileSync(logFile, 'utf-8')
            .trim()
            .split('\n')
            .filter(Boolean)
            .map((line) => JSON.parse(line));
    }
    /** Counts of chunks and sessions plus rough on-disk size of `basePath`. */
    async getStats() {
        let chunks = 0;
        const tableNames = await this.db.tableNames();
        if (tableNames.includes('chunks')) {
            if (!this.chunksTable) {
                this.chunksTable = await this.db.openTable('chunks');
            }
            chunks = await this.chunksTable.countRows();
        }
        // Guard against getStats() being called before initialize() created logsDir.
        const sessions = node_fs_1.default.existsSync(this.logsDir)
            ? node_fs_1.default.readdirSync(this.logsDir).filter((f) => f.endsWith('.jsonl')).length
            : 0;
        // Rough disk usage: recursive sum of file sizes under basePath.
        let diskBytes = 0;
        const walk = (dir) => {
            if (!node_fs_1.default.existsSync(dir))
                return;
            for (const entry of node_fs_1.default.readdirSync(dir, { withFileTypes: true })) {
                const full = node_path_1.default.join(dir, entry.name);
                if (entry.isDirectory())
                    walk(full);
                else
                    diskBytes += node_fs_1.default.statSync(full).size;
            }
        };
        walk(this.basePath);
        return { chunks, sessions, diskBytes };
    }
    async close() {
        // LanceDB doesn't require explicit close
    }
}
exports.LanceDBAdapter = LanceDBAdapter;
//# sourceMappingURL=lancedb.js.map
@@ -0,0 +1,52 @@
1
/** A chunk of conversation text with its embedding vector and metadata. */
export interface Chunk {
    id: string;
    text: string;
    embedding: number[];
    sessionId: string;
    timestamp: number;
    metadata: ChunkMetadata;
}
/** Metadata attached to every chunk. */
export interface ChunkMetadata {
    /** File paths mentioned in the chunk, if any. */
    files?: string[];
    /** Tool names mentioned in the chunk, if any. */
    tools?: string[];
    summary: string;
    tokenCount: number;
    /** Index of the user/assistant exchange this chunk came from. */
    exchangeIndex: number;
}
/** A chunk returned from search, with its similarity score and any boosts applied. */
export interface ScoredChunk extends Chunk {
    score: number;
    /** Per-boost contributions (e.g. session, filepath) included in `score`. */
    boosts?: Record<string, number>;
}
/** Parameters controlling a vector search. */
export interface SearchOptions {
    topK: number;
    /** Minimum score a result must reach to be returned. */
    minScore: number;
    /** Extra score added to chunks from the given session. */
    sessionBoost?: {
        sessionId: string;
        boost: number;
    };
    /** Extra score added to chunks whose files match any pattern. */
    fileBoost?: {
        patterns: string[];
        boost: number;
    };
}
/** One user/assistant exchange as stored in the session log. */
export interface Exchange {
    index: number;
    userMessage: string;
    assistantMessage: string;
    timestamp: number;
    metadata?: Record<string, unknown>;
}
/**
 * Pluggable storage backend contract: vector chunk storage with search,
 * append-only session logs, and basic statistics.
 */
export interface StorageAdapter {
    name: string;
    initialize(): Promise<void>;
    /** Insert or replace chunks keyed by id. */
    upsertChunks(chunks: Chunk[]): Promise<void>;
    search(embedding: number[], options: SearchOptions): Promise<ScoredChunk[]>;
    appendLog(sessionId: string, exchange: Exchange): Promise<void>;
    getSessionLog(sessionId: string): Promise<Exchange[]>;
    getStats(): Promise<{
        chunks: number;
        sessions: number;
        diskBytes: number;
    }>;
    close(): Promise<void>;
}
@@ -0,0 +1,3 @@
1
"use strict";
// Compiled output of a type-only module: no runtime exports.
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map
@@ -0,0 +1 @@
1
// Declaration file for a test module: no public exports.
export {};
@@ -0,0 +1,141 @@
1
"use strict";
// Compiled node:test suite covering the chunker, budget packer, and metrics collector.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_test_1 = require("node:test");
const node_assert_1 = __importDefault(require("node:assert"));
const chunker_js_1 = require("../context/chunker.js");
const budget_js_1 = require("../context/budget.js");
// --- Chunker: conversation splitting, token estimation, metadata extraction ---
(0, node_test_1.describe)('Chunker', () => {
    (0, node_test_1.it)('estimates tokens roughly correctly', () => {
        const text = 'Hello world, this is a test message.';
        const tokens = (0, chunker_js_1.estimateTokens)(text);
        // Loose bounds: the estimator is heuristic, not an exact tokenizer.
        node_assert_1.default.ok(tokens > 5 && tokens < 20, `Expected 5-20 tokens, got ${tokens}`);
    });
    (0, node_test_1.it)('chunks a simple conversation into exchange pairs', () => {
        const messages = [
            { role: 'user', content: 'What is TypeScript?' },
            { role: 'assistant', content: 'TypeScript is a superset of JavaScript that adds static types.' },
            { role: 'user', content: 'How do I install it?' },
            { role: 'assistant', content: 'Run npm install -g typescript' },
        ];
        const chunks = (0, chunker_js_1.chunkConversation)(messages, 'test-session');
        // Two user/assistant pairs -> two chunks.
        node_assert_1.default.strictEqual(chunks.length, 2);
        node_assert_1.default.ok(chunks[0].text.includes('What is TypeScript'));
        node_assert_1.default.ok(chunks[0].text.includes('superset of JavaScript'));
        node_assert_1.default.ok(chunks[1].text.includes('How do I install'));
    });
    (0, node_test_1.it)('splits long exchanges at paragraph boundaries', () => {
        const longResponse = Array(100).fill('This is a paragraph with some detailed content about various programming concepts, design patterns, and software architecture principles that spans multiple lines.\n\n').join('');
        const messages = [
            { role: 'user', content: 'Tell me everything about programming.' },
            { role: 'assistant', content: longResponse },
        ];
        const chunks = (0, chunker_js_1.chunkConversation)(messages, 'test-session');
        node_assert_1.default.ok(chunks.length > 1, `Expected >1 chunks for long response, got ${chunks.length}`);
        // 2500 appears to be the chunker's max-token ceiling — confirm against chunker.ts.
        for (const chunk of chunks) {
            node_assert_1.default.ok(chunk.metadata.tokenCount <= 2500, `Chunk too large: ${chunk.metadata.tokenCount} tokens`);
        }
    });
    (0, node_test_1.it)('keeps code blocks atomic', () => {
        const response = 'Here is the code:\n\n```typescript\nfunction hello() {\n console.log("hello");\n console.log("world");\n}\n```\n\nThat is the function.';
        const messages = [
            { role: 'user', content: 'Show me code' },
            { role: 'assistant', content: response },
        ];
        const chunks = (0, chunker_js_1.chunkConversation)(messages, 'test-session');
        // Code block should be in one chunk
        const codeChunk = chunks.find((c) => c.text.includes('```typescript'));
        node_assert_1.default.ok(codeChunk, 'Should have a chunk with the code block');
        node_assert_1.default.ok(codeChunk.text.includes('console.log("world")'), 'Code block should be complete');
    });
    (0, node_test_1.it)('extracts file paths from text', () => {
        const messages = [
            { role: 'user', content: 'Fix the bug in `src/proxy/server.ts`' },
            { role: 'assistant', content: 'I found the issue in /Users/vt/projects/test/index.ts' },
        ];
        const chunks = (0, chunker_js_1.chunkConversation)(messages, 'test-session');
        node_assert_1.default.ok(chunks[0].metadata.files.length > 0, 'Should extract file paths');
    });
    (0, node_test_1.it)('assigns unique IDs to chunks', () => {
        const messages = [
            { role: 'user', content: 'Hello' },
            { role: 'assistant', content: 'Hi' },
            { role: 'user', content: 'Bye' },
            { role: 'assistant', content: 'Goodbye' },
        ];
        const chunks = (0, chunker_js_1.chunkConversation)(messages, 'test-session');
        const ids = new Set(chunks.map((c) => c.id));
        node_assert_1.default.strictEqual(ids.size, chunks.length, 'All chunk IDs should be unique');
    });
});
// --- Budget: model context limits and three-tier context packing ---
(0, node_test_1.describe)('Budget', () => {
    (0, node_test_1.it)('knows context limits for common models', () => {
        node_assert_1.default.strictEqual((0, budget_js_1.getModelContextLimit)('claude-opus-4-6'), 200000);
        node_assert_1.default.strictEqual((0, budget_js_1.getModelContextLimit)('gpt-4o'), 128000);
        // Unknown models fall back to a 128k default.
        node_assert_1.default.strictEqual((0, budget_js_1.getModelContextLimit)('unknown-model'), 128000);
    });
    (0, node_test_1.it)('packs context within budget', () => {
        const messages = Array(20).fill(null).map((_, i) => ({
            role: i % 2 === 0 ? 'user' : 'assistant',
            content: `Message ${i}: `.padEnd(200, 'x'),
        }));
        const mockChunks = [
            {
                id: '1', text: 'chunk 1 text', embedding: [], sessionId: 's1',
                timestamp: Date.now(), score: 0.9, metadata: {
                    summary: 'chunk 1', tokenCount: 100, exchangeIndex: 0,
                },
            },
            {
                id: '2', text: 'chunk 2 text', embedding: [], sessionId: 's1',
                timestamp: Date.now(), score: 0.8, metadata: {
                    summary: 'chunk 2', tokenCount: 150, exchangeIndex: 1,
                },
            },
        ];
        const packed = (0, budget_js_1.packContext)('You are a helpful assistant.', messages, mockChunks, 'claude-opus-4-6', 3, // tier1_exchanges
        500, // tier3_reserve
        8192);
        node_assert_1.default.ok(packed.tier1Messages.length > 0, 'Should have tier 1 messages');
        node_assert_1.default.ok(packed.tier2Chunks.length > 0, 'Should have tier 2 chunks');
        node_assert_1.default.ok(packed.optimizedTokens < packed.originalTokens, 'Should save tokens');
        node_assert_1.default.ok(packed.savingsPercent > 0, 'Should have positive savings');
    });
    (0, node_test_1.it)('handles empty retrieval', () => {
        const messages = [
            { role: 'user', content: 'Hello' },
            { role: 'assistant', content: 'Hi there' },
        ];
        // No retrieved chunks -> nothing to pack into tier 2, zero savings.
        const packed = (0, budget_js_1.packContext)('System prompt', messages, [], 'gpt-4o', 3, 500, 8192);
        node_assert_1.default.strictEqual(packed.tier2Chunks.length, 0);
        node_assert_1.default.strictEqual(packed.savingsPercent, 0);
    });
});
// --- Metrics: per-request recording and aggregate statistics ---
(0, node_test_1.describe)('Metrics', () => {
    (0, node_test_1.it)('records and aggregates correctly', async () => {
        const { MetricsCollector } = await import('../metrics/collector.js');
        const metrics = new MetricsCollector();
        metrics.record({
            id: 1, timestamp: Date.now(), provider: 'anthropic', model: 'claude-opus-4-6',
            streaming: false, originalTokens: 1000, optimizedTokens: 300,
            savingsPercent: 70, latencyOverheadMs: 12, chunksRetrieved: 5,
            topScore: 0.89, passThrough: false,
        });
        metrics.record({
            id: 2, timestamp: Date.now(), provider: 'openai', model: 'gpt-4o',
            streaming: true, originalTokens: 500, optimizedTokens: 200,
            savingsPercent: 60, latencyOverheadMs: 8, chunksRetrieved: 3,
            topScore: 0.82, passThrough: false,
        });
        const stats = metrics.getStats();
        node_assert_1.default.strictEqual(stats.totalRequests, 2);
        node_assert_1.default.strictEqual(stats.totalOriginalTokens, 1500);
        node_assert_1.default.strictEqual(stats.totalOptimizedTokens, 500);
        node_assert_1.default.ok(stats.totalSavingsPercent > 50);
        node_assert_1.default.ok(stats.byProvider['anthropic']);
        node_assert_1.default.ok(stats.byModel['gpt-4o']);
    });
});
//# sourceMappingURL=context.test.js.map
@@ -0,0 +1 @@
1
// Declaration file for a test module: no public exports.
export {};
@@ -0,0 +1,85 @@
1
"use strict";
// Compiled node:test suite for the proxy's dashboard page and /_sc/* control API.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_test_1 = require("node:test");
const node_assert_1 = __importDefault(require("node:assert"));
const node_http_1 = __importDefault(require("node:http"));
const server_js_1 = require("../proxy/server.js");
const auto_detect_js_1 = require("../config/auto-detect.js");
// Minimal promise wrapper around http.request that buffers the whole response.
function httpRequest(url, options, body) {
    return new Promise((resolve, reject) => {
        const req = node_http_1.default.request(url, options, (res) => {
            let data = '';
            res.on('data', (chunk) => (data += chunk));
            res.on('end', () => resolve({ status: res.statusCode || 0, headers: res.headers, body: data }));
        });
        req.on('error', reject);
        if (body)
            req.write(body);
        req.end();
    });
}
(0, node_test_1.describe)('Dashboard & API', () => {
    let proxy;
    const PORT = 14810;
    (0, node_test_1.before)(async () => {
        const config = (0, auto_detect_js_1.buildConfig)({ proxy: { port: PORT, host: '127.0.0.1' } });
        // Point anthropic at an unreachable address: these tests never forward upstream.
        config.providers['anthropic'] = { apiKey: 'test', baseUrl: 'http://127.0.0.1:1' };
        config.logging.level = 'error';
        proxy = new server_js_1.ProxyServer(config);
        await proxy.start();
    });
    (0, node_test_1.after)(async () => {
        await proxy.stop();
    });
    (0, node_test_1.it)('serves dashboard at root path', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PORT}/`, { method: 'GET' });
        node_assert_1.default.strictEqual(res.status, 200);
        node_assert_1.default.ok(res.headers['content-type']?.includes('text/html'));
        node_assert_1.default.ok(res.body.includes('SmartContext Proxy'));
        node_assert_1.default.ok(res.body.includes('Total Requests'));
    });
    (0, node_test_1.it)('returns status via /_sc/status', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/status`, { method: 'GET' });
        node_assert_1.default.strictEqual(res.status, 200);
        const data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.state, 'running');
        node_assert_1.default.ok(data.uptime >= 0);
    });
    (0, node_test_1.it)('returns stats via /_sc/stats', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/stats`, { method: 'GET' });
        node_assert_1.default.strictEqual(res.status, 200);
        const data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.totalRequests, 0);
        node_assert_1.default.ok('byProvider' in data);
        node_assert_1.default.ok('byModel' in data);
    });
    (0, node_test_1.it)('pause/resume works', async () => {
        // Pause
        let res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/pause`, { method: 'POST' });
        node_assert_1.default.strictEqual(res.status, 200);
        let data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.state, 'paused');
        // Check status
        res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/status`, { method: 'GET' });
        data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.state, 'paused');
        // Resume
        res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/resume`, { method: 'POST' });
        data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.state, 'running');
    });
    (0, node_test_1.it)('returns feed via /_sc/feed', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/feed`, { method: 'GET' });
        node_assert_1.default.strictEqual(res.status, 200);
        const data = JSON.parse(res.body);
        node_assert_1.default.ok(Array.isArray(data));
    });
    (0, node_test_1.it)('returns 404 for unknown API path', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PORT}/_sc/nonexistent`, { method: 'GET' });
        node_assert_1.default.strictEqual(res.status, 404);
    });
});
//# sourceMappingURL=dashboard.test.js.map
@@ -0,0 +1 @@
1
// Declaration file for a test module: no public exports.
export {};
@@ -0,0 +1,188 @@
1
"use strict";
// Compiled node:test suite for end-to-end proxy request forwarding.
// Spins up a mock LLM provider HTTP server and a ProxyServer pointed at it.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const node_test_1 = require("node:test");
const node_assert_1 = __importDefault(require("node:assert"));
const node_http_1 = __importDefault(require("node:http"));
const server_js_1 = require("../proxy/server.js");
const auto_detect_js_1 = require("../config/auto-detect.js");
/** Create a mock LLM provider server */
// Replies with canned Anthropic-style payloads: SSE events when the request
// body has stream:true, otherwise a single JSON message.
function createMockProvider(port) {
    return new Promise((resolve) => {
        const server = node_http_1.default.createServer((req, res) => {
            let body = '';
            req.on('data', (chunk) => (body += chunk));
            req.on('end', () => {
                const parsed = JSON.parse(body);
                if (parsed.stream) {
                    // SSE streaming response
                    res.writeHead(200, {
                        'Content-Type': 'text/event-stream',
                        'Cache-Control': 'no-cache',
                    });
                    const events = [
                        'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}\n\n',
                        'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello"}}\n\n',
                        'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" there"}}\n\n',
                        'data: {"type":"content_block_stop","index":0}\n\n',
                        'data: {"type":"message_stop"}\n\n',
                    ];
                    let i = 0;
                    // Drip events every 10ms to exercise real incremental streaming.
                    const interval = setInterval(() => {
                        if (i < events.length) {
                            res.write(events[i]);
                            i++;
                        }
                        else {
                            clearInterval(interval);
                            res.end();
                        }
                    }, 10);
                }
                else {
                    // Non-streaming response
                    res.writeHead(200, { 'Content-Type': 'application/json' });
                    res.end(JSON.stringify({
                        id: 'msg_test',
                        type: 'message',
                        role: 'assistant',
                        content: [{ type: 'text', text: 'Hello from mock provider!' }],
                        model: parsed.model || 'test-model',
                        stop_reason: 'end_turn',
                        usage: { input_tokens: 10, output_tokens: 5 },
                    }));
                }
            });
        });
        server.listen(port, '127.0.0.1', () => resolve(server));
    });
}
// Minimal promise wrapper around http.request that buffers the whole response.
function httpRequest(url, options, body) {
    return new Promise((resolve, reject) => {
        const req = node_http_1.default.request(url, options, (res) => {
            let data = '';
            res.on('data', (chunk) => (data += chunk));
            res.on('end', () => resolve({ status: res.statusCode || 0, headers: res.headers, body: data }));
        });
        req.on('error', reject);
        if (body)
            req.write(body);
        req.end();
    });
}
(0, node_test_1.describe)('SmartContext Proxy', () => {
    let proxy;
    let mockServer;
    const PROXY_PORT = 14800;
    const MOCK_PORT = 14801;
    (0, node_test_1.before)(async () => {
        // Start mock provider
        mockServer = await createMockProvider(MOCK_PORT);
        // Start proxy pointing to mock provider
        const config = (0, auto_detect_js_1.buildConfig)({
            proxy: { port: PROXY_PORT, host: '127.0.0.1' },
        });
        // Override anthropic baseUrl to point to mock
        config.providers['anthropic'] = {
            apiKey: 'test-key',
            baseUrl: `http://127.0.0.1:${MOCK_PORT}`,
        };
        config.providers['openai'] = {
            apiKey: 'test-key',
            baseUrl: `http://127.0.0.1:${MOCK_PORT}`,
        };
        config.logging.level = 'error'; // quiet during tests
        proxy = new server_js_1.ProxyServer(config);
        await proxy.start();
    });
    (0, node_test_1.after)(async () => {
        await proxy.stop();
        mockServer.close();
    });
    (0, node_test_1.it)('health endpoint returns ok', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/health`, { method: 'GET' });
        node_assert_1.default.strictEqual(res.status, 200);
        const data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.ok, true);
    });
    (0, node_test_1.it)('returns 404 for unknown provider', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/unknown/test`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
        }, '{}');
        node_assert_1.default.strictEqual(res.status, 404);
    });
    (0, node_test_1.it)('returns 405 for GET on provider path', async () => {
        const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/anthropic/v1/messages`, {
            method: 'GET',
        });
        node_assert_1.default.strictEqual(res.status, 405);
    });
    (0, node_test_1.it)('forwards Anthropic non-streaming request correctly', async () => {
        const body = JSON.stringify({
            model: 'claude-haiku-4-5-20251001',
            max_tokens: 50,
            messages: [{ role: 'user', content: 'Hello' }],
        });
        const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/anthropic/v1/messages`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'x-api-key': 'test-key',
                'anthropic-version': '2023-06-01',
            },
        }, body);
        node_assert_1.default.strictEqual(res.status, 200);
        const data = JSON.parse(res.body);
        node_assert_1.default.strictEqual(data.content[0].text, 'Hello from mock provider!');
        node_assert_1.default.strictEqual(data.role, 'assistant');
    });
    (0, node_test_1.it)('forwards Anthropic streaming request correctly', async () => {
        const body = JSON.stringify({
            model: 'claude-haiku-4-5-20251001',
            max_tokens: 50,
            stream: true,
            messages: [{ role: 'user', content: 'Hello' }],
        });
        const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/anthropic/v1/messages`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'x-api-key': 'test-key',
                'anthropic-version': '2023-06-01',
            },
        }, body);
        node_assert_1.default.strictEqual(res.status, 200);
        // The buffered SSE body should contain both deltas and the terminal event.
        node_assert_1.default.ok(res.body.includes('Hello'));
        node_assert_1.default.ok(res.body.includes('there'));
        node_assert_1.default.ok(res.body.includes('message_stop'));
    });
    (0, node_test_1.it)('forwards OpenAI request correctly', async () => {
        const body = JSON.stringify({
            model: 'gpt-4o',
            messages: [
                { role: 'system', content: 'You are helpful.' },
                { role: 'user', content: 'Hello' },
            ],
        });
        const res = await httpRequest(`http://127.0.0.1:${PROXY_PORT}/v1/openai/v1/chat/completions`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': 'Bearer test-key',
            },
        }, body);
        node_assert_1.default.strictEqual(res.status, 200);
        // The mock replies with an Anthropic-shaped body regardless of route,
        // so only the presence of `content` is asserted here.
        const data = JSON.parse(res.body);
        node_assert_1.default.ok(data.content);
    });
    (0, node_test_1.it)('auto-detect finds providers from config', () => {
        const providers = proxy.getProviderNames();
        node_assert_1.default.ok(providers.includes('anthropic'));
        node_assert_1.default.ok(providers.includes('openai'));
        // ollama is presumably auto-detected by buildConfig defaults — confirm in auto-detect.ts.
        node_assert_1.default.ok(providers.includes('ollama'));
    });
});
//# sourceMappingURL=proxy.test.js.map
@@ -0,0 +1,2 @@
1
import type { MetricsCollector } from '../metrics/collector.js';
/**
 * Render the dashboard page from current metrics.
 * @param metrics - collector whose aggregated stats are displayed
 * @param paused - whether the proxy is currently paused
 * @returns the rendered page as a string (presumably a full HTML document — see ui/dashboard.ts)
 */
export declare function renderDashboard(metrics: MetricsCollector, paused: boolean): string;