bluera-knowledge 0.9.26 → 0.9.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/.claude/commands/commit.md +4 -7
  2. package/.claude/hooks/post-edit-check.sh +21 -24
  3. package/.claude/skills/atomic-commits/SKILL.md +6 -0
  4. package/.claude-plugin/plugin.json +1 -1
  5. package/.env.example +4 -0
  6. package/.husky/pre-push +12 -2
  7. package/.versionrc.json +0 -4
  8. package/CHANGELOG.md +69 -0
  9. package/README.md +55 -20
  10. package/bun.lock +35 -1
  11. package/commands/crawl.md +2 -0
  12. package/dist/{chunk-BICFAWMN.js → chunk-DNOIM7BO.js} +73 -8
  13. package/dist/chunk-DNOIM7BO.js.map +1 -0
  14. package/dist/{chunk-5QMHZUC4.js → chunk-NJUMU4X2.js} +462 -105
  15. package/dist/chunk-NJUMU4X2.js.map +1 -0
  16. package/dist/{chunk-J7J6LXOJ.js → chunk-SZNTYLYT.js} +106 -41
  17. package/dist/chunk-SZNTYLYT.js.map +1 -0
  18. package/dist/index.js +65 -25
  19. package/dist/index.js.map +1 -1
  20. package/dist/mcp/server.js +2 -2
  21. package/dist/workers/background-worker-cli.js +2 -2
  22. package/eslint.config.js +1 -1
  23. package/package.json +3 -1
  24. package/src/analysis/ast-parser.test.ts +46 -0
  25. package/src/cli/commands/crawl.test.ts +99 -12
  26. package/src/cli/commands/crawl.ts +76 -24
  27. package/src/crawl/article-converter.ts +36 -1
  28. package/src/crawl/bridge.ts +18 -7
  29. package/src/crawl/intelligent-crawler.ts +45 -4
  30. package/src/db/embeddings.test.ts +16 -0
  31. package/src/logging/index.ts +29 -0
  32. package/src/logging/logger.test.ts +75 -0
  33. package/src/logging/logger.ts +147 -0
  34. package/src/logging/payload.test.ts +152 -0
  35. package/src/logging/payload.ts +121 -0
  36. package/src/mcp/handlers/search.handler.test.ts +28 -9
  37. package/src/mcp/handlers/search.handler.ts +69 -29
  38. package/src/mcp/handlers/store.handler.test.ts +1 -0
  39. package/src/mcp/server.ts +44 -16
  40. package/src/services/chunking.service.ts +23 -0
  41. package/src/services/index.service.test.ts +921 -1
  42. package/src/services/index.service.ts +76 -1
  43. package/src/services/index.ts +10 -1
  44. package/src/services/search.service.test.ts +573 -21
  45. package/src/services/search.service.ts +257 -105
  46. package/src/services/snippet.service.ts +28 -3
  47. package/src/services/token.service.test.ts +45 -0
  48. package/src/services/token.service.ts +33 -0
  49. package/src/types/result.test.ts +10 -0
  50. package/tests/integration/cli-consistency.test.ts +1 -4
  51. package/vitest.config.ts +4 -0
  52. package/dist/chunk-5QMHZUC4.js.map +0 -1
  53. package/dist/chunk-BICFAWMN.js.map +0 -1
  54. package/dist/chunk-J7J6LXOJ.js.map +0 -1
  55. package/scripts/readme-version-updater.cjs +0 -18
package/src/logging/index.ts
@@ -0,0 +1,29 @@
+ /**
+  * Logging module - pino-based file logging with auto-rotation
+  *
+  * @example
+  * import { createLogger, summarizePayload } from './logging/index.js';
+  *
+  * const logger = createLogger('my-module');
+  * logger.info({ data }, 'Something happened');
+  *
+  * // For large payloads:
+  * logger.info({
+  *   ...summarizePayload(html, 'raw-html', url),
+  * }, 'Fetched HTML');
+  */
+
+ export {
+   createLogger,
+   shutdownLogger,
+   getCurrentLogLevel,
+   isLevelEnabled,
+   getLogDirectory,
+   type LogLevel,
+ } from './logger.js';
+
+ export {
+   summarizePayload,
+   truncateForLog,
+   type PayloadSummary,
+ } from './payload.js';
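
For orientation, a minimal consumer-side sketch of the API surface exported above. The module name 'crawler', the URL, and the HTML value are illustrative placeholders, not taken from the package:

    // Sketch assuming the exports listed in src/logging/index.ts above.
    import { createLogger, shutdownLogger, summarizePayload } from './logging/index.js';

    const logger = createLogger('crawler'); // child logger tagged { module: 'crawler' }
    logger.info({ url: 'https://example.com' }, 'Fetching page');

    // Large payloads are logged as preview + size + hash rather than raw content.
    const html = '<html>placeholder</html>';
    logger.info({ ...summarizePayload(html, 'raw-html', 'https://example.com') }, 'Fetched HTML');

    await shutdownLogger(); // flush the file transport before process exit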
package/src/logging/logger.test.ts
@@ -0,0 +1,75 @@
+ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+ import { getCurrentLogLevel, isLevelEnabled, getLogDirectory } from './logger.js';
+
+ describe('logger', () => {
+   const originalEnv = process.env['LOG_LEVEL'];
+
+   beforeEach(() => {
+     delete process.env['LOG_LEVEL'];
+   });
+
+   afterEach(() => {
+     if (originalEnv !== undefined) {
+       process.env['LOG_LEVEL'] = originalEnv;
+     } else {
+       delete process.env['LOG_LEVEL'];
+     }
+   });
+
+   describe('getCurrentLogLevel', () => {
+     it('returns info as default level', () => {
+       expect(getCurrentLogLevel()).toBe('info');
+     });
+
+     it('returns level from environment variable', () => {
+       process.env['LOG_LEVEL'] = 'debug';
+       expect(getCurrentLogLevel()).toBe('debug');
+     });
+
+     it('handles lowercase environment variable', () => {
+       process.env['LOG_LEVEL'] = 'WARN';
+       expect(getCurrentLogLevel()).toBe('warn');
+     });
+
+     it('treats empty string as default', () => {
+       process.env['LOG_LEVEL'] = '';
+       expect(getCurrentLogLevel()).toBe('info');
+     });
+
+     it('throws on invalid log level', () => {
+       process.env['LOG_LEVEL'] = 'invalid';
+       expect(() => getCurrentLogLevel()).toThrow('Invalid LOG_LEVEL: "invalid"');
+     });
+   });
+
+   describe('isLevelEnabled', () => {
+     it('returns true when check level is at or above current level', () => {
+       process.env['LOG_LEVEL'] = 'info';
+       expect(isLevelEnabled('info')).toBe(true);
+       expect(isLevelEnabled('warn')).toBe(true);
+       expect(isLevelEnabled('error')).toBe(true);
+     });
+
+     it('returns false when check level is below current level', () => {
+       process.env['LOG_LEVEL'] = 'warn';
+       expect(isLevelEnabled('debug')).toBe(false);
+       expect(isLevelEnabled('info')).toBe(false);
+     });
+
+     it('enables all levels when set to trace', () => {
+       process.env['LOG_LEVEL'] = 'trace';
+       expect(isLevelEnabled('trace')).toBe(true);
+       expect(isLevelEnabled('debug')).toBe(true);
+       expect(isLevelEnabled('info')).toBe(true);
+     });
+   });
+
+   describe('getLogDirectory', () => {
+     it('returns path under home directory', () => {
+       const logDir = getLogDirectory();
+       expect(logDir).toContain('.bluera');
+       expect(logDir).toContain('bluera-knowledge');
+       expect(logDir).toContain('logs');
+     });
+   });
+ });
package/src/logging/logger.ts
@@ -0,0 +1,147 @@
+ /**
+  * Core logger factory using pino with file-based rolling logs
+  *
+  * Features:
+  * - File-only output (no console pollution for Claude Code)
+  * - Size-based rotation (10MB, keeps 5 files)
+  * - LOG_LEVEL env var control (trace/debug/info/warn/error/fatal)
+  * - Child loggers per module for context
+  */
+
+ import pino, { type Logger, type LoggerOptions } from 'pino';
+ import { homedir } from 'node:os';
+ import { mkdirSync, existsSync } from 'node:fs';
+ import { join } from 'node:path';
+
+ /** Valid log levels */
+ export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal';
+
+ const VALID_LEVELS: readonly LogLevel[] = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'];
+ const VALID_LEVELS_SET: ReadonlySet<string> = new Set(VALID_LEVELS);
+
+ /** Default log directory under user home */
+ function getLogDir(): string {
+   return join(homedir(), '.bluera', 'bluera-knowledge', 'logs');
+ }
+
+ /** Resolve and create log directory - fails fast if cannot create */
+ function ensureLogDir(): string {
+   const logDir = getLogDir();
+   if (!existsSync(logDir)) {
+     mkdirSync(logDir, { recursive: true });
+   }
+   return logDir;
+ }
+
+ /** Check if a string is a valid log level */
+ function isValidLogLevel(level: string): level is LogLevel {
+   return VALID_LEVELS_SET.has(level);
+ }
+
+ /** Get log level from environment - fails fast on invalid value */
+ function getLogLevel(): LogLevel {
+   const level = process.env['LOG_LEVEL']?.toLowerCase();
+
+   if (level === undefined || level === '') {
+     return 'info';
+   }
+
+   if (!isValidLogLevel(level)) {
+     throw new Error(
+       `Invalid LOG_LEVEL: "${level}". Valid values: ${VALID_LEVELS.join(', ')}`
+     );
+   }
+
+   return level;
+ }
+
+ /** Root logger instance - lazily initialized */
+ let rootLogger: Logger | null = null;
+
+ /** Initialize the root logger with pino-roll transport */
+ function initializeLogger(): Logger {
+   if (rootLogger !== null) {
+     return rootLogger;
+   }
+
+   const logDir = ensureLogDir();
+   const logFile = join(logDir, 'app.log');
+   const level = getLogLevel();
+
+   const options: LoggerOptions = {
+     level,
+     timestamp: pino.stdTimeFunctions.isoTime,
+     formatters: {
+       level: (label) => ({ level: label }),
+     },
+     transport: {
+       target: 'pino-roll',
+       options: {
+         file: logFile,
+         size: '10m',          // 10MB rotation
+         limit: { count: 5 },  // Keep 5 rotated files
+         mkdir: true,
+       },
+     },
+   };
+
+   rootLogger = pino(options);
+   return rootLogger;
+ }
+
+ /**
+  * Create a named child logger for a specific module
+  *
+  * @param module - Module name (e.g., 'crawler', 'mcp-server', 'search-service')
+  * @returns Logger instance with module context
+  *
+  * @example
+  * const logger = createLogger('crawler');
+  * logger.info({ url }, 'Fetching page');
+  */
+ export function createLogger(module: string): Logger {
+   const root = initializeLogger();
+   return root.child({ module });
+ }
+
+ /**
+  * Get the current log level
+  */
+ export function getCurrentLogLevel(): LogLevel {
+   return getLogLevel();
+ }
+
+ /**
+  * Check if a specific log level is enabled
+  */
+ export function isLevelEnabled(level: LogLevel): boolean {
+   const currentLevel = getLogLevel();
+   const currentIndex = VALID_LEVELS.indexOf(currentLevel);
+   const checkIndex = VALID_LEVELS.indexOf(level);
+   return checkIndex >= currentIndex;
+ }
+
+ /**
+  * Get the log directory path
+  */
+ export function getLogDirectory(): string {
+   return getLogDir();
+ }
+
+ /**
+  * Flush and shutdown the logger - call before process exit
+  */
+ export function shutdownLogger(): Promise<void> {
+   return new Promise((resolve) => {
+     if (rootLogger !== null) {
+       rootLogger.flush();
+       // Give time for async transport to flush
+       setTimeout(() => {
+         rootLogger = null;
+         resolve();
+       }, 100);
+     } else {
+       resolve();
+     }
+   });
+ }
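
The level gate in isLevelEnabled is a plain index comparison over VALID_LEVELS, and getLogLevel re-reads LOG_LEVEL on every call. A small sketch of the resulting behavior, consistent with logger.test.ts above; the 'verbose' value is an invented invalid input used only to show the failure path:

    import { getCurrentLogLevel, isLevelEnabled } from './logging/logger.js';

    process.env['LOG_LEVEL'] = 'warn';
    getCurrentLogLevel();    // 'warn'
    isLevelEnabled('info');  // false - info sits below warn in VALID_LEVELS
    isLevelEnabled('error'); // true  - error sits at or above warn

    process.env['LOG_LEVEL'] = 'verbose';
    getCurrentLogLevel();    // throws: Invalid LOG_LEVEL: "verbose". Valid values: trace, debug, info, warn, error, fatal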
package/src/logging/payload.test.ts
@@ -0,0 +1,152 @@
+ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+ import { mkdtempSync, rmSync, existsSync, readdirSync, readFileSync } from 'node:fs';
+ import { join } from 'node:path';
+ import { tmpdir } from 'node:os';
+
+ // Mock logger module before importing payload
+ vi.mock('./logger.js', () => ({
+   getLogDirectory: vi.fn(),
+   isLevelEnabled: vi.fn()
+ }));
+
+ import { summarizePayload, truncateForLog } from './payload.js';
+ import { getLogDirectory, isLevelEnabled } from './logger.js';
+
+ const mockGetLogDirectory = getLogDirectory as ReturnType<typeof vi.fn>;
+ const mockIsLevelEnabled = isLevelEnabled as ReturnType<typeof vi.fn>;
+
+ describe('payload utilities', () => {
+   let tempDir: string;
+
+   beforeEach(() => {
+     tempDir = mkdtempSync(join(tmpdir(), 'payload-test-'));
+     mockGetLogDirectory.mockReturnValue(tempDir);
+     mockIsLevelEnabled.mockReturnValue(false);
+   });
+
+   afterEach(() => {
+     rmSync(tempDir, { recursive: true, force: true });
+     vi.clearAllMocks();
+   });
+
+   describe('truncateForLog', () => {
+     it('returns content unchanged when under max length', () => {
+       const content = 'short content';
+       expect(truncateForLog(content, 500)).toBe(content);
+     });
+
+     it('truncates content when over max length', () => {
+       const content = 'a'.repeat(600);
+       const result = truncateForLog(content, 500);
+       expect(result.length).toBe(500 + '... [truncated]'.length);
+       expect(result).toContain('... [truncated]');
+     });
+
+     it('uses default max length of 500', () => {
+       const content = 'a'.repeat(600);
+       const result = truncateForLog(content);
+       expect(result.startsWith('a'.repeat(500))).toBe(true);
+       expect(result).toContain('... [truncated]');
+     });
+
+     it('handles exact max length content', () => {
+       const content = 'a'.repeat(500);
+       expect(truncateForLog(content, 500)).toBe(content);
+     });
+   });
+
+   describe('summarizePayload', () => {
+     it('returns summary with preview, size, and hash', () => {
+       const content = 'test content for summarization';
+       const result = summarizePayload(content, 'test-type', 'test-id');
+
+       expect(result.preview).toBe(content);
+       expect(result.sizeBytes).toBe(Buffer.byteLength(content, 'utf8'));
+       expect(result.hash).toMatch(/^[a-f0-9]{12}$/);
+       expect(result.payloadFile).toBeUndefined();
+     });
+
+     it('truncates preview for large content', () => {
+       const content = 'x'.repeat(1000);
+       const result = summarizePayload(content, 'large', 'large-id');
+
+       expect(result.preview).toContain('... [truncated]');
+       expect(result.preview.length).toBeLessThan(content.length);
+     });
+
+     it('does not dump payload when dumpFull is false', () => {
+       const content = 'x'.repeat(20000); // Above threshold
+       const result = summarizePayload(content, 'type', 'id', false);
+
+       expect(result.payloadFile).toBeUndefined();
+       const payloadDir = join(tempDir, 'payload');
+       expect(existsSync(payloadDir)).toBe(false);
+     });
+
+     it('dumps payload to file when dumpFull is true and above threshold', () => {
+       const content = 'x'.repeat(20000); // Above 10KB threshold
+       const result = summarizePayload(content, 'dump-type', 'dump-id', true);
+
+       expect(result.payloadFile).toBeDefined();
+       expect(result.payloadFile).toContain('dump-type');
+       expect(result.payloadFile).toContain(result.hash);
+
+       const payloadDir = join(tempDir, 'payload');
+       expect(existsSync(payloadDir)).toBe(true);
+
+       const files = readdirSync(payloadDir);
+       expect(files.length).toBe(1);
+       expect(files[0]).toBe(result.payloadFile);
+
+       const fileContent = JSON.parse(readFileSync(join(payloadDir, files[0]), 'utf8'));
+       expect(fileContent.type).toBe('dump-type');
+       expect(fileContent.identifier).toBe('dump-id');
+       expect(fileContent.content).toBe(content);
+       expect(fileContent.sizeBytes).toBe(result.sizeBytes);
+     });
+
+     it('does not dump payload below threshold even with dumpFull true', () => {
+       const content = 'small content'; // Below 10KB threshold
+       const result = summarizePayload(content, 'small-type', 'small-id', true);
+
+       expect(result.payloadFile).toBeUndefined();
+     });
+
+     it('creates payload directory if it does not exist', () => {
+       const content = 'y'.repeat(20000);
+       const payloadDir = join(tempDir, 'payload');
+       expect(existsSync(payloadDir)).toBe(false);
+
+       summarizePayload(content, 'create-dir', 'create-id', true);
+
+       expect(existsSync(payloadDir)).toBe(true);
+     });
+
+     it('sanitizes identifier for filename', () => {
+       const content = 'z'.repeat(20000);
+       const result = summarizePayload(content, 'type', 'https://example.com/path?query=1', true);
+
+       expect(result.payloadFile).toBeDefined();
+       expect(result.payloadFile).not.toContain('://');
+       expect(result.payloadFile).not.toContain('?');
+     });
+
+     it('uses trace level check for dumpFull default', () => {
+       mockIsLevelEnabled.mockReturnValue(true);
+       const content = 'a'.repeat(20000);
+
+       const result = summarizePayload(content, 'trace-type', 'trace-id');
+
+       expect(mockIsLevelEnabled).toHaveBeenCalledWith('trace');
+       expect(result.payloadFile).toBeDefined();
+     });
+
+     it('generates consistent hash for same content', () => {
+       const content = 'consistent content';
+       const result1 = summarizePayload(content, 'type1', 'id1');
+       const result2 = summarizePayload(content, 'type2', 'id2');
+
+       expect(result1.hash).toBe(result2.hash);
+     });
+   });
+ });
package/src/logging/payload.ts
@@ -0,0 +1,121 @@
+ /**
+  * Large payload handling utilities for logging
+  *
+  * Handles large content (raw HTML, MCP responses) by:
+  * - Truncating to preview in log entries
+  * - Optionally dumping full content to separate files at trace level
+  */
+
+ import { writeFileSync, mkdirSync, existsSync } from 'node:fs';
+ import { join } from 'node:path';
+ import { createHash } from 'node:crypto';
+ import { getLogDirectory, isLevelEnabled } from './logger.js';
+
+ /** Maximum characters for log preview */
+ const MAX_PREVIEW_LENGTH = 500;
+
+ /** Minimum size to trigger payload dump (10KB) */
+ const PAYLOAD_DUMP_THRESHOLD = 10_000;
+
+ /** Summary of a large payload for logging */
+ export interface PayloadSummary {
+   /** Truncated preview of content */
+   preview: string;
+   /** Size in bytes */
+   sizeBytes: number;
+   /** Short hash for identification */
+   hash: string;
+   /** Filename if full content was dumped (trace level only) */
+   payloadFile?: string;
+ }
+
+ /** Get the payload dump directory */
+ function getPayloadDir(): string {
+   const dir = join(getLogDirectory(), 'payload');
+   if (!existsSync(dir)) {
+     mkdirSync(dir, { recursive: true });
+   }
+   return dir;
+ }
+
+ /** Generate a safe filename from an identifier */
+ function safeFilename(identifier: string): string {
+   return identifier
+     .replace(/[^a-zA-Z0-9-]/g, '_')
+     .substring(0, 50);
+ }
+
+ /**
+  * Summarize a large payload for logging
+  *
+  * Creates a summary with:
+  * - Truncated preview (first 500 chars)
+  * - Size in bytes
+  * - Short MD5 hash for identification
+  * - Optional full dump to file at trace level
+  *
+  * @param content - The full content to summarize
+  * @param type - Type identifier (e.g., 'raw-html', 'mcp-response')
+  * @param identifier - Unique identifier (e.g., URL, query)
+  * @param dumpFull - Whether to dump full content to file (default: trace level check)
+  * @returns PayloadSummary for inclusion in log entry
+  *
+  * @example
+  * logger.info({
+  *   url,
+  *   ...summarizePayload(html, 'raw-html', url),
+  * }, 'Fetched HTML');
+  */
+ export function summarizePayload(
+   content: string,
+   type: string,
+   identifier: string,
+   dumpFull: boolean = isLevelEnabled('trace')
+ ): PayloadSummary {
+   const sizeBytes = Buffer.byteLength(content, 'utf8');
+   const hash = createHash('md5').update(content).digest('hex').substring(0, 12);
+   const preview = truncateForLog(content, MAX_PREVIEW_LENGTH);
+
+   const baseSummary = { preview, sizeBytes, hash };
+
+   // Dump full payload to file if enabled and above threshold
+   if (dumpFull && sizeBytes > PAYLOAD_DUMP_THRESHOLD) {
+     const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+     const safeId = safeFilename(identifier);
+     const filename = `${timestamp}-${type}-${safeId}-${hash}.json`;
+     const filepath = join(getPayloadDir(), filename);
+
+     writeFileSync(
+       filepath,
+       JSON.stringify(
+         {
+           timestamp: new Date().toISOString(),
+           type,
+           identifier,
+           sizeBytes,
+           content,
+         },
+         null,
+         2
+       )
+     );
+
+     return { ...baseSummary, payloadFile: filename };
+   }
+
+   return baseSummary;
+ }
+
+ /**
+  * Truncate content for logging with ellipsis indicator
+  *
+  * @param content - Content to truncate
+  * @param maxLength - Maximum length (default: 500)
+  * @returns Truncated string with '... [truncated]' if needed
+  */
+ export function truncateForLog(content: string, maxLength: number = MAX_PREVIEW_LENGTH): string {
+   if (content.length <= maxLength) {
+     return content;
+   }
+   return content.substring(0, maxLength) + '... [truncated]';
+ }
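
To make the summary shape concrete, a sketch of calling summarizePayload on an oversized payload with dumping forced on; the URL is a placeholder and the hash in the comment is invented, while the filename pattern follows the code above:

    import { summarizePayload } from './logging/payload.js';

    const html = '<!doctype html>' + 'x'.repeat(20_000); // above the 10KB dump threshold
    const summary = summarizePayload(html, 'raw-html', 'https://example.com/docs', true);

    // summary.preview     -> first 500 chars followed by '... [truncated]'
    // summary.sizeBytes   -> Buffer.byteLength(html, 'utf8')
    // summary.hash        -> 12-char MD5 prefix, e.g. '3f2a9c1b7d4e' (illustrative)
    // summary.payloadFile -> '<ISO-timestamp>-raw-html-https___example_com_docs-<hash>.json',
    //                        written under <log dir>/payload/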
package/src/mcp/handlers/search.handler.test.ts
@@ -3,6 +3,20 @@ import { handleSearch, handleGetFullContext, resultCache } from './search.handle
  import type { HandlerContext } from '../types.js';
  import type { ServiceContainer } from '../../services/index.js';

+ /**
+  * Extract JSON from search response that includes a header line.
+  * Format: "Search: ... | Results: ... | ~X tokens | Xms\n\n{json}"
+  */
+ function parseSearchResponse(text: string): { header: string; json: Record<string, unknown> } {
+   const parts = text.split('\n\n');
+   const header = parts[0] ?? '';
+   const jsonStr = parts.slice(1).join('\n\n');
+   return {
+     header,
+     json: JSON.parse(jsonStr || '{}')
+   };
+ }
+
  describe('Search Handlers', () => {
  let mockContext: HandlerContext;
  let mockServices: ServiceContainer;
@@ -70,7 +84,10 @@ describe('Search Handlers', () => {
  })
  );

- const response = JSON.parse(result.content[0]?.text ?? '{}');
+ const { header, json: response } = parseSearchResponse(result.content[0]?.text ?? '');
+ expect(header).toContain('Search: "test query"');
+ expect(header).toContain('Results: 1');
+ expect(header).toContain('tokens');
  expect(response.results).toHaveLength(1);
  expect(response.totalResults).toBe(1);
  });
@@ -129,14 +146,15 @@ describe('Search Handlers', () => {
  expect(cached?.id).toBe('doc1');
  });

- it('should calculate estimated tokens', async () => {
+ it('should show token count in header', async () => {
  const result = await handleSearch(
  { query: 'test', detail: 'minimal', limit: 10 },
  mockContext
  );

- const response = JSON.parse(result.content[0]?.text ?? '{}');
- expect(response.estimatedTokens).toBeGreaterThan(0);
+ const { header } = parseSearchResponse(result.content[0]?.text ?? '');
+ // Header should contain token count (either "~X tokens" or "~X.Xk tokens")
+ expect(header).toMatch(/~\d+\.?\d*k? tokens/);
  });

  it('should add repoRoot for repo stores', async () => {
@@ -156,7 +174,7 @@ describe('Search Handlers', () => {
  mockContext
  );

- const response = JSON.parse(result.content[0]?.text ?? '{}');
+ const { json: response } = parseSearchResponse(result.content[0]?.text ?? '');
  expect(response.results[0]?.summary.repoRoot).toBe('/repos/test');
  });

@@ -166,7 +184,7 @@ describe('Search Handlers', () => {
  mockContext
  );

- const response = JSON.parse(result.content[0]?.text ?? '{}');
+ const { json: response } = parseSearchResponse(result.content[0]?.text ?? '');
  expect(response.results[0]?.summary.repoRoot).toBeUndefined();
  });

@@ -176,7 +194,7 @@ describe('Search Handlers', () => {
  mockContext
  );

- const response = JSON.parse(result.content[0]?.text ?? '{}');
+ const { json: response } = parseSearchResponse(result.content[0]?.text ?? '');
  expect(response.results[0]?.summary.storeName).toBe('Test Store');
  });

@@ -186,11 +204,12 @@ describe('Search Handlers', () => {
  mockContext
  );

- const response = JSON.parse(result.content[0]?.text ?? '{}');
+ const { header, json: response } = parseSearchResponse(result.content[0]?.text ?? '');
  expect(response).toHaveProperty('totalResults', 1);
- expect(response).toHaveProperty('estimatedTokens');
  expect(response).toHaveProperty('mode', 'hybrid');
  expect(response).toHaveProperty('timeMs', 50);
+ // Token count is now in header, not in JSON
+ expect(header).toContain('tokens');
  });
  });
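
These test updates track a handler change: the token estimate moved out of the JSON body and into a plain-text header line that precedes it. A sketch of the format the assertions above imply, using the parseSearchResponse helper added in this diff; the concrete numbers and JSON body are invented for illustration:

    // Illustrative input in the asserted shape: header line, blank line, JSON body.
    const text =
      'Search: "test query" | Results: 1 | ~1.2k tokens | 50ms' +
      '\n\n' +
      '{"results":[{"summary":{"storeName":"Test Store"}}],"totalResults":1,"mode":"hybrid","timeMs":50}';

    const { header, json } = parseSearchResponse(text);
    // header            -> 'Search: "test query" | Results: 1 | ~1.2k tokens | 50ms'
    // json.totalResults -> 1
    // json.estimatedTokens is gone; the token count now appears only in the header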