@gotza02/sequential-thinking 10000.0.0 → 10000.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chaos.test.d.ts +1 -0
- package/dist/chaos.test.js +73 -0
- package/dist/codestore.test.d.ts +1 -0
- package/dist/codestore.test.js +65 -0
- package/dist/coding.test.d.ts +1 -0
- package/dist/coding.test.js +140 -0
- package/dist/e2e.test.d.ts +1 -0
- package/dist/e2e.test.js +122 -0
- package/dist/filesystem.test.d.ts +1 -0
- package/dist/filesystem.test.js +190 -0
- package/dist/graph.test.d.ts +1 -0
- package/dist/graph.test.js +150 -0
- package/dist/graph_extra.test.d.ts +1 -0
- package/dist/graph_extra.test.js +93 -0
- package/dist/graph_repro.test.d.ts +1 -0
- package/dist/graph_repro.test.js +50 -0
- package/dist/human.test.d.ts +1 -0
- package/dist/human.test.js +221 -0
- package/dist/integration.test.d.ts +1 -0
- package/dist/integration.test.js +58 -0
- package/dist/knowledge.test.d.ts +1 -0
- package/dist/knowledge.test.js +105 -0
- package/dist/lib.js +1 -0
- package/dist/notes.test.d.ts +1 -0
- package/dist/notes.test.js +84 -0
- package/dist/registration.test.d.ts +1 -0
- package/dist/registration.test.js +39 -0
- package/dist/server.test.d.ts +1 -0
- package/dist/server.test.js +127 -0
- package/dist/stress.test.d.ts +1 -0
- package/dist/stress.test.js +72 -0
- package/dist/tools/codestore_tools.test.d.ts +1 -0
- package/dist/tools/codestore_tools.test.js +115 -0
- package/dist/tools/filesystem.js +1 -0
- package/dist/tools/sports/tracker.test.d.ts +1 -0
- package/dist/tools/sports/tracker.test.js +100 -0
- package/dist/utils.test.d.ts +1 -0
- package/dist/utils.test.js +40 -0
- package/dist/verify_cache.test.d.ts +1 -0
- package/dist/verify_cache.test.js +185 -0
- package/dist/web_fallback.test.d.ts +1 -0
- package/dist/web_fallback.test.js +103 -0
- package/dist/web_read.test.d.ts +1 -0
- package/dist/web_read.test.js +60 -0
- package/package.json +7 -6
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
// Unit tests for the PredictionTracker ROI system: tracking predictions,
// resolving them as won/lost, profit & ROI arithmetic, and league filtering.
import { describe, it, expect, beforeEach, afterAll } from 'vitest';
import { PredictionTracker } from './core/tracker.js';
import * as fs from 'fs/promises';

const TEST_LOG_FILE = 'test_betting_log.json';

describe('ROI Tracker System', () => {
    let tracker;

    beforeEach(async () => {
        // Use a test file instead of the real one
        tracker = new PredictionTracker(TEST_LOG_FILE);
        // Clear previous test data; ignore "file not found" on the first run
        try {
            await fs.unlink(TEST_LOG_FILE);
        }
        catch { }
    });

    afterAll(async () => {
        try {
            await fs.unlink(TEST_LOG_FILE);
        }
        catch { }
    });

    it('should track a new prediction', async () => {
        const bet = await tracker.track({
            league: 'Premier League',
            homeTeam: 'Arsenal',
            awayTeam: 'Man City',
            selection: 'Over 2.5 Goals',
            odds: 1.85,
            stake: 10,
            confidence: 80,
            analysis: 'High scoring teams',
            date: new Date().toISOString()
        });
        // A freshly tracked bet is pending and has no profit yet.
        expect(bet.id).toBeDefined();
        expect(bet.status).toBe('pending');
        expect(bet.profit).toBeUndefined();
        const pending = await tracker.list('pending');
        expect(pending.length).toBe(1);
        expect(pending[0].id).toBe(bet.id);
    });

    it('should resolve a prediction and calculate profit correctly', async () => {
        const bet = await tracker.track({
            league: 'Premier League',
            homeTeam: 'Liverpool',
            awayTeam: 'Chelsea',
            selection: 'Liverpool Win',
            odds: 2.0,
            stake: 50,
            confidence: 90,
            analysis: 'Form is good',
            date: new Date().toISOString()
        });
        // Resolve as WON
        const resolved = await tracker.resolve(bet.id, 'won', '2-0');
        expect(resolved).toBeDefined();
        expect(resolved.status).toBe('won');
        expect(resolved.profit).toBe(50); // (50 * 2.0) - 50 = 50
        expect(resolved.resultScore).toBe('2-0');
        // Check Stats
        const stats = await tracker.getStats();
        expect(stats.totalBets).toBe(1);
        expect(stats.wins).toBe(1);
        expect(stats.totalProfit).toBe(50);
        expect(stats.roi).toBe(100); // (50/50)*100
    });

    it('should handle lost bets correctly', async () => {
        const bet = await tracker.track({
            league: 'La Liga',
            homeTeam: 'Real Madrid',
            awayTeam: 'Barca',
            selection: 'Real Win',
            odds: 2.5,
            stake: 100,
            confidence: 60,
            analysis: 'El Classico',
            date: new Date().toISOString()
        });
        // Resolve as LOST
        const resolved = await tracker.resolve(bet.id, 'lost', '0-3');
        expect(resolved.status).toBe('lost');
        expect(resolved.profit).toBe(-100); // full stake lost
        const stats = await tracker.getStats();
        expect(stats.totalProfit).toBe(-100);
        expect(stats.roi).toBe(-100); // (-100/100)*100
    });

    it('should filter stats by league', async () => {
        // FIX: the original mixed `await` with `.then()` chains
        // (`await tracker.track({...}).then(b => tracker.resolve(...))`),
        // which obscures failures and stack traces. Use plain sequential
        // awaits instead — behavior is identical.
        const eplBet = await tracker.track({
            league: 'EPL', homeTeam: 'A', awayTeam: 'B', selection: 'Win', odds: 2.0, stake: 10, confidence: 50, analysis: '.', date: new Date().toISOString()
        });
        await tracker.resolve(eplBet.id, 'won'); // Profit +10
        const ligaBet = await tracker.track({
            league: 'La Liga', homeTeam: 'C', awayTeam: 'D', selection: 'Win', odds: 2.0, stake: 10, confidence: 50, analysis: '.', date: new Date().toISOString()
        });
        await tracker.resolve(ligaBet.id, 'lost'); // Profit -10
        const eplStats = await tracker.getStats({ league: 'EPL' });
        expect(eplStats.totalProfit).toBe(10);
        const ligaStats = await tracker.getStats({ league: 'La Liga' });
        expect(ligaStats.totalProfit).toBe(-10);
        // Unfiltered stats combine both leagues: +10 - 10 = 0.
        const allStats = await tracker.getStats();
        expect(allStats.totalProfit).toBe(0);
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
// Tests for validatePath: relative paths inside the project root resolve
// normally, while anything escaping the root must be rejected.
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
import { validatePath } from './utils.js';
import * as path from 'path';

describe('Utils: validatePath', () => {
    // Pin process.cwd() to a fixed location so expectations are stable
    // regardless of where the test runner is launched from.
    const mockCwd = '/app/project';

    beforeEach(() => {
        vi.spyOn(process, 'cwd').mockReturnValue(mockCwd);
    });

    afterEach(() => {
        vi.restoreAllMocks();
    });

    it('should allow paths within the project root', () => {
        const resolved = validatePath('src/index.ts');
        expect(resolved).toBe(path.resolve(mockCwd, 'src/index.ts'));
    });

    it('should allow explicit ./ paths', () => {
        const resolved = validatePath('./package.json');
        expect(resolved).toBe(path.resolve(mockCwd, 'package.json'));
    });

    it('should block traversal to parent directory', () => {
        const attempt = () => validatePath('../outside.txt');
        expect(attempt).toThrow(/Access denied/);
    });

    it('should block multiple level traversal', () => {
        const attempt = () => validatePath('src/../../etc/passwd');
        expect(attempt).toThrow(/Access denied/);
    });

    it('should block absolute paths outside root', () => {
        // Absolute-path semantics differ on Windows, so this branch is only
        // exercised on platforms using POSIX separators.
        if (path.sep === '/') {
            const attempt = () => validatePath('/etc/passwd');
            expect(attempt).toThrow(/Access denied/);
        }
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
// Integration-style tests for ProjectKnowledgeGraph's incremental cache:
// cache hits on rebuilds, mtime-based invalidation, detection of added and
// deleted files, forced rebuilds, and cached-vs-fresh consistency.
import { describe, it, expect, beforeEach } from 'vitest';
import { ProjectKnowledgeGraph } from './graph.js';
import * as fs from 'fs/promises';
import * as path from 'path';
import * as os from 'os';

describe('Graph Caching Verification', () => {
    const root = process.cwd();
    const cachePath = path.join(root, '.gemini_graph_cache.json');

    beforeEach(async () => {
        // Cleanup existing cache before each test
        try {
            await fs.unlink(cachePath);
        }
        catch (e) { }
    });

    it('should use cache on second run', async () => {
        const graph = new ProjectKnowledgeGraph();
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'graph-cache-test-'));
        const testFile = path.join(tempDir, 'main.ts');
        try {
            // Create a dummy file to scan
            await fs.writeFile(testFile, 'export const val = 1;', 'utf-8');
            console.log('--- Run 1 (Fresh) ---');
            const freshRun = await graph.build(tempDir);
            console.log('Result 1:', freshRun);
            expect(freshRun.parsedFiles).toBeGreaterThan(0);
            expect(freshRun.cachedFiles).toBe(0);
            console.log('--- Run 2 (Cached) ---');
            const cachedRun = await graph.build(tempDir);
            console.log('Result 2:', cachedRun);
            // Second build should be served entirely from cache.
            expect(cachedRun.parsedFiles).toBe(0);
            expect(cachedRun.cachedFiles).toBeGreaterThan(0);
            expect(cachedRun.nodeCount).toBe(freshRun.nodeCount);
        }
        finally {
            await fs.rm(tempDir, { recursive: true, force: true });
        }
    });

    it('should invalidate cache when file mtime changes', async () => {
        const graph = new ProjectKnowledgeGraph();
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'graph-mtime-test-'));
        const testFile = path.join(tempDir, 'test.ts');
        try {
            await fs.writeFile(testFile, 'export const foo = 1;', 'utf-8');
            // Build 1: Fresh
            const firstBuild = await graph.build(tempDir);
            expect(firstBuild.parsedFiles).toBe(1);
            expect(firstBuild.cachedFiles).toBe(0);
            console.log('Build 1 (fresh):', firstBuild);
            // Build 2: Should use cache
            const secondBuild = await graph.build(tempDir);
            expect(secondBuild.parsedFiles).toBe(0);
            expect(secondBuild.cachedFiles).toBe(1);
            console.log('Build 2 (cached):', secondBuild);
            // Modify file content (this changes mtime)
            await new Promise(resolve => setTimeout(resolve, 50)); // Small delay to ensure mtime changes
            await fs.writeFile(testFile, 'export const bar = 2;', 'utf-8');
            // Build 3: Should detect mtime change and re-parse
            const thirdBuild = await graph.build(tempDir);
            expect(thirdBuild.parsedFiles).toBe(1); // File should be re-parsed due to mtime change
            expect(thirdBuild.cachedFiles).toBe(0);
            console.log('Build 3 (mtime changed):', thirdBuild);
        }
        finally {
            await fs.rm(tempDir, { recursive: true, force: true });
        }
    });

    it('should detect new files added after cache was built', async () => {
        const graph = new ProjectKnowledgeGraph();
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'graph-newfile-test-'));
        const firstFile = path.join(tempDir, 'a.ts');
        const secondFile = path.join(tempDir, 'b.ts');
        try {
            await fs.writeFile(firstFile, 'export const a = 1;', 'utf-8');
            // Build 1: Only one file
            const initialBuild = await graph.build(tempDir);
            expect(initialBuild.nodeCount).toBe(1);
            expect(initialBuild.totalFiles).toBe(1);
            console.log('Build 1 (1 file):', initialBuild);
            // Add new file
            await fs.writeFile(secondFile, 'export const b = 2;', 'utf-8');
            // Build 2: Should detect new file
            const incrementalBuild = await graph.build(tempDir);
            expect(incrementalBuild.nodeCount).toBe(2);
            expect(incrementalBuild.totalFiles).toBe(2);
            expect(incrementalBuild.parsedFiles).toBe(1); // Only the new file should be parsed
            expect(incrementalBuild.cachedFiles).toBe(1); // Old file should use cache
            console.log('Build 2 (new file added):', incrementalBuild);
        }
        finally {
            await fs.rm(tempDir, { recursive: true, force: true });
        }
    });

    it('should handle deleted files by pruning from cache', async () => {
        const graph = new ProjectKnowledgeGraph();
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'graph-delete-test-'));
        const keptFile = path.join(tempDir, 'keep.ts');
        const removedFile = path.join(tempDir, 'delete.ts');
        try {
            await fs.writeFile(keptFile, 'export const keep = 1;', 'utf-8');
            await fs.writeFile(removedFile, 'export const deleted = 2;', 'utf-8');
            // Build 1: Two files
            const beforeDelete = await graph.build(tempDir);
            expect(beforeDelete.nodeCount).toBe(2);
            console.log('Build 1 (2 files):', beforeDelete);
            // Delete one file
            await fs.unlink(removedFile);
            // Build 2: Should only have one file, cache pruned
            const afterDelete = await graph.build(tempDir);
            expect(afterDelete.nodeCount).toBe(1);
            expect(afterDelete.totalFiles).toBe(1);
            console.log('Build 2 (after delete):', afterDelete);
            // Verify the on-disk cache no longer references the deleted file
            const cacheContent = JSON.parse(await fs.readFile(path.join(tempDir, '.gemini_graph_cache.json'), 'utf-8'));
            expect(cacheContent.files[removedFile]).toBeUndefined();
            expect(cacheContent.files[keptFile]).toBeDefined();
        }
        finally {
            await fs.rm(tempDir, { recursive: true, force: true });
        }
    });

    it('should force rebuild when forceRebuild() is called', async () => {
        const graph = new ProjectKnowledgeGraph();
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'graph-force-test-'));
        const testFile = path.join(tempDir, 'test.ts');
        try {
            await fs.writeFile(testFile, 'export const x = 1;', 'utf-8');
            // Build 1: Fresh
            const firstBuild = await graph.build(tempDir);
            expect(firstBuild.parsedFiles).toBe(1);
            console.log('Build 1 (fresh):', firstBuild);
            // Build 2: Should use cache
            const secondBuild = await graph.build(tempDir);
            expect(secondBuild.parsedFiles).toBe(0);
            expect(secondBuild.cachedFiles).toBe(1);
            console.log('Build 2 (cached):', secondBuild);
            // Force rebuild: Should ignore cache
            const forcedBuild = await graph.forceRebuild(tempDir);
            expect(forcedBuild.parsedFiles).toBe(1); // Force re-parse everything
            expect(forcedBuild.cachedFiles).toBe(0);
            console.log('Build 3 (force):', forcedBuild);
        }
        finally {
            await fs.rm(tempDir, { recursive: true, force: true });
        }
    });

    it('should have consistent results between cached and fresh builds', async () => {
        const graph = new ProjectKnowledgeGraph();
        const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'graph-consistent-test-'));
        const mainFile = path.join(tempDir, 'main.ts');
        const utilsFile = path.join(tempDir, 'utils.ts');
        try {
            // Create files with imports so relationships are non-trivial
            await fs.writeFile(utilsFile, 'export function helper() { return 42; }', 'utf-8');
            await fs.writeFile(mainFile, 'import { helper } from "./utils.js";\nexport const result = helper();', 'utf-8');
            // Build 1: Fresh
            const res1 = await graph.build(tempDir);
            const rel1 = graph.getRelationships('main.ts');
            // Build 2: Cached
            const res2 = await graph.build(tempDir);
            const rel2 = graph.getRelationships('main.ts');
            // Force rebuild: Fresh again
            const res3 = await graph.forceRebuild(tempDir);
            const rel3 = graph.getRelationships('main.ts');
            // All three builds must report identical relationships
            expect(rel1).toEqual(rel2);
            expect(rel2).toEqual(rel3);
            expect(rel1?.imports.length).toBeGreaterThan(0);
            console.log('Relationships consistent:', JSON.stringify(rel1, null, 2));
        }
        finally {
            await fs.rm(tempDir, { recursive: true, force: true });
        }
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
// Tests for the web_search tool's provider-selection and fallback chain
// (Brave → Exa → Google), driven through a mocked MCP server registration.
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { registerWebTools } from './tools/web.js';
import * as utils from './utils.js';

// Replace only fetchWithRetry; keep every other util intact.
vi.mock('./utils.js', async (importOriginal) => {
    const actual = await importOriginal();
    return {
        ...actual,
        fetchWithRetry: vi.fn(),
    };
});

describe('web_search fallback', () => {
    let searchCallback;
    // Fake server that captures the web_search handler at registration time.
    const mockServer = {
        tool: vi.fn((name, desc, schema, callback) => {
            if (name === 'web_search') {
                searchCallback = callback;
            }
        })
    };
    const originalEnv = process.env;

    beforeEach(() => {
        // Work on a copy of the environment so each test can tweak API keys.
        process.env = { ...originalEnv };
        vi.clearAllMocks();
    });

    afterEach(() => {
        process.env = originalEnv;
    });

    it('should use Brave if configured and no provider specified', async () => {
        process.env.BRAVE_API_KEY = 'test-brave-key';
        delete process.env.EXA_API_KEY;
        delete process.env.GOOGLE_SEARCH_API_KEY;
        registerWebTools(mockServer);
        const braveResponse = {
            ok: true,
            json: async () => ({ web: { results: ['brave result'] } })
        };
        utils.fetchWithRetry.mockResolvedValue(braveResponse);
        const result = await searchCallback({ query: 'test' });
        expect(utils.fetchWithRetry).toHaveBeenCalledWith(expect.stringContaining('api.search.brave.com'), expect.anything());
        expect(result.isError).toBeUndefined();
        expect(JSON.parse(result.content[0].text)).toEqual(['brave result']);
    });

    it('should fallback to Exa if Brave fails', async () => {
        process.env.BRAVE_API_KEY = 'test-brave-key';
        process.env.EXA_API_KEY = 'test-exa-key';
        registerWebTools(mockServer);
        utils.fetchWithRetry
            // First call (Brave) fails
            .mockResolvedValueOnce({ ok: false, statusText: 'Brave Error', status: 500 })
            // Second call (Exa) succeeds
            .mockResolvedValueOnce({
                ok: true,
                json: async () => ({ results: ['exa result'] })
            });
        const result = await searchCallback({ query: 'test' });
        expect(utils.fetchWithRetry).toHaveBeenCalledTimes(2);
        // 1. Brave
        expect(utils.fetchWithRetry).toHaveBeenNthCalledWith(1, expect.stringContaining('api.search.brave.com'), expect.anything());
        // 2. Exa
        expect(utils.fetchWithRetry).toHaveBeenNthCalledWith(2, expect.stringContaining('api.exa.ai'), expect.anything());
        expect(result.isError).toBeUndefined();
        expect(JSON.parse(result.content[0].text)).toEqual(['exa result']);
    });

    it('should respect requested provider and verify its availability', async () => {
        process.env.BRAVE_API_KEY = 'test-brave-key';
        // Exa not configured
        delete process.env.EXA_API_KEY;
        registerWebTools(mockServer);
        const result = await searchCallback({ query: 'test', provider: 'exa' });
        expect(result.isError).toBe(true);
        expect(result.content[0].text).toContain("Requested provider 'exa' is not configured");
    });

    it('should try requested provider first, then fallback', async () => {
        process.env.BRAVE_API_KEY = 'test-brave-key';
        process.env.EXA_API_KEY = 'test-exa-key';
        registerWebTools(mockServer);
        // Request Exa; mock Exa failing and Brave succeeding.
        utils.fetchWithRetry
            .mockResolvedValueOnce({ ok: false, statusText: 'Exa Error', status: 500 })
            .mockResolvedValueOnce({
                ok: true,
                json: async () => ({ web: { results: ['brave result'] } })
            });
        const result = await searchCallback({ query: 'test', provider: 'exa' });
        expect(utils.fetchWithRetry).toHaveBeenCalledTimes(2);
        // 1. Exa (requested)
        expect(utils.fetchWithRetry).toHaveBeenNthCalledWith(1, expect.stringContaining('api.exa.ai'), expect.anything());
        // 2. Brave (fallback)
        expect(utils.fetchWithRetry).toHaveBeenNthCalledWith(2, expect.stringContaining('api.search.brave.com'), expect.anything());
        expect(result.isError).toBeUndefined();
    });

    it('should return error if all fail', async () => {
        process.env.BRAVE_API_KEY = 'test-brave-key';
        registerWebTools(mockServer);
        utils.fetchWithRetry.mockResolvedValue({ ok: false, statusText: 'Some Error', status: 500 });
        const result = await searchCallback({ query: 'test' });
        expect(result.isError).toBe(true);
        expect(result.content[0].text).toContain("All search providers failed");
        expect(result.content[0].text).toContain("Brave API error");
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
// Tests for the read_webpage tool: HTML→Markdown conversion, SSRF guard
// (validatePublicUrl), and network-error propagation.
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { registerWebTools } from './tools/web.js';
import * as utils from './utils.js';

// Stub out networking and URL validation; keep the rest of utils real.
vi.mock('./utils.js', async (importOriginal) => {
    const actual = await importOriginal();
    return {
        ...actual,
        fetchWithRetry: vi.fn(),
        validatePublicUrl: vi.fn(),
    };
});

describe('read_webpage tool', () => {
    let readCallback;
    // Fake server that captures the read_webpage handler at registration time.
    const mockServer = {
        tool: vi.fn((name, desc, schema, callback) => {
            if (name === 'read_webpage') {
                readCallback = callback;
            }
        })
    };

    beforeEach(() => {
        vi.clearAllMocks();
        registerWebTools(mockServer);
    });

    it('should convert HTML to Markdown', async () => {
        const mockHtml = `
            <html>
            <head><title>Test Article</title></head>
            <body>
                <h1>Main Header</h1>
                <p>Paragraph <b>bold</b>.</p>
            </body>
            </html>
        `;
        utils.fetchWithRetry.mockResolvedValue({
            ok: true,
            text: async () => mockHtml
        });
        utils.validatePublicUrl.mockResolvedValue(undefined);
        const result = await readCallback({ url: 'https://example.com' });
        expect(result.isError).toBeUndefined();
        const content = result.content[0].text;
        expect(content).toContain("Title: Test Article");
        expect(content).toContain("Main Header");
        expect(content).toContain("**bold**"); // Markdown bold
    });

    it('should handle private URL validation error', async () => {
        utils.validatePublicUrl.mockRejectedValue(new Error("Access denied: Private IP"));
        const result = await readCallback({ url: 'http://localhost' });
        expect(result.isError).toBe(true);
        expect(result.content[0].text).toContain("Access denied");
    });

    it('should handle fetch errors', async () => {
        utils.validatePublicUrl.mockResolvedValue(undefined);
        utils.fetchWithRetry.mockRejectedValue(new Error("Network Error"));
        const result = await readCallback({ url: 'https://example.com' });
        expect(result.isError).toBe(true);
        expect(result.content[0].text).toContain("Network Error");
    });
});
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@gotza02/sequential-thinking",
|
|
3
|
-
"version": "10000.0.
|
|
3
|
+
"version": "10000.0.1",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"access": "public"
|
|
6
6
|
},
|
|
@@ -53,12 +53,13 @@
|
|
|
53
53
|
"@types/node": "^25.0.9",
|
|
54
54
|
"@types/turndown": "^5.0.6",
|
|
55
55
|
"@types/yargs": "^17.0.35",
|
|
56
|
-
"@vitest/coverage-v8": "^
|
|
57
|
-
"eslint": "^
|
|
58
|
-
"@typescript-eslint/parser": "^
|
|
59
|
-
"@typescript-eslint/eslint-plugin": "^
|
|
56
|
+
"@vitest/coverage-v8": "^4.0.18",
|
|
57
|
+
"eslint": "^9.39.2",
|
|
58
|
+
"@typescript-eslint/parser": "^8.54.0",
|
|
59
|
+
"@typescript-eslint/eslint-plugin": "^8.54.0",
|
|
60
|
+
"typescript-eslint": "^8.54.0",
|
|
60
61
|
"prettier": "^3.2.5",
|
|
61
62
|
"shx": "^0.4.0",
|
|
62
|
-
"vitest": "^
|
|
63
|
+
"vitest": "^4.0.18"
|
|
63
64
|
}
|
|
64
65
|
}
|