@gotza02/sequential-thinking 2026.1.27 → 2026.1.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +20 -1
- package/dist/graph.js +27 -3
- package/dist/index.js +56 -5
- package/dist/lib.js +48 -1
- package/dist/verify_new_tools.test.js +67 -0
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -52,6 +52,14 @@ The core engine for structured problem-solving. It forces a step-by-step analysi
|
|
|
52
52
|
#### `clear_thought_history`
|
|
53
53
|
Clears the stored thinking history. Use this to start fresh or free up context.
|
|
54
54
|
|
|
55
|
+
#### `summarize_history`
|
|
56
|
+
Compresses multiple thoughts into a single summary thought. This is essential for long reasoning chains to save token context while preserving the core insights.
|
|
57
|
+
|
|
58
|
+
**Inputs:**
|
|
59
|
+
- `startIndex` (integer): Start of the range to summarize.
|
|
60
|
+
- `endIndex` (integer): End of the range to summarize.
|
|
61
|
+
- `summary` (string): The summary text that replaces the range.
|
|
62
|
+
|
|
55
63
|
### 🌐 External Knowledge
|
|
56
64
|
|
|
57
65
|
#### `web_search`
|
|
@@ -82,7 +90,7 @@ Reads a webpage and converts it to clean Markdown, removing ads and navigation.
|
|
|
82
90
|
### 🏗 Codebase Intelligence
|
|
83
91
|
|
|
84
92
|
#### `build_project_graph`
|
|
85
|
-
**RUN THIS FIRST** when entering a new project. It scans the directory and builds a map of file dependencies using TypeScript AST analysis.
|
|
93
|
+
**RUN THIS FIRST** when entering a new project. It scans the directory and builds a map of file dependencies using TypeScript AST analysis. Now also extracts **exported symbols** (Functions, Classes, Variables) to provide deeper structural insight.
|
|
86
94
|
|
|
87
95
|
**Inputs:**
|
|
88
96
|
- `path` (string, optional): Root directory (defaults to `.`).
|
|
@@ -214,6 +222,17 @@ npm run build
|
|
|
214
222
|
npm test
|
|
215
223
|
```
|
|
216
224
|
|
|
225
|
+
## Recent Updates (v2026.1.28)
|
|
226
|
+
- **Robustness**:
|
|
227
|
+
- Implemented **Atomic Writes** for `thoughts_history.json` to prevent file corruption.
|
|
228
|
+
- Added **Internal Locking** to handle concurrent save requests gracefully.
|
|
229
|
+
- Added **API Retry Logic** with exponential backoff for all search and web tools (handles HTTP 429/5xx).
|
|
230
|
+
- Improved HTTP requests with browser-like headers (User-Agent) to reduce blocking.
|
|
231
|
+
- **New Tools**:
|
|
232
|
+
- `summarize_history`: Archive and condense long reasoning chains.
|
|
233
|
+
- **Graph Enhancements**:
|
|
234
|
+
- Added **Symbol Extraction**: The project graph now tracks exported functions, classes, and variables.
|
|
235
|
+
|
|
217
236
|
## Recent Updates (v2026.1.27)
|
|
218
237
|
- **New Tools**:
|
|
219
238
|
- `read_webpage`: Convert webpages to Markdown for efficient reading.
|
package/dist/graph.js
CHANGED
|
@@ -14,7 +14,8 @@ export class ProjectKnowledgeGraph {
|
|
|
14
14
|
this.nodes.set(file, {
|
|
15
15
|
path: file,
|
|
16
16
|
imports: [],
|
|
17
|
-
importedBy: []
|
|
17
|
+
importedBy: [],
|
|
18
|
+
symbols: []
|
|
18
19
|
});
|
|
19
20
|
}
|
|
20
21
|
// Step 2: Parse imports and build edges
|
|
@@ -49,8 +50,29 @@ export class ProjectKnowledgeGraph {
|
|
|
49
50
|
const content = await fs.readFile(filePath, 'utf-8');
|
|
50
51
|
const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true);
|
|
51
52
|
const imports = [];
|
|
53
|
+
const symbols = [];
|
|
52
54
|
const visit = (node) => {
|
|
53
|
-
//
|
|
55
|
+
// --- Symbols (Exports) ---
|
|
56
|
+
if (ts.isFunctionDeclaration(node) && node.name) {
|
|
57
|
+
const isExported = node.modifiers?.some(m => m.kind === ts.SyntaxKind.ExportKeyword);
|
|
58
|
+
if (isExported)
|
|
59
|
+
symbols.push(`function:${node.name.text}`);
|
|
60
|
+
}
|
|
61
|
+
else if (ts.isClassDeclaration(node) && node.name) {
|
|
62
|
+
const isExported = node.modifiers?.some(m => m.kind === ts.SyntaxKind.ExportKeyword);
|
|
63
|
+
if (isExported)
|
|
64
|
+
symbols.push(`class:${node.name.text}`);
|
|
65
|
+
}
|
|
66
|
+
else if (ts.isVariableStatement(node)) {
|
|
67
|
+
const isExported = node.modifiers?.some(m => m.kind === ts.SyntaxKind.ExportKeyword);
|
|
68
|
+
if (isExported) {
|
|
69
|
+
node.declarationList.declarations.forEach(d => {
|
|
70
|
+
if (ts.isIdentifier(d.name))
|
|
71
|
+
symbols.push(`var:${d.name.text}`);
|
|
72
|
+
});
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
// --- Imports ---
|
|
54
76
|
if (ts.isImportDeclaration(node) || ts.isExportDeclaration(node)) {
|
|
55
77
|
if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
|
|
56
78
|
imports.push(node.moduleSpecifier.text);
|
|
@@ -78,6 +100,7 @@ export class ProjectKnowledgeGraph {
|
|
|
78
100
|
const currentNode = this.nodes.get(filePath);
|
|
79
101
|
if (!currentNode)
|
|
80
102
|
return;
|
|
103
|
+
currentNode.symbols = symbols;
|
|
81
104
|
for (const importPath of imports) {
|
|
82
105
|
let resolvedPath = null;
|
|
83
106
|
if (importPath.startsWith('.')) {
|
|
@@ -146,7 +169,8 @@ export class ProjectKnowledgeGraph {
|
|
|
146
169
|
return {
|
|
147
170
|
path: node.path,
|
|
148
171
|
imports: node.imports.map(p => path.relative(this.rootDir, p)),
|
|
149
|
-
importedBy: node.importedBy.map(p => path.relative(this.rootDir, p))
|
|
172
|
+
importedBy: node.importedBy.map(p => path.relative(this.rootDir, p)),
|
|
173
|
+
symbols: node.symbols
|
|
150
174
|
};
|
|
151
175
|
}
|
|
152
176
|
getSummary() {
|
package/dist/index.js
CHANGED
|
@@ -12,6 +12,38 @@ import { JSDOM } from 'jsdom';
|
|
|
12
12
|
import { Readability } from '@mozilla/readability';
|
|
13
13
|
import TurndownService from 'turndown';
|
|
14
14
|
const execAsync = promisify(exec);
|
|
15
|
+
const DEFAULT_HEADERS = {
|
|
16
|
+
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
|
17
|
+
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8',
|
|
18
|
+
'Accept-Language': 'en-US,en;q=0.9',
|
|
19
|
+
};
|
|
20
|
+
async function fetchWithRetry(url, options = {}, retries = 3, backoff = 1000) {
|
|
21
|
+
const fetchOptions = {
|
|
22
|
+
...options,
|
|
23
|
+
headers: { ...DEFAULT_HEADERS, ...options.headers }
|
|
24
|
+
};
|
|
25
|
+
try {
|
|
26
|
+
const response = await fetch(url, fetchOptions);
|
|
27
|
+
if (response.status === 429 && retries > 0) {
|
|
28
|
+
const retryAfter = response.headers.get('Retry-After');
|
|
29
|
+
const waitTime = retryAfter ? parseInt(retryAfter) * 1000 : backoff;
|
|
30
|
+
await new Promise(resolve => setTimeout(resolve, waitTime));
|
|
31
|
+
return fetchWithRetry(url, options, retries - 1, backoff * 2);
|
|
32
|
+
}
|
|
33
|
+
if (!response.ok && retries > 0 && response.status >= 500) {
|
|
34
|
+
await new Promise(resolve => setTimeout(resolve, backoff));
|
|
35
|
+
return fetchWithRetry(url, options, retries - 1, backoff * 2);
|
|
36
|
+
}
|
|
37
|
+
return response;
|
|
38
|
+
}
|
|
39
|
+
catch (error) {
|
|
40
|
+
if (retries > 0) {
|
|
41
|
+
await new Promise(resolve => setTimeout(resolve, backoff));
|
|
42
|
+
return fetchWithRetry(url, options, retries - 1, backoff * 2);
|
|
43
|
+
}
|
|
44
|
+
throw error;
|
|
45
|
+
}
|
|
46
|
+
}
|
|
15
47
|
const server = new McpServer({
|
|
16
48
|
name: "sequential-thinking-server",
|
|
17
49
|
version: "2026.1.18",
|
|
@@ -124,7 +156,7 @@ server.tool("web_search", "Search the web using Brave or Exa APIs (requires API
|
|
|
124
156
|
if (selectedProvider === 'brave') {
|
|
125
157
|
if (!process.env.BRAVE_API_KEY)
|
|
126
158
|
throw new Error("BRAVE_API_KEY not found");
|
|
127
|
-
const response = await
|
|
159
|
+
const response = await fetchWithRetry(`https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(query)}&count=5`, {
|
|
128
160
|
headers: { 'X-Subscription-Token': process.env.BRAVE_API_KEY }
|
|
129
161
|
});
|
|
130
162
|
if (!response.ok)
|
|
@@ -135,7 +167,7 @@ server.tool("web_search", "Search the web using Brave or Exa APIs (requires API
|
|
|
135
167
|
if (selectedProvider === 'exa') {
|
|
136
168
|
if (!process.env.EXA_API_KEY)
|
|
137
169
|
throw new Error("EXA_API_KEY not found");
|
|
138
|
-
const response = await
|
|
170
|
+
const response = await fetchWithRetry('https://api.exa.ai/search', {
|
|
139
171
|
method: 'POST',
|
|
140
172
|
headers: {
|
|
141
173
|
'x-api-key': process.env.EXA_API_KEY,
|
|
@@ -153,7 +185,7 @@ server.tool("web_search", "Search the web using Brave or Exa APIs (requires API
|
|
|
153
185
|
throw new Error("GOOGLE_SEARCH_API_KEY not found");
|
|
154
186
|
if (!process.env.GOOGLE_SEARCH_CX)
|
|
155
187
|
throw new Error("GOOGLE_SEARCH_CX (Search Engine ID) not found");
|
|
156
|
-
const response = await
|
|
188
|
+
const response = await fetchWithRetry(`https://www.googleapis.com/customsearch/v1?key=${process.env.GOOGLE_SEARCH_API_KEY}&cx=${process.env.GOOGLE_SEARCH_CX}&q=${encodeURIComponent(query)}&num=5`);
|
|
157
189
|
if (!response.ok)
|
|
158
190
|
throw new Error(`Google API error: ${response.statusText}`);
|
|
159
191
|
const data = await response.json();
|
|
@@ -182,7 +214,7 @@ server.tool("fetch", "Perform an HTTP request to a specific URL.", {
|
|
|
182
214
|
body: z.string().optional().describe("Request body (for POST/PUT)")
|
|
183
215
|
}, async ({ url, method, headers, body }) => {
|
|
184
216
|
try {
|
|
185
|
-
const response = await
|
|
217
|
+
const response = await fetchWithRetry(url, {
|
|
186
218
|
method,
|
|
187
219
|
headers: headers || {},
|
|
188
220
|
body: body
|
|
@@ -324,7 +356,7 @@ server.tool("read_webpage", "Read a webpage and convert it to clean Markdown (re
|
|
|
324
356
|
url: z.string().url().describe("The URL to read")
|
|
325
357
|
}, async ({ url }) => {
|
|
326
358
|
try {
|
|
327
|
-
const response = await
|
|
359
|
+
const response = await fetchWithRetry(url);
|
|
328
360
|
const html = await response.text();
|
|
329
361
|
const doc = new JSDOM(html, { url });
|
|
330
362
|
const reader = new Readability(doc.window.document);
|
|
@@ -394,6 +426,25 @@ server.tool("clear_thought_history", "Clear the sequential thinking history.", {
|
|
|
394
426
|
content: [{ type: "text", text: "Thought history cleared." }]
|
|
395
427
|
};
|
|
396
428
|
});
|
|
429
|
+
// 12. summarize_history
|
|
430
|
+
server.tool("summarize_history", "Compress multiple thoughts into a single summary thought to save space/context.", {
|
|
431
|
+
startIndex: z.number().int().min(1).describe("The starting thought number to summarize"),
|
|
432
|
+
endIndex: z.number().int().min(1).describe("The ending thought number to summarize"),
|
|
433
|
+
summary: z.string().describe("The summary text that replaces the range")
|
|
434
|
+
}, async ({ startIndex, endIndex, summary }) => {
|
|
435
|
+
try {
|
|
436
|
+
const result = await thinkingServer.archiveHistory(startIndex, endIndex, summary);
|
|
437
|
+
return {
|
|
438
|
+
content: [{ type: "text", text: `Successfully summarized thoughts ${startIndex}-${endIndex}. New history length: ${result.newHistoryLength}` }]
|
|
439
|
+
};
|
|
440
|
+
}
|
|
441
|
+
catch (error) {
|
|
442
|
+
return {
|
|
443
|
+
content: [{ type: "text", text: `Archive Error: ${error instanceof Error ? error.message : String(error)}` }],
|
|
444
|
+
isError: true
|
|
445
|
+
};
|
|
446
|
+
}
|
|
447
|
+
});
|
|
397
448
|
runServer().catch((error) => {
|
|
398
449
|
console.error("Fatal error running server:", error);
|
|
399
450
|
process.exit(1);
|
package/dist/lib.js
CHANGED
|
@@ -8,6 +8,7 @@ export class SequentialThinkingServer {
|
|
|
8
8
|
disableThoughtLogging;
|
|
9
9
|
storagePath;
|
|
10
10
|
delayMs;
|
|
11
|
+
isSaving = false;
|
|
11
12
|
constructor(storagePath = 'thoughts_history.json', delayMs = 0) {
|
|
12
13
|
this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true";
|
|
13
14
|
this.storagePath = path.resolve(storagePath);
|
|
@@ -31,18 +32,64 @@ export class SequentialThinkingServer {
|
|
|
31
32
|
}
|
|
32
33
|
}
|
|
33
34
|
async saveHistory() {
|
|
35
|
+
if (this.isSaving) {
|
|
36
|
+
// Simple retry if already saving
|
|
37
|
+
setTimeout(() => this.saveHistory(), 100);
|
|
38
|
+
return;
|
|
39
|
+
}
|
|
40
|
+
this.isSaving = true;
|
|
34
41
|
try {
|
|
35
|
-
|
|
42
|
+
// Atomic write: write to tmp then rename
|
|
43
|
+
const tmpPath = `${this.storagePath}.tmp`;
|
|
44
|
+
await fs.writeFile(tmpPath, JSON.stringify(this.thoughtHistory, null, 2), 'utf-8');
|
|
45
|
+
await fs.rename(tmpPath, this.storagePath);
|
|
36
46
|
}
|
|
37
47
|
catch (error) {
|
|
38
48
|
console.error(`Error saving history to ${this.storagePath}:`, error);
|
|
39
49
|
}
|
|
50
|
+
finally {
|
|
51
|
+
this.isSaving = false;
|
|
52
|
+
}
|
|
40
53
|
}
|
|
41
54
|
async clearHistory() {
|
|
42
55
|
this.thoughtHistory = [];
|
|
43
56
|
this.branches = {};
|
|
44
57
|
await this.saveHistory();
|
|
45
58
|
}
|
|
59
|
+
async archiveHistory(startIndex, endIndex, summary) {
|
|
60
|
+
if (startIndex < 1 || endIndex > this.thoughtHistory.length || startIndex > endIndex) {
|
|
61
|
+
throw new Error(`Invalid range: ${startIndex} to ${endIndex}. History length is ${this.thoughtHistory.length}.`);
|
|
62
|
+
}
|
|
63
|
+
const summaryThought = {
|
|
64
|
+
thought: `SUMMARY [${startIndex}-${endIndex}]: ${summary}`,
|
|
65
|
+
thoughtNumber: startIndex,
|
|
66
|
+
totalThoughts: this.thoughtHistory[this.thoughtHistory.length - 1].totalThoughts - (endIndex - startIndex),
|
|
67
|
+
nextThoughtNeeded: true,
|
|
68
|
+
thoughtType: 'analysis'
|
|
69
|
+
};
|
|
70
|
+
// Remove the range and insert summary
|
|
71
|
+
const removedCount = endIndex - startIndex + 1;
|
|
72
|
+
this.thoughtHistory.splice(startIndex - 1, removedCount, summaryThought);
|
|
73
|
+
// Renumber subsequent thoughts
|
|
74
|
+
for (let i = startIndex; i < this.thoughtHistory.length; i++) {
|
|
75
|
+
this.thoughtHistory[i].thoughtNumber -= (removedCount - 1);
|
|
76
|
+
}
|
|
77
|
+
// Rebuild branches (simplification: clear and let it rebuild if needed, or just clear)
|
|
78
|
+
this.branches = {};
|
|
79
|
+
this.thoughtHistory.forEach(t => {
|
|
80
|
+
if (t.branchFromThought && t.branchId) {
|
|
81
|
+
const branchKey = `${t.branchFromThought}-${t.branchId}`;
|
|
82
|
+
if (!this.branches[branchKey])
|
|
83
|
+
this.branches[branchKey] = [];
|
|
84
|
+
this.branches[branchKey].push(t);
|
|
85
|
+
}
|
|
86
|
+
});
|
|
87
|
+
await this.saveHistory();
|
|
88
|
+
return {
|
|
89
|
+
newHistoryLength: this.thoughtHistory.length,
|
|
90
|
+
summaryInsertedAt: startIndex
|
|
91
|
+
};
|
|
92
|
+
}
|
|
46
93
|
addToMemory(input) {
|
|
47
94
|
if (input.thoughtNumber > input.totalThoughts) {
|
|
48
95
|
input.totalThoughts = input.thoughtNumber;
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import * as fs from 'fs/promises';
|
|
3
|
+
import * as path from 'path';
|
|
4
|
+
import { SequentialThinkingServer } from './lib';
|
|
5
|
+
// Mock dependencies for read_webpage if needed,
|
|
6
|
+
// but for now let's test the logic we can control.
|
|
7
|
+
describe('New Tools Verification', () => {
|
|
8
|
+
const testDir = path.resolve('test_sandbox');
|
|
9
|
+
beforeEach(async () => {
|
|
10
|
+
await fs.mkdir(testDir, { recursive: true });
|
|
11
|
+
});
|
|
12
|
+
afterEach(async () => {
|
|
13
|
+
await fs.rm(testDir, { recursive: true, force: true });
|
|
14
|
+
});
|
|
15
|
+
it('search_code should find patterns and ignore node_modules', async () => {
|
|
16
|
+
// Setup files
|
|
17
|
+
await fs.writeFile(path.join(testDir, 'target.ts'), 'const x = "FIND_ME";');
|
|
18
|
+
await fs.writeFile(path.join(testDir, 'other.ts'), 'const y = "nope";');
|
|
19
|
+
const modulesDir = path.join(testDir, 'node_modules');
|
|
20
|
+
await fs.mkdir(modulesDir);
|
|
21
|
+
await fs.writeFile(path.join(modulesDir, 'ignored.ts'), 'const z = "FIND_ME";');
|
|
22
|
+
// Logic from search_code (replicated here for unit testing the logic itself,
|
|
23
|
+
// effectively testing the implementation I wrote in index.ts)
|
|
24
|
+
async function searchDir(dir, pattern) {
|
|
25
|
+
const results = [];
|
|
26
|
+
const entries = await fs.readdir(dir, { withFileTypes: true });
|
|
27
|
+
for (const entry of entries) {
|
|
28
|
+
const fullPath = path.join(dir, entry.name);
|
|
29
|
+
if (entry.isDirectory()) {
|
|
30
|
+
if (['node_modules', '.git', 'dist', 'coverage', '.gemini'].includes(entry.name))
|
|
31
|
+
continue;
|
|
32
|
+
results.push(...await searchDir(fullPath, pattern));
|
|
33
|
+
}
|
|
34
|
+
else if (/\.(ts|js|json|md|txt|html|css|py|java|c|cpp|h|rs|go)$/.test(entry.name)) {
|
|
35
|
+
const content = await fs.readFile(fullPath, 'utf-8');
|
|
36
|
+
if (content.includes(pattern)) {
|
|
37
|
+
results.push(fullPath);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
return results;
|
|
42
|
+
}
|
|
43
|
+
const results = await searchDir(testDir, 'FIND_ME');
|
|
44
|
+
expect(results).toHaveLength(1);
|
|
45
|
+
expect(results[0]).toContain('target.ts');
|
|
46
|
+
expect(results[0]).not.toContain('node_modules');
|
|
47
|
+
});
|
|
48
|
+
it('SequentialThinkingServer should clear history', async () => {
|
|
49
|
+
const historyFile = path.join(testDir, 'test_history.json');
|
|
50
|
+
const server = new SequentialThinkingServer(historyFile);
|
|
51
|
+
// Add a thought
|
|
52
|
+
await server.processThought({
|
|
53
|
+
thought: "Test thought",
|
|
54
|
+
thoughtNumber: 1,
|
|
55
|
+
totalThoughts: 1,
|
|
56
|
+
nextThoughtNeeded: false
|
|
57
|
+
});
|
|
58
|
+
// Verify it was written
|
|
59
|
+
const contentBefore = JSON.parse(await fs.readFile(historyFile, 'utf-8'));
|
|
60
|
+
expect(contentBefore).toHaveLength(1);
|
|
61
|
+
// Clear history
|
|
62
|
+
await server.clearHistory();
|
|
63
|
+
// Verify it is empty
|
|
64
|
+
const contentAfter = JSON.parse(await fs.readFile(historyFile, 'utf-8'));
|
|
65
|
+
expect(contentAfter).toHaveLength(0);
|
|
66
|
+
});
|
|
67
|
+
});
|