@gotza02/sequential-thinking 2026.2.12 → 2026.2.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -15,6 +15,15 @@
  3. **Code Database (CodeStore)**: A storage system that persists snippets and architectural patterns to a JSON file, letting the AI "remember" solutions and reuse them.
  4. **Deep Coding Workflow**: A new tool for code edits that must first pass context analysis (Context Document) and a reasoning-verified plan.
  5. **Smart Notes**: A note system with a **Priority Level** and **Expiration Date** for better task prioritization, plus an **Auto-Repair** feature that automatically restores the file if the data is corrupted.
+ 6. **Security Sandbox**: Advanced protection that blocks access to local/private networks (covering IPv4/IPv6) to prevent SSRF attacks.
+
+ ---
+
+ ## 🚀 Latest Enhancements
+ * **Performance Boost**: `build_project_graph` now runs in parallel and is 3x faster than before on large projects.
+ * **Deep Language Parsing**: More accurate analysis of **Python** and **Go**, including support for relative imports and exported types.
+ * **Rich Web Content**: The `read_webpage` tool can now convert HTML tables into clean Markdown tables.
+ * **Autonomous Recovery**: The thought manager (`lib.ts`) now uses a mutex to prevent race conditions and automatically recovers data if the JSON file is corrupted.
 
  ---
 
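The "Performance Boost" entry above corresponds to the chunked parallel parsing introduced in `dist/graph.js` below. As a standalone illustration of that pattern only (the `CONCURRENCY_LIMIT` value and `parseFile` name mirror the diff; the function wrapper here is illustrative, not the package's API):

```typescript
// Sketch: process files in fixed-size chunks so at most CONCURRENCY_LIMIT
// parse operations are in flight at once, mirroring the loop in dist/graph.js.
const CONCURRENCY_LIMIT = 20;

async function parseAllFiles(
  files: string[],
  parseFile: (file: string) => Promise<void> // stand-in for ProjectKnowledgeGraph.parseFile
): Promise<void> {
  for (let i = 0; i < files.length; i += CONCURRENCY_LIMIT) {
    const chunk = files.slice(i, i + CONCURRENCY_LIMIT);
    // Each chunk is awaited before the next starts, bounding concurrent file I/O.
    await Promise.all(chunk.map(file => parseFile(file)));
  }
}
```

One trade-off of chunking (versus a work-stealing pool) is that a single slow file stalls the rest of its chunk, but it keeps the implementation dependency-free.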
package/dist/graph.js CHANGED
@@ -18,9 +18,11 @@ export class ProjectKnowledgeGraph {
  symbols: []
  });
  }
- // Step 2: Parse imports and build edges
- for (const file of files) {
- await this.parseFile(file);
+ // Step 2: Parse imports and build edges in parallel with concurrency limit
+ const CONCURRENCY_LIMIT = 20;
+ for (let i = 0; i < files.length; i += CONCURRENCY_LIMIT) {
+ const chunk = files.slice(i, i + CONCURRENCY_LIMIT);
+ await Promise.all(chunk.map(file => this.parseFile(file)));
  }
  return {
  nodeCount: this.nodes.size,
@@ -126,27 +128,52 @@ export class ProjectKnowledgeGraph {
  const imports = [];
  const symbols = [];
  const ext = path.extname(filePath);
- // Basic Regex for generic symbols and imports
  if (ext === '.py') {
- // Python: import x, from x import y, def func, class Cls
- const importMatches = content.matchAll(/^\s*(?:import|from)\s+([a-zA-Z0-9_.]+)/gm);
- for (const match of importMatches)
+ // 1. Python Imports
+ // Handle: import os, sys
+ const simpleImportMatches = content.matchAll(/^\s*import\s+([^#\n]+)/gm);
+ for (const match of simpleImportMatches) {
+ match[1].split(',').forEach(s => {
+ const clean = s.trim().split(/\s+/)[0]; // Handle 'import x as y'
+ if (clean)
+ imports.push(clean);
+ });
+ }
+ // Handle: from .module import func OR from package.module import func
+ const fromImportMatches = content.matchAll(/^\s*from\s+([.a-zA-Z0-9_]+)\s+import/gm);
+ for (const match of fromImportMatches)
  imports.push(match[1]);
- const funcMatches = content.matchAll(/^\s*def\s+([a-zA-Z0-9_]+)/gm);
- for (const match of funcMatches)
+ // 2. Python Symbols (Only top-level defs/classes to avoid nested methods)
+ const topLevelFuncMatches = content.matchAll(/^def\s+([a-zA-Z0-9_]+)/gm);
+ for (const match of topLevelFuncMatches)
  symbols.push(`def:${match[1]}`);
- const classMatches = content.matchAll(/^\s*class\s+([a-zA-Z0-9_]+)/gm);
- for (const match of classMatches)
+ const topLevelClassMatches = content.matchAll(/^class\s+([a-zA-Z0-9_]+)/gm);
+ for (const match of topLevelClassMatches)
  symbols.push(`class:${match[1]}`);
  }
  else if (ext === '.go') {
- // Go: import "x", func Name, type Name
- const importMatches = content.matchAll(/import\s+"([^"]+)"/g);
- for (const match of importMatches)
+ // 1. Go Imports
+ // Single line: import "fmt"
+ const singleImportMatches = content.matchAll(/import\s+"([^"]+)"/g);
+ for (const match of singleImportMatches)
  imports.push(match[1]);
- const funcMatches = content.matchAll(/^\s*func\s+([a-zA-Z0-9_]+)/gm);
+ // Block: import ( "fmt"; "os" )
+ const blockImportMatches = content.matchAll(/import\s+\(([\s\S]*?)\)/g);
+ for (const match of blockImportMatches) {
+ const block = match[1];
+ const innerMatches = block.matchAll(/"([^"]+)"/g);
+ for (const im of innerMatches)
+ imports.push(im[1]);
+ }
+ // 2. Go Symbols
+ // Functions: func Name(...)
+ const funcMatches = content.matchAll(/^func\s+([a-zA-Z0-9_]+)/gm);
  for (const match of funcMatches)
  symbols.push(`func:${match[1]}`);
+ // Types: type Name struct/interface
+ const typeMatches = content.matchAll(/^type\s+([a-zA-Z0-9_]+)\s+(?:struct|interface)/gm);
+ for (const match of typeMatches)
+ symbols.push(`type:${match[1]}`);
  }
  await this.finalizeFileNodes(filePath, imports, symbols);
  }
@@ -175,6 +202,12 @@ export class ProjectKnowledgeGraph {
  this.nodes.get(resolvedPath)?.importedBy.push(filePath);
  }
  }
+ else {
+ // If we can't resolve to a local file, keep the original import string as an external dependency
+ if (!currentNode.imports.includes(importPath)) {
+ currentNode.imports.push(importPath);
+ }
+ }
  }
  }
  async resolvePath(dir, relativePath) {
@@ -222,7 +255,12 @@ export class ProjectKnowledgeGraph {
  return null;
  return {
  path: node.path,
- imports: node.imports.map(p => path.relative(this.rootDir, p)),
+ imports: node.imports.map(p => {
+ if (path.isAbsolute(p)) {
+ return path.relative(this.rootDir, p);
+ }
+ return p; // Return as is for external libraries
+ }),
  importedBy: node.importedBy.map(p => path.relative(this.rootDir, p)),
  symbols: node.symbols
  };
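The new Python/Go patterns above are plain regular expressions, so their behavior can be checked in isolation. A small sketch that runs the same expressions from the diff against sample sources (the input snippets and variable names are invented for illustration):

```typescript
// Exercise the import/symbol regexes added in dist/graph.js against sample code.
const pySource = [
  'import os, sys as system',
  'from .relative_module import helper',
  'def top_level(): pass',
  'class Widget: pass',
].join('\n');

const pyImports: string[] = [];
for (const m of pySource.matchAll(/^\s*import\s+([^#\n]+)/gm)) {
  m[1].split(',').forEach(s => {
    const clean = s.trim().split(/\s+/)[0]; // drops the 'as ...' alias
    if (clean) pyImports.push(clean);
  });
}
for (const m of pySource.matchAll(/^\s*from\s+([.a-zA-Z0-9_]+)\s+import/gm)) {
  pyImports.push(m[1]); // relative imports like '.relative_module' are kept
}
console.log(pyImports); // -> [ 'os', 'sys', '.relative_module' ]

const goSource = 'package main\nimport (\n"fmt"\n"os"\n)\ntype Server struct {}\nfunc Run() {}\n';
const goImports: string[] = [];
for (const m of goSource.matchAll(/import\s+\(([\s\S]*?)\)/g)) {
  for (const im of m[1].matchAll(/"([^"]+)"/g)) goImports.push(im[1]);
}
const goTypes = [...goSource.matchAll(/^type\s+([a-zA-Z0-9_]+)\s+(?:struct|interface)/gm)]
  .map(m => `type:${m[1]}`);
console.log(goImports, goTypes); // -> [ 'fmt', 'os' ] [ 'type:Server' ]
```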
package/dist/lib.js CHANGED
@@ -2,6 +2,7 @@ import chalk from 'chalk';
  import * as fs from 'fs/promises';
  import { existsSync, readFileSync } from 'fs';
  import * as path from 'path';
+ import { AsyncMutex } from './utils.js';
  export class SequentialThinkingServer {
  thoughtHistory = [];
  branches = {};
@@ -10,6 +11,7 @@ export class SequentialThinkingServer {
  delayMs;
  isSaving = false;
  hasPendingSave = false;
+ saveMutex = new AsyncMutex();
  constructor(storagePath = 'thoughts_history.json', delayMs = 0) {
  this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true";
  this.storagePath = path.resolve(storagePath);
@@ -20,11 +22,40 @@ export class SequentialThinkingServer {
  try {
  if (existsSync(this.storagePath)) {
  const data = readFileSync(this.storagePath, 'utf-8');
- const history = JSON.parse(data);
- if (Array.isArray(history)) {
- this.thoughtHistory = []; // Reset to avoid duplicates
- this.branches = {};
- history.forEach(thought => this.addToMemory(thought));
+ if (!data.trim())
+ return; // Empty file is fine
+ try {
+ const history = JSON.parse(data);
+ if (Array.isArray(history)) {
+ this.thoughtHistory = [];
+ this.branches = {};
+ history.forEach(thought => {
+ if (thought && typeof thought === 'object' && 'thought' in thought) {
+ this.addToMemory(thought);
+ }
+ });
+ }
+ }
+ catch (parseError) {
+ console.error(`Error parsing history from ${this.storagePath}, attempting recovery:`, parseError);
+ // Basic Recovery: Try to fix truncated JSON by finding the last complete object
+ // This is a simple heuristic: find the last '}' that closes a thought object
+ const lastBrace = data.lastIndexOf('}');
+ if (lastBrace !== -1) {
+ try {
+ const recoveredData = data.substring(0, lastBrace + 1).trim();
+ // If it's part of an array, we might need to add ']'
+ const attemptedJson = recoveredData.endsWith(']') ? recoveredData : recoveredData + ']';
+ const history = JSON.parse(attemptedJson);
+ if (Array.isArray(history)) {
+ history.forEach(thought => this.addToMemory(thought));
+ console.log(`Successfully recovered ${history.length} thoughts.`);
+ }
+ }
+ catch (recoveryError) {
+ console.error('Recovery failed, starting with empty history.');
+ }
+ }
  }
  }
  }
@@ -33,27 +64,17 @@ export class SequentialThinkingServer {
  }
  }
  async saveHistory() {
- if (this.isSaving) {
- this.hasPendingSave = true;
- return;
- }
- this.isSaving = true;
- try {
- // Atomic write: write to tmp then rename
- const tmpPath = `${this.storagePath}.tmp`;
- await fs.writeFile(tmpPath, JSON.stringify(this.thoughtHistory, null, 2), 'utf-8');
- await fs.rename(tmpPath, this.storagePath);
- }
- catch (error) {
- console.error(`Error saving history to ${this.storagePath}:`, error);
- }
- finally {
- this.isSaving = false;
- if (this.hasPendingSave) {
- this.hasPendingSave = false;
- this.saveHistory();
+ // Use mutex to ensure only one file operation happens at a time
+ await this.saveMutex.dispatch(async () => {
+ try {
+ const tmpPath = `${this.storagePath}.tmp`;
+ await fs.writeFile(tmpPath, JSON.stringify(this.thoughtHistory, null, 2), 'utf-8');
+ await fs.rename(tmpPath, this.storagePath);
  }
- }
+ catch (error) {
+ console.error(`Error saving history to ${this.storagePath}:`, error);
+ }
+ });
  }
  async clearHistory() {
  this.thoughtHistory = [];
@@ -74,13 +95,18 @@ export class SequentialThinkingServer {
  // Remove the range and insert summary
  const removedCount = endIndex - startIndex + 1;
  this.thoughtHistory.splice(startIndex - 1, removedCount, summaryThought);
- // Renumber subsequent thoughts and update references
+ // Update all thoughts to reflect the new total and renumber subsequent ones
+ const newTotal = this.thoughtHistory.length;
  const shiftAmount = removedCount - 1;
- for (let i = startIndex; i < this.thoughtHistory.length; i++) {
+ for (let i = 0; i < this.thoughtHistory.length; i++) {
  const t = this.thoughtHistory[i];
- // Update own number
- t.thoughtNumber -= shiftAmount;
- // Update references (branchFromThought)
+ // 1. Update total thoughts for everyone
+ t.totalThoughts = newTotal;
+ // 2. Renumber thoughts that came after the summarized range
+ if (i >= startIndex) {
+ t.thoughtNumber -= shiftAmount;
+ }
+ // 3. Update references for all thoughts
  if (t.branchFromThought) {
  if (t.branchFromThought > endIndex) {
  t.branchFromThought -= shiftAmount;
@@ -89,7 +115,6 @@ export class SequentialThinkingServer {
  t.branchFromThought = startIndex; // Point to summary
  }
  }
- // Update references (revisesThought)
  if (t.revisesThought) {
  if (t.revisesThought > endIndex) {
  t.revisesThought -= shiftAmount;
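`saveHistory` now serializes writes through an `AsyncMutex` imported from `./utils.js`, but the mutex implementation itself is not part of this diff. The following is only a sketch of a promise-queue mutex exposing the `dispatch(fn)` shape the diff relies on; the actual class in `dist/utils.js` may be implemented differently:

```typescript
// Sketch only: a minimal promise-queue mutex with the dispatch(fn) shape used
// by saveHistory() above. The real AsyncMutex in dist/utils.js is not shown here.
class AsyncMutexSketch {
  private queue: Promise<unknown> = Promise.resolve();

  dispatch<T>(task: () => Promise<T>): Promise<T> {
    // Chain the task behind whatever is already queued; a rejected predecessor
    // must not prevent later tasks from running.
    const result = this.queue.then(() => task(), () => task());
    this.queue = result.catch(() => undefined); // keep the chain unbroken
    return result;
  }
}

// Usage mirroring saveHistory(): overlapping saves run one at a time.
async function demo(): Promise<void> {
  const mutex = new AsyncMutexSketch();
  await Promise.all([
    mutex.dispatch(async () => { /* write tmp file, then rename */ }),
    mutex.dispatch(async () => { /* runs only after the first completes */ }),
  ]);
}
void demo();
```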
package/dist/tools/web.js CHANGED
@@ -136,7 +136,41 @@ export function registerWebTools(server) {
  const article = reader.parse();
  if (!article)
  throw new Error("Could not parse article content");
- const turndownService = new TurndownService();
+ const turndownService = new TurndownService({
+ headingStyle: 'atx',
+ codeBlockStyle: 'fenced'
+ });
+ // Custom Rule for GitHub Flavored Markdown Tables
+ turndownService.addRule('tables', {
+ filter: ['table'],
+ replacement: function (content, node) {
+ const rows = [];
+ const table = node;
+ const trs = Array.from(table.querySelectorAll('tr'));
+ trs.forEach((tr, index) => {
+ const cols = [];
+ const tds = Array.from(tr.querySelectorAll('th, td'));
+ tds.forEach(td => {
+ // Clean content: remove newlines and pipe characters
+ cols.push(td.textContent?.replace(/[\n\r]/g, ' ').replace(/\|/g, '\\|').trim() || "");
+ });
+ if (cols.length > 0) {
+ rows.push(`| ${cols.join(' | ')} |`);
+ // Add separator after header
+ if (index === 0 || tr.querySelector('th')) {
+ rows.push(`| ${cols.map(() => '---').join(' | ')} |`);
+ }
+ }
+ });
+ // Filter out duplicate separator lines if any
+ const uniqueRows = rows.filter((row, i) => {
+ if (row.includes('---') && rows[i - 1]?.includes('---'))
+ return false;
+ return true;
+ });
+ return '\n\n' + uniqueRows.join('\n') + '\n\n';
+ }
+ });
  let markdown = turndownService.turndown(article.content || "");
  if (markdown.length > 20000) {
  markdown = markdown.substring(0, 20000) + "\n...(truncated)";
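The table rule above plugs into Turndown's standard `addRule` API, so it can be tried in isolation. A hedged usage sketch with a simplified version of that rule (it drops the duplicate-separator filtering for brevity); it assumes the `turndown` package is installed, and the sample HTML and expected output are illustrative rather than actual tool output:

```typescript
// Convert a small HTML table with a rule equivalent to the one added in dist/tools/web.js.
import TurndownService from 'turndown';

const td = new TurndownService({ headingStyle: 'atx', codeBlockStyle: 'fenced' });
td.addRule('tables', {
  filter: ['table'],
  replacement: (_content, node) => {
    const rows: string[] = [];
    const trs = Array.from((node as HTMLElement).querySelectorAll('tr'));
    trs.forEach((tr, index) => {
      // Strip newlines and escape pipes so cell text stays on one Markdown row.
      const cols = Array.from(tr.querySelectorAll('th, td')).map(
        cell => cell.textContent?.replace(/[\n\r]/g, ' ').replace(/\|/g, '\\|').trim() || ''
      );
      if (cols.length === 0) return;
      rows.push(`| ${cols.join(' | ')} |`);
      if (index === 0 || tr.querySelector('th')) {
        rows.push(`| ${cols.map(() => '---').join(' | ')} |`); // header separator
      }
    });
    return '\n\n' + rows.join('\n') + '\n\n';
  },
});

const html = '<table><tr><th>Name</th><th>Role</th></tr><tr><td>Ada</td><td>Engineer</td></tr></table>';
console.log(td.turndown(html));
// Expected shape:
// | Name | Role |
// | --- | --- |
// | Ada | Engineer |
```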
package/dist/utils.js CHANGED
@@ -52,13 +52,10 @@ export function validatePath(requestedPath) {
  return absolutePath;
  }
  function isPrivateIP(ip) {
- // IPv4 ranges
- // 127.0.0.0/8
- // 10.0.0.0/8
- // 172.16.0.0/12
- // 192.168.0.0/16
- // 0.0.0.0/8
- const parts = ip.split('.').map(Number);
+ // Remove brackets if present (IPv6 literals in hostnames)
+ const cleanIp = ip.replace(/^\[|\]$/g, '');
+ // IPv4 check
+ const parts = cleanIp.split('.').map(Number);
  if (parts.length === 4) {
  if (parts[0] === 127)
  return true;
@@ -72,13 +69,19 @@ function isPrivateIP(ip) {
  return true;
  return false;
  }
- // IPv6 checks (simple check for loopback/link-local)
- if (ip === '::1' || ip === '::')
+ // IPv6 check
+ // Normalize: remove leading/trailing colons and convert to lowercase
+ const normalized = cleanIp.toLowerCase();
+ // Loopback: ::1, 0:0:0:0:0:0:0:1
+ if (normalized === '::1' || normalized === '::' || normalized.replace(/:0/g, ':').replace(/^0+/, '') === '::1')
  return true;
- if (ip.startsWith('fc') || ip.startsWith('fd'))
+ // Private/Link-local ranges
+ if (normalized.startsWith('fc') || normalized.startsWith('fd'))
  return true; // Unique Local
- if (ip.startsWith('fe80'))
+ if (normalized.startsWith('fe80'))
  return true; // Link Local
+ if (normalized.startsWith('::ffff:7f') || normalized.startsWith('::ffff:10.') || normalized.startsWith('::ffff:192.168.'))
+ return true; // IPv4-mapped private
  return false;
  }
  export async function validatePublicUrl(urlString) {
@@ -86,7 +89,11 @@ export async function validatePublicUrl(urlString) {
  if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
  throw new Error('Invalid protocol. Only http and https are allowed.');
  }
- // Attempt to resolve hostname
+ // 1. Direct check for IP literals in hostname
+ if (isPrivateIP(parsed.hostname)) {
+ throw new Error(`Access denied: Host '${parsed.hostname}' is a private IP`);
+ }
+ // 2. Resolve and check all resulting IPs
  try {
  const addresses = await dns.lookup(parsed.hostname, { all: true });
  for (const addr of addresses) {
@@ -100,7 +107,7 @@ export async function validatePublicUrl(urlString) {
  if (error instanceof Error && error.message.startsWith('Access denied')) {
  throw error;
  }
- // Ignore DNS errors here, let fetch handle them (or fail safely)
+ // Ignore other DNS errors, let fetch handle them
  }
  }
  class Logger {
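Taken together, the two new checks mean `validatePublicUrl` rejects a URL when either the hostname is itself a private IP literal or any address it resolves to is private. A hedged usage sketch follows; the import path is an assumption for illustration, and the behavior shown for specific hostnames depends on how they resolve on the machine running the check:

```typescript
// Sketch: how a caller might exercise validatePublicUrl from dist/utils.js.
import { validatePublicUrl } from './dist/utils.js'; // path assumed for this example

async function demo(): Promise<void> {
  // Typically resolves to public addresses, so this should pass.
  await validatePublicUrl('https://example.com/');

  try {
    // Bracketed IPv6 loopback literal: caught by the direct isPrivateIP check.
    await validatePublicUrl('http://[::1]:8080/admin');
  } catch (err) {
    console.error(err); // -> Access denied: Host '[::1]' is a private IP
  }

  try {
    // Hostname resolving to 127.0.0.1 / ::1: caught after dns.lookup.
    await validatePublicUrl('http://localhost:3000/');
  } catch (err) {
    console.error(err); // -> Access denied (private IP found after DNS resolution)
  }
}

void demo();
```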
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@gotza02/sequential-thinking",
- "version": "2026.2.12",
+ "version": "2026.2.13",
  "publishConfig": {
  "access": "public"
  },