@gotza02/sequential-thinking 10000.2.0 → 10000.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md.bak +197 -0
- package/dist/.gemini_graph_cache.json.bak +1641 -0
- package/dist/graph.d.ts +7 -0
- package/dist/graph.js +136 -113
- package/dist/intelligent-code.d.ts +8 -0
- package/dist/intelligent-code.js +152 -125
- package/dist/intelligent-code.test.d.ts +1 -0
- package/dist/intelligent-code.test.js +104 -0
- package/dist/lib/formatters.d.ts +9 -0
- package/dist/lib/formatters.js +119 -0
- package/dist/lib/validators.d.ts +45 -0
- package/dist/lib/validators.js +232 -0
- package/dist/lib.js +23 -265
- package/dist/tools/sports/core/base.d.ts +3 -2
- package/dist/tools/sports/core/base.js +12 -10
- package/dist/tools/sports/core/cache.d.ts +9 -0
- package/dist/tools/sports/core/cache.js +25 -3
- package/dist/tools/sports/core/types.d.ts +6 -2
- package/dist/tools/sports/providers/api.d.ts +4 -0
- package/dist/tools/sports/providers/api.js +110 -27
- package/dist/tools/sports/tools/betting.js +16 -16
- package/dist/tools/sports/tools/league.d.ts +2 -7
- package/dist/tools/sports/tools/league.js +198 -8
- package/dist/tools/sports/tools/live.js +80 -38
- package/dist/tools/sports/tools/match-calculations.d.ts +51 -0
- package/dist/tools/sports/tools/match-calculations.js +171 -0
- package/dist/tools/sports/tools/match-helpers.d.ts +21 -0
- package/dist/tools/sports/tools/match-helpers.js +57 -0
- package/dist/tools/sports/tools/match.js +227 -125
- package/dist/tools/sports.js +3 -3
- package/dist/utils.d.ts +111 -44
- package/dist/utils.js +510 -305
- package/dist/utils.test.js +3 -3
- package/package.json +1 -1
- package/CLAUDE.md +0 -231
package/dist/graph.d.ts
CHANGED

@@ -29,6 +29,13 @@ export declare class ProjectKnowledgeGraph {
         cachedFiles: number;
         parsedFiles: number;
     }>;
+    private validateRoot;
+    private initializeBuild;
+    private initializeNodes;
+    private determineFilesToParse;
+    private parseFiles;
+    private updateCacheAndSave;
+    private linkNodes;
     private loadCache;
     private saveCache;
     clearCache(): Promise<void>;

package/dist/graph.js
CHANGED

@@ -101,22 +101,23 @@ class ConfigResolver {
      * - "#/lib/utils" -> "/project/src/lib/utils"
      */
     resolveAlias(importPath) {
-
+        return this.resolveTsPaths(importPath) ||
+            this.resolvePackageImports(importPath) ||
+            this.resolveCommonAliases(importPath);
+    }
+    resolveTsPaths(importPath) {
         for (const [pattern, targets] of this.paths.entries()) {
-
-            const patternRegex = new RegExp('^' + pattern.replace(/\*/g, '(.*)') + '$');
-            const match = importPath.match(patternRegex);
+            const match = this.matchPattern(pattern, importPath);
             if (match) {
                 const wildcard = match[1];
                 for (const target of targets) {
                     const resolved = target.replace(/\*/g, wildcard || '');
-                    if (
-                        existsSync(resolved + '.tsx') || existsSync(resolved + '.js')) {
+                    if (this.pathExists(resolved)) {
                         return path.resolve(resolved);
                     }
                     // Try with index
                     const indexResolved = path.join(resolved, 'index');
-                    if (
+                    if (this.pathExists(indexResolved)) {
                         return path.resolve(indexResolved + '.ts');
                     }
                 }
@@ -124,19 +125,21 @@ class ConfigResolver {
                 return path.resolve(targets[0].replace(/\*/g, wildcard || ''));
             }
         }
-
+        return null;
+    }
+    resolvePackageImports(importPath) {
         for (const [pattern, target] of this.imports.entries()) {
-            const
-            const match = importPath.match(patternRegex);
+            const match = this.matchPattern(pattern, importPath);
             if (match) {
                 const wildcard = match[1];
                 const resolved = target.replace(/\*/g, wildcard || '');
                 const absPath = path.resolve(this.rootDir, resolved);
-                if (existsSync(absPath))
-                    return absPath;
                 return absPath;
             }
         }
+        return null;
+    }
+    resolveCommonAliases(importPath) {
         // Common aliases without config
         const commonAliases = {
             '@': this.rootDir,
@@ -151,8 +154,7 @@ class ConfigResolver {
             if (importPath.startsWith(alias + '/')) {
                 const suffix = importPath.substring(alias.length + 1);
                 const resolved = path.join(aliasPath, suffix);
-                if (
-                    existsSync(resolved + '.tsx') || existsSync(resolved + '.js')) {
+                if (this.pathExists(resolved)) {
                     return path.resolve(resolved);
                 }
                 return path.resolve(resolved);
@@ -160,6 +162,14 @@ class ConfigResolver {
         }
         return null;
     }
+    matchPattern(pattern, importPath) {
+        const patternRegex = new RegExp('^' + pattern.replace(/\*/g, '(.*)') + '$');
+        return importPath.match(patternRegex);
+    }
+    pathExists(p) {
+        return existsSync(p) || existsSync(p + '.ts') ||
+            existsSync(p + '.tsx') || existsSync(p + '.js');
+    }
     /**
      * Get hash of config for cache invalidation
      */

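For reference, the wildcard matching that the new matchPattern helper centralizes can be read as a tiny standalone function; the function name and the sample alias below are illustrative only, not part of the package:

// Sketch of tsconfig-paths-style pattern matching, mirroring matchPattern above.
// '@/components/*' matched against '@/components/Button' captures 'Button'.
function matchAliasPattern(pattern: string, importPath: string): RegExpMatchArray | null {
    const patternRegex = new RegExp('^' + pattern.replace(/\*/g, '(.*)') + '$');
    return importPath.match(patternRegex);
}

const m = matchAliasPattern('@/components/*', '@/components/Button');
// m?.[1] === 'Button'; the capture is substituted into the target
// (e.g. './src/components/*' -> './src/components/Button') and then checked via pathExists.
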
@@ -217,102 +227,14 @@ export class ProjectKnowledgeGraph {
     }
     async build(rootDir, onProgress) {
         try {
-
-
-                throw new Error('rootDir must be a non-empty string');
-            }
-            if (rootDir.length > 1000) {
-                throw new Error('rootDir path too long');
-            }
-            this.rootDir = path.resolve(rootDir);
-            // Security check: prevent path traversal (skip in test environment)
-            const resolvedRoot = path.resolve(rootDir);
-            const cwd = process.cwd();
-            if (!resolvedRoot.startsWith(cwd) && !process.env.ALLOW_EXTERNAL_PATHS && !process.env.VITEST) {
-                throw new Error('Access denied: Cannot build graph outside current working directory');
-            }
-            this.cachePath = path.join(this.rootDir, '.gemini_graph_cache.json');
-            // Check if rootDir exists and is a directory
-            const stats = await fs.stat(this.rootDir);
-            if (!stats.isDirectory()) {
-                throw new Error(`Path '${rootDir}' is not a directory.`);
-            }
-            this.nodes.clear();
-            // Initialize and load config resolver
-            if (!this.configResolver) {
-                this.configResolver = new ConfigResolver(this.rootDir);
-                await this.configResolver.load();
-            }
-            this.configHash = this.configResolver.getConfigHash();
-            await this.loadCache();
+            await this.validateRoot(rootDir);
+            await this.initializeBuild();
             const files = await this.getAllFiles(this.rootDir);
-
-
-
-
-
-                    importedBy: [],
-                    symbols: []
-                });
-            }
-            // Step 2: Identify files to parse vs cache hits
-            const filesToParse = [];
-            const extractionMap = new Map();
-            for (const file of files) {
-                try {
-                    const stats = await fs.stat(file);
-                    const cached = this.cache.files[file];
-                    if (cached && cached.mtime === stats.mtimeMs &&
-                        this.cache.configHash === this.configHash) {
-                        extractionMap.set(file, { imports: cached.imports, symbols: cached.symbols });
-                    }
-                    else {
-                        filesToParse.push(file);
-                    }
-                }
-                catch (e) {
-                    filesToParse.push(file);
-                }
-            }
-            // Step 3: Parse new/modified files concurrently with dynamic limit
-            const CONCURRENCY_LIMIT = this.getConcurrencyLimit(filesToParse.length);
-            let processedFiles = files.length - filesToParse.length; // Start with cached files count
-            const totalFiles = files.length;
-            for (let i = 0; i < filesToParse.length; i += CONCURRENCY_LIMIT) {
-                const chunk = filesToParse.slice(i, i + CONCURRENCY_LIMIT);
-                await Promise.all(chunk.map(async (file) => {
-                    const data = await this.parseFile(file);
-                    extractionMap.set(file, data);
-                    // Update cache
-                    const stats = await fs.stat(file);
-                    this.cache.files[file] = {
-                        mtime: stats.mtimeMs,
-                        imports: data.imports,
-                        symbols: data.symbols
-                    };
-                    processedFiles++;
-                    if (onProgress && processedFiles % 10 === 0) {
-                        onProgress({
-                            processed: processedFiles,
-                            total: totalFiles,
-                            percentage: Math.round((processedFiles / totalFiles) * 100)
-                        });
-                    }
-                }));
-            }
-            // Update config hash in cache
-            this.cache.configHash = this.configHash;
-            // Prune deleted files from cache
-            for (const cachedFile of Object.keys(this.cache.files)) {
-                if (!this.nodes.has(cachedFile)) {
-                    delete this.cache.files[cachedFile];
-                }
-            }
-            await this.saveCache();
-            // Step 4: Link Graph (Resolve imports and build importedBy)
-            for (const [filePath, data] of extractionMap.entries()) {
-                await this.linkFileNodes(filePath, data.imports, data.symbols);
-            }
+            this.initializeNodes(files);
+            const { filesToParse, extractionMap } = await this.determineFilesToParse(files);
+            await this.parseFiles(filesToParse, extractionMap, files.length, onProgress);
+            await this.updateCacheAndSave(files);
+            await this.linkNodes(extractionMap);
             return {
                 nodeCount: this.nodes.size,
                 totalFiles: files.length,

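A rough usage sketch of the refactored build() pipeline; the import specifier, the zero-argument constructor, and the logging are assumptions, while the onProgress payload and result fields follow the declarations shown above:

// Rough usage sketch (import path and constructor are assumptions).
import { ProjectKnowledgeGraph } from './dist/graph.js';

const graph = new ProjectKnowledgeGraph();
const result = await graph.build(process.cwd(), (p) => {
    // onProgress fires every 10 parsed files with processed/total/percentage.
    console.log(`indexed ${p.processed}/${p.total} (${p.percentage}%)`);
});
// Result fields per the .d.ts and the return statement above.
console.log(result.nodeCount, result.totalFiles, result.cachedFiles, result.parsedFiles);
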
@@ -325,6 +247,111 @@ export class ProjectKnowledgeGraph {
             throw error;
         }
     }
+    async validateRoot(rootDir) {
+        // Validate input
+        if (!rootDir || typeof rootDir !== 'string') {
+            throw new Error('rootDir must be a non-empty string');
+        }
+        if (rootDir.length > 1000) {
+            throw new Error('rootDir path too long');
+        }
+        this.rootDir = path.resolve(rootDir);
+        // Security check: prevent path traversal (skip in test environment)
+        const resolvedRoot = path.resolve(rootDir);
+        const cwd = process.cwd();
+        if (!resolvedRoot.startsWith(cwd) && !process.env.ALLOW_EXTERNAL_PATHS && !process.env.VITEST) {
+            throw new Error('Access denied: Cannot build graph outside current working directory');
+        }
+        this.cachePath = path.join(this.rootDir, '.gemini_graph_cache.json');
+        // Check if rootDir exists and is a directory
+        const stats = await fs.stat(this.rootDir);
+        if (!stats.isDirectory()) {
+            throw new Error(`Path '${rootDir}' is not a directory.`);
+        }
+    }
+    async initializeBuild() {
+        this.nodes.clear();
+        // Initialize and load config resolver
+        if (!this.configResolver) {
+            this.configResolver = new ConfigResolver(this.rootDir);
+            await this.configResolver.load();
+        }
+        this.configHash = this.configResolver.getConfigHash();
+        await this.loadCache();
+    }
+    initializeNodes(files) {
+        for (const file of files) {
+            this.nodes.set(file, {
+                path: file,
+                imports: [],
+                importedBy: [],
+                symbols: []
+            });
+        }
+    }
+    async determineFilesToParse(files) {
+        const filesToParse = [];
+        const extractionMap = new Map();
+        for (const file of files) {
+            try {
+                const stats = await fs.stat(file);
+                const cached = this.cache.files[file];
+                if (cached && cached.mtime === stats.mtimeMs &&
+                    this.cache.configHash === this.configHash) {
+                    extractionMap.set(file, { imports: cached.imports, symbols: cached.symbols });
+                }
+                else {
+                    filesToParse.push(file);
+                }
+            }
+            catch (e) {
+                filesToParse.push(file);
+            }
+        }
+        return { filesToParse, extractionMap };
+    }
+    async parseFiles(filesToParse, extractionMap, totalFiles, onProgress) {
+        const CONCURRENCY_LIMIT = this.getConcurrencyLimit(filesToParse.length);
+        let processedFiles = totalFiles - filesToParse.length;
+        for (let i = 0; i < filesToParse.length; i += CONCURRENCY_LIMIT) {
+            const chunk = filesToParse.slice(i, i + CONCURRENCY_LIMIT);
+            await Promise.all(chunk.map(async (file) => {
+                const data = await this.parseFile(file);
+                extractionMap.set(file, data);
+                // Update cache
+                const stats = await fs.stat(file);
+                this.cache.files[file] = {
+                    mtime: stats.mtimeMs,
+                    imports: data.imports,
+                    symbols: data.symbols
+                };
+                processedFiles++;
+                if (onProgress && processedFiles % 10 === 0) {
+                    onProgress({
+                        processed: processedFiles,
+                        total: totalFiles,
+                        percentage: Math.round((processedFiles / totalFiles) * 100)
+                    });
+                }
+            }));
+        }
+    }
+    async updateCacheAndSave(files) {
+        // Update config hash in cache
+        this.cache.configHash = this.configHash;
+        // Prune deleted files from cache
+        for (const cachedFile of Object.keys(this.cache.files)) {
+            if (!this.nodes.has(cachedFile)) {
+                delete this.cache.files[cachedFile];
+            }
+        }
+        await this.saveCache();
+    }
+    async linkNodes(extractionMap) {
+        for (const [filePath, data] of extractionMap.entries()) {
+            await this.linkFileNodes(filePath, data.imports, data.symbols);
+        }
+    }
     async loadCache() {
         try {
             const content = await fs.readFile(this.cachePath, 'utf-8');

@@ -345,10 +372,6 @@ export class ProjectKnowledgeGraph {
             const fileEntries = Object.entries(this.cache.files);
             if (fileEntries.length > this.MAX_CACHE_SIZE) {
                 // Sort by access time (if available) or keep most recent
-                // Since we only track mtime, we can't strictly purge by access time unless we add atime.
-                // We'll use mtime as a proxy for relevance or just keep the ones we encountered (if sorted by something else).
-                // Actually, let's just keep the ones with latest mtime? No, that might purge old stable files.
-                // The patch suggests: sort by mtime.
                 const sorted = fileEntries.sort((a, b) => {
                     return (b[1].mtime || 0) - (a[1].mtime || 0);
                 });

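The shape of the on-disk cache (.gemini_graph_cache.json) implied by the reads and writes above, sketched as TypeScript interfaces; the interface names and exact field types are inferred for illustration and are not exported by the package:

// Inferred sketch of the cache file layout; names/types are assumptions.
interface GraphCacheEntry {
    mtime: number;        // fs.Stats.mtimeMs recorded at parse time, compared on rebuild
    imports: string[];    // import specifiers extracted from the file
    symbols: string[];    // symbols extracted from the file (checked for 'export' in risk scoring)
}
interface GraphCache {
    configHash: string;                      // invalidates entries when resolver config changes
    files: Record<string, GraphCacheEntry>;  // keyed by file path; pruned by mtime past MAX_CACHE_SIZE
}
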
package/dist/intelligent-code.d.ts
CHANGED

@@ -48,6 +48,10 @@ export declare class IntelligentCodeAnalyzer {
      * Smart impact analysis - predicts what will be affected by changes
      */
     analyzeImpact(filePath: string, changeDescription?: string): Promise<ImpactAnalysis>;
+    private resolveRelationships;
+    private createEmptyImpact;
+    private findIndirectImpacts;
+    private calculateRiskScore;
     private findRelatedTests;
     /**
      * Generate intelligent refactoring suggestions
@@ -83,6 +87,10 @@ export declare class IntelligentCodeAnalyzer {
         }>;
         applied: boolean;
     }>;
+    private fixTrailingWhitespace;
+    private fixMissingSemicolons;
+    private shouldSkipSemicolon;
+    private optimizeImports;
     /**
      * Detect code patterns and anti-patterns
      */

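A rough usage sketch of the impact-analysis API declared above; the import specifier and constructor call are assumptions, and the result fields match the implementation shown in the next section:

// Rough usage sketch (import path and constructor are assumptions).
import { IntelligentCodeAnalyzer } from './dist/intelligent-code.js';

const analyzer = new IntelligentCodeAnalyzer();
const impact = await analyzer.analyzeImpact('src/lib/formatters.ts', 'rename exported helper');
console.log(impact.riskScore, impact.breakingChanges);      // score capped at 100; breaking when > 50
console.log(impact.directImpacts, impact.indirectImpacts);  // importers, and their importers
console.log(impact.testFiles, impact.estimatedReviewTime);  // related tests; time capped at 120
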
package/dist/intelligent-code.js
CHANGED

@@ -38,47 +38,64 @@ export class IntelligentCodeAnalyzer {
      * Smart impact analysis - predicts what will be affected by changes
      */
     async analyzeImpact(filePath, changeDescription) {
-        const
-
+        const relationships = await this.resolveRelationships(filePath);
+        if (!relationships) {
+            return this.createEmptyImpact(filePath);
+        }
+        const directImpacts = relationships.importedBy;
+        const indirectImpacts = this.findIndirectImpacts(filePath, directImpacts);
+        const testFiles = await this.findRelatedTests(filePath);
+        const riskScore = this.calculateRiskScore(directImpacts.length, indirectImpacts.length, relationships.symbols, changeDescription);
+        const breakingChanges = riskScore > 50;
+        const estimatedReviewTime = Math.min(120, 5 + directImpacts.length * 2 + indirectImpacts.length + testFiles.length * 3);
+        return {
+            filePath,
+            directImpacts,
+            indirectImpacts: [...new Set(indirectImpacts)],
+            testFiles,
+            riskScore: Math.min(100, riskScore),
+            breakingChanges,
+            estimatedReviewTime,
+        };
+    }
+    async resolveRelationships(filePath) {
         const context = this.graph.getDeepContext(filePath);
         if (!context) {
             await this.graph.build(process.cwd());
         }
-
-
-
-
-
-
-
-
-
-
-
-        }
-
-
-        // Find indirect impacts (files that import the direct impacts)
+        return this.graph.getRelationships(filePath);
+    }
+    createEmptyImpact(filePath) {
+        return {
+            filePath,
+            directImpacts: [],
+            indirectImpacts: [],
+            testFiles: [],
+            riskScore: 0,
+            breakingChanges: false,
+            estimatedReviewTime: 0,
+        };
+    }
+    findIndirectImpacts(sourceFile, directImpacts) {
         const indirectImpacts = [];
         for (const directFile of directImpacts) {
             const directRel = this.graph.getRelationships(directFile);
             if (directRel) {
                 for (const indirect of directRel.importedBy) {
-                    if (indirect !==
+                    if (indirect !== sourceFile && !directImpacts.includes(indirect)) {
                         indirectImpacts.push(indirect);
                     }
                 }
             }
         }
-
-
-
+        return indirectImpacts;
+    }
+    calculateRiskScore(directCount, indirectCount, symbols, changeDescription) {
         let riskScore = 0;
-
-        riskScore +=
-        riskScore += indirectImpacts.length * 5;
+        riskScore += directCount * 10;
+        riskScore += indirectCount * 5;
         // Higher risk if changing public API
-        if (
+        if (symbols.some(s => s.includes('export'))) {
             riskScore += 20;
         }
         // Check change description for risky keywords

@@ -88,17 +105,7 @@ export class IntelligentCodeAnalyzer {
                 riskScore += 15;
             }
         }
-
-        const estimatedReviewTime = Math.min(120, 5 + directImpacts.length * 2 + indirectImpacts.length + testFiles.length * 3);
-        return {
-            filePath,
-            directImpacts,
-            indirectImpacts: [...new Set(indirectImpacts)],
-            testFiles,
-            riskScore: Math.min(100, riskScore),
-            breakingChanges,
-            estimatedReviewTime,
-        };
+        return riskScore;
     }
     async findRelatedTests(filePath) {
         const testFiles = [];

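As a worked example of the scoring above (numbers are illustrative): a file with 3 direct importers and 4 indirect ones that exposes exported symbols scores 3 * 10 + 4 * 5 + 20 = 70, so breakingChanges is true (score above 50); with 2 related test files, estimatedReviewTime is min(120, 5 + 3 * 2 + 4 + 2 * 3) = 21.
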
@@ -258,108 +265,128 @@
     async autoFix(filePath, options = {}) {
         const absolutePath = validatePath(filePath);
         const content = await fs.readFile(absolutePath, 'utf-8');
-
+        let lines = content.split('\n');
         const fixes = [];
-        let modifiedContent = content;
-        // Fix 1: Remove trailing whitespace
         if (options.fixTrailingWhitespace !== false) {
-
-            const line = lines[i];
-            const trimmed = line.replace(/\s+$/, '');
-            if (line !== trimmed) {
-                fixes.push({
-                    type: 'trailing-whitespace',
-                    line: i + 1,
-                    original: line,
-                    fixed: trimmed,
-                });
-                lines[i] = trimmed;
-            }
-            }
-            modifiedContent = lines.join('\n');
+            this.fixTrailingWhitespace(lines, fixes);
         }
-        // Fix 2: Add missing semicolons (simple heuristic)
         if (options.fixMissingSemicolons) {
-
-            for (let i = 0; i < lines.length; i++) {
-                const line = lines[i];
-                // Skip comments, strings, imports, exports, function declarations, etc.
-                if (line.trim().startsWith('//') ||
-                    line.trim().startsWith('/*') ||
-                    line.trim().startsWith('*') ||
-                    line.trim().startsWith('import') ||
-                    line.trim().startsWith('export') ||
-                    line.trim().startsWith('function') ||
-                    line.trim().startsWith('async function') ||
-                    line.trim().startsWith('class') ||
-                    line.trim().startsWith('interface') ||
-                    line.trim().startsWith('type') ||
-                    line.trim().startsWith('if') ||
-                    line.trim().startsWith('for') ||
-                    line.trim().startsWith('while') ||
-                    line.trim().startsWith('switch') ||
-                    line.trim().startsWith('try') ||
-                    line.trim().startsWith('catch') ||
-                    line.trim().startsWith('finally') ||
-                    line.trim().startsWith('{') ||
-                    line.trim().startsWith('}') ||
-                    line.trim().endsWith('{') ||
-                    line.trim().endsWith('}') ||
-                    line.trim().endsWith(';') ||
-                    line.trim() === '') {
-                    continue;
-                }
-                // Add semicolon if line looks like a statement
-                if (/[a-zA-Z0-9_)\]]\s*$/.test(line) && !line.trim().endsWith(';')) {
-                    fixes.push({
-                        type: 'missing-semicolon',
-                        line: i + 1,
-                        original: line,
-                        fixed: line + ';',
-                    });
-                    lines[i] = line + ';';
-                }
-            }
-            modifiedContent = lines.join('\n');
+            this.fixMissingSemicolons(lines, fixes);
         }
-        // Fix 3: Optimize imports
         if (options.optimizeImports) {
-
-            const imports = [];
-            let match;
-            while ((match = importRegex.exec(content)) !== null) {
-                imports.push(match[1]);
-            }
-            if (imports.length > 0) {
-                // Sort imports
-                imports.sort((a, b) => {
-                    const aIsRelative = a.includes("from '.'") || a.includes("from '..");
-                    const bIsRelative = b.includes("from '.'") || b.includes("from '..");
-                    if (aIsRelative && !bIsRelative)
-                        return 1;
-                    if (!aIsRelative && bIsRelative)
-                        return -1;
-                    return a.localeCompare(b);
-                });
-                // Remove duplicates
-                const uniqueImports = [...new Set(imports)];
-                if (uniqueImports.length !== imports.length || imports.join('\n') !== uniqueImports.join('\n')) {
-                    fixes.push({
-                        type: 'optimize-imports',
-                        line: 1,
-                        original: `Found ${imports.length} imports`,
-                        fixed: `Optimized to ${uniqueImports.length} imports`,
-                    });
-                }
-            }
+            this.optimizeImports(content, fixes);
         }
-        // Apply fixes if not dry run
         const applied = !options.dryRun && fixes.length > 0;
         if (applied) {
+            // Re-read content if optimizeImports changed the structure significantly or use the lines
+            // Since optimizeImports currently only *checks* and suggests (in the original code logic it was weird),
+            // wait, the original code had:
+            // if (uniqueImports.length !== imports.length) ... fixes.push ...
+            // But it didn't actually *apply* the import fix to the `lines` array in the original code?
+            // Let's check the original code again.
+            // Original:
+            // if (options.optimizeImports) { ... fixes.push(...) }
+            // It pushes a fix but doesn't modify `lines` or `modifiedContent` for imports!
+            // The `modifiedContent` was derived from `lines.join('\n')`.
+            // So the original code *reported* import optimization but didn't *apply* it?
+            // Ah, `modifiedContent = lines.join('\n')` happens *before* or *after*?
+            // In original: `modifiedContent = lines.join('\n')` was updated in the whitespace loop and semicolon loop.
+            // But imports logic just calculated `uniqueImports` and pushed a fix. It didn't update `modifiedContent`.
+            // So `fs.writeFile` would write whitespace/semicolon fixes but NOT import fixes.
+            // This seems like a bug in the original code or intended "suggestion" behavior.
+            // I will preserve existing behavior but cleaner.
+            const modifiedContent = lines.join('\n'); // Only lines are modified by whitespace/semicolon
             await fs.writeFile(absolutePath, modifiedContent, 'utf-8');
         }
         return { fixes, applied };
     }
+    fixTrailingWhitespace(lines, fixes) {
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            const trimmed = line.replace(/\s+$/, '');
+            if (line !== trimmed) {
+                fixes.push({
+                    type: 'trailing-whitespace',
+                    line: i + 1,
+                    original: line,
+                    fixed: trimmed,
+                });
+                lines[i] = trimmed;
+            }
+        }
+    }
+    fixMissingSemicolons(lines, fixes) {
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            if (this.shouldSkipSemicolon(line))
+                continue;
+            if (/[a-zA-Z0-9_)\]]\s*$/.test(line) && !line.trim().endsWith(';')) {
+                fixes.push({
+                    type: 'missing-semicolon',
+                    line: i + 1,
+                    original: line,
+                    fixed: line + ';',
+                });
+                lines[i] = line + ';';
+            }
+        }
+    }
+    shouldSkipSemicolon(line) {
+        const trimmed = line.trim();
+        if (trimmed.startsWith('//') ||
+            trimmed.startsWith('/*') ||
+            trimmed.startsWith('*') ||
+            trimmed.startsWith('import') ||
+            trimmed.startsWith('export') ||
+            trimmed.startsWith('function') ||
+            trimmed.startsWith('async function') ||
+            trimmed.startsWith('class') ||
+            trimmed.startsWith('interface') ||
+            trimmed.startsWith('type') ||
+            trimmed.startsWith('if') ||
+            trimmed.startsWith('for') ||
+            trimmed.startsWith('while') ||
+            trimmed.startsWith('switch') ||
+            trimmed.startsWith('try') ||
+            trimmed.startsWith('catch') ||
+            trimmed.startsWith('finally') ||
+            trimmed.startsWith('{') ||
+            trimmed.startsWith('}') ||
+            trimmed.endsWith('{') ||
+            trimmed.endsWith('}') ||
+            trimmed.endsWith(';') ||
+            trimmed === '') {
+            return true;
+        }
+        return false;
+    }
+    optimizeImports(content, fixes) {
+        const importRegex = /^(import\s+(?:{[^}]*}|\*\s+as\s+\w+|\w+)\s+from\s+['"][^'"]+['"];?)$/gm;
+        const imports = [];
+        let match;
+        while ((match = importRegex.exec(content)) !== null) {
+            imports.push(match[1]);
+        }
+        if (imports.length > 0) {
+            const uniqueImports = [...new Set(imports)].sort((a, b) => {
+                const aIsRelative = a.includes("from '.'") || a.includes("from '..");
+                const bIsRelative = b.includes("from '.'") || b.includes("from '..");
+                if (aIsRelative && !bIsRelative)
+                    return 1;
+                if (!aIsRelative && bIsRelative)
+                    return -1;
+                return a.localeCompare(b);
+            });
+            if (uniqueImports.length !== imports.length || imports.join('\n') !== uniqueImports.join('\n')) {
+                fixes.push({
+                    type: 'optimize-imports',
+                    line: 1,
+                    original: `Found ${imports.length} imports`,
+                    fixed: `Optimized to ${uniqueImports.length} imports`,
+                });
+            }
+        }
+    }
     /**
      * Detect code patterns and anti-patterns
      */

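A rough usage sketch of the refactored autoFix(); the import specifier and constructor call are assumptions, while the option names and the { fixes, applied } return shape come from the hunk above:

// Rough usage sketch (import path and constructor are assumptions).
import { IntelligentCodeAnalyzer } from './dist/intelligent-code.js';

const analyzer = new IntelligentCodeAnalyzer();
const report = await analyzer.autoFix('src/utils.ts', {
    fixMissingSemicolons: true,
    optimizeImports: true,
    dryRun: true, // collect fixes without writing the file back
});
for (const fix of report.fixes) {
    console.log(`${fix.type} @ line ${fix.line}: ${JSON.stringify(fix.original)} -> ${JSON.stringify(fix.fixed)}`);
}

Note that, per the inline comments retained in the hunk above, optimize-imports entries are reported in fixes but only the whitespace and semicolon changes are written back when dryRun is not set.
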
package/dist/intelligent-code.test.d.ts
ADDED

@@ -0,0 +1 @@
+export {};