code-graph-context 1.0.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +221 -101
- package/dist/core/config/fairsquare-framework-schema.js +47 -60
- package/dist/core/config/nestjs-framework-schema.js +11 -1
- package/dist/core/config/schema.js +1 -1
- package/dist/core/config/timeouts.js +27 -0
- package/dist/core/embeddings/embeddings.service.js +122 -2
- package/dist/core/embeddings/natural-language-to-cypher.service.js +428 -30
- package/dist/core/parsers/parser-factory.js +6 -6
- package/dist/core/parsers/typescript-parser.js +639 -44
- package/dist/core/parsers/workspace-parser.js +553 -0
- package/dist/core/utils/edge-factory.js +37 -0
- package/dist/core/utils/file-change-detection.js +105 -0
- package/dist/core/utils/file-utils.js +20 -0
- package/dist/core/utils/index.js +3 -0
- package/dist/core/utils/path-utils.js +75 -0
- package/dist/core/utils/progress-reporter.js +112 -0
- package/dist/core/utils/project-id.js +176 -0
- package/dist/core/utils/retry.js +41 -0
- package/dist/core/workspace/index.js +4 -0
- package/dist/core/workspace/workspace-detector.js +221 -0
- package/dist/mcp/constants.js +172 -7
- package/dist/mcp/handlers/cross-file-edge.helpers.js +19 -0
- package/dist/mcp/handlers/file-change-detection.js +105 -0
- package/dist/mcp/handlers/graph-generator.handler.js +97 -32
- package/dist/mcp/handlers/incremental-parse.handler.js +146 -0
- package/dist/mcp/handlers/streaming-import.handler.js +210 -0
- package/dist/mcp/handlers/traversal.handler.js +130 -71
- package/dist/mcp/mcp.server.js +46 -7
- package/dist/mcp/service-init.js +79 -0
- package/dist/mcp/services/job-manager.js +165 -0
- package/dist/mcp/services/watch-manager.js +376 -0
- package/dist/mcp/services.js +48 -127
- package/dist/mcp/tools/check-parse-status.tool.js +64 -0
- package/dist/mcp/tools/impact-analysis.tool.js +319 -0
- package/dist/mcp/tools/index.js +15 -1
- package/dist/mcp/tools/list-projects.tool.js +62 -0
- package/dist/mcp/tools/list-watchers.tool.js +51 -0
- package/dist/mcp/tools/natural-language-to-cypher.tool.js +34 -8
- package/dist/mcp/tools/parse-typescript-project.tool.js +325 -60
- package/dist/mcp/tools/search-codebase.tool.js +57 -23
- package/dist/mcp/tools/start-watch-project.tool.js +100 -0
- package/dist/mcp/tools/stop-watch-project.tool.js +49 -0
- package/dist/mcp/tools/traverse-from-node.tool.js +68 -9
- package/dist/mcp/utils.js +35 -12
- package/dist/mcp/workers/parse-worker.js +198 -0
- package/dist/storage/neo4j/neo4j.service.js +273 -34
- package/package.json +4 -2
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File Change Detection
|
|
3
|
+
* Shared utilities for detecting changed files for incremental parsing
|
|
4
|
+
*/
|
|
5
|
+
import { stat, realpath } from 'fs/promises';
|
|
6
|
+
import { resolve, sep } from 'path';
|
|
7
|
+
import { glob } from 'glob';
|
|
8
|
+
import { EXCLUDE_PATTERNS_GLOB } from '../../constants.js';
|
|
9
|
+
import { QUERIES } from '../../storage/neo4j/neo4j.service.js';
|
|
10
|
+
import { hashFile } from './file-utils.js';
|
|
11
|
+
/**
 * Detect which files have changed and need reparsing.
 * Compares current files on disk with indexed files in Neo4j.
 *
 * SECURITY: Validates that all file paths stay within the project directory
 * after symlink resolution to prevent path traversal attacks.
 *
 * @param projectPath - Root path of the project
 * @param neo4jService - Neo4j service instance
 * @param projectId - Project ID for scoping queries
 * @param options - Optional configuration ({ logWarnings?: boolean })
 * @returns Files that need reparsing and files that were deleted
 */
export const detectChangedFiles = async (projectPath, neo4jService, projectId, options = {}) => {
    const { logWarnings = true } = options;
    // SECURITY: Resolve project path to real path to handle symlinks consistently
    const realProjectPath = await realpath(projectPath);
    const relativeFiles = await glob('**/*.{ts,tsx}', { cwd: projectPath, ignore: EXCLUDE_PATTERNS_GLOB });
    // SECURITY: Validate each file stays within project directory after symlink resolution
    const validatedFiles = [];
    for (const relFile of relativeFiles) {
        const absolutePath = resolve(projectPath, relFile);
        try {
            const realFilePath = await realpath(absolutePath);
            // Check that resolved path is within project
            if (realFilePath.startsWith(realProjectPath + sep) || realFilePath === realProjectPath) {
                // Use realFilePath for consistent path matching with Neo4j
                validatedFiles.push(realFilePath);
            }
            else if (logWarnings) {
                console.warn(`SECURITY: Skipping file outside project directory: ${relFile}`);
            }
        }
        catch {
            // File may have been deleted between glob and realpath - skip it
            if (logWarnings) {
                console.warn(`File no longer accessible: ${relFile}`);
            }
        }
    }
    const currentFiles = new Set(validatedFiles);
    // Get indexed files from Neo4j (one record per tracked file with
    // filePath, mtime, size and contentHash)
    const indexedFiles = await neo4jService.run(QUERIES.GET_SOURCE_FILE_TRACKING_INFO, { projectId });
    const indexedMap = new Map(indexedFiles.map((f) => [f.filePath, f]));
    const filesToReparse = [];
    const filesToDelete = [];
    // Check each current file against indexed state
    for (const filePath of currentFiles) {
        const indexed = indexedMap.get(filePath);
        if (!indexed) {
            // New file - needs parsing
            filesToReparse.push(filePath);
            continue;
        }
        try {
            const fileStats = await stat(filePath);
            const currentHash = await hashFile(filePath);
            // Only skip if mtime, size, AND hash all match (correctness over optimization)
            if (fileStats.mtimeMs === indexed.mtime &&
                fileStats.size === indexed.size &&
                currentHash === indexed.contentHash) {
                continue;
            }
            // Any mismatch means file changed
            filesToReparse.push(filePath);
        }
        catch (error) {
            const nodeError = error;
            if (nodeError.code === 'ENOENT') {
                // BUGFIX: file vanished between glob and stat. Remove it from
                // currentFiles so the deletion pass below reports it as deleted.
                // Previously the stale entry stayed in the set and the file was
                // never added to filesToDelete, so it lingered in the index.
                // (Deleting the element currently being visited during Set
                // iteration is safe in JS.)
                currentFiles.delete(filePath);
                if (logWarnings) {
                    console.warn(`File deleted between glob and stat: ${filePath}`);
                }
            }
            else if (nodeError.code === 'EACCES') {
                // Permission denied - assume changed to be safe
                if (logWarnings) {
                    console.warn(`Permission denied reading file: ${filePath}`);
                }
                filesToReparse.push(filePath);
            }
            else {
                throw error;
            }
        }
    }
    // Find deleted files (indexed but no longer on disk)
    for (const indexedPath of indexedMap.keys()) {
        if (!currentFiles.has(indexedPath)) {
            filesToDelete.push(indexedPath);
        }
    }
    return { filesToReparse, filesToDelete };
};
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import * as crypto from 'crypto';
|
|
2
|
+
import * as fs from 'fs/promises';
|
|
3
|
+
import * as path from 'path';
|
|
4
|
+
const DEBUG_LOG_FILE = 'debug-search.log';
const LOG_SEPARATOR = '---';
const JSON_INDENT = 2;
/**
 * Compute the SHA-256 hex digest of a file's contents.
 * @param filePath - Path of the file to hash
 * @returns Lowercase hex digest string
 */
export const hashFile = async (filePath) => {
    const bytes = await fs.readFile(filePath);
    const digest = crypto.createHash('sha256');
    digest.update(bytes);
    return digest.digest('hex');
};
|
|
11
|
+
/**
 * Append a timestamped entry to the debug log file in the current working
 * directory. Write failures are reported to stderr and never thrown, so
 * logging cannot break the caller.
 * @param message - Text of the log entry
 * @param data - Optional payload, serialized as pretty-printed JSON
 */
export const debugLog = async (message, data) => {
    const stamp = new Date().toISOString();
    const payload = data ? JSON.stringify(data, null, JSON_INDENT) : '';
    const entry = `[${stamp}] ${message}\n${payload}\n${LOG_SEPARATOR}\n`;
    try {
        await fs.appendFile(path.join(process.cwd(), DEBUG_LOG_FILE), entry);
    }
    catch (error) {
        console.error('Failed to write debug log:', error);
    }
};
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Path Utilities
|
|
3
|
+
* Centralized path normalization functions using Node.js path module
|
|
4
|
+
*/
|
|
5
|
+
import path from 'path';
|
|
6
|
+
/**
 * Normalize a file path to an absolute, consistent format.
 * Relative inputs are resolved against the current working directory;
 * '.'/'..' segments and separators are normalized via path.normalize().
 * Empty/falsy input yields ''.
 */
export const normalizeFilePath = (filePath) => {
    if (!filePath) {
        return '';
    }
    const absolute = path.isAbsolute(filePath)
        ? filePath
        : path.resolve(process.cwd(), filePath);
    return path.normalize(absolute);
};
|
|
19
|
+
/**
 * Convert an absolute path to one relative to a root directory.
 * - Uses path.relative() for correct handling
 * - Returns '' for empty input, the input unchanged when no root is given,
 *   and the absolute input when the path lies outside the root
 */
export const toRelativePath = (absolutePath, projectRoot) => {
    if (!absolutePath)
        return '';
    if (!projectRoot)
        return absolutePath;
    const relative = path.relative(projectRoot, absolutePath);
    // BUGFIX: the path is outside the root only when the FIRST SEGMENT is
    // exactly '..'. A plain startsWith('..') check misclassified in-root
    // entries whose names merely begin with two dots (e.g. '<root>/..cache').
    if (relative === '..' || relative.startsWith(`..${path.sep}`)) {
        return absolutePath;
    }
    return relative;
};
|
|
36
|
+
/**
 * Find the common root directory of an array of file paths.
 * Falsy entries are ignored. Returns process.cwd() for an empty input,
 * the containing directory for a single path, and the longest shared
 * path-segment prefix otherwise (path.sep when nothing is shared).
 */
export const getCommonRoot = (filePaths) => {
    const candidates = filePaths.filter(Boolean);
    if (candidates.length === 0) {
        return process.cwd();
    }
    if (candidates.length === 1) {
        return path.dirname(candidates[0]);
    }
    const [first, ...rest] = candidates.map((p) => p.split(path.sep));
    const shared = [];
    for (const [index, segment] of first.entries()) {
        const allMatch = rest.every((segments) => segments[index] === segment);
        if (!allMatch) {
            break;
        }
        shared.push(segment);
    }
    return shared.join(path.sep) || path.sep;
};
|
|
60
|
+
/**
 * Cross-platform check for whether a path is absolute.
 * Thin wrapper over path.isAbsolute().
 */
export const isAbsolutePath = (filePath) => path.isAbsolute(filePath);
|
|
67
|
+
/**
 * Normalize a path for comparison/matching.
 * Applies path.normalize() for consistent separators and segment collapsing;
 * empty/falsy input yields ''.
 */
export const normalizeForComparison = (filePath) => (filePath ? path.normalize(filePath) : '');
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
/**
 * Progress Reporter
 * Emits progress updates during long-running operations like parsing large
 * codebases. Updates go to an optional async callback; every update is
 * enriched with elapsed time, and callback failures are logged but never
 * allowed to interrupt the operation being reported on.
 */
export class ProgressReporter {
    callback;
    startTime = Date.now();
    /**
     * Register the progress callback and restart the elapsed-time clock.
     */
    setCallback(callback) {
        this.callback = callback;
        this.startTime = Date.now();
    }
    /**
     * Deliver a progress update (with elapsedMs added to details) to the
     * callback, if one is registered.
     */
    async report(update) {
        if (!this.callback)
            return;
        const details = {
            ...update.details,
            elapsedMs: Date.now() - this.startTime,
        };
        const enriched = { ...update, details };
        try {
            await this.callback(enriched);
        }
        catch (error) {
            // Progress reporting is best-effort; never propagate callback errors.
            console.warn('Progress callback error:', error);
        }
    }
    /**
     * Report file-discovery progress.
     */
    async reportDiscovery(filesFound, message) {
        const text = message ?? `Discovered ${filesFound} files`;
        await this.report({
            phase: 'discovery',
            current: filesFound,
            total: filesFound,
            message: text,
        });
    }
    /**
     * Report per-file parsing progress.
     */
    async reportParsing(current, total, currentFile, chunkIndex, totalChunks) {
        await this.report({
            phase: 'parsing',
            current,
            total,
            message: `Parsing files: ${current}/${total}`,
            details: {
                filesProcessed: current,
                currentFile,
                chunkIndex,
                totalChunks,
            },
        });
    }
    /**
     * Report graph-import progress.
     */
    async reportImporting(nodesCreated, edgesCreated, total) {
        await this.report({
            phase: 'importing',
            current: nodesCreated + edgesCreated,
            total,
            message: `Importing: ${nodesCreated} nodes, ${edgesCreated} edges`,
            details: { nodesCreated, edgesCreated },
        });
    }
    /**
     * Report cross-file edge resolution progress.
     */
    async reportResolving(resolved, total) {
        await this.report({
            phase: 'resolving',
            current: resolved,
            total,
            message: `Resolving cross-file edges: ${resolved}/${total}`,
        });
    }
    /**
     * Report completion with final node/edge counts.
     */
    async reportComplete(nodesCreated, edgesCreated) {
        await this.report({
            phase: 'complete',
            current: 1,
            total: 1,
            message: `Complete: ${nodesCreated} nodes, ${edgesCreated} edges`,
            details: { nodesCreated, edgesCreated },
        });
    }
    /**
     * Restart the elapsed-time clock (call when starting a new operation).
     */
    reset() {
        this.startTime = Date.now();
    }
}
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
import crypto from 'crypto';
|
|
2
|
+
import { basename, resolve } from 'path';
|
|
3
|
+
/**
 * Prefix applied to every generated project ID
 */
const PROJECT_ID_PREFIX = 'proj_';
/**
 * Build a deterministic projectId from a project path.
 * The path is resolved to absolute form and SHA-256 hashed, so the same
 * project directory always maps to the same short ID.
 *
 * @param projectPath - Path to the project root (relative paths are resolved)
 * @returns projectId in the form 'proj_<12-hex-chars>'
 *
 * @example
 * generateProjectId('/Users/dev/my-api') // => 'proj_a1b2c3d4e5f6'
 */
export const generateProjectId = (projectPath) => {
    const absolutePath = resolve(projectPath);
    const digest = crypto.createHash('sha256').update(absolutePath).digest('hex');
    return `${PROJECT_ID_PREFIX}${digest.substring(0, 12)}`;
};
/**
 * Check whether a value is a well-formed projectId.
 * Valid form: 'proj_' followed by exactly 12 lowercase hex characters.
 *
 * @param projectId - Candidate value (any type accepted)
 * @returns true when valid, false otherwise
 */
export const validateProjectId = (projectId) => {
    if (!projectId || typeof projectId !== 'string') {
        return false;
    }
    if (!projectId.startsWith(PROJECT_ID_PREFIX)) {
        return false;
    }
    const hash = projectId.slice(PROJECT_ID_PREFIX.length);
    // The regex also pins the length to 12 lowercase hex characters.
    return hash.length === 12 && /^[a-f0-9]{12}$/.test(hash);
};
/**
 * Resolve a projectId: validate and return the explicit one when given,
 * otherwise derive it deterministically from the project path.
 *
 * @param projectPath - The project path (required)
 * @param projectId - Optional explicit projectId
 * @returns The resolved projectId
 * @throws Error when the explicit projectId is malformed
 */
export const resolveProjectId = (projectPath, projectId) => {
    if (!projectId) {
        return generateProjectId(projectPath);
    }
    if (!validateProjectId(projectId)) {
        throw new Error(`Invalid projectId format: '${projectId}'. Expected format: 'proj_<12-hex-chars>' (e.g., 'proj_a1b2c3d4e5f6')`);
    }
    return projectId;
};
|
|
73
|
+
/**
 * Extracts a friendly project name from a path or package.json.
 * Reads package.json's "name" field; falls back to the directory basename
 * when package.json is missing, unreadable, invalid JSON, or has no usable
 * string name.
 *
 * @param projectPath - The project root path
 * @returns The project name
 */
export const getProjectName = async (projectPath) => {
    const absolutePath = resolve(projectPath);
    try {
        // Try to read package.json for the name
        const fs = await import('fs/promises');
        // BUGFIX: build the path with resolve() instead of string
        // concatenation with '/' so the separator is correct on every platform.
        const packageJsonPath = resolve(absolutePath, 'package.json');
        const content = await fs.readFile(packageJsonPath, 'utf-8');
        const pkg = JSON.parse(content);
        // Guard against non-string "name" values in hand-edited manifests
        if (typeof pkg.name === 'string' && pkg.name) {
            return pkg.name;
        }
    }
    catch {
        // No package.json or invalid - fall back to directory name below
    }
    // Use directory basename as fallback
    return basename(absolutePath);
};
|
|
98
|
+
/**
 * Query to find project by name, path, or projectId.
 * Params: $input — compared against p.name, p.path, and p.projectId.
 * Returns at most one projectId.
 */
export const FIND_PROJECT_QUERY = `
MATCH (p:Project)
WHERE p.name = $input OR p.path = $input OR p.projectId = $input
RETURN p.projectId AS projectId
LIMIT 1
`;
/**
 * Query to create/update a Project node with status.
 * Params: $projectId (merge key), $name, $path, $status.
 * Stamps p.updatedAt with the server-side datetime().
 */
export const UPSERT_PROJECT_QUERY = `
MERGE (p:Project {projectId: $projectId})
SET p.name = $name,
p.path = $path,
p.status = $status,
p.updatedAt = datetime()
RETURN p.projectId AS projectId
`;
/**
 * Query to update Project node status after completion/failure.
 * Params: $projectId, $status, $nodeCount, $edgeCount.
 * Matches only (no merge): a missing project yields zero rows.
 */
export const UPDATE_PROJECT_STATUS_QUERY = `
MATCH (p:Project {projectId: $projectId})
SET p.status = $status,
p.nodeCount = $nodeCount,
p.edgeCount = $edgeCount,
p.updatedAt = datetime()
RETURN p.projectId AS projectId
`;
/**
 * Query to list all projects with status, most recently updated first.
 * No parameters.
 */
export const LIST_PROJECTS_QUERY = `
MATCH (p:Project)
RETURN p.projectId AS projectId, p.name AS name, p.path AS path,
p.status AS status, p.nodeCount AS nodeCount, p.edgeCount AS edgeCount,
p.updatedAt AS updatedAt
ORDER BY p.updatedAt DESC
`;
|
|
139
|
+
/**
 * Resolve a flexible project reference (name, path, or projectId) to a
 * valid projectId, looking it up in Neo4j when needed.
 *
 * Resolution order:
 *   1. Input already in valid projectId format -> returned unchanged.
 *   2. Neo4j lookup by name/path/projectId via FIND_PROJECT_QUERY.
 *   3. Path-shaped input -> deterministic ID generated from the path.
 *   4. Otherwise an error is thrown.
 *
 * @param input - Project name ("backend"), path ("/Users/.../backend"), or projectId
 * @param resolver - Neo4j service or compatible object exposing run()
 * @returns The resolved projectId
 * @throws Error if the project cannot be resolved
 */
export const resolveProjectIdFromInput = async (input, resolver) => {
    if (validateProjectId(input)) {
        return input;
    }
    const rows = await resolver.run(FIND_PROJECT_QUERY, { input });
    const found = rows.length > 0 ? rows[0].projectId : undefined;
    if (found) {
        return found;
    }
    // Path-shaped input (Unix: "/", "./", ".."; Windows: drive letter plus
    // separator) can still be resolved by hashing even when not yet indexed.
    const looksLikePath = input.startsWith('/') ||
        input.startsWith('./') ||
        input.startsWith('..') ||
        /^[a-zA-Z]:[\\/]/.test(input);
    if (looksLikePath) {
        return generateProjectId(input);
    }
    throw new Error(`Project not found: "${input}". Run parse_typescript_project first or use list_projects to see available projects.`);
};
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
 * Retry utilities with exponential backoff
 */
export const DEFAULT_RETRY_OPTIONS = {
    maxRetries: 3,
    baseDelayMs: 1000,
    maxDelayMs: 30000,
    // Retry on rate limits and transient network errors
    shouldRetry: (error) => {
        if (error.status === 429 || error.code === 'ETIMEDOUT') {
            return true;
        }
        const message = error.message;
        return Boolean(message && (message.includes('timeout') || message.includes('ECONNRESET')));
    },
};
/**
 * Execute a function with automatic retry and exponential backoff (with
 * up to one second of random jitter per attempt, capped at maxDelayMs).
 * @param fn The async function to execute
 * @param options Retry configuration, merged over DEFAULT_RETRY_OPTIONS
 * @returns The result of the function
 * @throws The last error when retries are exhausted or the error is non-retryable
 */
export const withRetry = async (fn, options = {}) => {
    const { maxRetries, baseDelayMs, maxDelayMs, shouldRetry } = { ...DEFAULT_RETRY_OPTIONS, ...options };
    let lastError;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
        try {
            return await fn();
        }
        catch (error) {
            lastError = error;
            const exhausted = attempt === maxRetries;
            if (exhausted || !shouldRetry?.(error)) {
                throw error;
            }
            // Exponential backoff with jitter
            const delay = Math.min(baseDelayMs * 2 ** attempt + Math.random() * 1000, maxDelayMs);
            console.warn(`Retry attempt ${attempt + 1}/${maxRetries} after ${Math.round(delay)}ms. Error: ${error.message}`);
            await new Promise((wake) => setTimeout(wake, delay));
        }
    }
    throw lastError;
};
|