code-graph-context 2.4.4 → 2.4.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -559
- package/dist/cli/cli.js +0 -0
- package/package.json +2 -1
- package/dist/core/config/graph-v2.js +0 -1595
- package/dist/core/parsers/typescript-parser-v2.js +0 -590
- package/dist/core/utils/edge-factory.js +0 -37
- package/dist/mcp/handlers/file-change-detection.js +0 -105
- package/dist/mcp/services.js +0 -79
- package/dist/mcp/workers/parse-worker.js +0 -198
- package/dist/mcp/workers/worker.pool.js +0 -54
- package/dist/parsers/cypher-result.parser.js +0 -44
- package/dist/utils/file-utils.js +0 -20
- package/dist/utils/test.js +0 -19
package/dist/mcp/handlers/file-change-detection.js
DELETED

@@ -1,105 +0,0 @@
-/**
- * File Change Detection
- * Shared utilities for detecting changed files for incremental parsing
- */
-import { stat, realpath } from 'fs/promises';
-import { resolve, sep } from 'path';
-import { glob } from 'glob';
-import { EXCLUDE_PATTERNS_GLOB } from '../../constants.js';
-import { QUERIES } from '../../storage/neo4j/neo4j.service.js';
-import { hashFile } from '../../utils/file-utils.js';
-/**
- * Detect which files have changed and need reparsing.
- * Compares current files on disk with indexed files in Neo4j.
- *
- * SECURITY: Validates that all file paths stay within the project directory
- * after symlink resolution to prevent path traversal attacks.
- *
- * @param projectPath - Root path of the project
- * @param neo4jService - Neo4j service instance
- * @param projectId - Project ID for scoping queries
- * @param options - Optional configuration
- * @returns Files that need reparsing and files that were deleted
- */
-export const detectChangedFiles = async (projectPath, neo4jService, projectId, options = {}) => {
-    const { logWarnings = true } = options;
-    // SECURITY: Resolve project path to real path to handle symlinks consistently
-    const realProjectPath = await realpath(projectPath);
-    const relativeFiles = await glob('**/*.{ts,tsx}', { cwd: projectPath, ignore: EXCLUDE_PATTERNS_GLOB });
-    // SECURITY: Validate each file stays within project directory after symlink resolution
-    const validatedFiles = [];
-    for (const relFile of relativeFiles) {
-        const absolutePath = resolve(projectPath, relFile);
-        try {
-            const realFilePath = await realpath(absolutePath);
-            // Check that resolved path is within project
-            if (realFilePath.startsWith(realProjectPath + sep) || realFilePath === realProjectPath) {
-                // Use realFilePath for consistent path matching with Neo4j
-                validatedFiles.push(realFilePath);
-            }
-            else if (logWarnings) {
-                console.warn(`SECURITY: Skipping file outside project directory: ${relFile}`);
-            }
-        }
-        catch {
-            // File may have been deleted between glob and realpath - skip it
-            if (logWarnings) {
-                console.warn(`File no longer accessible: ${relFile}`);
-            }
-        }
-    }
-    const currentFiles = new Set(validatedFiles);
-    // Get indexed files from Neo4j
-    const queryResult = await neo4jService.run(QUERIES.GET_SOURCE_FILE_TRACKING_INFO, { projectId });
-    const indexedFiles = queryResult;
-    const indexedMap = new Map(indexedFiles.map((f) => [f.filePath, f]));
-    const filesToReparse = [];
-    const filesToDelete = [];
-    // Check each current file against indexed state
-    for (const filePath of currentFiles) {
-        const indexed = indexedMap.get(filePath);
-        if (!indexed) {
-            // New file - needs parsing
-            filesToReparse.push(filePath);
-            continue;
-        }
-        try {
-            const fileStats = await stat(filePath);
-            const currentHash = await hashFile(filePath);
-            // Only skip if mtime, size, AND hash all match (correctness over optimization)
-            if (fileStats.mtimeMs === indexed.mtime &&
-                fileStats.size === indexed.size &&
-                currentHash === indexed.contentHash) {
-                continue;
-            }
-            // Any mismatch means file changed
-            filesToReparse.push(filePath);
-        }
-        catch (error) {
-            const nodeError = error;
-            if (nodeError.code === 'ENOENT') {
-                // File was deleted between glob and stat - will be caught in deletion logic below
-                if (logWarnings) {
-                    console.warn(`File deleted between glob and stat: ${filePath}`);
-                }
-            }
-            else if (nodeError.code === 'EACCES') {
-                // Permission denied - assume changed to be safe
-                if (logWarnings) {
-                    console.warn(`Permission denied reading file: ${filePath}`);
-                }
-                filesToReparse.push(filePath);
-            }
-            else {
-                throw error;
-            }
-        }
-    }
-    // Find deleted files (indexed but no longer on disk)
-    for (const indexedPath of indexedMap.keys()) {
-        if (!currentFiles.has(indexedPath)) {
-            filesToDelete.push(indexedPath);
-        }
-    }
-    return { filesToReparse, filesToDelete };
-};
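For orientation, a minimal sketch of how the removed detectChangedFiles helper could be driven (hypothetical, not part of the package; the project path and ID are placeholders, and Neo4jService/close() are used only as they appear elsewhere in this diff):

import { Neo4jService } from './dist/storage/neo4j/neo4j.service.js';
import { detectChangedFiles } from './dist/mcp/handlers/file-change-detection.js';

const neo4jService = new Neo4jService();
try {
    // compares on-disk *.ts/*.tsx files against the indexed state in Neo4j
    const { filesToReparse, filesToDelete } = await detectChangedFiles(
        '/path/to/project', neo4jService, 'my-project-id', { logWarnings: false });
    console.log(`reparse: ${filesToReparse.length}, delete: ${filesToDelete.length}`);
}
finally {
    await neo4jService.close(); // close() as used by parse-worker.js below
}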
package/dist/mcp/services.js
DELETED

@@ -1,79 +0,0 @@
-/**
- * Service Initialization
- * Handles initialization of external services like Neo4j schema and OpenAI assistant
- */
-import fs from 'fs/promises';
-import { join } from 'path';
-import { Neo4jService, QUERIES } from '../storage/neo4j/neo4j.service.js';
-import { FILE_PATHS, LOG_CONFIG } from './constants.js';
-import { initializeNaturalLanguageService } from './tools/natural-language-to-cypher.tool.js';
-import { debugLog } from './utils.js';
-/**
- * Initialize all external services required by the MCP server
- */
-export const initializeServices = async () => {
-    await Promise.all([initializeNeo4jSchema(), initializeNaturalLanguageService()]);
-};
-/**
- * Dynamically discover schema from the actual graph contents.
- * This is framework-agnostic - it discovers what's actually in the graph.
- */
-const discoverSchemaFromGraph = async (neo4jService) => {
-    try {
-        // Discover actual node types, relationships, and patterns from the graph
-        const [nodeTypes, relationshipTypes, semanticTypes, commonPatterns] = await Promise.all([
-            neo4jService.run(QUERIES.DISCOVER_NODE_TYPES),
-            neo4jService.run(QUERIES.DISCOVER_RELATIONSHIP_TYPES),
-            neo4jService.run(QUERIES.DISCOVER_SEMANTIC_TYPES),
-            neo4jService.run(QUERIES.DISCOVER_COMMON_PATTERNS),
-        ]);
-        return {
-            nodeTypes: nodeTypes.map((r) => ({
-                label: r.label,
-                count: typeof r.nodeCount === 'object' ? r.nodeCount.toNumber() : r.nodeCount,
-                properties: r.sampleProperties ?? [],
-            })),
-            relationshipTypes: relationshipTypes.map((r) => ({
-                type: r.relationshipType,
-                count: typeof r.relCount === 'object' ? r.relCount.toNumber() : r.relCount,
-                connections: r.connections ?? [],
-            })),
-            semanticTypes: semanticTypes.map((r) => ({
-                type: r.semanticType,
-                count: typeof r.count === 'object' ? r.count.toNumber() : r.count,
-            })),
-            commonPatterns: commonPatterns.map((r) => ({
-                from: r.fromType,
-                relationship: r.relType,
-                to: r.toType,
-                count: typeof r.count === 'object' ? r.count.toNumber() : r.count,
-            })),
-        };
-    }
-    catch (error) {
-        await debugLog('Failed to discover schema from graph', error);
-        return null;
-    }
-};
-/**
- * Initialize Neo4j schema by fetching from APOC and discovering actual graph structure
- */
-const initializeNeo4jSchema = async () => {
-    try {
-        const neo4jService = new Neo4jService();
-        const rawSchema = await neo4jService.getSchema();
-        // Dynamically discover what's actually in the graph
-        const discoveredSchema = await discoverSchemaFromGraph(neo4jService);
-        const schema = {
-            rawSchema,
-            discoveredSchema,
-        };
-        const schemaPath = join(process.cwd(), FILE_PATHS.schemaOutput);
-        await fs.writeFile(schemaPath, JSON.stringify(schema, null, LOG_CONFIG.jsonIndentation));
-        await debugLog('Neo4j schema cached successfully', { schemaPath });
-    }
-    catch (error) {
-        await debugLog('Failed to initialize Neo4j schema', error);
-        // Don't throw - service can still function without cached schema
-    }
-};
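The startup wiring for this module was a single call; a hedged sketch, assuming an ESM entry point with top-level await:

import { initializeServices } from './dist/mcp/services.js';

// initializeNeo4jSchema swallows its own failures (see above), so this should
// only reject if the natural-language service fails to initialize
await initializeServices();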
package/dist/mcp/workers/parse-worker.js
DELETED

@@ -1,198 +0,0 @@
-/**
- * Parse Worker
- * Runs TypeScript parsing in a separate thread to avoid blocking the MCP server
- */
-import { dirname, join } from 'path';
-import { fileURLToPath } from 'url';
-import { parentPort, workerData } from 'worker_threads';
-// Load environment variables in worker thread
-import dotenv from 'dotenv';
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = dirname(__filename);
-dotenv.config({ path: join(__dirname, '..', '..', '..', '.env') });
-import { EmbeddingsService } from '../../core/embeddings/embeddings.service.js';
-import { ParserFactory } from '../../core/parsers/parser-factory.js';
-import { WorkspaceParser } from '../../core/parsers/workspace-parser.js';
-import { getProjectName, UPSERT_PROJECT_QUERY, UPDATE_PROJECT_STATUS_QUERY } from '../../core/utils/project-id.js';
-import { WorkspaceDetector } from '../../core/workspace/index.js';
-import { Neo4jService, QUERIES } from '../../storage/neo4j/neo4j.service.js';
-import { debugLog } from '../../core/utils/file-utils.js';
-import { GraphGeneratorHandler } from '../handlers/graph-generator.handler.js';
-import { StreamingImportHandler } from '../handlers/streaming-import.handler.js';
-const sendMessage = (msg) => {
-    parentPort?.postMessage(msg);
-};
-const runParser = async () => {
-    const config = workerData;
-    const startTime = Date.now();
-    // Declare outside try block so it's available in catch/finally
-    let resolvedProjectId = config.projectId;
-    let neo4jService = null;
-    try {
-        sendMessage({
-            type: 'progress',
-            data: {
-                phase: 'discovery',
-                filesProcessed: 0,
-                filesTotal: 0,
-                nodesImported: 0,
-                edgesImported: 0,
-                currentChunk: 0,
-                totalChunks: 0,
-            },
-        });
-        neo4jService = new Neo4jService();
-        const embeddingsService = new EmbeddingsService();
-        const graphGeneratorHandler = new GraphGeneratorHandler(neo4jService, embeddingsService);
-        // Use lazy loading to avoid OOM on large projects
-        const lazyLoad = true;
-        // Auto-detect workspace (Turborepo, pnpm, yarn, npm workspaces)
-        const workspaceDetector = new WorkspaceDetector();
-        await debugLog('Detecting workspace', { projectPath: config.projectPath });
-        const workspaceConfig = await workspaceDetector.detect(config.projectPath);
-        await debugLog('Workspace detection result', {
-            type: workspaceConfig.type,
-            rootPath: workspaceConfig.rootPath,
-            packageCount: workspaceConfig.packages.length,
-            packages: workspaceConfig.packages.map((p) => p.name),
-        });
-        // Use WorkspaceParser for monorepos, TypeScriptParser for single projects
-        let parser;
-        if (workspaceConfig.type !== 'single' && workspaceConfig.packages.length > 1) {
-            await debugLog('Using WorkspaceParser', {
-                type: workspaceConfig.type,
-                packageCount: workspaceConfig.packages.length,
-            });
-            // for workspaces default to auto for now
-            // TODO: allow worker config to specify projectType array to support multi-framework monorepos
-            parser = new WorkspaceParser(workspaceConfig, config.projectId, lazyLoad, 'auto');
-            resolvedProjectId = parser.getProjectId();
-        }
-        else {
-            await debugLog('Using single project mode', {
-                type: workspaceConfig.type,
-                packageCount: workspaceConfig.packages.length,
-            });
-            parser =
-                config.projectType === 'auto'
-                    ? await ParserFactory.createParserWithAutoDetection(config.projectPath, config.tsconfigPath, config.projectId, lazyLoad)
-                    : ParserFactory.createParser({
-                        workspacePath: config.projectPath,
-                        tsConfigPath: config.tsconfigPath,
-                        projectType: config.projectType,
-                        projectId: config.projectId,
-                        lazyLoad,
-                    });
-            resolvedProjectId = parser.getProjectId();
-        }
-        // Use async file discovery (works in lazy mode)
-        const sourceFiles = await parser.discoverSourceFiles();
-        const totalFiles = sourceFiles.length;
-        sendMessage({
-            type: 'progress',
-            data: {
-                phase: 'parsing',
-                filesProcessed: 0,
-                filesTotal: totalFiles,
-                nodesImported: 0,
-                edgesImported: 0,
-                currentChunk: 0,
-                totalChunks: Math.ceil(totalFiles / config.chunkSize),
-            },
-        });
-        // Clear existing project data first
-        graphGeneratorHandler.setProjectId(resolvedProjectId);
-        await neo4jService.run(QUERIES.CLEAR_PROJECT, { projectId: resolvedProjectId });
-        // Create/update Project node with 'parsing' status
-        const projectName = await getProjectName(config.projectPath);
-        await neo4jService.run(UPSERT_PROJECT_QUERY, {
-            projectId: resolvedProjectId,
-            name: projectName,
-            path: config.projectPath,
-            status: 'parsing',
-        });
-        await debugLog('Project node created', { projectId: resolvedProjectId, name: projectName });
-        const streamingHandler = new StreamingImportHandler(graphGeneratorHandler);
-        const result = await streamingHandler.importProjectStreaming(parser, {
-            chunkSize: config.chunkSize > 0 ? config.chunkSize : 100,
-            projectId: resolvedProjectId,
-            onProgress: async (progress) => {
-                sendMessage({
-                    type: 'progress',
-                    data: {
-                        phase: progress.phase,
-                        filesProcessed: progress.current,
-                        filesTotal: progress.total,
-                        nodesImported: progress.details?.nodesCreated ?? 0,
-                        edgesImported: progress.details?.edgesCreated ?? 0,
-                        currentChunk: progress.details?.chunkIndex ?? 0,
-                        totalChunks: progress.details?.totalChunks ?? 0,
-                    },
-                });
-            },
-        });
-        // Update Project node with 'complete' status and final counts
-        await neo4jService.run(UPDATE_PROJECT_STATUS_QUERY, {
-            projectId: resolvedProjectId,
-            status: 'complete',
-            nodeCount: result.nodesImported,
-            edgeCount: result.edgesImported,
-        });
-        await debugLog('Project node updated', {
-            projectId: resolvedProjectId,
-            status: 'complete',
-            nodeCount: result.nodesImported,
-            edgeCount: result.edgesImported,
-        });
-        sendMessage({
-            type: 'complete',
-            data: {
-                nodesImported: result.nodesImported,
-                edgesImported: result.edgesImported,
-                elapsedMs: Date.now() - startTime,
-            },
-        });
-    }
-    catch (error) {
-        // Try to update Project node with 'failed' status
-        try {
-            // Use existing service if available, otherwise create temporary one
-            const serviceForUpdate = neo4jService ?? new Neo4jService();
-            await serviceForUpdate.run(UPDATE_PROJECT_STATUS_QUERY, {
-                projectId: resolvedProjectId, // Use resolved ID, not config.projectId
-                status: 'failed',
-                nodeCount: 0,
-                edgeCount: 0,
-            });
-            // Close temporary service if we created one
-            if (!neo4jService) {
-                await serviceForUpdate.close();
-            }
-        }
-        catch {
-            // Ignore errors updating project status on failure
-        }
-        sendMessage({
-            type: 'error',
-            error: error.message ?? String(error),
-        });
-    }
-    finally {
-        // Always close the Neo4j connection to prevent resource leaks
-        if (neo4jService) {
-            try {
-                await neo4jService.close();
-            }
-            catch {
-                // Ignore cleanup errors
-            }
-        }
-    }
-};
-// Run the parser
-runParser().catch((err) => {
-    sendMessage({
-        type: 'error',
-        error: err.message ?? String(err),
-    });
-});
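A hypothetical host-side sketch for spawning the removed worker (not part of the package); the workerData field names mirror the config keys the worker reads above:

import { Worker } from 'worker_threads';

const worker = new Worker(new URL('./dist/mcp/workers/parse-worker.js', import.meta.url), {
    workerData: {
        projectPath: '/path/to/project',
        projectId: 'my-project-id',
        projectType: 'auto',
        tsconfigPath: '/path/to/project/tsconfig.json',
        chunkSize: 100,
    },
});
worker.on('message', (msg) => {
    // message shapes match the sendMessage() calls above
    if (msg.type === 'progress') console.log(`${msg.data.phase}: ${msg.data.filesProcessed}/${msg.data.filesTotal}`);
    if (msg.type === 'complete') console.log(`imported ${msg.data.nodesImported} nodes in ${msg.data.elapsedMs}ms`);
    if (msg.type === 'error') console.error(msg.error);
});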
package/dist/mcp/workers/worker.pool.js
DELETED

@@ -1,54 +0,0 @@
-import { Worker } from 'worker_threads';
-export class ParallelPool {
-    workerPath;
-    numWorkers;
-    constructor(workerPath, numWorkers = 2) {
-        this.workerPath = workerPath;
-        this.numWorkers = numWorkers;
-    }
-    async run(items) {
-        const start = Date.now();
-        const indexBuffer = new SharedArrayBuffer(4);
-        const sharedIndex = new Int32Array(indexBuffer);
-        const workerPromises = Array.from({ length: this.numWorkers }, (_, id) => this.spawnWorker(id, items, indexBuffer));
-        const workerResults = await Promise.all(workerPromises);
-        const results = [];
-        const workerTaskCounts = [];
-        for (const { results: map, count } of workerResults) {
-            workerTaskCounts.push(count);
-            for (const [i, result] of map) {
-                results[i] = result;
-            }
-        }
-        return {
-            results,
-            stats: {
-                workerTaskCounts,
-                totalTasks: items.length,
-                totalTimeMs: Date.now() - start,
-            },
-        };
-    }
-    spawnWorker(workerId, items, indexBuffer) {
-        return new Promise((resolve, reject) => {
-            const worker = new Worker(this.workerPath, {
-                workerData: {
-                    items,
-                    indexBuffer,
-                    total: items.length,
-                    workerId,
-                },
-            });
-            worker.on('message', (result) => {
-                worker.terminate();
-                resolve(result);
-            });
-            worker.on('error', reject);
-            worker.on('exit', (code) => {
-                if (code !== 0) {
-                    reject(new Error(`Worker ${workerId} exited with code ${code}`));
-                }
-            });
-        });
-    }
-}
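ParallelPool allocates the shared counter but leaves index-claiming to its worker script. A sketch of a compatible worker (hypothetical; the uppercase "work" is a placeholder): it claims indices atomically and posts a single { results, count } message, the exact shape run() destructures above:

import { parentPort, workerData } from 'worker_threads';

const { items, indexBuffer, total } = workerData;
const sharedIndex = new Int32Array(indexBuffer);
const results = new Map();
let count = 0;
for (;;) {
    const i = Atomics.add(sharedIndex, 0, 1); // atomically claim the next index
    if (i >= total) break;
    results.set(i, String(items[i]).toUpperCase()); // placeholder work
    count += 1;
}
parentPort?.postMessage({ results, count });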
package/dist/parsers/cypher-result.parser.js
DELETED

@@ -1,44 +0,0 @@
-export class CypherResultParser {
-    static parseCypherResult(result) {
-        if (!result || result.length === 0) {
-            return 'No results found.';
-        }
-        const parsedResults = {};
-        // Parse nodes
-        if (result[0].nodes) {
-            parsedResults.nodes = result[0].nodes.map((node) => ({
-                identity: node.identity.toString(),
-                labels: node.labels,
-                properties: node.properties,
-            }));
-        }
-        // Parse relationships
-        if (result[0].relationships) {
-            parsedResults.relationships = result[0].relationships.map((rel) => ({
-                identity: rel.identity.toString(),
-                type: rel.type,
-                properties: rel.properties,
-                start: rel.start.toString(),
-                end: rel.end.toString(),
-            }));
-        }
-        // Parse paths
-        if (result[0].paths) {
-            parsedResults.paths = result[0].paths.map((path) => ({
-                start: path.start,
-                end: path.end,
-                segments: path.segments,
-                length: path.length,
-            }));
-        }
-        // Parse data
-        if (result[0].data) {
-            parsedResults.data = result[0].data;
-        }
-        // Parse summary
-        if (result[0].summary) {
-            parsedResults.summary = result[0].summary;
-        }
-        return JSON.stringify(parsedResults, null, 2);
-    }
-}
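A small usage sketch of the removed parser (hypothetical; the record shape follows the keys handled above):

import { CypherResultParser } from './dist/parsers/cypher-result.parser.js';

const text = CypherResultParser.parseCypherResult([
    { data: [{ name: 'UserService', filePath: 'src/user.service.ts' }] },
]);
console.log(text); // pretty-printed JSON with a "data" key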
package/dist/utils/file-utils.js
DELETED

@@ -1,20 +0,0 @@
-import * as crypto from 'crypto';
-import * as fs from 'fs/promises';
-import * as path from 'path';
-const DEBUG_LOG_FILE = 'debug-search.log';
-const LOG_SEPARATOR = '---';
-const JSON_INDENT = 2;
-export const hashFile = async (filePath) => {
-    const content = await fs.readFile(filePath);
-    return crypto.createHash('sha256').update(content).digest('hex');
-};
-export const debugLog = async (message, data) => {
-    const timestamp = new Date().toISOString();
-    const logEntry = `[${timestamp}] ${message}\n${data ? JSON.stringify(data, null, JSON_INDENT) : ''}\n${LOG_SEPARATOR}\n`;
-    try {
-        await fs.appendFile(path.join(process.cwd(), DEBUG_LOG_FILE), logEntry);
-    }
-    catch (error) {
-        console.error('Failed to write debug log:', error);
-    }
-};
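A quick sketch of the two removed helpers (hypothetical usage, ESM with top-level await):

import { hashFile, debugLog } from './dist/utils/file-utils.js';

const digest = await hashFile('./package.json'); // hex-encoded sha256 digest
await debugLog('hashed package.json', { digest }); // appends to ./debug-search.log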
package/dist/utils/test.js
DELETED

@@ -1,19 +0,0 @@
-import 'reflect-metadata';
-import { writeFileSync } from 'node:fs';
-import fs from 'node:fs';
-import os from 'node:os';
-import path from 'node:path';
-import { NESTJS_FRAMEWORK_SCHEMA } from '../core/config/graph-v2';
-import { TypeScriptParser } from '../core/parsers/typescript-parser-v2';
-const workspace = path.join(os.homedir(), 'nestjs/iluvcoffee');
-const tsconfig = path.join(workspace, 'tsconfig.json'); // or tsconfig.build.json etc.
-(async () => {
-    console.log({ workspace, tsconfig, exists: fs.existsSync(tsconfig) });
-    const parser = new TypeScriptParser(workspace, tsconfig, undefined, [NESTJS_FRAMEWORK_SCHEMA]);
-    // 👇 pull in every *.ts file under the repo
-    parser['project'].addSourceFilesAtPaths(path.join(workspace, 'src/**/*.ts'));
-    const { nodes, edges } = await parser.parseWorkspace(); // runs fine now
-    const { nodes: cleanNodes, edges: cleanEdges } = parser.exportToJson();
-    console.log(`Parsed ${cleanNodes.length} nodes / ${cleanEdges.length} edges`);
-    writeFileSync('em-backend-graph.json', JSON.stringify({ nodes: cleanNodes, edges: cleanEdges }, null, 2));
-})();