fileflows 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -11
- package/cli.mjs +7 -0
- package/config/jest-require-polyfill.cjs +20 -0
- package/config/jest-setup.mjs +28 -0
- package/config/jest.config.mjs +72 -0
- package/config/localVars.js +2 -109
- package/dist/cli.js +76 -0
- package/dist/config/localVars.js +76 -0
- package/dist/index.js +7 -0
- package/dist/lib/dataFlowGrouper.js +137 -0
- package/dist/lib/dependencyExtractor.js +46 -0
- package/dist/lib/fileClassifier.js +78 -0
- package/dist/lib/fileFlowsGenerator.js +301 -0
- package/dist/lib/fileIO.js +35 -0
- package/dist/lib/graphUtils.js +89 -0
- package/dist/lib/index.js +50 -0
- package/dist/lib/jsParser.js +131 -0
- package/dist/lib/otherFileParser.js +131 -0
- package/index.mjs +2 -0
- package/package.json +31 -14
- package/scripts/broadcast.sh +26 -0
- package/scripts/clean-bun-cache.mjs +14 -0
- package/scripts/clean-dist.mjs +7 -0
- package/scripts/ensure-runner.mjs +9 -0
- package/scripts/kill-agent.sh +24 -0
- package/scripts/kill-all-agents.sh +31 -0
- package/scripts/list-agents.sh +16 -0
- package/scripts/send-to-agent.sh +28 -0
- package/scripts/spawn-agent.sh +62 -0
- package/cli.js +0 -81
- package/config/localVars.test.js +0 -37
- package/index.js +0 -13
- package/lib/SUMMARY.md +0 -53
- package/lib/dataFlowGrouper.js +0 -150
- package/lib/dataFlowGrouper.test.js +0 -17
- package/lib/dependencyExtractor.js +0 -70
- package/lib/dependencyExtractor.test.js +0 -9
- package/lib/fileClassifier.js +0 -38
- package/lib/fileClassifier.test.js +0 -9
- package/lib/fileFlowsGenerator.js +0 -156
- package/lib/fileFlowsGenerator.test.js +0 -17
- package/lib/fileIO.js +0 -60
- package/lib/fileIO.test.js +0 -13
- package/lib/graphUtils.js +0 -139
- package/lib/graphUtils.test.js +0 -25
- package/lib/index.js +0 -29
- package/lib/index.test.js +0 -53
- package/lib/jsParser.js +0 -132
- package/lib/jsParser.test.js +0 -13
- package/lib/otherFileParser.js +0 -103
- package/lib/otherFileParser.test.js +0 -9
package/lib/dataFlowGrouper.js
DELETED
@@ -1,150 +0,0 @@
-/**
- * Data flow grouping functionality
- * Groups files by actual data flow relationships (imports/exports)
- * @param {Array} fileList - List of files to analyze
- * @param {string} rootDir - Root directory path
- * @returns {Promise<Map>} Map of grouped files by data flow relationships
- */
-// 🚩AI: CORE_DATA_FLOW_ANALYSIS_ALGORITHM
-async function groupByDataFlow(fileList, rootDir) {
-  const fs = require(`fs`);
-  const path = require(`path`);
-  const localVars = require(`../config/localVars`);
-  const parseJSFile = require(`./jsParser`);
-  const parseOtherFile = require(`./otherFileParser`);
-  const classifyFile = require(`./fileClassifier`);
-  const extractDependencies = require(`./dependencyExtractor`);
-
-  const dependencyGraph = new Map();
-  const fileMetadata = new Map();
-
-  // Build dependency graph by analyzing imports/exports
-  for (const filePath of fileList) {
-    const fullPath = path.join(rootDir, filePath);
-    const ext = path.extname(filePath).slice(1);
-
-    try {
-      const content = fs.readFileSync(fullPath, `utf8`);
-      let metadata = {};
-
-      if (localVars.CODE_EXTENSIONS.includes(ext)) {
-        metadata = parseJSFile(content, filePath);
-      } else {
-        metadata = parseOtherFile(content, filePath, ext);
-      }
-
-      fileMetadata.set(filePath, {
-        ...metadata,
-        type: classifyFile(filePath, ext),
-        dependencies: extractDependencies(metadata.Imports || [], filePath, fileList)
-      });
-
-      dependencyGraph.set(filePath, metadata.Imports || []);
-    } catch (error) {
-      // File read error - skip this file
-      dependencyGraph.set(filePath, []);
-    }
-  }
-
-  // Group files by connected components in dependency graph
-  const visited = new Set();
-  const groups = new Map();
-  let groupCounter = 1;
-
-  function dfs(file, groupName, group) {
-    if (visited.has(file)) return;
-    visited.add(file);
-    group.push(file);
-
-    // Visit all files that this file imports
-    const deps = fileMetadata.get(file)?.dependencies || [];
-    for (const dep of deps) {
-      if (!visited.has(dep)) {
-        dfs(dep, groupName, group);
-      }
-    }
-
-    // Visit all files that import this file
-    for (const [otherFile, otherDeps] of fileMetadata) {
-      if (!visited.has(otherFile) && otherDeps.dependencies?.includes(file)) {
-        dfs(otherFile, groupName, group);
-      }
-    }
-  }
-
-  // Create connected components
-  for (const file of fileList) {
-    if (!visited.has(file)) {
-      const group = [];
-      const groupName = `Group-${groupCounter++}`;
-      dfs(file, groupName, group);
-
-      if (group.length > 0) {
-        groups.set(groupName, group);
-      }
-    }
-  }
-
-  // SECONDARY: Use filename similarity for files without clear connections
-  const ungroupedFiles = fileList.filter(f => !visited.has(f));
-  if (ungroupedFiles.length > 0) {
-    const similarityGroups = groupBySimilarity(ungroupedFiles);
-    for (const [name, files] of similarityGroups) {
-      groups.set(`Similarity-${name}`, files);
-    }
-  }
-
-  // Convert Map to Array format expected by tests
-  const groupArray = [];
-  for (const [groupName, files] of groups) {
-    groupArray.push({
-      name: groupName,
-      files: files,
-      metadata: files.map(f => fileMetadata.get(f)).filter(Boolean)
-    });
-  }
-
-  return groupArray;
-}
-
-/**
- * Groups files by filename similarity when no data flow connections exist
- * @param {Array} files - Array of file paths
- * @returns {Map} Map of similarity-based groups
- */
-function groupBySimilarity(files) {
-  const path = require(`path`);
-  const groups = new Map();
-
-  for (const file of files) {
-    const basename = path.basename(file, path.extname(file));
-    const parts = basename.split(/[-_.]/).filter(p => p.length > 2);
-
-    let bestGroup = null;
-    let maxSimilarity = 0;
-
-    for (const [groupName, groupFiles] of groups) {
-      const groupBasename = path.basename(groupFiles[0], path.extname(groupFiles[0]));
-      const groupParts = groupBasename.split(/[-_.]/).filter(p => p.length > 2);
-
-      const commonParts = parts.filter(p => groupParts.includes(p));
-      const similarity = commonParts.length / Math.max(parts.length, groupParts.length);
-
-      if (similarity > maxSimilarity && similarity > 0.3) {
-        maxSimilarity = similarity;
-        bestGroup = groupName;
-      }
-    }
-
-    if (bestGroup) {
-      groups.get(bestGroup).push(file);
-    } else {
-      const key = parts.length > 0 ? parts[0] : basename;
-      groups.set(key, [file]);
-    }
-  }
-
-  return groups;
-}
-
-module.exports = groupByDataFlow;

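For orientation, here is a minimal sketch of how the removed module could have been driven in 1.0.3, assuming it is loaded from lib/dataFlowGrouper.js; the file list and root directory are illustrative, and the group shape follows the listing above:

    // Hypothetical caller for the deleted lib/dataFlowGrouper.js (illustrative only)
    const groupByDataFlow = require('./lib/dataFlowGrouper');

    async function main() {
      // Paths are relative to the root directory passed as the second argument
      const groups = await groupByDataFlow(['src/index.js', 'src/utils.js'], process.cwd());
      for (const group of groups) {
        console.log(group.name, group.files, group.metadata.length);
      }
    }

    main().catch(console.error);
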
package/lib/dataFlowGrouper.test.js
DELETED
@@ -1,17 +0,0 @@
-// Auto-generated unit test for dataFlowGrouper.js - optimized for speed
-const mod = require('./dataFlowGrouper.js');
-
-describe('dataFlowGrouper.js', () => {
-  test('groupByDataFlow works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.groupByDataFlow).toBeDefined();
-  });
-  test('dfs works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.dfs).toBeDefined();
-  });
-  test('groupBySimilarity works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.groupBySimilarity).toBeDefined();
-  });
-});

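The placeholder assertions above pass for any module shape, since `typeof` always yields a defined string. A behavioral test, sketched here purely as an illustration (it appears in neither version), could instead assert the documented return shape:

    // Illustrative Jest test for the deleted dataFlowGrouper module (hypothetical)
    const groupByDataFlow = require('./dataFlowGrouper.js');

    describe('dataFlowGrouper.js', () => {
      test('returns an array of named groups for an empty file list', async () => {
        const groups = await groupByDataFlow([], '.');
        expect(Array.isArray(groups)).toBe(true);
        expect(groups).toHaveLength(0);
      });
    });
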
package/lib/dependencyExtractor.js
DELETED
@@ -1,70 +0,0 @@
-/**
- * Dependency extraction functionality
- * Extracts and resolves file dependencies from import statements
- * @param {Array} imports - Array of import statements
- * @param {string} currentFile - Current file path
- * @param {Array} fileList - List of all files in the project
- * @returns {Array} Array of resolved dependencies
- */
-function extractDependencies(imports, currentFile, fileList) {
-  const path = require(`path`);
-  const dependencies = [];
-
-  for (const imp of imports) {
-    if (imp.startsWith(`.`)) {
-      // Relative import - try to resolve to actual file
-      const basePath = path.dirname(currentFile);
-      let resolvedPath = path.normalize(path.join(basePath, imp));
-
-      // Convert to forward slashes for cross-platform compatibility
-      resolvedPath = resolvedPath.replace(/\\/g, '/');
-
-      // Handle different resolution scenarios:
-      // If resolvedPath starts with project structure, normalize it
-      // Remove src/ prefix if it exists and the target doesn't include it
-      const pathParts = resolvedPath.split('/');
-
-      // Try original resolved path first, then variations
-      const possibleBasePaths = [
-        resolvedPath,
-        // Remove 'src/' prefix if present
-        pathParts[0] === 'src' ? pathParts.slice(1).join('/') : null,
-        // Handle cases where path goes up from src/
-        resolvedPath.replace(/^src\//, '')
-      ].filter(p => p && p !== resolvedPath); // Remove nulls and duplicates
-
-      const allPossiblePaths = [resolvedPath, ...possibleBasePaths];
-
-      // Try to find matching file with common extensions for each base path
-      for (const basePath of allPossiblePaths) {
-        if (!basePath) continue;
-
-        const possiblePaths = [
-          basePath,
-          basePath + `.js`,
-          basePath + `.ts`,
-          basePath + `.jsx`,
-          basePath + `.tsx`,
-          basePath + `/index.js`,
-          basePath + `/index.ts`
-        ];
-
-        for (const possiblePath of possiblePaths) {
-          if (fileList.includes(possiblePath)) {
-            dependencies.push(possiblePath);
-            break;
-          }
-        }
-
-        // If we found a match, stop looking
-        if (dependencies.some(dep => allPossiblePaths.some(base => dep.startsWith(base)))) {
-          break;
-        }
-      }
-    }
-  }
-
-  return dependencies;
-}
-
-module.exports = extractDependencies;

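A short sketch of the extractor's resolution behavior, based on the listing above; the file names are made up for illustration:

    // Hypothetical call into the deleted lib/dependencyExtractor.js (illustrative only)
    const extractDependencies = require('./lib/dependencyExtractor');

    const fileList = ['src/app.js', 'src/utils/helpers.js'];
    const deps = extractDependencies(['./utils/helpers', 'react'], 'src/app.js', fileList);
    // Relative imports resolve against fileList; bare specifiers like 'react' are ignored
    console.log(deps); // expected: ['src/utils/helpers.js']
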
package/lib/dependencyExtractor.test.js
DELETED
@@ -1,9 +0,0 @@
-// Auto-generated unit test for dependencyExtractor.js - optimized for speed
-const mod = require('./dependencyExtractor.js');
-
-describe('dependencyExtractor.js', () => {
-  test('extractDependencies works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.extractDependencies).toBeDefined();
-  });
-});

package/lib/fileClassifier.js
DELETED
@@ -1,38 +0,0 @@
-/**
- * File classification functionality
- * Classifies files based on their path and extension
- * @param {string} filePath - The file path to classify
- * @param {string} ext - The file extension
- * @returns {string} Classification of the file type
- */
-function classifyFile(filePath, ext) {
-  const localVars = require(`../config/localVars`);
-
-  if (localVars.CODE_EXTENSIONS.includes(ext)) {
-    // TSX files are specifically React/UI components (check first)
-    if (ext === `tsx`) return `UI Component`;
-    if (filePath.includes(`test`) || filePath.includes(`spec`)) return `Test File`;
-    if (filePath.includes(`component`) || filePath.includes(`Component`)) return `UI Component`;
-    if (filePath.includes(`api`) || filePath.includes(`route`)) return `API/Route`;
-    if (filePath.includes(`util`) || filePath.includes(`helper`)) return `Utility`;
-    if (filePath.includes(`config`) || filePath.includes(`setting`)) return `Configuration`;
-    if (filePath.includes(`model`) || filePath.includes(`schema`)) return `Data Model`;
-    return `Code File`;
-  }
-
-  switch (ext) {
-    case `json`: return `Configuration/Data`;
-    case `md`: return `Documentation`;
-    case `yml`:
-    case `yaml`: return `Configuration`;
-    case `env`: return `Environment`;
-    case `html`: return `Template/View`;
-    case `css`:
-    case `scss`: return `Stylesheet`;
-    case `sh`: return `Script`;
-    case `graphql`: return `Schema`;
-    default: return `Other`;
-  }
-}
-
-module.exports = classifyFile;

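A quick sketch of how the classifier could be exercised; the expected labels for code files depend on the CODE_EXTENSIONS list in config/localVars, so the values shown in comments are assumptions:

    // Hypothetical calls into the deleted lib/fileClassifier.js (illustrative only)
    const classifyFile = require('./lib/fileClassifier');

    console.log(classifyFile('src/components/Button.tsx', 'tsx')); // 'UI Component' if 'tsx' is a code extension
    console.log(classifyFile('lib/fileIO.test.js', 'js'));         // 'Test File' if 'js' is a code extension
    console.log(classifyFile('README.md', 'md'));                  // 'Documentation' if 'md' is not a code extension
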
package/lib/fileClassifier.test.js
DELETED
@@ -1,9 +0,0 @@
-// Auto-generated unit test for fileClassifier.js - optimized for speed
-const mod = require('./fileClassifier.js');
-
-describe('fileClassifier.js', () => {
-  test('classifyFile works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.classifyFile).toBeDefined();
-  });
-});

package/lib/fileFlowsGenerator.js
DELETED
@@ -1,156 +0,0 @@
-/**
- * Main file flows generation functionality
- * Generates FILE_FLOWS.md documentation showing data flow relationships
- * @param {string} rootDir - Root directory to analyze (default: '.')
- * @param {string} outputFile - Output file path (default: 'FILE_FLOWS.md')
- * @returns {Promise<Object>} Generation result with statistics
- */
-// 🚩AI: ENTRY_POINT_FOR_FILE_FLOWS_GENERATION
-async function generateFileFlows(rootDir = `.`, outputFile = null) {
-  const fs = require(`fs`);
-  const path = require(`path`);
-  const localVars = require(`../config/localVars`);
-  const groupByDataFlow = require(`./dataFlowGrouper`);
-  const classifyFile = require(`./fileClassifier`);
-  const parseJSFile = require(`./jsParser`);
-  const parseOtherFile = require(`./otherFileParser`);
-
-  const ROOT = rootDir;
-  const FILE_OUTPUT = outputFile || localVars.DEFAULT_OUTPUT_FILE;
-
-  // Use efficient file discovery with proper ignore patterns
-  let files = [];
-  try {
-    const { readdirSync, statSync } = require(`fs`);
-    const path = require(`path`);
-
-    function shouldIgnore(relativePath) {
-      // Check against ignore patterns
-      return localVars.IGNORE_PATTERNS.some(pattern => {
-        const cleanPattern = pattern.replace(/\*\*/g, '').replace(/\*/g, '').replace(/\//g, '');
-        return relativePath.includes(cleanPattern);
-      });
-    }
-
-    function findFiles(dir, depth = 0) {
-      // Prevent infinite recursion with depth limit
-      if (depth > 10) return [];
-
-      const fullDirPath = path.join(ROOT, dir);
-      if (!fs.existsSync(fullDirPath)) return [];
-
-      const items = readdirSync(fullDirPath);
-      const foundFiles = [];
-
-      for (const item of items) {
-        const relativePath = dir === '.' ? item : path.join(dir, item).replace(/\\/g, '/');
-
-        // Skip ignored items early
-        if (shouldIgnore(relativePath)) continue;
-
-        try {
-          const fullPath = path.join(fullDirPath, item);
-          const stat = statSync(fullPath);
-
-          if (stat.isDirectory()) {
-            // Recursive directory scan with depth tracking
-            foundFiles.push(...findFiles(relativePath, depth + 1));
-          } else if (stat.isFile()) {
-            const ext = path.extname(item).slice(1);
-            if (localVars.ALL_EXTENSIONS.includes(ext)) {
-              foundFiles.push(relativePath);
-            }
-          }
-        } catch (e) {
-          // Skip files/directories that can't be accessed
-          continue;
-        }
-      }
-      return foundFiles;
-    }
-
-    files = findFiles('.');
-  } catch (error) {
-    require('qerrors').logError(error, 'File flow generation failed during file discovery', {
-      context: 'FILE_DISCOVERY_ERROR',
-      rootDir: ROOT,
-      outputFile: FILE_OUTPUT
-    });
-    return { filesAnalyzed: 0, flowGroups: 0, outputFile: null };
-  }
-
-  // Check if any files were found
-  if (files.length === 0) {
-    require('qerrors').logWarn(`No files found in directory: ${ROOT}`, 'generateFileFlows', {
-      context: 'NO_FILES_FOUND',
-      rootDir: ROOT,
-      searchExtensions: localVars.ALL_EXTENSIONS
-    });
-    return { filesAnalyzed: 0, flowGroups: 0, outputFile: null };
-  }
-
-  // PRIMARY: Group by data flow relationships
-  const grouped = await groupByDataFlow(files, ROOT);
-  const output = [];
-
-  output.push(`# FILE_FLOWS`);
-  output.push(`> Auto-generated. Do not edit directly.`);
-  output.push(`> Files grouped by PRIMARY: actual data flow relationships, SECONDARY: filename similarity.\n`);
-
-  let fileIndex = 1;
-  for (const group of grouped) {
-    const groupName = group.name || `Unknown-Group`;
-    const fileGroup = group.files || [];
-    output.push(`### 🧩 Flow Group: \`${groupName}\`\n`);
-    for (const relPath of fileGroup) {
-      const absPath = path.resolve(ROOT, relPath);
-      const ext = path.extname(relPath).slice(1);
-
-      // 🚩AI: Safe file reading - skip files that don't exist to prevent test failures
-      let content;
-      try {
-        content = fs.readFileSync(absPath, `utf8`);
-      } catch (error) {
-        if (error.code === 'ENOENT') {
-          continue; // Skip files that don't exist
-        }
-        throw error; // Re-throw other errors
-      }
-
-      const section = [`## [${fileIndex++}] \`${relPath}\``];
-
-      const fileType = classifyFile(relPath, ext);
-      section.push(`**Type:** ${fileType}`);
-
-      const metadata = localVars.CODE_EXTENSIONS.includes(ext)
-        ? parseJSFile(content, relPath)
-        : parseOtherFile(content, relPath, ext);
-
-      for (const [label, value] of Object.entries(metadata)) {
-        // 🚩AI: Ensure value is always an array before calling join
-        const arrayValue = Array.isArray(value) ? value : [value];
-        if (arrayValue.length > 0) section.push(`**${label}:** ${arrayValue.join(`, `)}`);
-      }
-
-      section.push(`\n---\n`);
-      output.push(section.join(`\n`));
-    }
-  }
-
-  fs.writeFileSync(FILE_OUTPUT, output.join(`\n`));
-  const result = {
-    filesAnalyzed: fileIndex - 1,
-    flowGroups: grouped.length,
-    outputFile: FILE_OUTPUT
-  };
-
-  require('qerrors').logInfo(`FILE_FLOWS.md generation completed`, 'generateFileFlows', {
-    context: 'GENERATION_SUCCESS',
-    filesAnalyzed: result.filesAnalyzed,
-    flowGroups: result.flowGroups,
-    outputFile: result.outputFile
-  });
-  return result;
-}
-
-module.exports = generateFileFlows;

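A minimal sketch of the 1.0.3 entry point being invoked directly; the output path is illustrative and the return shape matches the listing above:

    // Hypothetical invocation of the deleted lib/fileFlowsGenerator.js (illustrative only)
    const generateFileFlows = require('./lib/fileFlowsGenerator');

    generateFileFlows('.', 'FILE_FLOWS.md')
      .then(({ filesAnalyzed, flowGroups, outputFile }) => {
        console.log(`Analyzed ${filesAnalyzed} files into ${flowGroups} flow groups -> ${outputFile}`);
      })
      .catch(console.error);
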
package/lib/fileFlowsGenerator.test.js
DELETED
@@ -1,17 +0,0 @@
-// Auto-generated unit test for fileFlowsGenerator.js - optimized for speed
-const mod = require('./fileFlowsGenerator.js');
-
-describe('fileFlowsGenerator.js', () => {
-  test('generateFileFlows works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.generateFileFlows).toBeDefined();
-  });
-  test('shouldIgnore works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.shouldIgnore).toBeDefined();
-  });
-  test('findFiles works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.findFiles).toBeDefined();
-  });
-});

package/lib/fileIO.js
DELETED
@@ -1,60 +0,0 @@
-/**
- * File I/O utilities following Single Responsibility Principle
- * Centralized safe file reading with consistent error shapes
- * @param {string} rootDir - Root directory for relative path resolution
- * @param {string} filePath - File path to read
- * @returns {Object} Result object with success flag and content or error
- * @throws {Error} File system errors are caught and returned in error property
- */
-// 🚩AI: CORE_FILE_READING_UTILITIES
-function safeReadFileSync(rootDir, filePath) {
-  const fs = require(`fs`);
-  const path = require(`path`);
-  const qerrors = require(`qerrors`);
-
-  try {
-    const fullPath = path.join(rootDir, filePath);
-    const content = fs.readFileSync(fullPath, `utf8`);
-    return { success: true, content };
-  } catch (error) {
-    require('qerrors').logError(error, `Failed to read file: ${filePath}`, { context: `FILE_READ_ERROR` });
-    return { success: false, error };
-  }
-}
-
-/**
- * Resolves file paths safely with validation
- * @param {string} rootDir - Root directory
- * @param {string} filePath - File path to resolve
- * @returns {Object} Result with success flag and resolved path or error
- * @throws {Error} Path traversal attempts and resolution errors caught and returned
- */
-function safeResolvePath(rootDir, filePath) {
-  const path = require(`path`);
-  const qerrors = require(`qerrors`);
-
-  try {
-    const resolved = path.resolve(rootDir, filePath);
-    const relative = path.relative(rootDir, resolved);
-
-    // Prevent directory traversal attacks
-    if (relative.startsWith(`..`)) {
-      const { createTypedError, ErrorTypes } = require('qerrors');
-      throw createTypedError(
-        `Path traversal attempt detected: ${filePath}`,
-        ErrorTypes.VALIDATION,
-        'PATH_TRAVERSAL_DETECTED'
-      );
-    }
-
-    return { success: true, resolvedPath: resolved };
-  } catch (error) {
-    require('qerrors').logError(error, `Failed to resolve path: ${filePath}`, { context: `PATH_RESOLVE_ERROR` });
-    return { success: false, error };
-  }
-}
-
-module.exports = {
-  safeReadFileSync,
-  safeResolvePath
-};

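A short sketch of the result-object style these helpers used, per the listing above; the paths are illustrative:

    // Hypothetical use of the deleted lib/fileIO.js helpers (illustrative only)
    const { safeReadFileSync, safeResolvePath } = require('./lib/fileIO');

    const read = safeReadFileSync(process.cwd(), 'package.json');
    if (read.success) console.log(read.content.length);
    else console.error(read.error.message);

    const resolved = safeResolvePath(process.cwd(), '../outside.txt');
    console.log(resolved.success); // false - paths escaping rootDir are rejected as traversal attempts
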
package/lib/fileIO.test.js
DELETED
@@ -1,13 +0,0 @@
-// Auto-generated unit test for fileIO.js - optimized for speed
-const mod = require('./fileIO.js');
-
-describe('fileIO.js', () => {
-  test('safeReadFileSync works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.safeReadFileSync).toBeDefined();
-  });
-  test('safeResolvePath works', async () => {
-    // Fast assertion - TODO: implement specific test logic
-    expect(typeof mod.safeResolvePath).toBeDefined();
-  });
-});

package/lib/graphUtils.js
DELETED
@@ -1,139 +0,0 @@
-/**
- * Graph utilities for dependency analysis and file grouping
- * Implements dependency graph building, connected components, and similarity grouping
- * Following Single Responsibility Principle for graph operations
- */
-// 🚩AI: CORE_GRAPH_ALGORITHMS
-
-/**
- * Builds dependency graph from file metadata
- * @param {Array<string>} fileList - List of file paths
- * @param {Map} fileMetadata - Map of file metadata containing dependencies
- * @returns {Map} Dependency graph mapping files to their dependencies
- */
-function buildDependencyGraph(fileList, fileMetadata) {
-  const graph = new Map();
-
-  for (const filePath of fileList) {
-    const metadata = fileMetadata.get(filePath);
-    const dependencies = metadata?.dependencies || [];
-    graph.set(filePath, dependencies);
-  }
-
-  return graph;
-}
-
-/**
- * Finds connected components in dependency graph using DFS
- * @param {Map} graph - Dependency graph
- * @returns {Array<Array<string>>} Array of connected component groups
- */
-function findConnectedComponents(graph) {
-  const visited = new Set();
-  const components = [];
-
-  for (const node of graph.keys()) {
-    if (!visited.has(node)) {
-      const component = [];
-      depthFirstSearch(node, graph, visited, component);
-      if (component.length > 0) {
-        components.push(component);
-      }
-    }
-  }
-
-  return components;
-}
-
-/**
- * Depth-first search for graph traversal
- * @param {string} node - Current node
- * @param {Map} graph - Dependency graph
- * @param {Set} visited - Set of visited nodes
- * @param {Array} component - Current component being built
- */
-function depthFirstSearch(node, graph, visited, component) {
-  if (visited.has(node)) return;
-
-  visited.add(node);
-  component.push(node);
-
-  const dependencies = graph.get(node) || [];
-  for (const dependency of dependencies) {
-    if (graph.has(dependency)) { // Only follow dependencies that exist in our file list
-      depthFirstSearch(dependency, graph, visited, component);
-    }
-  }
-}
-
-/**
- * Groups files by filename similarity for fallback grouping
- * @param {Array<string>} files - List of file paths
- * @returns {Map<string, Array<string>>} Map of similarity groups
- */
-function groupBySimilarity(files) {
-  const path = require(`path`);
-  const groups = new Map();
-
-  for (const file of files) {
-    const basename = path.basename(file, path.extname(file));
-    const parts = basename.split(/[-_.]/).filter(part => part.length > 2);
-
-    let bestGroupKey = null;
-    let maxSimilarity = 0;
-
-    // Find most similar existing group
-    for (const [groupKey, groupFiles] of groups) {
-      const groupBasename = path.basename(groupFiles[0], path.extname(groupFiles[0]));
-      const groupParts = groupBasename.split(/[-_.]/).filter(part => part.length > 2);
-
-      const commonParts = parts.filter(part => groupParts.includes(part));
-      const similarity = commonParts.length / Math.max(parts.length || 1, groupParts.length || 1);
-
-      if (similarity > maxSimilarity && similarity > 0.3) {
-        maxSimilarity = similarity;
-        bestGroupKey = groupKey;
-      }
-    }
-
-    if (bestGroupKey) {
-      groups.get(bestGroupKey).push(file);
-    } else {
-      // Create new group
-      const groupKey = basename.replace(/[-_.]/g, `-`);
-      groups.set(groupKey, [file]);
-    }
-  }
-
-  return groups;
-}
-
-/**
- * Calculates role score for group naming
- * @param {string} filePath - File path to score
- * @returns {number} Role score for priority in naming
- */
-function calculateRoleScore(filePath) {
-  const path = require(`path`);
-  const basename = path.basename(filePath, path.extname(filePath)).toLowerCase();
-  const fullPath = filePath.toLowerCase();
-
-  // Higher scores for more central/important files
-  let score = 0;
-
-  if (basename.includes(`index`) || basename.includes(`main`)) score += 10;
-  if (basename.includes(`generator`) || basename.includes(`engine`)) score += 8;
-  if (basename.includes(`parser`) || basename.includes(`analyzer`)) score += 6;
-  if (basename.includes(`util`) || basename.includes(`helper`)) score += 4;
-  if (basename.includes(`config`) || basename.includes(`constant`) || fullPath.includes(`config/`)) score += 7;
-
-  return score;
-}
-
-module.exports = {
-  buildDependencyGraph,
-  findConnectedComponents,
-  depthFirstSearch,
-  groupBySimilarity,
-  calculateRoleScore
-};

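Finally, a small sketch of the graph helpers in isolation; the three-file graph is made up, and the expected components follow from the DFS in the listing above:

    // Hypothetical use of the deleted lib/graphUtils.js helpers (illustrative only)
    const { buildDependencyGraph, findConnectedComponents } = require('./lib/graphUtils');

    const files = ['a.js', 'b.js', 'c.js'];
    const metadata = new Map([
      ['a.js', { dependencies: ['b.js'] }],
      ['b.js', { dependencies: [] }],
      ['c.js', { dependencies: [] }]
    ]);

    const graph = buildDependencyGraph(files, metadata);
    console.log(findConnectedComponents(graph)); // [['a.js', 'b.js'], ['c.js']]
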