fileflows 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/README.md +19 -11
  2. package/cli.mjs +7 -0
  3. package/config/jest-require-polyfill.cjs +20 -0
  4. package/config/jest-setup.mjs +28 -0
  5. package/config/jest.config.mjs +72 -0
  6. package/config/localVars.js +2 -109
  7. package/dist/cli.js +76 -0
  8. package/dist/config/localVars.js +76 -0
  9. package/dist/index.js +7 -0
  10. package/dist/lib/dataFlowGrouper.js +137 -0
  11. package/dist/lib/dependencyExtractor.js +46 -0
  12. package/dist/lib/fileClassifier.js +78 -0
  13. package/dist/lib/fileFlowsGenerator.js +301 -0
  14. package/dist/lib/fileIO.js +35 -0
  15. package/dist/lib/graphUtils.js +89 -0
  16. package/dist/lib/index.js +50 -0
  17. package/dist/lib/jsParser.js +131 -0
  18. package/dist/lib/otherFileParser.js +131 -0
  19. package/index.mjs +2 -0
  20. package/package.json +31 -14
  21. package/scripts/broadcast.sh +26 -0
  22. package/scripts/clean-bun-cache.mjs +14 -0
  23. package/scripts/clean-dist.mjs +7 -0
  24. package/scripts/ensure-runner.mjs +9 -0
  25. package/scripts/kill-agent.sh +24 -0
  26. package/scripts/kill-all-agents.sh +31 -0
  27. package/scripts/list-agents.sh +16 -0
  28. package/scripts/send-to-agent.sh +28 -0
  29. package/scripts/spawn-agent.sh +62 -0
  30. package/cli.js +0 -81
  31. package/config/localVars.test.js +0 -37
  32. package/index.js +0 -13
  33. package/lib/SUMMARY.md +0 -53
  34. package/lib/dataFlowGrouper.js +0 -150
  35. package/lib/dataFlowGrouper.test.js +0 -17
  36. package/lib/dependencyExtractor.js +0 -70
  37. package/lib/dependencyExtractor.test.js +0 -9
  38. package/lib/fileClassifier.js +0 -38
  39. package/lib/fileClassifier.test.js +0 -9
  40. package/lib/fileFlowsGenerator.js +0 -156
  41. package/lib/fileFlowsGenerator.test.js +0 -17
  42. package/lib/fileIO.js +0 -60
  43. package/lib/fileIO.test.js +0 -13
  44. package/lib/graphUtils.js +0 -139
  45. package/lib/graphUtils.test.js +0 -25
  46. package/lib/index.js +0 -29
  47. package/lib/index.test.js +0 -53
  48. package/lib/jsParser.js +0 -132
  49. package/lib/jsParser.test.js +0 -13
  50. package/lib/otherFileParser.js +0 -103
  51. package/lib/otherFileParser.test.js +0 -9
package/dist/lib/otherFileParser.js ADDED
@@ -0,0 +1,131 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ })();
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseOtherFile = parseOtherFile;
+ const localVars = __importStar(require("../config/localVars"));
+ function parseOtherFile(content, relPath, ext) {
+ const lines = content.split('\n');
+ switch (ext) {
+ case 'json': {
+ try {
+ const parsed = JSON.parse(content);
+ const keys = Object.keys(parsed);
+ const summary = [parsed.name || parsed.title || 'JSON Configuration'].filter(Boolean);
+ return {
+ Keys: keys.slice(0, localVars.MAX_JSON_KEYS),
+ Summary: summary
+ };
+ }
+ catch {
+ return { Keys: [], Summary: ['Malformed JSON'] };
+ }
+ }
+ case 'md': {
+ const headings = lines
+ .filter(l => l.startsWith('#'))
+ .map(l => l.replace(/^#+ /, ''))
+ .slice(0, 5);
+ const firstHeading = headings[0];
+ return {
+ Headings: headings,
+ Summary: firstHeading ? [firstHeading] : []
+ };
+ }
+ case 'yml':
+ case 'yaml': {
+ const yamlKeys = lines
+ .filter(line => /^[a-zA-Z0-9_]+:/.test(line))
+ .slice(0, localVars.MAX_YAML_KEYS)
+ .map(line => line.split(':')[0]);
+ const yamlName = lines.find(line => line.includes('name:'))?.split(':')[1]?.trim() || 'YAML Configuration';
+ return {
+ Keys: yamlKeys,
+ Summary: [yamlName]
+ };
+ }
+ case 'env': {
+ return {
+ Vars: lines
+ .filter(line => line.includes('='))
+ .map(line => line.split('=')[0])
+ };
+ }
+ case 'graphql': {
+ const types = lines
+ .filter(line => /^(query|mutation|type)\s+/.test(line))
+ .map(line => line.trim().split(/\s+/)[1])
+ .filter(Boolean);
+ return {
+ Types: types,
+ Summary: ['GraphQL Schema']
+ };
+ }
+ case 'sh': {
+ const commands = lines
+ .filter(line => line.trim() && !line.startsWith('#'))
+ .slice(0, localVars.MAX_SHELL_COMMANDS)
+ .map(line => line.trim().split(/\s+/)[0])
+ .filter(cmd => cmd && !cmd.includes('='));
+ const scriptPurpose = content.includes('deploy') ? 'deployment' : 'shell script';
+ return {
+ Commands: [...new Set(commands)],
+ Summary: [scriptPurpose]
+ };
+ }
+ case 'html': {
+ const tagLines = lines.filter(line => line.includes('<') && !line.includes('</'));
+ const tags = [];
+ for (const line of tagLines) {
+ const matches = line.match(/<([a-zA-Z][a-zA-Z0-9]*)/g);
+ if (matches) {
+ matches.forEach(match => {
+ const tagName = match.substring(1).toLowerCase();
+ if (tagName && !tags.includes(tagName)) {
+ tags.push(tagName);
+ }
+ });
+ }
+ }
+ const titleMatch = content.match(/<title[^>]*>(.*?)<\/title>/is);
+ const title = titleMatch ? titleMatch[1].trim() : 'HTML Document';
+ return {
+ Tags: tags.slice(0, localVars.MAX_HTML_TAGS),
+ Summary: [title]
+ };
+ }
+ default:
+ return { Summary: ['Unknown file type'] };
+ }
+ }
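A minimal usage sketch of the compiled parser added above. The relative require path, sample input, and expected result are illustrative only and are not part of the package.

```
// Hypothetical caller, run from the package root; parseOtherFile(content, relPath, ext)
// takes the raw file text, a relative path, and the extension without a leading dot.
const { parseOtherFile } = require('./dist/lib/otherFileParser');

const json = '{ "name": "demo-app", "version": "1.0.0" }';
const result = parseOtherFile(json, 'package.json', 'json');
// Per the 'json' branch above: Keys lists top-level keys (capped at MAX_JSON_KEYS)
// and Summary is [parsed.name] → { Keys: ['name', 'version'], Summary: ['demo-app'] }
console.log(result);
```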
package/index.mjs ADDED
@@ -0,0 +1,2 @@
+ export * from './dist/index.js';
+ export { default } from './dist/index.js';
package/package.json CHANGED
@@ -1,16 +1,22 @@
  {
  "name": "fileflows",
- "version": "1.0.3",
+ "version": "1.0.5",
  "description": "CLI tool for deploying data workflow analysis documentation",
- "main": "cli.js",
+ "main": "index.mjs",
+ "exports": {
+ ".": "./index.mjs",
+ "./cli": "./cli.mjs"
+ },
  "bin": {
- "fileflows": "./cli.js"
+ "fileflows": "./cli.mjs"
  },
  "scripts": {
- "generate": "node cli.js",
- "help": "node cli.js --help",
- "test": "node qtests-runner.js",
- "start": "node bin/cli.js"
+ "build": "tsc",
+ "generate": "node cli.mjs",
+ "help": "node cli.mjs --help",
+ "test": "node --experimental-vm-modules qtests-runner.mjs",
+ "start": "node cli.mjs",
+ "pretest": "node scripts/clean-dist.mjs && node scripts/clean-bun-cache.mjs && node scripts/ensure-runner.mjs"
  },
  "keywords": [
  "file-analysis",
@@ -41,8 +47,7 @@
  "node": ">=16.0.0"
  },
  "files": [
- "bin/",
- "lib/",
+ "dist/",
  "config/",
  "contexts/",
  "features/",
@@ -54,20 +59,32 @@
  "dependencies": {
  "@babel/parser": "^7.28.3",
  "@babel/traverse": "^7.28.3",
- "qerrors": "^1.2.6",
+ "qerrors": "^1.2.7",
+ "qgenutils": "^1.0.3",
  "winston": "^3.17.0",
  "winston-daily-rotate-file": "^5.0.0"
  },
  "devDependencies": {
- "agentsqripts": "^1.0.8",
+ "@babel/core": "^7.28.5",
+ "@babel/preset-env": "^7.28.5",
+ "@bijikyu/csup": "^1.0.2",
+ "@openai/codex": "^0.77.0",
+ "@types/node": "^20.0.0",
+ "agentsqripts": "^1.1.0",
  "arqitect": "^1.0.7",
- "commoncontext": "^1.0.1",
+ "babel-jest": "^30.2.0",
+ "commoncontext": "^1.0.3",
+ "fileflows": "^1.0.3",
+ "jest": "^30.2.0",
  "loqatevars": "^1.0.6",
  "madge": "^8.0.0",
- "npmcontext": "^1.0.1",
- "qtests": "^1.1.9",
+ "npmcontext": "^1.0.4",
+ "opencode-ai": "1.0.204",
+ "qtests": "^2.0.0",
  "quantumagent": "^1.0.5",
  "repomix": "^1.2.0",
+ "ts-jest": "^29.4.6",
+ "typescript": "^5.6.3",
  "unqommented": "^1.1.0"
  }
  }
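The manifest now routes consumers through ESM entry points: "main" and the "exports" map point at index.mjs (which re-exports ./dist/index.js), and the bin now runs cli.mjs. A minimal consumption sketch, assuming dist/index.js keeps the previous default export (generateFileFlows) and cli.mjs keeps the old --dir/--output flags; neither assumption is confirmed by this diff.

```
// ESM import resolves through the new "exports" map to ./index.mjs → ./dist/index.js.
import generateFileFlows from 'fileflows';

// Mirrors the CLI form: npx fileflows --dir ./src --output FILE_FLOWS.md
await generateFileFlows('./src', 'FILE_FLOWS.md');
```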
package/scripts/broadcast.sh ADDED
@@ -0,0 +1,26 @@
+ #!/bin/bash
+ # USAGE:
+ # ./broadcast.sh "your message here"
+ SESSION="codex-swarm"
+ MESSAGE="$*"
+
+ if [[ -z "$MESSAGE" ]]; then
+ echo "Usage: $0 \"message to broadcast\""
+ exit 1
+ fi
+
+ if ! tmux has-session -t "$SESSION" 2>/dev/null; then
+ echo "❌ Tmux session '$SESSION' is not running."
+ exit 1
+ fi
+
+ tmux list-windows -t "$SESSION" -F "#{window_name}" 2>/dev/null | while read -r WINDOW_NAME; do
+ if [[ -z "$WINDOW_NAME" ]]; then
+ continue
+ fi
+ if [[ "$WINDOW_NAME" == "root" ]]; then
+ continue
+ fi
+ tmux send-keys -t "$SESSION:$WINDOW_NAME" "$MESSAGE" C-m
+ echo "📣 Sent to $WINDOW_NAME → $MESSAGE"
+ done
package/scripts/clean-bun-cache.mjs ADDED
@@ -0,0 +1,14 @@
+ // scripts/clean-bun-cache.mjs
+ // Remove Bun's install cache to avoid Jest haste-map module collisions.
+ import fs from 'fs';
+ import path from 'path';
+
+ const cacheDir = path.join(process.cwd(), '.cache', '.bun', 'install', 'cache');
+
+ if (fs.existsSync(cacheDir)) {
+ try {
+ fs.rmSync(cacheDir, { recursive: true, force: true });
+ } catch (error) {
+ // Intentionally ignore cleanup failures; Bun cache is optional for our test runs.
+ }
+ }
package/scripts/clean-dist.mjs ADDED
@@ -0,0 +1,7 @@
+ // scripts/clean-dist.mjs
+ // Remove compiled test files and __mocks__ from dist/ to prevent duplicate mock warnings.
+ import fs from 'fs';
+ import path from 'path';
+ function rmDirSafe(p){try{fs.rmSync(p,{recursive:true,force:true})}catch{}}
+ function cleanDist(root){const dist=path.join(root,'dist');try{if(!fs.existsSync(dist))return;}catch{return;}const stack=[dist];while(stack.length){const dir=stack.pop();let entries=[];try{entries=fs.readdirSync(dir,{withFileTypes:true})}catch{continue}for(const ent of entries){const full=path.join(dir,ent.name);if(ent.isDirectory()){if(ent.name==='__mocks__'){rmDirSafe(full);continue}stack.push(full);continue}if(!ent.isFile())continue;if(/\.(test|spec)\.[cm]?jsx?$/.test(ent.name)||/GeneratedTest/.test(ent.name)){try{fs.rmSync(full,{force:true})}catch{}}}}}
+ cleanDist(process.cwd());
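The cleanDist one-liner above is dense; a readable restatement of the same logic (not shipped in the package, with the try/catch wrappers simplified):

```
// Walk dist/ iteratively, delete any __mocks__ directory, and remove compiled
// *.test.* / *.spec.* / *GeneratedTest* files so Jest's haste map sees no duplicates.
import fs from 'fs';
import path from 'path';

function cleanDist(root) {
  const dist = path.join(root, 'dist');
  if (!fs.existsSync(dist)) return;
  const stack = [dist];
  while (stack.length) {
    const dir = stack.pop();
    let entries = [];
    try { entries = fs.readdirSync(dir, { withFileTypes: true }); } catch { continue; }
    for (const ent of entries) {
      const full = path.join(dir, ent.name);
      if (ent.isDirectory()) {
        // Drop compiled mock directories outright; recurse into everything else.
        if (ent.name === '__mocks__') { fs.rmSync(full, { recursive: true, force: true }); continue; }
        stack.push(full);
      } else if (ent.isFile() && (/\.(test|spec)\.[cm]?jsx?$/.test(ent.name) || /GeneratedTest/.test(ent.name))) {
        fs.rmSync(full, { force: true });
      }
    }
  }
}

cleanDist(process.cwd());
```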
package/scripts/ensure-runner.mjs ADDED
@@ -0,0 +1,9 @@
+ // Ensures qtests-runner.mjs exists at project root by copying a valid shipped template.
+ import fs from 'fs';
+ import path from 'path';
+ import { fileURLToPath } from 'url';
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
+ const cwd = process.cwd();
+ function isValid(content){try{return /runCLI/.test(content) && /API Mode/.test(content);}catch{return false;}}
+ try{const target=path.join(cwd,'qtests-runner.mjs');if(!fs.existsSync(target)){const candidates=[path.join(cwd,'lib','templates','qtests-runner.mjs.template'),path.join(cwd,'templates','qtests-runner.mjs.template'),path.join(cwd,'node_modules','qtests','lib','templates','qtests-runner.mjs.template'),path.join(cwd,'node_modules','qtests','templates','qtests-runner.mjs.template')];let content=null;for(const p of candidates){try{if(fs.existsSync(p)){const c=fs.readFileSync(p,'utf8');if(isValid(c)){content=c;break;}}}catch{}}if(!content){/* silent no-op */}else{fs.writeFileSync(target,content,'utf8');}}}catch{/* silent */}
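The ensure-runner one-liner above expands to the following readable sketch (not shipped in the package): if qtests-runner.mjs is missing at the project root, copy the first shipped template that looks valid, i.e. contains both "runCLI" and "API Mode"; otherwise do nothing. The silent try/catch wrappers are omitted here.

```
import fs from 'fs';
import path from 'path';

const cwd = process.cwd();
const target = path.join(cwd, 'qtests-runner.mjs');

// Template locations probed in order: local copies first, then the qtests package.
const candidates = [
  path.join(cwd, 'lib', 'templates', 'qtests-runner.mjs.template'),
  path.join(cwd, 'templates', 'qtests-runner.mjs.template'),
  path.join(cwd, 'node_modules', 'qtests', 'lib', 'templates', 'qtests-runner.mjs.template'),
  path.join(cwd, 'node_modules', 'qtests', 'templates', 'qtests-runner.mjs.template'),
];

if (!fs.existsSync(target)) {
  for (const candidate of candidates) {
    if (!fs.existsSync(candidate)) continue;
    const content = fs.readFileSync(candidate, 'utf8');
    if (/runCLI/.test(content) && /API Mode/.test(content)) {
      fs.writeFileSync(target, content, 'utf8'); // install the first valid template
      break;
    }
  }
}
```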
package/scripts/kill-agent.sh ADDED
@@ -0,0 +1,24 @@
+ #!/bin/bash
+ # USAGE:
+ # ./kill-agent.sh agent-name
+
+ SESSION="codex-swarm"
+ NAME="$1"
+
+ if [[ -z "$NAME" ]]; then
+ echo "Usage: $0 agent-name"
+ exit 1
+ fi
+
+ if ! tmux has-session -t "$SESSION" 2>/dev/null; then
+ echo "❌ Tmux session '$SESSION' is not running."
+ exit 1
+ fi
+
+ if ! tmux list-windows -t "$SESSION" -F "#{window_name}" 2>/dev/null | grep -Fxq "$NAME"; then
+ echo "❌ Agent '$NAME' window not found in session '$SESSION'."
+ exit 1
+ fi
+
+ tmux kill-window -t "$SESSION:$NAME"
+ echo "🛑 Agent '$NAME' stopped."
package/scripts/kill-all-agents.sh ADDED
@@ -0,0 +1,31 @@
+ #!/bin/bash
+ # USAGE:
+ # ./kill-all-agents.sh [--purge-logs]
+
+ SESSION="codex-swarm"
+ LOG_DIR="./logs"
+ PURGE_FLAG="$1"
+
+ case "$PURGE_FLAG" in
+ "") PURGE_LOGS=false ;;
+ "--purge-logs") PURGE_LOGS=true ;;
+ *) echo "Usage: $0 [--purge-logs]" ; exit 1 ;;
+ esac
+
+ if ! tmux has-session -t "$SESSION" 2>/dev/null; then
+ echo "❌ Tmux session '$SESSION' is not running."
+ exit 1
+ fi
+
+ tmux kill-session -t "$SESSION"
+
+ if $PURGE_LOGS; then
+ if [[ -d "$LOG_DIR" ]]; then
+ find "$LOG_DIR" -maxdepth 1 -type f -name "*.log" -exec rm -f {} +
+ echo "🗑️ Cleared logs in $LOG_DIR."
+ else
+ echo "ℹ️ Log directory '$LOG_DIR' not found; nothing to purge."
+ fi
+ fi
+
+ echo "🧹 All agents stopped; tmux session '$SESSION' terminated."
package/scripts/list-agents.sh ADDED
@@ -0,0 +1,16 @@
+ #!/bin/bash
+ LOG_DIR="./logs"
+ SESSION="codex-swarm"
+
+ echo "🔍 Listing active Codex agents..."
+
+ echo "📜 Logs:"
+ if [[ -d "$LOG_DIR" ]]; then
+ find "$LOG_DIR" -maxdepth 1 -type f -name "*.log" 2>/dev/null || echo " (none)"
+ else
+ echo " (none)"
+ fi
+
+ echo ""
+ echo "🪟 Tmux windows in session '$SESSION':"
+ tmux list-windows -t "$SESSION" 2>/dev/null || echo " (tmux session not running)"
package/scripts/send-to-agent.sh ADDED
@@ -0,0 +1,28 @@
+ #!/bin/bash
+ # USAGE:
+ # ./send-to-agent.sh agent-name "your prompt here"
+
+ NAME="$1"
+ shift
+ MESSAGE="$*"
+ SESSION="codex-swarm"
+ WINDOW="$SESSION:$NAME"
+
+ if [ -z "$NAME" ] || [ -z "$MESSAGE" ]; then
+ echo "Usage: $0 agent-name \"message to send\""
+ exit 1
+ fi
+
+ if ! tmux has-session -t "$SESSION" 2>/dev/null; then
+ echo "❌ Tmux session '$SESSION' is not running."
+ exit 1
+ fi
+
+ if ! tmux list-windows -t "$SESSION" -F "#{window_name}" 2>/dev/null | grep -Fxq "$NAME"; then
+ echo "❌ Agent '$NAME' window not found in session '$SESSION'."
+ exit 1
+ fi
+
+ tmux send-keys -t "$WINDOW" "$MESSAGE" C-m
+ tmux send-keys -t "$WINDOW" C-m
+ echo "✅ Sent to $NAME → $MESSAGE"
package/scripts/spawn-agent.sh ADDED
@@ -0,0 +1,62 @@
+ #!/bin/bash
+ # USAGE:
+ # ./spawn-agent.sh agent-name /path/to/dir [codex args...]
+
+ SESSION="codex-swarm"
+ NAME="$1"
+ DIR="$2"
+ shift 2
+ CODEX_ARGS=("$@")
+
+ if [[ -z "$NAME" || -z "$DIR" ]]; then
+ echo "Usage: $0 <agent-name> <working-directory> [codex args...]"
+ exit 1
+ fi
+
+ if [[ ! -d "$DIR" ]]; then
+ echo "❌ Working directory '$DIR' does not exist."
+ exit 1
+ fi
+
+ LOG_DIR="./logs"
+ mkdir -p "$LOG_DIR"
+
+ CODEX_CMD=(npx codex)
+
+ if [[ ${#CODEX_ARGS[@]} -eq 0 ]]; then
+ CODEX_CMD+=(--full-auto --ask-for-approval never)
+ else
+ ASK_FLAG_PRESENT=false
+ for ARG_INDEX in "${!CODEX_ARGS[@]}"; do
+ CURRENT_ARG="${CODEX_ARGS[$ARG_INDEX]}"
+ if [[ "$CURRENT_ARG" == "--ask-for-approval" || "$CURRENT_ARG" == "-a" ]]; then
+ ASK_FLAG_PRESENT=true
+ break
+ fi
+ done
+
+ CODEX_CMD+=("${CODEX_ARGS[@]}")
+
+ if [[ "$ASK_FLAG_PRESENT" == false ]]; then
+ CODEX_CMD+=(--ask-for-approval never)
+ fi
+ fi
+
+ printf -v CODEX_CMD_STR '%q ' "${CODEX_CMD[@]}"
+ CODEX_CMD_STR="${CODEX_CMD_STR% }"
+ printf -v WINDOW_CMD 'cd %q && %s' "$DIR" "$CODEX_CMD_STR"
+
+ STDOUT_LOG="$LOG_DIR/${NAME}.log"
+
+ tmux kill-window -t "$SESSION:$NAME" 2>/dev/null
+
+ if ! tmux has-session -t "$SESSION" 2>/dev/null; then
+ tmux new-session -d -s "$SESSION" -n root
+ fi
+
+ tmux new-window -d -t "$SESSION" -n "$NAME" "$WINDOW_CMD"
+ tmux pipe-pane -o -t "$SESSION:$NAME" "cat >> \"$STDOUT_LOG\""
+
+ echo "Agent '$NAME' spawned in tmux session '$SESSION'."
+ echo " Send messages with: ./send-to-agent.sh $NAME \"<message>\""
+ echo " Output log: $STDOUT_LOG"
package/cli.js DELETED
@@ -1,81 +0,0 @@
- #!/usr/bin/env node
- /**
- * CLI tool for generating FILE_FLOWS.md
- * Analyzes project files and creates documentation showing data flow relationships
- * Following NPM architecture guidelines with modular structure
- * Refactored: Global const declarations moved to config/localVars.js
- */
-
- /**
- * CLI usage display functionality
- * Shows help information for the CLI tool
- */
- function showUsage() {
- console.log(`Usage: node cli.js [options]`);
- console.log(``);
- console.log(`Options:`);
- console.log(` --dir <path> Directory to analyze (default: current directory)`);
- console.log(` --output <file> Output file path (default: FILE_FLOWS.md)`);
- console.log(` --help, -h Show this help message`);
- console.log(``);
- console.log(`Examples:`);
- console.log(` node cli.js # Analyze current directory`);
- console.log(` node cli.js --dir ./src # Analyze src directory`);
- console.log(` node cli.js --output flows.md # Custom output file`);
- }
-
- /**
- * Main CLI logic
- * Parses arguments and executes file flows generation
- */
- async function main() {
- const localVars = require(`./config/localVars`);
- const { generateFileFlows } = localVars.getDependencies();
-
- const args = process.argv.slice(2);
- let rootDir = localVars.DEFAULT_ROOT_DIR;
- let outputFile = null;
-
- for (let i = 0; i < args.length; i++) {
- const arg = args[i];
-
- if (arg === `--help` || arg === `-h`) {
- showUsage();
- process.exit(0);
- } else if (arg === `--dir`) {
- if (i + 1 < args.length) {
- rootDir = args[++i];
- } else {
- console.error(`Error: --dir requires a directory path`);
- process.exit(1);
- }
- } else if (arg === `--output`) {
- if (i + 1 < args.length) {
- outputFile = args[++i];
- } else {
- console.error(`Error: --output requires a file path`);
- process.exit(1);
- }
- } else {
- console.error(`Error: Unknown argument '${arg}'`);
- showUsage();
- process.exit(1);
- }
- }
-
- try {
- console.log(`🚀 Generating FILE_FLOWS.md...`);
- await generateFileFlows(rootDir, outputFile);
- console.log(`🎉 Generation complete!`);
- } catch (error) {
- console.error(`❌ Error:`, error.message);
- process.exit(1);
- }
- }
-
- // Run if called directly
- if (require.main === module) {
- main().catch(console.error);
- }
-
- module.exports = { showUsage, main };
package/config/localVars.test.js DELETED
@@ -1,37 +0,0 @@
- // Auto-generated unit test for localVars.js - optimized for speed
- const mod = require('./localVars.js');
-
- describe('localVars.js', () => {
- test('CODE_EXTENSIONS works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.CODE_EXTENSIONS).toBeDefined();
- });
- test('ALL_EXTENSIONS works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.ALL_EXTENSIONS).toBeDefined();
- });
- test('DEFAULT_ROOT_DIR works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.DEFAULT_ROOT_DIR).toBeDefined();
- });
- test('DEFAULT_OUTPUT_FILE works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.DEFAULT_OUTPUT_FILE).toBeDefined();
- });
- test('IGNORE_PATTERNS works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.IGNORE_PATTERNS).toBeDefined();
- });
- test('MAX_JSON_KEYS works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.MAX_JSON_KEYS).toBeDefined();
- });
- test('MAX_YAML_KEYS works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.MAX_YAML_KEYS).toBeDefined();
- });
- test('MAX_SHELL_COMMANDS works', async () => {
- // Fast assertion - TODO: implement specific test logic
- expect(typeof mod.MAX_SHELL_COMMANDS).toBeDefined();
- });
- });
package/index.js DELETED
@@ -1,13 +0,0 @@
- /**
- * Main entry point for the file-flows-cli NPM module
- * Exports public functionality following NPM architecture guidelines
- * Refactored: Global const declarations moved to config/localVars.js
- */
-
- function getMainExport() {
- const localVars = require(`./config/localVars`);
- const { generateFileFlows } = localVars.getDependencies();
- return generateFileFlows;
- }
-
- module.exports = getMainExport();
package/lib/SUMMARY.md DELETED
@@ -1,53 +0,0 @@
- # lib/ Directory Summary
-
- ## Overview
- Core library modules for FILE_FLOWS.md generation, following Single Responsibility Principle with one function per file.
-
- ## File Roles & Data Flow
-
- ### Entry Points
- - **index.js** - Main module aggregator, exports all lib functions for external use
- - **fileFlowsGenerator.js** - Primary orchestrator, coordinates analysis and output generation
-
- ### Analysis Pipeline
- 1. **fileClassifier.js** - Classifies files by type (Code, Configuration, Documentation, etc.)
- 2. **jsParser.js** - AST parsing for JavaScript/TypeScript files, extracts functions and imports
- 3. **otherFileParser.js** - Simple parsing for non-JS files (JSON, MD, etc.)
- 4. **dependencyExtractor.js** - Extracts import/require dependencies from file content
-
- ### Grouping & Output
- 5. **dataFlowGrouper.js** - Groups files by actual data flow relationships and naming patterns
-
- ## Request/Response Flows
-
- ### Main Generation Flow
- ```
- generateFileFlows(rootDir, outputFile)
- ├── globby scan for files
- ├── groupByDataFlow(files, rootDir)
- │ ├── classifyFile(filePath, ext) for each file
- │ ├── parseJSFile(filePath, content) OR parseOtherFile(filePath, content)
- │ ├── extractDependencies(content) for dependency mapping
- │ └── clustering algorithms for flow grouping
- └── markdown output generation
- ```
-
- ### Data Dependencies
- - **config/localVars.js** - Centralized constants (file extensions, ignore patterns, etc.)
- - **External**: globby (file discovery), @babel/parser & @babel/traverse (AST parsing)
-
- ## Known Side Effects
- - **File System**: Reads all project files matching configured extensions
- - **Output**: Overwrites FILE_FLOWS.md without backup
- - **Memory**: Processes entire project file tree in memory
- - **Performance**: AST parsing scales with codebase size
-
- ## Edge Cases & Caveats
- - **Large Projects**: Memory usage grows with file count and complexity
- - **Binary Files**: Ignored via gitignore patterns but may cause issues if included
- - **Circular Dependencies**: Detected but may create complex flow groups
- - **ES Modules vs CommonJS**: Mixed module systems handled via dynamic imports
- - **Parse Errors**: Individual file failures don't stop overall generation
-
- ## AI Agent Task Anchors
- See individual files for specific `// 🚩AI:` markers indicating critical update points.