@eldrforge/kodrdriv 0.0.32 → 0.0.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +46 -69
- package/dist/application.js +146 -0
- package/dist/application.js.map +1 -0
- package/dist/arguments.js +22 -21
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +43 -21
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +46 -38
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/clean.js +28 -12
- package/dist/commands/clean.js.map +1 -1
- package/dist/commands/commit.js +132 -39
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/link.js +177 -159
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish-tree.js +19 -6
- package/dist/commands/publish-tree.js.map +1 -1
- package/dist/commands/publish.js +152 -82
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/release.js +21 -16
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/review.js +286 -60
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/select-audio.js +25 -8
- package/dist/commands/select-audio.js.map +1 -1
- package/dist/commands/unlink.js +349 -159
- package/dist/commands/unlink.js.map +1 -1
- package/dist/constants.js +14 -5
- package/dist/constants.js.map +1 -1
- package/dist/content/diff.js +7 -5
- package/dist/content/diff.js.map +1 -1
- package/dist/content/log.js +4 -1
- package/dist/content/log.js.map +1 -1
- package/dist/error/CancellationError.js +9 -0
- package/dist/error/CancellationError.js.map +1 -0
- package/dist/error/CommandErrors.js +120 -0
- package/dist/error/CommandErrors.js.map +1 -0
- package/dist/logging.js +55 -12
- package/dist/logging.js.map +1 -1
- package/dist/main.js +6 -131
- package/dist/main.js.map +1 -1
- package/dist/prompt/commit.js +4 -0
- package/dist/prompt/commit.js.map +1 -1
- package/dist/prompt/instructions/commit.md +33 -24
- package/dist/prompt/instructions/release.md +39 -5
- package/dist/prompt/release.js +41 -1
- package/dist/prompt/release.js.map +1 -1
- package/dist/types.js +9 -2
- package/dist/types.js.map +1 -1
- package/dist/util/github.js +97 -12
- package/dist/util/github.js.map +1 -1
- package/dist/util/npmOptimizations.js +174 -0
- package/dist/util/npmOptimizations.js.map +1 -0
- package/dist/util/openai.js +4 -2
- package/dist/util/openai.js.map +1 -1
- package/dist/util/performance.js +202 -0
- package/dist/util/performance.js.map +1 -0
- package/dist/util/safety.js +166 -0
- package/dist/util/safety.js.map +1 -0
- package/dist/util/storage.js +10 -0
- package/dist/util/storage.js.map +1 -1
- package/dist/util/validation.js +81 -0
- package/dist/util/validation.js.map +1 -0
- package/package.json +19 -18
- package/packages/components/package.json +4 -0
- package/packages/tools/package.json +4 -0
- package/packages/utils/package.json +4 -0
- package/scripts/pre-commit-hook.sh +52 -0
- package/test-project/package.json +1 -0
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import { getLogger } from '../logging.js';
|
|
3
|
+
import { safeJsonParse, validatePackageJson } from './validation.js';
|
|
4
|
+
|
|
5
|
+
/* eslint-disable @typescript-eslint/no-unused-vars */ function _define_property(obj, key, value) {
    // Compiler-emitted helper for class-field initialisation: installs
    // `value` on `obj` under `key` and returns `obj` for chaining.
    // A key that is not yet present (own or inherited) can be set with a
    // plain assignment; an existing key goes through Object.defineProperty
    // so its descriptor is normalised to writable/enumerable/configurable.
    if (!(key in obj)) {
        obj[key] = value;
        return obj;
    }
    Object.defineProperty(obj, key, {
        value: value,
        enumerable: true,
        configurable: true,
        writable: true
    });
    return obj;
}
|
|
18
|
+
// Performance timing helper
class PerformanceTimer {
    /**
     * Wall-clock timer that reports the duration of named operations
     * through the supplied logger's verbose channel.
     *
     * @param {object} logger - logger exposing a `verbose(msg)` method
     */
    constructor(logger) {
        this.logger = logger;
        this.startTime = Date.now();
    }

    /**
     * Announce the start of `operation` and return a running timer for it.
     *
     * @param {object} logger - logger exposing a `verbose(msg)` method
     * @param {string} operation - human-readable operation label
     * @returns {PerformanceTimer} timer anchored at the current time
     */
    static start(logger, operation) {
        logger.verbose(`⏱️ Starting: ${operation}`);
        return new PerformanceTimer(logger);
    }

    /**
     * Announce completion of `operation` and return the elapsed time.
     *
     * @param {string} operation - label; need not match the start label
     * @returns {number} elapsed milliseconds since construction
     */
    end(operation) {
        const elapsedMs = Date.now() - this.startTime;
        this.logger.verbose(`⏱️ Completed: ${operation} (${elapsedMs}ms)`);
        return elapsedMs;
    }
}
|
|
36
|
+
// Directory names skipped while scanning for package.json files:
// installed dependencies, build/coverage output, VCS metadata, framework
// output folders (.next/.nuxt/out), and static asset directories.
const EXCLUDED_DIRECTORIES = [
    'node_modules',
    'dist',
    'build',
    'coverage',
    '.git',
    '.next',
    '.nuxt',
    'out',
    'public',
    'static',
    'assets'
];
|
|
49
|
+
/**
 * Read and validate a batch of package.json files concurrently.
 *
 * Every path is read through `storage`, JSON-parsed and validated. A file
 * that fails to read, parse, or validate is logged at debug level and
 * dropped from the result instead of failing the whole batch.
 *
 * @param {string[]} packageJsonPaths - package.json paths to load
 * @param {object} storage - backend exposing `readFile(path, encoding)`
 * @param {string} rootDir - base for the returned `relativePath` values
 * @returns {Promise<Array<{path: string, packageJson: object, relativePath: string}>>}
 */
const batchReadPackageJsonFiles = async (packageJsonPaths, storage, rootDir)=>{
    const logger = getLogger();
    const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);
    // Loads a single package.json; resolves to null when it is unusable.
    const loadOne = async (packageJsonPath)=>{
        try {
            const raw = await storage.readFile(packageJsonPath, 'utf-8');
            const parsed = safeJsonParse(raw, packageJsonPath);
            const packageJson = validatePackageJson(parsed, packageJsonPath, false);
            const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));
            return {
                path: packageJsonPath,
                packageJson,
                // path.relative yields '' when the file sits in rootDir itself
                relativePath: relativePath || '.'
            };
        } catch (error) {
            logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);
            return null;
        }
    };
    const results = await Promise.all(packageJsonPaths.map(loadOne));
    const validResults = results.filter((entry)=>entry !== null);
    timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);
    return validResults;
};
|
|
74
|
+
// Optimized recursive package.json finder with parallel processing
/**
 * Recursively locate every package.json under `rootDir` (at most 5 levels
 * deep), then batch-read and validate them in parallel.
 *
 * @param {string} rootDir - directory to scan
 * @param {object} storage - backend exposing exists/isDirectory/listFiles/readFile
 * @returns {Promise<Array<{path: string, packageJson: object, relativePath: string}>>}
 */
const findAllPackageJsonFiles = async (rootDir, storage)=>{
    const logger = getLogger();
    const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');
    // Walk `dir`, collecting package.json paths. Subdirectories are
    // descended concurrently; excluded and unreadable entries are skipped.
    const walk = async (dir, depth = 0)=>{
        // Prevent infinite recursion and overly deep scanning
        if (depth > 5) {
            return [];
        }
        try {
            const usable = await storage.exists(dir) && await storage.isDirectory(dir);
            if (!usable) {
                return [];
            }
            const entries = await storage.listFiles(dir);
            // package.json in the current directory comes first in the result
            const collected = entries.includes('package.json')
                ? [path.join(dir, 'package.json')]
                : [];
            // Kick off all subdirectory walks in parallel
            const descend = entries
                .filter((entry)=>!EXCLUDED_DIRECTORIES.includes(entry))
                .map(async (entry)=>{
                    const entryPath = path.join(dir, entry);
                    try {
                        if (await storage.isDirectory(entryPath)) {
                            return await walk(entryPath, depth + 1);
                        }
                    } catch (error) {
                        logger.debug(`Skipped directory ${entryPath}: ${error.message}`);
                    }
                    return [];
                });
            if (descend.length > 0) {
                const nested = await Promise.all(descend);
                for (const paths of nested){
                    collected.push(...paths);
                }
            }
            return collected;
        } catch (error) {
            logger.debug(`Failed to scan directory ${dir}: ${error.message}`);
            return [];
        }
    };
    const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');
    const allPaths = await walk(rootDir);
    pathsTimer.end(`Found ${allPaths.length} package.json file paths`);
    // Phase 2: Batch read all package.json files in parallel
    const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);
    timer.end(`Found ${packageJsonFiles.length} valid package.json files`);
    return packageJsonFiles;
};
|
|
132
|
+
// Optimized package scanning with parallel processing
/**
 * Scan the immediate subdirectories of `rootDir` (one level only, unlike
 * findAllPackageJsonFiles) and map each named package to its path.
 *
 * @param {string} rootDir - directory to scan, resolved against process.cwd()
 * @param {object} storage - backend exposing exists/isDirectory/listFiles/readFile
 * @returns {Promise<Map<string, string>>} packageName -> path relative to cwd;
 *   empty when the directory is missing, not a directory, or unreadable
 */
const scanDirectoryForPackages = async (rootDir, storage)=>{
    const logger = getLogger();
    const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);
    const packageMap = new Map(); // packageName -> relativePath
    const absoluteRootDir = path.resolve(process.cwd(), rootDir);
    logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);
    try {
        // Quick existence and directory check
        const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);
        if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {
            existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);
            timer.end(`Directory invalid: ${rootDir}`);
            // Empty map, not an error: callers treat "no directory" as "no packages"
            return packageMap;
        }
        existsTimer.end(`Directory verified: ${absoluteRootDir}`);
        // Get all items and process in parallel
        const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);
        const items = await storage.listFiles(absoluteRootDir);
        listTimer.end(`Listed ${items.length} items`);
        // Create batched promises for better performance
        const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem
        const batches = [];
        for(let i = 0; i < items.length; i += BATCH_SIZE){
            const batch = items.slice(i, i + BATCH_SIZE);
            batches.push(batch);
        }
        const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);
        // Batches run sequentially; entries within a batch run in parallel.
        for (const batch of batches){
            const batchPromises = batch.map(async (item)=>{
                const itemPath = path.join(absoluteRootDir, item);
                try {
                    if (await storage.isDirectory(itemPath)) {
                        const packageJsonPath = path.join(itemPath, 'package.json');
                        if (await storage.exists(packageJsonPath)) {
                            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
                            const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
                            // NOTE(review): unlike batchReadPackageJsonFiles, this call omits
                            // the third argument to validatePackageJson — confirm the
                            // difference in validation strictness is intentional.
                            const packageJson = validatePackageJson(parsed, packageJsonPath);
                            // Unnamed packages are ignored — they cannot be linked by name
                            if (packageJson.name) {
                                const relativePath = path.relative(process.cwd(), itemPath);
                                return {
                                    name: packageJson.name,
                                    path: relativePath
                                };
                            }
                        }
                    }
                } catch (error) {
                    // Per-entry failures are non-fatal; log and keep scanning
                    logger.debug(`Skipped ${itemPath}: ${error.message || error}`);
                }
                return null;
            });
            const batchResults = await Promise.all(batchPromises);
            for (const result of batchResults){
                if (result) {
                    packageMap.set(result.name, result.path);
                    logger.debug(`Found package: ${result.name} at ${result.path}`);
                }
            }
        }
        processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);
        logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);
    } catch (error) {
        logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);
    }
    timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);
    return packageMap;
};
|
|
200
|
+
|
|
201
|
+
export { PerformanceTimer, batchReadPackageJsonFiles, findAllPackageJsonFiles, scanDirectoryForPackages };
|
|
202
|
+
//# sourceMappingURL=performance.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from './validation';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n 
const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n 
logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n 
const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath = path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const 
findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n 
workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all dependencies');\n\n const allDependencies: Record<string, string> = {};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('⚠️ 
WARNING: Found file: dependencies that should not be committed:');\n for (const file of filesWithFileDepedencies) {\n logger.warn(` 📄 ${file.path}:`);\n for (const dep of file.dependencies) {\n logger.warn(` - ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('💡 Remember to run \"kodrdriv unlink\" before committing to restore registry versions!');\n logger.warn(' Or add a pre-commit hook to prevent accidental commits of linked dependencies.');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles","scanDirectoryForPackages","packageMap","Map","absoluteRootDir","resolve","process","cwd","existsTimer","listTimer","BATCH_SIZE","batches","i","batch","slice","processTimer","batchPromises","name","batchResults","set","size","warn"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC
,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,IAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,IAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,
CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,IAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,IAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;AAEA;AACO,MAAMC,wBAAAA,GAA2B,OAAO9C,OAAAA,EAAiBD
,OAAAA,GAAAA;AAC5D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;IACf,MAAMC,KAAAA,GAAQhB,iBAAiBC,KAAK,CAACC,QAAQ,CAAC,4BAA4B,EAAEY,OAAAA,CAAAA,CAAS,CAAA;IACrF,MAAM+C,UAAAA,GAAa,IAAIC,GAAAA,EAAAA,CAAAA;AAEvB,IAAA,MAAMC,kBAAkBnC,IAAAA,CAAKoC,OAAO,CAACC,OAAAA,CAAQC,GAAG,EAAA,EAAIpD,OAAAA,CAAAA;AACpDZ,IAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,iCAAiC,EAAE2D,eAAAA,CAAAA,CAAiB,CAAA;IAEpE,IAAI;;QAEA,MAAMI,WAAAA,GAAcnE,iBAAiBC,KAAK,CAACC,QAAQ,CAAC,oBAAoB,EAAE6D,eAAAA,CAAAA,CAAiB,CAAA;QAC3F,IAAI,CAAC,MAAMlD,OAAAA,CAAQ+B,MAAM,CAACmB,eAAAA,CAAAA,IAAoB,CAAC,MAAMlD,OAAAA,CAAQgC,WAAW,CAACkB,eAAAA,CAAAA,EAAkB;AACvFI,YAAAA,WAAAA,CAAY9D,GAAG,CAAC,CAAC,wCAAwC,EAAE0D,eAAAA,CAAAA,CAAiB,CAAA;AAC5E/C,YAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,mBAAmB,EAAES,OAAAA,CAAAA,CAAS,CAAA;YACzC,OAAO+C,UAAAA;AACX,QAAA;AACAM,QAAAA,WAAAA,CAAY9D,GAAG,CAAC,CAAC,oBAAoB,EAAE0D,eAAAA,CAAAA,CAAiB,CAAA;;QAGxD,MAAMK,SAAAA,GAAYpE,iBAAiBC,KAAK,CAACC,QAAQ,CAAC,kBAAkB,EAAE6D,eAAAA,CAAAA,CAAiB,CAAA;AACvF,QAAA,MAAMjB,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACgB,eAAAA,CAAAA;QACtCK,SAAAA,CAAU/D,GAAG,CAAC,CAAC,OAAO,EAAEyC,KAAAA,CAAM7B,MAAM,CAAC,MAAM,CAAC,CAAA;;QAG5C,MAAMoD,UAAAA,GAAa;AACnB,QAAA,MAAMC,UAAU,EAAE;QAElB,IAAK,IAAIC,IAAI,CAAA,EAAGA,CAAAA,GAAIzB,MAAM7B,MAAM,EAAEsD,KAAKF,UAAAA,CAAY;AAC/C,YAAA,MAAMG,KAAAA,GAAQ1B,KAAAA,CAAM2B,KAAK,CAACF,GAAGA,CAAAA,GAAIF,UAAAA,CAAAA;AACjCC,YAAAA,OAAAA,CAAQnB,IAAI,CAACqB,KAAAA,CAAAA;AACjB,QAAA;AAEA,QAAA,MAAME,YAAAA,GAAe1E,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,WAAW,EAAEoE,OAAAA,CAAQrD,MAAM,CAAC,uBAAuB,CAAC,CAAA;QAEzG,KAAK,MAAMuD,SAASF,OAAAA,CAAS;AACzB,YAAA,MAAMK,aAAAA,GAAgBH,KAAAA,CAAMrD,GAAG,CAAC,OAAOkC,IAAAA,GAAAA;AACnC,gBAAA,MAAMC,QAAAA,GAAW1B,IAAAA,CAAKsB,IAAI,CAACa,eAAAA,EAAiBV,IAAAA,CAAAA;gBAC5C,IAAI;AACA,oBAAA,IAAI,MAAMxC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;AACrC,wBAAA,MAAMlC,eAAAA,GAAkBQ,IAAAA,CAAKsB,IAAI,CAACI,QAAAA,EAAU,cAAA,CAAA;AAE5C,wBAAA,IAAI,MAAMzC,OAAAA,CAAQ+B,MAAM,CAACxB,eAAAA,CAAAA,EAAkB;AACvC,4BAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;4BACnE,MAAMG,MAAAA,
GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;4BACjD,MAAMK,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQH,eAAAA,CAAAA;4BAEhD,IAAIK,WAAAA,CAAYmD,IAAI,EAAE;AAClB,gCAAA,MAAMjD,eAAeC,IAAAA,CAAKC,QAAQ,CAACoC,OAAAA,CAAQC,GAAG,EAAA,EAAIZ,QAAAA,CAAAA;gCAClD,OAAO;AAAEsB,oCAAAA,IAAAA,EAAMnD,YAAYmD,IAAI;oCAAEhD,IAAAA,EAAMD;AAAa,iCAAA;AACxD,4BAAA;AACJ,wBAAA;AACJ,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,QAAQ,EAAEsB,QAAAA,CAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,IAAIF,KAAAA,CAAAA,CAAO,CAAA;AACjE,gBAAA;gBACA,OAAO,IAAA;AACX,YAAA,CAAA,CAAA;AAEA,YAAA,MAAM8C,YAAAA,GAAe,MAAM1C,OAAAA,CAAQC,GAAG,CAACuC,aAAAA,CAAAA;YAEvC,KAAK,MAAMpC,UAAUsC,YAAAA,CAAc;AAC/B,gBAAA,IAAItC,MAAAA,EAAQ;AACRsB,oBAAAA,UAAAA,CAAWiB,GAAG,CAACvC,MAAAA,CAAOqC,IAAI,EAAErC,OAAOX,IAAI,CAAA;AACvC1B,oBAAAA,MAAAA,CAAO8B,KAAK,CAAC,CAAC,eAAe,EAAEO,MAAAA,CAAOqC,IAAI,CAAC,IAAI,EAAErC,MAAAA,CAAOX,IAAI,CAAA,CAAE,CAAA;AAClE,gBAAA;AACJ,YAAA;AACJ,QAAA;AAEA8C,QAAAA,YAAAA,CAAarE,GAAG,CAAC,CAAC,UAAU,EAAEyC,KAAAA,CAAM7B,MAAM,CAAC,gBAAgB,EAAEqD,OAAAA,CAAQrD,MAAM,CAAC,QAAQ,CAAC,CAAA;AACrFf,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,MAAM,EAAEyD,UAAAA,CAAWkB,IAAI,CAAC,aAAa,EAAEjC,KAAAA,CAAM7B,MAAM,CAAC,eAAe,CAAC,CAAA;AACxF,IAAA,CAAA,CAAE,OAAOc,KAAAA,EAAO;QACZ7B,MAAAA,CAAO8E,IAAI,CAAC,CAAC,yBAAyB,EAAEjB,eAAAA,CAAgB,EAAE,EAAEhC,KAAAA,CAAAA,CAAO,CAAA;AACvE,IAAA;IAEAf,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEwD,WAAWkB,IAAI,CAAC,cAAc,EAAEjE,OAAAA,CAAAA,CAAS,CAAA;IAC5D,OAAO+C,UAAAA;AACX;;;;"}
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import { getLogger } from '../logging.js';
|
|
3
|
+
import { safeJsonParse, validatePackageJson } from './validation.js';
|
|
4
|
+
|
|
5
|
+
const EXCLUDED_DIRECTORIES = [
|
|
6
|
+
'node_modules',
|
|
7
|
+
'dist',
|
|
8
|
+
'build',
|
|
9
|
+
'coverage',
|
|
10
|
+
'.git',
|
|
11
|
+
'.next',
|
|
12
|
+
'.nuxt',
|
|
13
|
+
'out',
|
|
14
|
+
'public',
|
|
15
|
+
'static',
|
|
16
|
+
'assets'
|
|
17
|
+
];
|
|
18
|
+
const findAllPackageJsonFiles = async (rootDir, storage)=>{
|
|
19
|
+
const logger = getLogger();
|
|
20
|
+
const packageJsonFiles = [];
|
|
21
|
+
const scanDirectory = async (currentDir, depth = 0)=>{
|
|
22
|
+
// Prevent infinite recursion and overly deep scanning
|
|
23
|
+
if (depth > 5) {
|
|
24
|
+
return;
|
|
25
|
+
}
|
|
26
|
+
try {
|
|
27
|
+
if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {
|
|
28
|
+
return;
|
|
29
|
+
}
|
|
30
|
+
const items = await storage.listFiles(currentDir);
|
|
31
|
+
// Check for package.json in current directory
|
|
32
|
+
if (items.includes('package.json')) {
|
|
33
|
+
const packageJsonPath = path.join(currentDir, 'package.json');
|
|
34
|
+
try {
|
|
35
|
+
const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
|
|
36
|
+
const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
|
|
37
|
+
const packageJson = validatePackageJson(parsed, packageJsonPath);
|
|
38
|
+
const relativePath = path.relative(rootDir, currentDir);
|
|
39
|
+
packageJsonFiles.push({
|
|
40
|
+
path: packageJsonPath,
|
|
41
|
+
packageJson,
|
|
42
|
+
relativePath: relativePath || '.'
|
|
43
|
+
});
|
|
44
|
+
logger.debug(`Found package.json at: ${relativePath || '.'}`);
|
|
45
|
+
} catch (error) {
|
|
46
|
+
logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
// Scan subdirectories, excluding build/generated directories
|
|
50
|
+
for (const item of items){
|
|
51
|
+
if (EXCLUDED_DIRECTORIES.includes(item)) {
|
|
52
|
+
continue;
|
|
53
|
+
}
|
|
54
|
+
const itemPath = path.join(currentDir, item);
|
|
55
|
+
try {
|
|
56
|
+
if (await storage.isDirectory(itemPath)) {
|
|
57
|
+
await scanDirectory(itemPath, depth + 1);
|
|
58
|
+
}
|
|
59
|
+
} catch (error) {
|
|
60
|
+
// Skip directories that can't be accessed
|
|
61
|
+
logger.debug(`Skipped directory ${itemPath}: ${error.message}`);
|
|
62
|
+
continue;
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
} catch (error) {
|
|
66
|
+
logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);
|
|
67
|
+
}
|
|
68
|
+
};
|
|
69
|
+
await scanDirectory(rootDir);
|
|
70
|
+
logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);
|
|
71
|
+
return packageJsonFiles;
|
|
72
|
+
};
|
|
73
|
+
/**
|
|
74
|
+
* Checks for file: dependencies in package.json files that should not be committed
|
|
75
|
+
* @param storage Storage utility instance
|
|
76
|
+
* @param rootDir Root directory to scan (defaults to current working directory)
|
|
77
|
+
* @returns Array of issues found, empty array if no issues
|
|
78
|
+
*/ const checkForFileDependencies = async (storage, rootDir = process.cwd())=>{
|
|
79
|
+
const logger = getLogger();
|
|
80
|
+
const issues = [];
|
|
81
|
+
try {
|
|
82
|
+
const packageJsonFiles = await findAllPackageJsonFiles(rootDir, storage);
|
|
83
|
+
for (const { packageJson, relativePath } of packageJsonFiles){
|
|
84
|
+
const fileDeps = [];
|
|
85
|
+
// Check all dependency types for file: paths
|
|
86
|
+
const dependencyChecks = [
|
|
87
|
+
{
|
|
88
|
+
deps: packageJson.dependencies,
|
|
89
|
+
type: 'dependencies'
|
|
90
|
+
},
|
|
91
|
+
{
|
|
92
|
+
deps: packageJson.devDependencies,
|
|
93
|
+
type: 'devDependencies'
|
|
94
|
+
},
|
|
95
|
+
{
|
|
96
|
+
deps: packageJson.peerDependencies,
|
|
97
|
+
type: 'peerDependencies'
|
|
98
|
+
}
|
|
99
|
+
];
|
|
100
|
+
for (const { deps, type } of dependencyChecks){
|
|
101
|
+
if (deps) {
|
|
102
|
+
for (const [name, version] of Object.entries(deps)){
|
|
103
|
+
if (version.startsWith('file:')) {
|
|
104
|
+
fileDeps.push({
|
|
105
|
+
name,
|
|
106
|
+
version,
|
|
107
|
+
dependencyType: type
|
|
108
|
+
});
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
if (fileDeps.length > 0) {
|
|
114
|
+
issues.push({
|
|
115
|
+
packagePath: relativePath,
|
|
116
|
+
dependencies: fileDeps
|
|
117
|
+
});
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
} catch (error) {
|
|
121
|
+
logger.debug(`Failed to check for file dependencies: ${error.message}`);
|
|
122
|
+
}
|
|
123
|
+
return issues;
|
|
124
|
+
};
|
|
125
|
+
/**
|
|
126
|
+
* Logs file dependency issues in a user-friendly format
|
|
127
|
+
* @param issues Array of file dependency issues
|
|
128
|
+
* @param context Context for the warning (e.g., 'commit', 'link check')
|
|
129
|
+
*/ const logFileDependencyWarning = (issues, context = 'operation')=>{
|
|
130
|
+
const logger = getLogger();
|
|
131
|
+
if (issues.length === 0) {
|
|
132
|
+
return;
|
|
133
|
+
}
|
|
134
|
+
logger.warn(`⚠️ WARNING: Found file: dependencies that should not be committed during ${context}:`);
|
|
135
|
+
for (const issue of issues){
|
|
136
|
+
logger.warn(` 📄 ${issue.packagePath}:`);
|
|
137
|
+
for (const dep of issue.dependencies){
|
|
138
|
+
logger.warn(` - ${dep.name}: ${dep.version} (${dep.dependencyType})`);
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
logger.warn('');
|
|
142
|
+
};
|
|
143
|
+
/**
|
|
144
|
+
* Provides suggestions for resolving file dependency issues
|
|
145
|
+
* @param hasUnlinkCapability Whether the current context supports unlinking
|
|
146
|
+
*/ const logFileDependencySuggestions = (hasUnlinkCapability = true)=>{
|
|
147
|
+
const logger = getLogger();
|
|
148
|
+
logger.warn('💡 To resolve this:');
|
|
149
|
+
if (hasUnlinkCapability) {
|
|
150
|
+
logger.warn(' 1. Run "kodrdriv unlink" to restore registry versions');
|
|
151
|
+
logger.warn(' 2. Complete your commit');
|
|
152
|
+
logger.warn(' 3. Run "kodrdriv link" again for local development');
|
|
153
|
+
} else {
|
|
154
|
+
logger.warn(' 1. Manually restore registry versions in package.json files');
|
|
155
|
+
logger.warn(' 2. Complete your commit');
|
|
156
|
+
logger.warn(' 3. Re-link your local dependencies');
|
|
157
|
+
}
|
|
158
|
+
logger.warn('');
|
|
159
|
+
logger.warn(' Or to bypass this check:');
|
|
160
|
+
logger.warn(' - Add --skip-file-check flag to your command');
|
|
161
|
+
logger.warn(' - Or use git commit --no-verify to skip all hooks');
|
|
162
|
+
logger.warn('');
|
|
163
|
+
};
|
|
164
|
+
|
|
165
|
+
export { checkForFileDependencies, logFileDependencySuggestions, logFileDependencyWarning };
|
|
166
|
+
//# sourceMappingURL=safety.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"safety.js","sources":["../../src/util/safety.ts"],"sourcesContent":["import path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from './validation';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\ninterface FileDependencyIssue {\n packagePath: string;\n dependencies: Array<{\n name: string;\n version: string;\n dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies';\n }>;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\nconst findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const packageJsonFiles: PackageJsonLocation[] = [];\n\n const scanDirectory = async (currentDir: string, depth: number = 0): Promise<void> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return;\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return;\n }\n\n const items = await storage.listFiles(currentDir);\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n const relativePath = path.relative(rootDir, currentDir);\n\n packageJsonFiles.push({\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n });\n\n logger.debug(`Found 
package.json at: ${relativePath || '.'}`);\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n }\n }\n\n // Scan subdirectories, excluding build/generated directories\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n await scanDirectory(itemPath, depth + 1);\n }\n } catch (error: any) {\n // Skip directories that can't be accessed\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n continue;\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n }\n };\n\n await scanDirectory(rootDir);\n\n logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);\n return packageJsonFiles;\n};\n\n/**\n * Checks for file: dependencies in package.json files that should not be committed\n * @param storage Storage utility instance\n * @param rootDir Root directory to scan (defaults to current working directory)\n * @returns Array of issues found, empty array if no issues\n */\nexport const checkForFileDependencies = async (storage: any, rootDir: string = process.cwd()): Promise<FileDependencyIssue[]> => {\n const logger = getLogger();\n const issues: FileDependencyIssue[] = [];\n\n try {\n const packageJsonFiles = await findAllPackageJsonFiles(rootDir, storage);\n\n for (const { packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: Array<{name: string, version: string, dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies'}> = [];\n\n // Check all dependency types for file: paths\n const dependencyChecks = [\n { deps: packageJson.dependencies, type: 'dependencies' as const },\n { deps: packageJson.devDependencies, type: 'devDependencies' as const },\n { deps: packageJson.peerDependencies, type: 'peerDependencies' as const }\n ];\n\n for (const { 
deps, type } of dependencyChecks) {\n if (deps) {\n for (const [name, version] of Object.entries(deps)) {\n if (version.startsWith('file:')) {\n fileDeps.push({ name, version, dependencyType: type });\n }\n }\n }\n }\n\n if (fileDeps.length > 0) {\n issues.push({\n packagePath: relativePath,\n dependencies: fileDeps\n });\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to check for file dependencies: ${error.message}`);\n }\n\n return issues;\n};\n\n/**\n * Logs file dependency issues in a user-friendly format\n * @param issues Array of file dependency issues\n * @param context Context for the warning (e.g., 'commit', 'link check')\n */\nexport const logFileDependencyWarning = (issues: FileDependencyIssue[], context: string = 'operation'): void => {\n const logger = getLogger();\n\n if (issues.length === 0) {\n return;\n }\n\n logger.warn(`⚠️ WARNING: Found file: dependencies that should not be committed during ${context}:`);\n for (const issue of issues) {\n logger.warn(` 📄 ${issue.packagePath}:`);\n for (const dep of issue.dependencies) {\n logger.warn(` - ${dep.name}: ${dep.version} (${dep.dependencyType})`);\n }\n }\n logger.warn('');\n};\n\n/**\n * Provides suggestions for resolving file dependency issues\n * @param hasUnlinkCapability Whether the current context supports unlinking\n */\nexport const logFileDependencySuggestions = (hasUnlinkCapability: boolean = true): void => {\n const logger = getLogger();\n\n logger.warn('💡 To resolve this:');\n if (hasUnlinkCapability) {\n logger.warn(' 1. Run \"kodrdriv unlink\" to restore registry versions');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. Run \"kodrdriv link\" again for local development');\n } else {\n logger.warn(' 1. Manually restore registry versions in package.json files');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. 
Re-link your local dependencies');\n }\n logger.warn('');\n logger.warn(' Or to bypass this check:');\n logger.warn(' - Add --skip-file-check flag to your command');\n logger.warn(' - Or use git commit --no-verify to skip all hooks');\n logger.warn('');\n};\n"],"names":["EXCLUDED_DIRECTORIES","findAllPackageJsonFiles","rootDir","storage","logger","getLogger","packageJsonFiles","scanDirectory","currentDir","depth","exists","isDirectory","items","listFiles","includes","packageJsonPath","path","join","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","relative","push","debug","error","message","item","itemPath","length","checkForFileDependencies","process","cwd","issues","fileDeps","dependencyChecks","deps","dependencies","type","devDependencies","peerDependencies","name","version","Object","entries","startsWith","dependencyType","packagePath","logFileDependencyWarning","context","warn","issue","dep","logFileDependencySuggestions","hasUnlinkCapability"],"mappings":";;;;AA0BA,MAAMA,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED,MAAMC,uBAAAA,GAA0B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACpD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,mBAA0C,EAAE;AAElD,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE9D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA;AACJ,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAMN,OAAAA,CAAQO,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAML,OAAAA,CAAQQ,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA;AACJ,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMT,OAAAA,CAAQU,SAAS,CAACL,UAAAA,CAAAA;;YAGtC,IAAII,KAAAA,CAAME,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAMC,eAAAA,GAAkBC,IAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAY,cAAA,CAAA;gBAC9C,IAAI;AACA,oBAAA,MAAMU,kBAAAA,GAAqB,MAAMf,OAAAA,CAAQgB,QAAQ,CAACJ,eAAAA,EAAiB,OAAA,CAAA;oBACnE,MAAMK,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBH,eAAAA,CAAAA;oBACjD,MAAMO,WAAAA,GAAcC,oBAAo
BH,MAAAA,EAAQL,eAAAA,CAAAA;AAChD,oBAAA,MAAMS,YAAAA,GAAeR,IAAAA,CAAKS,QAAQ,CAACvB,OAAAA,EAASM,UAAAA,CAAAA;AAE5CF,oBAAAA,gBAAAA,CAAiBoB,IAAI,CAAC;wBAClBV,IAAAA,EAAMD,eAAAA;AACNO,wBAAAA,WAAAA;AACAE,wBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,qBAAA,CAAA;AAEApB,oBAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uBAAuB,EAAEH,gBAAgB,GAAA,CAAA,CAAK,CAAA;AAChE,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEa,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AACvF,gBAAA;AACJ,YAAA;;YAGA,KAAK,MAAMC,QAAQlB,KAAAA,CAAO;gBACtB,IAAIZ,oBAAAA,CAAqBc,QAAQ,CAACgB,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAWf,IAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAYsB,IAAAA,CAAAA;gBACvC,IAAI;AACA,oBAAA,IAAI,MAAM3B,OAAAA,CAAQQ,WAAW,CAACoB,QAAAA,CAAAA,EAAW;wBACrC,MAAMxB,aAAAA,CAAcwB,UAAUtB,KAAAA,GAAQ,CAAA,CAAA;AAC1C,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOmB,KAAAA,EAAY;;oBAEjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,kBAAkB,EAAEI,SAAS,EAAE,EAAEH,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC9D,oBAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOD,KAAAA,EAAY;YACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,yBAAyB,EAAEnB,WAAW,EAAE,EAAEoB,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC3E,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMtB,aAAAA,CAAcL,OAAAA,CAAAA;IAEpBE,MAAAA,CAAOuB,KAAK,CAAC,CAAC,MAAM,EAAErB,gBAAAA,CAAiB0B,MAAM,CAAC,uCAAuC,CAAC,CAAA;IACtF,OAAO1B,gBAAAA;AACX,CAAA;AAEA;;;;;UAMa2B,wBAAAA,GAA2B,OAAO9B,SAAcD,OAAAA,GAAkBgC,OAAAA,CAAQC,GAAG,EAAE,GAAA;AACxF,IAAA,MAAM/B,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM+B,SAAgC,EAAE;IAExC,IAAI;QACA,MAAM9B,gBAAAA,GAAmB,MAAML,uBAAAA,CAAwBC,OAAAA,EAASC,OAAAA,CAAAA;AAEhE,QAAA,KAAK,MAAM,EAAEmB,WAAW,EAAEE,YAAY,EAAE,IAAIlB,gBAAAA,CAAkB;AAC1D,YAAA,MAAM+B,WAA4H,EAAE;;AAGpI,YAAA,MAAMC,gBAAAA,GAAmB;AACrB,gBAAA;AAAEC,oBAAAA,IAAAA,EAAMjB,YAAYkB,YAAY;oBAAEC,IAAAA,EAAM;AAAwB,iBAAA;AAChE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYoB,eAAe;oBAAED,IAAAA,EAAM;AAA2B,iBAAA;AACtE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYqB,gBAAgB;oBAAEF,IAAAA,EAAM;AAA4B;AAC3E,aAAA;AAED,YAAA,KAAK,MAAM,EAAEF,IAAI,EAAEE,IAAI,EAAE,IAAIH,gBAAAA,CAAkB;AAC3C,gBAAA,
IAAIC,IAAAA,EAAM;oBACN,KAAK,MAAM,CAACK,IAAAA,EAAMC,OAAAA,CAAQ,IAAIC,MAAAA,CAAOC,OAAO,CAACR,IAAAA,CAAAA,CAAO;wBAChD,IAAIM,OAAAA,CAAQG,UAAU,CAAC,OAAA,CAAA,EAAU;AAC7BX,4BAAAA,QAAAA,CAASX,IAAI,CAAC;AAAEkB,gCAAAA,IAAAA;AAAMC,gCAAAA,OAAAA;gCAASI,cAAAA,EAAgBR;AAAK,6BAAA,CAAA;AACxD,wBAAA;AACJ,oBAAA;AACJ,gBAAA;AACJ,YAAA;YAEA,IAAIJ,QAAAA,CAASL,MAAM,GAAG,CAAA,EAAG;AACrBI,gBAAAA,MAAAA,CAAOV,IAAI,CAAC;oBACRwB,WAAAA,EAAa1B,YAAAA;oBACbgB,YAAAA,EAAcH;AAClB,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA,CAAE,OAAOT,KAAAA,EAAY;AACjBxB,QAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uCAAuC,EAAEC,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC1E,IAAA;IAEA,OAAOO,MAAAA;AACX;AAEA;;;;AAIC,IACM,MAAMe,wBAAAA,GAA2B,CAACf,MAAAA,EAA+BgB,UAAkB,WAAW,GAAA;AACjG,IAAA,MAAMhD,MAAAA,GAASC,SAAAA,EAAAA;IAEf,IAAI+B,MAAAA,CAAOJ,MAAM,KAAK,CAAA,EAAG;AACrB,QAAA;AACJ,IAAA;AAEA5B,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,0EAA0E,EAAED,OAAAA,CAAQ,CAAC,CAAC,CAAA;IACnG,KAAK,MAAME,SAASlB,MAAAA,CAAQ;QACxBhC,MAAAA,CAAOiD,IAAI,CAAC,CAAC,KAAK,EAAEC,KAAAA,CAAMJ,WAAW,CAAC,CAAC,CAAC,CAAA;AACxC,QAAA,KAAK,MAAMK,GAAAA,IAAOD,KAAAA,CAAMd,YAAY,CAAE;AAClCpC,YAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,MAAM,EAAEE,GAAAA,CAAIX,IAAI,CAAC,EAAE,EAAEW,GAAAA,CAAIV,OAAO,CAAC,EAAE,EAAEU,IAAIN,cAAc,CAAC,CAAC,CAAC,CAAA;AAC3E,QAAA;AACJ,IAAA;AACA7C,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;AAEA;;;AAGC,IACM,MAAMG,4BAAAA,GAA+B,CAACC,sBAA+B,IAAI,GAAA;AAC5E,IAAA,MAAMrD,MAAAA,GAASC,SAAAA,EAAAA;AAEfD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,qBAAA,CAAA;AACZ,IAAA,IAAII,mBAAAA,EAAqB;AACrBrD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,0DAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uDAAA,CAAA;IAChB,CAAA,MAAO;AACHjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,gEAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uCAAA,CAAA;AAChB,IAAA;AACAjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,6BAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,iDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,sDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,
EAAA,CAAA;AAChB;;;;"}
|
package/dist/util/storage.js
CHANGED
|
@@ -99,6 +99,15 @@ const create = (params)=>{
|
|
|
99
99
|
const rename = async (oldPath, newPath)=>{
|
|
100
100
|
await fs.promises.rename(oldPath, newPath);
|
|
101
101
|
};
|
|
102
|
+
const deleteFile = async (path)=>{
|
|
103
|
+
try {
|
|
104
|
+
if (await exists(path)) {
|
|
105
|
+
await fs.promises.unlink(path);
|
|
106
|
+
}
|
|
107
|
+
} catch (deleteError) {
|
|
108
|
+
throw new Error(`Failed to delete file ${path}: ${deleteError.message} ${deleteError.stack}`);
|
|
109
|
+
}
|
|
110
|
+
};
|
|
102
111
|
const forEachFileIn = async (directory, callback, options = {
|
|
103
112
|
pattern: '*.*'
|
|
104
113
|
})=>{
|
|
@@ -139,6 +148,7 @@ const create = (params)=>{
|
|
|
139
148
|
readStream,
|
|
140
149
|
writeFile,
|
|
141
150
|
rename,
|
|
151
|
+
deleteFile,
|
|
142
152
|
forEachFileIn,
|
|
143
153
|
hashFile,
|
|
144
154
|
listFiles,
|
package/dist/util/storage.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"storage.js","sources":["../../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport { glob } from 'glob';\nimport path from 'path';\nimport crypto from 'crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n *\n * Additionally, abstracting storage operations allows for future flexibility -\n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n ensureDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n removeDirectory: (path: string) => Promise<void>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || console.log;\n\n const exists = async (path: 
string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n // Log at debug level since this is expected when scanning directories\n // that contain both files and directories\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n // Log removed since this is expected when checking file types\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async (path: string): Promise<void> => {\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} 
${mkdirError.stack}`);\n }\n }\n\n const ensureDirectory = async (path: string): Promise<void> => {\n if (!(await exists(path))) {\n await createDirectory(path);\n }\n }\n\n const removeDirectory = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.rm(path, { recursive: true, force: true });\n }\n } catch (rmError: any) {\n throw new Error(`Failed to remove directory ${path}: ${rmError.message} ${rmError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const rename = async (oldPath: string, newPath: string): Promise<void> => {\n await fs.promises.rename(oldPath, newPath);\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n }\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n 
isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n ensureDirectory,\n readFile,\n readStream,\n writeFile,\n rename,\n forEachFileIn,\n hashFile,\n listFiles,\n removeDirectory,\n };\n}\n"],"names":["create","params","log","console","exists","path","fs","promises","stat","error","isDirectory","stats","isFile","isReadable","access","constants","R_OK","message","stack","isWritable","W_OK","isFileReadable","isDirectoryWritable","isDirectoryReadable","createDirectory","mkdir","recursive","mkdirError","Error","ensureDirectory","removeDirectory","rm","force","rmError","readFile","encoding","writeFile","data","rename","oldPath","newPath","forEachFileIn","directory","callback","options","pattern","files","glob","cwd","nodir","file","join","err","readStream","createReadStream","hashFile","length","crypto","createHash","update","digest","slice","listFiles","readdir"],"mappings":";;;;;AAAA;AAmCO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;;AAGnB,IAAA,MAAMC,GAAAA,GAAMD,MAAAA,CAAOC,GAAG,IAAIC,QAAQD,GAAG;AAErC,IAAA,MAAME,SAAS,OAAOC,IAAAA,GAAAA;QAClB,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;YACvB,OAAO,IAAA;;AAEX,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB,OAAO,KAAA;AACX,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMC,cAAc,OAAOL,IAAAA,GAAAA;AACvB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMD,WAAW,EAAA,EAAI;;;YAGtB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAME,SAAS,OAAOP,IAAAA,GAAAA;AAClB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMC,MAAM,EAAA,EAAI;;YAEjB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOR,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACC,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOP,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOd,
IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACK,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOX,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMG,iBAAiB,OAAOhB,IAAAA,GAAAA;AAC1B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMO,MAAAA,CAAOP,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AACxE,IAAA,CAAA;AAEA,IAAA,MAAMiB,sBAAsB,OAAOjB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMc,UAAAA,CAAWd,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMkB,sBAAsB,OAAOlB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMmB,kBAAkB,OAAOnB,IAAAA,GAAAA;QAC3B,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACkB,KAAK,CAACpB,IAAAA,EAAM;gBAAEqB,SAAAA,EAAW;AAAK,aAAA,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOC,UAAAA,EAAiB;AACtB,YAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAAA,CAAK,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAA,CAAE,CAAA;AAC1G,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMW,kBAAkB,OAAOxB,IAAAA,GAAAA;QAC3B,IAAI,CAAE,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,EAAQ;AACvB,YAAA,MAAMmB,eAAAA,CAAgBnB,IAAAA,CAAAA;AAC1B,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMyB,kBAAkB,OAAOzB,IAAAA,GAAAA;QAC3B,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACwB,EAAE,CAAC1B,IAAAA,EAAM;oBAAEqB,SAAAA,EAAW,IAAA;oBAAMM,KAAAA,EAAO;AAAK,iBAAA,CAAA;AAC9D,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOC,OAAAA,EAAc;AACnB,YAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,2BAA2B,EAAEvB,IAAAA,CAAK,EAAE,EAAE4B,OAAAA,CAAQhB,OAAO,CAAC,CAAC,EAAEgB,OAAAA,CAAQf,KAAK,CAAA,CAAE,CAAA;AAC7F,QAAA;AACJ,IAAA,CAAA;IAEA,MAAMgB,QAAAA,GAAW,OAAO7B,IAAAA,EAAc8B,QAAAA,GAAAA;AAClC,QAAA,OAAO,MAAM7B,EAAAA,CAAGC,QAAQ,CAAC2B,QAAQ,CAAC7B,IAAAA,EAAM;YAAE8B,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMC,SAAAA,GAAY,OAAO/B,IAAAA,EAAcgC,IAA
AA,EAAuBF,QAAAA,GAAAA;AAC1D,QAAA,MAAM7B,GAAGC,QAAQ,CAAC6B,SAAS,CAAC/B,MAAMgC,IAAAA,EAAM;YAAEF,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMG,MAAAA,GAAS,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACnC,QAAA,MAAMlC,EAAAA,CAAGC,QAAQ,CAAC+B,MAAM,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACtC,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,SAAAA,EAAmBC,QAAAA,EAA2CC,OAAAA,GAA0C;QAAEC,OAAAA,EAAS;KAAO,GAAA;QACnJ,IAAI;AACA,YAAA,MAAMC,KAAAA,GAAQ,MAAMC,IAAAA,CAAKH,OAAAA,CAAQC,OAAO,EAAE;gBAAEG,GAAAA,EAAKN,SAAAA;gBAAWO,KAAAA,EAAO;AAAK,aAAA,CAAA;YACxE,KAAK,MAAMC,QAAQJ,KAAAA,CAAO;AACtB,gBAAA,MAAMH,QAAAA,CAAStC,IAAAA,CAAK8C,IAAI,CAACT,SAAAA,EAAWQ,IAAAA,CAAAA,CAAAA;AACxC,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOE,GAAAA,EAAU;AACf,YAAA,MAAM,IAAIxB,KAAAA,CAAM,CAAC,uBAAuB,EAAEgB,OAAAA,CAAQC,OAAO,CAAC,IAAI,EAAEH,SAAAA,CAAU,EAAE,EAAEU,GAAAA,CAAInC,OAAO,CAAA,CAAE,CAAA;AAC/F,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMoC,aAAa,OAAOhD,IAAAA,GAAAA;QACtB,OAAOC,EAAAA,CAAGgD,gBAAgB,CAACjD,IAAAA,CAAAA;AAC/B,IAAA,CAAA;IAEA,MAAMkD,QAAAA,GAAW,OAAOlD,IAAAA,EAAcmD,MAAAA,GAAAA;QAClC,MAAMN,IAAAA,GAAO,MAAMhB,QAAAA,CAAS7B,IAAAA,EAAM,MAAA,CAAA;AAClC,QAAA,OAAOoD,MAAAA,CAAOC,UAAU,CAAC,QAAA,CAAA,CAAUC,MAAM,CAACT,IAAAA,CAAAA,CAAMU,MAAM,CAAC,KAAA,CAAA,CAAOC,KAAK,CAAC,CAAA,EAAGL,MAAAA,CAAAA;AAC3E,IAAA,CAAA;AAEA,IAAA,MAAMM,YAAY,OAAOpB,SAAAA,GAAAA;AACrB,QAAA,OAAO,MAAMpC,EAAAA,CAAGC,QAAQ,CAACwD,OAAO,CAACrB,SAAAA,CAAAA;AACrC,IAAA,CAAA;IAEA,OAAO;AACHtC,QAAAA,MAAAA;AACAM,QAAAA,WAAAA;AACAE,QAAAA,MAAAA;AACAC,QAAAA,UAAAA;AACAM,QAAAA,UAAAA;AACAE,QAAAA,cAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,eAAAA;AACAK,QAAAA,eAAAA;AACAK,QAAAA,QAAAA;AACAmB,QAAAA,UAAAA;AACAjB,QAAAA,SAAAA;AACAE,QAAAA,MAAAA;AACAG,QAAAA,aAAAA;AACAc,QAAAA,QAAAA;AACAO,QAAAA,SAAAA;AACAhC,QAAAA;AACJ,KAAA;AACJ;;;;"}
|
|
1
|
+
{"version":3,"file":"storage.js","sources":["../../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport { glob } from 'glob';\nimport path from 'path';\nimport crypto from 'crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n *\n * Additionally, abstracting storage operations allows for future flexibility -\n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n ensureDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n deleteFile: (path: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n removeDirectory: (path: string) => Promise<void>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || 
console.log;\n\n const exists = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n // Log at debug level since this is expected when scanning directories\n // that contain both files and directories\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n // Log removed since this is expected when checking file types\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async (path: string): Promise<void> => {\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory 
${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n }\n\n const ensureDirectory = async (path: string): Promise<void> => {\n if (!(await exists(path))) {\n await createDirectory(path);\n }\n }\n\n const removeDirectory = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.rm(path, { recursive: true, force: true });\n }\n } catch (rmError: any) {\n throw new Error(`Failed to remove directory ${path}: ${rmError.message} ${rmError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const rename = async (oldPath: string, newPath: string): Promise<void> => {\n await fs.promises.rename(oldPath, newPath);\n }\n\n const deleteFile = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.unlink(path);\n }\n } catch (deleteError: any) {\n throw new Error(`Failed to delete file ${path}: ${deleteError.message} ${deleteError.stack}`);\n }\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n }\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return 
crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n ensureDirectory,\n readFile,\n readStream,\n writeFile,\n rename,\n deleteFile,\n forEachFileIn,\n hashFile,\n listFiles,\n removeDirectory,\n };\n}\n"],"names":["create","params","log","console","exists","path","fs","promises","stat","error","isDirectory","stats","isFile","isReadable","access","constants","R_OK","message","stack","isWritable","W_OK","isFileReadable","isDirectoryWritable","isDirectoryReadable","createDirectory","mkdir","recursive","mkdirError","Error","ensureDirectory","removeDirectory","rm","force","rmError","readFile","encoding","writeFile","data","rename","oldPath","newPath","deleteFile","unlink","deleteError","forEachFileIn","directory","callback","options","pattern","files","glob","cwd","nodir","file","join","err","readStream","createReadStream","hashFile","length","crypto","createHash","update","digest","slice","listFiles","readdir"],"mappings":";;;;;AAAA;AAoCO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;;AAGnB,IAAA,MAAMC,GAAAA,GAAMD,MAAAA,CAAOC,GAAG,IAAIC,QAAQD,GAAG;AAErC,IAAA,MAAME,SAAS,OAAOC,IAAAA,GAAAA;QAClB,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;YACvB,OAAO,IAAA;;AAEX,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB,OAAO,KAAA;AACX,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMC,cAAc,OAAOL,IAAAA,GAAAA;AACvB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMD,WAAW,EAAA,EAAI;;;YAGtB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAME,SAAS,OAAOP,IAAAA,GAAAA;AAClB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMC,MAAM,EAAA,EAAI;;YAEjB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MA
AMC,aAAa,OAAOR,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACC,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOP,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOd,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACK,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOX,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMG,iBAAiB,OAAOhB,IAAAA,GAAAA;AAC1B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMO,MAAAA,CAAOP,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AACxE,IAAA,CAAA;AAEA,IAAA,MAAMiB,sBAAsB,OAAOjB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMc,UAAAA,CAAWd,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMkB,sBAAsB,OAAOlB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMmB,kBAAkB,OAAOnB,IAAAA,GAAAA;QAC3B,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACkB,KAAK,CAACpB,IAAAA,EAAM;gBAAEqB,SAAAA,EAAW;AAAK,aAAA,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOC,UAAAA,EAAiB;AACtB,YAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAAA,CAAK,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAA,CAAE,CAAA;AAC1G,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMW,kBAAkB,OAAOxB,IAAAA,GAAAA;QAC3B,IAAI,CAAE,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,EAAQ;AACvB,YAAA,MAAMmB,eAAAA,CAAgBnB,IAAAA,CAAAA;AAC1B,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMyB,kBAAkB,OAAOzB,IAAAA,GAAAA;QAC3B,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACwB,EAAE,CAAC1B,IAAAA,EAAM;oBAAEqB,SAAAA,EAAW,IAAA;oBAAMM,KAAAA,EAAO;AAAK,iBAAA,CAAA;AAC9D,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOC,OAAAA,EAAc;AACnB,YAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,2BAA2
B,EAAEvB,IAAAA,CAAK,EAAE,EAAE4B,OAAAA,CAAQhB,OAAO,CAAC,CAAC,EAAEgB,OAAAA,CAAQf,KAAK,CAAA,CAAE,CAAA;AAC7F,QAAA;AACJ,IAAA,CAAA;IAEA,MAAMgB,QAAAA,GAAW,OAAO7B,IAAAA,EAAc8B,QAAAA,GAAAA;AAClC,QAAA,OAAO,MAAM7B,EAAAA,CAAGC,QAAQ,CAAC2B,QAAQ,CAAC7B,IAAAA,EAAM;YAAE8B,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMC,SAAAA,GAAY,OAAO/B,IAAAA,EAAcgC,IAAAA,EAAuBF,QAAAA,GAAAA;AAC1D,QAAA,MAAM7B,GAAGC,QAAQ,CAAC6B,SAAS,CAAC/B,MAAMgC,IAAAA,EAAM;YAAEF,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMG,MAAAA,GAAS,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACnC,QAAA,MAAMlC,EAAAA,CAAGC,QAAQ,CAAC+B,MAAM,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACtC,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOpC,IAAAA,GAAAA;QACtB,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACmC,MAAM,CAACrC,IAAAA,CAAAA;AAC7B,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOsC,WAAAA,EAAkB;AACvB,YAAA,MAAM,IAAIf,KAAAA,CAAM,CAAC,sBAAsB,EAAEvB,IAAAA,CAAK,EAAE,EAAEsC,WAAAA,CAAY1B,OAAO,CAAC,CAAC,EAAE0B,WAAAA,CAAYzB,KAAK,CAAA,CAAE,CAAA;AAChG,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAM0B,aAAAA,GAAgB,OAAOC,SAAAA,EAAmBC,QAAAA,EAA2CC,OAAAA,GAA0C;QAAEC,OAAAA,EAAS;KAAO,GAAA;QACnJ,IAAI;AACA,YAAA,MAAMC,KAAAA,GAAQ,MAAMC,IAAAA,CAAKH,OAAAA,CAAQC,OAAO,EAAE;gBAAEG,GAAAA,EAAKN,SAAAA;gBAAWO,KAAAA,EAAO;AAAK,aAAA,CAAA;YACxE,KAAK,MAAMC,QAAQJ,KAAAA,CAAO;AACtB,gBAAA,MAAMH,QAAAA,CAASzC,IAAAA,CAAKiD,IAAI,CAACT,SAAAA,EAAWQ,IAAAA,CAAAA,CAAAA;AACxC,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOE,GAAAA,EAAU;AACf,YAAA,MAAM,IAAI3B,KAAAA,CAAM,CAAC,uBAAuB,EAAEmB,OAAAA,CAAQC,OAAO,CAAC,IAAI,EAAEH,SAAAA,CAAU,EAAE,EAAEU,GAAAA,CAAItC,OAAO,CAAA,CAAE,CAAA;AAC/F,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMuC,aAAa,OAAOnD,IAAAA,GAAAA;QACtB,OAAOC,EAAAA,CAAGmD,gBAAgB,CAACpD,IAAAA,CAAAA;AAC/B,IAAA,CAAA;IAEA,MAAMqD,QAAAA,GAAW,OAAOrD,IAAAA,EAAcsD,MAAAA,GAAAA;QAClC,MAAMN,IAAAA,GAAO,MAAMnB,QAAAA,CAAS7B,IAAAA,EAAM,MAAA,CAAA;AAClC,QAAA,OAAOuD,MAAAA,CAAOC,UAAU,CAAC,QAAA,CAAA,CAAUC,MAAM,CAACT,IAAAA,CAAAA,CAAMU,MAAM,CAAC,KAAA,CAAA,CAAOC,KAAK,CAAC,CAAA,EAAGL,MAAAA,CAAAA;AAC3E,IAAA,CAAA;AAEA,IAAA,MAAMM,YAAY,OAAOpB,SAAAA,GAAAA
;AACrB,QAAA,OAAO,MAAMvC,EAAAA,CAAGC,QAAQ,CAAC2D,OAAO,CAACrB,SAAAA,CAAAA;AACrC,IAAA,CAAA;IAEA,OAAO;AACHzC,QAAAA,MAAAA;AACAM,QAAAA,WAAAA;AACAE,QAAAA,MAAAA;AACAC,QAAAA,UAAAA;AACAM,QAAAA,UAAAA;AACAE,QAAAA,cAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,eAAAA;AACAK,QAAAA,eAAAA;AACAK,QAAAA,QAAAA;AACAsB,QAAAA,UAAAA;AACApB,QAAAA,SAAAA;AACAE,QAAAA,MAAAA;AACAG,QAAAA,UAAAA;AACAG,QAAAA,aAAAA;AACAc,QAAAA,QAAAA;AACAO,QAAAA,SAAAA;AACAnC,QAAAA;AACJ,KAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
 * Runtime validation utilities for safe type handling
 */ /**
 * Validates and safely casts unknown data to the ReleaseSummary shape.
 * A valid summary is a non-null object whose `title` and `body` are strings.
 *
 * @param data - untrusted value (e.g. parsed JSON) to validate
 * @returns the same object, now known to satisfy ReleaseSummary
 * @throws Error when data is not an object or a required field is not a string
 */ const validateReleaseSummary = (data)=>{
    if (data === null || data === undefined || typeof data !== 'object') {
        throw new Error('Invalid release summary: not an object');
    }
    // Check required string fields in a fixed order so error messages are stable.
    for (const field of ['title', 'body']){
        if (typeof data[field] !== 'string') {
            throw new Error(`Invalid release summary: ${field} must be a string`);
        }
    }
    return data;
};
|
|
17
|
+
/**
 * Validates and safely casts data to LinkBackup type.
 * A valid backup is a plain object mapping package names to entries that each
 * carry string `originalVersion`, `dependencyType`, and `relativePath` fields.
 *
 * @param data - untrusted value (e.g. parsed JSON) to validate
 * @returns the same object, now known to satisfy LinkBackup
 * @throws Error when data (or any entry) is not a plain object, or a required
 *         entry field is missing or not a string
 */ const validateLinkBackup = (data)=>{
    // Reject arrays explicitly: typeof [] === 'object', so without this check
    // a JSON array (even []) would slip through and validate as a backup map.
    if (!data || typeof data !== 'object' || Array.isArray(data)) {
        throw new Error('Invalid link backup: not an object');
    }
    // Validate each backup entry
    for (const [key, value] of Object.entries(data)){
        if (!value || typeof value !== 'object' || Array.isArray(value)) {
            throw new Error(`Invalid link backup entry for ${key}: not an object`);
        }
        for (const field of ['originalVersion', 'dependencyType', 'relativePath']){
            if (typeof value[field] !== 'string') {
                throw new Error(`Invalid link backup entry for ${key}: ${field} must be a string`);
            }
        }
    }
    return data;
};
|
|
41
|
+
/**
 * Safely parses JSON with error handling.
 * Distinguishes a genuine parse failure from a successful parse whose result
 * is null/undefined — previously the null check threw inside the try block,
 * so it was caught and misleadingly re-wrapped as "Failed to parse JSON".
 *
 * @param jsonString - raw JSON text to parse
 * @param context - optional label appended to error messages for diagnostics
 * @returns the parsed value, guaranteed non-null
 * @throws Error when the text is not valid JSON, or parses to null/undefined
 */ const safeJsonParse = (jsonString, context)=>{
    const contextStr = context ? ` (${context})` : '';
    let parsed;
    try {
        parsed = JSON.parse(jsonString);
    } catch (error) {
        throw new Error(`Failed to parse JSON${contextStr}: ${error instanceof Error ? error.message : 'Unknown error'}`);
    }
    if (parsed === null || parsed === undefined) {
        throw new Error(`Parsed JSON is null or undefined${contextStr}`);
    }
    return parsed;
};
|
|
55
|
+
/**
 * Validates that a value is a non-empty string.
 * Whitespace-only strings count as empty.
 *
 * @param value - untrusted value to validate
 * @param fieldName - name used in error messages
 * @returns the original string, unchanged
 * @throws Error when value is not a string or is blank
 */ const validateString = (value, fieldName)=>{
    const actualType = typeof value;
    if (actualType !== 'string') {
        throw new Error(`${fieldName} must be a string, got ${actualType}`);
    }
    if (!value.trim()) {
        throw new Error(`${fieldName} cannot be empty`);
    }
    return value;
};
|
|
66
|
+
/**
 * Validates package.json structure has basic required fields.
 *
 * @param data - untrusted parsed package.json content
 * @param context - optional label appended to error messages for diagnostics
 * @param requireName - when true (default), `name` must be a string
 * @returns the same object, now known to be a valid package.json shape
 * @throws Error when data is not a plain object, or name is required but missing
 */ const validatePackageJson = (data, context, requireName = true)=>{
    // Hoisted so the suffix is computed once for every error path.
    const contextStr = context ? ` (${context})` : '';
    // Reject arrays explicitly: typeof [] === 'object', so with requireName=false
    // an array would previously pass validation entirely.
    if (!data || typeof data !== 'object' || Array.isArray(data)) {
        throw new Error(`Invalid package.json${contextStr}: not an object`);
    }
    if (requireName && typeof data.name !== 'string') {
        throw new Error(`Invalid package.json${contextStr}: name must be a string`);
    }
    return data;
};
|
|
79
|
+
|
|
80
|
+
export { safeJsonParse, validateLinkBackup, validatePackageJson, validateReleaseSummary, validateString };
|
|
81
|
+
//# sourceMappingURL=validation.js.map
|