@eldrforge/kodrdriv 0.1.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/README.md +1 -0
  2. package/dist/application.js +25 -3
  3. package/dist/application.js.map +1 -1
  4. package/dist/arguments.js +103 -18
  5. package/dist/arguments.js.map +1 -1
  6. package/dist/commands/audio-commit.js +28 -7
  7. package/dist/commands/audio-commit.js.map +1 -1
  8. package/dist/commands/audio-review.js +28 -7
  9. package/dist/commands/audio-review.js.map +1 -1
  10. package/dist/commands/commit.js +75 -18
  11. package/dist/commands/commit.js.map +1 -1
  12. package/dist/commands/development.js +264 -0
  13. package/dist/commands/development.js.map +1 -0
  14. package/dist/commands/link.js +356 -181
  15. package/dist/commands/link.js.map +1 -1
  16. package/dist/commands/publish.js +166 -32
  17. package/dist/commands/publish.js.map +1 -1
  18. package/dist/commands/release.js +78 -13
  19. package/dist/commands/release.js.map +1 -1
  20. package/dist/commands/review.js +10 -6
  21. package/dist/commands/review.js.map +1 -1
  22. package/dist/commands/tree.js +450 -24
  23. package/dist/commands/tree.js.map +1 -1
  24. package/dist/commands/unlink.js +267 -372
  25. package/dist/commands/unlink.js.map +1 -1
  26. package/dist/commands/versions.js +224 -0
  27. package/dist/commands/versions.js.map +1 -0
  28. package/dist/constants.js +29 -10
  29. package/dist/constants.js.map +1 -1
  30. package/dist/content/diff.js.map +1 -1
  31. package/dist/content/files.js +192 -0
  32. package/dist/content/files.js.map +1 -0
  33. package/dist/content/log.js +16 -0
  34. package/dist/content/log.js.map +1 -1
  35. package/dist/main.js +0 -0
  36. package/dist/prompt/commit.js +9 -2
  37. package/dist/prompt/commit.js.map +1 -1
  38. package/dist/prompt/instructions/commit.md +20 -2
  39. package/dist/prompt/instructions/release.md +27 -10
  40. package/dist/prompt/instructions/review.md +75 -8
  41. package/dist/prompt/release.js +13 -5
  42. package/dist/prompt/release.js.map +1 -1
  43. package/dist/types.js +21 -5
  44. package/dist/types.js.map +1 -1
  45. package/dist/util/child.js +112 -26
  46. package/dist/util/child.js.map +1 -1
  47. package/dist/util/countdown.js +215 -0
  48. package/dist/util/countdown.js.map +1 -0
  49. package/dist/util/general.js +10 -2
  50. package/dist/util/general.js.map +1 -1
  51. package/dist/util/git.js +587 -0
  52. package/dist/util/git.js.map +1 -0
  53. package/dist/util/github.js +519 -3
  54. package/dist/util/github.js.map +1 -1
  55. package/dist/util/interactive.js +245 -79
  56. package/dist/util/interactive.js.map +1 -1
  57. package/dist/util/openai.js +70 -22
  58. package/dist/util/openai.js.map +1 -1
  59. package/dist/util/performance.js +1 -69
  60. package/dist/util/performance.js.map +1 -1
  61. package/dist/util/storage.js +28 -1
  62. package/dist/util/storage.js.map +1 -1
  63. package/dist/util/validation.js +1 -25
  64. package/dist/util/validation.js.map +1 -1
  65. package/package.json +10 -8
  66. package/test-multiline/cli/package.json +8 -0
  67. package/test-multiline/core/package.json +5 -0
  68. package/test-multiline/mobile/package.json +8 -0
  69. package/test-multiline/web/package.json +8 -0
  70. package/dist/util/npmOptimizations.js +0 -174
  71. package/dist/util/npmOptimizations.js.map +0 -1
package/dist/util/performance.js
@@ -129,74 +129,6 @@ const findAllPackageJsonFiles = async (rootDir, storage)=>{
      timer.end(`Found ${packageJsonFiles.length} valid package.json files`);
      return packageJsonFiles;
  };
- // Optimized package scanning with parallel processing
- const scanDirectoryForPackages = async (rootDir, storage)=>{
-     const logger = getLogger();
-     const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);
-     const packageMap = new Map(); // packageName -> relativePath
-     const absoluteRootDir = path__default.resolve(process.cwd(), rootDir);
-     logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);
-     try {
-         // Quick existence and directory check
-         const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);
-         if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {
-             existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);
-             timer.end(`Directory invalid: ${rootDir}`);
-             return packageMap;
-         }
-         existsTimer.end(`Directory verified: ${absoluteRootDir}`);
-         // Get all items and process in parallel
-         const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);
-         const items = await storage.listFiles(absoluteRootDir);
-         listTimer.end(`Listed ${items.length} items`);
-         // Create batched promises for better performance
-         const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem
-         const batches = [];
-         for(let i = 0; i < items.length; i += BATCH_SIZE){
-             const batch = items.slice(i, i + BATCH_SIZE);
-             batches.push(batch);
-         }
-         const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);
-         for (const batch of batches){
-             const batchPromises = batch.map(async (item)=>{
-                 const itemPath = path__default.join(absoluteRootDir, item);
-                 try {
-                     if (await storage.isDirectory(itemPath)) {
-                         const packageJsonPath = path__default.join(itemPath, 'package.json');
-                         if (await storage.exists(packageJsonPath)) {
-                             const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
-                             const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
-                             const packageJson = validatePackageJson(parsed, packageJsonPath);
-                             if (packageJson.name) {
-                                 const relativePath = path__default.relative(process.cwd(), itemPath);
-                                 return {
-                                     name: packageJson.name,
-                                     path: relativePath
-                                 };
-                             }
-                         }
-                     }
-                 } catch (error) {
-                     logger.debug(`Skipped ${itemPath}: ${error.message || error}`);
-                 }
-                 return null;
-             });
-             const batchResults = await Promise.all(batchPromises);
-             for (const result of batchResults){
-                 if (result) {
-                     packageMap.set(result.name, result.path);
-                     logger.debug(`Found package: ${result.name} at ${result.path}`);
-                 }
-             }
-         }
-         processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);
-         logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);
-     } catch (error) {
-         logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);
-     }
-     timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);
-     return packageMap;
- };

- export { PerformanceTimer, batchReadPackageJsonFiles, findAllPackageJsonFiles, scanDirectoryForPackages };
+ export { PerformanceTimer, batchReadPackageJsonFiles, findAllPackageJsonFiles };
  //# sourceMappingURL=performance.js.map
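As the hunk above shows, 1.2.0 removes scanDirectoryForPackages from dist/util/performance.js and only PerformanceTimer, batchReadPackageJsonFiles and findAllPackageJsonFiles remain exported. A consumer that imported the removed helper can rebuild its packageName -> relativePath map from findAllPackageJsonFiles, which is still exported. The sketch below is a hypothetical migration, not code shipped in the package: the dist/ import paths are assumptions about how the module is loaded, and note that, unlike the removed one-level scan, findAllPackageJsonFiles walks subdirectories (depth-limited, node_modules and similar excluded) and returns paths relative to the scanned root rather than to process.cwd().

import { findAllPackageJsonFiles } from '@eldrforge/kodrdriv/dist/util/performance.js';
import { create as createStorage } from '@eldrforge/kodrdriv/dist/util/storage.js';

// Hypothetical replacement for the removed scanDirectoryForPackages helper.
const storage = createStorage({ log: console.log });

const packageMapFor = async (rootDir: string): Promise<Map<string, string>> => {
    const packageMap = new Map<string, string>(); // packageName -> relativePath
    // Recursive scan; each location carries the parsed package.json and its relative path.
    const locations = await findAllPackageJsonFiles(rootDir, storage);
    for (const { packageJson, relativePath } of locations) {
        if (packageJson.name) {
            packageMap.set(packageJson.name, relativePath);
        }
    }
    return packageMap;
};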
package/dist/util/performance.js.map
@@ -1 +1 @@
- {"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from './validation';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n 
foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath = 
path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all dependencies');\n\n const allDependencies: Record<string, string> = 
{};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('⚠️ WARNING: Found file: dependencies that should not be committed:');\n for (const file of filesWithFileDepedencies) {\n logger.warn(` 📄 ${file.path}:`);\n for (const dep of file.dependencies) {\n logger.warn(` - ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('💡 Remember to run \"kodrdriv unlink\" before committing to restore registry versions!');\n logger.warn(' Or add a pre-commit hook to prevent accidental commits of linked dependencies.');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles","scanDirectoryForPackages","packageMap","Map","absoluteRootDir","resolve","process","cwd","existsTimer","listTimer","BATCH_SIZE","batches","i","batch","slice","processTimer","batchPromises","name","batchResults","set","size","warn"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC
,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;AAEA;AACO,MAAMC,wBAAAA,GAA2B,OAAO9C,OAAAA,EAAiBD,OAAAA,GAAAA;AAC5D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;IACf,MAAMC,KAAAA,GAAQhB,iBAAiBC,KAAK,CAACC,QAAQ,CAAC,4BAA4B,EAAEY,OAAAA,CAAAA,CAAS,CAAA;IACrF,MAAM+C,UAAAA,GAAa,IAAIC,GAAAA,EAAAA,CAAAA;AAEvB,IAAA,MAAMC,kBAAkBnC,aAAAA,CAAKoC,OAAO,CAACC,OAAAA,CAAQC,GAAG,EAAA,EAAIpD,OAAAA,CAAAA;AACpDZ,IAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,iCAAiC,EAAE2D,eAAAA,CAAAA,CAAiB,CAAA;IAEpE,IAAI;;QAEA,MAAMI,WAAAA,GAAcnE,iBAAiBC,KAAK,CAACC,QAAQ,CAAC,oBAAoB,EAAE6D,eAAAA,CAAAA,CAAiB,CAAA;QAC3F,IAAI,CAAC,MAAMlD,OAAAA,CAAQ+B,MAAM,CAACmB,eAAAA,CAAAA,IAAoB,CAAC,MAAMlD,OAAAA,CAAQgC,WAAW,CAACkB,eAAAA,CAAAA,EAAkB;AACvFI,YAAAA,WAAAA,CAAY9D,GAAG,CAAC,CAAC,wCAAwC,EAAE0D,eAAAA,CAAAA,CAAiB,CAAA;AAC5E/C,YAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,mBAAmB,EAAES,OAAAA,CAAAA,CAAS,CAAA;YACzC,OAAO+C,UAAAA;AACX,QAAA;AACAM,QAAAA,WAAAA,CAAY9D,GAAG,CAAC,CAAC,oBAAoB,EAAE0D,eAAAA,CAAAA,CAAiB,CAAA;;QAGxD,MAAMK,SAAAA,GAAYpE,iBAAiBC,KAAK,CAACC,QAAQ,CAAC,kBAAkB,EAAE6D,eAAAA,CAAAA,CAAiB,CAAA;AACvF,QAAA,MAAMjB,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACgB,eAAAA,CAAAA;QACtCK,SAAAA,CAAU/D,GAAG,CAAC,CAAC,OAAO,EAAEyC,KAAAA,CAAM7B,MAAM,CAAC,MAAM,CAAC,CAAA;;QAG5C,MAAMoD,UAAAA,GAAa;AACnB,QAAA,MAAMC,UAAU,EAAE;QAElB,IAAK,IAAIC,IAAI,CAAA,EAAGA,CAAAA,GAAIzB,MAAM7B,MAAM,EAAEsD,KAAKF,UAAAA,CAAY;AAC/C,YAAA,MAAMG,KAAAA,GAAQ1B,KAAAA,CAAM2B,KAAK,CAACF,GAAGA,CAAAA,GAAIF,UAAAA,CAAAA;AACjCC,YAAAA,OAAAA,CAAQnB,IAAI,CAACqB,KAAAA,CAAAA;AACjB,QAAA;AAEA,QAAA,MAAME,YAAAA,GAAe1E,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,WAAW,EAAEoE,OAAAA,CAAQrD,MAAM,CAAC,uBAAuB,CAAC,CAAA;QAEzG,KAAK,MAAMuD,SAASF,OAAAA,CAAS;AACzB,YAAA,MAAMK,aAAAA,GAAgBH,KAAAA,CAAMrD,GAAG,CAAC,OAAOkC,IAAAA,GAAAA;AACnC,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACa,eAAAA,EAAiBV,IAAAA
,CAAAA;gBAC5C,IAAI;AACA,oBAAA,IAAI,MAAMxC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;AACrC,wBAAA,MAAMlC,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACI,QAAAA,EAAU,cAAA,CAAA;AAE5C,wBAAA,IAAI,MAAMzC,OAAAA,CAAQ+B,MAAM,CAACxB,eAAAA,CAAAA,EAAkB;AACvC,4BAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;4BACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;4BACjD,MAAMK,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQH,eAAAA,CAAAA;4BAEhD,IAAIK,WAAAA,CAAYmD,IAAI,EAAE;AAClB,gCAAA,MAAMjD,eAAeC,aAAAA,CAAKC,QAAQ,CAACoC,OAAAA,CAAQC,GAAG,EAAA,EAAIZ,QAAAA,CAAAA;gCAClD,OAAO;AAAEsB,oCAAAA,IAAAA,EAAMnD,YAAYmD,IAAI;oCAAEhD,IAAAA,EAAMD;AAAa,iCAAA;AACxD,4BAAA;AACJ,wBAAA;AACJ,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,QAAQ,EAAEsB,QAAAA,CAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,IAAIF,KAAAA,CAAAA,CAAO,CAAA;AACjE,gBAAA;gBACA,OAAO,IAAA;AACX,YAAA,CAAA,CAAA;AAEA,YAAA,MAAM8C,YAAAA,GAAe,MAAM1C,OAAAA,CAAQC,GAAG,CAACuC,aAAAA,CAAAA;YAEvC,KAAK,MAAMpC,UAAUsC,YAAAA,CAAc;AAC/B,gBAAA,IAAItC,MAAAA,EAAQ;AACRsB,oBAAAA,UAAAA,CAAWiB,GAAG,CAACvC,MAAAA,CAAOqC,IAAI,EAAErC,OAAOX,IAAI,CAAA;AACvC1B,oBAAAA,MAAAA,CAAO8B,KAAK,CAAC,CAAC,eAAe,EAAEO,MAAAA,CAAOqC,IAAI,CAAC,IAAI,EAAErC,MAAAA,CAAOX,IAAI,CAAA,CAAE,CAAA;AAClE,gBAAA;AACJ,YAAA;AACJ,QAAA;AAEA8C,QAAAA,YAAAA,CAAarE,GAAG,CAAC,CAAC,UAAU,EAAEyC,KAAAA,CAAM7B,MAAM,CAAC,gBAAgB,EAAEqD,OAAAA,CAAQrD,MAAM,CAAC,QAAQ,CAAC,CAAA;AACrFf,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,MAAM,EAAEyD,UAAAA,CAAWkB,IAAI,CAAC,aAAa,EAAEjC,KAAAA,CAAM7B,MAAM,CAAC,eAAe,CAAC,CAAA;AACxF,IAAA,CAAA,CAAE,OAAOc,KAAAA,EAAO;QACZ7B,MAAAA,CAAO8E,IAAI,CAAC,CAAC,yBAAyB,EAAEjB,eAAAA,CAAgB,EAAE,EAAEhC,KAAAA,CAAAA,CAAO,CAAA;AACvE,IAAA;IAEAf,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEwD,WAAWkB,IAAI,CAAC,cAAc,EAAEjE,OAAAA,CAAAA,CAAS,CAAA;IAC5D,OAAO+C,UAAAA;AACX;;;;"}
+ {"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from './validation';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n 
foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath = 
path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all dependencies');\n\n const allDependencies: Record<string, string> = 
{};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('⚠️ WARNING: Found file: dependencies that should not be committed:');\n for (const file of filesWithFileDepedencies) {\n logger.warn(` 📄 ${file.path}:`);\n for (const dep of file.dependencies) {\n logger.warn(` - ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('💡 Remember to run \"kodrdriv unlink\" before committing to restore registry versions!');\n logger.warn(' Or add a pre-commit hook to prevent accidental commits of linked dependencies.');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AA
ChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;;;;"}
package/dist/util/storage.js
@@ -71,7 +71,34 @@ const create = (params)=>{
      };
      const ensureDirectory = async (path)=>{
          if (!await exists(path)) {
-             await createDirectory(path);
+             // Before creating the directory, check if any parent directory is blocked by a file
+             try {
+                 await fs.promises.mkdir(path, {
+                     recursive: true
+                 });
+             } catch (mkdirError) {
+                 // If mkdir fails with ENOTDIR, it means a parent directory is actually a file
+                 if (mkdirError.code === 'ENOTDIR') {
+                     // Find which parent directory is the problem
+                     const pathParts = path.split('/').filter((p)=>p !== '');
+                     let currentPath = '';
+                     for (const part of pathParts){
+                         currentPath = currentPath ? `${currentPath}/${part}` : part;
+                         if (await exists(currentPath) && !await isDirectory(currentPath)) {
+                             throw new Error(`Cannot create directory at ${path}: a file exists at ${currentPath} blocking the path`);
+                         }
+                     }
+                 }
+                 // Re-throw the original error if it's not the file-blocking-path issue or we couldn't find the blocking file
+                 throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);
+             }
+         } else {
+             // Path exists, but we need to check if it's actually a directory
+             if (!await isDirectory(path)) {
+                 // Path exists but is not a directory (likely a file)
+                 throw new Error(`Cannot create directory at ${path}: a file already exists at this location`);
+             }
+             // If we reach here, the directory already exists, so nothing to do
          }
      };
      const removeDirectory = async (path)=>{
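The new ensureDirectory shown above no longer delegates to createDirectory: it calls fs.promises.mkdir itself and, when a plain file blocks the target path, reports which path segment is the problem instead of only the generic "Failed to create output directory" message that 0.1.0 produced. A minimal sketch of how that surfaces, assuming the storage utility's create function is importable from the built output (hypothetical import path):

import * as fs from 'fs';
import { create as createStorage } from '@eldrforge/kodrdriv/dist/util/storage.js';

const storage = createStorage({ log: console.log });

const demo = async () => {
    // A regular file sitting where a directory is needed.
    await fs.promises.writeFile('output', 'not a directory');
    try {
        await storage.ensureDirectory('output/reports');
    } catch (error: any) {
        // On ENOTDIR, 1.2.0 walks the path segments and names the blocking file, e.g.
        // "Cannot create directory at output/reports: a file exists at output blocking the path"
        console.error(error.message);
    }
};

void demo();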
package/dist/util/storage.js.map
@@ -1 +1 @@
- {"version":3,"file":"storage.js","sources":["../../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport { glob } from 'glob';\nimport path from 'path';\nimport crypto from 'crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n *\n * Additionally, abstracting storage operations allows for future flexibility -\n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n ensureDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n deleteFile: (path: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n removeDirectory: (path: string) => Promise<void>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || console.log;\n\n const exists = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n // Log at debug level since this is expected when scanning directories\n // that contain both files and directories\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n // Log removed since this is expected when checking file types\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await 
exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async (path: string): Promise<void> => {\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n }\n\n const ensureDirectory = async (path: string): Promise<void> => {\n if (!(await exists(path))) {\n await createDirectory(path);\n }\n }\n\n const removeDirectory = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.rm(path, { recursive: true, force: true });\n }\n } catch (rmError: any) {\n throw new Error(`Failed to remove directory ${path}: ${rmError.message} ${rmError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const rename = async (oldPath: string, newPath: string): Promise<void> => {\n await fs.promises.rename(oldPath, newPath);\n }\n\n const deleteFile = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.unlink(path);\n }\n } catch (deleteError: any) {\n throw new Error(`Failed to delete file ${path}: ${deleteError.message} ${deleteError.stack}`);\n }\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n }\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n ensureDirectory,\n readFile,\n readStream,\n writeFile,\n rename,\n deleteFile,\n forEachFileIn,\n hashFile,\n listFiles,\n removeDirectory,\n 
};\n}\n"],"names":["create","params","log","console","exists","path","fs","promises","stat","error","isDirectory","stats","isFile","isReadable","access","constants","R_OK","message","stack","isWritable","W_OK","isFileReadable","isDirectoryWritable","isDirectoryReadable","createDirectory","mkdir","recursive","mkdirError","Error","ensureDirectory","removeDirectory","rm","force","rmError","readFile","encoding","writeFile","data","rename","oldPath","newPath","deleteFile","unlink","deleteError","forEachFileIn","directory","callback","options","pattern","files","glob","cwd","nodir","file","join","err","readStream","createReadStream","hashFile","length","crypto","createHash","update","digest","slice","listFiles","readdir"],"mappings":";;;;;AAAA;AAoCO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;;AAGnB,IAAA,MAAMC,GAAAA,GAAMD,MAAAA,CAAOC,GAAG,IAAIC,QAAQD,GAAG;AAErC,IAAA,MAAME,SAAS,OAAOC,IAAAA,GAAAA;QAClB,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;YACvB,OAAO,IAAA;;AAEX,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB,OAAO,KAAA;AACX,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMC,cAAc,OAAOL,IAAAA,GAAAA;AACvB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMD,WAAW,EAAA,EAAI;;;YAGtB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAME,SAAS,OAAOP,IAAAA,GAAAA;AAClB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMC,MAAM,EAAA,EAAI;;YAEjB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOR,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACC,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOP,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOd,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACK,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOX,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMG,iBAAiB,OAAOhB,IAAAA,GAAAA;AAC1B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMO,MAAAA,CAAOP,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AACxE,IAAA,CAAA;AAEA,IAAA,MAAMiB,sBAAsB,OAAOjB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMc,UAAAA,CAAWd,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMkB,sBAAsB,OAAOlB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMmB,kBAAkB,OAAOnB,IAAAA,GAAAA;QAC3B,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACkB,KAAK,CAACpB,IAAAA,EAAM;gBAAEqB,SAAAA,EAAW;AAAK,aAAA,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOC,UAAAA,EAAiB;AACtB,YAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAAA,CAAK,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAA,CAAE,CAAA;AAC1G,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMW,kBAAkB,OAAOxB,IAAAA,GAAAA;QAC3B,IAAI,CAAE,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,EAAQ;AACvB,YAAA,MAAMmB,eAAAA,CAAgBnB,IAAAA,CAAAA;AAC1B,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMyB,kBAAkB,OAAOzB,IAAAA,GAAAA;QAC3B,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACwB,EAAE,CAAC1B,IAAAA,EAAM;oBAAEqB,SAAAA,EAAW,IAAA;oBAAMM,KAAAA,EAAO;AAAK,iBAAA,CAAA;AAC9D,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOC,OAAAA,EAAc;AACnB,YAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,2BAA2B,EAAEvB,IAAAA,CAAK,EAAE,EAAE4B,OAAAA,CAAQhB,OAAO,CAAC,CAAC
,EAAEgB,OAAAA,CAAQf,KAAK,CAAA,CAAE,CAAA;AAC7F,QAAA;AACJ,IAAA,CAAA;IAEA,MAAMgB,QAAAA,GAAW,OAAO7B,IAAAA,EAAc8B,QAAAA,GAAAA;AAClC,QAAA,OAAO,MAAM7B,EAAAA,CAAGC,QAAQ,CAAC2B,QAAQ,CAAC7B,IAAAA,EAAM;YAAE8B,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMC,SAAAA,GAAY,OAAO/B,IAAAA,EAAcgC,IAAAA,EAAuBF,QAAAA,GAAAA;AAC1D,QAAA,MAAM7B,GAAGC,QAAQ,CAAC6B,SAAS,CAAC/B,MAAMgC,IAAAA,EAAM;YAAEF,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMG,MAAAA,GAAS,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACnC,QAAA,MAAMlC,EAAAA,CAAGC,QAAQ,CAAC+B,MAAM,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACtC,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOpC,IAAAA,GAAAA;QACtB,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACmC,MAAM,CAACrC,IAAAA,CAAAA;AAC7B,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOsC,WAAAA,EAAkB;AACvB,YAAA,MAAM,IAAIf,KAAAA,CAAM,CAAC,sBAAsB,EAAEvB,IAAAA,CAAK,EAAE,EAAEsC,WAAAA,CAAY1B,OAAO,CAAC,CAAC,EAAE0B,WAAAA,CAAYzB,KAAK,CAAA,CAAE,CAAA;AAChG,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAM0B,aAAAA,GAAgB,OAAOC,SAAAA,EAAmBC,QAAAA,EAA2CC,OAAAA,GAA0C;QAAEC,OAAAA,EAAS;KAAO,GAAA;QACnJ,IAAI;AACA,YAAA,MAAMC,KAAAA,GAAQ,MAAMC,IAAAA,CAAKH,OAAAA,CAAQC,OAAO,EAAE;gBAAEG,GAAAA,EAAKN,SAAAA;gBAAWO,KAAAA,EAAO;AAAK,aAAA,CAAA;YACxE,KAAK,MAAMC,QAAQJ,KAAAA,CAAO;AACtB,gBAAA,MAAMH,QAAAA,CAASzC,aAAAA,CAAKiD,IAAI,CAACT,SAAAA,EAAWQ,IAAAA,CAAAA,CAAAA;AACxC,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOE,GAAAA,EAAU;AACf,YAAA,MAAM,IAAI3B,KAAAA,CAAM,CAAC,uBAAuB,EAAEmB,OAAAA,CAAQC,OAAO,CAAC,IAAI,EAAEH,SAAAA,CAAU,EAAE,EAAEU,GAAAA,CAAItC,OAAO,CAAA,CAAE,CAAA;AAC/F,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMuC,aAAa,OAAOnD,IAAAA,GAAAA;QACtB,OAAOC,EAAAA,CAAGmD,gBAAgB,CAACpD,IAAAA,CAAAA;AAC/B,IAAA,CAAA;IAEA,MAAMqD,QAAAA,GAAW,OAAOrD,IAAAA,EAAcsD,MAAAA,GAAAA;QAClC,MAAMN,IAAAA,GAAO,MAAMnB,QAAAA,CAAS7B,IAAAA,EAAM,MAAA,CAAA;AAClC,QAAA,OAAOuD,MAAAA,CAAOC,UAAU,CAAC,QAAA,CAAA,CAAUC,MAAM,CAACT,IAAAA,CAAAA,CAAMU,MAAM,CAAC,KAAA,CAAA,CAAOC,KAAK,CAAC,CAAA,EAAGL,MAAAA,CAAAA;AAC3E,IAAA,CAAA;AAEA,IAAA,MAAMM,YAAY,OAAOpB,SAAAA,GAAAA;AACrB,QAAA,OAAO,MAAMvC,EAAAA,CAAGC,QAAQ,CAAC2D,OAAO,CAACrB,SAAAA,CAAAA;AACrC,IAAA,CAAA;IAEA,OAAO;AACHzC,QAAAA,MAAAA;AACAM,QAAAA,WAAAA;AACAE,QAAAA,MAAAA;AACAC,QAAAA,UAAAA;AACAM,QAAAA,UAAAA;AACAE,QAAAA,cAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,eAAAA;AACAK,QAAAA,eAAAA;AACAK,QAAAA,QAAAA;AACAsB,QAAAA,UAAAA;AACApB,QAAAA,SAAAA;AACAE,QAAAA,MAAAA;AACAG,QAAAA,UAAAA;AACAG,QAAAA,aAAAA;AACAc,QAAAA,QAAAA;AACAO,QAAAA,SAAAA;AACAnC,QAAAA;AACJ,KAAA;AACJ;;;;"}
1
+ {"version":3,"file":"storage.js","sources":["../../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport { glob } from 'glob';\nimport path from 'path';\nimport crypto from 'crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n *\n * Additionally, abstracting storage operations allows for future flexibility -\n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n ensureDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n deleteFile: (path: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n removeDirectory: (path: string) => Promise<void>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || console.log;\n\n const exists = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n // Log at debug level since this is expected when scanning directories\n // that contain both files and directories\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n // Log removed since this is expected when checking file types\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await 
exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async (path: string): Promise<void> => {\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n }\n\n const ensureDirectory = async (path: string): Promise<void> => {\n if (!(await exists(path))) {\n // Before creating the directory, check if any parent directory is blocked by a file\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n // If mkdir fails with ENOTDIR, it means a parent directory is actually a file\n if (mkdirError.code === 'ENOTDIR') {\n // Find which parent directory is the problem\n const pathParts = path.split('/').filter(p => p !== '');\n let currentPath = '';\n for (const part of pathParts) {\n currentPath = currentPath ? `${currentPath}/${part}` : part;\n if (await exists(currentPath) && !(await isDirectory(currentPath))) {\n throw new Error(`Cannot create directory at ${path}: a file exists at ${currentPath} blocking the path`);\n }\n }\n }\n // Re-throw the original error if it's not the file-blocking-path issue or we couldn't find the blocking file\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n } else {\n // Path exists, but we need to check if it's actually a directory\n if (!(await isDirectory(path))) {\n // Path exists but is not a directory (likely a file)\n throw new Error(`Cannot create directory at ${path}: a file already exists at this location`);\n }\n // If we reach here, the directory already exists, so nothing to do\n }\n }\n\n const removeDirectory = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.rm(path, { recursive: true, force: true });\n }\n } catch (rmError: any) {\n throw new Error(`Failed to remove directory ${path}: ${rmError.message} ${rmError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const rename = async (oldPath: string, newPath: string): Promise<void> => {\n await fs.promises.rename(oldPath, newPath);\n }\n\n const deleteFile = async (path: string): Promise<void> => {\n try {\n if (await exists(path)) {\n await fs.promises.unlink(path);\n }\n } catch (deleteError: any) {\n throw new Error(`Failed to delete file ${path}: ${deleteError.message} ${deleteError.stack}`);\n }\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n 
}\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n ensureDirectory,\n readFile,\n readStream,\n writeFile,\n rename,\n deleteFile,\n forEachFileIn,\n hashFile,\n listFiles,\n removeDirectory,\n };\n}\n"],"names":["create","params","log","console","exists","path","fs","promises","stat","error","isDirectory","stats","isFile","isReadable","access","constants","R_OK","message","stack","isWritable","W_OK","isFileReadable","isDirectoryWritable","isDirectoryReadable","createDirectory","mkdir","recursive","mkdirError","Error","ensureDirectory","code","pathParts","split","filter","p","currentPath","part","removeDirectory","rm","force","rmError","readFile","encoding","writeFile","data","rename","oldPath","newPath","deleteFile","unlink","deleteError","forEachFileIn","directory","callback","options","pattern","files","glob","cwd","nodir","file","join","err","readStream","createReadStream","hashFile","length","crypto","createHash","update","digest","slice","listFiles","readdir"],"mappings":";;;;;AAAA;AAoCO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;;AAGnB,IAAA,MAAMC,GAAAA,GAAMD,MAAAA,CAAOC,GAAG,IAAIC,QAAQD,GAAG;AAErC,IAAA,MAAME,SAAS,OAAOC,IAAAA,GAAAA;QAClB,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;YACvB,OAAO,IAAA;;AAEX,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB,OAAO,KAAA;AACX,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMC,cAAc,OAAOL,IAAAA,GAAAA;AACvB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMD,WAAW,EAAA,EAAI;;;YAGtB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAME,SAAS,OAAOP,IAAAA,GAAAA;AAClB,QAAA,MAAMM,QAAQ,MAAML,EAAAA,CAAGC,QAAQ,CAACC,IAAI,CAACH,IAAAA,CAAAA;QACrC,IAAI,CAACM,KAAAA,CAAMC,MAAM,EAAA,EAAI;;YAEjB,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOR,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACC,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOP,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOd,IAAAA,GAAAA;QACtB,IAAI;YACA,MAAMC,EAAAA,CAAGC,QAAQ,CAACO,MAAM,CAACT,IAAAA,EAAMC,EAAAA,CAAGS,SAAS,CAACK,IAAI,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOX,KAAAA,EAAY;YACjBP,GAAAA,CAAI,CAAA,EAAGG,KAAK,uBAAuB,CAAC,EAAEI,KAAAA,CAAMQ,OAAO,EAAER,KAAAA,CAAMS,KAAK,CAAA;YAChE,OAAO,KAAA;AACX,QAAA;QACA,OAAO,IAAA;AACX,IAAA,CAAA;AAEA,IAAA,MAAMG,iBAAiB,OAAOhB,IAAAA,GAAAA;AAC1B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMO,MAAAA,CAAOP,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AACxE,IAAA,CAAA;AAEA,IAAA,MAAMiB,sBAAsB,OAAOjB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMc,UAAAA,CAAWd,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMkB,sBAAsB,OAAOlB,IAAAA,GAAAA;AAC/B,QAAA,OAAO,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,IAAS,MAAMK,WAAAA,CAAYL,IAAAA,CAAAA,IAAS,MAAMQ,UAAAA,CAAWR,IAAAA,CAAAA;AAC7E,IAAA,CAAA;AAEA,IAAA,MAAMmB,kBAAkB,OAAOnB,IAAAA,GAAAA;QAC3B,IAAI;AACA,YAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACkB,KAAK,CAACpB,IAAAA,EAAM;gBAAEqB,SAAAA,EAAW;AAAK,aAAA,CAAA;AACpD,QAAA,CAAA,CAAE,OAAOC,UAAAA,
EAAiB;AACtB,YAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAAA,CAAK,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAA,CAAE,CAAA;AAC1G,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMW,kBAAkB,OAAOxB,IAAAA,GAAAA;QAC3B,IAAI,CAAE,MAAMD,MAAAA,CAAOC,IAAAA,CAAAA,EAAQ;;YAEvB,IAAI;AACA,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAACkB,KAAK,CAACpB,IAAAA,EAAM;oBAAEqB,SAAAA,EAAW;AAAK,iBAAA,CAAA;AACpD,YAAA,CAAA,CAAE,OAAOC,UAAAA,EAAiB;;gBAEtB,IAAIA,UAAAA,CAAWG,IAAI,KAAK,SAAA,EAAW;;oBAE/B,MAAMC,SAAAA,GAAY1B,KAAK2B,KAAK,CAAC,KAAKC,MAAM,CAACC,CAAAA,CAAAA,GAAKA,CAAAA,KAAM,EAAA,CAAA;AACpD,oBAAA,IAAIC,WAAAA,GAAc,EAAA;oBAClB,KAAK,MAAMC,QAAQL,SAAAA,CAAW;AAC1BI,wBAAAA,WAAAA,GAAcA,cAAc,CAAA,EAAGA,WAAAA,CAAY,CAAC,EAAEC,MAAM,GAAGA,IAAAA;AACvD,wBAAA,IAAI,MAAMhC,MAAAA,CAAO+B,WAAAA,CAAAA,IAAgB,CAAE,MAAMzB,YAAYyB,WAAAA,CAAAA,EAAe;4BAChE,MAAM,IAAIP,KAAAA,CAAM,CAAC,2BAA2B,EAAEvB,KAAK,mBAAmB,EAAE8B,WAAAA,CAAY,kBAAkB,CAAC,CAAA;AAC3G,wBAAA;AACJ,oBAAA;AACJ,gBAAA;;AAEA,gBAAA,MAAM,IAAIP,KAAAA,CAAM,CAAC,kCAAkC,EAAEvB,IAAAA,CAAK,EAAE,EAAEsB,UAAAA,CAAWV,OAAO,CAAC,CAAC,EAAEU,UAAAA,CAAWT,KAAK,CAAA,CAAE,CAAA;AAC1G,YAAA;QACJ,CAAA,MAAO;;YAEH,IAAI,CAAE,MAAMR,WAAAA,CAAYL,IAAAA,CAAAA,EAAQ;;AAE5B,gBAAA,MAAM,IAAIuB,KAAAA,CAAM,CAAC,2BAA2B,EAAEvB,IAAAA,CAAK,wCAAwC,CAAC,CAAA;AAChG,YAAA;;AAEJ,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMgC,kBAAkB,OAAOhC,IAAAA,GAAAA;QAC3B,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAAC+B,EAAE,CAACjC,IAAAA,EAAM;oBAAEqB,SAAAA,EAAW,IAAA;oBAAMa,KAAAA,EAAO;AAAK,iBAAA,CAAA;AAC9D,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOC,OAAAA,EAAc;AACnB,YAAA,MAAM,IAAIZ,KAAAA,CAAM,CAAC,2BAA2B,EAAEvB,IAAAA,CAAK,EAAE,EAAEmC,OAAAA,CAAQvB,OAAO,CAAC,CAAC,EAAEuB,OAAAA,CAAQtB,KAAK,CAAA,CAAE,CAAA;AAC7F,QAAA;AACJ,IAAA,CAAA;IAEA,MAAMuB,QAAAA,GAAW,OAAOpC,IAAAA,EAAcqC,QAAAA,GAAAA;AAClC,QAAA,OAAO,MAAMpC,EAAAA,CAAGC,QAAQ,CAACkC,QAAQ,CAACpC,IAAAA,EAAM;YAAEqC,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMC,SAAAA,GAAY,OAAOtC,IAAAA,EAAcuC,IAAAA,EAAuBF,QAAAA,GAAAA;AAC1D,QAAA,MAAMpC,GAAGC,QAAQ,CAACoC,SAAS,CAACtC,MAAMuC,IAAAA,EAAM;YAAEF,QAAAA,EAAUA;AAA2B,SAAA,CAAA;AACnF,IAAA,CAAA;IAEA,MAAMG,MAAAA,GAAS,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACnC,QAAA,MAAMzC,EAAAA,CAAGC,QAAQ,CAACsC,MAAM,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACtC,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAO3C,IAAAA,GAAAA;QACtB,IAAI;YACA,IAAI,MAAMD,OAAOC,IAAAA,CAAAA,EAAO;AACpB,gBAAA,MAAMC,EAAAA,CAAGC,QAAQ,CAAC0C,MAAM,CAAC5C,IAAAA,CAAAA;AAC7B,YAAA;AACJ,QAAA,CAAA,CAAE,OAAO6C,WAAAA,EAAkB;AACvB,YAAA,MAAM,IAAItB,KAAAA,CAAM,CAAC,sBAAsB,EAAEvB,IAAAA,CAAK,EAAE,EAAE6C,WAAAA,CAAYjC,OAAO,CAAC,CAAC,EAAEiC,WAAAA,CAAYhC,KAAK,CAAA,CAAE,CAAA;AAChG,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMiC,aAAAA,GAAgB,OAAOC,SAAAA,EAAmBC,QAAAA,EAA2CC,OAAAA,GAA0C;QAAEC,OAAAA,EAAS;KAAO,GAAA;QACnJ,IAAI;AACA,YAAA,MAAMC,KAAAA,GAAQ,MAAMC,IAAAA,CAAKH,OAAAA,CAAQC,OAAO,EAAE;gBAAEG,GAAAA,EAAKN,SAAAA;gBAAWO,KAAAA,EAAO;AAAK,aAAA,CAAA;YACxE,KAAK,MAAMC,QAAQJ,KAAAA,CAAO;AACtB,gBAAA,MAAMH,QAAAA,CAAShD,aAAAA,CAAKwD,IAAI,CAACT,SAAAA,EAAWQ,IAAAA,CAAAA,CAAAA;AACxC,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOE,GAAAA,EAAU;AACf,YAAA,MAAM,IAAIlC,KAAAA,CAAM,CAAC,uBAAuB,EAAE0B,OAAAA,CAAQC,OAAO,CAAC,IAAI,EAAEH,SAAAA,CAAU,EAAE,EAAEU,GAAAA,CAAI7C,OAAO,CAAA,CAAE,CAAA;AAC/F,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAM8C,aAAa,OAAO1D,IAAAA,GAAAA;QACtB,OAAOC,EAAAA,CAAG0D,gBAAgB,CAAC3D,IAAAA,CAAAA;AAC/B,IAAA,CAAA;IAEA,MAAM4D,QAAAA,GAAW,OAAO5D,IAAAA,EAAc6D,MAAAA,GAAAA;QAClC,MAAMN,IAAAA,GAAO,MAAMnB,QAAAA,CAASpC,IAAAA,EAAM,MAAA,CAAA;AAClC,QAAA,OAAO8D,MAAAA,CAAOC,UAAU,CAAC,QAAA,CAAA,CAAUC,MAAM,CAACT,IAAAA,CAAAA,CAAMU,MAAM,CAAC,KAAA,CAAA,CAAOC,KAAK,CAAC,CAAA,EAAGL,MAAAA,CAAAA;AAC3E,IAAA,CAAA;AAEA,IAAA,MAAMM,
YAAY,OAAOpB,SAAAA,GAAAA;AACrB,QAAA,OAAO,MAAM9C,EAAAA,CAAGC,QAAQ,CAACkE,OAAO,CAACrB,SAAAA,CAAAA;AACrC,IAAA,CAAA;IAEA,OAAO;AACHhD,QAAAA,MAAAA;AACAM,QAAAA,WAAAA;AACAE,QAAAA,MAAAA;AACAC,QAAAA,UAAAA;AACAM,QAAAA,UAAAA;AACAE,QAAAA,cAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA,eAAAA;AACAK,QAAAA,eAAAA;AACAY,QAAAA,QAAAA;AACAsB,QAAAA,UAAAA;AACApB,QAAAA,SAAAA;AACAE,QAAAA,MAAAA;AACAG,QAAAA,UAAAA;AACAG,QAAAA,aAAAA;AACAc,QAAAA,QAAAA;AACAO,QAAAA,SAAAA;AACAnC,QAAAA;AACJ,KAAA;AACJ;;;;"}
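The updated storage.js.map embeds the full src/util/storage.ts source, which exposes a create factory returning the Utility interface (exists, ensureDirectory, hashFile, and so on) and a hardened ensureDirectory that explains when a file is blocking the directory path. A minimal usage sketch based on that embedded source follows; the import path and the demo file names are assumptions for illustration only.

```typescript
// Usage sketch of the storage utility; the import path is an assumption.
import { create } from '@eldrforge/kodrdriv/dist/util/storage.js';

const storage = create({ log: console.log });

const demo = async (): Promise<void> => {
    // ensureDirectory now reports which parent entry is a file blocking the
    // path instead of surfacing a raw ENOTDIR error.
    await storage.ensureDirectory('output/logs');
    await storage.writeFile('output/logs/run.txt', 'hello', 'utf8');

    if (await storage.isFileReadable('output/logs/run.txt')) {
        // hashFile returns the first N hex characters of a sha256 digest.
        const shortHash = await storage.hashFile('output/logs/run.txt', 8);
        console.log(`content hash: ${shortHash}`);
    }
};

demo().catch(console.error);
```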
package/dist/util/validation.js CHANGED
@@ -14,30 +14,6 @@
14
14
  }
15
15
  return data;
16
16
  };
17
- /**
18
- * Validates and safely casts data to LinkBackup type
19
- */ const validateLinkBackup = (data)=>{
20
- if (!data || typeof data !== 'object') {
21
- throw new Error('Invalid link backup: not an object');
22
- }
23
- // Validate each backup entry
24
- for (const [key, value] of Object.entries(data)){
25
- if (!value || typeof value !== 'object') {
26
- throw new Error(`Invalid link backup entry for ${key}: not an object`);
27
- }
28
- const entry = value;
29
- if (typeof entry.originalVersion !== 'string') {
30
- throw new Error(`Invalid link backup entry for ${key}: originalVersion must be a string`);
31
- }
32
- if (typeof entry.dependencyType !== 'string') {
33
- throw new Error(`Invalid link backup entry for ${key}: dependencyType must be a string`);
34
- }
35
- if (typeof entry.relativePath !== 'string') {
36
- throw new Error(`Invalid link backup entry for ${key}: relativePath must be a string`);
37
- }
38
- }
39
- return data;
40
- };
41
17
  /**
42
18
  * Safely parses JSON with error handling
43
19
  */ const safeJsonParse = (jsonString, context)=>{
@@ -77,5 +53,5 @@
77
53
  return data;
78
54
  };
79
55
 
80
- export { safeJsonParse, validateLinkBackup, validatePackageJson, validateReleaseSummary, validateString };
56
+ export { safeJsonParse, validatePackageJson, validateReleaseSummary, validateString };
81
57
  //# sourceMappingURL=validation.js.map
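After this change the compiled validation module exports only safeJsonParse, validatePackageJson, validateReleaseSummary, and validateString. A short sketch of the two most general helpers, based on the source embedded in the map below; the import path is an assumption.

```typescript
// Sketch of the surviving validation helpers; the import path is an assumption.
import { safeJsonParse, validatePackageJson } from '@eldrforge/kodrdriv/dist/util/validation.js';

const raw = '{"name":"example-package","version":"1.0.0"}';

// safeJsonParse wraps JSON.parse, rejects null/undefined results, and folds
// the optional context string into any error message it throws.
const parsed = safeJsonParse<Record<string, unknown>>(raw, 'example package.json');

// validatePackageJson requires an object with a string `name` unless
// requireName is passed as false.
const pkg = validatePackageJson(parsed, 'example package.json');
console.log(pkg.name);
```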
package/dist/util/validation.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"validation.js","sources":["../../src/util/validation.ts"],"sourcesContent":["/**\n * Runtime validation utilities for safe type handling\n */\n\nexport interface ReleaseSummary {\n title: string;\n body: string;\n}\n\nexport interface LinkBackup {\n [key: string]: {\n originalVersion: string;\n dependencyType: string;\n relativePath: string;\n };\n}\n\nexport interface TranscriptionResult {\n text: string;\n [key: string]: any;\n}\n\n/**\n * Validates and safely casts data to ReleaseSummary type\n */\nexport const validateReleaseSummary = (data: any): ReleaseSummary => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid release summary: not an object');\n }\n if (typeof data.title !== 'string') {\n throw new Error('Invalid release summary: title must be a string');\n }\n if (typeof data.body !== 'string') {\n throw new Error('Invalid release summary: body must be a string');\n }\n return data as ReleaseSummary;\n};\n\n/**\n * Validates and safely casts data to LinkBackup type\n */\nexport const validateLinkBackup = (data: any): LinkBackup => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid link backup: not an object');\n }\n\n // Validate each backup entry\n for (const [key, value] of Object.entries(data)) {\n if (!value || typeof value !== 'object') {\n throw new Error(`Invalid link backup entry for ${key}: not an object`);\n }\n const entry = value as any;\n if (typeof entry.originalVersion !== 'string') {\n throw new Error(`Invalid link backup entry for ${key}: originalVersion must be a string`);\n }\n if (typeof entry.dependencyType !== 'string') {\n throw new Error(`Invalid link backup entry for ${key}: dependencyType must be a string`);\n }\n if (typeof entry.relativePath !== 'string') {\n throw new Error(`Invalid link backup entry for ${key}: relativePath must be a string`);\n }\n }\n\n return data as LinkBackup;\n};\n\n/**\n * Validates transcription result has required text property\n */\nexport const validateTranscriptionResult = (data: any): TranscriptionResult => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid transcription result: not an object');\n }\n if (typeof data.text !== 'string') {\n throw new Error('Invalid transcription result: text property must be a string');\n }\n return data as TranscriptionResult;\n};\n\n/**\n * Safely parses JSON with error handling\n */\nexport const safeJsonParse = <T = any>(jsonString: string, context?: string): T => {\n try {\n const parsed = JSON.parse(jsonString);\n if (parsed === null || parsed === undefined) {\n throw new Error('Parsed JSON is null or undefined');\n }\n return parsed;\n } catch (error) {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Failed to parse JSON${contextStr}: ${error instanceof Error ? error.message : 'Unknown error'}`);\n }\n};\n\n/**\n * Validates that a value is a non-empty string\n */\nexport const validateString = (value: any, fieldName: string): string => {\n if (typeof value !== 'string') {\n throw new Error(`${fieldName} must be a string, got ${typeof value}`);\n }\n if (value.trim() === '') {\n throw new Error(`${fieldName} cannot be empty`);\n }\n return value;\n};\n\n/**\n * Validates that a value exists and has a specific property\n */\nexport const validateHasProperty = (obj: any, property: string, context?: string): void => {\n if (!obj || typeof obj !== 'object') {\n const contextStr = context ? 
` in ${context}` : '';\n throw new Error(`Object is null or not an object${contextStr}`);\n }\n if (!(property in obj)) {\n const contextStr = context ? ` in ${context}` : '';\n throw new Error(`Missing required property '${property}'${contextStr}`);\n }\n};\n\n/**\n * Validates package.json structure has basic required fields\n */\nexport const validatePackageJson = (data: any, context?: string, requireName: boolean = true): any => {\n if (!data || typeof data !== 'object') {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Invalid package.json${contextStr}: not an object`);\n }\n if (requireName && typeof data.name !== 'string') {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Invalid package.json${contextStr}: name must be a string`);\n }\n return data;\n};\n"],"names":["validateReleaseSummary","data","Error","title","body","validateLinkBackup","key","value","Object","entries","entry","originalVersion","dependencyType","relativePath","safeJsonParse","jsonString","context","parsed","JSON","parse","undefined","error","contextStr","message","validateString","fieldName","trim","validatePackageJson","requireName","name"],"mappings":"AAAA;;;;IAyBO,MAAMA,sBAAAA,GAAyB,CAACC,IAAAA,GAAAA;AACnC,IAAA,IAAI,CAACA,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;AACnC,QAAA,MAAM,IAAIC,KAAAA,CAAM,wCAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKE,KAAK,KAAK,QAAA,EAAU;AAChC,QAAA,MAAM,IAAID,KAAAA,CAAM,iDAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKG,IAAI,KAAK,QAAA,EAAU;AAC/B,QAAA,MAAM,IAAIF,KAAAA,CAAM,gDAAA,CAAA;AACpB,IAAA;IACA,OAAOD,IAAAA;AACX;AAEA;;IAGO,MAAMI,kBAAAA,GAAqB,CAACJ,IAAAA,GAAAA;AAC/B,IAAA,IAAI,CAACA,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;AACnC,QAAA,MAAM,IAAIC,KAAAA,CAAM,oCAAA,CAAA;AACpB,IAAA;;IAGA,KAAK,MAAM,CAACI,GAAAA,EAAKC,KAAAA,CAAM,IAAIC,MAAAA,CAAOC,OAAO,CAACR,IAAAA,CAAAA,CAAO;AAC7C,QAAA,IAAI,CAACM,KAAAA,IAAS,OAAOA,KAAAA,KAAU,QAAA,EAAU;AACrC,YAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,8BAA8B,EAAEI,GAAAA,CAAI,eAAe,CAAC,CAAA;AACzE,QAAA;AACA,QAAA,MAAMI,KAAAA,GAAQH,KAAAA;AACd,QAAA,IAAI,OAAOG,KAAAA,CAAMC,eAAe,KAAK,QAAA,EAAU;AAC3C,YAAA,MAAM,IAAIT,KAAAA,CAAM,CAAC,8BAA8B,EAAEI,GAAAA,CAAI,kCAAkC,CAAC,CAAA;AAC5F,QAAA;AACA,QAAA,IAAI,OAAOI,KAAAA,CAAME,cAAc,KAAK,QAAA,EAAU;AAC1C,YAAA,MAAM,IAAIV,KAAAA,CAAM,CAAC,8BAA8B,EAAEI,GAAAA,CAAI,iCAAiC,CAAC,CAAA;AAC3F,QAAA;AACA,QAAA,IAAI,OAAOI,KAAAA,CAAMG,YAAY,KAAK,QAAA,EAAU;AACxC,YAAA,MAAM,IAAIX,KAAAA,CAAM,CAAC,8BAA8B,EAAEI,GAAAA,CAAI,+BAA+B,CAAC,CAAA;AACzF,QAAA;AACJ,IAAA;IAEA,OAAOL,IAAAA;AACX;AAeA;;AAEC,IACM,MAAMa,aAAAA,GAAgB,CAAUC,UAAAA,EAAoBC,OAAAA,GAAAA;IACvD,IAAI;QACA,MAAMC,MAAAA,GAASC,IAAAA,CAAKC,KAAK,CAACJ,UAAAA,CAAAA;QAC1B,IAAIE,MAAAA,KAAW,IAAA,IAAQA,MAAAA,KAAWG,SAAAA,EAAW;AACzC,YAAA,MAAM,IAAIlB,KAAAA,CAAM,kCAAA,CAAA;AACpB,QAAA;QACA,OAAOe,MAAAA;AACX,IAAA,CAAA,CAAE,OAAOI,KAAAA,EAAO;QACZ,MAAMC,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAId,KAAAA,CAAM,CAAC,oBAAoB,EAAEoB,UAAAA,CAAW,EAAE,EAAED,KAAAA,YAAiBnB,KAAAA,GAAQmB,KAAAA,CAAME,OAAO,GAAG,eAAA,CAAA,CAAiB,CAAA;AACpH,IAAA;AACJ;AAEA;;AAEC,IACM,MAAMC,cAAAA,GAAiB,CAACjB,KAAAA,EAAYkB,SAAAA,GAAAA;IACvC,IAAI,OAAOlB,UAAU,QAAA,EAAU;AAC3B,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAA,EAAGuB,UAAU,uBAAuB,EAAE,OAAOlB,KAAAA,CAAAA,CAAO,CAAA;AACxE,IAAA;IACA,IAAIA,KAAAA,CAAMmB,IAAI,EAAA,KAAO,EAAA,EAAI;AACrB,QAAA,MAAM,IAAIxB,KAAAA,CAAM,CAAA,EAAGuB,SAAAA,CAAU,gBAAgB,CAAC,CAAA;AAClD,IAAA;IACA,OAAOlB,KAAAA;AACX;AAgBA;;AAEC,IACM,MAAMoB,mBAAAA,GAAsB,CAAC1B,IAAAA,EAAWe,OAAAA,EAAkBY,cAAuB,IAAI,GAAA;AACxF,IAAA,IAAI,CAAC3B,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;QACnC,MAAMqB,UAAA
A,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAId,KAAAA,CAAM,CAAC,oBAAoB,EAAEoB,UAAAA,CAAW,eAAe,CAAC,CAAA;AACtE,IAAA;AACA,IAAA,IAAIM,WAAAA,IAAe,OAAO3B,IAAAA,CAAK4B,IAAI,KAAK,QAAA,EAAU;QAC9C,MAAMP,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAId,KAAAA,CAAM,CAAC,oBAAoB,EAAEoB,UAAAA,CAAW,uBAAuB,CAAC,CAAA;AAC9E,IAAA;IACA,OAAOrB,IAAAA;AACX;;;;"}
1
+ {"version":3,"file":"validation.js","sources":["../../src/util/validation.ts"],"sourcesContent":["/**\n * Runtime validation utilities for safe type handling\n */\n\nexport interface ReleaseSummary {\n title: string;\n body: string;\n}\n\n\n\nexport interface TranscriptionResult {\n text: string;\n [key: string]: any;\n}\n\n/**\n * Validates and safely casts data to ReleaseSummary type\n */\nexport const validateReleaseSummary = (data: any): ReleaseSummary => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid release summary: not an object');\n }\n if (typeof data.title !== 'string') {\n throw new Error('Invalid release summary: title must be a string');\n }\n if (typeof data.body !== 'string') {\n throw new Error('Invalid release summary: body must be a string');\n }\n return data as ReleaseSummary;\n};\n\n\n\n/**\n * Validates transcription result has required text property\n */\nexport const validateTranscriptionResult = (data: any): TranscriptionResult => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid transcription result: not an object');\n }\n if (typeof data.text !== 'string') {\n throw new Error('Invalid transcription result: text property must be a string');\n }\n return data as TranscriptionResult;\n};\n\n/**\n * Safely parses JSON with error handling\n */\nexport const safeJsonParse = <T = any>(jsonString: string, context?: string): T => {\n try {\n const parsed = JSON.parse(jsonString);\n if (parsed === null || parsed === undefined) {\n throw new Error('Parsed JSON is null or undefined');\n }\n return parsed;\n } catch (error) {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Failed to parse JSON${contextStr}: ${error instanceof Error ? error.message : 'Unknown error'}`);\n }\n};\n\n/**\n * Validates that a value is a non-empty string\n */\nexport const validateString = (value: any, fieldName: string): string => {\n if (typeof value !== 'string') {\n throw new Error(`${fieldName} must be a string, got ${typeof value}`);\n }\n if (value.trim() === '') {\n throw new Error(`${fieldName} cannot be empty`);\n }\n return value;\n};\n\n/**\n * Validates that a value exists and has a specific property\n */\nexport const validateHasProperty = (obj: any, property: string, context?: string): void => {\n if (!obj || typeof obj !== 'object') {\n const contextStr = context ? ` in ${context}` : '';\n throw new Error(`Object is null or not an object${contextStr}`);\n }\n if (!(property in obj)) {\n const contextStr = context ? ` in ${context}` : '';\n throw new Error(`Missing required property '${property}'${contextStr}`);\n }\n};\n\n/**\n * Validates package.json structure has basic required fields\n */\nexport const validatePackageJson = (data: any, context?: string, requireName: boolean = true): any => {\n if (!data || typeof data !== 'object') {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Invalid package.json${contextStr}: not an object`);\n }\n if (requireName && typeof data.name !== 'string') {\n const contextStr = context ? 
` (${context})` : '';\n throw new Error(`Invalid package.json${contextStr}: name must be a string`);\n }\n return data;\n};\n"],"names":["validateReleaseSummary","data","Error","title","body","safeJsonParse","jsonString","context","parsed","JSON","parse","undefined","error","contextStr","message","validateString","value","fieldName","trim","validatePackageJson","requireName","name"],"mappings":"AAAA;;;;IAmBO,MAAMA,sBAAAA,GAAyB,CAACC,IAAAA,GAAAA;AACnC,IAAA,IAAI,CAACA,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;AACnC,QAAA,MAAM,IAAIC,KAAAA,CAAM,wCAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKE,KAAK,KAAK,QAAA,EAAU;AAChC,QAAA,MAAM,IAAID,KAAAA,CAAM,iDAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKG,IAAI,KAAK,QAAA,EAAU;AAC/B,QAAA,MAAM,IAAIF,KAAAA,CAAM,gDAAA,CAAA;AACpB,IAAA;IACA,OAAOD,IAAAA;AACX;AAiBA;;AAEC,IACM,MAAMI,aAAAA,GAAgB,CAAUC,UAAAA,EAAoBC,OAAAA,GAAAA;IACvD,IAAI;QACA,MAAMC,MAAAA,GAASC,IAAAA,CAAKC,KAAK,CAACJ,UAAAA,CAAAA;QAC1B,IAAIE,MAAAA,KAAW,IAAA,IAAQA,MAAAA,KAAWG,SAAAA,EAAW;AACzC,YAAA,MAAM,IAAIT,KAAAA,CAAM,kCAAA,CAAA;AACpB,QAAA;QACA,OAAOM,MAAAA;AACX,IAAA,CAAA,CAAE,OAAOI,KAAAA,EAAO;QACZ,MAAMC,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,oBAAoB,EAAEW,UAAAA,CAAW,EAAE,EAAED,KAAAA,YAAiBV,KAAAA,GAAQU,KAAAA,CAAME,OAAO,GAAG,eAAA,CAAA,CAAiB,CAAA;AACpH,IAAA;AACJ;AAEA;;AAEC,IACM,MAAMC,cAAAA,GAAiB,CAACC,KAAAA,EAAYC,SAAAA,GAAAA;IACvC,IAAI,OAAOD,UAAU,QAAA,EAAU;AAC3B,QAAA,MAAM,IAAId,KAAAA,CAAM,CAAA,EAAGe,UAAU,uBAAuB,EAAE,OAAOD,KAAAA,CAAAA,CAAO,CAAA;AACxE,IAAA;IACA,IAAIA,KAAAA,CAAME,IAAI,EAAA,KAAO,EAAA,EAAI;AACrB,QAAA,MAAM,IAAIhB,KAAAA,CAAM,CAAA,EAAGe,SAAAA,CAAU,gBAAgB,CAAC,CAAA;AAClD,IAAA;IACA,OAAOD,KAAAA;AACX;AAgBA;;AAEC,IACM,MAAMG,mBAAAA,GAAsB,CAAClB,IAAAA,EAAWM,OAAAA,EAAkBa,cAAuB,IAAI,GAAA;AACxF,IAAA,IAAI,CAACnB,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;QACnC,MAAMY,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,oBAAoB,EAAEW,UAAAA,CAAW,eAAe,CAAC,CAAA;AACtE,IAAA;AACA,IAAA,IAAIO,WAAAA,IAAe,OAAOnB,IAAAA,CAAKoB,IAAI,KAAK,QAAA,EAAU;QAC9C,MAAMR,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,oBAAoB,EAAEW,UAAAA,CAAW,uBAAuB,CAAC,CAAA;AAC9E,IAAA;IACA,OAAOZ,IAAAA;AACX;;;;"}
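validateLinkBackup and the LinkBackup interface are dropped from the public exports in this release. Callers that still need the old shape can inline an equivalent check; the sketch below simply mirrors the validation logic removed in the diff above and is not part of the 1.2.0 API.

```typescript
// Local stand-in for the removed validateLinkBackup export, mirroring the
// checks shown in the removed code above. Not part of the published API.
interface LinkBackupEntry {
    originalVersion: string;
    dependencyType: string;
    relativePath: string;
}

type LinkBackup = Record<string, LinkBackupEntry>;

const validateLinkBackup = (data: any): LinkBackup => {
    if (!data || typeof data !== 'object') {
        throw new Error('Invalid link backup: not an object');
    }
    for (const [key, value] of Object.entries(data)) {
        if (!value || typeof value !== 'object') {
            throw new Error(`Invalid link backup entry for ${key}: not an object`);
        }
        const entry = value as any;
        if (typeof entry.originalVersion !== 'string') {
            throw new Error(`Invalid link backup entry for ${key}: originalVersion must be a string`);
        }
        if (typeof entry.dependencyType !== 'string') {
            throw new Error(`Invalid link backup entry for ${key}: dependencyType must be a string`);
        }
        if (typeof entry.relativePath !== 'string') {
            throw new Error(`Invalid link backup entry for ${key}: relativePath must be a string`);
        }
    }
    return data as LinkBackup;
};
```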
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@eldrforge/kodrdriv",
3
- "version": "0.1.0",
3
+ "version": "1.2.0",
4
4
  "description": "Create Intelligent Release Notes or Change Logs from Git",
5
5
  "main": "dist/main.js",
6
6
  "type": "module",
@@ -12,7 +12,7 @@
12
12
  "url": "git+https://github.com/calenvarek/kodrdriv.git"
13
13
  },
14
14
  "scripts": {
15
- "build": "npm run lint && tsc --noEmit && vite build && copyfiles -u 1 \"src/**/*.md\" dist",
15
+ "build": "npm run lint && tsc --noEmit && vite build && copyfiles -u 1 \"src/**/*.md\" dist && chmod 755 ./dist/main.js",
16
16
  "start": "dist/main.js",
17
17
  "dev": "vite",
18
18
  "watch": "vite build --watch",
@@ -41,6 +41,7 @@
41
41
  "@riotprompt/riotprompt": "^0.0.8",
42
42
  "@theunwalked/cardigantime": "^0.0.16",
43
43
  "@theunwalked/unplayable": "^0.0.21",
44
+ "@types/semver": "^7.7.0",
44
45
  "commander": "^14.0.0",
45
46
  "dayjs": "^1.11.13",
46
47
  "dotenv": "^17.2.1",
@@ -48,32 +49,33 @@
48
49
  "js-yaml": "^4.1.0",
49
50
  "moment-timezone": "^0.6.0",
50
51
  "openai": "^5.10.2",
52
+ "semver": "^7.7.2",
51
53
  "shell-escape": "^0.2.0",
52
54
  "winston": "^3.17.0",
53
55
  "zod": "^3.23.8"
54
56
  },
55
57
  "devDependencies": {
56
58
  "@eslint/eslintrc": "^3.3.1",
57
- "@eslint/js": "^9.32.0",
59
+ "@eslint/js": "^9.33.0",
58
60
  "@rollup/plugin-replace": "^6.0.2",
59
61
  "@swc/core": "^1.13.3",
60
62
  "@types/js-yaml": "^4.0.9",
61
- "@types/node": "^24.2.0",
63
+ "@types/node": "^24.2.1",
62
64
  "@types/shell-escape": "^0.2.3",
63
65
  "@types/winston": "^2.4.4",
64
- "@typescript-eslint/eslint-plugin": "^8.39.0",
65
- "@typescript-eslint/parser": "^8.39.0",
66
+ "@typescript-eslint/eslint-plugin": "^8.39.1",
67
+ "@typescript-eslint/parser": "^8.39.1",
66
68
  "@vitest/coverage-v8": "^3.2.4",
67
69
  "copyfiles": "^2.4.1",
68
70
  "esbuild": "0.25.8",
69
- "eslint": "^9.32.0",
71
+ "eslint": "^9.33.0",
70
72
  "eslint-plugin-import": "^2.32.0",
71
73
  "globals": "^16.3.0",
72
74
  "mockdate": "^3.0.5",
73
75
  "rollup-plugin-preserve-shebang": "^1.0.1",
74
76
  "rollup-plugin-visualizer": "^6.0.3",
75
77
  "typescript": "^5.9.2",
76
- "vite": "^7.0.6",
78
+ "vite": "^7.1.2",
77
79
  "vite-plugin-node": "^7.0.0",
78
80
  "vitest": "^3.2.4"
79
81
  }
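The manifest changes add semver (with @types/semver) as a runtime dependency and extend the build step to mark dist/main.js executable. How kodrdriv uses semver internally is not shown in this hunk; the sketch below is only a reminder of the kind of comparisons the library provides, with illustrative version values.

```typescript
// Reminder of the semver API surface added as a dependency here; the version
// values are illustrative only.
import semver from 'semver';

console.log(semver.gt('1.2.0', '0.1.0'));          // true
console.log(semver.satisfies('1.2.0', '^1.0.0'));  // true
console.log(semver.diff('0.1.0', '1.2.0'));        // 'major'
```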
package/test-multiline/cli/package.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "name": "cli-tool",
3
+ "version": "1.0.0",
4
+ "description": "Command line tool",
5
+ "dependencies": {
6
+ "core-library": "^1.0.0"
7
+ }
8
+ }
package/test-multiline/core/package.json ADDED
@@ -0,0 +1,5 @@
1
+ {
2
+ "name": "core-library",
3
+ "version": "1.0.0",
4
+ "description": "Core library"
5
+ }
package/test-multiline/mobile/package.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "name": "mobile-app",
3
+ "version": "1.0.0",
4
+ "description": "Mobile application",
5
+ "dependencies": {
6
+ "core-library": "^1.0.0"
7
+ }
8
+ }
package/test-multiline/web/package.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "name": "web-app",
3
+ "version": "1.0.0",
4
+ "description": "Web application",
5
+ "dependencies": {
6
+ "core-library": "^1.0.0"
7
+ }
8
+ }
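The four new test-multiline fixtures describe a tiny workspace in which cli-tool, mobile-app, and web-app each depend on core-library. The sketch below is a hypothetical illustration of the build order such a graph implies; it is not kodrdriv's actual tree or link implementation.

```typescript
// The fixtures above form a small dependency graph: three packages depend on
// core-library. Illustrative topological ordering only.
const packages: Record<string, string[]> = {
    'core-library': [],
    'cli-tool': ['core-library'],
    'mobile-app': ['core-library'],
    'web-app': ['core-library'],
};

// Emit a package only after all of its dependencies have been emitted.
const order: string[] = [];
const visit = (name: string): void => {
    if (order.includes(name)) return;
    for (const dep of packages[name] ?? []) visit(dep);
    order.push(name);
};
Object.keys(packages).forEach(visit);

console.log(order); // [ 'core-library', 'cli-tool', 'mobile-app', 'web-app' ]
```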