@eldrforge/kodrdriv 1.2.26 → 1.2.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AI-FRIENDLY-LOGGING-GUIDE.md +237 -0
- package/AI-LOGGING-MIGRATION-COMPLETE.md +371 -0
- package/ALREADY-PUBLISHED-PACKAGES-FIX.md +264 -0
- package/AUDIT-BRANCHES-PROGRESS-FIX.md +90 -0
- package/AUDIT-EXAMPLE-OUTPUT.md +113 -0
- package/CHECKPOINT-RECOVERY-FIX.md +450 -0
- package/LOGGING-MIGRATION-STATUS.md +186 -0
- package/PARALLEL-PUBLISH-FIXES-IMPLEMENTED.md +405 -0
- package/PARALLEL-PUBLISH-IMPROVEMENTS-IMPLEMENTED.md +439 -0
- package/PARALLEL-PUBLISH-QUICK-REFERENCE.md +375 -0
- package/PARALLEL_EXECUTION_FIX.md +2 -2
- package/PUBLISH_IMPROVEMENTS_IMPLEMENTED.md +294 -0
- package/VERSION-AUDIT-FIX.md +333 -0
- package/dist/application.js +6 -6
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +43 -13
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +18 -18
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +32 -32
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/clean.js +9 -9
- package/dist/commands/clean.js.map +1 -1
- package/dist/commands/commit.js +20 -20
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/development.js +91 -90
- package/dist/commands/development.js.map +1 -1
- package/dist/commands/link.js +36 -36
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish.js +345 -225
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/release.js +14 -14
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/review.js +15 -17
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/select-audio.js +5 -5
- package/dist/commands/select-audio.js.map +1 -1
- package/dist/commands/tree.js +75 -34
- package/dist/commands/tree.js.map +1 -1
- package/dist/commands/unlink.js +39 -39
- package/dist/commands/unlink.js.map +1 -1
- package/dist/commands/updates.js +150 -14
- package/dist/commands/updates.js.map +1 -1
- package/dist/commands/versions.js +14 -13
- package/dist/commands/versions.js.map +1 -1
- package/dist/constants.js +1 -1
- package/dist/content/diff.js +5 -5
- package/dist/content/diff.js.map +1 -1
- package/dist/content/files.js +2 -2
- package/dist/content/files.js.map +1 -1
- package/dist/content/log.js +3 -3
- package/dist/content/log.js.map +1 -1
- package/dist/execution/CommandValidator.js +6 -6
- package/dist/execution/CommandValidator.js.map +1 -1
- package/dist/execution/DynamicTaskPool.js +33 -10
- package/dist/execution/DynamicTaskPool.js.map +1 -1
- package/dist/execution/RecoveryManager.js +99 -21
- package/dist/execution/RecoveryManager.js.map +1 -1
- package/dist/execution/TreeExecutionAdapter.js +65 -48
- package/dist/execution/TreeExecutionAdapter.js.map +1 -1
- package/dist/main.js +2 -2
- package/dist/main.js.map +1 -1
- package/dist/util/checkpointManager.js +4 -4
- package/dist/util/checkpointManager.js.map +1 -1
- package/dist/util/dependencyGraph.js +2 -2
- package/dist/util/dependencyGraph.js.map +1 -1
- package/dist/util/fileLock.js +1 -1
- package/dist/util/fileLock.js.map +1 -1
- package/dist/util/general.js +148 -15
- package/dist/util/general.js.map +1 -1
- package/dist/util/interactive.js +2 -2
- package/dist/util/interactive.js.map +1 -1
- package/dist/util/performance.js.map +1 -1
- package/dist/util/safety.js +13 -13
- package/dist/util/safety.js.map +1 -1
- package/dist/utils/branchState.js +567 -0
- package/dist/utils/branchState.js.map +1 -0
- package/package.json +1 -1
- package/scripts/update-test-log-assertions.js +73 -0
package/dist/util/interactive.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"interactive.js","sources":["../../src/util/interactive.ts"],"sourcesContent":["#!/usr/bin/env node\n/**\n * Kodrdriv-specific interactive utilities\n *\n * This module contains application-specific interactive workflows that build on\n * top of the generic interactive functions from @eldrforge/ai-service.\n *\n * Re-exports from ai-service for convenience, plus kodrdriv-specific functions.\n */\n\nimport { getDryRunLogger } from '../logging';\n\n// Re-export everything from ai-service for backwards compatibility\nexport {\n getUserChoice,\n getUserTextInput,\n editContentInEditor,\n getLLMFeedbackInEditor,\n requireTTY,\n SecureTempFile,\n createSecureTempFile,\n cleanupTempFile,\n STANDARD_CHOICES,\n type Choice,\n type InteractiveOptions,\n type EditorResult,\n} from '@eldrforge/ai-service';\n\n// Kodrdriv-specific types and functions below\n\nexport interface LLMImprovementConfig {\n /** The type of content being improved (for filenames and logging) */\n contentType: string;\n /** Function that creates a prompt for improvement */\n createImprovedPrompt: (\n promptConfig: any,\n improvementContent: any,\n promptContext: any\n ) => Promise<any>;\n /** Function that calls LLM with the improved prompt */\n callLLM: (\n request: any,\n runConfig: any,\n outputDirectory: string\n ) => Promise<any>;\n /** Function that validates/processes the LLM response */\n processResponse?: (response: any) => any;\n}\n\n/**\n * Generic LLM improvement function that can be configured for different content types\n *\n * This is kodrdriv-specific orchestration logic that combines multiple ai-service\n * primitives into a higher-level workflow.\n *\n * @param currentContent The current content to improve\n * @param runConfig Runtime configuration\n * @param promptConfig Prompt configuration\n * @param promptContext Prompt context\n * @param outputDirectory Output directory for debug files\n * @param improvementConfig Configuration for this specific improvement type\n * @returns Promise resolving to the improved content\n */\nexport async function improveContentWithLLM<T>(\n currentContent: T,\n runConfig: any,\n promptConfig: any,\n promptContext: any,\n outputDirectory: string,\n improvementConfig: LLMImprovementConfig\n): Promise<T> {\n const logger = getDryRunLogger(false);\n\n logger.info(
+
{"version":3,"file":"interactive.js","sources":["../../src/util/interactive.ts"],"sourcesContent":["#!/usr/bin/env node\n/**\n * Kodrdriv-specific interactive utilities\n *\n * This module contains application-specific interactive workflows that build on\n * top of the generic interactive functions from @eldrforge/ai-service.\n *\n * Re-exports from ai-service for convenience, plus kodrdriv-specific functions.\n */\n\nimport { getDryRunLogger } from '../logging';\n\n// Re-export everything from ai-service for backwards compatibility\nexport {\n getUserChoice,\n getUserTextInput,\n editContentInEditor,\n getLLMFeedbackInEditor,\n requireTTY,\n SecureTempFile,\n createSecureTempFile,\n cleanupTempFile,\n STANDARD_CHOICES,\n type Choice,\n type InteractiveOptions,\n type EditorResult,\n} from '@eldrforge/ai-service';\n\n// Kodrdriv-specific types and functions below\n\nexport interface LLMImprovementConfig {\n /** The type of content being improved (for filenames and logging) */\n contentType: string;\n /** Function that creates a prompt for improvement */\n createImprovedPrompt: (\n promptConfig: any,\n improvementContent: any,\n promptContext: any\n ) => Promise<any>;\n /** Function that calls LLM with the improved prompt */\n callLLM: (\n request: any,\n runConfig: any,\n outputDirectory: string\n ) => Promise<any>;\n /** Function that validates/processes the LLM response */\n processResponse?: (response: any) => any;\n}\n\n/**\n * Generic LLM improvement function that can be configured for different content types\n *\n * This is kodrdriv-specific orchestration logic that combines multiple ai-service\n * primitives into a higher-level workflow.\n *\n * @param currentContent The current content to improve\n * @param runConfig Runtime configuration\n * @param promptConfig Prompt configuration\n * @param promptContext Prompt context\n * @param outputDirectory Output directory for debug files\n * @param improvementConfig Configuration for this specific improvement type\n * @returns Promise resolving to the improved content\n */\nexport async function improveContentWithLLM<T>(\n currentContent: T,\n runConfig: any,\n promptConfig: any,\n promptContext: any,\n outputDirectory: string,\n improvementConfig: LLMImprovementConfig\n): Promise<T> {\n const logger = getDryRunLogger(false);\n\n logger.info(`INTERACTIVE_LLM_IMPROVING: Requesting LLM to improve content | Content Type: ${improvementConfig.contentType} | Service: AI | Purpose: Enhance quality`);\n\n // Create the improved prompt using the provided function\n const improvedPromptResult = await improvementConfig.createImprovedPrompt(\n promptConfig,\n currentContent,\n promptContext\n );\n\n // Call the LLM with the improved prompt\n const improvedResponse = await improvementConfig.callLLM(improvedPromptResult, runConfig, outputDirectory);\n\n // Process the response if a processor is provided\n const finalResult = improvementConfig.processResponse\n ? 
improvementConfig.processResponse(improvedResponse)\n : improvedResponse;\n\n logger.info(`INTERACTIVE_LLM_IMPROVED: LLM provided improved content | Content Type: ${improvementConfig.contentType} | Status: enhanced`);\n return finalResult;\n}\n"],"names":["improveContentWithLLM","currentContent","runConfig","promptConfig","promptContext","outputDirectory","improvementConfig","logger","getDryRunLogger","info","contentType","improvedPromptResult","createImprovedPrompt","improvedResponse","callLLM","finalResult","processResponse"],"mappings":";;;;AAiDA,CAAA,CAAA,CAAA;;;;;;;;;;;;;AAaC,CAAA,CAAA,CAAA,CACM,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAeA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAClBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAiB,EACjBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAc,CAAA,CACdC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAiB,CAAA,CACjBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkB,CAAA,CAClBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAuB,CAAA,CACvBC,iBAAuC,CAAA,CAAA,CAAA;AAEvC,CAAA,CAAA,CAAA,CAAA,MAAMC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAASC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAgB,KAAA,CAAA,CAAA;IAE/BD,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAOE,CAAAA,CAAAA,CAAAA,CAAI,CAAC,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAA6E,CAAA,CAAEH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBI,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAW,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAyC,CAAC,CAAA,CAAA;;AAGpK,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAuB,CAAA,CAAA,CAAA,CAAA,CAAA,CAAML,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBM,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAoB,CACrET,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GACAF,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CACAG,aAAAA,CAAAA,CAAAA;;AAIJ,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMS,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAmB,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMP,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBQ,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAO,CAACH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GAAsBT,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAWG,eAAAA,CAAAA,CAAAA;;AAG1F,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA
,CAAMU,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAcT,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBU,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAe,CAAA,CAAA,CAC/CV,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBU,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAe,CAACH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GAClCA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA;IAENN,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAOE,CAAAA,CAAAA,CAAAA,CAAI,CAAC,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAwE,CAAA,CAAEH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBI,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAW,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAmB,CAAC,CAAA,CAAA;IACzI,OAAOK,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA;AACX,CAAA;;"}
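The new source map above embeds the updated src/util/interactive.ts, which re-exports the ai-service interactive helpers and defines improveContentWithLLM, an orchestration helper that builds an improvement prompt, calls an LLM, and optionally post-processes the response while emitting the new INTERACTIVE_LLM_IMPROVING / INTERACTIVE_LLM_IMPROVED log lines. A minimal usage sketch based only on the signature and LLMImprovementConfig shape visible in the embedded source; the runtime values and the callModel stand-in are hypothetical, not part of the package:

```typescript
import { improveContentWithLLM, LLMImprovementConfig } from './util/interactive';

// Hypothetical inputs: in kodrdriv these come from the command runtime.
const runConfig: any = {};
const promptConfig: any = {};
const promptContext: any = { projectName: 'example' };
const currentNotes = { title: 'v1.2.28', body: 'Draft release notes' };

// Hypothetical model call: stands in for whatever ai-service client is used.
const callModel = async (request: any) => ({ content: request });

const config: LLMImprovementConfig = {
    contentType: 'release-notes',
    // Build the improvement prompt from the current content and its context.
    createImprovedPrompt: async (promptCfg, improvementContent, context) => ({
        promptCfg,
        improvementContent,
        context,
    }),
    // Send the prompt to the model; outputDirectory receives debug files.
    callLLM: async (request, _runConfig, _outputDirectory) => callModel(request),
    // Optionally unwrap or validate the raw response before it is returned.
    processResponse: (response) => response.content,
};

const improved = await improveContentWithLLM(
    currentNotes, runConfig, promptConfig, promptContext, './output/debug', config,
);
```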
package/dist/util/performance.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 
'package.json');\n foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath 
= path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all dependencies');\n\n const allDependencies: Record<string, string> = 
{};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('⚠️ WARNING: Found file: dependencies that should not be committed:');\n for (const file of filesWithFileDepedencies) {\n logger.warn(` 📄 ${file.path}:`);\n for (const dep of file.dependencies) {\n logger.warn(` - ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('💡 Remember to run \"kodrdriv unlink\" before committing to restore registry versions!');\n logger.warn(' Or add a pre-commit hook to prevent accidental commits of linked dependencies.');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AA
ChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;;;;"}
+
{"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 
'package.json');\n foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath 
= path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`PERFORMANCE_DIR_READ_FAILED: Unable to read directory | Directory: ${absoluteRootDir} | Error: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all 
dependencies');\n\n const allDependencies: Record<string, string> = {};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('FILE_DEPS_WARNING: Found file: dependencies that should not be committed | Count: ' + filesWithFileDepedencies.length + ' | Impact: May cause build issues');\n for (const file of filesWithFileDepedencies) {\n logger.warn(`FILE_DEPS_PACKAGE: Package with file dependencies | Path: ${file.path}`);\n for (const dep of file.dependencies) {\n logger.warn(`FILE_DEPS_DETAIL: File dependency detected | Dependency: ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('FILE_DEPS_RESOLUTION: Action required before committing | Command: kodrdriv unlink | Purpose: Restore registry versions');\n logger.warn('FILE_DEPS_PREVENTION: Alternative option | Action: Add pre-commit hook | Purpose: Prevent accidental commits of linked dependencies');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AA
ChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;;;;"}
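The updated src/util/performance.ts embedded above keeps the same exports (PerformanceTimer, findAllPackageJsonFiles, scanDirectoryForPackages, checkForFileDependencies, and related helpers) and only rewrites their warning text into the structured FILE_DEPS_* / PERFORMANCE_* format. A hedged sketch of how the timer and scanner compose, assuming an in-repo import path and a hypothetical storage adapter that exposes the exists/isDirectory/listFiles/readFile methods the scanner calls:

```typescript
import fs from 'fs/promises';
import {
    PerformanceTimer,
    findAllPackageJsonFiles,
    checkForFileDependencies,
} from './util/performance';
import { getLogger } from './logging';

// Hypothetical storage adapter with the minimal surface the scanner uses.
const storage = {
    exists: (p: string) => fs.access(p).then(() => true, () => false),
    isDirectory: async (p: string) => (await fs.stat(p)).isDirectory(),
    listFiles: (p: string) => fs.readdir(p),
    readFile: (p: string, encoding: BufferEncoding) => fs.readFile(p, encoding),
};

const logger = getLogger();
const timer = PerformanceTimer.start(logger, 'workspace scan');

// Recursively collect package.json files (node_modules, dist, etc. are excluded).
const packages = await findAllPackageJsonFiles(process.cwd(), storage);

// Warn (FILE_DEPS_WARNING / FILE_DEPS_PACKAGE / FILE_DEPS_DETAIL) about any
// "file:" dependencies that should not be committed.
checkForFileDependencies(packages);

timer.end(`scanned ${packages.length} package.json files`);
```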
package/dist/util/safety.js
CHANGED

@@ -131,11 +131,11 @@ const findAllPackageJsonFiles = async (rootDir, storage)=>{
     if (issues.length === 0) {
         return;
     }
-    logger.warn(
+    logger.warn(`FILE_DEPS_WARNING: Found file: dependencies that should not be committed | Context: ${context} | Count: ${issues.length} | Impact: May cause build issues`);
     for (const issue of issues){
-        logger.warn(`
+        logger.warn(`FILE_DEPS_PACKAGE: Package with file dependencies | Package: ${issue.packagePath}`);
         for (const dep of issue.dependencies){
-            logger.warn(`
+            logger.warn(`FILE_DEPS_DETAIL: Dependency details | Name: ${dep.name} | Version: ${dep.version} | Type: ${dep.dependencyType}`);
         }
     }
     logger.warn('');

@@ -145,20 +145,20 @@ const findAllPackageJsonFiles = async (rootDir, storage)=>{
  * @param hasUnlinkCapability Whether the current context supports unlinking
  */ const logFileDependencySuggestions = (hasUnlinkCapability = true)=>{
     const logger = getLogger();
-    logger.warn('
+    logger.warn('FILE_DEPS_RESOLUTION: Steps to resolve file dependency issues:');
     if (hasUnlinkCapability) {
-        logger.warn('
-        logger.warn('
-        logger.warn('
+        logger.warn('  STEP_1: Restore registry versions | Command: kodrdriv unlink');
+        logger.warn('  STEP_2: Complete commit operation | Command: git commit');
+        logger.warn('  STEP_3: Restore local development links | Command: kodrdriv link');
     } else {
-        logger.warn('
-        logger.warn('
-        logger.warn('
+        logger.warn('  STEP_1: Manually restore registry versions in package.json files');
+        logger.warn('  STEP_2: Complete commit operation | Command: git commit');
+        logger.warn('  STEP_3: Re-link local dependencies for development');
     }
     logger.warn('');
-    logger.warn('
-    logger.warn('
-    logger.warn('
+    logger.warn('FILE_DEPS_BYPASS: Alternative bypass options:');
+    logger.warn('  OPTION_1: Skip file check | Flag: --skip-file-check');
+    logger.warn('  OPTION_2: Skip all hooks | Command: git commit --no-verify');
     logger.warn('');
 };

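The two hunks above change only the strings passed to logger.warn: emoji-prefixed, human-oriented messages become structured KEY: description | Field: value lines (FILE_DEPS_WARNING, FILE_DEPS_PACKAGE, FILE_DEPS_DETAIL, FILE_DEPS_RESOLUTION, FILE_DEPS_BYPASS); control flow is untouched. A hedged sketch of how these safety helpers chain together, based on the signatures visible in the embedded src/util/safety.ts source below; the storage wrapper is again a hypothetical stand-in:

```typescript
import {
    checkForFileDependencies,
    logFileDependencyWarning,
    logFileDependencySuggestions,
} from './util/safety';

// Hypothetical storage wrapper (same shape as in the performance sketch above).
declare const storage: {
    exists(p: string): Promise<boolean>;
    isDirectory(p: string): Promise<boolean>;
    listFiles(p: string): Promise<string[]>;
    readFile(p: string, encoding: string): Promise<string>;
};

// Collect any "file:" dependencies across the workspace before committing.
const issues = await checkForFileDependencies(storage, process.cwd());

if (issues.length > 0) {
    // Emits the FILE_DEPS_WARNING / FILE_DEPS_PACKAGE / FILE_DEPS_DETAIL lines
    // shown in the first hunk, tagged with the given context.
    logFileDependencyWarning(issues, 'commit');

    // Emits the FILE_DEPS_RESOLUTION / STEP_n / FILE_DEPS_BYPASS guidance
    // shown in the second hunk.
    logFileDependencySuggestions(true);
}
```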
package/dist/util/safety.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"safety.js","sources":["../../src/util/safety.ts"],"sourcesContent":["import path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\ninterface FileDependencyIssue {\n packagePath: string;\n dependencies: Array<{\n name: string;\n version: string;\n dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies';\n }>;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\nconst findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const packageJsonFiles: PackageJsonLocation[] = [];\n\n const scanDirectory = async (currentDir: string, depth: number = 0): Promise<void> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return;\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return;\n }\n\n const items = await storage.listFiles(currentDir);\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n const relativePath = path.relative(rootDir, currentDir);\n\n packageJsonFiles.push({\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n });\n\n logger.debug(`Found package.json at: ${relativePath || '.'}`);\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n }\n }\n\n // Scan subdirectories, excluding build/generated directories\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n await scanDirectory(itemPath, depth + 1);\n }\n } catch (error: any) {\n // Skip directories that can't be accessed\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n continue;\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n }\n };\n\n await scanDirectory(rootDir);\n\n logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);\n return packageJsonFiles;\n};\n\n/**\n * Checks for file: dependencies in package.json files that should not be committed\n * @param storage Storage utility instance\n * @param rootDir Root directory to scan (defaults to current working directory)\n * @returns Array of issues found, empty array if no issues\n */\nexport const checkForFileDependencies = async (storage: any, rootDir: string = process.cwd()): Promise<FileDependencyIssue[]> => {\n const logger = getLogger();\n const issues: FileDependencyIssue[] = [];\n\n try {\n const packageJsonFiles = await findAllPackageJsonFiles(rootDir, storage);\n\n for (const { packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: 
Array<{name: string, version: string, dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies'}> = [];\n\n // Check all dependency types for file: paths\n const dependencyChecks = [\n { deps: packageJson.dependencies, type: 'dependencies' as const },\n { deps: packageJson.devDependencies, type: 'devDependencies' as const },\n { deps: packageJson.peerDependencies, type: 'peerDependencies' as const }\n ];\n\n for (const { deps, type } of dependencyChecks) {\n if (deps) {\n for (const [name, version] of Object.entries(deps)) {\n if (version.startsWith('file:')) {\n fileDeps.push({ name, version, dependencyType: type });\n }\n }\n }\n }\n\n if (fileDeps.length > 0) {\n issues.push({\n packagePath: relativePath,\n dependencies: fileDeps\n });\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to check for file dependencies: ${error.message}`);\n }\n\n return issues;\n};\n\n/**\n * Logs file dependency issues in a user-friendly format\n * @param issues Array of file dependency issues\n * @param context Context for the warning (e.g., 'commit', 'link check')\n */\nexport const logFileDependencyWarning = (issues: FileDependencyIssue[], context: string = 'operation'): void => {\n const logger = getLogger();\n\n if (issues.length === 0) {\n return;\n }\n\n logger.warn(`⚠️ WARNING: Found file: dependencies that should not be committed during ${context}:`);\n for (const issue of issues) {\n logger.warn(` 📄 ${issue.packagePath}:`);\n for (const dep of issue.dependencies) {\n logger.warn(` - ${dep.name}: ${dep.version} (${dep.dependencyType})`);\n }\n }\n logger.warn('');\n};\n\n/**\n * Provides suggestions for resolving file dependency issues\n * @param hasUnlinkCapability Whether the current context supports unlinking\n */\nexport const logFileDependencySuggestions = (hasUnlinkCapability: boolean = true): void => {\n const logger = getLogger();\n\n logger.warn('💡 To resolve this:');\n if (hasUnlinkCapability) {\n logger.warn(' 1. Run \"kodrdriv unlink\" to restore registry versions');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. Run \"kodrdriv link\" again for local development');\n } else {\n logger.warn(' 1. Manually restore registry versions in package.json files');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. 
Re-link your local dependencies');\n }\n logger.warn('');\n logger.warn(' Or to bypass this check:');\n logger.warn(' - Add --skip-file-check flag to your command');\n logger.warn(' - Or use git commit --no-verify to skip all hooks');\n logger.warn('');\n};\n"],"names":["EXCLUDED_DIRECTORIES","findAllPackageJsonFiles","rootDir","storage","logger","getLogger","packageJsonFiles","scanDirectory","currentDir","depth","exists","isDirectory","items","listFiles","includes","packageJsonPath","path","join","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","relative","push","debug","error","message","item","itemPath","length","checkForFileDependencies","process","cwd","issues","fileDeps","dependencyChecks","deps","dependencies","type","devDependencies","peerDependencies","name","version","Object","entries","startsWith","dependencyType","packagePath","logFileDependencyWarning","context","warn","issue","dep","logFileDependencySuggestions","hasUnlinkCapability"],"mappings":";;;;AA0BA,MAAMA,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED,MAAMC,uBAAAA,GAA0B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACpD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,mBAA0C,EAAE;AAElD,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE9D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA;AACJ,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAMN,OAAAA,CAAQO,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAML,OAAAA,CAAQQ,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA;AACJ,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMT,OAAAA,CAAQU,SAAS,CAACL,UAAAA,CAAAA;;YAGtC,IAAII,KAAAA,CAAME,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAMC,eAAAA,GAAkBC,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAY,cAAA,CAAA;gBAC9C,IAAI;AACA,oBAAA,MAAMU,kBAAAA,GAAqB,MAAMf,OAAAA,CAAQgB,QAAQ,CAACJ,eAAAA,EAAiB,OAAA,CAAA;oBACnE,MAAMK,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBH,eAAAA,CAAAA;oBACjD,MAAMO,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQL,eAAAA,CAAAA;AAChD,oBAAA,MAAMS,YAAAA,GAAeR,aAAAA,CAAKS,QAAQ,CAACvB,OAAAA,EAASM,UAAAA,CAAAA;AAE5CF,oBAAAA,gBAAAA,CAAiBoB,IAAI,CAAC;wBAClBV,IAAAA,EAAMD,eAAAA;AACNO,wBAAAA,WAAAA;AACAE,wBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,qBAAA,CAAA;AAEApB,oBAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uBAAuB,EAAEH,gBAAgB,GAAA,CAAA,CAAK,CAAA;AAChE,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEa,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AACvF,gBAAA;AACJ,YAAA;;YAGA,KAAK,MAAMC,QAAQlB,KAAAA,CAAO;gBACtB,IAAIZ,oBAAAA,CAAqBc,QAAQ,CAACgB,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAWf,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAYsB,IAAAA,CAAAA;gBACvC,IAAI;AACA,oBAAA,IAAI,MAAM3B,OAAAA,CAAQQ,WAAW,CAACoB,QAAAA,CAAAA,EAAW;wBACrC,MAAMxB,aAAAA,CAAcwB,UAAUtB,KAAAA,GAAQ,CAAA,CAAA;AAC1C,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOmB,KAAAA,EAAY;;oBAEjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,kBAAkB,EAAEI,SAAS,EAAE,EAAEH,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC9D,oBAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOD,KAAAA,EAAY;YACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,yBAAyB,EAAEnB,WAAW,EAAE,EAAEoB,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC3E,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMtB,aAAAA,CAAcL,OAAAA,CAAAA;IAEpBE,MAAAA,CAAOuB,KAAK,CAAC,CAAC,MAAM,EAAErB,gBAAAA,CAAiB0B,MAAM,CAAC,uCAAuC,CAAC,CAAA;IACtF,OAAO1B,gBAAAA;AACX,CAAA;AAEA;;;;;UAMa2B,wBAAAA,GAA2B,OAAO9B,SAAcD,OAAAA,GAAkBgC,OAAAA,CAAQC,GAAG,EAAE,GAAA;AACxF,IAAA,MAAM/B,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM+B,SAAgC,EAAE;IAExC,IAAI;QACA,MAAM9B,gBAAAA,GAAmB,MAAML,uBAAAA,CAAwBC,OAAAA,EAASC,OAAAA,CAAAA;AAEhE,QAA
A,KAAK,MAAM,EAAEmB,WAAW,EAAEE,YAAY,EAAE,IAAIlB,gBAAAA,CAAkB;AAC1D,YAAA,MAAM+B,WAA4H,EAAE;;AAGpI,YAAA,MAAMC,gBAAAA,GAAmB;AACrB,gBAAA;AAAEC,oBAAAA,IAAAA,EAAMjB,YAAYkB,YAAY;oBAAEC,IAAAA,EAAM;AAAwB,iBAAA;AAChE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYoB,eAAe;oBAAED,IAAAA,EAAM;AAA2B,iBAAA;AACtE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYqB,gBAAgB;oBAAEF,IAAAA,EAAM;AAA4B;AAC3E,aAAA;AAED,YAAA,KAAK,MAAM,EAAEF,IAAI,EAAEE,IAAI,EAAE,IAAIH,gBAAAA,CAAkB;AAC3C,gBAAA,IAAIC,IAAAA,EAAM;oBACN,KAAK,MAAM,CAACK,IAAAA,EAAMC,OAAAA,CAAQ,IAAIC,MAAAA,CAAOC,OAAO,CAACR,IAAAA,CAAAA,CAAO;wBAChD,IAAIM,OAAAA,CAAQG,UAAU,CAAC,OAAA,CAAA,EAAU;AAC7BX,4BAAAA,QAAAA,CAASX,IAAI,CAAC;AAAEkB,gCAAAA,IAAAA;AAAMC,gCAAAA,OAAAA;gCAASI,cAAAA,EAAgBR;AAAK,6BAAA,CAAA;AACxD,wBAAA;AACJ,oBAAA;AACJ,gBAAA;AACJ,YAAA;YAEA,IAAIJ,QAAAA,CAASL,MAAM,GAAG,CAAA,EAAG;AACrBI,gBAAAA,MAAAA,CAAOV,IAAI,CAAC;oBACRwB,WAAAA,EAAa1B,YAAAA;oBACbgB,YAAAA,EAAcH;AAClB,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA,CAAE,OAAOT,KAAAA,EAAY;AACjBxB,QAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uCAAuC,EAAEC,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC1E,IAAA;IAEA,OAAOO,MAAAA;AACX;AAEA;;;;AAIC,IACM,MAAMe,wBAAAA,GAA2B,CAACf,MAAAA,EAA+BgB,UAAkB,WAAW,GAAA;AACjG,IAAA,MAAMhD,MAAAA,GAASC,SAAAA,EAAAA;IAEf,IAAI+B,MAAAA,CAAOJ,MAAM,KAAK,CAAA,EAAG;AACrB,QAAA;AACJ,IAAA;AAEA5B,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,0EAA0E,EAAED,OAAAA,CAAQ,CAAC,CAAC,CAAA;IACnG,KAAK,MAAME,SAASlB,MAAAA,CAAQ;QACxBhC,MAAAA,CAAOiD,IAAI,CAAC,CAAC,KAAK,EAAEC,KAAAA,CAAMJ,WAAW,CAAC,CAAC,CAAC,CAAA;AACxC,QAAA,KAAK,MAAMK,GAAAA,IAAOD,KAAAA,CAAMd,YAAY,CAAE;AAClCpC,YAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,MAAM,EAAEE,GAAAA,CAAIX,IAAI,CAAC,EAAE,EAAEW,GAAAA,CAAIV,OAAO,CAAC,EAAE,EAAEU,IAAIN,cAAc,CAAC,CAAC,CAAC,CAAA;AAC3E,QAAA;AACJ,IAAA;AACA7C,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;AAEA;;;AAGC,IACM,MAAMG,4BAAAA,GAA+B,CAACC,sBAA+B,IAAI,GAAA;AAC5E,IAAA,MAAMrD,MAAAA,GAASC,SAAAA,EAAAA;AAEfD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,qBAAA,CAAA;AACZ,IAAA,IAAII,mBAAAA,EAAqB;AACrBrD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,0DAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uDAAA,CAAA;IAChB,CAAA,MAAO;AACHjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,gEAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uCAAA,CAAA;AAChB,IAAA;AACAjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,6BAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,iDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,sDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;;;;"}
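The TypeScript embedded in the source map above (src/util/safety.ts) flags any dependency whose version specifier starts with `file:` across `dependencies`, `devDependencies`, and `peerDependencies`; this detection logic is identical in 1.2.26 and 1.2.28. Below is a minimal, self-contained sketch of that check. The wrapper name `findFileDeps`, the inline sample manifest, and running it on an in-memory object are illustrative assumptions; the real module validates parsed package.json files via `safeJsonParse`/`validatePackageJson` from `@eldrforge/git-tools` and discovers them by scanning the directory tree through an injected storage adapter.

```typescript
// Illustrative sketch of the file: dependency check performed inside
// checkForFileDependencies in src/util/safety.ts. The sample manifest
// below is hypothetical; the real code reads and validates package.json
// files found while walking the directory tree.

type DependencyType = 'dependencies' | 'devDependencies' | 'peerDependencies';

interface FileDep {
    name: string;
    version: string;
    dependencyType: DependencyType;
}

const findFileDeps = (
    packageJson: Partial<Record<DependencyType, Record<string, string>>>
): FileDep[] => {
    const fileDeps: FileDep[] = [];
    const types: DependencyType[] = ['dependencies', 'devDependencies', 'peerDependencies'];

    for (const type of types) {
        const deps = packageJson[type];
        if (!deps) continue;
        for (const [name, version] of Object.entries(deps)) {
            // Anything resolved from the local filesystem is flagged.
            if (version.startsWith('file:')) {
                fileDeps.push({ name, version, dependencyType: type });
            }
        }
    }
    return fileDeps;
};

// Hypothetical manifest left behind by a local "kodrdriv link" run.
const sample = {
    dependencies: { '@eldrforge/git-tools': 'file:../git-tools' },
    devDependencies: { typescript: '^5.4.0' }
};

console.log(findFileDeps(sample));
// -> [ { name: '@eldrforge/git-tools', version: 'file:../git-tools', dependencyType: 'dependencies' } ]
```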
+
{"version":3,"file":"safety.js","sources":["../../src/util/safety.ts"],"sourcesContent":["import path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\ninterface FileDependencyIssue {\n packagePath: string;\n dependencies: Array<{\n name: string;\n version: string;\n dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies';\n }>;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\nconst findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const packageJsonFiles: PackageJsonLocation[] = [];\n\n const scanDirectory = async (currentDir: string, depth: number = 0): Promise<void> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return;\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return;\n }\n\n const items = await storage.listFiles(currentDir);\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n const relativePath = path.relative(rootDir, currentDir);\n\n packageJsonFiles.push({\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n });\n\n logger.debug(`Found package.json at: ${relativePath || '.'}`);\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n }\n }\n\n // Scan subdirectories, excluding build/generated directories\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n await scanDirectory(itemPath, depth + 1);\n }\n } catch (error: any) {\n // Skip directories that can't be accessed\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n continue;\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n }\n };\n\n await scanDirectory(rootDir);\n\n logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);\n return packageJsonFiles;\n};\n\n/**\n * Checks for file: dependencies in package.json files that should not be committed\n * @param storage Storage utility instance\n * @param rootDir Root directory to scan (defaults to current working directory)\n * @returns Array of issues found, empty array if no issues\n */\nexport const checkForFileDependencies = async (storage: any, rootDir: string = process.cwd()): Promise<FileDependencyIssue[]> => {\n const logger = getLogger();\n const issues: FileDependencyIssue[] = [];\n\n try {\n const packageJsonFiles = await findAllPackageJsonFiles(rootDir, storage);\n\n for (const { packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: 
Array<{name: string, version: string, dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies'}> = [];\n\n // Check all dependency types for file: paths\n const dependencyChecks = [\n { deps: packageJson.dependencies, type: 'dependencies' as const },\n { deps: packageJson.devDependencies, type: 'devDependencies' as const },\n { deps: packageJson.peerDependencies, type: 'peerDependencies' as const }\n ];\n\n for (const { deps, type } of dependencyChecks) {\n if (deps) {\n for (const [name, version] of Object.entries(deps)) {\n if (version.startsWith('file:')) {\n fileDeps.push({ name, version, dependencyType: type });\n }\n }\n }\n }\n\n if (fileDeps.length > 0) {\n issues.push({\n packagePath: relativePath,\n dependencies: fileDeps\n });\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to check for file dependencies: ${error.message}`);\n }\n\n return issues;\n};\n\n/**\n * Logs file dependency issues in a user-friendly format\n * @param issues Array of file dependency issues\n * @param context Context for the warning (e.g., 'commit', 'link check')\n */\nexport const logFileDependencyWarning = (issues: FileDependencyIssue[], context: string = 'operation'): void => {\n const logger = getLogger();\n\n if (issues.length === 0) {\n return;\n }\n\n logger.warn(`FILE_DEPS_WARNING: Found file: dependencies that should not be committed | Context: ${context} | Count: ${issues.length} | Impact: May cause build issues`);\n for (const issue of issues) {\n logger.warn(`FILE_DEPS_PACKAGE: Package with file dependencies | Package: ${issue.packagePath}`);\n for (const dep of issue.dependencies) {\n logger.warn(`FILE_DEPS_DETAIL: Dependency details | Name: ${dep.name} | Version: ${dep.version} | Type: ${dep.dependencyType}`);\n }\n }\n logger.warn('');\n};\n\n/**\n * Provides suggestions for resolving file dependency issues\n * @param hasUnlinkCapability Whether the current context supports unlinking\n */\nexport const logFileDependencySuggestions = (hasUnlinkCapability: boolean = true): void => {\n const logger = getLogger();\n\n logger.warn('FILE_DEPS_RESOLUTION: Steps to resolve file dependency issues:');\n if (hasUnlinkCapability) {\n logger.warn(' STEP_1: Restore registry versions | Command: kodrdriv unlink');\n logger.warn(' STEP_2: Complete commit operation | Command: git commit');\n logger.warn(' STEP_3: Restore local development links | Command: kodrdriv link');\n } else {\n logger.warn(' STEP_1: Manually restore registry versions in package.json files');\n logger.warn(' STEP_2: Complete commit operation | Command: git commit');\n logger.warn(' STEP_3: Re-link local dependencies for development');\n }\n logger.warn('');\n logger.warn('FILE_DEPS_BYPASS: Alternative bypass options:');\n logger.warn(' OPTION_1: Skip file check | Flag: --skip-file-check');\n logger.warn(' OPTION_2: Skip all hooks | Command: git commit --no-verify');\n 
logger.warn('');\n};\n"],"names":["EXCLUDED_DIRECTORIES","findAllPackageJsonFiles","rootDir","storage","logger","getLogger","packageJsonFiles","scanDirectory","currentDir","depth","exists","isDirectory","items","listFiles","includes","packageJsonPath","path","join","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","relative","push","debug","error","message","item","itemPath","length","checkForFileDependencies","process","cwd","issues","fileDeps","dependencyChecks","deps","dependencies","type","devDependencies","peerDependencies","name","version","Object","entries","startsWith","dependencyType","packagePath","logFileDependencyWarning","context","warn","issue","dep","logFileDependencySuggestions","hasUnlinkCapability"],"mappings":";;;;AA0BA,MAAMA,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED,MAAMC,uBAAAA,GAA0B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACpD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,mBAA0C,EAAE;AAElD,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE9D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA;AACJ,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAMN,OAAAA,CAAQO,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAML,OAAAA,CAAQQ,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA;AACJ,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMT,OAAAA,CAAQU,SAAS,CAACL,UAAAA,CAAAA;;YAGtC,IAAII,KAAAA,CAAME,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAMC,eAAAA,GAAkBC,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAY,cAAA,CAAA;gBAC9C,IAAI;AACA,oBAAA,MAAMU,kBAAAA,GAAqB,MAAMf,OAAAA,CAAQgB,QAAQ,CAACJ,eAAAA,EAAiB,OAAA,CAAA;oBACnE,MAAMK,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBH,eAAAA,CAAAA;oBACjD,MAAMO,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQL,eAAAA,CAAAA;AAChD,oBAAA,MAAMS,YAAAA,GAAeR,aAAAA,CAAKS,QAAQ,CAACvB,OAAAA,EAASM,UAAAA,CAAAA;AAE5CF,oBAAAA,gBAAAA,CAAiBoB,IAAI,CAAC;wBAClBV,IAAAA,EAAMD,eAAAA;AACNO,wBAAAA,WAAAA;AACAE,wBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,qBAAA,CAAA;AAEApB,oBAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uBAAuB,EAAEH,gBAAgB,GAAA,CAAA,CAAK,CAAA;AAChE,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEa,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AACvF,gBAAA;AACJ,YAAA;;YAGA,KAAK,MAAMC,QAAQlB,KAAAA,CAAO;gBACtB,IAAIZ,oBAAAA,CAAqBc,QAAQ,CAACgB,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAWf,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAYsB,IAAAA,CAAAA;gBACvC,IAAI;AACA,oBAAA,IAAI,MAAM3B,OAAAA,CAAQQ,WAAW,CAACoB,QAAAA,CAAAA,EAAW;wBACrC,MAAMxB,aAAAA,CAAcwB,UAAUtB,KAAAA,GAAQ,CAAA,CAAA;AAC1C,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOmB,KAAAA,EAAY;;oBAEjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,kBAAkB,EAAEI,SAAS,EAAE,EAAEH,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC9D,oBAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOD,KAAAA,EAAY;YACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,yBAAyB,EAAEnB,WAAW,EAAE,EAAEoB,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC3E,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMtB,aAAAA,CAAcL,OAAAA,CAAAA;IAEpBE,MAAAA,CAAOuB,KAAK,CAAC,CAAC,MAAM,EAAErB,gBAAAA,CAAiB0B,MAAM,CAAC,uCAAuC,CAAC,CAAA;IACtF,OAAO1B,gBAAAA;AACX,CAAA;AAEA;;;;;UAMa2B,wBAAAA,GAA2B,OAAO9B,SAAcD,OAAAA,GAAkBgC,OAAAA,CAAQC,GAAG,EAAE,GAAA;AACxF,IAAA,MAAM/B,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM+B,SAAgC,EAAE;IAExC,IAAI;QACA,MAAM9B,gBAAAA,GAAmB,MAAML,uBAAAA,CAAwBC,OAAAA,EAASC,OAAAA,CAAAA;AAEhE,QAAA,KAAK,MAAM,EAAEmB,WAAW,EAAEE,YAAY,EAAE,IAAIlB,gBAAAA,CAAkB;AAC1D,YAAA,MAAM+B,WAA4H,EAAE;;AAGpI,YAAA,MAAMC,gBAAAA,GAAmB;AACrB,gBAAA;AAAEC,oBAAAA,IAAAA,EAAMjB,YAAYkB,YAAY;oBAAEC,IAAAA,EAAM;AAAwB,iBAAA;AAChE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB
,YAAYoB,eAAe;oBAAED,IAAAA,EAAM;AAA2B,iBAAA;AACtE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYqB,gBAAgB;oBAAEF,IAAAA,EAAM;AAA4B;AAC3E,aAAA;AAED,YAAA,KAAK,MAAM,EAAEF,IAAI,EAAEE,IAAI,EAAE,IAAIH,gBAAAA,CAAkB;AAC3C,gBAAA,IAAIC,IAAAA,EAAM;oBACN,KAAK,MAAM,CAACK,IAAAA,EAAMC,OAAAA,CAAQ,IAAIC,MAAAA,CAAOC,OAAO,CAACR,IAAAA,CAAAA,CAAO;wBAChD,IAAIM,OAAAA,CAAQG,UAAU,CAAC,OAAA,CAAA,EAAU;AAC7BX,4BAAAA,QAAAA,CAASX,IAAI,CAAC;AAAEkB,gCAAAA,IAAAA;AAAMC,gCAAAA,OAAAA;gCAASI,cAAAA,EAAgBR;AAAK,6BAAA,CAAA;AACxD,wBAAA;AACJ,oBAAA;AACJ,gBAAA;AACJ,YAAA;YAEA,IAAIJ,QAAAA,CAASL,MAAM,GAAG,CAAA,EAAG;AACrBI,gBAAAA,MAAAA,CAAOV,IAAI,CAAC;oBACRwB,WAAAA,EAAa1B,YAAAA;oBACbgB,YAAAA,EAAcH;AAClB,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA,CAAE,OAAOT,KAAAA,EAAY;AACjBxB,QAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uCAAuC,EAAEC,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC1E,IAAA;IAEA,OAAOO,MAAAA;AACX;AAEA;;;;AAIC,IACM,MAAMe,wBAAAA,GAA2B,CAACf,MAAAA,EAA+BgB,UAAkB,WAAW,GAAA;AACjG,IAAA,MAAMhD,MAAAA,GAASC,SAAAA,EAAAA;IAEf,IAAI+B,MAAAA,CAAOJ,MAAM,KAAK,CAAA,EAAG;AACrB,QAAA;AACJ,IAAA;AAEA5B,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,oFAAoF,EAAED,OAAAA,CAAQ,UAAU,EAAEhB,MAAAA,CAAOJ,MAAM,CAAC,iCAAiC,CAAC,CAAA;IACvK,KAAK,MAAMsB,SAASlB,MAAAA,CAAQ;AACxBhC,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,6DAA6D,EAAEC,KAAAA,CAAMJ,WAAW,CAAA,CAAE,CAAA;AAC/F,QAAA,KAAK,MAAMK,GAAAA,IAAOD,KAAAA,CAAMd,YAAY,CAAE;AAClCpC,YAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,6CAA6C,EAAEE,IAAIX,IAAI,CAAC,YAAY,EAAEW,IAAIV,OAAO,CAAC,SAAS,EAAEU,GAAAA,CAAIN,cAAc,CAAA,CAAE,CAAA;AAClI,QAAA;AACJ,IAAA;AACA7C,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;AAEA;;;AAGC,IACM,MAAMG,4BAAAA,GAA+B,CAACC,sBAA+B,IAAI,GAAA;AAC5E,IAAA,MAAMrD,MAAAA,GAASC,SAAAA,EAAAA;AAEfD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,gEAAA,CAAA;AACZ,IAAA,IAAII,mBAAAA,EAAqB;AACrBrD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,iEAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4DAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,qEAAA,CAAA;IAChB,CAAA,MAAO;AACHjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,qEAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4DAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uDAAA,CAAA;AAChB,IAAA;AACAjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,+CAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,wDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,+DAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;;;;"}
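The functional change in this file between 1.2.26 and 1.2.28 is confined to the two logging helpers: `logFileDependencyWarning` and `logFileDependencySuggestions` now emit structured, prefix-tagged lines (`FILE_DEPS_WARNING`, `FILE_DEPS_PACKAGE`, `FILE_DEPS_DETAIL`, `FILE_DEPS_RESOLUTION`, `FILE_DEPS_BYPASS`) with `key: value` segments separated by `|`, replacing the earlier emoji-prefixed, human-oriented wording. The sketch below reproduces the new warning format for standalone inspection; the console-backed logger stub and the sample issue (package path and `file:` version) are assumptions, since the real module obtains its logger through `getLogger()` from the package's logging module.

```typescript
// Sketch of the structured warning format introduced in 1.2.28 for
// logFileDependencyWarning (src/util/safety.ts). The logger stub and the
// sample issue below are illustrative assumptions.

interface FileDependencyIssue {
    packagePath: string;
    dependencies: Array<{
        name: string;
        version: string;
        dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies';
    }>;
}

// Stand-in for the package's logger; only warn() is needed here.
const logger = { warn: (msg: string) => console.warn(msg) };

const logFileDependencyWarning = (issues: FileDependencyIssue[], context: string = 'operation'): void => {
    if (issues.length === 0) return;

    // One summary line, then one line per package and per offending dependency.
    logger.warn(`FILE_DEPS_WARNING: Found file: dependencies that should not be committed | Context: ${context} | Count: ${issues.length} | Impact: May cause build issues`);
    for (const issue of issues) {
        logger.warn(`FILE_DEPS_PACKAGE: Package with file dependencies | Package: ${issue.packagePath}`);
        for (const dep of issue.dependencies) {
            logger.warn(`FILE_DEPS_DETAIL: Dependency details | Name: ${dep.name} | Version: ${dep.version} | Type: ${dep.dependencyType}`);
        }
    }
    logger.warn('');
};

// Hypothetical example: one workspace package still pointing at a linked local copy.
logFileDependencyWarning(
    [{
        packagePath: 'packages/example',
        dependencies: [{ name: '@eldrforge/git-tools', version: 'file:../git-tools', dependencyType: 'dependencies' }]
    }],
    'commit'
);
```

The resolution hints follow the same pattern: instead of a numbered emoji list, 1.2.28 logs `STEP_1`/`STEP_2`/`STEP_3` lines (run `kodrdriv unlink`, complete the commit, run `kodrdriv link` again) plus `OPTION_1`/`OPTION_2` bypass lines for `--skip-file-check` and `git commit --no-verify`.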