@eldrforge/kodrdriv 1.2.27 → 1.2.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AI-FRIENDLY-LOGGING-GUIDE.md +237 -0
- package/AI-LOGGING-MIGRATION-COMPLETE.md +371 -0
- package/ALREADY-PUBLISHED-PACKAGES-FIX.md +264 -0
- package/AUDIT-BRANCHES-PROGRESS-FIX.md +90 -0
- package/AUDIT-EXAMPLE-OUTPUT.md +113 -0
- package/CHECKPOINT-RECOVERY-FIX.md +450 -0
- package/LOGGING-MIGRATION-STATUS.md +186 -0
- package/PARALLEL-PUBLISH-DEBUGGING-GUIDE.md +441 -0
- package/PARALLEL-PUBLISH-FIXES-IMPLEMENTED.md +405 -0
- package/PARALLEL-PUBLISH-LOGGING-FIXES.md +274 -0
- package/PARALLEL-PUBLISH-QUICK-REFERENCE.md +375 -0
- package/PARALLEL_EXECUTION_FIX.md +2 -2
- package/PUBLISH_IMPROVEMENTS_IMPLEMENTED.md +4 -5
- package/VERSION-AUDIT-FIX.md +333 -0
- package/dist/application.js +6 -6
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +43 -13
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +18 -18
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +32 -32
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/clean.js +9 -9
- package/dist/commands/clean.js.map +1 -1
- package/dist/commands/commit.js +20 -20
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/development.js +88 -89
- package/dist/commands/development.js.map +1 -1
- package/dist/commands/link.js +36 -36
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish.js +318 -220
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/release.js +14 -14
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/review.js +15 -17
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/select-audio.js +5 -5
- package/dist/commands/select-audio.js.map +1 -1
- package/dist/commands/tree.js +134 -39
- package/dist/commands/tree.js.map +1 -1
- package/dist/commands/unlink.js +39 -39
- package/dist/commands/unlink.js.map +1 -1
- package/dist/commands/updates.js +150 -14
- package/dist/commands/updates.js.map +1 -1
- package/dist/commands/versions.js +14 -13
- package/dist/commands/versions.js.map +1 -1
- package/dist/constants.js +1 -1
- package/dist/content/diff.js +5 -5
- package/dist/content/diff.js.map +1 -1
- package/dist/content/files.js +2 -2
- package/dist/content/files.js.map +1 -1
- package/dist/content/log.js +3 -3
- package/dist/content/log.js.map +1 -1
- package/dist/execution/CommandValidator.js +6 -6
- package/dist/execution/CommandValidator.js.map +1 -1
- package/dist/execution/DynamicTaskPool.js +129 -19
- package/dist/execution/DynamicTaskPool.js.map +1 -1
- package/dist/execution/RecoveryManager.js +99 -21
- package/dist/execution/RecoveryManager.js.map +1 -1
- package/dist/execution/TreeExecutionAdapter.js +23 -20
- package/dist/execution/TreeExecutionAdapter.js.map +1 -1
- package/dist/main.js +2 -2
- package/dist/main.js.map +1 -1
- package/dist/util/checkpointManager.js +4 -4
- package/dist/util/checkpointManager.js.map +1 -1
- package/dist/util/dependencyGraph.js +2 -2
- package/dist/util/dependencyGraph.js.map +1 -1
- package/dist/util/fileLock.js +1 -1
- package/dist/util/fileLock.js.map +1 -1
- package/dist/util/general.js +148 -15
- package/dist/util/general.js.map +1 -1
- package/dist/util/interactive.js +2 -2
- package/dist/util/interactive.js.map +1 -1
- package/dist/util/performance.js.map +1 -1
- package/dist/util/safety.js +13 -13
- package/dist/util/safety.js.map +1 -1
- package/dist/utils/branchState.js +567 -0
- package/dist/utils/branchState.js.map +1 -0
- package/package.json +4 -4
- package/scripts/update-test-log-assertions.js +73 -0
package/dist/util/checkpointManager.js.map
@@ -1 +1 @@
[Regenerated single-line source map; the full JSON blobs are omitted here. The old and new maps differ only in the embedded checkpointManager.ts source, where the log messages move to the structured, prefixed format:]
- this.logger.error(`Failed to load checkpoint: ${error.message}`);
+ this.logger.error(`CHECKPOINT_LOAD_FAILED: Failed to load checkpoint file | Error: ${error.message} | Impact: Cannot resume execution`);
- this.logger.info('Recovered from backup checkpoint');
+ this.logger.info('CHECKPOINT_RECOVERED_BACKUP: Recovered from backup checkpoint | Source: backup | Status: loaded');
- this.logger.warn('Breaking stale checkpoint lock');
+ this.logger.warn('CHECKPOINT_LOCK_STALE: Breaking stale checkpoint lock | Reason: Lock expired | Action: Force break lock');
- this.logger.warn('Checkpoint state inconsistency detected');
+ this.logger.warn('CHECKPOINT_INCONSISTENCY: Checkpoint state inconsistency detected | Issue: State validation failed | Impact: May need manual recovery');
package/dist/util/dependencyGraph.js
@@ -69,7 +69,7 @@ import { create } from './storage.js';
         }
     }
     } catch (error) {
-        logger.error(`Failed to scan directory ${directory}: ${error}`);
+        logger.error(`DEPENDENCY_GRAPH_SCAN_FAILED: Failed to scan directory | Directory: ${directory} | Error: ${error}`);
         throw error;
     }
     return packageJsonPaths;
@@ -116,7 +116,7 @@ import { create } from './storage.js';
             localDependencies: new Set() // Will be populated later
         };
     } catch (error) {
-        logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
+        logger.error(`DEPENDENCY_GRAPH_PARSE_FAILED: Failed to parse package.json | Path: ${packageJsonPath} | Error: ${error}`);
         throw error;
     }
 }
package/dist/util/dependencyGraph.js.map
@@ -1 +1 @@
[Regenerated single-line source map; the full JSON blobs are omitted here. The embedded dependencyGraph.ts source carries the same two logging changes shown above:]
- logger.error(`Failed to scan directory ${directory}: ${error}`);
+ logger.error(`DEPENDENCY_GRAPH_SCAN_FAILED: Failed to scan directory | Directory: ${directory} | Error: ${error}`);
- logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
+ logger.error(`DEPENDENCY_GRAPH_PARSE_FAILED: Failed to parse package.json | Path: ${packageJsonPath} | Error: ${error}`);
,CAAA;AACpF,QAAA;AAEAa,QAAAA,QAAAA,CAAStB,GAAG,CAACS,WAAAA,CAAAA;;AAGb,QAAA,MAAMQ,IAAAA,GAAOX,KAAAA,CAAMY,GAAG,CAACT,gBAAgB,IAAIjB,GAAAA,EAAAA;QAC3C,KAAK,MAAMO,OAAOkB,IAAAA,CAAM;YACpBO,KAAAA,CAAMzB,GAAAA,CAAAA;AACV,QAAA;AAEAuB,QAAAA,QAAAA,CAASG,MAAM,CAAChB,WAAAA,CAAAA;AAChBY,QAAAA,OAAAA,CAAQrB,GAAG,CAACS,WAAAA,CAAAA;AACZc,QAAAA,MAAAA,CAAOtD,IAAI,CAACwC,WAAAA,CAAAA;AAChB,IAAA,CAAA;;AAGA,IAAA,KAAK,MAAMA,WAAAA,IAAeL,QAAAA,CAASP,IAAI,EAAA,CAAI;AACvC,QAAA,IAAI,CAACwB,OAAAA,CAAQT,GAAG,CAACH,WAAAA,CAAAA,EAAc;YAC3Be,KAAAA,CAAMf,WAAAA,CAAAA;AACV,QAAA;AACJ,IAAA;IAEA/C,MAAAA,CAAOQ,OAAO,CAAC,CAAC,uDAAuD,EAAEqD,MAAAA,CAAOtE,MAAM,CAAC,UAAU,CAAC,CAAA;IAClG,OAAOsE,MAAAA;AACX;AAEA;;AAEC,IACM,SAASG,iBAAAA,CACZjB,WAAmB,EACnBW,KAAsB,EAAA;AAEtB,IAAA,MAAMO,aAAa,IAAInC,GAAAA,EAAAA;AACvB,IAAA,MAAM6B,UAAU,IAAI7B,GAAAA,EAAAA;AAEpB,IAAA,MAAMoC,WAAW,CAACZ,GAAAA,GAAAA;QACd,IAAIK,OAAAA,CAAQT,GAAG,CAACI,GAAAA,CAAAA,EAAM;AACtBK,QAAAA,OAAAA,CAAQrB,GAAG,CAACgB,GAAAA,CAAAA;AAEZ,QAAA,MAAMa,mBAAmBT,KAAAA,CAAMP,YAAY,CAACK,GAAG,CAACF,QAAQ,IAAIxB,GAAAA,EAAAA;QAC5D,KAAK,MAAMsC,aAAaD,gBAAAA,CAAkB;AACtCF,YAAAA,UAAAA,CAAW3B,GAAG,CAAC8B,SAAAA,CAAAA;YACfF,QAAAA,CAASE,SAAAA,CAAAA;AACb,QAAA;AACJ,IAAA,CAAA;IAEAF,QAAAA,CAASnB,WAAAA,CAAAA;IACT,OAAOkB,UAAAA;AACX;;;;"}
|
package/dist/util/fileLock.js
CHANGED
|
@@ -195,7 +195,7 @@ function _define_property(obj, key, value) {
195 195   process.exit(143); // Standard exit code for SIGTERM
196 196   });
197 197   process.on('uncaughtException', (error)=>{
198     - this.logger.error('Uncaught exception, cleaning up locks:'
    198 + this.logger.error('FILELOCK_UNCAUGHT_EXCEPTION: Uncaught exception detected, cleaning up locks | Error: ' + error + ' | Action: Release all locks');
199 199   cleanup();
200 200   process.exit(1);
201 201   });
@@ -1 +1 @@
1       -
{"version":3,"file":"fileLock.js","sources":["../../src/util/fileLock.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as os from 'os';\nimport { getLogger } from '../logging';\n\n/**\n * File-based lock for cross-process synchronization\n * Uses atomic file operations to coordinate across multiple Node processes\n */\nexport class FileLock {\n private lockPath: string;\n private lockAcquired = false;\n private maxRetries = 100; // Maximum number of lock attempts\n private retryDelay = 100; // Initial retry delay in ms\n private maxRetryDelay = 2000; // Maximum retry delay in ms\n private lockTimeout = 30000; // Consider lock stale after 30 seconds\n private logger = getLogger();\n\n constructor(lockPath: string) {\n this.lockPath = lockPath;\n }\n\n /**\n * Acquire the file lock with exponential backoff retry\n */\n async lock(): Promise<void> {\n let attempts = 0;\n let currentDelay = this.retryDelay;\n\n while (attempts < this.maxRetries) {\n try {\n // Try to create lock file atomically with 'wx' flag (fails if exists)\n const lockData = {\n pid: process.pid,\n timestamp: Date.now(),\n hostname: os.hostname()\n };\n\n // Check if lock file exists and is stale\n if (fs.existsSync(this.lockPath)) {\n const lockContent = fs.readFileSync(this.lockPath, 'utf-8');\n try {\n const existingLock = JSON.parse(lockContent);\n const lockAge = Date.now() - existingLock.timestamp;\n\n // If lock is stale, try to remove it\n if (lockAge > this.lockTimeout) {\n this.logger.debug(`Removing stale lock file (age: ${lockAge}ms, pid: ${existingLock.pid})`);\n try {\n fs.unlinkSync(this.lockPath);\n } catch {\n // Lock might have been removed by another process, continue\n }\n }\n } catch {\n // Invalid lock file, try to remove it\n try {\n fs.unlinkSync(this.lockPath);\n } catch {\n // Ignore errors\n }\n }\n }\n\n // Try to acquire lock\n fs.writeFileSync(this.lockPath, JSON.stringify(lockData, null, 2), { flag: 'wx' });\n this.lockAcquired = true;\n\n if (attempts > 0) {\n this.logger.debug(`Acquired file lock after ${attempts} attempts: ${this.lockPath}`);\n }\n\n return;\n } catch (error: any) {\n if (error.code === 'EEXIST') {\n // Lock file exists, retry with backoff\n attempts++;\n\n if (attempts === 1 || attempts % 10 === 0) {\n this.logger.verbose(`Waiting for file lock (attempt ${attempts}/${this.maxRetries}): ${this.lockPath}`);\n }\n\n await new Promise(resolve => setTimeout(resolve, currentDelay));\n\n // Exponential backoff\n currentDelay = Math.min(currentDelay * 1.5, this.maxRetryDelay);\n } else {\n // Unexpected error\n throw new Error(`Failed to acquire file lock ${this.lockPath}: ${error.message}`);\n }\n }\n }\n\n throw new Error(`Failed to acquire file lock after ${this.maxRetries} attempts: ${this.lockPath}`);\n }\n\n /**\n * Release the file lock\n */\n unlock(): void {\n if (!this.lockAcquired) {\n return;\n }\n\n try {\n if (fs.existsSync(this.lockPath)) {\n fs.unlinkSync(this.lockPath);\n }\n this.lockAcquired = false;\n this.logger.silly(`Released file lock: ${this.lockPath}`);\n } catch (error: any) {\n // Lock file might have been removed by another process or stale lock cleanup\n this.logger.debug(`Error releasing file lock ${this.lockPath}: ${error.message}`);\n this.lockAcquired = false;\n }\n }\n\n /**\n * Check if this instance currently holds the lock\n */\n isLocked(): boolean {\n return this.lockAcquired;\n }\n}\n\n/**\n * Manages file-based locks for git repositories 
(cross-process safe)\n */\nexport class RepositoryFileLockManager {\n private locks: Map<string, FileLock> = new Map();\n private logger = getLogger();\n private cleanupRegistered = false;\n\n /**\n * Get or create a file lock for a specific git repository\n * @param repoPath Path to the git repository root\n * @returns FileLock for this repository\n */\n getRepositoryLock(repoPath: string): FileLock {\n const normalizedPath = path.resolve(repoPath);\n\n if (!this.locks.has(normalizedPath)) {\n // Resolve the actual .git directory (handles both regular repos and submodules)\n const gitDirPath = this.resolveGitDirectory(normalizedPath);\n const lockPath = path.join(gitDirPath, 'kodrdriv.lock');\n this.logger.debug(`Creating file lock for repository: ${normalizedPath} at ${lockPath}`);\n this.locks.set(normalizedPath, new FileLock(lockPath));\n\n // Register cleanup handler on first lock creation\n if (!this.cleanupRegistered) {\n this.registerCleanupHandlers();\n this.cleanupRegistered = true;\n }\n }\n\n return this.locks.get(normalizedPath)!;\n }\n\n /**\n * Resolve the actual .git directory path, handling both regular repos and submodules\n * @param repoPath Path to the repository root\n * @returns Path to the actual .git directory\n */\n private resolveGitDirectory(repoPath: string): string {\n const gitPath = path.join(repoPath, '.git');\n\n try {\n const stat = fs.statSync(gitPath);\n\n if (stat.isDirectory()) {\n // Regular git repository\n return gitPath;\n } else if (stat.isFile()) {\n // Git submodule - .git is a file with format: gitdir: <path>\n const gitFileContent = fs.readFileSync(gitPath, 'utf-8').trim();\n const match = gitFileContent.match(/^gitdir:\\s*(.+)$/);\n\n if (match && match[1]) {\n // Resolve the gitdir path (it's relative to the repo path)\n const gitDirPath = path.resolve(repoPath, match[1]);\n this.logger.debug(`Resolved submodule gitdir: ${gitDirPath}`);\n\n // Ensure the git directory exists\n if (!fs.existsSync(gitDirPath)) {\n throw new Error(`Submodule git directory does not exist: ${gitDirPath}`);\n }\n\n return gitDirPath;\n }\n\n throw new Error(`Invalid .git file format in ${gitPath}: ${gitFileContent}`);\n }\n } catch (error: any) {\n // Check if error is from statSync (file doesn't exist)\n if (error.code === 'ENOENT') {\n throw new Error(`No .git directory or file found in ${repoPath}`);\n }\n throw new Error(`Failed to resolve git directory for ${repoPath}: ${error.message}`);\n }\n\n throw new Error(`No .git directory or file found in ${repoPath}`);\n }\n\n /**\n * Register cleanup handlers to release locks on process exit\n */\n private registerCleanupHandlers(): void {\n const cleanup = () => {\n this.destroy();\n };\n\n // Handle various exit scenarios\n process.on('exit', cleanup);\n process.on('SIGINT', () => {\n cleanup();\n process.exit(130); // Standard exit code for SIGINT\n });\n process.on('SIGTERM', () => {\n cleanup();\n process.exit(143); // Standard exit code for SIGTERM\n });\n process.on('uncaughtException', (error) => {\n this.logger.error('Uncaught exception, cleaning up locks:', error);\n cleanup();\n process.exit(1);\n });\n }\n\n /**\n * Execute a git operation with repository-level file locking\n * @param repoPath Path to the git repository root\n * @param operation The async operation to execute under lock\n * @param operationName Optional name for logging\n * @returns Result of the operation\n */\n async withGitLock<T>(\n repoPath: string,\n operation: () => Promise<T>,\n operationName?: string\n ): Promise<T> {\n const 
lock = this.getRepositoryLock(repoPath);\n const startWait = Date.now();\n\n this.logger.silly(\n `Acquiring file lock for ${repoPath}${operationName ? ` for: ${operationName}` : ''}`\n );\n\n await lock.lock();\n\n const waitTime = Date.now() - startWait;\n if (waitTime > 100) {\n this.logger.debug(\n `Acquired file lock for ${repoPath} after ${waitTime}ms${operationName ? ` for: ${operationName}` : ''}`\n );\n }\n\n try {\n return await operation();\n } finally {\n lock.unlock();\n }\n }\n\n /**\n * Clean up all locks\n */\n destroy(): void {\n this.logger.debug(`Cleaning up ${this.locks.size} file lock(s)`);\n for (const lock of this.locks.values()) {\n lock.unlock();\n }\n this.locks.clear();\n }\n}\n"],"names":["FileLock","lock","attempts","currentDelay","retryDelay","maxRetries","lockData","pid","process","timestamp","Date","now","hostname","os","fs","existsSync","lockPath","lockContent","readFileSync","existingLock","JSON","parse","lockAge","lockTimeout","logger","debug","unlinkSync","writeFileSync","stringify","flag","lockAcquired","error","code","verbose","Promise","resolve","setTimeout","Math","min","maxRetryDelay","Error","message","unlock","silly","isLocked","getLogger","RepositoryFileLockManager","getRepositoryLock","repoPath","normalizedPath","path","locks","has","gitDirPath","resolveGitDirectory","join","set","cleanupRegistered","registerCleanupHandlers","get","gitPath","stat","statSync","isDirectory","isFile","gitFileContent","trim","match","cleanup","destroy","on","exit","withGitLock","operation","operationName","startWait","waitTime","size","values","clear","Map"],"mappings":";;;;;AAAA;;;;;;;;;;;;;;AAMA;;;AAGC,IACM,MAAMA,QAAAA,CAAAA;AAaT;;AAEC,QACD,MAAMC,IAAAA,GAAsB;AACxB,QAAA,IAAIC,QAAAA,GAAW,CAAA;QACf,IAAIC,YAAAA,GAAe,IAAI,CAACC,UAAU;AAElC,QAAA,MAAOF,QAAAA,GAAW,IAAI,CAACG,UAAU,CAAE;YAC/B,IAAI;;AAEA,gBAAA,MAAMC,QAAAA,GAAW;AACbC,oBAAAA,GAAAA,EAAKC,QAAQD,GAAG;AAChBE,oBAAAA,SAAAA,EAAWC,KAAKC,GAAG,EAAA;AACnBC,oBAAAA,QAAAA,EAAUC,GAAGD,QAAQ;AACzB,iBAAA;;AAGA,gBAAA,IAAIE,GAAGC,UAAU,CAAC,IAAI,CAACC,QAAQ,CAAA,EAAG;AAC9B,oBAAA,MAAMC,cAAcH,EAAAA,CAAGI,YAAY,CAAC,IAAI,CAACF,QAAQ,EAAE,OAAA,CAAA;oBACnD,IAAI;wBACA,MAAMG,YAAAA,GAAeC,IAAAA,CAAKC,KAAK,CAACJ,WAAAA,CAAAA;AAChC,wBAAA,MAAMK,OAAAA,GAAUZ,IAAAA,CAAKC,GAAG,EAAA,GAAKQ,aAAaV,SAAS;;AAGnD,wBAAA,IAAIa,OAAAA,GAAU,IAAI,CAACC,WAAW,EAAE;AAC5B,4BAAA,IAAI,CAACC,MAAM,CAACC,KAAK,CAAC,CAAC,+BAA+B,EAAEH,OAAAA,CAAQ,SAAS,EAAEH,YAAAA,CAAaZ,GAAG,CAAC,CAAC,CAAC,CAAA;4BAC1F,IAAI;AACAO,gCAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,4BAAA,CAAA,CAAE,OAAM;;AAER,4BAAA;AACJ,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAM;;wBAEJ,IAAI;AACAF,4BAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,wBAAA,CAAA,CAAE,OAAM;;AAER,wBAAA;AACJ,oBAAA;AACJ,gBAAA;;gBAGAF,EAAAA,CAAGa,aAAa,CAAC,IAAI,CAACX,QAAQ,EAAEI,IAAAA,CAAKQ,SAAS,CAACtB,QAAAA,EAAU,IAAA,EAAM,CAAA,CAAA,EAAI;oBAAEuB,IAAAA,EAAM;AAAK,iBAAA,CAAA;gBAChF,IAAI,CAACC,YAAY,GAAG,IAAA;AAEpB,gBAAA,IAAI5B,WAAW,CAAA,EAAG;AACd,oBAAA,IAAI,CAACsB,MAAM,CAACC,KAAK,CAAC,CAAC,yBAAyB,EAAEvB,QAAAA,CAAS,WAAW,EAAE,IAAI,CAACc,QAAQ,CAAA,CAAE,CAAA;AACvF,gBAAA;AAEA,gBAAA;AACJ,YAAA,CAAA,CAAE,OAAOe,KAAAA,EAAY;gBACjB,IAAIA,KAAAA,CAAMC,IAAI,KAAK,QAAA,EAAU;;AAEzB9B,oBAAAA,QAAAA,EAAAA;AAEA,oBAAA,IAAIA,QAAAA,KAAa,CAAA,IAAKA,QAAAA,GAAW,EAAA,KAAO,CAAA,EAAG;wBACvC,IAAI,CAACsB,MAAM,CAACS,OAAO,CAAC,CAAC,+BAA+B,EAAE/B,QAAAA,CAAS,CAAC,EAAE,IAAI,CAACG,UAAU,CAAC,GAAG,EAAE,IAAI,CAACW,QAAQ,CAAA,CAAE,CAAA;AAC1G,oBAAA;AAEA,oBAAA,MAAM,IAAIkB,OAAAA,CAAQC,CAAAA,OAAAA,GAAWC,WAAWD,OAAAA,EAAShC,YAAAA,CAAAA,CAAAA;;AAGjDA,oBAAAA,YAAAA,GAAekC,KAAKC,GAAG,CAACnC,eAAe,GAAA,E
AAK,IAAI,CAACoC,aAAa,CAAA;gBAClE,CAAA,MAAO;;AAEH,oBAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,4BAA4B,EAAE,IAAI,CAACxB,QAAQ,CAAC,EAAE,EAAEe,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;AACpF,gBAAA;AACJ,YAAA;AACJ,QAAA;AAEA,QAAA,MAAM,IAAID,KAAAA,CAAM,CAAC,kCAAkC,EAAE,IAAI,CAACnC,UAAU,CAAC,WAAW,EAAE,IAAI,CAACW,QAAQ,CAAA,CAAE,CAAA;AACrG,IAAA;AAEA;;AAEC,QACD0B,MAAAA,GAAe;AACX,QAAA,IAAI,CAAC,IAAI,CAACZ,YAAY,EAAE;AACpB,YAAA;AACJ,QAAA;QAEA,IAAI;AACA,YAAA,IAAIhB,GAAGC,UAAU,CAAC,IAAI,CAACC,QAAQ,CAAA,EAAG;AAC9BF,gBAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,YAAA;YACA,IAAI,CAACc,YAAY,GAAG,KAAA;YACpB,IAAI,CAACN,MAAM,CAACmB,KAAK,CAAC,CAAC,oBAAoB,EAAE,IAAI,CAAC3B,QAAQ,CAAA,CAAE,CAAA;AAC5D,QAAA,CAAA,CAAE,OAAOe,KAAAA,EAAY;;AAEjB,YAAA,IAAI,CAACP,MAAM,CAACC,KAAK,CAAC,CAAC,0BAA0B,EAAE,IAAI,CAACT,QAAQ,CAAC,EAAE,EAAEe,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;YAChF,IAAI,CAACX,YAAY,GAAG,KAAA;AACxB,QAAA;AACJ,IAAA;AAEA;;AAEC,QACDc,QAAAA,GAAoB;QAChB,OAAO,IAAI,CAACd,YAAY;AAC5B,IAAA;AAxGA,IAAA,WAAA,CAAYd,QAAgB,CAAE;AAR9B,QAAA,gBAAA,CAAA,IAAA,EAAQA,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQc,cAAAA,EAAe,KAAA,CAAA;QACvB,gBAAA,CAAA,IAAA,EAAQzB,YAAAA,EAAa;QACrB,gBAAA,CAAA,IAAA,EAAQD,YAAAA,EAAa;QACrB,gBAAA,CAAA,IAAA,EAAQmC,eAAAA,EAAgB;QACxB,gBAAA,CAAA,IAAA,EAAQhB,aAAAA,EAAc;AACtB,QAAA,gBAAA,CAAA,IAAA,EAAQC,QAAAA,EAASqB,SAAAA,EAAAA,CAAAA;QAGb,IAAI,CAAC7B,QAAQ,GAAGA,QAAAA;AACpB,IAAA;AAuGJ;AAEA;;AAEC,IACM,MAAM8B,yBAAAA,CAAAA;AAKT;;;;QAKAC,iBAAAA,CAAkBC,QAAgB,EAAY;QAC1C,MAAMC,cAAAA,GAAiBC,IAAAA,CAAKf,OAAO,CAACa,QAAAA,CAAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAACG,KAAK,CAACC,GAAG,CAACH,cAAAA,CAAAA,EAAiB;;AAEjC,YAAA,MAAMI,UAAAA,GAAa,IAAI,CAACC,mBAAmB,CAACL,cAAAA,CAAAA;AAC5C,YAAA,MAAMjC,QAAAA,GAAWkC,IAAAA,CAAKK,IAAI,CAACF,UAAAA,EAAY,eAAA,CAAA;YACvC,IAAI,CAAC7B,MAAM,CAACC,KAAK,CAAC,CAAC,mCAAmC,EAAEwB,cAAAA,CAAe,IAAI,EAAEjC,QAAAA,CAAAA,CAAU,CAAA;AACvF,YAAA,IAAI,CAACmC,KAAK,CAACK,GAAG,CAACP,cAAAA,EAAgB,IAAIjD,QAAAA,CAASgB,QAAAA,CAAAA,CAAAA;;AAG5C,YAAA,IAAI,CAAC,IAAI,CAACyC,iBAAiB,EAAE;AACzB,gBAAA,IAAI,CAACC,uBAAuB,EAAA;gBAC5B,IAAI,CAACD,iBAAiB,GAAG,IAAA;AAC7B,YAAA;AACJ,QAAA;AAEA,QAAA,OAAO,IAAI,CAACN,KAAK,CAACQ,GAAG,CAACV,cAAAA,CAAAA;AAC1B,IAAA;AAEA;;;;QAKQK,mBAAAA,CAAoBN,QAAgB,EAAU;AAClD,QAAA,MAAMY,OAAAA,GAAUV,IAAAA,CAAKK,IAAI,CAACP,QAAAA,EAAU,MAAA,CAAA;QAEpC,IAAI;YACA,MAAMa,IAAAA,GAAO/C,EAAAA,CAAGgD,QAAQ,CAACF,OAAAA,CAAAA;YAEzB,IAAIC,IAAAA,CAAKE,WAAW,EAAA,EAAI;;gBAEpB,OAAOH,OAAAA;YACX,CAAA,MAAO,IAAIC,IAAAA,CAAKG,MAAM,EAAA,EAAI;;AAEtB,gBAAA,MAAMC,iBAAiBnD,EAAAA,CAAGI,YAAY,CAAC0C,OAAAA,EAAS,SAASM,IAAI,EAAA;gBAC7D,MAAMC,KAAAA,GAAQF,cAAAA,CAAeE,KAAK,CAAC,kBAAA,CAAA;AAEnC,gBAAA,IAAIA,KAAAA,IAASA,KAAK,CAAC,CAAA,CAAE,EAAE;;AAEnB,oBAAA,MAAMd,aAAaH,IAAAA,CAAKf,OAAO,CAACa,QAAAA,EAAUmB,KAAK,CAAC,CAAA,CAAE,CAAA;oBAClD,IAAI,CAAC3C,MAAM,CAACC,KAAK,CAAC,CAAC,2BAA2B,EAAE4B,UAAAA,CAAAA,CAAY,CAAA;;AAG5D,oBAAA,IAAI,CAACvC,EAAAA,CAAGC,UAAU,CAACsC,UAAAA,CAAAA,EAAa;AAC5B,wBAAA,MAAM,IAAIb,KAAAA,CAAM,CAAC,wCAAwC,EAAEa,UAAAA,CAAAA,CAAY,CAAA;AAC3E,oBAAA;oBAEA,OAAOA,UAAAA;AACX,gBAAA;gBAEA,MAAM,IAAIb,MAAM,CAAC,4BAA4B,EAAEoB,OAAAA,CAAQ,EAAE,EAAEK,cAAAA,CAAAA,CAAgB,CAAA;AAC/E,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOlC,KAAAA,EAAY;;YAEjB,IAAIA,KAAAA,CAAMC,IAAI,KAAK,QAAA,EAAU;AACzB,gBAAA,MAAM,IAAIQ,KAAAA,CAAM,CAAC,mCAAmC,EAAEQ,QAAAA,CAAAA,CAAU,CAAA;AACpE,YAAA;YACA,MAAM,IAAIR,KAAAA,CAAM,CAAC,oCAAoC,EAAEQ,SAAS,EAAE,EAAEjB,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;AACvF,QAAA;AAEA,QAAA,MAAM,IAAID,KAAAA,CAAM,CAAC,mCAAmC,EAAEQ,QAAAA,CAAAA,CAAU,CAAA;AACpE,IAAA;AAEA;;AAEC,QACD,uBAAQU,GAAgC;AACpC,QAAA,MAAMU,OAAAA,GAAU,IAAA;AACZ,YAAA,IAAI,CAACC,OAAO,EAAA;AAChB,QAAA,CAAA;;QAGA7D,OAAAA,CAAQ8D,EAAE,CAAC,MAAA,EAAQF,OAAAA,CAAAA;QAC
nB5D,OAAAA,CAAQ8D,EAAE,CAAC,QAAA,EAAU,IAAA;AACjBF,YAAAA,OAAAA,EAAAA;YACA5D,OAAAA,CAAQ+D,IAAI,CAAC,GAAA,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;QACA/D,OAAAA,CAAQ8D,EAAE,CAAC,SAAA,EAAW,IAAA;AAClBF,YAAAA,OAAAA,EAAAA;YACA5D,OAAAA,CAAQ+D,IAAI,CAAC,GAAA,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;QACA/D,OAAAA,CAAQ8D,EAAE,CAAC,mBAAA,EAAqB,CAACvC,KAAAA,GAAAA;AAC7B,YAAA,IAAI,CAACP,MAAM,CAACO,KAAK,CAAC,wCAAA,EAA0CA,KAAAA,CAAAA;AAC5DqC,YAAAA,OAAAA,EAAAA;AACA5D,YAAAA,OAAAA,CAAQ+D,IAAI,CAAC,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;AACJ,IAAA;AAEA;;;;;;AAMC,QACD,MAAMC,WAAAA,CACFxB,QAAgB,EAChByB,SAA2B,EAC3BC,aAAsB,EACZ;AACV,QAAA,MAAMzE,IAAAA,GAAO,IAAI,CAAC8C,iBAAiB,CAACC,QAAAA,CAAAA;QACpC,MAAM2B,SAAAA,GAAYjE,KAAKC,GAAG,EAAA;AAE1B,QAAA,IAAI,CAACa,MAAM,CAACmB,KAAK,CACb,CAAC,wBAAwB,EAAEK,QAAAA,CAAAA,EAAW0B,gBAAgB,CAAC,MAAM,EAAEA,aAAAA,CAAAA,CAAe,GAAG,EAAA,CAAA,CAAI,CAAA;AAGzF,QAAA,MAAMzE,KAAKA,IAAI,EAAA;QAEf,MAAM2E,QAAAA,GAAWlE,IAAAA,CAAKC,GAAG,EAAA,GAAKgE,SAAAA;AAC9B,QAAA,IAAIC,WAAW,GAAA,EAAK;YAChB,IAAI,CAACpD,MAAM,CAACC,KAAK,CACb,CAAC,uBAAuB,EAAEuB,QAAAA,CAAS,OAAO,EAAE4B,QAAAA,CAAS,EAAE,EAAEF,aAAAA,GAAgB,CAAC,MAAM,EAAEA,aAAAA,CAAAA,CAAe,GAAG,EAAA,CAAA,CAAI,CAAA;AAEhH,QAAA;QAEA,IAAI;AACA,YAAA,OAAO,MAAMD,SAAAA,EAAAA;QACjB,CAAA,QAAU;AACNxE,YAAAA,IAAAA,CAAKyC,MAAM,EAAA;AACf,QAAA;AACJ,IAAA;AAEA;;AAEC,QACD2B,OAAAA,GAAgB;AACZ,QAAA,IAAI,CAAC7C,MAAM,CAACC,KAAK,CAAC,CAAC,YAAY,EAAE,IAAI,CAAC0B,KAAK,CAAC0B,IAAI,CAAC,aAAa,CAAC,CAAA;AAC/D,QAAA,KAAK,MAAM5E,IAAAA,IAAQ,IAAI,CAACkD,KAAK,CAAC2B,MAAM,EAAA,CAAI;AACpC7E,YAAAA,IAAAA,CAAKyC,MAAM,EAAA;AACf,QAAA;QACA,IAAI,CAACS,KAAK,CAAC4B,KAAK,EAAA;AACpB,IAAA;;AA/IA,QAAA,gBAAA,CAAA,IAAA,EAAQ5B,SAA+B,IAAI6B,GAAAA,EAAAA,CAAAA;AAC3C,QAAA,gBAAA,CAAA,IAAA,EAAQxD,QAAAA,EAASqB,SAAAA,EAAAA,CAAAA;AACjB,QAAA,gBAAA,CAAA,IAAA,EAAQY,mBAAAA,EAAoB,KAAA,CAAA;;AA8IhC;;;;"}
      1 +
{"version":3,"file":"fileLock.js","sources":["../../src/util/fileLock.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as os from 'os';\nimport { getLogger } from '../logging';\n\n/**\n * File-based lock for cross-process synchronization\n * Uses atomic file operations to coordinate across multiple Node processes\n */\nexport class FileLock {\n private lockPath: string;\n private lockAcquired = false;\n private maxRetries = 100; // Maximum number of lock attempts\n private retryDelay = 100; // Initial retry delay in ms\n private maxRetryDelay = 2000; // Maximum retry delay in ms\n private lockTimeout = 30000; // Consider lock stale after 30 seconds\n private logger = getLogger();\n\n constructor(lockPath: string) {\n this.lockPath = lockPath;\n }\n\n /**\n * Acquire the file lock with exponential backoff retry\n */\n async lock(): Promise<void> {\n let attempts = 0;\n let currentDelay = this.retryDelay;\n\n while (attempts < this.maxRetries) {\n try {\n // Try to create lock file atomically with 'wx' flag (fails if exists)\n const lockData = {\n pid: process.pid,\n timestamp: Date.now(),\n hostname: os.hostname()\n };\n\n // Check if lock file exists and is stale\n if (fs.existsSync(this.lockPath)) {\n const lockContent = fs.readFileSync(this.lockPath, 'utf-8');\n try {\n const existingLock = JSON.parse(lockContent);\n const lockAge = Date.now() - existingLock.timestamp;\n\n // If lock is stale, try to remove it\n if (lockAge > this.lockTimeout) {\n this.logger.debug(`Removing stale lock file (age: ${lockAge}ms, pid: ${existingLock.pid})`);\n try {\n fs.unlinkSync(this.lockPath);\n } catch {\n // Lock might have been removed by another process, continue\n }\n }\n } catch {\n // Invalid lock file, try to remove it\n try {\n fs.unlinkSync(this.lockPath);\n } catch {\n // Ignore errors\n }\n }\n }\n\n // Try to acquire lock\n fs.writeFileSync(this.lockPath, JSON.stringify(lockData, null, 2), { flag: 'wx' });\n this.lockAcquired = true;\n\n if (attempts > 0) {\n this.logger.debug(`Acquired file lock after ${attempts} attempts: ${this.lockPath}`);\n }\n\n return;\n } catch (error: any) {\n if (error.code === 'EEXIST') {\n // Lock file exists, retry with backoff\n attempts++;\n\n if (attempts === 1 || attempts % 10 === 0) {\n this.logger.verbose(`Waiting for file lock (attempt ${attempts}/${this.maxRetries}): ${this.lockPath}`);\n }\n\n await new Promise(resolve => setTimeout(resolve, currentDelay));\n\n // Exponential backoff\n currentDelay = Math.min(currentDelay * 1.5, this.maxRetryDelay);\n } else {\n // Unexpected error\n throw new Error(`Failed to acquire file lock ${this.lockPath}: ${error.message}`);\n }\n }\n }\n\n throw new Error(`Failed to acquire file lock after ${this.maxRetries} attempts: ${this.lockPath}`);\n }\n\n /**\n * Release the file lock\n */\n unlock(): void {\n if (!this.lockAcquired) {\n return;\n }\n\n try {\n if (fs.existsSync(this.lockPath)) {\n fs.unlinkSync(this.lockPath);\n }\n this.lockAcquired = false;\n this.logger.silly(`Released file lock: ${this.lockPath}`);\n } catch (error: any) {\n // Lock file might have been removed by another process or stale lock cleanup\n this.logger.debug(`Error releasing file lock ${this.lockPath}: ${error.message}`);\n this.lockAcquired = false;\n }\n }\n\n /**\n * Check if this instance currently holds the lock\n */\n isLocked(): boolean {\n return this.lockAcquired;\n }\n}\n\n/**\n * Manages file-based locks for git repositories 
(cross-process safe)\n */\nexport class RepositoryFileLockManager {\n private locks: Map<string, FileLock> = new Map();\n private logger = getLogger();\n private cleanupRegistered = false;\n\n /**\n * Get or create a file lock for a specific git repository\n * @param repoPath Path to the git repository root\n * @returns FileLock for this repository\n */\n getRepositoryLock(repoPath: string): FileLock {\n const normalizedPath = path.resolve(repoPath);\n\n if (!this.locks.has(normalizedPath)) {\n // Resolve the actual .git directory (handles both regular repos and submodules)\n const gitDirPath = this.resolveGitDirectory(normalizedPath);\n const lockPath = path.join(gitDirPath, 'kodrdriv.lock');\n this.logger.debug(`Creating file lock for repository: ${normalizedPath} at ${lockPath}`);\n this.locks.set(normalizedPath, new FileLock(lockPath));\n\n // Register cleanup handler on first lock creation\n if (!this.cleanupRegistered) {\n this.registerCleanupHandlers();\n this.cleanupRegistered = true;\n }\n }\n\n return this.locks.get(normalizedPath)!;\n }\n\n /**\n * Resolve the actual .git directory path, handling both regular repos and submodules\n * @param repoPath Path to the repository root\n * @returns Path to the actual .git directory\n */\n private resolveGitDirectory(repoPath: string): string {\n const gitPath = path.join(repoPath, '.git');\n\n try {\n const stat = fs.statSync(gitPath);\n\n if (stat.isDirectory()) {\n // Regular git repository\n return gitPath;\n } else if (stat.isFile()) {\n // Git submodule - .git is a file with format: gitdir: <path>\n const gitFileContent = fs.readFileSync(gitPath, 'utf-8').trim();\n const match = gitFileContent.match(/^gitdir:\\s*(.+)$/);\n\n if (match && match[1]) {\n // Resolve the gitdir path (it's relative to the repo path)\n const gitDirPath = path.resolve(repoPath, match[1]);\n this.logger.debug(`Resolved submodule gitdir: ${gitDirPath}`);\n\n // Ensure the git directory exists\n if (!fs.existsSync(gitDirPath)) {\n throw new Error(`Submodule git directory does not exist: ${gitDirPath}`);\n }\n\n return gitDirPath;\n }\n\n throw new Error(`Invalid .git file format in ${gitPath}: ${gitFileContent}`);\n }\n } catch (error: any) {\n // Check if error is from statSync (file doesn't exist)\n if (error.code === 'ENOENT') {\n throw new Error(`No .git directory or file found in ${repoPath}`);\n }\n throw new Error(`Failed to resolve git directory for ${repoPath}: ${error.message}`);\n }\n\n throw new Error(`No .git directory or file found in ${repoPath}`);\n }\n\n /**\n * Register cleanup handlers to release locks on process exit\n */\n private registerCleanupHandlers(): void {\n const cleanup = () => {\n this.destroy();\n };\n\n // Handle various exit scenarios\n process.on('exit', cleanup);\n process.on('SIGINT', () => {\n cleanup();\n process.exit(130); // Standard exit code for SIGINT\n });\n process.on('SIGTERM', () => {\n cleanup();\n process.exit(143); // Standard exit code for SIGTERM\n });\n process.on('uncaughtException', (error) => {\n this.logger.error('FILELOCK_UNCAUGHT_EXCEPTION: Uncaught exception detected, cleaning up locks | Error: ' + error + ' | Action: Release all locks');\n cleanup();\n process.exit(1);\n });\n }\n\n /**\n * Execute a git operation with repository-level file locking\n * @param repoPath Path to the git repository root\n * @param operation The async operation to execute under lock\n * @param operationName Optional name for logging\n * @returns Result of the operation\n */\n async withGitLock<T>(\n repoPath: string,\n 
operation: () => Promise<T>,\n operationName?: string\n ): Promise<T> {\n const lock = this.getRepositoryLock(repoPath);\n const startWait = Date.now();\n\n this.logger.silly(\n `Acquiring file lock for ${repoPath}${operationName ? ` for: ${operationName}` : ''}`\n );\n\n await lock.lock();\n\n const waitTime = Date.now() - startWait;\n if (waitTime > 100) {\n this.logger.debug(\n `Acquired file lock for ${repoPath} after ${waitTime}ms${operationName ? ` for: ${operationName}` : ''}`\n );\n }\n\n try {\n return await operation();\n } finally {\n lock.unlock();\n }\n }\n\n /**\n * Clean up all locks\n */\n destroy(): void {\n this.logger.debug(`Cleaning up ${this.locks.size} file lock(s)`);\n for (const lock of this.locks.values()) {\n lock.unlock();\n }\n this.locks.clear();\n }\n}\n"],"names":["FileLock","lock","attempts","currentDelay","retryDelay","maxRetries","lockData","pid","process","timestamp","Date","now","hostname","os","fs","existsSync","lockPath","lockContent","readFileSync","existingLock","JSON","parse","lockAge","lockTimeout","logger","debug","unlinkSync","writeFileSync","stringify","flag","lockAcquired","error","code","verbose","Promise","resolve","setTimeout","Math","min","maxRetryDelay","Error","message","unlock","silly","isLocked","getLogger","RepositoryFileLockManager","getRepositoryLock","repoPath","normalizedPath","path","locks","has","gitDirPath","resolveGitDirectory","join","set","cleanupRegistered","registerCleanupHandlers","get","gitPath","stat","statSync","isDirectory","isFile","gitFileContent","trim","match","cleanup","destroy","on","exit","withGitLock","operation","operationName","startWait","waitTime","size","values","clear","Map"],"mappings":";;;;;AAAA;;;;;;;;;;;;;;AAMA;;;AAGC,IACM,MAAMA,QAAAA,CAAAA;AAaT;;AAEC,QACD,MAAMC,IAAAA,GAAsB;AACxB,QAAA,IAAIC,QAAAA,GAAW,CAAA;QACf,IAAIC,YAAAA,GAAe,IAAI,CAACC,UAAU;AAElC,QAAA,MAAOF,QAAAA,GAAW,IAAI,CAACG,UAAU,CAAE;YAC/B,IAAI;;AAEA,gBAAA,MAAMC,QAAAA,GAAW;AACbC,oBAAAA,GAAAA,EAAKC,QAAQD,GAAG;AAChBE,oBAAAA,SAAAA,EAAWC,KAAKC,GAAG,EAAA;AACnBC,oBAAAA,QAAAA,EAAUC,GAAGD,QAAQ;AACzB,iBAAA;;AAGA,gBAAA,IAAIE,GAAGC,UAAU,CAAC,IAAI,CAACC,QAAQ,CAAA,EAAG;AAC9B,oBAAA,MAAMC,cAAcH,EAAAA,CAAGI,YAAY,CAAC,IAAI,CAACF,QAAQ,EAAE,OAAA,CAAA;oBACnD,IAAI;wBACA,MAAMG,YAAAA,GAAeC,IAAAA,CAAKC,KAAK,CAACJ,WAAAA,CAAAA;AAChC,wBAAA,MAAMK,OAAAA,GAAUZ,IAAAA,CAAKC,GAAG,EAAA,GAAKQ,aAAaV,SAAS;;AAGnD,wBAAA,IAAIa,OAAAA,GAAU,IAAI,CAACC,WAAW,EAAE;AAC5B,4BAAA,IAAI,CAACC,MAAM,CAACC,KAAK,CAAC,CAAC,+BAA+B,EAAEH,OAAAA,CAAQ,SAAS,EAAEH,YAAAA,CAAaZ,GAAG,CAAC,CAAC,CAAC,CAAA;4BAC1F,IAAI;AACAO,gCAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,4BAAA,CAAA,CAAE,OAAM;;AAER,4BAAA;AACJ,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAM;;wBAEJ,IAAI;AACAF,4BAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,wBAAA,CAAA,CAAE,OAAM;;AAER,wBAAA;AACJ,oBAAA;AACJ,gBAAA;;gBAGAF,EAAAA,CAAGa,aAAa,CAAC,IAAI,CAACX,QAAQ,EAAEI,IAAAA,CAAKQ,SAAS,CAACtB,QAAAA,EAAU,IAAA,EAAM,CAAA,CAAA,EAAI;oBAAEuB,IAAAA,EAAM;AAAK,iBAAA,CAAA;gBAChF,IAAI,CAACC,YAAY,GAAG,IAAA;AAEpB,gBAAA,IAAI5B,WAAW,CAAA,EAAG;AACd,oBAAA,IAAI,CAACsB,MAAM,CAACC,KAAK,CAAC,CAAC,yBAAyB,EAAEvB,QAAAA,CAAS,WAAW,EAAE,IAAI,CAACc,QAAQ,CAAA,CAAE,CAAA;AACvF,gBAAA;AAEA,gBAAA;AACJ,YAAA,CAAA,CAAE,OAAOe,KAAAA,EAAY;gBACjB,IAAIA,KAAAA,CAAMC,IAAI,KAAK,QAAA,EAAU;;AAEzB9B,oBAAAA,QAAAA,EAAAA;AAEA,oBAAA,IAAIA,QAAAA,KAAa,CAAA,IAAKA,QAAAA,GAAW,EAAA,KAAO,CAAA,EAAG;wBACvC,IAAI,CAACsB,MAAM,CAACS,OAAO,CAAC,CAAC,+BAA+B,EAAE/B,QAAAA,CAAS,CAAC,EAAE,IAAI,CAACG,UAAU,CAAC,GAAG,EAAE,IAAI,CAACW,QAAQ,CAAA,CAAE,CAAA;AAC1G,oBAAA;AAEA,oBAAA,MAAM,IAAIkB,OAAAA,CAAQC,CAAAA,OAAAA,GAAWC,WAAWD,OAAAA,EA
AShC,YAAAA,CAAAA,CAAAA;;AAGjDA,oBAAAA,YAAAA,GAAekC,KAAKC,GAAG,CAACnC,eAAe,GAAA,EAAK,IAAI,CAACoC,aAAa,CAAA;gBAClE,CAAA,MAAO;;AAEH,oBAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,4BAA4B,EAAE,IAAI,CAACxB,QAAQ,CAAC,EAAE,EAAEe,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;AACpF,gBAAA;AACJ,YAAA;AACJ,QAAA;AAEA,QAAA,MAAM,IAAID,KAAAA,CAAM,CAAC,kCAAkC,EAAE,IAAI,CAACnC,UAAU,CAAC,WAAW,EAAE,IAAI,CAACW,QAAQ,CAAA,CAAE,CAAA;AACrG,IAAA;AAEA;;AAEC,QACD0B,MAAAA,GAAe;AACX,QAAA,IAAI,CAAC,IAAI,CAACZ,YAAY,EAAE;AACpB,YAAA;AACJ,QAAA;QAEA,IAAI;AACA,YAAA,IAAIhB,GAAGC,UAAU,CAAC,IAAI,CAACC,QAAQ,CAAA,EAAG;AAC9BF,gBAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,YAAA;YACA,IAAI,CAACc,YAAY,GAAG,KAAA;YACpB,IAAI,CAACN,MAAM,CAACmB,KAAK,CAAC,CAAC,oBAAoB,EAAE,IAAI,CAAC3B,QAAQ,CAAA,CAAE,CAAA;AAC5D,QAAA,CAAA,CAAE,OAAOe,KAAAA,EAAY;;AAEjB,YAAA,IAAI,CAACP,MAAM,CAACC,KAAK,CAAC,CAAC,0BAA0B,EAAE,IAAI,CAACT,QAAQ,CAAC,EAAE,EAAEe,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;YAChF,IAAI,CAACX,YAAY,GAAG,KAAA;AACxB,QAAA;AACJ,IAAA;AAEA;;AAEC,QACDc,QAAAA,GAAoB;QAChB,OAAO,IAAI,CAACd,YAAY;AAC5B,IAAA;AAxGA,IAAA,WAAA,CAAYd,QAAgB,CAAE;AAR9B,QAAA,gBAAA,CAAA,IAAA,EAAQA,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQc,cAAAA,EAAe,KAAA,CAAA;QACvB,gBAAA,CAAA,IAAA,EAAQzB,YAAAA,EAAa;QACrB,gBAAA,CAAA,IAAA,EAAQD,YAAAA,EAAa;QACrB,gBAAA,CAAA,IAAA,EAAQmC,eAAAA,EAAgB;QACxB,gBAAA,CAAA,IAAA,EAAQhB,aAAAA,EAAc;AACtB,QAAA,gBAAA,CAAA,IAAA,EAAQC,QAAAA,EAASqB,SAAAA,EAAAA,CAAAA;QAGb,IAAI,CAAC7B,QAAQ,GAAGA,QAAAA;AACpB,IAAA;AAuGJ;AAEA;;AAEC,IACM,MAAM8B,yBAAAA,CAAAA;AAKT;;;;QAKAC,iBAAAA,CAAkBC,QAAgB,EAAY;QAC1C,MAAMC,cAAAA,GAAiBC,IAAAA,CAAKf,OAAO,CAACa,QAAAA,CAAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAACG,KAAK,CAACC,GAAG,CAACH,cAAAA,CAAAA,EAAiB;;AAEjC,YAAA,MAAMI,UAAAA,GAAa,IAAI,CAACC,mBAAmB,CAACL,cAAAA,CAAAA;AAC5C,YAAA,MAAMjC,QAAAA,GAAWkC,IAAAA,CAAKK,IAAI,CAACF,UAAAA,EAAY,eAAA,CAAA;YACvC,IAAI,CAAC7B,MAAM,CAACC,KAAK,CAAC,CAAC,mCAAmC,EAAEwB,cAAAA,CAAe,IAAI,EAAEjC,QAAAA,CAAAA,CAAU,CAAA;AACvF,YAAA,IAAI,CAACmC,KAAK,CAACK,GAAG,CAACP,cAAAA,EAAgB,IAAIjD,QAAAA,CAASgB,QAAAA,CAAAA,CAAAA;;AAG5C,YAAA,IAAI,CAAC,IAAI,CAACyC,iBAAiB,EAAE;AACzB,gBAAA,IAAI,CAACC,uBAAuB,EAAA;gBAC5B,IAAI,CAACD,iBAAiB,GAAG,IAAA;AAC7B,YAAA;AACJ,QAAA;AAEA,QAAA,OAAO,IAAI,CAACN,KAAK,CAACQ,GAAG,CAACV,cAAAA,CAAAA;AAC1B,IAAA;AAEA;;;;QAKQK,mBAAAA,CAAoBN,QAAgB,EAAU;AAClD,QAAA,MAAMY,OAAAA,GAAUV,IAAAA,CAAKK,IAAI,CAACP,QAAAA,EAAU,MAAA,CAAA;QAEpC,IAAI;YACA,MAAMa,IAAAA,GAAO/C,EAAAA,CAAGgD,QAAQ,CAACF,OAAAA,CAAAA;YAEzB,IAAIC,IAAAA,CAAKE,WAAW,EAAA,EAAI;;gBAEpB,OAAOH,OAAAA;YACX,CAAA,MAAO,IAAIC,IAAAA,CAAKG,MAAM,EAAA,EAAI;;AAEtB,gBAAA,MAAMC,iBAAiBnD,EAAAA,CAAGI,YAAY,CAAC0C,OAAAA,EAAS,SAASM,IAAI,EAAA;gBAC7D,MAAMC,KAAAA,GAAQF,cAAAA,CAAeE,KAAK,CAAC,kBAAA,CAAA;AAEnC,gBAAA,IAAIA,KAAAA,IAASA,KAAK,CAAC,CAAA,CAAE,EAAE;;AAEnB,oBAAA,MAAMd,aAAaH,IAAAA,CAAKf,OAAO,CAACa,QAAAA,EAAUmB,KAAK,CAAC,CAAA,CAAE,CAAA;oBAClD,IAAI,CAAC3C,MAAM,CAACC,KAAK,CAAC,CAAC,2BAA2B,EAAE4B,UAAAA,CAAAA,CAAY,CAAA;;AAG5D,oBAAA,IAAI,CAACvC,EAAAA,CAAGC,UAAU,CAACsC,UAAAA,CAAAA,EAAa;AAC5B,wBAAA,MAAM,IAAIb,KAAAA,CAAM,CAAC,wCAAwC,EAAEa,UAAAA,CAAAA,CAAY,CAAA;AAC3E,oBAAA;oBAEA,OAAOA,UAAAA;AACX,gBAAA;gBAEA,MAAM,IAAIb,MAAM,CAAC,4BAA4B,EAAEoB,OAAAA,CAAQ,EAAE,EAAEK,cAAAA,CAAAA,CAAgB,CAAA;AAC/E,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOlC,KAAAA,EAAY;;YAEjB,IAAIA,KAAAA,CAAMC,IAAI,KAAK,QAAA,EAAU;AACzB,gBAAA,MAAM,IAAIQ,KAAAA,CAAM,CAAC,mCAAmC,EAAEQ,QAAAA,CAAAA,CAAU,CAAA;AACpE,YAAA;YACA,MAAM,IAAIR,KAAAA,CAAM,CAAC,oCAAoC,EAAEQ,SAAS,EAAE,EAAEjB,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;AACvF,QAAA;AAEA,QAAA,MAAM,IAAID,KAAAA,CAAM,CAAC,mCAAmC,EAAEQ,QAAAA,CAAAA,CAAU,CAAA;AACpE,IAAA;AAEA;;AAEC,QACD,uBAAQU,GAAgC;AACpC,QAAA,MAAMU,OAAAA,GAAU,IAAA;AACZ,YAAA,IAAI,CAACC,OAA
O,EAAA;AAChB,QAAA,CAAA;;QAGA7D,OAAAA,CAAQ8D,EAAE,CAAC,MAAA,EAAQF,OAAAA,CAAAA;QACnB5D,OAAAA,CAAQ8D,EAAE,CAAC,QAAA,EAAU,IAAA;AACjBF,YAAAA,OAAAA,EAAAA;YACA5D,OAAAA,CAAQ+D,IAAI,CAAC,GAAA,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;QACA/D,OAAAA,CAAQ8D,EAAE,CAAC,SAAA,EAAW,IAAA;AAClBF,YAAAA,OAAAA,EAAAA;YACA5D,OAAAA,CAAQ+D,IAAI,CAAC,GAAA,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;QACA/D,OAAAA,CAAQ8D,EAAE,CAAC,mBAAA,EAAqB,CAACvC,KAAAA,GAAAA;AAC7B,YAAA,IAAI,CAACP,MAAM,CAACO,KAAK,CAAC,0FAA0FA,KAAAA,GAAQ,8BAAA,CAAA;AACpHqC,YAAAA,OAAAA,EAAAA;AACA5D,YAAAA,OAAAA,CAAQ+D,IAAI,CAAC,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;AACJ,IAAA;AAEA;;;;;;AAMC,QACD,MAAMC,WAAAA,CACFxB,QAAgB,EAChByB,SAA2B,EAC3BC,aAAsB,EACZ;AACV,QAAA,MAAMzE,IAAAA,GAAO,IAAI,CAAC8C,iBAAiB,CAACC,QAAAA,CAAAA;QACpC,MAAM2B,SAAAA,GAAYjE,KAAKC,GAAG,EAAA;AAE1B,QAAA,IAAI,CAACa,MAAM,CAACmB,KAAK,CACb,CAAC,wBAAwB,EAAEK,QAAAA,CAAAA,EAAW0B,gBAAgB,CAAC,MAAM,EAAEA,aAAAA,CAAAA,CAAe,GAAG,EAAA,CAAA,CAAI,CAAA;AAGzF,QAAA,MAAMzE,KAAKA,IAAI,EAAA;QAEf,MAAM2E,QAAAA,GAAWlE,IAAAA,CAAKC,GAAG,EAAA,GAAKgE,SAAAA;AAC9B,QAAA,IAAIC,WAAW,GAAA,EAAK;YAChB,IAAI,CAACpD,MAAM,CAACC,KAAK,CACb,CAAC,uBAAuB,EAAEuB,QAAAA,CAAS,OAAO,EAAE4B,QAAAA,CAAS,EAAE,EAAEF,aAAAA,GAAgB,CAAC,MAAM,EAAEA,aAAAA,CAAAA,CAAe,GAAG,EAAA,CAAA,CAAI,CAAA;AAEhH,QAAA;QAEA,IAAI;AACA,YAAA,OAAO,MAAMD,SAAAA,EAAAA;QACjB,CAAA,QAAU;AACNxE,YAAAA,IAAAA,CAAKyC,MAAM,EAAA;AACf,QAAA;AACJ,IAAA;AAEA;;AAEC,QACD2B,OAAAA,GAAgB;AACZ,QAAA,IAAI,CAAC7C,MAAM,CAACC,KAAK,CAAC,CAAC,YAAY,EAAE,IAAI,CAAC0B,KAAK,CAAC0B,IAAI,CAAC,aAAa,CAAC,CAAA;AAC/D,QAAA,KAAK,MAAM5E,IAAAA,IAAQ,IAAI,CAACkD,KAAK,CAAC2B,MAAM,EAAA,CAAI;AACpC7E,YAAAA,IAAAA,CAAKyC,MAAM,EAAA;AACf,QAAA;QACA,IAAI,CAACS,KAAK,CAAC4B,KAAK,EAAA;AACpB,IAAA;;AA/IA,QAAA,gBAAA,CAAA,IAAA,EAAQ5B,SAA+B,IAAI6B,GAAAA,EAAAA,CAAAA;AAC3C,QAAA,gBAAA,CAAA,IAAA,EAAQxD,QAAAA,EAASqB,SAAAA,EAAAA,CAAAA;AACjB,QAAA,gBAAA,CAAA,IAAA,EAAQY,mBAAAA,EAAoB,KAAA,CAAA;;AA8IhC;;;;"}
|
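Note on the logging change above: the updated dist output replaces free-form log messages with a structured, grep-friendly format, namely an UPPER_SNAKE_CASE event code, a short description, and "Key: value" pairs separated by " | ". A minimal sketch of that convention follows; the formatEvent helper is hypothetical and is not part of kodrdriv, it only makes the pattern explicit.

    // Hypothetical helper mirroring the event-code log format seen in this diff.
    function formatEvent(code, message, fields) {
        const pairs = Object.entries(fields).map(([key, value]) => `${key}: ${value}`);
        return [`${code}: ${message}`, ...pairs].join(' | ');
    }

    // formatEvent('FILELOCK_UNCAUGHT_EXCEPTION', 'Uncaught exception detected, cleaning up locks',
    //     { Error: err, Action: 'Release all locks' })
    // => 'FILELOCK_UNCAUGHT_EXCEPTION: Uncaught exception detected, cleaning up locks | Error: ... | Action: Release all locks'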
package/dist/util/general.js
CHANGED
|
@@ -253,7 +253,7 @@ const calculateTargetVersion = (currentVersion, targetVersion)=>{
253 253   const finalTargetBranch = sourceConfig.targetBranch || targetBranch || 'main';
254 254   // Look at target branch's version config to determine what version it should have
255 255   const targetConfig = defaultConfig[finalTargetBranch];
256     - logger.info(
    256 + logger.info(`VERSION_BRANCH_DEFAULT: Using default branch configuration | Source Branch: ${currentBranch} | Target Branch: ${finalTargetBranch} | Source: default config`);
257 257   if (!(targetConfig === null || targetConfig === void 0 ? void 0 : targetConfig.version)) {
258 258   const defaultVersion = incrementPatchVersion(currentVersion);
259 259   logger.debug(`No version config for target branch '${finalTargetBranch}', using default increment`);

@@ -277,7 +277,7 @@ const calculateTargetVersion = (currentVersion, targetVersion)=>{
277 277   const finalTargetBranch = sourceConfig.targetBranch || targetBranch || 'main';
278 278   // Look at target branch's version config to determine what version it should have
279 279   const targetConfig = branchesConfig[finalTargetBranch];
280     - logger.info(
    280 + logger.info(`VERSION_BRANCH_DEPENDENT: Using branch-dependent targeting | Source Branch: ${currentBranch} | Target Branch: ${finalTargetBranch} | Source: branch config`);
281 281   if (!(targetConfig === null || targetConfig === void 0 ? void 0 : targetConfig.version)) {
282 282   // No version config for target, use default increment
283 283   const defaultVersion = incrementPatchVersion(currentVersion);

@@ -295,7 +295,7 @@ const calculateTargetVersion = (currentVersion, targetVersion)=>{
295 295   if (versionConfig.type === 'release') {
296 296   // Convert to release version (remove prerelease tags)
297 297   const releaseVersion = convertToReleaseVersion(currentVersion);
298     - logger.info(
    298 + logger.info(`VERSION_RELEASE_CONVERSION: Converting prerelease to release version | Current: ${currentVersion} | Release: ${releaseVersion} | Action: Remove prerelease tag`);
299 299   return {
300 300   version: releaseVersion,
301 301   targetBranch

@@ -311,7 +311,7 @@ const calculateTargetVersion = (currentVersion, targetVersion)=>{
311 311   if (targetBranchVersion) {
312 312   // Use the target branch version as the base and increment
313 313   const newVersion = incrementPrereleaseVersion(targetBranchVersion, tag);
314     - logger.info(
    314 + logger.info(`VERSION_PRERELEASE_INCREMENT: Incrementing prerelease version | Current: ${targetBranchVersion} | New: ${newVersion} | Action: Increment prerelease number`);
315 315   return {
316 316   version: newVersion,
317 317   targetBranch

@@ -319,7 +319,7 @@ const calculateTargetVersion = (currentVersion, targetVersion)=>{
319 319   } else {
320 320   // No version in target branch, use current version as base
321 321   const newVersion = incrementPrereleaseVersion(currentVersion, tag);
322     - logger.info(
    322 + logger.info(`VERSION_PRERELEASE_CREATE: Creating new prerelease version | Current: ${currentVersion} | New: ${newVersion} | Action: Add prerelease tag`);
323 323   return {
324 324   version: newVersion,
325 325   targetBranch

@@ -329,7 +329,7 @@ const calculateTargetVersion = (currentVersion, targetVersion)=>{
329 329   // Just add/change the prerelease tag without incrementing
330 330   const baseVersion = convertToReleaseVersion(currentVersion);
331 331   const newVersion = `${baseVersion}-${tag}.0`;
332     - logger.info(
    332 + logger.info(`VERSION_PRERELEASE_TAG: Setting prerelease tag | Current: ${currentVersion} | New: ${newVersion} | Tag: ${versionConfig.tag}`);
333 333   return {
334 334   version: newVersion,
335 335   targetBranch

@@ -375,11 +375,11 @@ const confirmVersionInteractively = async (currentVersion, proposedVersion, targ
375 375   const { getLogger } = await import('../logging.js');
376 376   requireTTY('Interactive version confirmation requires a terminal.');
377 377   const logger = getLogger();
378     - logger.info(`\
379     - logger.info(`
380     - logger.info(`
    378 + logger.info(`\nVERSION_CONFIRMATION: Version confirmation required | Current: ${currentVersion} | Proposed: ${proposedVersion}`);
    379 + logger.info(`VERSION_CURRENT: Current package version | Version: ${currentVersion}`);
    380 + logger.info(`VERSION_PROPOSED: Proposed new version | Version: ${proposedVersion}`);
381 381   if (targetVersionInput) {
382     - logger.info(`
    382 + logger.info(`VERSION_TARGET_INPUT: Target version provided | Input: ${targetVersionInput}`);
383 383   }
384 384   const choices = [
385 385   {

@@ -406,7 +406,7 @@ const confirmVersionInteractively = async (currentVersion, proposedVersion, targ
406 406   throw new Error(`Invalid version format: ${customVersion}. Expected format: "x.y.z"`);
407 407   }
408 408   const cleanCustomVersion = customVersion.startsWith('v') ? customVersion.slice(1) : customVersion;
409     - logger.info(
    409 + logger.info(`VERSION_CUSTOM_SELECTED: Using custom version from user input | Version: ${cleanCustomVersion} | Source: interactive input`);
410 410   return cleanCustomVersion;
411 411   }
412 412   case 'a':

@@ -485,22 +485,155 @@ const getTimestampedArchivedTranscriptFilename = ()=>{
485 485   await storage.writeFile(archivedAudioPath, audioBuffer, 'binary');
486 486   logger.debug('Archived audio file to: %s', archivedAudioPath);
487 487   } else {
488     - logger.warn('Original audio file not
    488 + logger.warn('AUDIO_FILE_NOT_FOUND: Original audio file not accessible | Path: %s | Impact: Cannot archive original', originalAudioPath);
489 489   }
490 490   // Save transcription text
491 491   const transcriptContent = `# Audio Transcription Archive\n\n**Original Audio File:** ${originalAudioPath}\n**Archived:** ${new Date().toISOString()}\n\n## Transcription\n\n${transcriptionText}`;
492 492   await storage.writeFile(archivedTranscriptPath, transcriptContent, 'utf8');
493 493   logger.debug('Archived transcription to: %s', archivedTranscriptPath);
494     - logger.info('
    494 + logger.info('AUDIO_ARCHIVED: Audio and transcript archived successfully | Audio: %s | Transcript: %s | Status: archived', archivedAudioFilename, archivedTranscriptFilename);
495 495   return {
496 496   audioPath: archivedAudioPath,
497 497   transcriptPath: archivedTranscriptPath
498 498   };
499 499   } catch (error) {
500     - logger.error('Failed to archive audio: %s', error.message);
    500 + logger.error('AUDIO_ARCHIVE_FAILED: Failed to archive audio files | Error: %s | Impact: Audio not preserved', error.message);
501 501   throw new Error(`Audio archiving failed: ${error.message}`);
502 502   }
503 503   };
    504 + /**
    505 +  * Query npm registry for published version of a package
    506 +  * Returns null if package is not published or on error
    507 +  */ const getNpmPublishedVersion = async (packageName)=>{
    508 +     const logger = getLogger();
    509 +     try {
    510 +         const { runSecure } = await import('@eldrforge/git-tools');
    511 +         // Use npm view to get the latest published version
    512 +         // --json flag ensures parseable output
    513 +         const { stdout } = await runSecure('npm', [
    514 +             'view',
    515 +             packageName,
    516 +             'version',
    517 +             '--json'
    518 +         ]);
    519 +         if (!stdout || stdout.trim() === '') {
    520 +             logger.verbose(`Package ${packageName} not found on npm registry`);
    521 +             return null;
    522 +         }
    523 +         // npm view returns just the version string for a single version
    524 +         const version = stdout.trim().replace(/^["']|["']$/g, ''); // Remove quotes if present
    525 +         logger.verbose(`Found ${packageName}@${version} on npm registry`);
    526 +         return version;
    527 +     } catch (error) {
    528 +         // Package not found or network error
    529 +         logger.verbose(`Could not query npm for ${packageName}: ${error.message}`);
    530 +         return null;
    531 +     }
    532 + };
    533 + /**
    534 +  * Get detailed info about a tag including the version it points to
    535 +  */ const getTagInfo = async (tagName)=>{
    536 +     try {
    537 +         const { runSecure, validateGitRef } = await import('@eldrforge/git-tools');
    538 +         if (!validateGitRef(tagName)) {
    539 +             throw new Error(`Invalid tag name: ${tagName}`);
    540 +         }
    541 +         // Check if tag exists
    542 +         const { stdout: tagList } = await runSecure('git', [
    543 +             'tag',
    544 +             '-l',
    545 +             tagName
    546 +         ]);
    547 +         if (tagList.trim() !== tagName) {
    548 +             return {
    549 +                 exists: false
    550 +             };
    551 +         }
    552 +         // Get the commit the tag points to
    553 +         const { stdout: commit } = await runSecure('git', [
    554 +             'rev-list',
    555 +             '-n',
    556 +             '1',
    557 +             tagName
    558 +         ]);
    559 +         // Extract version from tag name (assumes format like v1.2.3 or working/v1.2.3)
    560 +         const versionMatch = tagName.match(/v?(\d+\.\d+\.\d+(?:-[a-zA-Z0-9.-]+)?)/);
    561 +         const version = versionMatch ? versionMatch[1] : undefined;
    562 +         return {
    563 +             exists: true,
    564 +             commit: commit.trim(),
    565 +             version
    566 +         };
    567 +     } catch {
    568 +         return null;
    569 +     }
    570 + };
    571 + /**
    572 +  * Check if a version is a development/prerelease version (has prerelease tag)
    573 +  */ const isDevelopmentVersion = (version)=>{
    574 +     // Development versions have prerelease tags: 1.2.3-dev.0, 1.2.3-alpha.1, etc.
    575 +     return version.includes('-');
    576 + };
    577 + /**
    578 +  * Get expected version pattern for a branch
    579 +  */ const getExpectedVersionPattern = (branchName)=>{
    580 +     // Development/working branches should have prerelease versions
    581 +     const devBranchPatterns = /^(working|development|dev|feature\/|wip\/)/i;
    582 +     if (devBranchPatterns.test(branchName)) {
    583 +         return {
    584 +             pattern: /^\d+\.\d+\.\d+-[a-zA-Z0-9.-]+$/,
    585 +             description: 'X.Y.Z-<tag> (e.g., 1.2.3-dev.0)',
    586 +             isDevelopment: true
    587 +         };
    588 +     }
    589 +     // Main/master/production branches should have release versions
    590 +     const releaseBranchPatterns = /^(main|master|production|release\/)/i;
    591 +     if (releaseBranchPatterns.test(branchName)) {
    592 +         return {
    593 +             pattern: /^\d+\.\d+\.\d+$/,
    594 +             description: 'X.Y.Z (e.g., 1.2.3)',
    595 +             isDevelopment: false
    596 +         };
    597 +     }
    598 +     // For other branches, allow both but prefer release versions
    599 +     return {
    600 +         pattern: /^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/,
    601 +         description: 'X.Y.Z or X.Y.Z-<tag>',
    602 +         isDevelopment: false
    603 +     };
    604 + };
    605 + /**
    606 +  * Validate version against branch expectations
    607 +  */ const validateVersionForBranch = (version, branchName)=>{
    608 +     const expected = getExpectedVersionPattern(branchName);
    609 +     if (!expected.pattern.test(version)) {
    610 +         return {
    611 +             valid: false,
    612 +             issue: `Invalid version format for branch '${branchName}'`,
    613 +             fix: `Version should match ${expected.description}`
    614 +         };
    615 +     }
    616 +     const isDevVersion = isDevelopmentVersion(version);
    617 +     // Development branches should have development versions
    618 +     if (expected.isDevelopment && !isDevVersion) {
    619 +         return {
    620 +             valid: false,
    621 +             issue: `Release version on development branch '${branchName}'`,
    622 +             fix: 'Run kodrdriv development to update to development version'
    623 +         };
    624 +     }
    625 +     // Release branches should NOT have development versions
    626 +     if (!expected.isDevelopment && branchName.match(/^(main|master|production|release\/)/) && isDevVersion) {
    627 +         return {
    628 +             valid: false,
    629 +             issue: `Development version on release branch '${branchName}'`,
    630 +             fix: 'Do not commit development versions to release branches'
    631 +         };
    632 +     }
    633 +     return {
    634 +         valid: true
    635 +     };
    636 + };
504 637
505     - export { archiveAudio, calculateBranchDependentVersion, calculateTargetVersion, checkIfTagExists, confirmVersionInteractively, convertToReleaseVersion, findDevelopmentBranch, getOutputPath, getTimestampedArchivedAudioFilename, getTimestampedArchivedTranscriptFilename, getTimestampedAudioFilename, getTimestampedCommitFilename, getTimestampedFilename, getTimestampedReleaseNotesFilename, getTimestampedRequestFilename, getTimestampedResponseFilename, getTimestampedReviewFilename, getTimestampedReviewNotesFilename, getVersionFromBranch, incrementMajorVersion, incrementMinorVersion, incrementPatchVersion, incrementPrereleaseVersion, stringifyJSON, validateVersionString };
    638 + export { archiveAudio, calculateBranchDependentVersion, calculateTargetVersion, checkIfTagExists, confirmVersionInteractively, convertToReleaseVersion, findDevelopmentBranch, getExpectedVersionPattern, getNpmPublishedVersion, getOutputPath, getTagInfo, getTimestampedArchivedAudioFilename, getTimestampedArchivedTranscriptFilename, getTimestampedAudioFilename, getTimestampedCommitFilename, getTimestampedFilename, getTimestampedReleaseNotesFilename, getTimestampedRequestFilename, getTimestampedResponseFilename, getTimestampedReviewFilename, getTimestampedReviewNotesFilename, getVersionFromBranch, incrementMajorVersion, incrementMinorVersion, incrementPatchVersion, incrementPrereleaseVersion, isDevelopmentVersion, stringifyJSON, validateVersionForBranch, validateVersionString };
506 639   //# sourceMappingURL=general.js.map
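Note on the new exports above: dist/util/general.js now exposes getNpmPublishedVersion, getTagInfo, isDevelopmentVersion, getExpectedVersionPattern, and validateVersionForBranch alongside the existing helpers. A short usage sketch follows; the deep import specifier and calling context are assumptions for illustration only and are not confirmed by this diff.

    // Sketch only: import path is an assumption, not documented package API.
    import { validateVersionForBranch, isDevelopmentVersion, getNpmPublishedVersion } from '@eldrforge/kodrdriv/dist/util/general.js';

    isDevelopmentVersion('1.2.3');        // false (no prerelease tag)
    isDevelopmentVersion('1.2.3-dev.0');  // true

    // '1.2.3-dev.0' fails the release-branch pattern for 'main', so valid is false
    // and issue reports an invalid version format for that branch.
    const check = validateVersionForBranch('1.2.3-dev.0', 'main');

    // Resolves to the latest published version string, or null if the package is not on the registry.
    const published = await getNpmPublishedVersion('@eldrforge/kodrdriv');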