@eldrforge/kodrdriv 1.2.22 → 1.2.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/PARALLEL-EXECUTION-FIXES.md +132 -0
  2. package/PARALLEL_EXECUTION_FIX.md +146 -0
  3. package/RECOVERY-FIXES.md +72 -0
  4. package/dist/arguments.js +26 -3
  5. package/dist/arguments.js.map +1 -1
  6. package/dist/commands/audio-commit.js +3 -3
  7. package/dist/commands/audio-commit.js.map +1 -1
  8. package/dist/commands/audio-review.js +13 -13
  9. package/dist/commands/audio-review.js.map +1 -1
  10. package/dist/commands/link.js +13 -13
  11. package/dist/commands/link.js.map +1 -1
  12. package/dist/commands/publish.js +200 -146
  13. package/dist/commands/publish.js.map +1 -1
  14. package/dist/commands/review.js +6 -6
  15. package/dist/commands/review.js.map +1 -1
  16. package/dist/commands/select-audio.js +4 -4
  17. package/dist/commands/select-audio.js.map +1 -1
  18. package/dist/commands/tree.js +242 -318
  19. package/dist/commands/tree.js.map +1 -1
  20. package/dist/commands/unlink.js +8 -8
  21. package/dist/commands/unlink.js.map +1 -1
  22. package/dist/commands/versions.js +3 -3
  23. package/dist/commands/versions.js.map +1 -1
  24. package/dist/constants.js +4 -4
  25. package/dist/constants.js.map +1 -1
  26. package/dist/content/diff.js +5 -2
  27. package/dist/content/diff.js.map +1 -1
  28. package/dist/content/files.js +4 -4
  29. package/dist/content/files.js.map +1 -1
  30. package/dist/execution/CommandValidator.js +160 -0
  31. package/dist/execution/CommandValidator.js.map +1 -0
  32. package/dist/execution/DependencyChecker.js +102 -0
  33. package/dist/execution/DependencyChecker.js.map +1 -0
  34. package/dist/execution/DynamicTaskPool.js +455 -0
  35. package/dist/execution/DynamicTaskPool.js.map +1 -0
  36. package/dist/execution/RecoveryManager.js +502 -0
  37. package/dist/execution/RecoveryManager.js.map +1 -0
  38. package/dist/execution/ResourceMonitor.js +125 -0
  39. package/dist/execution/ResourceMonitor.js.map +1 -0
  40. package/dist/execution/Scheduler.js +98 -0
  41. package/dist/execution/Scheduler.js.map +1 -0
  42. package/dist/execution/TreeExecutionAdapter.js +170 -0
  43. package/dist/execution/TreeExecutionAdapter.js.map +1 -0
  44. package/dist/logging.js +3 -3
  45. package/dist/logging.js.map +1 -1
  46. package/dist/ui/ProgressFormatter.js +230 -0
  47. package/dist/ui/ProgressFormatter.js.map +1 -0
  48. package/dist/util/checkpointManager.js +168 -0
  49. package/dist/util/checkpointManager.js.map +1 -0
  50. package/dist/util/dependencyGraph.js +224 -0
  51. package/dist/util/dependencyGraph.js.map +1 -0
  52. package/dist/util/fileLock.js +204 -0
  53. package/dist/util/fileLock.js.map +1 -0
  54. package/dist/util/general.js +5 -5
  55. package/dist/util/general.js.map +1 -1
  56. package/dist/util/gitMutex.js +116 -0
  57. package/dist/util/gitMutex.js.map +1 -0
  58. package/dist/util/mutex.js +96 -0
  59. package/dist/util/mutex.js.map +1 -0
  60. package/dist/util/performance.js +4 -4
  61. package/dist/util/performance.js.map +1 -1
  62. package/dist/util/safety.js +4 -4
  63. package/dist/util/safety.js.map +1 -1
  64. package/dist/util/storage.js +2 -2
  65. package/dist/util/storage.js.map +1 -1
  66. package/package.json +9 -9
@@ -0,0 +1,224 @@
1
+ import path__default from 'path';
2
+ import fs from 'fs/promises';
3
+ import { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';
4
+ import { getLogger } from '../logging.js';
5
+ import { create } from './storage.js';
6
+
7
/**
 * Check if a file path matches a glob pattern.
 *
 * Supported glob syntax:
 *   **  matches any sequence of characters, including path separators
 *   *   matches any sequence of characters except '/'
 *   ?   matches exactly one character
 *
 * The pattern is tested against the given path AND against its basename,
 * so a bare filename pattern like "package.json" matches at any depth.
 *
 * @param {string} filePath - Path to test.
 * @param {string} pattern - Glob pattern.
 * @returns {boolean} True when the path (or its basename) matches.
 */ const matchesPattern = (filePath, pattern)=>{
    // BUG FIX: literal regex characters must be escaped BEFORE the glob
    // wildcards are substituted. The previous version escaped '.' last,
    // which also escaped the '.' characters introduced by the '**' -> '.*'
    // and '?' -> '.' substitutions, corrupting the resulting regex.
    const DOUBLE_STAR = '\u0000'; // placeholder so the '*' rule cannot clobber '**'
    const regexPattern = pattern.replace(/\\/g, '\\\\') // Escape backslashes
        .replace(/[.+^${}()|[\]]/g, '\\$&') // Escape literal regex metacharacters
        .replace(/\*\*/g, DOUBLE_STAR) // Protect ** from the single-* rule
        .replace(/\*/g, '[^/]*') // * matches any characters except path separator
        .replace(/\?/g, '.') // ? matches any single character
        .split(DOUBLE_STAR).join('.*'); // ** matches any path segments
    const regex = new RegExp(`^${regexPattern}$`);
    return regex.test(filePath) || regex.test(path__default.basename(filePath));
};
19
/**
 * Decide whether a package.json should be skipped based on user-supplied
 * exclusion patterns. Each pattern is tried against four candidate forms:
 * the absolute path, the path relative to the current working directory,
 * and the directory portion of each.
 *
 * @param {string} packageJsonPath - Absolute path to a package.json.
 * @param {string[]} excludedPatterns - Glob patterns; empty/missing means "exclude nothing".
 * @returns {boolean} True when any pattern matches any candidate form.
 */ function shouldExclude(packageJsonPath, excludedPatterns) {
    if (!excludedPatterns?.length) {
        return false;
    }
    const cwdRelative = path__default.relative(process.cwd(), packageJsonPath);
    const candidates = [
        packageJsonPath,
        cwdRelative,
        path__default.dirname(packageJsonPath),
        path__default.dirname(cwdRelative)
    ];
    for (const pattern of excludedPatterns){
        if (candidates.some((candidate)=>matchesPattern(candidate, pattern))) {
            return true;
        }
    }
    return false;
}
29
/**
 * Scan a directory for package.json files.
 *
 * Looks at the directory itself and at each of its immediate
 * subdirectories (one level deep — not a recursive walk). Candidates
 * matching an exclusion pattern are logged and skipped.
 *
 * @param {string} directory - Directory to scan.
 * @param {string[]} [excludedPatterns=[]] - Patterns passed to shouldExclude.
 * @returns {Promise<string[]>} Paths of the package.json files found.
 * @throws When the directory itself cannot be read.
 */ async function scanForPackageJsonFiles(directory, excludedPatterns = []) {
    const logger = getLogger();
    const found = [];
    // Register one candidate path if it exists and is not excluded.
    // Mirrors the original's semantics: any error while probing a single
    // candidate is swallowed (a missing package.json is not an error).
    const considerCandidate = async (candidatePath)=>{
        try {
            await fs.access(candidatePath);
            if (shouldExclude(candidatePath, excludedPatterns)) {
                logger.verbose(`Excluding package.json at: ${candidatePath} (matches exclusion pattern)`);
            } else {
                found.push(candidatePath);
                logger.verbose(`Found package.json at: ${candidatePath}`);
            }
        } catch {
            // No package.json at this location; skip it.
        }
    };
    try {
        // The target directory itself may be a package root.
        await considerCandidate(path__default.join(directory, 'package.json'));
        // Each immediate subdirectory may also contain a package.json.
        const entries = await fs.readdir(directory, {
            withFileTypes: true
        });
        for (const entry of entries){
            if (!entry.isDirectory()) {
                continue;
            }
            await considerCandidate(path__default.join(directory, entry.name, 'package.json'));
        }
    } catch (error) {
        logger.error(`Failed to scan directory ${directory}: ${error}`);
        throw error;
    }
    return found;
}
77
/**
 * Read and validate a single package.json, returning a normalized
 * package record.
 *
 * @param {string} packageJsonPath - Path to the package.json file.
 * @returns {Promise<{name: string, version: string, path: string,
 *   dependencies: Set<string>, devDependencies: Set<string>,
 *   localDependencies: Set<string>}>}
 * @throws When the file cannot be read/parsed, fails validation, or has
 *   no "name" field (the error is logged before rethrowing).
 */ async function parsePackageJson(packageJsonPath) {
    const logger = getLogger();
    const storage = create({
        log: logger.info
    });
    try {
        const raw = await storage.readFile(packageJsonPath, 'utf-8');
        const packageJson = validatePackageJson(safeJsonParse(raw, packageJsonPath), packageJsonPath);
        if (!packageJson.name) {
            throw new Error(`Package at ${packageJsonPath} has no name field`);
        }
        const dependencies = new Set();
        const devDependencies = new Set();
        // Union every dependency section into one set, remembering which
        // names came specifically from devDependencies.
        const sections = [
            'dependencies',
            'devDependencies',
            'peerDependencies',
            'optionalDependencies'
        ];
        for (const section of sections){
            const declared = packageJson[section];
            if (!declared) {
                continue;
            }
            for (const depName of Object.keys(declared)){
                dependencies.add(depName);
                if (section === 'devDependencies') {
                    devDependencies.add(depName);
                }
            }
        }
        return {
            name: packageJson.name,
            version: packageJson.version || '0.0.0',
            path: path__default.dirname(packageJsonPath),
            dependencies,
            devDependencies,
            localDependencies: new Set() // Populated later by buildDependencyGraph
        };
    } catch (error) {
        logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
        throw error;
    }
}
123
/**
 * Build the dependency graph for a set of package.json paths.
 *
 * Pass 1 parses every manifest so all local package names are known;
 * pass 2 records an edge only for dependencies that are themselves
 * local packages, and fills each package's localDependencies set.
 *
 * @param {string[]} packageJsonPaths - Paths to package.json files.
 * @returns {Promise<{packages: Map, edges: Map, reverseEdges: Map}>}
 */ async function buildDependencyGraph(packageJsonPaths) {
    const logger = getLogger();
    const packages = new Map();
    const edges = new Map();
    // Pass 1: parse all manifests (sequentially, preserving input order).
    for (const manifestPath of packageJsonPaths){
        const info = await parsePackageJson(manifestPath);
        packages.set(info.name, info);
        logger.verbose(`Parsed package: ${info.name} at ${info.path}`);
    }
    // Pass 2: keep only dependencies that resolve to a local package.
    for (const [name, info] of packages){
        const localDeps = new Set();
        for (const dep of info.dependencies){
            if (!packages.has(dep)) {
                continue;
            }
            localDeps.add(dep);
            logger.verbose(`${name} depends on local package: ${dep}`);
        }
        info.localDependencies = localDeps;
        edges.set(name, new Set(localDeps));
    }
    // Derive the reverse mapping (package -> its dependents).
    const reverseEdges = buildReverseGraph(edges);
    return {
        packages,
        edges,
        reverseEdges
    };
}
157
/**
 * Invert a dependency edge map: for every edge pkg -> dep, record
 * dep -> pkg so each package knows its direct dependents. Packages
 * with no dependents simply do not appear in the result.
 *
 * @param {Map<string, Set<string>>} edges - package -> dependencies.
 * @returns {Map<string, Set<string>>} package -> direct dependents.
 */ function buildReverseGraph(edges) {
    const dependentsByPackage = new Map();
    for (const [dependent, dependencies] of edges){
        for (const dependency of dependencies){
            let bucket = dependentsByPackage.get(dependency);
            if (bucket === undefined) {
                bucket = new Set();
                dependentsByPackage.set(dependency, bucket);
            }
            bucket.add(dependent);
        }
    }
    return dependentsByPackage;
}
171
/**
 * Order packages so every package appears after all of its local
 * dependencies (post-order depth-first search over the edge map).
 *
 * @param {{packages: Map, edges: Map}} graph - Dependency graph.
 * @returns {string[]} Package names in dependency-first build order.
 * @throws Error when a dependency cycle is detected.
 */ function topologicalSort(graph) {
    const logger = getLogger();
    const { packages, edges } = graph;
    const done = new Set();
    const inProgress = new Set();
    const order = [];
    function visitPackage(name) {
        if (done.has(name)) {
            return;
        }
        // Re-entering a node on the current DFS path means a cycle.
        if (inProgress.has(name)) {
            throw new Error(`Circular dependency detected involving package: ${name}`);
        }
        inProgress.add(name);
        // Dependencies must be ordered before the package itself.
        for (const dependency of edges.get(name) || new Set()){
            visitPackage(dependency);
        }
        inProgress.delete(name);
        done.add(name);
        order.push(name);
    }
    for (const name of packages.keys()){
        visitPackage(name);
    }
    logger.verbose(`Topological sort completed. Build order determined for ${order.length} packages.`);
    return order;
}
205
/**
 * Collect every transitive dependent of a package — all packages that
 * directly or indirectly depend on it. The starting package itself is
 * not included (unless it participates in a dependency cycle).
 *
 * @param {string} packageName - Package whose dependents to find.
 * @param {{reverseEdges: Map<string, Set<string>>}} graph - Dependency graph.
 * @returns {Set<string>} Names of all transitive dependents.
 */ function findAllDependents(packageName, graph) {
    const collected = new Set();
    const seen = new Set();
    const walk = (current)=>{
        if (seen.has(current)) {
            return;
        }
        seen.add(current);
        for (const dependent of graph.reverseEdges.get(current) || new Set()){
            collected.add(dependent);
            walk(dependent);
        }
    };
    walk(packageName);
    return collected;
}
222
+
223
+ export { buildDependencyGraph, buildReverseGraph, findAllDependents, parsePackageJson, scanForPackageJsonFiles, shouldExclude, topologicalSort };
224
+ //# sourceMappingURL=dependencyGraph.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"dependencyGraph.js","sources":["../../src/util/dependencyGraph.ts"],"sourcesContent":["import path from 'path';\nimport fs from 'fs/promises';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\nimport { getLogger } from '../logging';\nimport { create as createStorage } from './storage';\n\n/**\n * Check if a file path matches a glob pattern\n */\nconst matchesPattern = (filePath: string, pattern: string): boolean => {\n // Convert simple glob patterns to regex\n const regexPattern = pattern\n .replace(/\\\\/g, '\\\\\\\\') // Escape backslashes\n .replace(/\\*\\*/g, '.*') // ** matches any path segments\n .replace(/\\*/g, '[^/]*') // * matches any characters except path separator\n .replace(/\\?/g, '.') // ? matches any single character\n .replace(/\\./g, '\\\\.'); // Escape literal dots\n\n const regex = new RegExp(`^${regexPattern}$`);\n return regex.test(filePath) || regex.test(path.basename(filePath));\n};\n\n/**\n * Check if a package should be excluded based on patterns\n */\nexport function shouldExclude(packageJsonPath: string, excludedPatterns: string[]): boolean {\n if (!excludedPatterns || excludedPatterns.length === 0) {\n return false;\n }\n\n // Check both the full path and relative path patterns\n const relativePath = path.relative(process.cwd(), packageJsonPath);\n\n return excludedPatterns.some(pattern =>\n matchesPattern(packageJsonPath, pattern) ||\n matchesPattern(relativePath, pattern) ||\n matchesPattern(path.dirname(packageJsonPath), pattern) ||\n matchesPattern(path.dirname(relativePath), pattern)\n );\n}\n\nexport interface PackageInfo {\n name: string;\n version: string;\n path: string;\n dependencies: Set<string>;\n devDependencies: Set<string>;\n localDependencies: Set<string>;\n}\n\nexport interface DependencyGraph {\n packages: Map<string, PackageInfo>;\n edges: Map<string, Set<string>>; // package -> dependencies\n reverseEdges: Map<string, Set<string>>; // package -> 
dependents\n}\n\nexport interface SerializedGraph {\n packages: Array<{\n name: string;\n version: string;\n path: string;\n dependencies: string[];\n }>;\n edges: Array<[string, string[]]>;\n}\n\n/**\n * Scan directory for package.json files\n */\nexport async function scanForPackageJsonFiles(\n directory: string,\n excludedPatterns: string[] = []\n): Promise<string[]> {\n const logger = getLogger();\n const packageJsonPaths: string[] = [];\n\n try {\n // First check if there's a package.json in the specified directory itself\n const directPackageJsonPath = path.join(directory, 'package.json');\n try {\n await fs.access(directPackageJsonPath);\n\n // Check if this package should be excluded\n if (!shouldExclude(directPackageJsonPath, excludedPatterns)) {\n packageJsonPaths.push(directPackageJsonPath);\n logger.verbose(`Found package.json at: ${directPackageJsonPath}`);\n } else {\n logger.verbose(`Excluding package.json at: ${directPackageJsonPath} (matches exclusion pattern)`);\n }\n } catch {\n // No package.json in the root of this directory, that's fine\n }\n\n // Then scan subdirectories for package.json files\n const entries = await fs.readdir(directory, { withFileTypes: true });\n\n for (const entry of entries) {\n if (entry.isDirectory()) {\n const subDirPath = path.join(directory, entry.name);\n const packageJsonPath = path.join(subDirPath, 'package.json');\n\n try {\n await fs.access(packageJsonPath);\n\n // Check if this package should be excluded\n if (shouldExclude(packageJsonPath, excludedPatterns)) {\n logger.verbose(`Excluding package.json at: ${packageJsonPath} (matches exclusion pattern)`);\n continue;\n }\n\n packageJsonPaths.push(packageJsonPath);\n logger.verbose(`Found package.json at: ${packageJsonPath}`);\n } catch {\n // No package.json in this directory, continue\n }\n }\n }\n } catch (error) {\n logger.error(`Failed to scan directory ${directory}: ${error}`);\n throw error;\n }\n\n return packageJsonPaths;\n}\n\n/**\n * Parse a single 
package.json file\n */\nexport async function parsePackageJson(packageJsonPath: string): Promise<PackageInfo> {\n const logger = getLogger();\n const storage = createStorage({ log: logger.info });\n\n try {\n const content = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(content, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (!packageJson.name) {\n throw new Error(`Package at ${packageJsonPath} has no name field`);\n }\n\n const dependencies = new Set<string>();\n const devDependencies = new Set<string>();\n\n // Collect all types of dependencies\n const depTypes = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies'];\n for (const depType of depTypes) {\n if (packageJson[depType]) {\n Object.keys(packageJson[depType]).forEach(dep => {\n dependencies.add(dep);\n if (depType === 'devDependencies') {\n devDependencies.add(dep);\n }\n });\n }\n }\n\n return {\n name: packageJson.name,\n version: packageJson.version || '0.0.0',\n path: path.dirname(packageJsonPath),\n dependencies,\n devDependencies,\n localDependencies: new Set() // Will be populated later\n };\n } catch (error) {\n logger.error(`Failed to parse package.json at ${packageJsonPath}: ${error}`);\n throw error;\n }\n}\n\n/**\n * Build dependency graph from package.json paths\n */\nexport async function buildDependencyGraph(\n packageJsonPaths: string[]\n): Promise<DependencyGraph> {\n const logger = getLogger();\n const packages = new Map<string, PackageInfo>();\n const edges = new Map<string, Set<string>>();\n\n // First pass: parse all package.json files\n for (const packageJsonPath of packageJsonPaths) {\n const packageInfo = await parsePackageJson(packageJsonPath);\n packages.set(packageInfo.name, packageInfo);\n logger.verbose(`Parsed package: ${packageInfo.name} at ${packageInfo.path}`);\n }\n\n // Second pass: identify local dependencies and build edges\n for (const [packageName, packageInfo] 
of packages) {\n const localDeps = new Set<string>();\n const edgesSet = new Set<string>();\n\n for (const dep of packageInfo.dependencies) {\n if (packages.has(dep)) {\n localDeps.add(dep);\n edgesSet.add(dep);\n logger.verbose(`${packageName} depends on local package: ${dep}`);\n }\n }\n\n packageInfo.localDependencies = localDeps;\n edges.set(packageName, edgesSet);\n }\n\n // Build reverse edges (dependents)\n const reverseEdges = buildReverseGraph(edges);\n\n return { packages, edges, reverseEdges };\n}\n\n/**\n * Build reverse dependency graph (package -> dependents)\n */\nexport function buildReverseGraph(\n edges: Map<string, Set<string>>\n): Map<string, Set<string>> {\n const reverse = new Map<string, Set<string>>();\n\n for (const [pkg, deps] of edges) {\n for (const dep of deps) {\n if (!reverse.has(dep)) {\n reverse.set(dep, new Set());\n }\n reverse.get(dep)!.add(pkg);\n }\n }\n\n return reverse;\n}\n\n/**\n * Perform topological sort on dependency graph\n */\nexport function topologicalSort(graph: DependencyGraph): string[] {\n const logger = getLogger();\n const { packages, edges } = graph;\n const visited = new Set<string>();\n const visiting = new Set<string>();\n const result: string[] = [];\n\n const visit = (packageName: string): void => {\n if (visited.has(packageName)) {\n return;\n }\n\n if (visiting.has(packageName)) {\n throw new Error(`Circular dependency detected involving package: ${packageName}`);\n }\n\n visiting.add(packageName);\n\n // Visit all dependencies first\n const deps = edges.get(packageName) || new Set();\n for (const dep of deps) {\n visit(dep);\n }\n\n visiting.delete(packageName);\n visited.add(packageName);\n result.push(packageName);\n };\n\n // Visit all packages\n for (const packageName of packages.keys()) {\n if (!visited.has(packageName)) {\n visit(packageName);\n }\n }\n\n logger.verbose(`Topological sort completed. 
Build order determined for ${result.length} packages.`);\n return result;\n}\n\n/**\n * Find all dependents of a package (packages that depend on it)\n */\nexport function findAllDependents(\n packageName: string,\n graph: DependencyGraph\n): Set<string> {\n const dependents = new Set<string>();\n const visited = new Set<string>();\n\n const traverse = (pkg: string) => {\n if (visited.has(pkg)) return;\n visited.add(pkg);\n\n const directDependents = graph.reverseEdges.get(pkg) || new Set();\n for (const dependent of directDependents) {\n dependents.add(dependent);\n traverse(dependent);\n }\n };\n\n traverse(packageName);\n return dependents;\n}\n\n/**\n * Serialize graph for checkpoint persistence\n */\nexport function serializeGraph(graph: DependencyGraph): SerializedGraph {\n return {\n packages: Array.from(graph.packages.values()).map(pkg => ({\n name: pkg.name,\n version: pkg.version,\n path: pkg.path,\n dependencies: Array.from(pkg.dependencies)\n })),\n edges: Array.from(graph.edges.entries()).map(([pkg, deps]) => [\n pkg,\n Array.from(deps)\n ])\n };\n}\n\n/**\n * Deserialize graph from checkpoint\n */\nexport function deserializeGraph(serialized: SerializedGraph): DependencyGraph {\n const packages = new Map<string, PackageInfo>();\n const edges = new Map<string, Set<string>>();\n\n // Restore packages\n for (const pkg of serialized.packages) {\n packages.set(pkg.name, {\n name: pkg.name,\n version: pkg.version,\n path: pkg.path,\n dependencies: new Set(pkg.dependencies),\n devDependencies: new Set(),\n localDependencies: new Set()\n });\n }\n\n // Restore edges\n for (const [pkg, deps] of serialized.edges) {\n edges.set(pkg, new Set(deps));\n }\n\n // Build reverse edges\n const reverseEdges = buildReverseGraph(edges);\n\n return { packages, edges, reverseEdges };\n}\n\n/**\n * Validate graph integrity\n */\nexport function validateGraph(graph: DependencyGraph): {\n valid: boolean;\n errors: string[];\n} {\n const errors: string[] = [];\n\n // Check all 
edge targets exist\n for (const [pkg, deps] of graph.edges) {\n for (const dep of deps) {\n if (!graph.packages.has(dep)) {\n errors.push(`Package ${pkg} depends on ${dep} which doesn't exist`);\n }\n }\n }\n\n // Check for circular dependencies\n try {\n topologicalSort(graph);\n } catch (error: any) {\n errors.push(error.message);\n }\n\n return {\n valid: errors.length === 0,\n errors\n };\n}\n"],"names":["matchesPattern","filePath","pattern","regexPattern","replace","regex","RegExp","test","path","basename","shouldExclude","packageJsonPath","excludedPatterns","length","relativePath","relative","process","cwd","some","dirname","scanForPackageJsonFiles","directory","logger","getLogger","packageJsonPaths","directPackageJsonPath","join","fs","access","push","verbose","entries","readdir","withFileTypes","entry","isDirectory","subDirPath","name","error","parsePackageJson","storage","createStorage","log","info","content","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","Error","dependencies","Set","devDependencies","depTypes","depType","Object","keys","forEach","dep","add","version","localDependencies","buildDependencyGraph","packages","Map","edges","packageInfo","set","packageName","localDeps","edgesSet","has","reverseEdges","buildReverseGraph","reverse","pkg","deps","get","topologicalSort","graph","visited","visiting","result","visit","delete","findAllDependents","dependents","traverse","directDependents","dependent"],"mappings":";;;;;;AAMA;;IAGA,MAAMA,cAAAA,GAAiB,CAACC,QAAAA,EAAkBC,OAAAA,GAAAA;;AAEtC,IAAA,MAAMC,eAAeD,OAAAA,CAChBE,OAAO,CAAC,KAAA,EAAO;KACfA,OAAO,CAAC,OAAA,EAAS,IAAA,CAAA;KACjBA,OAAO,CAAC,KAAA,EAAO,OAAA,CAAA;KACfA,OAAO,CAAC,KAAA,EAAO,GAAA,CAAA;KACfA,OAAO,CAAC,KAAA,EAAO,KAAA,CAAA,CAAA;IAEpB,MAAMC,KAAAA,GAAQ,IAAIC,MAAAA,CAAO,CAAC,CAAC,EAAEH,YAAAA,CAAa,CAAC,CAAC,CAAA;IAC5C,OAAOE,KAAAA,CAAME,IAAI,CAACN,QAAAA,CAAAA,IAAaI,MAAME,IAAI,CAACC,aAAAA,CAAKC,QAAQ,CAACR,QAAAA,CAAAA,CAAAA;AAC5D,CAAA;AAEA;;AAEC,IACM,SAASS,aAAAA,CAAcC,eAAuB,EAAEC,g
BAA0B,EAAA;AAC7E,IAAA,IAAI,CAACA,gBAAAA,IAAoBA,gBAAAA,CAAiBC,MAAM,KAAK,CAAA,EAAG;QACpD,OAAO,KAAA;AACX,IAAA;;AAGA,IAAA,MAAMC,eAAeN,aAAAA,CAAKO,QAAQ,CAACC,OAAAA,CAAQC,GAAG,EAAA,EAAIN,eAAAA,CAAAA;IAElD,OAAOC,gBAAAA,CAAiBM,IAAI,CAAChB,CAAAA,UACzBF,cAAAA,CAAeW,eAAAA,EAAiBT,YAChCF,cAAAA,CAAec,YAAAA,EAAcZ,YAC7BF,cAAAA,CAAeQ,aAAAA,CAAKW,OAAO,CAACR,eAAAA,CAAAA,EAAkBT,YAC9CF,cAAAA,CAAeQ,aAAAA,CAAKW,OAAO,CAACL,YAAAA,CAAAA,EAAeZ,OAAAA,CAAAA,CAAAA;AAEnD;AA2BA;;AAEC,IACM,eAAekB,uBAAAA,CAClBC,SAAiB,EACjBT,mBAA6B,EAAE,EAAA;AAE/B,IAAA,MAAMU,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,mBAA6B,EAAE;IAErC,IAAI;;AAEA,QAAA,MAAMC,qBAAAA,GAAwBjB,aAAAA,CAAKkB,IAAI,CAACL,SAAAA,EAAW,cAAA,CAAA;QACnD,IAAI;YACA,MAAMM,EAAAA,CAAGC,MAAM,CAACH,qBAAAA,CAAAA;;YAGhB,IAAI,CAACf,aAAAA,CAAce,qBAAAA,EAAuBb,gBAAAA,CAAAA,EAAmB;AACzDY,gBAAAA,gBAAAA,CAAiBK,IAAI,CAACJ,qBAAAA,CAAAA;AACtBH,gBAAAA,MAAAA,CAAOQ,OAAO,CAAC,CAAC,uBAAuB,EAAEL,qBAAAA,CAAAA,CAAuB,CAAA;YACpE,CAAA,MAAO;AACHH,gBAAAA,MAAAA,CAAOQ,OAAO,CAAC,CAAC,2BAA2B,EAAEL,qBAAAA,CAAsB,4BAA4B,CAAC,CAAA;AACpG,YAAA;AACJ,QAAA,CAAA,CAAE,OAAM;;AAER,QAAA;;AAGA,QAAA,MAAMM,OAAAA,GAAU,MAAMJ,EAAAA,CAAGK,OAAO,CAACX,SAAAA,EAAW;YAAEY,aAAAA,EAAe;AAAK,SAAA,CAAA;QAElE,KAAK,MAAMC,SAASH,OAAAA,CAAS;YACzB,IAAIG,KAAAA,CAAMC,WAAW,EAAA,EAAI;AACrB,gBAAA,MAAMC,aAAa5B,aAAAA,CAAKkB,IAAI,CAACL,SAAAA,EAAWa,MAAMG,IAAI,CAAA;AAClD,gBAAA,MAAM1B,eAAAA,GAAkBH,aAAAA,CAAKkB,IAAI,CAACU,UAAAA,EAAY,cAAA,CAAA;gBAE9C,IAAI;oBACA,MAAMT,EAAAA,CAAGC,MAAM,CAACjB,eAAAA,CAAAA;;oBAGhB,IAAID,aAAAA,CAAcC,iBAAiBC,gBAAAA,CAAAA,EAAmB;AAClDU,wBAAAA,MAAAA,CAAOQ,OAAO,CAAC,CAAC,2BAA2B,EAAEnB,eAAAA,CAAgB,4BAA4B,CAAC,CAAA;AAC1F,wBAAA;AACJ,oBAAA;AAEAa,oBAAAA,gBAAAA,CAAiBK,IAAI,CAAClB,eAAAA,CAAAA;AACtBW,oBAAAA,MAAAA,CAAOQ,OAAO,CAAC,CAAC,uBAAuB,EAAEnB,eAAAA,CAAAA,CAAiB,CAAA;AAC9D,gBAAA,CAAA,CAAE,OAAM;;AAER,gBAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA,CAAE,OAAO2B,KAAAA,EAAO;QACZhB,MAAAA,CAAOgB,KAAK,CAAC,CAAC,yBAAyB,EAAEjB,SAAAA,CAAU,EAAE,EAAEiB,KAAAA,CAAAA,CAAO,CAAA;QAC9D,MAAMA,KAAAA;AACV,IAAA;IAEA,OAAOd,gBAAAA;AACX;AAEA;;IAGO,eAAe
e,gBAAAA,CAAiB5B,eAAuB,EAAA;AAC1D,IAAA,MAAMW,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMiB,UAAUC,MAAAA,CAAc;AAAEC,QAAAA,GAAAA,EAAKpB,OAAOqB;AAAK,KAAA,CAAA;IAEjD,IAAI;AACA,QAAA,MAAMC,OAAAA,GAAU,MAAMJ,OAAAA,CAAQK,QAAQ,CAAClC,eAAAA,EAAiB,OAAA,CAAA;QACxD,MAAMmC,MAAAA,GAASC,cAAcH,OAAAA,EAASjC,eAAAA,CAAAA;QACtC,MAAMqC,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQnC,eAAAA,CAAAA;QAEhD,IAAI,CAACqC,WAAAA,CAAYX,IAAI,EAAE;AACnB,YAAA,MAAM,IAAIa,KAAAA,CAAM,CAAC,WAAW,EAAEvC,eAAAA,CAAgB,kBAAkB,CAAC,CAAA;AACrE,QAAA;AAEA,QAAA,MAAMwC,eAAe,IAAIC,GAAAA,EAAAA;AACzB,QAAA,MAAMC,kBAAkB,IAAID,GAAAA,EAAAA;;AAG5B,QAAA,MAAME,QAAAA,GAAW;AAAC,YAAA,cAAA;AAAgB,YAAA,iBAAA;AAAmB,YAAA,kBAAA;AAAoB,YAAA;AAAuB,SAAA;QAChG,KAAK,MAAMC,WAAWD,QAAAA,CAAU;YAC5B,IAAIN,WAAW,CAACO,OAAAA,CAAQ,EAAE;gBACtBC,MAAAA,CAAOC,IAAI,CAACT,WAAW,CAACO,QAAQ,CAAA,CAAEG,OAAO,CAACC,CAAAA,GAAAA,GAAAA;AACtCR,oBAAAA,YAAAA,CAAaS,GAAG,CAACD,GAAAA,CAAAA;AACjB,oBAAA,IAAIJ,YAAY,iBAAA,EAAmB;AAC/BF,wBAAAA,eAAAA,CAAgBO,GAAG,CAACD,GAAAA,CAAAA;AACxB,oBAAA;AACJ,gBAAA,CAAA,CAAA;AACJ,YAAA;AACJ,QAAA;QAEA,OAAO;AACHtB,YAAAA,IAAAA,EAAMW,YAAYX,IAAI;YACtBwB,OAAAA,EAASb,WAAAA,CAAYa,OAAO,IAAI,OAAA;YAChCrD,IAAAA,EAAMA,aAAAA,CAAKW,OAAO,CAACR,eAAAA,CAAAA;AACnBwC,YAAAA,YAAAA;AACAE,YAAAA,eAAAA;YACAS,iBAAAA,EAAmB,IAAIV;AAC3B,SAAA;AACJ,IAAA,CAAA,CAAE,OAAOd,KAAAA,EAAO;QACZhB,MAAAA,CAAOgB,KAAK,CAAC,CAAC,gCAAgC,EAAE3B,eAAAA,CAAgB,EAAE,EAAE2B,KAAAA,CAAAA,CAAO,CAAA;QAC3E,MAAMA,KAAAA;AACV,IAAA;AACJ;AAEA;;IAGO,eAAeyB,oBAAAA,CAClBvC,gBAA0B,EAAA;AAE1B,IAAA,MAAMF,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMyC,WAAW,IAAIC,GAAAA,EAAAA;AACrB,IAAA,MAAMC,QAAQ,IAAID,GAAAA,EAAAA;;IAGlB,KAAK,MAAMtD,mBAAmBa,gBAAAA,CAAkB;QAC5C,MAAM2C,WAAAA,GAAc,MAAM5B,gBAAAA,CAAiB5B,eAAAA,CAAAA;AAC3CqD,QAAAA,QAAAA,CAASI,GAAG,CAACD,WAAAA,CAAY9B,IAAI,EAAE8B,WAAAA,CAAAA;AAC/B7C,QAAAA,MAAAA,CAAOQ,OAAO,CAAC,CAAC,gBAAgB,EAAEqC,WAAAA,CAAY9B,IAAI,CAAC,IAAI,EAAE8B,WAAAA,CAAY3D,IAAI,CAAA,CAAE,CAAA;AAC/E,IAAA;;AAGA,IAAA,KAAK,MAAM,CAAC6D,WAAAA,EAAaF,WAAAA,CAAY,IAAIH,QAAAA,CAAU;AAC/C,QAAA,MAAMM,YAAY,IAAIlB,GAAAA,EAAAA;AACtB,QAAA,MAAMmB,W
AAW,IAAInB,GAAAA,EAAAA;AAErB,QAAA,KAAK,MAAMO,GAAAA,IAAOQ,WAAAA,CAAYhB,YAAY,CAAE;YACxC,IAAIa,QAAAA,CAASQ,GAAG,CAACb,GAAAA,CAAAA,EAAM;AACnBW,gBAAAA,SAAAA,CAAUV,GAAG,CAACD,GAAAA,CAAAA;AACdY,gBAAAA,QAAAA,CAASX,GAAG,CAACD,GAAAA,CAAAA;AACbrC,gBAAAA,MAAAA,CAAOQ,OAAO,CAAC,CAAA,EAAGuC,WAAAA,CAAY,2BAA2B,EAAEV,GAAAA,CAAAA,CAAK,CAAA;AACpE,YAAA;AACJ,QAAA;AAEAQ,QAAAA,WAAAA,CAAYL,iBAAiB,GAAGQ,SAAAA;QAChCJ,KAAAA,CAAME,GAAG,CAACC,WAAAA,EAAaE,QAAAA,CAAAA;AAC3B,IAAA;;AAGA,IAAA,MAAME,eAAeC,iBAAAA,CAAkBR,KAAAA,CAAAA;IAEvC,OAAO;AAAEF,QAAAA,QAAAA;AAAUE,QAAAA,KAAAA;AAAOO,QAAAA;AAAa,KAAA;AAC3C;AAEA;;IAGO,SAASC,iBAAAA,CACZR,KAA+B,EAAA;AAE/B,IAAA,MAAMS,UAAU,IAAIV,GAAAA,EAAAA;AAEpB,IAAA,KAAK,MAAM,CAACW,GAAAA,EAAKC,IAAAA,CAAK,IAAIX,KAAAA,CAAO;QAC7B,KAAK,MAAMP,OAAOkB,IAAAA,CAAM;AACpB,YAAA,IAAI,CAACF,OAAAA,CAAQH,GAAG,CAACb,GAAAA,CAAAA,EAAM;gBACnBgB,OAAAA,CAAQP,GAAG,CAACT,GAAAA,EAAK,IAAIP,GAAAA,EAAAA,CAAAA;AACzB,YAAA;AACAuB,YAAAA,OAAAA,CAAQG,GAAG,CAACnB,GAAAA,CAAAA,CAAMC,GAAG,CAACgB,GAAAA,CAAAA;AAC1B,QAAA;AACJ,IAAA;IAEA,OAAOD,OAAAA;AACX;AAEA;;IAGO,SAASI,eAAAA,CAAgBC,KAAsB,EAAA;AAClD,IAAA,MAAM1D,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM,EAAEyC,QAAQ,EAAEE,KAAK,EAAE,GAAGc,KAAAA;AAC5B,IAAA,MAAMC,UAAU,IAAI7B,GAAAA,EAAAA;AACpB,IAAA,MAAM8B,WAAW,IAAI9B,GAAAA,EAAAA;AACrB,IAAA,MAAM+B,SAAmB,EAAE;AAE3B,IAAA,MAAMC,QAAQ,CAACf,WAAAA,GAAAA;QACX,IAAIY,OAAAA,CAAQT,GAAG,CAACH,WAAAA,CAAAA,EAAc;AAC1B,YAAA;AACJ,QAAA;QAEA,IAAIa,QAAAA,CAASV,GAAG,CAACH,WAAAA,CAAAA,EAAc;AAC3B,YAAA,MAAM,IAAInB,KAAAA,CAAM,CAAC,gDAAgD,EAAEmB,WAAAA,CAAAA,CAAa,CAAA;AACpF,QAAA;AAEAa,QAAAA,QAAAA,CAAStB,GAAG,CAACS,WAAAA,CAAAA;;AAGb,QAAA,MAAMQ,IAAAA,GAAOX,KAAAA,CAAMY,GAAG,CAACT,gBAAgB,IAAIjB,GAAAA,EAAAA;QAC3C,KAAK,MAAMO,OAAOkB,IAAAA,CAAM;YACpBO,KAAAA,CAAMzB,GAAAA,CAAAA;AACV,QAAA;AAEAuB,QAAAA,QAAAA,CAASG,MAAM,CAAChB,WAAAA,CAAAA;AAChBY,QAAAA,OAAAA,CAAQrB,GAAG,CAACS,WAAAA,CAAAA;AACZc,QAAAA,MAAAA,CAAOtD,IAAI,CAACwC,WAAAA,CAAAA;AAChB,IAAA,CAAA;;AAGA,IAAA,KAAK,MAAMA,WAAAA,IAAeL,QAAAA,CAASP,IAAI,EAAA,CAAI;AACvC,QAAA,IAAI,CAACwB,OAAAA,CAAQT,GAAG,CAACH,WAAAA,C
AAAA,EAAc;YAC3Be,KAAAA,CAAMf,WAAAA,CAAAA;AACV,QAAA;AACJ,IAAA;IAEA/C,MAAAA,CAAOQ,OAAO,CAAC,CAAC,uDAAuD,EAAEqD,MAAAA,CAAOtE,MAAM,CAAC,UAAU,CAAC,CAAA;IAClG,OAAOsE,MAAAA;AACX;AAEA;;AAEC,IACM,SAASG,iBAAAA,CACZjB,WAAmB,EACnBW,KAAsB,EAAA;AAEtB,IAAA,MAAMO,aAAa,IAAInC,GAAAA,EAAAA;AACvB,IAAA,MAAM6B,UAAU,IAAI7B,GAAAA,EAAAA;AAEpB,IAAA,MAAMoC,WAAW,CAACZ,GAAAA,GAAAA;QACd,IAAIK,OAAAA,CAAQT,GAAG,CAACI,GAAAA,CAAAA,EAAM;AACtBK,QAAAA,OAAAA,CAAQrB,GAAG,CAACgB,GAAAA,CAAAA;AAEZ,QAAA,MAAMa,mBAAmBT,KAAAA,CAAMP,YAAY,CAACK,GAAG,CAACF,QAAQ,IAAIxB,GAAAA,EAAAA;QAC5D,KAAK,MAAMsC,aAAaD,gBAAAA,CAAkB;AACtCF,YAAAA,UAAAA,CAAW3B,GAAG,CAAC8B,SAAAA,CAAAA;YACfF,QAAAA,CAASE,SAAAA,CAAAA;AACb,QAAA;AACJ,IAAA,CAAA;IAEAF,QAAAA,CAASnB,WAAAA,CAAAA;IACT,OAAOkB,UAAAA;AACX;;;;"}
@@ -0,0 +1,204 @@
1
+ import * as fs from 'fs';
2
+ import * as path from 'path';
3
+ import * as os from 'os';
4
+ import { getLogger } from '../logging.js';
5
+
6
+ // eslint-disable-next-line no-restricted-imports
7
/**
 * Transpiler helper: install `value` under `key` on `obj` as a normal
 * (enumerable, configurable, writable) own property.
 *
 * When `key` is not already visible on `obj` (own or inherited), a plain
 * assignment suffices. When it IS visible, Object.defineProperty is used so
 * that an inherited setter or non-writable inherited property cannot
 * intercept or block the write.
 *
 * @param {object} obj - Target object (mutated in place).
 * @param {string|symbol} key - Property key to define.
 * @param {*} value - Value to store.
 * @returns {object} The same `obj`, for chaining.
 */ function _define_property(obj, key, value) {
    if (!(key in obj)) {
        obj[key] = value;
        return obj;
    }
    Object.defineProperty(obj, key, {
        value,
        enumerable: true,
        configurable: true,
        writable: true
    });
    return obj;
}
20
/**
 * File-based lock for cross-process synchronization.
 * Coordinates multiple Node processes through atomic file creation: the 'wx'
 * flag makes writeFileSync fail with EEXIST when the lock file already
 * exists, so exactly one process can create it. Losers retry with exponential
 * backoff, and lock files older than `lockTimeout` are treated as leftovers
 * of a dead process and reaped.
 */ class FileLock {
    /**
     * @param {string} lockPath - Filesystem path of the lock file.
     */
    constructor(lockPath) {
        this.lockPath = lockPath;
        this.lockAcquired = false;
        this.maxRetries = 100; // Maximum number of lock attempts
        this.retryDelay = 100; // Initial retry delay in ms
        this.maxRetryDelay = 2000; // Maximum retry delay in ms
        this.lockTimeout = 30000; // Consider lock stale after 30 seconds
        this.logger = getLogger();
    }
    /**
     * Acquire the file lock with exponential backoff retry.
     * @returns {Promise<void>} resolves once the lock is held.
     * @throws {Error} when the lock cannot be acquired within `maxRetries`
     *     attempts, or on an unexpected filesystem error (permissions,
     *     missing parent directory, ...).
     */
    async lock() {
        let attempts = 0;
        let currentDelay = this.retryDelay;
        while (attempts < this.maxRetries) {
            try {
                const lockData = {
                    pid: process.pid,
                    timestamp: Date.now(),
                    hostname: os.hostname()
                };
                // Reap an existing lock file if it is stale or corrupt.
                if (fs.existsSync(this.lockPath)) {
                    let lockContent = null;
                    try {
                        lockContent = fs.readFileSync(this.lockPath, 'utf-8');
                    } catch {
                        // FIX: the lock file can vanish between existsSync and
                        // readFileSync when another process releases it. That
                        // ENOENT previously reached the error branch below (it is
                        // not EEXIST) and aborted the whole acquisition with
                        // "Failed to acquire file lock". Instead, fall through and
                        // attempt the atomic create.
                    }
                    if (lockContent !== null) {
                        try {
                            const existingLock = JSON.parse(lockContent);
                            const lockAge = Date.now() - existingLock.timestamp;
                            // If lock is stale, try to remove it
                            if (lockAge > this.lockTimeout) {
                                this.logger.debug(`Removing stale lock file (age: ${lockAge}ms, pid: ${existingLock.pid})`);
                                try {
                                    fs.unlinkSync(this.lockPath);
                                } catch {
                                    // Lock might have been removed by another process, continue
                                }
                            }
                        } catch {
                            // Invalid lock file, try to remove it
                            try {
                                fs.unlinkSync(this.lockPath);
                            } catch {
                                // Ignore errors
                            }
                        }
                    }
                }
                // Atomic create: 'wx' fails with EEXIST if the file already exists.
                fs.writeFileSync(this.lockPath, JSON.stringify(lockData, null, 2), {
                    flag: 'wx'
                });
                this.lockAcquired = true;
                if (attempts > 0) {
                    this.logger.debug(`Acquired file lock after ${attempts} attempts: ${this.lockPath}`);
                }
                return;
            } catch (error) {
                if (error.code !== 'EEXIST') {
                    // Unexpected error
                    throw new Error(`Failed to acquire file lock ${this.lockPath}: ${error.message}`);
                }
                // Lock file exists, retry with backoff
                attempts++;
                if (attempts === 1 || attempts % 10 === 0) {
                    this.logger.verbose(`Waiting for file lock (attempt ${attempts}/${this.maxRetries}): ${this.lockPath}`);
                }
                await new Promise((resolve) => setTimeout(resolve, currentDelay));
                // Exponential backoff, capped at maxRetryDelay.
                currentDelay = Math.min(currentDelay * 1.5, this.maxRetryDelay);
            }
        }
        throw new Error(`Failed to acquire file lock after ${this.maxRetries} attempts: ${this.lockPath}`);
    }
    /**
     * Release the file lock. Safe to call when the lock is not held (no-op).
     * NOTE(review): this unlinks by path without verifying the file still
     * contains our pid — after a stale-lock reap by a peer the file could
     * belong to another process; confirm whether an ownership check is needed.
     */
    unlock() {
        if (!this.lockAcquired) {
            return;
        }
        try {
            if (fs.existsSync(this.lockPath)) {
                fs.unlinkSync(this.lockPath);
            }
            this.lockAcquired = false;
            this.logger.silly(`Released file lock: ${this.lockPath}`);
        } catch (error) {
            // Lock file might have been removed by another process or stale lock cleanup
            this.logger.debug(`Error releasing file lock ${this.lockPath}: ${error.message}`);
            this.lockAcquired = false;
        }
    }
    /**
     * @returns {boolean} true if this instance currently holds the lock.
     */
    isLocked() {
        return this.lockAcquired;
    }
}
122
/**
 * Manages file-based locks for git repositories (cross-process safe).
 * Keeps one FileLock per normalized repository path and releases every held
 * lock on process exit, SIGINT, SIGTERM, or an uncaught exception.
 */ class RepositoryFileLockManager {
    constructor() {
        this.locks = new Map();
        this.logger = getLogger();
        this.cleanupRegistered = false;
    }
    /**
     * Get or create a file lock for a specific git repository
     * @param repoPath Path to the git repository root
     * @returns FileLock for this repository
     */
    getRepositoryLock(repoPath) {
        const key = path.resolve(repoPath);
        const existing = this.locks.get(key);
        if (existing) {
            return existing;
        }
        // Create lock file in .git directory to ensure it's in the repo
        const lockFile = path.join(key, '.git', 'kodrdriv.lock');
        this.logger.debug(`Creating file lock for repository: ${key}`);
        const lock = new FileLock(lockFile);
        this.locks.set(key, lock);
        // Register cleanup handler on first lock creation
        if (!this.cleanupRegistered) {
            this.registerCleanupHandlers();
            this.cleanupRegistered = true;
        }
        return lock;
    }
    /**
     * Register cleanup handlers to release locks on process exit
     */
    registerCleanupHandlers() {
        const cleanup = () => this.destroy();
        // Handle various exit scenarios
        process.on('exit', cleanup);
        process.on('SIGINT', () => {
            cleanup();
            process.exit(130); // Standard exit code for SIGINT
        });
        process.on('SIGTERM', () => {
            cleanup();
            process.exit(143); // Standard exit code for SIGTERM
        });
        process.on('uncaughtException', (error) => {
            this.logger.error('Uncaught exception, cleaning up locks:', error);
            cleanup();
            process.exit(1);
        });
    }
    /**
     * Execute a git operation with repository-level file locking
     * @param repoPath Path to the git repository root
     * @param operation The async operation to execute under lock
     * @param operationName Optional name for logging
     * @returns Result of the operation
     */
    async withGitLock(repoPath, operation, operationName) {
        const lock = this.getRepositoryLock(repoPath);
        const suffix = operationName ? ` for: ${operationName}` : '';
        const startWait = Date.now();
        this.logger.silly(`Acquiring file lock for ${repoPath}${suffix}`);
        await lock.lock();
        const waitTime = Date.now() - startWait;
        if (waitTime > 100) {
            this.logger.debug(`Acquired file lock for ${repoPath} after ${waitTime}ms${suffix}`);
        }
        try {
            return await operation();
        } finally {
            // Always release, even when the operation throws.
            lock.unlock();
        }
    }
    /**
     * Clean up all locks
     */
    destroy() {
        this.logger.debug(`Cleaning up ${this.locks.size} file lock(s)`);
        for (const lock of this.locks.values()) {
            lock.unlock();
        }
        this.locks.clear();
    }
}
202
+
203
+ export { FileLock, RepositoryFileLockManager };
204
+ //# sourceMappingURL=fileLock.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"fileLock.js","sources":["../../src/util/fileLock.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as os from 'os';\nimport { getLogger } from '../logging';\n\n/**\n * File-based lock for cross-process synchronization\n * Uses atomic file operations to coordinate across multiple Node processes\n */\nexport class FileLock {\n private lockPath: string;\n private lockAcquired = false;\n private maxRetries = 100; // Maximum number of lock attempts\n private retryDelay = 100; // Initial retry delay in ms\n private maxRetryDelay = 2000; // Maximum retry delay in ms\n private lockTimeout = 30000; // Consider lock stale after 30 seconds\n private logger = getLogger();\n\n constructor(lockPath: string) {\n this.lockPath = lockPath;\n }\n\n /**\n * Acquire the file lock with exponential backoff retry\n */\n async lock(): Promise<void> {\n let attempts = 0;\n let currentDelay = this.retryDelay;\n\n while (attempts < this.maxRetries) {\n try {\n // Try to create lock file atomically with 'wx' flag (fails if exists)\n const lockData = {\n pid: process.pid,\n timestamp: Date.now(),\n hostname: os.hostname()\n };\n\n // Check if lock file exists and is stale\n if (fs.existsSync(this.lockPath)) {\n const lockContent = fs.readFileSync(this.lockPath, 'utf-8');\n try {\n const existingLock = JSON.parse(lockContent);\n const lockAge = Date.now() - existingLock.timestamp;\n\n // If lock is stale, try to remove it\n if (lockAge > this.lockTimeout) {\n this.logger.debug(`Removing stale lock file (age: ${lockAge}ms, pid: ${existingLock.pid})`);\n try {\n fs.unlinkSync(this.lockPath);\n } catch {\n // Lock might have been removed by another process, continue\n }\n }\n } catch {\n // Invalid lock file, try to remove it\n try {\n fs.unlinkSync(this.lockPath);\n } catch {\n // Ignore errors\n }\n }\n }\n\n // Try to acquire lock\n fs.writeFileSync(this.lockPath, 
JSON.stringify(lockData, null, 2), { flag: 'wx' });\n this.lockAcquired = true;\n\n if (attempts > 0) {\n this.logger.debug(`Acquired file lock after ${attempts} attempts: ${this.lockPath}`);\n }\n\n return;\n } catch (error: any) {\n if (error.code === 'EEXIST') {\n // Lock file exists, retry with backoff\n attempts++;\n\n if (attempts === 1 || attempts % 10 === 0) {\n this.logger.verbose(`Waiting for file lock (attempt ${attempts}/${this.maxRetries}): ${this.lockPath}`);\n }\n\n await new Promise(resolve => setTimeout(resolve, currentDelay));\n\n // Exponential backoff\n currentDelay = Math.min(currentDelay * 1.5, this.maxRetryDelay);\n } else {\n // Unexpected error\n throw new Error(`Failed to acquire file lock ${this.lockPath}: ${error.message}`);\n }\n }\n }\n\n throw new Error(`Failed to acquire file lock after ${this.maxRetries} attempts: ${this.lockPath}`);\n }\n\n /**\n * Release the file lock\n */\n unlock(): void {\n if (!this.lockAcquired) {\n return;\n }\n\n try {\n if (fs.existsSync(this.lockPath)) {\n fs.unlinkSync(this.lockPath);\n }\n this.lockAcquired = false;\n this.logger.silly(`Released file lock: ${this.lockPath}`);\n } catch (error: any) {\n // Lock file might have been removed by another process or stale lock cleanup\n this.logger.debug(`Error releasing file lock ${this.lockPath}: ${error.message}`);\n this.lockAcquired = false;\n }\n }\n\n /**\n * Check if this instance currently holds the lock\n */\n isLocked(): boolean {\n return this.lockAcquired;\n }\n}\n\n/**\n * Manages file-based locks for git repositories (cross-process safe)\n */\nexport class RepositoryFileLockManager {\n private locks: Map<string, FileLock> = new Map();\n private logger = getLogger();\n private cleanupRegistered = false;\n\n /**\n * Get or create a file lock for a specific git repository\n * @param repoPath Path to the git repository root\n * @returns FileLock for this repository\n */\n getRepositoryLock(repoPath: string): FileLock {\n const normalizedPath = 
path.resolve(repoPath);\n\n if (!this.locks.has(normalizedPath)) {\n // Create lock file in .git directory to ensure it's in the repo\n const lockPath = path.join(normalizedPath, '.git', 'kodrdriv.lock');\n this.logger.debug(`Creating file lock for repository: ${normalizedPath}`);\n this.locks.set(normalizedPath, new FileLock(lockPath));\n\n // Register cleanup handler on first lock creation\n if (!this.cleanupRegistered) {\n this.registerCleanupHandlers();\n this.cleanupRegistered = true;\n }\n }\n\n return this.locks.get(normalizedPath)!;\n }\n\n /**\n * Register cleanup handlers to release locks on process exit\n */\n private registerCleanupHandlers(): void {\n const cleanup = () => {\n this.destroy();\n };\n\n // Handle various exit scenarios\n process.on('exit', cleanup);\n process.on('SIGINT', () => {\n cleanup();\n process.exit(130); // Standard exit code for SIGINT\n });\n process.on('SIGTERM', () => {\n cleanup();\n process.exit(143); // Standard exit code for SIGTERM\n });\n process.on('uncaughtException', (error) => {\n this.logger.error('Uncaught exception, cleaning up locks:', error);\n cleanup();\n process.exit(1);\n });\n }\n\n /**\n * Execute a git operation with repository-level file locking\n * @param repoPath Path to the git repository root\n * @param operation The async operation to execute under lock\n * @param operationName Optional name for logging\n * @returns Result of the operation\n */\n async withGitLock<T>(\n repoPath: string,\n operation: () => Promise<T>,\n operationName?: string\n ): Promise<T> {\n const lock = this.getRepositoryLock(repoPath);\n const startWait = Date.now();\n\n this.logger.silly(\n `Acquiring file lock for ${repoPath}${operationName ? ` for: ${operationName}` : ''}`\n );\n\n await lock.lock();\n\n const waitTime = Date.now() - startWait;\n if (waitTime > 100) {\n this.logger.debug(\n `Acquired file lock for ${repoPath} after ${waitTime}ms${operationName ? 
` for: ${operationName}` : ''}`\n );\n }\n\n try {\n return await operation();\n } finally {\n lock.unlock();\n }\n }\n\n /**\n * Clean up all locks\n */\n destroy(): void {\n this.logger.debug(`Cleaning up ${this.locks.size} file lock(s)`);\n for (const lock of this.locks.values()) {\n lock.unlock();\n }\n this.locks.clear();\n }\n}\n"],"names":["FileLock","lock","attempts","currentDelay","retryDelay","maxRetries","lockData","pid","process","timestamp","Date","now","hostname","os","fs","existsSync","lockPath","lockContent","readFileSync","existingLock","JSON","parse","lockAge","lockTimeout","logger","debug","unlinkSync","writeFileSync","stringify","flag","lockAcquired","error","code","verbose","Promise","resolve","setTimeout","Math","min","maxRetryDelay","Error","message","unlock","silly","isLocked","getLogger","RepositoryFileLockManager","getRepositoryLock","repoPath","normalizedPath","path","locks","has","join","set","cleanupRegistered","registerCleanupHandlers","get","cleanup","destroy","on","exit","withGitLock","operation","operationName","startWait","waitTime","size","values","clear","Map"],"mappings":";;;;;AAAA;;;;;;;;;;;;;;AAMA;;;AAGC,IACM,MAAMA,QAAAA,CAAAA;AAaT;;AAEC,QACD,MAAMC,IAAAA,GAAsB;AACxB,QAAA,IAAIC,QAAAA,GAAW,CAAA;QACf,IAAIC,YAAAA,GAAe,IAAI,CAACC,UAAU;AAElC,QAAA,MAAOF,QAAAA,GAAW,IAAI,CAACG,UAAU,CAAE;YAC/B,IAAI;;AAEA,gBAAA,MAAMC,QAAAA,GAAW;AACbC,oBAAAA,GAAAA,EAAKC,QAAQD,GAAG;AAChBE,oBAAAA,SAAAA,EAAWC,KAAKC,GAAG,EAAA;AACnBC,oBAAAA,QAAAA,EAAUC,GAAGD,QAAQ;AACzB,iBAAA;;AAGA,gBAAA,IAAIE,GAAGC,UAAU,CAAC,IAAI,CAACC,QAAQ,CAAA,EAAG;AAC9B,oBAAA,MAAMC,cAAcH,EAAAA,CAAGI,YAAY,CAAC,IAAI,CAACF,QAAQ,EAAE,OAAA,CAAA;oBACnD,IAAI;wBACA,MAAMG,YAAAA,GAAeC,IAAAA,CAAKC,KAAK,CAACJ,WAAAA,CAAAA;AAChC,wBAAA,MAAMK,OAAAA,GAAUZ,IAAAA,CAAKC,GAAG,EAAA,GAAKQ,aAAaV,SAAS;;AAGnD,wBAAA,IAAIa,OAAAA,GAAU,IAAI,CAACC,WAAW,EAAE;AAC5B,4BAAA,IAAI,CAACC,MAAM,CAACC,KAAK,CAAC,CAAC,+BAA+B,EAAEH,OAAAA,CAAQ,SAAS,EAAEH,YAAAA,CAAaZ,GAAG,CAAC,CAAC,CAAC,CAAA;4BAC1F,IAAI;AACAO,gCAAAA,EAAAA,CAAGY,UAAU,CAAC
,IAAI,CAACV,QAAQ,CAAA;AAC/B,4BAAA,CAAA,CAAE,OAAM;;AAER,4BAAA;AACJ,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAM;;wBAEJ,IAAI;AACAF,4BAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,wBAAA,CAAA,CAAE,OAAM;;AAER,wBAAA;AACJ,oBAAA;AACJ,gBAAA;;gBAGAF,EAAAA,CAAGa,aAAa,CAAC,IAAI,CAACX,QAAQ,EAAEI,IAAAA,CAAKQ,SAAS,CAACtB,QAAAA,EAAU,IAAA,EAAM,CAAA,CAAA,EAAI;oBAAEuB,IAAAA,EAAM;AAAK,iBAAA,CAAA;gBAChF,IAAI,CAACC,YAAY,GAAG,IAAA;AAEpB,gBAAA,IAAI5B,WAAW,CAAA,EAAG;AACd,oBAAA,IAAI,CAACsB,MAAM,CAACC,KAAK,CAAC,CAAC,yBAAyB,EAAEvB,QAAAA,CAAS,WAAW,EAAE,IAAI,CAACc,QAAQ,CAAA,CAAE,CAAA;AACvF,gBAAA;AAEA,gBAAA;AACJ,YAAA,CAAA,CAAE,OAAOe,KAAAA,EAAY;gBACjB,IAAIA,KAAAA,CAAMC,IAAI,KAAK,QAAA,EAAU;;AAEzB9B,oBAAAA,QAAAA,EAAAA;AAEA,oBAAA,IAAIA,QAAAA,KAAa,CAAA,IAAKA,QAAAA,GAAW,EAAA,KAAO,CAAA,EAAG;wBACvC,IAAI,CAACsB,MAAM,CAACS,OAAO,CAAC,CAAC,+BAA+B,EAAE/B,QAAAA,CAAS,CAAC,EAAE,IAAI,CAACG,UAAU,CAAC,GAAG,EAAE,IAAI,CAACW,QAAQ,CAAA,CAAE,CAAA;AAC1G,oBAAA;AAEA,oBAAA,MAAM,IAAIkB,OAAAA,CAAQC,CAAAA,OAAAA,GAAWC,WAAWD,OAAAA,EAAShC,YAAAA,CAAAA,CAAAA;;AAGjDA,oBAAAA,YAAAA,GAAekC,KAAKC,GAAG,CAACnC,eAAe,GAAA,EAAK,IAAI,CAACoC,aAAa,CAAA;gBAClE,CAAA,MAAO;;AAEH,oBAAA,MAAM,IAAIC,KAAAA,CAAM,CAAC,4BAA4B,EAAE,IAAI,CAACxB,QAAQ,CAAC,EAAE,EAAEe,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;AACpF,gBAAA;AACJ,YAAA;AACJ,QAAA;AAEA,QAAA,MAAM,IAAID,KAAAA,CAAM,CAAC,kCAAkC,EAAE,IAAI,CAACnC,UAAU,CAAC,WAAW,EAAE,IAAI,CAACW,QAAQ,CAAA,CAAE,CAAA;AACrG,IAAA;AAEA;;AAEC,QACD0B,MAAAA,GAAe;AACX,QAAA,IAAI,CAAC,IAAI,CAACZ,YAAY,EAAE;AACpB,YAAA;AACJ,QAAA;QAEA,IAAI;AACA,YAAA,IAAIhB,GAAGC,UAAU,CAAC,IAAI,CAACC,QAAQ,CAAA,EAAG;AAC9BF,gBAAAA,EAAAA,CAAGY,UAAU,CAAC,IAAI,CAACV,QAAQ,CAAA;AAC/B,YAAA;YACA,IAAI,CAACc,YAAY,GAAG,KAAA;YACpB,IAAI,CAACN,MAAM,CAACmB,KAAK,CAAC,CAAC,oBAAoB,EAAE,IAAI,CAAC3B,QAAQ,CAAA,CAAE,CAAA;AAC5D,QAAA,CAAA,CAAE,OAAOe,KAAAA,EAAY;;AAEjB,YAAA,IAAI,CAACP,MAAM,CAACC,KAAK,CAAC,CAAC,0BAA0B,EAAE,IAAI,CAACT,QAAQ,CAAC,EAAE,EAAEe,KAAAA,CAAMU,OAAO,CAAA,CAAE,CAAA;YAChF,IAAI,CAACX,YAAY,GAAG,KAAA;AACxB,QAAA;AACJ,IAAA;AAEA;;AAEC,QACDc,QAAAA,GAAoB;QAChB,OAAO,IAAI,CAACd,YAAY;AAC5B,I
AAA;AAxGA,IAAA,WAAA,CAAYd,QAAgB,CAAE;AAR9B,QAAA,gBAAA,CAAA,IAAA,EAAQA,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQc,cAAAA,EAAe,KAAA,CAAA;QACvB,gBAAA,CAAA,IAAA,EAAQzB,YAAAA,EAAa;QACrB,gBAAA,CAAA,IAAA,EAAQD,YAAAA,EAAa;QACrB,gBAAA,CAAA,IAAA,EAAQmC,eAAAA,EAAgB;QACxB,gBAAA,CAAA,IAAA,EAAQhB,aAAAA,EAAc;AACtB,QAAA,gBAAA,CAAA,IAAA,EAAQC,QAAAA,EAASqB,SAAAA,EAAAA,CAAAA;QAGb,IAAI,CAAC7B,QAAQ,GAAGA,QAAAA;AACpB,IAAA;AAuGJ;AAEA;;AAEC,IACM,MAAM8B,yBAAAA,CAAAA;AAKT;;;;QAKAC,iBAAAA,CAAkBC,QAAgB,EAAY;QAC1C,MAAMC,cAAAA,GAAiBC,IAAAA,CAAKf,OAAO,CAACa,QAAAA,CAAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAACG,KAAK,CAACC,GAAG,CAACH,cAAAA,CAAAA,EAAiB;;AAEjC,YAAA,MAAMjC,QAAAA,GAAWkC,IAAAA,CAAKG,IAAI,CAACJ,gBAAgB,MAAA,EAAQ,eAAA,CAAA;YACnD,IAAI,CAACzB,MAAM,CAACC,KAAK,CAAC,CAAC,mCAAmC,EAAEwB,cAAAA,CAAAA,CAAgB,CAAA;AACxE,YAAA,IAAI,CAACE,KAAK,CAACG,GAAG,CAACL,cAAAA,EAAgB,IAAIjD,QAAAA,CAASgB,QAAAA,CAAAA,CAAAA;;AAG5C,YAAA,IAAI,CAAC,IAAI,CAACuC,iBAAiB,EAAE;AACzB,gBAAA,IAAI,CAACC,uBAAuB,EAAA;gBAC5B,IAAI,CAACD,iBAAiB,GAAG,IAAA;AAC7B,YAAA;AACJ,QAAA;AAEA,QAAA,OAAO,IAAI,CAACJ,KAAK,CAACM,GAAG,CAACR,cAAAA,CAAAA;AAC1B,IAAA;AAEA;;AAEC,QACD,uBAAQO,GAAgC;AACpC,QAAA,MAAME,OAAAA,GAAU,IAAA;AACZ,YAAA,IAAI,CAACC,OAAO,EAAA;AAChB,QAAA,CAAA;;QAGAnD,OAAAA,CAAQoD,EAAE,CAAC,MAAA,EAAQF,OAAAA,CAAAA;QACnBlD,OAAAA,CAAQoD,EAAE,CAAC,QAAA,EAAU,IAAA;AACjBF,YAAAA,OAAAA,EAAAA;YACAlD,OAAAA,CAAQqD,IAAI,CAAC,GAAA,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;QACArD,OAAAA,CAAQoD,EAAE,CAAC,SAAA,EAAW,IAAA;AAClBF,YAAAA,OAAAA,EAAAA;YACAlD,OAAAA,CAAQqD,IAAI,CAAC,GAAA,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;QACArD,OAAAA,CAAQoD,EAAE,CAAC,mBAAA,EAAqB,CAAC7B,KAAAA,GAAAA;AAC7B,YAAA,IAAI,CAACP,MAAM,CAACO,KAAK,CAAC,wCAAA,EAA0CA,KAAAA,CAAAA;AAC5D2B,YAAAA,OAAAA,EAAAA;AACAlD,YAAAA,OAAAA,CAAQqD,IAAI,CAAC,CAAA,CAAA;AACjB,QAAA,CAAA,CAAA;AACJ,IAAA;AAEA;;;;;;AAMC,QACD,MAAMC,WAAAA,CACFd,QAAgB,EAChBe,SAA2B,EAC3BC,aAAsB,EACZ;AACV,QAAA,MAAM/D,IAAAA,GAAO,IAAI,CAAC8C,iBAAiB,CAACC,QAAAA,CAAAA;QACpC,MAAMiB,SAAAA,GAAYvD,KAAKC,GAAG,EAAA;AAE1B,QAAA,IAAI,CAACa,MAAM,CAACmB,KAAK,CACb,CAAC,wBAAwB,EAAEK,QAA
AA,CAAAA,EAAWgB,gBAAgB,CAAC,MAAM,EAAEA,aAAAA,CAAAA,CAAe,GAAG,EAAA,CAAA,CAAI,CAAA;AAGzF,QAAA,MAAM/D,KAAKA,IAAI,EAAA;QAEf,MAAMiE,QAAAA,GAAWxD,IAAAA,CAAKC,GAAG,EAAA,GAAKsD,SAAAA;AAC9B,QAAA,IAAIC,WAAW,GAAA,EAAK;YAChB,IAAI,CAAC1C,MAAM,CAACC,KAAK,CACb,CAAC,uBAAuB,EAAEuB,QAAAA,CAAS,OAAO,EAAEkB,QAAAA,CAAS,EAAE,EAAEF,aAAAA,GAAgB,CAAC,MAAM,EAAEA,aAAAA,CAAAA,CAAe,GAAG,EAAA,CAAA,CAAI,CAAA;AAEhH,QAAA;QAEA,IAAI;AACA,YAAA,OAAO,MAAMD,SAAAA,EAAAA;QACjB,CAAA,QAAU;AACN9D,YAAAA,IAAAA,CAAKyC,MAAM,EAAA;AACf,QAAA;AACJ,IAAA;AAEA;;AAEC,QACDiB,OAAAA,GAAgB;AACZ,QAAA,IAAI,CAACnC,MAAM,CAACC,KAAK,CAAC,CAAC,YAAY,EAAE,IAAI,CAAC0B,KAAK,CAACgB,IAAI,CAAC,aAAa,CAAC,CAAA;AAC/D,QAAA,KAAK,MAAMlE,IAAAA,IAAQ,IAAI,CAACkD,KAAK,CAACiB,MAAM,EAAA,CAAI;AACpCnE,YAAAA,IAAAA,CAAKyC,MAAM,EAAA;AACf,QAAA;QACA,IAAI,CAACS,KAAK,CAACkB,KAAK,EAAA;AACpB,IAAA;;AAjGA,QAAA,gBAAA,CAAA,IAAA,EAAQlB,SAA+B,IAAImB,GAAAA,EAAAA,CAAAA;AAC3C,QAAA,gBAAA,CAAA,IAAA,EAAQ9C,QAAAA,EAASqB,SAAAA,EAAAA,CAAAA;AACjB,QAAA,gBAAA,CAAA,IAAA,EAAQU,mBAAAA,EAAoB,KAAA,CAAA;;AAgGhC;;;;"}
@@ -1,4 +1,4 @@
1
- import path from 'path';
1
+ import path__default from 'path';
2
2
  import { create } from './storage.js';
3
3
  import { getLogger } from '../logging.js';
4
4
  import * as fs from 'fs';
@@ -416,7 +416,7 @@ const confirmVersionInteractively = async (currentVersion, proposedVersion, targ
416
416
  }
417
417
  };
418
418
  const getOutputPath = (outputDirectory, filename)=>{
419
- return path.join(outputDirectory, filename);
419
+ return path__default.join(outputDirectory, filename);
420
420
  };
421
421
  const getTimestampedFilename = (baseName, extension = '.json')=>{
422
422
  const now = new Date();
@@ -471,13 +471,13 @@ const getTimestampedArchivedTranscriptFilename = ()=>{
471
471
  // Ensure the output directory exists (should already be output/kodrdriv)
472
472
  await storage.ensureDirectory(outputDirectory);
473
473
  // Get file extension from original audio file
474
- const originalExtension = path.extname(originalAudioPath);
474
+ const originalExtension = path__default.extname(originalAudioPath);
475
475
  // Generate timestamped filenames
476
476
  const archivedAudioFilename = getTimestampedArchivedAudioFilename(originalExtension);
477
477
  const archivedTranscriptFilename = getTimestampedArchivedTranscriptFilename();
478
478
  // Full paths for archived files - directly in the output directory
479
- const archivedAudioPath = path.join(outputDirectory, archivedAudioFilename);
480
- const archivedTranscriptPath = path.join(outputDirectory, archivedTranscriptFilename);
479
+ const archivedAudioPath = path__default.join(outputDirectory, archivedAudioFilename);
480
+ const archivedTranscriptPath = path__default.join(outputDirectory, archivedTranscriptFilename);
481
481
  // Copy audio file if it exists
482
482
  if (await storage.isFileReadable(originalAudioPath)) {
483
483
  // Read original audio file as buffer using fs directly for binary files