@triedotdev/mcp 1.0.93 → 1.0.97

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
Files changed (84)
  1. package/README.md +145 -137
  2. package/dist/chunk-APMV77PU.js +313 -0
  3. package/dist/chunk-APMV77PU.js.map +1 -0
  4. package/dist/{chunk-MZI46HQT.js → chunk-B3MNN3XB.js} +13 -18
  5. package/dist/{chunk-MZI46HQT.js.map → chunk-B3MNN3XB.js.map} +1 -1
  6. package/dist/{chunk-5Z7O66DE.js → chunk-F4NJ4CBP.js} +2 -2
  7. package/dist/{chunk-YTJXD664.js → chunk-FNCCZ3XB.js} +1222 -75
  8. package/dist/chunk-FNCCZ3XB.js.map +1 -0
  9. package/dist/chunk-G76DYVGX.js +136 -0
  10. package/dist/chunk-G76DYVGX.js.map +1 -0
  11. package/dist/chunk-HSNE46VE.js +956 -0
  12. package/dist/chunk-HSNE46VE.js.map +1 -0
  13. package/dist/{chunk-LVVG2DMW.js → chunk-IXO4G4D3.js} +2 -2
  14. package/dist/{chunk-LP4MVJDW.js → chunk-JDHR5BDR.js} +2 -3
  15. package/dist/chunk-NIASHOAB.js +1304 -0
  16. package/dist/chunk-NIASHOAB.js.map +1 -0
  17. package/dist/{chunk-NMGINYYX.js → chunk-OVRG5RP3.js} +6 -7
  18. package/dist/chunk-OVRG5RP3.js.map +1 -0
  19. package/dist/{chunk-T5UOH56R.js → chunk-R3I2GCZC.js} +3 -3
  20. package/dist/{chunk-RDOJCRKJ.js → chunk-R4AAPFXC.js} +2 -2
  21. package/dist/{chunk-R6AUYN3R.js → chunk-SLL2MDJD.js} +786 -4668
  22. package/dist/chunk-SLL2MDJD.js.map +1 -0
  23. package/dist/cli/create-agent.js +931 -7
  24. package/dist/cli/create-agent.js.map +1 -1
  25. package/dist/cli/main.js +151 -383
  26. package/dist/cli/main.js.map +1 -1
  27. package/dist/cli/yolo-daemon.js +13 -20
  28. package/dist/cli/yolo-daemon.js.map +1 -1
  29. package/dist/{goal-manager-ESJCJXFS.js → goal-manager-LAOT4QQX.js} +6 -6
  30. package/dist/guardian-agent-M352CBE5.js +19 -0
  31. package/dist/index.js +1025 -1550
  32. package/dist/index.js.map +1 -1
  33. package/dist/{issue-store-JZ2LCQEG.js → issue-store-W2X33X2X.js} +4 -4
  34. package/dist/{progress-PH6NNWZM.js → progress-PQVEM7BR.js} +2 -2
  35. package/dist/{vibe-code-signatures-K4UIWKJZ.js → vibe-code-signatures-ELEWJFGZ.js} +3 -3
  36. package/dist/{vulnerability-signatures-ZKVLMBRG.js → vulnerability-signatures-EIJQX2TS.js} +3 -3
  37. package/dist/workers/agent-worker.js +2 -11
  38. package/dist/workers/agent-worker.js.map +1 -1
  39. package/package.json +2 -2
  40. package/dist/agent-smith-QYDXPFPJ.js +0 -14
  41. package/dist/agent-smith-runner-GXGDJTSR.js +0 -573
  42. package/dist/agent-smith-runner-GXGDJTSR.js.map +0 -1
  43. package/dist/cache-manager-7SKX3IGO.js +0 -10
  44. package/dist/chunk-74NPKTZV.js +0 -141
  45. package/dist/chunk-74NPKTZV.js.map +0 -1
  46. package/dist/chunk-BG2BHWCC.js +0 -10879
  47. package/dist/chunk-BG2BHWCC.js.map +0 -1
  48. package/dist/chunk-CUXXRM3T.js +0 -2124
  49. package/dist/chunk-CUXXRM3T.js.map +0 -1
  50. package/dist/chunk-D25EIBPO.js +0 -183
  51. package/dist/chunk-D25EIBPO.js.map +0 -1
  52. package/dist/chunk-F55XBLIA.js +0 -536
  53. package/dist/chunk-F55XBLIA.js.map +0 -1
  54. package/dist/chunk-HFQ5ORON.js +0 -279
  55. package/dist/chunk-HFQ5ORON.js.map +0 -1
  56. package/dist/chunk-IOUOVBJZ.js +0 -175
  57. package/dist/chunk-IOUOVBJZ.js.map +0 -1
  58. package/dist/chunk-KWDNYWOR.js +0 -2270
  59. package/dist/chunk-KWDNYWOR.js.map +0 -1
  60. package/dist/chunk-LT7MKIXU.js +0 -266
  61. package/dist/chunk-LT7MKIXU.js.map +0 -1
  62. package/dist/chunk-MURGTWG4.js +0 -279
  63. package/dist/chunk-MURGTWG4.js.map +0 -1
  64. package/dist/chunk-NMGINYYX.js.map +0 -1
  65. package/dist/chunk-R6AUYN3R.js.map +0 -1
  66. package/dist/chunk-SJFJ6GLR.js +0 -955
  67. package/dist/chunk-SJFJ6GLR.js.map +0 -1
  68. package/dist/chunk-YTJXD664.js.map +0 -1
  69. package/dist/git-PZV3BBYI.js +0 -29
  70. package/dist/guardian-agent-ZHJXLBOU.js +0 -21
  71. package/dist/progress-PH6NNWZM.js.map +0 -1
  72. package/dist/vibe-code-signatures-K4UIWKJZ.js.map +0 -1
  73. package/dist/vulnerability-signatures-ZKVLMBRG.js.map +0 -1
  74. package/dist/{chunk-5Z7O66DE.js.map → chunk-F4NJ4CBP.js.map} +0 -0
  75. package/dist/{chunk-LVVG2DMW.js.map → chunk-IXO4G4D3.js.map} +0 -0
  76. package/dist/{chunk-LP4MVJDW.js.map → chunk-JDHR5BDR.js.map} +0 -0
  77. package/dist/{chunk-T5UOH56R.js.map → chunk-R3I2GCZC.js.map} +0 -0
  78. package/dist/{chunk-RDOJCRKJ.js.map → chunk-R4AAPFXC.js.map} +0 -0
  79. package/dist/{agent-smith-QYDXPFPJ.js.map → goal-manager-LAOT4QQX.js.map} +0 -0
  80. package/dist/{cache-manager-7SKX3IGO.js.map → guardian-agent-M352CBE5.js.map} +0 -0
  81. package/dist/{git-PZV3BBYI.js.map → issue-store-W2X33X2X.js.map} +0 -0
  82. package/dist/{goal-manager-ESJCJXFS.js.map → progress-PQVEM7BR.js.map} +0 -0
  83. package/dist/{guardian-agent-ZHJXLBOU.js.map → vibe-code-signatures-ELEWJFGZ.js.map} +0 -0
  84. package/dist/{issue-store-JZ2LCQEG.js.map → vulnerability-signatures-EIJQX2TS.js.map} +0 -0
package/dist/chunk-HFQ5ORON.js
@@ -1,279 +0,0 @@
- import {
-   getTrieDirectory
- } from "./chunk-RDOJCRKJ.js";
- import {
-   isInteractiveMode
- } from "./chunk-D25EIBPO.js";
-
- // src/utils/cache-manager.ts
- import { readFile, writeFile, mkdir, stat } from "fs/promises";
- import { join } from "path";
- import { createHash } from "crypto";
- var CacheManager = class {
-   cacheDir;
-   indexPath;
-   VERSION = "1.0.0";
-   MAX_AGE_MS = 24 * 60 * 60 * 1e3;
-   // 24 hours
-   MAX_ENTRIES = 1e3;
-   constructor(baseDir) {
-     this.cacheDir = join(getTrieDirectory(baseDir), "cache");
-     this.indexPath = join(this.cacheDir, "index.json");
-   }
-   /**
-    * Generate cache key for a file and agent combination
-    */
-   generateCacheKey(filePath, agent, fileHash) {
-     const key = `${filePath}:${agent}:${fileHash}`;
-     return createHash("sha256").update(key).digest("hex").slice(0, 16);
-   }
-   /**
-    * Get file hash for cache validation
-    */
-   async getFileHash(filePath) {
-     try {
-       const content = await readFile(filePath, "utf-8");
-       const stats = await stat(filePath);
-       const hash = createHash("sha256").update(content).digest("hex").slice(0, 16);
-       return {
-         hash,
-         size: stats.size,
-         mtime: stats.mtime.getTime()
-       };
-     } catch {
-       return { hash: "", size: 0, mtime: 0 };
-     }
-   }
-   /**
-    * Load cache index
-    */
-   async loadIndex() {
-     try {
-       const content = await readFile(this.indexPath, "utf-8");
-       return JSON.parse(content);
-     } catch {
-       return {
-         version: this.VERSION,
-         created: Date.now(),
-         entries: {}
-       };
-     }
-   }
-   /**
-    * Save cache index
-    */
-   async saveIndex(index) {
-     try {
-       await mkdir(this.cacheDir, { recursive: true });
-       await writeFile(this.indexPath, JSON.stringify(index, null, 2));
-     } catch (error) {
-       if (!isInteractiveMode()) {
-         console.warn("Failed to save cache index:", error);
-       }
-     }
-   }
-   /**
-    * Clean up expired entries
-    */
-   cleanupExpired(index) {
-     const now = Date.now();
-     const validEntries = {};
-     for (const [key, entry] of Object.entries(index.entries)) {
-       if (now - entry.timestamp < this.MAX_AGE_MS) {
-         validEntries[key] = entry;
-       }
-     }
-     const entries = Object.entries(validEntries);
-     if (entries.length > this.MAX_ENTRIES) {
-       entries.sort((a, b) => b[1].timestamp - a[1].timestamp);
-       const limited = entries.slice(0, this.MAX_ENTRIES);
-       return {
-         ...index,
-         entries: Object.fromEntries(limited)
-       };
-     }
-     return {
-       ...index,
-       entries: validEntries
-     };
-   }
-   /**
-    * Get cached result for a file and agent
-    *
-    * Cache automatically invalidates when files change:
-    * - Cache key includes file hash: hash(filePath:agent:fileHash)
-    * - When file changes, hash changes, so cache key changes
-    * - Old cache entry won't be found (different key)
-    * - File is automatically rescanned
-    *
-    * This means cache auto-updates when Claude fixes code - no manual invalidation needed!
-    */
-   async getCached(filePath, agent) {
-     try {
-       const { hash, size: _size, mtime: _mtime } = await this.getFileHash(filePath);
-       if (!hash) return null;
-       const index = await this.loadIndex();
-       const cacheKey = this.generateCacheKey(filePath, agent, hash);
-       const entry = index.entries[cacheKey];
-       if (!entry) return null;
-       const isValid = entry.fileHash === hash && entry.version === this.VERSION && Date.now() - entry.timestamp < this.MAX_AGE_MS;
-       if (!isValid) {
-         delete index.entries[cacheKey];
-         await this.saveIndex(index);
-         return null;
-       }
-       return entry.issues;
-     } catch {
-       return null;
-     }
-   }
-   /**
-    * Cache result for a file and agent
-    */
-   async setCached(filePath, agent, issues, executionTime) {
-     try {
-       const { hash, size } = await this.getFileHash(filePath);
-       if (!hash) return;
-       const index = await this.loadIndex();
-       const cacheKey = this.generateCacheKey(filePath, agent, hash);
-       index.entries[cacheKey] = {
-         version: this.VERSION,
-         timestamp: Date.now(),
-         fileHash: hash,
-         fileSize: size,
-         agent,
-         issues,
-         executionTime
-       };
-       const cleanedIndex = this.cleanupExpired(index);
-       await this.saveIndex(cleanedIndex);
-     } catch (error) {
-       if (!isInteractiveMode()) {
-         console.warn("Failed to cache result:", error);
-       }
-     }
-   }
-   /**
-    * Check if multiple files have cached results
-    */
-   async getCachedBatch(files, agent) {
-     const results = /* @__PURE__ */ new Map();
-     await Promise.all(
-       files.map(async (file) => {
-         const cached = await this.getCached(file, agent);
-         if (cached) {
-           results.set(file, cached);
-         }
-       })
-     );
-     return results;
-   }
-   /**
-    * Get cache statistics
-    */
-   async getStats() {
-     try {
-       const index = await this.loadIndex();
-       const entries = Object.values(index.entries);
-       const totalSizeKB = entries.reduce((acc, entry) => acc + entry.fileSize, 0) / 1024;
-       const timestamps = entries.map((e) => e.timestamp);
-       const agents = [...new Set(entries.map((e) => e.agent))];
-       return {
-         totalEntries: entries.length,
-         totalSizeKB: Math.round(totalSizeKB),
-         oldestEntry: timestamps.length > 0 ? Math.min(...timestamps) : null,
-         newestEntry: timestamps.length > 0 ? Math.max(...timestamps) : null,
-         agents
-       };
-     } catch {
-       return {
-         totalEntries: 0,
-         totalSizeKB: 0,
-         oldestEntry: null,
-         newestEntry: null,
-         agents: []
-       };
-     }
-   }
-   /**
-    * Clean up stale cache entries by verifying file hashes
-    * This removes entries where files have changed or no longer exist
-    * Called periodically to keep cache clean
-    *
-    * Note: Since cache keys are hashed, we can't easily reverse-engineer file paths.
-    * However, when getCached() is called, it naturally invalidates stale entries
-    * by checking if the current file hash matches the cached hash. This method
-    * proactively cleans up entries for known changed files.
-    */
-   async cleanupStaleEntries(filePaths) {
-     try {
-       const index = await this.loadIndex();
-       let removedCount = 0;
-       const keysToRemove = [];
-       if (filePaths && filePaths.length > 0) {
-         const agents = /* @__PURE__ */ new Set();
-         for (const entry of Object.values(index.entries)) {
-           agents.add(entry.agent);
-         }
-         for (const filePath of filePaths) {
-           try {
-             const { hash: currentHash } = await this.getFileHash(filePath);
-             if (!currentHash) {
-               continue;
-             }
-             for (const agent of agents) {
-               const expectedKey = this.generateCacheKey(filePath, agent, currentHash);
-               for (const [key, entry] of Object.entries(index.entries)) {
-                 if (entry.agent !== agent) continue;
-                 if (entry.fileHash !== currentHash) {
-                   const oldKey = this.generateCacheKey(filePath, agent, entry.fileHash);
-                   if (oldKey === key) {
-                     keysToRemove.push(key);
-                     removedCount++;
-                   }
-                 }
-               }
-             }
-           } catch {
-             continue;
-           }
-         }
-       }
-       const uniqueKeys = [...new Set(keysToRemove)];
-       for (const key of uniqueKeys) {
-         delete index.entries[key];
-       }
-       if (removedCount > 0) {
-         await this.saveIndex(index);
-       }
-       return removedCount;
-     } catch (error) {
-       if (!isInteractiveMode()) {
-         console.warn("Failed to cleanup stale cache entries:", error);
-       }
-       return 0;
-     }
-   }
-   /**
-    * Clear all cache
-    */
-   async clear() {
-     try {
-       const emptyIndex = {
-         version: this.VERSION,
-         created: Date.now(),
-         entries: {}
-       };
-       await this.saveIndex(emptyIndex);
-     } catch (error) {
-       if (!isInteractiveMode()) {
-         console.warn("Failed to clear cache:", error);
-       }
-     }
-   }
- };
-
- export {
-   CacheManager
- };
- //# sourceMappingURL=chunk-HFQ5ORON.js.map
package/dist/chunk-HFQ5ORON.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/utils/cache-manager.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
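The deleted chunk above is the compiled src/utils/cache-manager.ts. Its central idea is content-addressed invalidation: the cache key is derived from the file's own content hash, so an edited file maps to a brand-new key, the stale entry is simply never looked up again, and the size/TTL cleanup later drops it. Below is a minimal standalone sketch of that scheme, assuming only Node built-ins; fileHash and cacheKeyFor are illustrative names, not exports of @triedotdev/mcp.

// Sketch of the content-hash cache-key scheme from the deleted chunk.
// `fileHash` and `cacheKeyFor` are illustrative, not package APIs.
import { readFile } from "node:fs/promises";
import { createHash } from "node:crypto";

// Hash the file contents; editing the file changes this value.
async function fileHash(filePath: string): Promise<string> {
  const content = await readFile(filePath, "utf-8");
  return createHash("sha256").update(content).digest("hex").slice(0, 16);
}

// Key = sha256("path:agent:contentHash"), truncated to 16 hex chars.
// A changed file yields a different key, so the old entry is never
// found again -- no explicit invalidation step is needed.
async function cacheKeyFor(filePath: string, agent: string): Promise<string> {
  const key = `${filePath}:${agent}:${await fileHash(filePath)}`;
  return createHash("sha256").update(key).digest("hex").slice(0, 16);
}

cacheKeyFor("package.json", "security-agent").then(console.log);

The same derivation is what makes cleanupStaleEntries workable: hashed keys cannot be reversed to file paths, but regenerating a key from (path, agent, oldHash) and comparing it against stored keys identifies which entries belonged to a file that has since changed.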
package/dist/chunk-IOUOVBJZ.js
@@ -1,175 +0,0 @@
- import {
-   runExecFile
- } from "./chunk-MURGTWG4.js";
-
- // src/agent/git.ts
- import { existsSync } from "fs";
- import path from "path";
- async function execGit(args, cwd) {
-   try {
-     const { stdout } = await runExecFile(
-       "git",
-       ["-C", cwd, ...args],
-       { actor: "internal:git", triggeredBy: "manual", targetPath: cwd },
-       { maxBuffer: 10 * 1024 * 1024, captureOutput: false }
-     );
-     return stdout.trim();
-   } catch (error) {
-     const stderr = error?.stderr?.toString();
-     if (stderr?.includes("not a git repository") || stderr?.includes("does not have any commits")) {
-       return null;
-     }
-     throw error;
-   }
- }
- async function ensureRepo(projectPath) {
-   const result = await execGit(["rev-parse", "--is-inside-work-tree"], projectPath);
-   return result === "true";
- }
- function parseNameStatus(output) {
-   return output.split("\n").map((line) => line.trim()).filter(Boolean).map((line) => {
-     const parts = line.split("\t");
-     const status = parts[0] ?? "";
-     const filePath = parts[1] ?? "";
-     const oldPath = parts[2];
-     const change = { status, path: filePath };
-     if (oldPath) change.oldPath = oldPath;
-     return change;
-   }).filter((entry) => entry.path.length > 0);
- }
- async function getRecentCommits(projectPath, limit) {
-   const isRepo = await ensureRepo(projectPath);
-   if (!isRepo) return [];
-   const output = await execGit(
-     ["log", `-n`, String(limit), "--pretty=format:%H%x09%an%x09%ad%x09%s", "--date=iso"],
-     projectPath
-   );
-   if (!output) return [];
-   return output.split("\n").map((line) => {
-     const [hash, author, date, message] = line.split("\t");
-     return { hash, author, date, message };
-   });
- }
- async function getLastCommit(projectPath) {
-   const commits = await getRecentCommits(projectPath, 1);
-   return commits[0] ?? null;
- }
- async function getStagedChanges(projectPath) {
-   const isRepo = await ensureRepo(projectPath);
-   if (!isRepo) return [];
-   const output = await execGit(["diff", "--cached", "--name-status"], projectPath);
-   if (!output) return [];
-   return parseNameStatus(output);
- }
- async function getUncommittedChanges(projectPath) {
-   const isRepo = await ensureRepo(projectPath);
-   if (!isRepo) return [];
-   const changes = [];
-   const unstaged = await execGit(["diff", "--name-status"], projectPath);
-   if (unstaged) {
-     changes.push(...parseNameStatus(unstaged));
-   }
-   const untracked = await execGit(["ls-files", "--others", "--exclude-standard"], projectPath);
-   if (untracked) {
-     changes.push(
-       ...untracked.split("\n").map((p) => p.trim()).filter(Boolean).map((p) => ({ status: "??", path: p }))
-     );
-   }
-   return changes;
- }
- async function getDiff(projectPath, commitHash) {
-   const isRepo = await ensureRepo(projectPath);
-   if (!isRepo) return "";
-   const diff = await execGit(["show", commitHash, "--unified=3", "--no-color"], projectPath);
-   return diff ?? "";
- }
- async function getWorkingTreeDiff(projectPath, stagedOnly = false) {
-   const isRepo = await ensureRepo(projectPath);
-   if (!isRepo) return "";
-   const args = stagedOnly ? ["diff", "--cached", "--unified=3", "--no-color"] : ["diff", "--unified=3", "--no-color"];
-   const diff = await execGit(args, projectPath);
-   return diff ?? "";
- }
- async function getUnpushedCommits(projectPath) {
-   const isRepo = await ensureRepo(projectPath);
-   if (!isRepo) return [];
-   const upstream = await execGit(["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"], projectPath);
-   if (!upstream) {
-     return getRecentCommits(projectPath, 10);
-   }
-   const output = await execGit(["log", `${upstream}..HEAD`, "--pretty=format:%H%x09%an%x09%ad%x09%s", "--date=iso"], projectPath);
-   if (!output) return [];
-   return output.split("\n").filter(Boolean).map((line) => {
-     const [hash, author, date, message] = line.split("\t");
-     return { hash, author, date, message };
-   });
- }
- function resolveRepoPath(projectPath) {
-   const gitDir = path.join(projectPath, ".git");
-   if (existsSync(gitDir)) return projectPath;
-   return projectPath;
- }
- async function isGitRepo(projectPath) {
-   const result = await execGit(["rev-parse", "--is-inside-work-tree"], projectPath);
-   return result === "true";
- }
- async function getChangedFilesSinceTimestamp(projectPath, timestamp) {
-   const isRepo = await isGitRepo(projectPath);
-   if (!isRepo) return null;
-   try {
-     const sinceDate = new Date(timestamp).toISOString();
-     const GIT_TIMEOUT_MS = 5e3;
-     const startTime = Date.now();
-     const committedChangesPromise = execGit(
-       ["log", `--since=${sinceDate}`, "--name-only", "--pretty=format:"],
-       projectPath
-     );
-     const committedChangesTimeout = new Promise((resolve) => {
-       setTimeout(() => resolve(null), GIT_TIMEOUT_MS);
-     });
-     const committedChanges = await Promise.race([committedChangesPromise, committedChangesTimeout]);
-     if (Date.now() - startTime > GIT_TIMEOUT_MS) {
-       return null;
-     }
-     const stagedPromise = execGit(["diff", "--cached", "--name-only"], projectPath);
-     const unstagedPromise = execGit(["diff", "--name-only"], projectPath);
-     const untrackedPromise = execGit(
-       ["ls-files", "--others", "--exclude-standard"],
-       projectPath
-     );
-     const timeoutPromise = new Promise((resolve) => {
-       setTimeout(() => resolve(null), Math.max(0, GIT_TIMEOUT_MS - (Date.now() - startTime)));
-     });
-     const [stagedChanges, unstagedChanges, untrackedFiles] = await Promise.race([
-       Promise.all([stagedPromise, unstagedPromise, untrackedPromise]),
-       timeoutPromise.then(() => [null, null, null])
-     ]);
-     const changedFiles = /* @__PURE__ */ new Set();
-     const addFiles = (output) => {
-       if (output) {
-         output.split("\n").map((f) => f.trim()).filter(Boolean).forEach((f) => changedFiles.add(path.join(projectPath, f)));
-       }
-     };
-     addFiles(committedChanges);
-     addFiles(stagedChanges);
-     addFiles(unstagedChanges);
-     addFiles(untrackedFiles);
-     return Array.from(changedFiles);
-   } catch {
-     return null;
-   }
- }
-
- export {
-   getRecentCommits,
-   getLastCommit,
-   getStagedChanges,
-   getUncommittedChanges,
-   getDiff,
-   getWorkingTreeDiff,
-   getUnpushedCommits,
-   resolveRepoPath,
-   isGitRepo,
-   getChangedFilesSinceTimestamp
- };
- //# sourceMappingURL=chunk-IOUOVBJZ.js.map
package/dist/chunk-IOUOVBJZ.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/agent/git.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
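Two patterns in the deleted src/agent/git.ts chunk are worth pulling out: commit metadata is read with git log using %x09 (TAB) field separators and split on "\t", and the slower git calls are raced against a timeout so a huge or slow repository degrades to "no data" instead of blocking the agent. Below is a rough self-contained sketch of both, assuming plain child_process is an acceptable stand-in for the package's runExecFile wrapper; withTimeout and recentCommits are illustrative names.

// Sketch: tab-delimited `git log` parsing plus a Promise.race timeout.
// `withTimeout` and `recentCommits` are illustrative, not package APIs.
import { execFile } from "node:child_process";
import { promisify } from "node:util";

const execFileAsync = promisify(execFile);

interface Commit { hash: string; author: string; date: string; message: string; }

// Race a promise against a timer; resolve null if it is too slow.
function withTimeout<T>(p: Promise<T>, ms: number): Promise<T | null> {
  const timeout = new Promise<null>((resolve) => setTimeout(() => resolve(null), ms));
  return Promise.race([p, timeout]);
}

async function recentCommits(cwd: string, limit: number): Promise<Commit[]> {
  // %x09 inserts a literal TAB between the fields of each commit line.
  const args = ["-C", cwd, "log", "-n", String(limit),
    "--pretty=format:%H%x09%an%x09%ad%x09%s", "--date=iso"];
  const result = await withTimeout(execFileAsync("git", args), 5000);
  if (!result) return []; // timed out; treat as "no data"
  // Each line is "hash<TAB>author<TAB>date<TAB>subject".
  return result.stdout.split("\n").filter(Boolean).map((line) => {
    const [hash, author, date, message] = line.split("\t");
    return { hash, author, date, message };
  });
}

recentCommits(".", 5).then((commits) => console.log(commits));

As in the original, losing the race only abandons the result; the spawned git process is not killed, which is a tolerable trade-off for short-lived commands.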