@triedotdev/mcp 1.0.169 → 1.0.171

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. package/README.md +62 -540
  2. package/dist/chunk-2YXOBNKW.js +619 -0
  3. package/dist/chunk-2YXOBNKW.js.map +1 -0
  4. package/dist/chunk-QR64Y5TI.js +363 -0
  5. package/dist/chunk-QR64Y5TI.js.map +1 -0
  6. package/dist/cli/main.d.ts +0 -15
  7. package/dist/cli/main.js +356 -3098
  8. package/dist/cli/main.js.map +1 -1
  9. package/dist/index.js +2 -34
  10. package/dist/index.js.map +1 -1
  11. package/dist/server/mcp-server.js +2 -34
  12. package/package.json +8 -31
  13. package/dist/autonomy-config-FSERX3O3.js +0 -30
  14. package/dist/autonomy-config-FSERX3O3.js.map +0 -1
  15. package/dist/chat-store-JNGNTDSN.js +0 -15
  16. package/dist/chat-store-JNGNTDSN.js.map +0 -1
  17. package/dist/chunk-2HF65EHQ.js +0 -311
  18. package/dist/chunk-2HF65EHQ.js.map +0 -1
  19. package/dist/chunk-3XR6WVAW.js +0 -4011
  20. package/dist/chunk-3XR6WVAW.js.map +0 -1
  21. package/dist/chunk-43X6JBEM.js +0 -36
  22. package/dist/chunk-43X6JBEM.js.map +0 -1
  23. package/dist/chunk-6NLHFIYA.js +0 -344
  24. package/dist/chunk-6NLHFIYA.js.map +0 -1
  25. package/dist/chunk-7IO4YUI3.js +0 -1827
  26. package/dist/chunk-7IO4YUI3.js.map +0 -1
  27. package/dist/chunk-AHD2CBQ7.js +0 -846
  28. package/dist/chunk-AHD2CBQ7.js.map +0 -1
  29. package/dist/chunk-BUTOP5EB.js +0 -931
  30. package/dist/chunk-BUTOP5EB.js.map +0 -1
  31. package/dist/chunk-DGUM43GV.js +0 -11
  32. package/dist/chunk-DGUM43GV.js.map +0 -1
  33. package/dist/chunk-EFWVF6TI.js +0 -267
  34. package/dist/chunk-EFWVF6TI.js.map +0 -1
  35. package/dist/chunk-F6WFNUAY.js +0 -216
  36. package/dist/chunk-F6WFNUAY.js.map +0 -1
  37. package/dist/chunk-FBNURWRY.js +0 -662
  38. package/dist/chunk-FBNURWRY.js.map +0 -1
  39. package/dist/chunk-FQ45QP5A.js +0 -361
  40. package/dist/chunk-FQ45QP5A.js.map +0 -1
  41. package/dist/chunk-FVRO5RN3.js +0 -1306
  42. package/dist/chunk-FVRO5RN3.js.map +0 -1
  43. package/dist/chunk-G2TGF6TR.js +0 -573
  44. package/dist/chunk-G2TGF6TR.js.map +0 -1
  45. package/dist/chunk-G3I7SZLW.js +0 -354
  46. package/dist/chunk-G3I7SZLW.js.map +0 -1
  47. package/dist/chunk-GTKYBOXL.js +0 -700
  48. package/dist/chunk-GTKYBOXL.js.map +0 -1
  49. package/dist/chunk-HVCDY3AK.js +0 -850
  50. package/dist/chunk-HVCDY3AK.js.map +0 -1
  51. package/dist/chunk-I2O5OYQT.js +0 -727
  52. package/dist/chunk-I2O5OYQT.js.map +0 -1
  53. package/dist/chunk-JVMBCWKS.js +0 -348
  54. package/dist/chunk-JVMBCWKS.js.map +0 -1
  55. package/dist/chunk-KCUOWRPX.js +0 -816
  56. package/dist/chunk-KCUOWRPX.js.map +0 -1
  57. package/dist/chunk-KDHN2ZQE.js +0 -313
  58. package/dist/chunk-KDHN2ZQE.js.map +0 -1
  59. package/dist/chunk-ME2OERF5.js +0 -345
  60. package/dist/chunk-ME2OERF5.js.map +0 -1
  61. package/dist/chunk-OBQ74FOU.js +0 -27
  62. package/dist/chunk-OBQ74FOU.js.map +0 -1
  63. package/dist/chunk-Q5EKA5YA.js +0 -254
  64. package/dist/chunk-Q5EKA5YA.js.map +0 -1
  65. package/dist/chunk-Q63FFI6D.js +0 -132
  66. package/dist/chunk-Q63FFI6D.js.map +0 -1
  67. package/dist/chunk-SASNMSB5.js +0 -12597
  68. package/dist/chunk-SASNMSB5.js.map +0 -1
  69. package/dist/chunk-T63OHG4Q.js +0 -440
  70. package/dist/chunk-T63OHG4Q.js.map +0 -1
  71. package/dist/chunk-TN5WEKWI.js +0 -173
  72. package/dist/chunk-TN5WEKWI.js.map +0 -1
  73. package/dist/chunk-VUL52BQL.js +0 -402
  74. package/dist/chunk-VUL52BQL.js.map +0 -1
  75. package/dist/chunk-VVITXIHN.js +0 -189
  76. package/dist/chunk-VVITXIHN.js.map +0 -1
  77. package/dist/chunk-WCN7S3EI.js +0 -14
  78. package/dist/chunk-WCN7S3EI.js.map +0 -1
  79. package/dist/chunk-XPZZFPBZ.js +0 -491
  80. package/dist/chunk-XPZZFPBZ.js.map +0 -1
  81. package/dist/chunk-ZJF5FTBX.js +0 -1396
  82. package/dist/chunk-ZJF5FTBX.js.map +0 -1
  83. package/dist/chunk-ZV2K6M7T.js +0 -74
  84. package/dist/chunk-ZV2K6M7T.js.map +0 -1
  85. package/dist/cli/create-agent.d.ts +0 -1
  86. package/dist/cli/create-agent.js +0 -1050
  87. package/dist/cli/create-agent.js.map +0 -1
  88. package/dist/cli/yolo-daemon.d.ts +0 -1
  89. package/dist/cli/yolo-daemon.js +0 -421
  90. package/dist/cli/yolo-daemon.js.map +0 -1
  91. package/dist/client-NJPZE5JT.js +0 -28
  92. package/dist/client-NJPZE5JT.js.map +0 -1
  93. package/dist/codebase-index-VAPF32XX.js +0 -12
  94. package/dist/codebase-index-VAPF32XX.js.map +0 -1
  95. package/dist/fast-analyzer-3GCCZMLK.js +0 -216
  96. package/dist/fast-analyzer-3GCCZMLK.js.map +0 -1
  97. package/dist/git-EO5SRFMN.js +0 -28
  98. package/dist/git-EO5SRFMN.js.map +0 -1
  99. package/dist/github-ingester-ZOKK6GRS.js +0 -11
  100. package/dist/github-ingester-ZOKK6GRS.js.map +0 -1
  101. package/dist/goal-manager-QUKX2W6C.js +0 -25
  102. package/dist/goal-manager-QUKX2W6C.js.map +0 -1
  103. package/dist/goal-validator-2SFSKKVU.js +0 -24
  104. package/dist/goal-validator-2SFSKKVU.js.map +0 -1
  105. package/dist/graph-B3NA4S7I.js +0 -10
  106. package/dist/graph-B3NA4S7I.js.map +0 -1
  107. package/dist/hypothesis-KCPBR652.js +0 -23
  108. package/dist/hypothesis-KCPBR652.js.map +0 -1
  109. package/dist/incident-index-EFNUSGWL.js +0 -11
  110. package/dist/incident-index-EFNUSGWL.js.map +0 -1
  111. package/dist/insight-store-EC4PLSAW.js +0 -22
  112. package/dist/insight-store-EC4PLSAW.js.map +0 -1
  113. package/dist/issue-store-YAXTNRRY.js +0 -36
  114. package/dist/issue-store-YAXTNRRY.js.map +0 -1
  115. package/dist/ledger-TWZTGDFA.js +0 -58
  116. package/dist/ledger-TWZTGDFA.js.map +0 -1
  117. package/dist/linear-ingester-XXPAZZRW.js +0 -11
  118. package/dist/linear-ingester-XXPAZZRW.js.map +0 -1
  119. package/dist/output-manager-RVJ37XKA.js +0 -13
  120. package/dist/output-manager-RVJ37XKA.js.map +0 -1
  121. package/dist/parse-goal-violation-SACGFG3C.js +0 -8
  122. package/dist/parse-goal-violation-SACGFG3C.js.map +0 -1
  123. package/dist/pattern-discovery-F7LU5K6E.js +0 -8
  124. package/dist/pattern-discovery-F7LU5K6E.js.map +0 -1
  125. package/dist/progress-SRQ2V3BP.js +0 -18
  126. package/dist/progress-SRQ2V3BP.js.map +0 -1
  127. package/dist/project-state-AHPA77SM.js +0 -28
  128. package/dist/project-state-AHPA77SM.js.map +0 -1
  129. package/dist/sync-M2FSWPBC.js +0 -12
  130. package/dist/sync-M2FSWPBC.js.map +0 -1
  131. package/dist/terminal-spawn-5YXDMUCF.js +0 -157
  132. package/dist/terminal-spawn-5YXDMUCF.js.map +0 -1
  133. package/dist/tiered-storage-DYNC5CQ6.js +0 -13
  134. package/dist/tiered-storage-DYNC5CQ6.js.map +0 -1
  135. package/dist/trie-agent-I3HAHY2G.js +0 -26
  136. package/dist/trie-agent-I3HAHY2G.js.map +0 -1
  137. package/dist/ui/chat.html +0 -1014
  138. package/dist/ui/goals.html +0 -967
  139. package/dist/ui/hypotheses.html +0 -1011
  140. package/dist/ui/ledger.html +0 -954
  141. package/dist/ui/nudges.html +0 -995
  142. package/dist/vibe-code-signatures-5ZULYP3D.js +0 -987
  143. package/dist/vibe-code-signatures-5ZULYP3D.js.map +0 -1
  144. package/dist/vulnerability-signatures-2URZSXAQ.js +0 -983
  145. package/dist/vulnerability-signatures-2URZSXAQ.js.map +0 -1
package/dist/chunk-ZJF5FTBX.js
@@ -1,1396 +0,0 @@
1
- import {
2
- atomicWriteJSON
3
- } from "./chunk-43X6JBEM.js";
4
- import {
5
- getTrieDirectory,
6
- getWorkingDirectory
7
- } from "./chunk-VVITXIHN.js";
8
- import {
9
- getLastCommit,
10
- isGitRepo
11
- } from "./chunk-2HF65EHQ.js";
12
- import {
13
- __require
14
- } from "./chunk-DGUM43GV.js";
15
-
16
- // src/memory/ledger.ts
17
- import { createHash } from "crypto";
18
- import { mkdir as mkdir2, readFile as readFile2, writeFile, stat as stat2, unlink as unlink2 } from "fs/promises";
19
- import { existsSync as existsSync4 } from "fs";
20
- import { createGzip, createGunzip } from "zlib";
21
- import { pipeline } from "stream/promises";
22
- import { createReadStream, createWriteStream } from "fs";
23
- import { join as join3 } from "path";
24
-
25
- // src/utils/file-lock.ts
26
- import { open, unlink, readFile, stat, mkdir } from "fs/promises";
27
- import { existsSync } from "fs";
28
- import { dirname } from "path";
29
- var activeLocks = /* @__PURE__ */ new Set();
30
- function setupCleanupHandler() {
31
- const cleanup = () => {
32
- for (const lockPath of activeLocks) {
33
- try {
34
- const fs = __require("fs");
35
- if (fs.existsSync(lockPath)) {
36
- fs.unlinkSync(lockPath);
37
- }
38
- } catch {
39
- }
40
- }
41
- };
42
- process.on("exit", cleanup);
43
- process.on("SIGINT", () => {
44
- cleanup();
45
- process.exit(130);
46
- });
47
- process.on("SIGTERM", () => {
48
- cleanup();
49
- process.exit(143);
50
- });
51
- }
52
- var cleanupHandlerInstalled = false;
53
- async function withFileLock(filePath, fn, options = {}) {
54
- const {
55
- timeout = 1e4,
56
- retryDelay = 50,
57
- maxRetryDelay = 500,
58
- staleTimeout = 3e4
59
- } = options;
60
- const lockPath = `${filePath}.lock`;
61
- const start = Date.now();
62
- let currentDelay = retryDelay;
63
- if (!cleanupHandlerInstalled) {
64
- setupCleanupHandler();
65
- cleanupHandlerInstalled = true;
66
- }
67
- await mkdir(dirname(lockPath), { recursive: true });
68
- while (true) {
69
- try {
70
- await cleanupStaleLock(lockPath, staleTimeout);
71
- const lockInfo = {
72
- pid: process.pid,
73
- timestamp: Date.now(),
74
- hostname: __require("os").hostname()
75
- };
76
- const handle = await open(lockPath, "wx");
77
- await handle.writeFile(JSON.stringify(lockInfo));
78
- await handle.close();
79
- activeLocks.add(lockPath);
80
- try {
81
- return await fn();
82
- } finally {
83
- activeLocks.delete(lockPath);
84
- await unlink(lockPath).catch(() => {
85
- });
86
- }
87
- } catch (err) {
88
- if (err.code !== "EEXIST") {
89
- throw err;
90
- }
91
- if (Date.now() - start > timeout) {
92
- let lockHolder = "unknown";
93
- try {
94
- const content = await readFile(lockPath, "utf-8");
95
- const info = JSON.parse(content);
96
- lockHolder = `PID ${info.pid} on ${info.hostname}`;
97
- } catch {
98
- }
99
- throw new Error(
100
- `Timeout acquiring lock for ${filePath} after ${timeout}ms. Lock held by: ${lockHolder}`
101
- );
102
- }
103
- await new Promise((resolve) => setTimeout(resolve, currentDelay));
104
- currentDelay = Math.min(currentDelay * 2, maxRetryDelay);
105
- }
106
- }
107
- }
108
- async function cleanupStaleLock(lockPath, staleTimeout) {
109
- if (!existsSync(lockPath)) {
110
- return;
111
- }
112
- try {
113
- const content = await readFile(lockPath, "utf-8");
114
- const info = JSON.parse(content);
115
- const lockAge = Date.now() - info.timestamp;
116
- if (lockAge > staleTimeout) {
117
- console.warn(`[FileLock] Removing stale lock (age: ${Math.round(lockAge / 1e3)}s): ${lockPath}`);
118
- await unlink(lockPath);
119
- return;
120
- }
121
- const currentHostname = __require("os").hostname();
122
- if (info.hostname === currentHostname) {
123
- if (!isProcessRunning(info.pid)) {
124
- console.warn(`[FileLock] Removing orphaned lock (PID ${info.pid} not running): ${lockPath}`);
125
- await unlink(lockPath);
126
- return;
127
- }
128
- }
129
- } catch {
130
- try {
131
- const stats = await stat(lockPath);
132
- const fileAge = Date.now() - stats.mtimeMs;
133
- if (fileAge > staleTimeout) {
134
- console.warn(`[FileLock] Removing stale/corrupted lock: ${lockPath}`);
135
- await unlink(lockPath);
136
- }
137
- } catch {
138
- }
139
- }
140
- }
141
- function isProcessRunning(pid) {
142
- try {
143
- process.kill(pid, 0);
144
- return true;
145
- } catch {
146
- return false;
147
- }
148
- }
149
-
150
- // src/memory/crypto-keys.ts
151
- import * as ed25519 from "@noble/ed25519";
152
- import { randomBytes } from "crypto";
153
- import { existsSync as existsSync2, mkdirSync, readFileSync, writeFileSync } from "fs";
154
- import { join } from "path";
155
- function getKeysDirectory(workDir) {
156
- const trieDir = getTrieDirectory(workDir || process.cwd());
157
- return join(trieDir, "keys");
158
- }
159
- function getDefaultKeyPath(workDir) {
160
- return join(getKeysDirectory(workDir), "signing-key.json");
161
- }
162
- async function generateKeyPair() {
163
- const privateKeyBytes = randomBytes(32);
164
- const publicKeyBytes = await ed25519.getPublicKeyAsync(privateKeyBytes);
165
- return {
166
- publicKey: Buffer.from(publicKeyBytes).toString("hex"),
167
- privateKey: Buffer.from(privateKeyBytes).toString("hex")
168
- };
169
- }
170
- function saveKeyPair(keyPair, workDir) {
171
- const keysDir = getKeysDirectory(workDir);
172
- if (!existsSync2(keysDir)) {
173
- mkdirSync(keysDir, { recursive: true });
174
- }
175
- const keyPath = getDefaultKeyPath(workDir);
176
- const keyData = {
177
- ...keyPair,
178
- createdAt: (/* @__PURE__ */ new Date()).toISOString(),
179
- version: 1
180
- };
181
- writeFileSync(keyPath, JSON.stringify(keyData, null, 2), "utf-8");
182
- console.error("\u26A0\uFE0F Signing key created at:", keyPath);
183
- console.error(" Keep this file secure and add to .gitignore");
184
- }
185
- function loadKeyPair(workDir) {
186
- const keyPath = getDefaultKeyPath(workDir);
187
- if (!existsSync2(keyPath)) {
188
- return null;
189
- }
190
- try {
191
- const keyData = JSON.parse(readFileSync(keyPath, "utf-8"));
192
- return {
193
- publicKey: keyData.publicKey,
194
- privateKey: keyData.privateKey
195
- };
196
- } catch (error) {
197
- console.error("Failed to load signing key:", error);
198
- return null;
199
- }
200
- }
201
- async function getOrCreateKeyPair(workDir) {
202
- const existing = loadKeyPair(workDir);
203
- if (existing) {
204
- return existing;
205
- }
206
- const newKeyPair = await generateKeyPair();
207
- saveKeyPair(newKeyPair, workDir);
208
- return newKeyPair;
209
- }
210
- async function signData(data, privateKey) {
211
- const dataBytes = Buffer.from(data, "utf-8");
212
- const privateKeyBytes = Buffer.from(privateKey, "hex");
213
- const signatureBytes = await ed25519.signAsync(dataBytes, privateKeyBytes);
214
- return Buffer.from(signatureBytes).toString("hex");
215
- }
216
- async function verifySignature(data, signature, publicKey) {
217
- try {
218
- const dataBytes = Buffer.from(data, "utf-8");
219
- const signatureBytes = Buffer.from(signature, "hex");
220
- const publicKeyBytes = Buffer.from(publicKey, "hex");
221
- return await ed25519.verifyAsync(signatureBytes, dataBytes, publicKeyBytes);
222
- } catch (error) {
223
- console.error("Signature verification failed:", error);
224
- return false;
225
- }
226
- }
227
- async function signHash(hash, workDir) {
228
- const keyPair = await getOrCreateKeyPair(workDir);
229
- const signature = await signData(hash, keyPair.privateKey);
230
- return {
231
- signature,
232
- publicKey: keyPair.publicKey,
233
- algorithm: "Ed25519",
234
- signedAt: (/* @__PURE__ */ new Date()).toISOString()
235
- };
236
- }
237
- async function verifyHashSignature(hash, signatureData) {
238
- if (signatureData.algorithm !== "Ed25519") {
239
- console.error("Unsupported signature algorithm:", signatureData.algorithm);
240
- return false;
241
- }
242
- return await verifySignature(hash, signatureData.signature, signatureData.publicKey);
243
- }
244
- function getPublicKey(workDir) {
245
- const keyPair = loadKeyPair(workDir);
246
- return keyPair?.publicKey || null;
247
- }
248
- function hasSigningKey(workDir) {
249
- const keyPath = getDefaultKeyPath(workDir);
250
- return existsSync2(keyPath);
251
- }
252
-
253
- // src/memory/git-integration.ts
254
- import { exec } from "child_process";
255
- import { promisify } from "util";
256
- import { existsSync as existsSync3 } from "fs";
257
- import { join as join2 } from "path";
258
- var execAsync = promisify(exec);
259
- async function isGitIntegrationEnabled(workDir) {
260
- try {
261
- const gitDir = join2(workDir, ".git");
262
- if (!existsSync3(gitDir)) {
263
- return false;
264
- }
265
- const configPath = join2(getTrieDirectory(workDir), "config.json");
266
- if (!existsSync3(configPath)) {
267
- return true;
268
- }
269
- const config = JSON.parse(await import("fs/promises").then((fs) => fs.readFile(configPath, "utf-8")));
270
- return config.gitIntegration !== false;
271
- } catch {
272
- return false;
273
- }
274
- }
275
- async function autoCommitLedger(workDir, message) {
276
- try {
277
- const enabled = await isGitIntegrationEnabled(workDir);
278
- if (!enabled) {
279
- return { committed: false, error: "Git integration disabled" };
280
- }
281
- const ledgerPath = join2(getTrieDirectory(workDir), "memory", "ledger.json");
282
- const { stdout: statusOutput } = await execAsync("git status --porcelain", { cwd: workDir });
283
- const hasLedgerChanges = statusOutput.includes("ledger.json");
284
- if (!hasLedgerChanges) {
285
- return { committed: false, error: "No ledger changes to commit" };
286
- }
287
- await execAsync(`git add ${ledgerPath}`, { cwd: workDir });
288
- const commitMessage = message || "ledger: append entries";
289
- await execAsync(
290
- `git commit -m "${commitMessage}"`,
291
- { cwd: workDir }
292
- );
293
- const { stdout: hashOutput } = await execAsync("git rev-parse HEAD", { cwd: workDir });
294
- const commitHash = hashOutput.trim();
295
- return {
296
- committed: true,
297
- commitHash
298
- };
299
- } catch (error) {
300
- return {
301
- committed: false,
302
- error: error instanceof Error ? error.message : String(error)
303
- };
304
- }
305
- }
306
- async function ensureKeysIgnored(workDir) {
307
- try {
308
- const gitignorePath = join2(workDir, ".gitignore");
309
- const fs = await import("fs/promises");
310
- let gitignore = "";
311
- if (existsSync3(gitignorePath)) {
312
- gitignore = await fs.readFile(gitignorePath, "utf-8");
313
- }
314
- if (gitignore.includes(".trie/keys/")) {
315
- return;
316
- }
317
- const addition = "\n# Trie signing keys (keep secure, do not commit)\n.trie/keys/\n";
318
- await fs.appendFile(gitignorePath, addition, "utf-8");
319
- } catch (error) {
320
- console.error("Failed to update .gitignore:", error);
321
- }
322
- }
323
-
324
- // src/memory/ledger.ts
325
- var LEDGER_FILENAME = "ledger.json";
326
- var SHARED_LEDGER_DIR = ".trie-shared";
327
- var MANIFEST_FILENAME = "ledger-manifest.json";
328
- var SYNC_STATE_FILENAME = "ledger-sync.json";
329
- var GENESIS_HASH = "0".repeat(64);
330
- var LEDGER_VERSION = 2;
331
- var ConcurrentModificationError = class extends Error {
332
- constructor(message) {
333
- super(message);
334
- this.name = "ConcurrentModificationError";
335
- }
336
- };
337
- async function signLedgerEntry(entry, workDir) {
338
- try {
339
- const signatureData = await signHash(entry.hash, workDir);
340
- return {
341
- ...entry,
342
- signature: signatureData.signature,
343
- publicKey: signatureData.publicKey,
344
- signedAt: signatureData.signedAt,
345
- signatureAlgorithm: "Ed25519"
346
- };
347
- } catch (error) {
348
- console.error("Failed to sign ledger entry:", error);
349
- return entry;
350
- }
351
- }
352
- async function verifyLedgerEntry(entry) {
353
- if (!entry.signature || !entry.publicKey) {
354
- return true;
355
- }
356
- if (entry.signatureAlgorithm !== "Ed25519") {
357
- console.error("Unsupported signature algorithm:", entry.signatureAlgorithm);
358
- return false;
359
- }
360
- const signatureData = {
361
- signature: entry.signature,
362
- publicKey: entry.publicKey,
363
- algorithm: "Ed25519",
364
- signedAt: entry.signedAt || ""
365
- };
366
- return await verifyHashSignature(entry.hash, signatureData);
367
- }
368
- async function verifyBlockSignatures(block) {
369
- const invalidEntries = [];
370
- for (const entry of block.entries) {
371
- const isValid = await verifyLedgerEntry(entry);
372
- if (!isValid) {
373
- invalidEntries.push(entry.id);
374
- }
375
- }
376
- return {
377
- valid: invalidEntries.length === 0,
378
- invalidEntries
379
- };
380
- }
381
- async function appendIssuesToLedger(issues, workDir, author) {
382
- if (issues.length === 0) return null;
383
- const projectDir = workDir || getWorkingDirectory(void 0, true);
384
- const memoryDir = join3(getTrieDirectory(projectDir), "memory");
385
- await mkdir2(memoryDir, { recursive: true });
386
- const ledgerPath = join3(memoryDir, LEDGER_FILENAME);
387
- return withFileLock(ledgerPath, async () => {
388
- const isRepo = await isGitRepo(projectDir);
389
- const lastCommit = isRepo ? await getLastCommit(projectDir) : null;
390
- const blockAuthor = author || lastCommit?.author || "unknown";
391
- const blocks = await loadLedger(projectDir);
392
- const today = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
393
- const now = (/* @__PURE__ */ new Date()).toISOString();
394
- const shouldSign = hasSigningKey(projectDir);
395
- let entries = await Promise.all(issues.map(async (issue) => {
396
- const context = {
397
- detectionMethod: "automated-scan",
398
- confidence: issue.confidence || 80,
399
- ruleId: issue.agent,
400
- exposureScope: "repository",
401
- riskFactors: [],
402
- escalatedTo: blockAuthor
403
- };
404
- if (issue.line !== void 0) {
405
- context.line = issue.line;
406
- }
407
- if (lastCommit?.hash) {
408
- context.gitCommit = lastCommit.hash;
409
- }
410
- if (lastCommit?.branch) {
411
- context.gitBranch = lastCommit.branch;
412
- }
413
- if (issue.category === "security") {
414
- context.complianceViolations = ["security-policy"];
415
- context.requiresNotification = issue.severity === "critical";
416
- }
417
- if (issue.category === "pii" || issue.issue?.toLowerCase().includes("personal")) {
418
- context.complianceViolations = ["GDPR", "privacy-policy"];
419
- context.requiresNotification = true;
420
- context.exposureScope = "public";
421
- context.riskFactors.push("data-exposure", "privacy-violation");
422
- }
423
- try {
424
- const filePath = join3(projectDir, issue.file);
425
- if (existsSync4(filePath)) {
426
- const { readFile: readFile3, stat: stat3 } = await import("fs/promises");
427
- const stats = await stat3(filePath);
428
- context.fileSize = stats.size;
429
- context.fileModified = stats.mtime.toISOString();
430
- if (issue.line && issue.line > 0) {
431
- const content = await readFile3(filePath, "utf-8");
432
- const lines = content.split("\n");
433
- const startLine = Math.max(0, issue.line - 3);
434
- const endLine = Math.min(lines.length, issue.line + 2);
435
- const snippet = lines.slice(startLine, endLine).join("\n");
436
- context.codeSnippet = snippet.length > 500 ? snippet.slice(0, 500) + "..." : snippet;
437
- }
438
- }
439
- } catch (error) {
440
- console.debug("Failed to collect file metadata:", error);
441
- }
442
- return {
443
- id: issue.id,
444
- hash: issue.hash,
445
- severity: issue.severity,
446
- file: issue.file,
447
- agent: issue.agent,
448
- timestamp: issue.timestamp,
449
- status: "active",
450
- // Include semantic content for ambient awareness
451
- issue: issue.issue,
452
- fix: issue.fix,
453
- // Enhanced forensic context
454
- context
455
- };
456
- }));
457
- if (shouldSign) {
458
- entries = await Promise.all(entries.map((entry) => signLedgerEntry(entry, projectDir)));
459
- }
460
- const previousBlock = blocks[blocks.length - 1];
461
- const block = previousBlock && previousBlock.date === today ? previousBlock : await createSyncableBlock(today, now, previousBlock?.blockHash ?? GENESIS_HASH, blockAuthor, projectDir, lastCommit?.hash, blocks.length);
462
- if (block !== previousBlock) {
463
- blocks.push(block);
464
- }
465
- block.entries = [...block.entries, ...entries];
466
- block.merkleRoot = computeMerkleRoot(block.entries.map((entry) => entry.hash));
467
- block.blockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);
468
- block.updatedAt = now;
469
- await saveLedgerInternal(blocks, projectDir);
470
- if (await isGitRepo(projectDir)) {
471
- await ensureKeysIgnored(projectDir);
472
- const commitMessage = `ledger: append ${entries.length} ${entries.length === 1 ? "entry" : "entries"}`;
473
- await autoCommitLedger(projectDir, commitMessage);
474
- }
475
- return block;
476
- }, { timeout: 15e3 });
477
- }
478
- async function verifyLedger(workDir) {
479
- const projectDir = workDir || getWorkingDirectory(void 0, true);
480
- const blocks = await loadLedger(projectDir);
481
- if (blocks.length === 0) {
482
- return { valid: true };
483
- }
484
- for (let i = 0; i < blocks.length; i += 1) {
485
- const block = blocks[i];
486
- if (!block) {
487
- return { valid: false, error: `Block ${i} missing` };
488
- }
489
- const expectedPreviousHash = i === 0 ? GENESIS_HASH : blocks[i - 1]?.blockHash;
490
- if (!expectedPreviousHash) {
491
- return { valid: false, error: `Block ${i} missing previous block` };
492
- }
493
- if (block.previousHash !== expectedPreviousHash) {
494
- return { valid: false, error: `Block ${i} previous hash mismatch` };
495
- }
496
- const computedMerkleRoot = computeMerkleRoot(block.entries.map((entry) => entry.hash));
497
- if (block.merkleRoot !== computedMerkleRoot) {
498
- return { valid: false, error: `Block ${i} merkle root mismatch` };
499
- }
500
- const computedBlockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);
501
- if (block.blockHash !== computedBlockHash) {
502
- return { valid: false, error: `Block ${i} block hash mismatch` };
503
- }
504
- const signatureVerification = await verifyBlockSignatures(block);
505
- if (!signatureVerification.valid) {
506
- return {
507
- valid: false,
508
- error: `Block ${i} has invalid signatures: ${signatureVerification.invalidEntries.join(", ")}`
509
- };
510
- }
511
- }
512
- return { valid: true };
513
- }
514
- function computeMerkleRoot(hashes) {
515
- if (hashes.length === 0) {
516
- return sha256("");
517
- }
518
- let level = hashes.slice();
519
- while (level.length > 1) {
520
- const nextLevel = [];
521
- for (let i = 0; i < level.length; i += 2) {
522
- const left = level[i];
523
- const right = level[i + 1] ?? left;
524
- nextLevel.push(sha256(`${left}:${right}`));
525
- }
526
- level = nextLevel;
527
- }
528
- return level[0];
529
- }
530
- function computeBlockHash(previousHash, merkleRoot, date, version) {
531
- return sha256(`${version}:${date}:${previousHash}:${merkleRoot}`);
532
- }
533
- async function createSyncableBlock(date, now, previousHash, author, projectDir, gitCommit, chainHeight = 0) {
534
- const metadata = {};
535
- try {
536
- metadata.hostname = __require("os").hostname();
537
- metadata.workingDirectory = projectDir;
538
- metadata.scanId = `scan-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
539
- metadata.toolVersion = process.env.npm_package_version || "unknown";
540
- if (await isGitRepo(projectDir)) {
541
- const { getLastCommit: getLastCommit2 } = await import("./git-EO5SRFMN.js");
542
- const lastCommit = await getLastCommit2(projectDir);
543
- if (lastCommit?.hash) {
544
- metadata.gitHeadCommit = lastCommit.hash;
545
- }
546
- }
547
- metadata.summary = {
548
- totalIssues: 0,
549
- issuesBySeverity: {},
550
- issuesByAgent: {},
551
- newIssues: 0,
552
- resolvedIssues: 0
553
- };
554
- } catch (error) {
555
- console.debug("Failed to collect block metadata:", error);
556
- }
557
- return {
558
- version: LEDGER_VERSION,
559
- date,
560
- entries: [],
561
- previousHash,
562
- merkleRoot: "",
563
- blockHash: "",
564
- createdAt: now,
565
- updatedAt: now,
566
- author,
567
- chainHeight,
568
- metadata,
569
- ...gitCommit && { gitCommit }
570
- };
571
- }
572
- async function loadLedger(projectDir) {
573
- const result = await loadLedgerWithHash(projectDir);
574
- return result.blocks;
575
- }
576
- async function loadLedgerWithHash(projectDir) {
577
- const ledgerPath = join3(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
578
- try {
579
- if (!existsSync4(ledgerPath)) {
580
- return { blocks: [], contentHash: sha256("[]") };
581
- }
582
- const content = await readFile2(ledgerPath, "utf-8");
583
- const parsed = JSON.parse(content);
584
- if (parsed && parsed._format === "ledger-v2") {
585
- const file = parsed;
586
- return {
587
- blocks: file.blocks || [],
588
- contentHash: file._contentHash
589
- };
590
- }
591
- if (!Array.isArray(parsed)) {
592
- return { blocks: [], contentHash: sha256("[]") };
593
- }
594
- const blocks = parsed;
595
- const contentHash = sha256(JSON.stringify(blocks));
596
- return { blocks, contentHash };
597
- } catch {
598
- return { blocks: [], contentHash: sha256("[]") };
599
- }
600
- }
601
- async function getLedgerBlocks(workDir) {
602
- const projectDir = workDir || getWorkingDirectory(void 0, true);
603
- return loadLedger(projectDir);
604
- }
605
- async function getLedgerBlocksWithHash(workDir) {
606
- const projectDir = workDir || getWorkingDirectory(void 0, true);
607
- return loadLedgerWithHash(projectDir);
608
- }
609
- async function saveLedgerOptimistic(blocks, expectedHash, workDir) {
610
- const projectDir = workDir || getWorkingDirectory(void 0, true);
611
- const memoryDir = join3(getTrieDirectory(projectDir), "memory");
612
- const ledgerPath = join3(memoryDir, LEDGER_FILENAME);
613
- await withFileLock(ledgerPath, async () => {
614
- await saveLedgerWithConcurrencyCheck(blocks, projectDir, expectedHash);
615
- }, { timeout: 1e4 });
616
- }
617
- async function saveLedger(blocks, projectDir) {
618
- const ledgerPath = join3(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
619
- await withFileLock(ledgerPath, async () => {
620
- await saveLedgerWithFormat(ledgerPath, blocks);
621
- }, { timeout: 1e4 });
622
- }
623
- async function saveLedgerInternal(blocks, projectDir) {
624
- const ledgerPath = join3(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
625
- await saveLedgerWithFormat(ledgerPath, blocks);
626
- }
627
- async function saveLedgerWithConcurrencyCheck(blocks, projectDir, expectedHash) {
628
- const ledgerPath = join3(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
629
- const { contentHash: currentHash } = await loadLedgerWithHash(projectDir);
630
- if (currentHash !== expectedHash) {
631
- throw new ConcurrentModificationError(
632
- `Ledger was modified by another process. Expected hash: ${expectedHash.slice(0, 16)}..., Current hash: ${currentHash.slice(0, 16)}...`
633
- );
634
- }
635
- await saveLedgerWithFormat(ledgerPath, blocks);
636
- }
637
- async function saveLedgerWithFormat(ledgerPath, blocks) {
638
- const blocksJson = JSON.stringify(blocks);
639
- const contentHash = sha256(blocksJson);
640
- const ledgerFile = {
641
- _format: "ledger-v2",
642
- _contentHash: contentHash,
643
- _lastModified: (/* @__PURE__ */ new Date()).toISOString(),
644
- blocks
645
- };
646
- await atomicWriteJSON(ledgerPath, ledgerFile);
647
- }
648
- function sha256(input) {
649
- return createHash("sha256").update(input).digest("hex");
650
- }
651
- function getSharedLedgerDir(projectDir) {
652
- return join3(projectDir, SHARED_LEDGER_DIR);
653
- }
654
- function getActiveBlocksDir(projectDir) {
655
- return join3(getSharedLedgerDir(projectDir), "active");
656
- }
657
- function getArchivedBlocksDir(projectDir) {
658
- return join3(getSharedLedgerDir(projectDir), "archived");
659
- }
660
- function getManifestPath(projectDir) {
661
- return join3(getSharedLedgerDir(projectDir), MANIFEST_FILENAME);
662
- }
663
- function getSyncStatePath(projectDir) {
664
- return join3(getTrieDirectory(projectDir), "memory", SYNC_STATE_FILENAME);
665
- }
666
- async function ensureSharedStorageStructure(projectDir) {
667
- const sharedDir = getSharedLedgerDir(projectDir);
668
- const activeDir = getActiveBlocksDir(projectDir);
669
- const archivedDir = getArchivedBlocksDir(projectDir);
670
- await mkdir2(sharedDir, { recursive: true });
671
- await mkdir2(activeDir, { recursive: true });
672
- await mkdir2(archivedDir, { recursive: true });
673
- }
674
- async function loadManifest(projectDir) {
675
- const manifestPath = getManifestPath(projectDir);
676
- try {
677
- if (!existsSync4(manifestPath)) return null;
678
- const content = await readFile2(manifestPath, "utf-8");
679
- return JSON.parse(content);
680
- } catch {
681
- return null;
682
- }
683
- }
684
- async function saveManifest(manifest, projectDir) {
685
- const manifestPath = getManifestPath(projectDir);
686
- await atomicWriteJSON(manifestPath, manifest);
687
- }
688
- async function createDefaultManifest(_projectDir) {
689
- const now = (/* @__PURE__ */ new Date()).toISOString();
690
- return {
691
- version: LEDGER_VERSION,
692
- created: now,
693
- lastSync: now,
694
- totalBlocks: 0,
695
- totalEntries: 0,
696
- activeBlocks: [],
697
- archivedBlocks: [],
698
- index: {
699
- byDate: {},
700
- byAuthor: {},
701
- bySeverity: {}
702
- },
703
- compressionConfig: {
704
- enabled: true,
705
- archiveAfterDays: 30,
706
- compressionLevel: 6,
707
- maxHotStorageSize: 50 * 1024 * 1024
708
- // 50MB
709
- }
710
- };
711
- }
712
- async function loadSyncState(projectDir) {
713
- const syncStatePath = getSyncStatePath(projectDir);
714
- try {
715
- if (!existsSync4(syncStatePath)) return null;
716
- const content = await readFile2(syncStatePath, "utf-8");
717
- return JSON.parse(content);
718
- } catch {
719
- return null;
720
- }
721
- }
722
- async function saveSyncState(syncState, projectDir) {
723
- const syncStatePath = getSyncStatePath(projectDir);
724
- const memoryDir = join3(getTrieDirectory(projectDir), "memory");
725
- await mkdir2(memoryDir, { recursive: true });
726
- await atomicWriteJSON(syncStatePath, syncState);
727
- }
728
- async function initializeSharedLedger(workDir) {
729
- const projectDir = workDir || getWorkingDirectory(void 0, true);
730
- await ensureSharedStorageStructure(projectDir);
731
- const existingManifest = await loadManifest(projectDir);
732
- if (!existingManifest) {
733
- const manifest = await createDefaultManifest(projectDir);
734
- await saveManifest(manifest, projectDir);
735
- }
736
- const existingSyncState = await loadSyncState(projectDir);
737
- if (!existingSyncState) {
738
- const now = (/* @__PURE__ */ new Date()).toISOString();
739
- const syncState = {
740
- lastSyncTimestamp: now,
741
- conflicts: [],
742
- localChanges: false,
743
- sharedChanges: false
744
- };
745
- await saveSyncState(syncState, projectDir);
746
- }
747
- }
748
- async function syncLedgerFromShared(workDir) {
749
- const projectDir = workDir || getWorkingDirectory(void 0, true);
750
- const memoryDir = join3(getTrieDirectory(projectDir), "memory");
751
- const ledgerPath = join3(memoryDir, LEDGER_FILENAME);
752
- await initializeSharedLedger(projectDir);
753
- return withFileLock(ledgerPath, async () => {
754
- const manifest = await loadManifest(projectDir);
755
- const localBlocks = await loadLedger(projectDir);
756
- const sharedBlocks = await loadSharedBlocks(projectDir);
757
- if (!manifest) {
758
- throw new Error("Failed to load ledger manifest");
759
- }
760
- const mergeResult = await mergeChains(localBlocks, sharedBlocks, "timestamp");
761
- await saveLedgerInternal(mergeResult.mergedChain, projectDir);
762
- const syncState = {
763
- lastSyncTimestamp: (/* @__PURE__ */ new Date()).toISOString(),
764
- conflicts: mergeResult.conflicts,
765
- localChanges: false,
766
- sharedChanges: false
767
- };
768
- await saveSyncState(syncState, projectDir);
769
- return mergeResult;
770
- }, { timeout: 3e4 });
771
- }
772
- async function pushLedgerToShared(workDir) {
773
- const projectDir = workDir || getWorkingDirectory(void 0, true);
774
- const memoryDir = join3(getTrieDirectory(projectDir), "memory");
775
- const ledgerPath = join3(memoryDir, LEDGER_FILENAME);
776
- await initializeSharedLedger(projectDir);
777
- await withFileLock(ledgerPath, async () => {
778
- const localBlocks = await loadLedger(projectDir);
779
- const manifest = await loadManifest(projectDir) || await createDefaultManifest(projectDir);
780
- const activeDir = getActiveBlocksDir(projectDir);
781
- for (const block of localBlocks) {
782
- const blockFilename = `${block.date}.json`;
783
- const blockPath = join3(activeDir, blockFilename);
784
- if (!existsSync4(blockPath) || block.updatedAt > manifest.lastSync) {
785
- await atomicWriteJSON(blockPath, block);
786
- manifest.index.byDate[block.date] = `active/${blockFilename}`;
787
- const blockAuthor = block.author;
788
- if (blockAuthor) {
789
- if (!manifest.index.byAuthor[blockAuthor]) {
790
- manifest.index.byAuthor[blockAuthor] = [];
791
- }
792
- if (!manifest.index.byAuthor[blockAuthor].includes(`active/${blockFilename}`)) {
793
- manifest.index.byAuthor[blockAuthor].push(`active/${blockFilename}`);
794
- }
795
- }
796
- if (!manifest.activeBlocks.includes(blockFilename)) {
797
- manifest.activeBlocks.push(blockFilename);
798
- }
799
- }
800
- }
801
- manifest.lastSync = (/* @__PURE__ */ new Date()).toISOString();
802
- manifest.totalBlocks = manifest.activeBlocks.length + manifest.archivedBlocks.length;
803
- manifest.totalEntries = localBlocks.reduce((sum, block) => sum + block.entries.length, 0);
804
- await saveManifest(manifest, projectDir);
805
- }, { timeout: 3e4 });
806
- }
807
- async function loadSharedBlocks(projectDir) {
808
- const manifest = await loadManifest(projectDir);
809
- if (!manifest) return [];
810
- const blocks = [];
811
- const activeDir = getActiveBlocksDir(projectDir);
812
- for (const filename of manifest.activeBlocks) {
813
- const blockPath = join3(activeDir, filename);
814
- try {
815
- if (existsSync4(blockPath)) {
816
- const content = await readFile2(blockPath, "utf-8");
817
- const block = JSON.parse(content);
818
- blocks.push(block);
819
- }
820
- } catch (error) {
821
- console.warn(`Failed to load shared block ${filename}:`, error);
822
- }
823
- }
824
- return blocks.sort((a, b) => a.date.localeCompare(b.date));
825
- }
826
- async function mergeChains(localBlocks, remoteBlocks, strategy = "timestamp") {
827
- const conflicts = [];
828
- const mergedBlocks = /* @__PURE__ */ new Map();
829
- const stats = {
830
- localBlocks: localBlocks.length,
831
- remoteBlocks: remoteBlocks.length,
832
- mergedBlocks: 0,
833
- duplicatesRemoved: 0
834
- };
835
- const localByDate = /* @__PURE__ */ new Map();
836
- for (const block of localBlocks) {
837
- localByDate.set(block.date, block);
838
- }
839
- const remoteByDate = /* @__PURE__ */ new Map();
840
- for (const block of remoteBlocks) {
841
- remoteByDate.set(block.date, block);
842
- }
843
- const allDates = /* @__PURE__ */ new Set([
844
- ...Array.from(localByDate.keys()),
845
- ...Array.from(remoteByDate.keys())
846
- ]);
847
- for (const date of Array.from(allDates)) {
848
- const localBlock = localByDate.get(date);
849
- const remoteBlock = remoteByDate.get(date);
850
- if (localBlock && remoteBlock) {
851
- const conflict = detectBlockConflict(localBlock, remoteBlock);
852
- if (conflict) {
853
- conflicts.push(conflict);
854
- const resolvedBlock = resolveConflict(conflict, strategy);
855
- if (resolvedBlock) {
856
- mergedBlocks.set(date, resolvedBlock);
857
- }
858
- } else {
859
- const mergedBlock = mergeBlockEntries(localBlock, remoteBlock);
860
- mergedBlocks.set(date, mergedBlock);
861
- }
862
- } else if (localBlock) {
863
- mergedBlocks.set(date, localBlock);
864
- } else if (remoteBlock) {
865
- mergedBlocks.set(date, remoteBlock);
866
- }
867
- }
868
- const resultBlocks = Array.from(mergedBlocks.values()).sort((a, b) => a.date.localeCompare(b.date)).map((block, index) => ({
869
- ...block,
870
- chainHeight: index
871
- }));
872
- stats.mergedBlocks = resultBlocks.length;
873
- stats.duplicatesRemoved = stats.localBlocks + stats.remoteBlocks - stats.mergedBlocks;
874
- return {
875
- mergedChain: resultBlocks,
876
- conflicts,
877
- resolutionStrategy: strategy,
878
- stats
879
- };
880
- }
881
- function detectBlockConflict(localBlock, remoteBlock) {
882
- if (localBlock.blockHash !== remoteBlock.blockHash) {
883
- return {
884
- type: "hash_mismatch",
885
- description: `Different block content for date ${localBlock.date}`,
886
- localBlock,
887
- remoteBlock
888
- };
889
- }
890
- const hasConflictingEntries = localBlock.entries.some((localEntry) => {
891
- const conflictingRemoteEntry = remoteBlock.entries.find(
892
- (remoteEntry) => remoteEntry.id === localEntry.id && remoteEntry.hash !== localEntry.hash
893
- );
894
- return !!conflictingRemoteEntry;
895
- });
896
- if (hasConflictingEntries) {
897
- return {
898
- type: "duplicate_entry",
899
- description: `Conflicting entries found for date ${localBlock.date}`,
900
- localBlock,
901
- remoteBlock
902
- };
903
- }
904
- return null;
905
- }
906
- function resolveConflict(conflict, strategy) {
907
- if (!conflict.localBlock || !conflict.remoteBlock) {
908
- return null;
909
- }
910
- const mergedBlock = mergeBlockEntries(conflict.localBlock, conflict.remoteBlock);
911
- mergedBlock.conflictResolved = true;
912
- switch (strategy) {
913
- case "timestamp":
914
- if (conflict.remoteBlock.updatedAt > conflict.localBlock.updatedAt) {
915
- mergedBlock.author = `${conflict.remoteBlock.author}+${conflict.localBlock.author}`;
916
- } else {
917
- mergedBlock.author = `${conflict.localBlock.author}+${conflict.remoteBlock.author}`;
918
- }
919
- break;
920
- case "longest":
921
- if (conflict.remoteBlock.entries.length > conflict.localBlock.entries.length) {
922
- mergedBlock.author = `${conflict.remoteBlock.author}+${conflict.localBlock.author}`;
923
- } else {
924
- mergedBlock.author = `${conflict.localBlock.author}+${conflict.remoteBlock.author}`;
925
- }
926
- break;
927
- case "manual":
928
- default:
929
- mergedBlock.author = `${conflict.localBlock.author}+${conflict.remoteBlock.author}`;
930
- break;
931
- }
932
- return mergedBlock;
933
- }
934
- function mergeBlockEntries(localBlock, remoteBlock) {
935
- const entryMap = /* @__PURE__ */ new Map();
936
- for (const entry of localBlock.entries) {
937
- entryMap.set(entry.hash, entry);
938
- }
939
- for (const entry of remoteBlock.entries) {
940
- entryMap.set(entry.hash, entry);
941
- }
942
- const mergedEntries = Array.from(entryMap.values());
943
- const now = (/* @__PURE__ */ new Date()).toISOString();
944
- const authors = [localBlock.author, remoteBlock.author].filter(Boolean);
945
- const combinedAuthor = authors.length > 1 ? authors.join("+") : authors[0] || "unknown";
946
- return {
947
- ...localBlock,
948
- entries: mergedEntries,
949
- author: combinedAuthor,
950
- updatedAt: now,
951
- merkleRoot: computeMerkleRoot(mergedEntries.map((e) => e.hash)),
952
- blockHash: computeBlockHash(
953
- localBlock.previousHash,
954
- computeMerkleRoot(mergedEntries.map((e) => e.hash)),
955
- localBlock.date,
956
- localBlock.version
957
- )
958
- };
959
- }
960
- async function migrateLegacyLedger(workDir) {
961
- const projectDir = workDir || getWorkingDirectory(void 0, true);
962
- const legacyLedgerPath = join3(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
963
- if (!existsSync4(legacyLedgerPath)) {
964
- return false;
965
- }
966
- try {
967
- const legacyBlocks = await loadLedger(projectDir);
968
- if (legacyBlocks.length > 0 && legacyBlocks[0].author !== void 0) {
969
- return false;
970
- }
971
- console.log(`Migrating legacy ledger with ${legacyBlocks.length} blocks...`);
972
- const isRepo = await isGitRepo(projectDir);
973
- const lastCommit = isRepo ? await getLastCommit(projectDir) : null;
974
- const defaultAuthor = lastCommit?.author || "legacy-migration";
975
- const migratedBlocks = legacyBlocks.map((block, index) => ({
976
- ...block,
977
- version: LEDGER_VERSION,
978
- author: defaultAuthor,
979
- chainHeight: index,
980
- syncedAt: (/* @__PURE__ */ new Date()).toISOString(),
981
- ...lastCommit?.hash && { gitCommit: lastCommit.hash }
982
- }));
983
- await initializeSharedLedger(projectDir);
984
- await saveLedger(migratedBlocks, projectDir);
985
- await pushLedgerToShared(projectDir);
986
- const backupPath = `${legacyLedgerPath}.backup.${Date.now()}`;
987
- await writeFile(backupPath, JSON.stringify(legacyBlocks, null, 2));
988
- console.log(`\u2713 Migration complete. Backup saved to ${backupPath}`);
989
- return true;
990
- } catch (error) {
991
- console.error("Failed to migrate legacy ledger:", error);
992
- return false;
993
- }
994
- }
995
- async function detectLegacyLedger(workDir) {
996
- const projectDir = workDir || getWorkingDirectory(void 0, true);
997
- const legacyLedgerPath = join3(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
998
- if (!existsSync4(legacyLedgerPath)) {
999
- return false;
1000
- }
1001
- try {
1002
- const blocks = await loadLedger(projectDir);
1003
- return blocks.length > 0 && blocks[0].author === void 0;
1004
- } catch {
1005
- return false;
1006
- }
1007
- }
1008
- async function getLedgerSyncStatus(workDir) {
1009
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1010
- const hasLegacyLedger = await detectLegacyLedger(projectDir);
1011
- const manifest = await loadManifest(projectDir);
1012
- const syncState = await loadSyncState(projectDir);
1013
- const localBlocks = await loadLedger(projectDir);
1014
- const sharedBlocks = await loadSharedBlocks(projectDir);
1015
- return {
1016
- isInitialized: !!manifest,
1017
- hasLegacyLedger,
1018
- syncState,
1019
- manifest,
1020
- localBlocks: localBlocks.length,
1021
- sharedBlocks: sharedBlocks.length,
1022
- conflicts: syncState?.conflicts.length || 0
1023
- };
1024
- }
1025
- async function compressOldBlocks(workDir) {
1026
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1027
- const manifest = await loadManifest(projectDir);
1028
- if (!manifest || !manifest.compressionConfig.enabled) {
1029
- return { archived: 0, sizeReduction: 0 };
1030
- }
1031
- const activeDir = getActiveBlocksDir(projectDir);
1032
- const archivedDir = getArchivedBlocksDir(projectDir);
1033
- const cutoffDate = /* @__PURE__ */ new Date();
1034
- cutoffDate.setDate(cutoffDate.getDate() - manifest.compressionConfig.archiveAfterDays);
1035
- let archivedCount = 0;
1036
- let originalSize = 0;
1037
- let compressedSize = 0;
1038
- const blocksByMonth = /* @__PURE__ */ new Map();
1039
- for (const blockFile of manifest.activeBlocks) {
1040
- const blockDate = blockFile.replace(".json", "");
1041
- if (new Date(blockDate) < cutoffDate) {
1042
- const monthKey = blockDate.slice(0, 7);
1043
- if (!blocksByMonth.has(monthKey)) {
1044
- blocksByMonth.set(monthKey, []);
1045
- }
1046
- blocksByMonth.get(monthKey).push(blockFile);
1047
- }
1048
- }
1049
- for (const [monthKey, blockFiles] of Array.from(blocksByMonth.entries())) {
1050
- const archivePath = join3(archivedDir, `${monthKey}.tar.gz`);
1051
- if (existsSync4(archivePath)) {
1052
- continue;
1053
- }
1054
- console.log(`Archiving ${blockFiles.length} blocks for ${monthKey}...`);
1055
- const monthlyBlocks = [];
1056
- for (const blockFile of blockFiles) {
1057
- const blockPath = join3(activeDir, blockFile);
1058
- try {
1059
- const stats = await stat2(blockPath);
1060
- originalSize += stats.size;
1061
- const content = await readFile2(blockPath, "utf-8");
1062
- const block = JSON.parse(content);
1063
- monthlyBlocks.push(block);
1064
- } catch (error) {
1065
- console.warn(`Failed to read block ${blockFile}:`, error);
1066
- }
1067
- }
1068
- if (monthlyBlocks.length > 0) {
1069
- const archiveData = JSON.stringify(monthlyBlocks);
1070
- const tempPath = `${archivePath}.tmp`;
1071
- await pipeline(
1072
- Buffer.from(archiveData),
1073
- createGzip({ level: manifest.compressionConfig.compressionLevel }),
1074
- createWriteStream(tempPath)
1075
- );
1076
- const compressedStats = await stat2(tempPath);
1077
- compressedSize += compressedStats.size;
1078
- await writeFile(archivePath, await readFile2(tempPath));
1079
- await unlink2(tempPath);
1080
- for (const blockFile of blockFiles) {
1081
- const blockPath = join3(activeDir, blockFile);
1082
- await unlink2(blockPath);
1083
- const index = manifest.activeBlocks.indexOf(blockFile);
1084
- if (index > -1) {
1085
- manifest.activeBlocks.splice(index, 1);
1086
- }
1087
- }
1088
- manifest.archivedBlocks.push(`${monthKey}.tar.gz`);
1089
- archivedCount += blockFiles.length;
1090
- }
1091
- }
1092
- if (archivedCount > 0) {
1093
- await saveManifest(manifest, projectDir);
1094
- }
1095
- return {
1096
- archived: archivedCount,
1097
- sizeReduction: originalSize > 0 ? Math.round((originalSize - compressedSize) / originalSize * 100) : 0
1098
- };
1099
- }
1100
- async function loadArchivedBlocks(projectDir, monthKey) {
1101
- const archivedDir = getArchivedBlocksDir(projectDir);
1102
- const archivePath = join3(archivedDir, `${monthKey}.tar.gz`);
1103
- if (!existsSync4(archivePath)) {
1104
- return [];
1105
- }
1106
- try {
1107
- const chunks = [];
1108
- await pipeline(
1109
- createReadStream(archivePath),
1110
- createGunzip(),
1111
- async function* (source) {
1112
- for await (const chunk of source) {
1113
- chunks.push(chunk);
1114
- }
1115
- yield Buffer.concat(chunks);
1116
- }
1117
- );
1118
- const decompressedData = Buffer.concat(chunks).toString("utf-8");
1119
- return JSON.parse(decompressedData);
1120
- } catch (error) {
1121
- console.error(`Failed to load archived blocks for ${monthKey}:`, error);
1122
- return [];
1123
- }
1124
- }
1125
- async function getStorageStats(workDir) {
1126
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1127
- const manifest = await loadManifest(projectDir);
1128
- if (!manifest) {
1129
- return {
1130
- activeBlocks: 0,
1131
- archivedBlocks: 0,
1132
- activeSize: 0,
1133
- archivedSize: 0,
1134
- compressionRatio: 0,
1135
- totalEntries: 0
1136
- };
1137
- }
1138
- const activeDir = getActiveBlocksDir(projectDir);
1139
- const archivedDir = getArchivedBlocksDir(projectDir);
1140
- let activeSize = 0;
1141
- let archivedSize = 0;
1142
- for (const blockFile of manifest.activeBlocks) {
1143
- const blockPath = join3(activeDir, blockFile);
1144
- try {
1145
- if (existsSync4(blockPath)) {
1146
- const stats = await stat2(blockPath);
1147
- activeSize += stats.size;
1148
- }
1149
- } catch {
1150
- }
1151
- }
1152
- for (const archiveFile of manifest.archivedBlocks) {
1153
- const archivePath = join3(archivedDir, archiveFile);
1154
- try {
1155
- if (existsSync4(archivePath)) {
1156
- const stats = await stat2(archivePath);
1157
- archivedSize += stats.size;
1158
- }
1159
- } catch {
1160
- }
1161
- }
1162
- const estimatedUncompressed = archivedSize * 5;
1163
- const compressionRatio = estimatedUncompressed > 0 ? Math.round((1 - archivedSize / estimatedUncompressed) * 100) : 0;
1164
- return {
1165
- activeBlocks: manifest.activeBlocks.length,
1166
- archivedBlocks: manifest.archivedBlocks.length,
1167
- activeSize,
1168
- archivedSize,
1169
- compressionRatio,
1170
- totalEntries: manifest.totalEntries
1171
- };
1172
- }
1173
- async function shouldCompress(workDir) {
1174
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1175
- const manifest = await loadManifest(projectDir);
1176
- if (!manifest || !manifest.compressionConfig.enabled) {
1177
- return false;
1178
- }
1179
- const stats = await getStorageStats(projectDir);
1180
- const exceedsSize = stats.activeSize > manifest.compressionConfig.maxHotStorageSize;
1181
- const hasOldBlocks = manifest.activeBlocks.some((blockFile) => {
1182
- const blockDate = blockFile.replace(".json", "");
1183
- const cutoffDate = /* @__PURE__ */ new Date();
1184
- cutoffDate.setDate(cutoffDate.getDate() - manifest.compressionConfig.archiveAfterDays);
1185
- return new Date(blockDate) < cutoffDate;
1186
- });
1187
- return exceedsSize || hasOldBlocks;
1188
- }
1189
- async function correctLedgerEntries(entryIds, reason, correctionType = "corrected", workDir, author) {
1190
- if (entryIds.length === 0) {
1191
- return {
1192
- success: false,
1193
- correctedEntries: 0,
1194
- error: "No entry IDs provided"
1195
- };
1196
- }
1197
- if (!reason || reason.trim().length === 0) {
1198
- return {
1199
- success: false,
1200
- correctedEntries: 0,
1201
- error: "Correction reason is required"
1202
- };
1203
- }
1204
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1205
- const memoryDir = join3(getTrieDirectory(projectDir), "memory");
1206
- const ledgerPath = join3(memoryDir, LEDGER_FILENAME);
1207
- return withFileLock(ledgerPath, async () => {
1208
- const blocks = await loadLedger(projectDir);
1209
- const entriesToCorrect = [];
1210
- for (const block of blocks) {
1211
- for (const entry of block.entries) {
1212
- if (entryIds.includes(entry.id) && entry.status === "active") {
1213
- entriesToCorrect.push(entry);
1214
- }
1215
- }
1216
- }
1217
- if (entriesToCorrect.length === 0) {
1218
- return {
1219
- success: false,
1220
- correctedEntries: 0,
1221
- error: "No active entries found with the provided IDs"
1222
- };
1223
- }
1224
- try {
1225
- const now = (/* @__PURE__ */ new Date()).toISOString();
1226
- const isRepo = await isGitRepo(projectDir);
1227
- const lastCommit = isRepo ? await getLastCommit(projectDir) : null;
1228
- const correctionAuthor = author || lastCommit?.author || "unknown";
1229
- const correctionEntries = entriesToCorrect.map((entry) => {
1230
- const correctionId = `correction-${entry.id}-${Date.now()}`;
1231
- return {
1232
- id: correctionId,
1233
- hash: sha256(`${correctionId}:${entry.hash}:${reason}:${now}`),
1234
- severity: "info",
1235
- file: entry.file,
1236
- agent: "ledger-correction",
1237
- timestamp: now,
1238
- status: "active",
1239
- correction: `Correcting entry ${entry.id}: ${reason}`,
1240
- correctedBy: entry.id
1241
- };
1242
- });
1243
- for (const block of blocks) {
1244
- let blockModified = false;
1245
- for (const entry of block.entries) {
1246
- if (entryIds.includes(entry.id) && entry.status === "active") {
1247
- entry.status = correctionType;
1248
- entry.correctionTimestamp = now;
1249
- entry.correction = reason;
1250
- blockModified = true;
1251
- }
1252
- }
1253
- if (blockModified) {
1254
- block.merkleRoot = computeMerkleRoot(block.entries.map((e) => e.hash));
1255
- block.blockHash = computeBlockHash(
1256
- block.previousHash,
1257
- block.merkleRoot,
1258
- block.date,
1259
- block.version
1260
- );
1261
- block.updatedAt = now;
1262
- }
1263
- }
1264
- await saveLedgerInternal(blocks, projectDir);
1265
- const correctionBlock = await appendCorrectionEntries(
1266
- correctionEntries,
1267
- projectDir,
1268
- correctionAuthor
1269
- );
1270
- return {
1271
- success: true,
1272
- correctedEntries: entriesToCorrect.length,
1273
- ...correctionBlock && { correctionBlock }
1274
- };
1275
- } catch (error) {
1276
- return {
1277
- success: false,
1278
- correctedEntries: 0,
1279
- error: `Failed to correct entries: ${error instanceof Error ? error.message : "Unknown error"}`
1280
- };
1281
- }
1282
- }, { timeout: 15e3 });
1283
- }
1284
- async function appendCorrectionEntries(correctionEntries, projectDir, author) {
1285
- const blocks = await loadLedger(projectDir);
1286
- const today = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
1287
- const now = (/* @__PURE__ */ new Date()).toISOString();
1288
- const previousBlock = blocks[blocks.length - 1];
1289
- const block = previousBlock && previousBlock.date === today ? previousBlock : await createSyncableBlock(today, now, previousBlock?.blockHash ?? GENESIS_HASH, author, projectDir, void 0, blocks.length);
1290
- if (block !== previousBlock) {
1291
- blocks.push(block);
1292
- }
1293
- block.entries = [...block.entries, ...correctionEntries];
1294
- block.merkleRoot = computeMerkleRoot(block.entries.map((entry) => entry.hash));
1295
- block.blockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);
1296
- block.updatedAt = now;
1297
- await saveLedgerInternal(blocks, projectDir);
1298
- return block;
1299
- }
1300
- async function getLedgerEntries(workDir, includeStatus) {
1301
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1302
- const blocks = await loadLedger(projectDir);
1303
- const statusFilter = includeStatus || ["active"];
1304
- const entries = [];
1305
- for (const block of blocks) {
1306
- for (const entry of block.entries) {
1307
- const entryStatus = entry.status || "active";
1308
- if (statusFilter.includes(entryStatus)) {
1309
- entries.push(entry);
1310
- }
1311
- }
1312
- }
1313
- return entries;
1314
- }
1315
- async function getEntryCorrectionHistory(entryIds, workDir) {
1316
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1317
- const blocks = await loadLedger(projectDir);
1318
- const history = /* @__PURE__ */ new Map();
1319
- for (const entryId of entryIds) {
1320
- const corrections = [];
1321
- let original = null;
1322
- for (const block of blocks) {
1323
- for (const entry of block.entries) {
1324
- if (entry.id === entryId) {
1325
- original = entry;
1326
- }
1327
- if (entry.correctedBy === entryId) {
1328
- corrections.push(entry);
1329
- }
1330
- }
1331
- }
1332
- if (original) {
1333
- history.set(entryId, { original, corrections });
1334
- }
1335
- }
1336
- return history;
1337
- }
1338
- async function getCorrectionStats(workDir) {
1339
- const projectDir = workDir || getWorkingDirectory(void 0, true);
1340
- const blocks = await loadLedger(projectDir);
1341
- let totalEntries = 0;
1342
- let activeEntries = 0;
1343
- let correctedEntries = 0;
1344
- let falsePositives = 0;
1345
- for (const block of blocks) {
1346
- for (const entry of block.entries) {
1347
- totalEntries++;
1348
- const status = entry.status || "active";
1349
- if (status === "active") activeEntries++;
1350
- else if (status === "corrected") correctedEntries++;
1351
- else if (status === "false-positive") falsePositives++;
1352
- }
1353
- }
1354
- const correctionRate = totalEntries > 0 ? (correctedEntries + falsePositives) / totalEntries * 100 : 0;
1355
- return {
1356
- totalEntries,
1357
- activeEntries,
1358
- correctedEntries,
1359
- falsePositives,
1360
- correctionRate: Math.round(correctionRate * 100) / 100
1361
- };
1362
- }
1363
-
1364
- export {
1365
- withFileLock,
1366
- generateKeyPair,
1367
- saveKeyPair,
1368
- loadKeyPair,
1369
- getPublicKey,
1370
- hasSigningKey,
1371
- ConcurrentModificationError,
1372
- signLedgerEntry,
1373
- verifyLedgerEntry,
1374
- verifyBlockSignatures,
1375
- appendIssuesToLedger,
1376
- verifyLedger,
1377
- computeMerkleRoot,
1378
- getLedgerBlocks,
1379
- getLedgerBlocksWithHash,
1380
- saveLedgerOptimistic,
1381
- initializeSharedLedger,
1382
- syncLedgerFromShared,
1383
- pushLedgerToShared,
1384
- migrateLegacyLedger,
1385
- detectLegacyLedger,
1386
- getLedgerSyncStatus,
1387
- compressOldBlocks,
1388
- loadArchivedBlocks,
1389
- getStorageStats,
1390
- shouldCompress,
1391
- correctLedgerEntries,
1392
- getLedgerEntries,
1393
- getEntryCorrectionHistory,
1394
- getCorrectionStats
1395
- };
1396
- //# sourceMappingURL=chunk-ZJF5FTBX.js.map
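
The largest removal in this release is the ledger module bundled as chunk-ZJF5FTBX.js. For readers of the diff, the following is a minimal, self-contained sketch of the hash-chain check that module performed (computeMerkleRoot, computeBlockHash, verifyLedger), using only Node's built-in crypto. The block shape (entries[].hash, previousHash, merkleRoot, blockHash, date, version) mirrors the removed code; the sample data at the end is hypothetical and only illustrates the linkage, not the package's API.

// Sketch of the chain verification removed with chunk-ZJF5FTBX.js (ledger module).
// Block fields mirror the removed code; sample data below is hypothetical.
import { createHash } from "crypto";

const GENESIS_HASH = "0".repeat(64);
const sha256 = (input) => createHash("sha256").update(input).digest("hex");

// Pair-wise hash the entry hashes until one root remains (an odd node pairs with itself).
function computeMerkleRoot(hashes) {
  if (hashes.length === 0) return sha256("");
  let level = hashes.slice();
  while (level.length > 1) {
    const next = [];
    for (let i = 0; i < level.length; i += 2) {
      const left = level[i];
      const right = level[i + 1] ?? left;
      next.push(sha256(`${left}:${right}`));
    }
    level = next;
  }
  return level[0];
}

// A block hash commits to the version, date, previous block hash, and merkle root.
const computeBlockHash = (previousHash, merkleRoot, date, version) =>
  sha256(`${version}:${date}:${previousHash}:${merkleRoot}`);

// Walk the chain: every block must link to its predecessor and match its recomputed hashes.
function verifyChain(blocks) {
  for (let i = 0; i < blocks.length; i += 1) {
    const block = blocks[i];
    const expectedPrev = i === 0 ? GENESIS_HASH : blocks[i - 1].blockHash;
    if (block.previousHash !== expectedPrev) {
      return { valid: false, error: `Block ${i} previous hash mismatch` };
    }
    const merkleRoot = computeMerkleRoot(block.entries.map((e) => e.hash));
    if (block.merkleRoot !== merkleRoot) {
      return { valid: false, error: `Block ${i} merkle root mismatch` };
    }
    if (block.blockHash !== computeBlockHash(block.previousHash, merkleRoot, block.date, block.version)) {
      return { valid: false, error: `Block ${i} block hash mismatch` };
    }
  }
  return { valid: true };
}

// Hypothetical single-block ledger assembled the same way the removed code built blocks.
const entries = [{ hash: sha256("example-issue") }];
const merkleRoot = computeMerkleRoot(entries.map((e) => e.hash));
const block = {
  version: 2,
  date: "2024-01-01",
  entries,
  previousHash: GENESIS_HASH,
  merkleRoot,
  blockHash: computeBlockHash(GENESIS_HASH, merkleRoot, "2024-01-01", 2),
};
console.log(verifyChain([block])); // { valid: true }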