@peac/capture-node 0.10.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,864 @@
1
+ 'use strict';
2
+
3
+ var fs2 = require('fs/promises');
4
+ var path = require('path');
5
+ var captureCore = require('@peac/capture-core');
6
+ var os = require('os');
7
+ var fs = require('fs');
8
+
9
/**
 * Wrap a CommonJS export object so it behaves like a frozen ES-module
 * namespace: named keys become (live) getters, the original object is
 * exposed as `default`. Modules already flagged `__esModule` pass through.
 */
function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  const ns = Object.create(null);
  if (e) {
    for (const k of Object.keys(e)) {
      if (k === 'default') continue;
      const desc = Object.getOwnPropertyDescriptor(e, k);
      // Preserve accessor descriptors as-is; plain values get a live getter.
      Object.defineProperty(ns, k, desc.get ? desc : {
        enumerable: true,
        get: function () { return e[k]; }
      });
    }
  }
  ns.default = e;
  return Object.freeze(ns);
}
26
+
27
+ var fs2__namespace = /*#__PURE__*/_interopNamespace(fs2);
28
+ var path__namespace = /*#__PURE__*/_interopNamespace(path);
29
+
30
+ // src/fs-spool-store.ts
31
+
32
+ // src/errors.ts
33
/**
 * Raised when the spool hits one of its hard caps.
 * `unit` is "entries" or "bytes"; `current`/`max` carry the exceeded values.
 */
var SpoolFullError = class extends Error {
  code = "E_SPOOL_FULL";
  constructor(current, max, unit) {
    super(`Spool full: ${current}/${max} ${unit}`);
    this.name = "SpoolFullError";
    Object.assign(this, { current, max, unit });
  }
};
43
/**
 * Raised when linkage/parse corruption is detected in the spool file.
 * Optional `corruptAtSequence` and `details` are folded into the message.
 */
var SpoolCorruptError = class extends Error {
  code = "E_SPOOL_CORRUPT";
  constructor(reason, corruptAtSequence, details) {
    let message = `Spool corrupt: ${reason}`;
    if (corruptAtSequence !== undefined) {
      message += ` at sequence ${corruptAtSequence}`;
    }
    if (details) {
      message += ` -- ${details}`;
    }
    super(message);
    this.name = "SpoolCorruptError";
    this.reason = reason;
    this.corruptAtSequence = corruptAtSequence;
    this.details = details;
  }
};
55
/**
 * Raised when another process already holds the spool lockfile.
 * Mentions the holder PID when the lockfile payload could be read.
 */
var LockfileError = class extends Error {
  code = "E_LOCKFILE";
  constructor(lockPath, holderPid) {
    const parts = ["Another PEAC instance holds the lock."];
    if (holderPid !== undefined) {
      parts.push(`PID: ${holderPid}.`);
    }
    parts.push(`If stale, delete ${lockPath} or set allowStaleLockBreak: true`);
    super(parts.join(" "));
    this.name = "LockfileError";
    this.lockPath = lockPath;
    this.holderPid = holderPid;
  }
};
67
/**
 * Acquire an exclusive advisory lock for `filePath` by atomically creating
 * `filePath + ".lock"` (O_EXCL). On EEXIST, optionally breaks a stale lock
 * (mtime older than `staleLockMaxAgeMs`, default 1h) and retries once.
 * Returns `{ lockPath, release() }`; `release` is idempotent and best-effort.
 * Throws LockfileError when the lock is legitimately held by someone else.
 */
async function acquireLock(filePath, options) {
  const lockPath = filePath + ".lock";
  const allowStaleBreak = options?.allowStaleLockBreak ?? false;
  const staleMaxAge = options?.staleLockMaxAgeMs ?? 36e5;
  const payload = {
    pid: process.pid,
    startTime: Date.now(),
    hostname: getHostname(),
    createdAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  const payloadText = JSON.stringify(payload, null, 2) + "\n";
  // "wx" = create exclusively; fails with EEXIST when someone holds the lock.
  const writeLock = () =>
    fs2__namespace.writeFile(lockPath, payloadText, { flag: "wx", mode: 420 });
  // Read the current holder (best-effort) and fail with a descriptive error.
  const failWithHolder = async () => {
    const holder = await readLockHolder(lockPath);
    throw new LockfileError(lockPath, holder?.pid);
  };
  try {
    await writeLock();
  } catch (err) {
    // Anything other than "lockfile already exists" is unexpected.
    if (isEnoent(err) || !isEexist(err)) throw err;
    if (!allowStaleBreak) await failWithHolder();
    const broke = await tryBreakStaleLock(lockPath, staleMaxAge);
    if (!broke) await failWithHolder();
    try {
      await writeLock();
    } catch (retryErr) {
      // Lost the race after breaking the stale lock.
      if (isEexist(retryErr)) await failWithHolder();
      throw retryErr;
    }
  }
  let released = false;
  return {
    lockPath,
    async release() {
      if (released) return;
      released = true;
      try {
        await fs2__namespace.unlink(lockPath);
      } catch {
        // Best-effort: lockfile may already be gone.
      }
    }
  };
}
124
/**
 * Parse the JSON payload of a lockfile.
 * Returns undefined when the file is missing or unparseable (best-effort).
 */
async function readLockHolder(lockPath) {
  try {
    const raw = await fs2__namespace.readFile(lockPath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return undefined;
  }
}
132
/**
 * Remove the lockfile if its mtime is older than `maxAgeMs`.
 * Returns true when the lock is gone (deleted here, already missing, or
 * unlink raced and failed -- any error path is treated as "broken").
 */
async function tryBreakStaleLock(lockPath, maxAgeMs) {
  let info;
  try {
    info = await fs2__namespace.stat(lockPath);
  } catch {
    return true; // lockfile vanished -- nothing left to break
  }
  if (Date.now() - info.mtimeMs <= maxAgeMs) {
    return false; // still fresh -- respect the holder
  }
  try {
    await fs2__namespace.unlink(lockPath);
  } catch {
    // Raced with the holder's own cleanup; either way it is gone.
  }
  return true;
}
145
/** Best-effort hostname lookup; falls back to "unknown" if os.hostname() throws. */
function getHostname() {
  let name = "unknown";
  try {
    name = os.hostname();
  } catch {
    // keep the "unknown" fallback
  }
  return name;
}
152
/** True when `err` carries the POSIX "file already exists" code. */
function isEexist(err) {
  if (err === null || err === undefined) return false;
  return err.code === "EEXIST";
}
155
/** True when `err` carries the POSIX "no such file or directory" code. */
function isEnoent(err) {
  if (err === null || err === undefined) return false;
  return err.code === "ENOENT";
}
158
/** Drop a single trailing '\r' (CRLF line endings) from a line buffer, if present. */
function stripTrailingCR(buf) {
  const last = buf.length - 1;
  return last >= 0 && buf[last] === 0x0d ? buf.subarray(0, last) : buf;
}
164
/**
 * Stream a file as newline-delimited lines without ever buffering more than
 * `maxLineBytes` of a single line. Yields one of:
 *   - { kind: "line", line, byteOffset, byteLength }        -- complete line
 *   - { kind: "incomplete_tail", line, byteOffset, byteLength } -- last line,
 *     no trailing newline (possible torn write / crash tail)
 *   - { kind: "line_too_large", byteOffset, accumulatedBytes }  -- line over
 *     the cap; its bytes are discarded, only metadata is reported
 * Offsets are absolute byte offsets of the line start in the file. Lines are
 * split on '\n' (byte 10); a trailing '\r' is stripped before decoding UTF-8.
 */
async function* streamLines(options) {
  const { filePath, maxLineBytes, highWaterMark = 64 * 1024 } = options;
  const stream = fs.createReadStream(filePath, { highWaterMark });
  let chunks = []; // buffered segments of the current (in-progress) line
  let accumulatedBytes = 0; // byte length of the current line so far
  let lineStartOffset = 0; // file offset where the current line starts
  let fileOffset = 0; // file offset just past the bytes consumed so far
  let lineTooLarge = false; // current line already blew the cap; discard mode
  try {
    for await (const rawChunk of stream) {
      const chunk = Buffer.isBuffer(rawChunk) ? rawChunk : Buffer.from(rawChunk);
      let chunkOffset = 0;
      while (chunkOffset < chunk.length) {
        const newlineIndex = chunk.indexOf(10, chunkOffset);
        if (newlineIndex === -1) {
          // No newline in the rest of this chunk: accumulate and move on.
          const remainder = chunk.subarray(chunkOffset);
          accumulatedBytes += remainder.length;
          if (accumulatedBytes > maxLineBytes) {
            // Over the cap: drop buffered bytes so memory stays bounded.
            if (!lineTooLarge) {
              chunks = [];
              lineTooLarge = true;
            }
            // Clamp so the reported size is a stable "cap + 1" sentinel.
            accumulatedBytes = maxLineBytes + 1;
          } else {
            // Copy: the source chunk buffer may be reused by the stream.
            chunks.push(Buffer.from(remainder));
          }
          fileOffset += remainder.length;
          chunkOffset = chunk.length;
        } else {
          // Newline found: finish the current line.
          const segment = chunk.subarray(chunkOffset, newlineIndex);
          const segmentLength = segment.length;
          accumulatedBytes += segmentLength;
          if (lineTooLarge || accumulatedBytes > maxLineBytes) {
            yield {
              kind: "line_too_large",
              byteOffset: lineStartOffset,
              accumulatedBytes: lineTooLarge ? maxLineBytes + 1 : accumulatedBytes
            };
          } else {
            chunks.push(Buffer.from(segment));
            let lineBuffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
            lineBuffer = stripTrailingCR(lineBuffer);
            const line = lineBuffer.toString("utf-8");
            yield {
              kind: "line",
              line,
              byteOffset: lineStartOffset,
              byteLength: accumulatedBytes
            };
          }
          // +1 accounts for the newline byte itself.
          fileOffset += segmentLength + 1;
          lineStartOffset = fileOffset;
          // Reset per-line state for the next line.
          chunks = [];
          accumulatedBytes = 0;
          lineTooLarge = false;
          chunkOffset = newlineIndex + 1;
        }
      }
    }
    // EOF with buffered bytes: the file does not end in a newline.
    if (accumulatedBytes > 0) {
      if (lineTooLarge || accumulatedBytes > maxLineBytes) {
        yield {
          kind: "line_too_large",
          byteOffset: lineStartOffset,
          accumulatedBytes: lineTooLarge ? maxLineBytes + 1 : accumulatedBytes
        };
      } else {
        let lineBuffer = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
        lineBuffer = stripTrailingCR(lineBuffer);
        const line = lineBuffer.toString("utf-8");
        yield {
          kind: "incomplete_tail",
          line,
          byteOffset: lineStartOffset,
          byteLength: accumulatedBytes
        };
      }
    }
  } finally {
    // Ensure the fd is released even if the consumer abandons the generator.
    stream.destroy();
  }
}
246
/**
 * Truncate `filePath` to exactly `byteOffset` bytes.
 * Used during crash recovery to drop a torn (incomplete) tail line.
 *
 * Fix: the original performed a dynamic `await import('fs/promises')` on every
 * call even though this bundle already requires 'fs/promises' at module scope
 * (as `fs2__namespace`). Use the module-level namespace directly -- same
 * behavior, no per-call module-loader round trip.
 */
async function truncateFile(filePath, byteOffset) {
  await fs2__namespace.truncate(filePath, byteOffset);
}
250
+
251
// src/fs-spool-store.ts
// Meta sidecar schema version; bump when the sidecar layout changes.
var META_VERSION = 1;
// Default per-line cap: 4 MiB, enforced before a line is materialized.
var DEFAULT_MAX_LINE_BYTES = 4 * 1024 * 1024;
/**
 * Construct and initialize an FsSpoolStore (acquires the lock, loads or
 * recovers on-disk state, opens the append handle).
 */
async function createFsSpoolStore(options) {
  const spool = new FsSpoolStore(options);
  await spool.init();
  return spool;
}
259
/**
 * Type-guarded probe: returns `store.diagnostics()` when the store is an
 * object exposing a `diagnostics` method, otherwise undefined. Lets callers
 * hold a generic SpoolStore without depending on the concrete class.
 */
function getFsSpoolDiagnostics(store) {
  const supportsDiagnostics =
    store !== null &&
    typeof store === "object" &&
    "diagnostics" in store &&
    typeof store.diagnostics === "function";
  return supportsDiagnostics ? store.diagnostics() : undefined;
}
265
/**
 * Append-only JSONL spool backed by one file plus a ".meta.json" sidecar.
 *
 * Entries form a digest chain: each appended entry must carry
 * prev_entry_digest equal to the current head digest and sequence equal to
 * head sequence + 1. In-memory counters (sequence, headDigest, entryCount,
 * fileBytes) are loaded on init() from the sidecar (fast path) or rebuilt by
 * a full linkage-verifying scan, and updated on every append.
 * Single-writer: guarded by a ".lock" file (see acquireLock).
 */
var FsSpoolStore = class {
  filePath; // JSONL spool file
  metaPath; // sidecar path: filePath + ".meta.json"
  maxEntries; // hard cap on entry count
  maxFileBytes; // hard cap on spool file size
  maxLineBytes; // per-line cap passed to streamLines
  autoCommitIntervalMs; // 0 disables the background commit timer
  lockOptions; // forwarded to acquireLock
  onWarning; // warning sink; defaults to a no-op
  // In-memory state (loaded on init, updated on append)
  sequence = 0;
  headDigest = captureCore.GENESIS_DIGEST;
  entryCount = 0;
  fileBytes = 0;
  // File handle for appending
  fd = null;
  lock = null; // handle returned by acquireLock; released on close()
  closed = false;
  corrupt = false; // once set, append() refuses (read-only mode)
  corruptReason;
  corruptAtSequence;
  firstCreate = false; // spool file did not exist before init(); triggers dir fsync
  // Auto-commit
  dirty = false; // appended data not yet fsynced
  autoCommitTimer = null;
  constructor(options) {
    this.filePath = options.filePath;
    this.metaPath = options.filePath + ".meta.json";
    this.maxEntries = options.maxEntries ?? 1e5;
    this.maxFileBytes = options.maxFileBytes ?? 100 * 1024 * 1024;
    this.maxLineBytes = options.maxLineBytes ?? DEFAULT_MAX_LINE_BYTES;
    this.autoCommitIntervalMs = options.autoCommitIntervalMs ?? 5e3;
    this.lockOptions = options.lockOptions;
    this.onWarning = options.onWarning ?? (() => {
    });
  }
  /**
   * Initialize: acquire lock, load or recover state, open file handle.
   * On any failure after locking, the lock is released before rethrowing.
   */
  async init() {
    const dirPath = path__namespace.dirname(this.filePath);
    await fs2__namespace.mkdir(dirPath, { recursive: true });
    this.lock = await acquireLock(this.filePath, this.lockOptions);
    try {
      let fileExists;
      try {
        await fs2__namespace.access(this.filePath);
        fileExists = true;
      } catch {
        fileExists = false;
      }
      this.firstCreate = !fileExists;
      // Fast path: trust the meta sidecar when it matches the file stats;
      // otherwise fall back to a full linkage-verifying scan.
      const metaLoaded = fileExists && await this.tryLoadMeta();
      if (!metaLoaded && fileExists) {
        await this.fullScan();
      }
      this.fd = await fs2__namespace.open(this.filePath, "a");
      if (this.autoCommitIntervalMs > 0) {
        this.autoCommitTimer = setInterval(() => {
          if (this.dirty && !this.closed) {
            this.commit().catch((err) => {
              this.onWarning(`Auto-commit failed: ${String(err)}`);
            });
          }
        }, this.autoCommitIntervalMs);
        // unref so the timer does not keep the process alive (Node timers).
        if (typeof this.autoCommitTimer === "object" && "unref" in this.autoCommitTimer) {
          this.autoCommitTimer.unref();
        }
      }
    } catch (err) {
      await this.lock.release();
      this.lock = null;
      throw err;
    }
  }
  // ===========================================================================
  // SpoolStore Interface
  // ===========================================================================
  /**
   * Append one entry. Validates caps, chain linkage and sequence before
   * writing. Returns the entry's sequence. Write is buffered until commit().
   */
  async append(entry) {
    this.assertWritable();
    if (this.entryCount >= this.maxEntries) {
      throw new SpoolFullError(this.entryCount, this.maxEntries, "entries");
    }
    const line = JSON.stringify(entry) + "\n";
    const lineBytes = Buffer.byteLength(line, "utf-8");
    if (this.fileBytes + lineBytes > this.maxFileBytes) {
      throw new SpoolFullError(this.fileBytes, this.maxFileBytes, "bytes");
    }
    if (entry.prev_entry_digest !== this.headDigest) {
      throw new Error(
        `Invalid chain: expected prev_entry_digest ${this.headDigest}, got ${entry.prev_entry_digest}`
      );
    }
    if (entry.sequence !== this.sequence + 1) {
      throw new Error(`Invalid sequence: expected ${this.sequence + 1}, got ${entry.sequence}`);
    }
    await this.fd.write(line, null, "utf-8");
    // Advance in-memory head state only after the write succeeded.
    this.headDigest = entry.entry_digest;
    this.sequence = entry.sequence;
    this.entryCount++;
    this.fileBytes += lineBytes;
    this.dirty = true;
    return entry.sequence;
  }
  /**
   * fsync pending appends; on first creation also fsync the directory entry,
   * then refresh the meta sidecar (both best-effort, surfaced via onWarning).
   */
  async commit() {
    this.assertNotClosed();
    if (!this.dirty || !this.fd) return;
    await this.fd.sync();
    this.dirty = false;
    if (this.firstCreate) {
      this.firstCreate = false;
      await this.fsyncDir().catch((err) => {
        this.onWarning(`Directory fsync failed (non-critical): ${String(err)}`);
      });
    }
    await this.writeMeta().catch((err) => {
      this.onWarning(`Meta file write failed: ${String(err)}`);
    });
  }
  /**
   * Read entries with sequence >= fromSequence, up to `limit` (unlimited when
   * omitted or <= 0). Unparseable lines are skipped; an over-cap line marks
   * the spool corrupt and stops the read.
   */
  async read(fromSequence, limit) {
    this.assertNotClosed();
    const entries = [];
    const effectiveLimit = limit !== void 0 && limit > 0 ? limit : Infinity;
    for await (const result of streamLines({
      filePath: this.filePath,
      maxLineBytes: this.maxLineBytes
    })) {
      if (result.kind === "line_too_large") {
        this.onWarning(
          `Line at byte offset ${result.byteOffset} exceeds maxLineBytes (${result.accumulatedBytes}/${this.maxLineBytes}) -- marking corrupt`
        );
        this.setCorrupt("LINE_TOO_LARGE", this.sequence);
        break;
      }
      if (!result.line.trim()) continue;
      let entry;
      try {
        entry = JSON.parse(result.line);
      } catch {
        continue;
      }
      if (entry.sequence >= fromSequence) {
        entries.push(entry);
        if (entries.length >= effectiveLimit) {
          break;
        }
      }
    }
    return entries;
  }
  /** Digest of the newest entry (GENESIS_DIGEST when empty). */
  async getHeadDigest() {
    this.assertNotClosed();
    return this.headDigest;
  }
  /** Sequence number of the newest entry (0 when empty). */
  async getSequence() {
    this.assertNotClosed();
    return this.sequence;
  }
  /**
   * Flush (best-effort), close the append handle, stop the auto-commit timer
   * and release the lockfile. Idempotent.
   */
  async close() {
    if (this.closed) return;
    this.closed = true;
    if (this.autoCommitTimer !== null) {
      clearInterval(this.autoCommitTimer);
      this.autoCommitTimer = null;
    }
    if (this.fd) {
      if (this.dirty) {
        try {
          await this.fd.sync();
          this.dirty = false;
          await this.writeMeta().catch(() => {
          });
        } catch {
          // Best-effort final flush; close regardless.
        }
      }
      await this.fd.close();
      this.fd = null;
    }
    if (this.lock) {
      await this.lock.release();
      this.lock = null;
    }
  }
  // ===========================================================================
  // Diagnostics (P0-7: queryable without parsing error strings)
  // ===========================================================================
  /**
   * Get a diagnostic snapshot for operator tooling.
   *
   * Use `getFsSpoolDiagnostics(store)` if you have a generic SpoolStore
   * reference and need to probe for this method via type guard.
   */
  diagnostics() {
    const full = this.entryCount >= this.maxEntries || this.fileBytes >= this.maxFileBytes;
    return {
      mode: full || this.corrupt ? "read_only" : "active",
      spoolFull: full,
      spoolCorrupt: this.corrupt,
      corruptReason: this.corruptReason,
      corruptAtSequence: this.corruptAtSequence,
      entryCount: this.entryCount,
      fileBytes: this.fileBytes,
      maxEntries: this.maxEntries,
      maxFileBytes: this.maxFileBytes,
      filePath: this.filePath
    };
  }
  /** Whether the spool has reached its hard-cap limit. */
  get isFull() {
    return this.entryCount >= this.maxEntries || this.fileBytes >= this.maxFileBytes;
  }
  /** Whether linkage corruption was detected on load. */
  get isCorrupt() {
    return this.corrupt;
  }
  /** Current entry count. */
  get currentEntryCount() {
    return this.entryCount;
  }
  /** Current file size in bytes. */
  get currentFileBytes() {
    return this.fileBytes;
  }
  /** Configured max entries. */
  get maxEntryLimit() {
    return this.maxEntries;
  }
  /** Configured max file bytes. */
  get maxBytesLimit() {
    return this.maxFileBytes;
  }
  // ===========================================================================
  // Initialization Internals
  // ===========================================================================
  /**
   * Try to load state from meta file (fast path).
   * Returns true if meta was valid and loaded. The sidecar is trusted only
   * when its version matches AND its recorded size/mtime match the spool
   * file's current stats; any mismatch falls back to fullScan().
   */
  async tryLoadMeta() {
    try {
      let metaContent;
      try {
        metaContent = await fs2__namespace.readFile(this.metaPath, "utf-8");
      } catch {
        // Legacy layout: "<base>.meta.json" derived by replacing ".jsonl".
        const legacyMetaPath = this.filePath.replace(/\.jsonl$/, ".meta.json");
        if (legacyMetaPath !== this.metaPath) {
          try {
            metaContent = await fs2__namespace.readFile(legacyMetaPath, "utf-8");
          } catch {
            return false;
          }
        } else {
          return false;
        }
      }
      const fileStat = await fs2__namespace.stat(this.filePath);
      const meta = JSON.parse(metaContent);
      if (meta.metaVersion !== META_VERSION) {
        this.onWarning(
          `Meta file version mismatch (expected ${META_VERSION}, got ${meta.metaVersion}) -- falling back to full scan`
        );
        return false;
      }
      if (meta.fileBytes !== fileStat.size || meta.mtimeMs !== fileStat.mtimeMs) {
        this.onWarning(
          `Meta file mismatch (fileBytes: ${meta.fileBytes}/${fileStat.size}, mtimeMs: ${meta.mtimeMs}/${fileStat.mtimeMs}) -- falling back to full scan`
        );
        return false;
      }
      this.sequence = meta.sequence;
      this.headDigest = meta.headDigest;
      this.entryCount = meta.entryCount;
      this.fileBytes = meta.fileBytes;
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Full JSONL scan: stream line by line, verify linkage, recover from crash.
   *
   * Uses the custom streaming line parser (streamLines) which enforces
   * maxLineBytes BEFORE materializing the line as a JS string. This prevents
   * memory blowup from a single giant line -- the primary "local file DoS" vector.
   *
   * Crash recovery rules:
   * - Incomplete last line (no trailing newline + invalid JSON): truncate tail only.
   * - Invalid JSON mid-file (before the last line): mark spool_corrupt, do NOT auto-repair.
   *   Auto-repair mid-file would mask real tampering.
   */
  async fullScan() {
    let prevDigest = captureCore.GENESIS_DIGEST;
    let shouldTruncateAt;
    let lineNumber = 0;
    for await (const result of streamLines({
      filePath: this.filePath,
      maxLineBytes: this.maxLineBytes
    })) {
      lineNumber++;
      if (result.kind === "line_too_large") {
        this.onWarning(
          `Line ${lineNumber} exceeds maxLineBytes (${result.accumulatedBytes}/${this.maxLineBytes}) -- marking spool corrupt`
        );
        this.setCorrupt("LINE_TOO_LARGE", this.sequence);
        break;
      }
      const lineStr = result.line;
      if (!lineStr.trim()) continue;
      let entry;
      try {
        entry = JSON.parse(lineStr);
      } catch {
        if (result.kind === "incomplete_tail") {
          // Torn final write: safe to truncate back to the line start.
          this.onWarning(
            `Incomplete last line detected (${result.byteLength} bytes) -- truncating`
          );
          shouldTruncateAt = result.byteOffset;
        } else {
          this.onWarning(`Malformed JSON at line ${lineNumber} -- marking spool corrupt`);
          this.setCorrupt("MALFORMED_JSON", this.sequence);
        }
        break;
      }
      if (entry.prev_entry_digest !== prevDigest) {
        this.onWarning(
          `Chain linkage broken at sequence ${entry.sequence}: expected ${prevDigest}, got ${entry.prev_entry_digest}`
        );
        this.setCorrupt("CHAIN_BROKEN", entry.sequence);
        break;
      }
      prevDigest = entry.entry_digest;
      this.sequence = entry.sequence;
      this.headDigest = entry.entry_digest;
      this.entryCount++;
    }
    // Only truncate the tail when the body of the file verified clean.
    if (shouldTruncateAt !== void 0 && !this.corrupt) {
      await truncateFile(this.filePath, shouldTruncateAt);
    }
    try {
      const stat3 = await fs2__namespace.stat(this.filePath);
      this.fileBytes = stat3.size;
    } catch {
      this.fileBytes = 0;
    }
  }
  /**
   * Write meta file for fast startup.
   * Written atomically via temp file + rename; records the spool's current
   * size/mtime so staleness can be detected on the next init().
   */
  async writeMeta() {
    const stat3 = await fs2__namespace.stat(this.filePath);
    const meta = {
      metaVersion: META_VERSION,
      sequence: this.sequence,
      headDigest: this.headDigest,
      entryCount: this.entryCount,
      fileBytes: stat3.size,
      mtimeMs: stat3.mtimeMs
    };
    const tmpPath = this.metaPath + ".tmp";
    await fs2__namespace.writeFile(tmpPath, JSON.stringify(meta, null, 2) + "\n", "utf-8");
    await fs2__namespace.rename(tmpPath, this.metaPath);
  }
  /**
   * fsync the parent directory for directory entry durability on first creation.
   * Best-effort: not all platforms support this (e.g. Windows).
   */
  async fsyncDir() {
    const dirPath = path__namespace.dirname(this.filePath);
    const dirFd = await fs2__namespace.open(dirPath, "r");
    try {
      await dirFd.sync();
    } finally {
      await dirFd.close();
    }
  }
  // ===========================================================================
  // Guards
  // ===========================================================================
  /**
   * Mark the spool as corrupt with a specific reason.
   * One-way transition: all subsequent append() calls throw SpoolCorruptError.
   */
  setCorrupt(reason, atSequence) {
    this.corrupt = true;
    this.corruptReason = reason;
    this.corruptAtSequence = atSequence;
  }
  assertNotClosed() {
    if (this.closed) {
      throw new Error("FsSpoolStore is closed");
    }
  }
  assertWritable() {
    this.assertNotClosed();
    if (this.corrupt) {
      throw new SpoolCorruptError(this.corruptReason ?? "CHAIN_BROKEN", this.corruptAtSequence);
    }
  }
};
663
/**
 * Construct and initialize an FsDedupeIndex (creates the directory, replays
 * the append log into memory, opens the append handle).
 */
async function createFsDedupeIndex(options) {
  const dedupe = new FsDedupeIndex(options);
  await dedupe.init();
  return dedupe;
}
668
/**
 * File-backed dedupe index: an in-memory Map of actionId -> entry, persisted
 * as an append-only JSONL operation log ({op: "set"|"emit"|"delete"|"clear"}).
 * The log is replayed on init() with last-write-wins semantics, which also
 * makes crash replay safe. Appends are unsynced until commit()/close().
 */
var FsDedupeIndex = class {
  filePath; // JSONL operation log
  entries = /* @__PURE__ */ new Map(); // actionId -> entry (authoritative in-memory view)
  fd = null; // append handle, open after init()
  closed = false;
  firstCreate = false; // log file did not exist before init(); triggers dir fsync
  constructor(options) {
    this.filePath = options.filePath;
  }
  /**
   * Initialize: load existing entries from file.
   */
  async init() {
    await fs2__namespace.mkdir(path__namespace.dirname(this.filePath), { recursive: true });
    try {
      await fs2__namespace.access(this.filePath);
      this.firstCreate = false;
    } catch {
      this.firstCreate = true;
    }
    await this.loadFromFile();
    this.fd = await fs2__namespace.open(this.filePath, "a");
  }
  // ===========================================================================
  // DedupeIndex Interface
  // ===========================================================================
  /** Look up the entry recorded for actionId (undefined if absent). */
  async get(actionId) {
    this.assertNotClosed();
    return this.entries.get(actionId);
  }
  /** Record (or overwrite) an entry; stores a shallow copy and logs a "set" op. */
  async set(actionId, entry) {
    this.assertNotClosed();
    this.entries.set(actionId, { ...entry });
    await this.appendOp({ op: "set", actionId, entry });
  }
  /** Whether an entry exists for actionId. */
  async has(actionId) {
    this.assertNotClosed();
    return this.entries.has(actionId);
  }
  /**
   * Flag an existing entry as emitted and log an "emit" op.
   * Returns false (and logs nothing) when the actionId is unknown.
   */
  async markEmitted(actionId) {
    this.assertNotClosed();
    const entry = this.entries.get(actionId);
    if (!entry) return false;
    entry.emitted = true;
    await this.appendOp({ op: "emit", actionId });
    return true;
  }
  /** Remove an entry; logs a "delete" op only when something was removed. */
  async delete(actionId) {
    this.assertNotClosed();
    const existed = this.entries.delete(actionId);
    if (existed) {
      await this.appendOp({ op: "delete", actionId });
    }
    return existed;
  }
  /** Number of live entries. */
  async size() {
    this.assertNotClosed();
    return this.entries.size;
  }
  /**
   * Drop all entries and compact the log to an empty file (rewrite rather
   * than appending a "clear" op), reopening the append handle.
   */
  async clear() {
    this.assertNotClosed();
    this.entries.clear();
    if (this.fd) {
      await this.fd.close();
    }
    await fs2__namespace.writeFile(this.filePath, "", "utf-8");
    this.fd = await fs2__namespace.open(this.filePath, "a");
  }
  // ===========================================================================
  // Extra: commit() -- not on DedupeIndex interface
  //
  // Callers use type guard:
  //   if ('commit' in dedupe && typeof dedupe.commit === 'function')
  //     await dedupe.commit();
  // ===========================================================================
  /**
   * Flush pending writes to durable storage.
   *
   * Not part of the DedupeIndex interface (capture-core stays stable).
   * Callers use type guard to detect this method on the concrete type.
   */
  async commit() {
    this.assertNotClosed();
    if (this.fd) {
      await this.fd.sync();
    }
    if (this.firstCreate) {
      this.firstCreate = false;
      await this.fsyncDir().catch(() => {
      });
    }
  }
  // ===========================================================================
  // Close
  // ===========================================================================
  /** Final best-effort fsync and close of the append handle. Idempotent. */
  async close() {
    if (this.closed) return;
    this.closed = true;
    if (this.fd) {
      try {
        await this.fd.sync();
      } catch {
        // Best-effort flush; still close the handle.
      }
      await this.fd.close();
      this.fd = null;
    }
  }
  // ===========================================================================
  // Internals
  // ===========================================================================
  /**
   * Load entries from the dedupe file.
   * Last-write-wins for duplicate actionIds (handles crash replay).
   * Unparseable lines are skipped silently.
   */
  async loadFromFile() {
    let content;
    try {
      content = await fs2__namespace.readFile(this.filePath, "utf-8");
    } catch (err) {
      if (err?.code === "ENOENT") {
        return;
      }
      throw err;
    }
    const lines = content.split("\n");
    for (const line of lines) {
      if (!line.trim()) continue;
      let op;
      try {
        op = JSON.parse(line);
      } catch {
        continue;
      }
      switch (op.op) {
        case "set":
          if (op.actionId && op.entry) {
            this.entries.set(op.actionId, op.entry);
          }
          break;
        case "emit":
          if (op.actionId) {
            const entry = this.entries.get(op.actionId);
            if (entry) {
              entry.emitted = true;
            }
          }
          break;
        case "delete":
          if (op.actionId) {
            this.entries.delete(op.actionId);
          }
          break;
        case "clear":
          // Supported for forward/legacy compatibility; clear() itself
          // rewrites the file instead of appending this op.
          this.entries.clear();
          break;
      }
    }
  }
  /**
   * Append an operation to the dedupe file (no fsync).
   */
  async appendOp(op) {
    if (!this.fd) return;
    const line = JSON.stringify(op) + "\n";
    await this.fd.write(line, null, "utf-8");
  }
  /**
   * fsync the parent directory for directory entry durability on first creation.
   * Best-effort: not all platforms support this (e.g. Windows).
   */
  async fsyncDir() {
    const dirPath = path__namespace.dirname(this.filePath);
    const dirFd = await fs2__namespace.open(dirPath, "r");
    try {
      await dirFd.sync();
    } finally {
      await dirFd.close();
    }
  }
  assertNotClosed() {
    if (this.closed) {
      throw new Error("FsDedupeIndex is closed");
    }
  }
};
853
+
854
// Public API surface of @peac/capture-node (CommonJS exports).
exports.FsDedupeIndex = FsDedupeIndex;
exports.FsSpoolStore = FsSpoolStore;
exports.LockfileError = LockfileError;
exports.SpoolCorruptError = SpoolCorruptError;
exports.SpoolFullError = SpoolFullError;
exports.acquireLock = acquireLock;
exports.createFsDedupeIndex = createFsDedupeIndex;
exports.createFsSpoolStore = createFsSpoolStore;
exports.getFsSpoolDiagnostics = getFsSpoolDiagnostics;
//# sourceMappingURL=index.cjs.map