@frostpillar/frostpillar-storage-engine 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/LICENSE +21 -0
  2. package/README-JA.md +1205 -0
  3. package/README.md +1204 -0
  4. package/dist/drivers/file.cjs +960 -0
  5. package/dist/drivers/file.d.ts +3 -0
  6. package/dist/drivers/file.js +18 -0
  7. package/dist/drivers/indexedDB.cjs +570 -0
  8. package/dist/drivers/indexedDB.d.ts +3 -0
  9. package/dist/drivers/indexedDB.js +18 -0
  10. package/dist/drivers/localStorage.cjs +668 -0
  11. package/dist/drivers/localStorage.d.ts +3 -0
  12. package/dist/drivers/localStorage.js +23 -0
  13. package/dist/drivers/opfs.cjs +550 -0
  14. package/dist/drivers/opfs.d.ts +3 -0
  15. package/dist/drivers/opfs.js +18 -0
  16. package/dist/drivers/syncStorage.cjs +898 -0
  17. package/dist/drivers/syncStorage.d.ts +3 -0
  18. package/dist/drivers/syncStorage.js +22 -0
  19. package/dist/drivers/validation.d.ts +1 -0
  20. package/dist/drivers/validation.js +8 -0
  21. package/dist/errors/index.d.ts +32 -0
  22. package/dist/errors/index.js +48 -0
  23. package/dist/frostpillar-storage-engine.min.js +1 -0
  24. package/dist/index.cjs +2957 -0
  25. package/dist/index.d.ts +7 -0
  26. package/dist/index.js +6 -0
  27. package/dist/storage/backend/asyncDurableAutoCommitController.d.ts +26 -0
  28. package/dist/storage/backend/asyncDurableAutoCommitController.js +188 -0
  29. package/dist/storage/backend/asyncMutex.d.ts +7 -0
  30. package/dist/storage/backend/asyncMutex.js +38 -0
  31. package/dist/storage/backend/autoCommit.d.ts +2 -0
  32. package/dist/storage/backend/autoCommit.js +22 -0
  33. package/dist/storage/backend/capacity.d.ts +2 -0
  34. package/dist/storage/backend/capacity.js +27 -0
  35. package/dist/storage/backend/capacityResolver.d.ts +3 -0
  36. package/dist/storage/backend/capacityResolver.js +25 -0
  37. package/dist/storage/backend/encoding.d.ts +17 -0
  38. package/dist/storage/backend/encoding.js +148 -0
  39. package/dist/storage/backend/types.d.ts +184 -0
  40. package/dist/storage/backend/types.js +1 -0
  41. package/dist/storage/btree/recordKeyIndexBTree.d.ts +39 -0
  42. package/dist/storage/btree/recordKeyIndexBTree.js +104 -0
  43. package/dist/storage/config/config.browser.d.ts +4 -0
  44. package/dist/storage/config/config.browser.js +8 -0
  45. package/dist/storage/config/config.d.ts +1 -0
  46. package/dist/storage/config/config.js +1 -0
  47. package/dist/storage/config/config.node.d.ts +4 -0
  48. package/dist/storage/config/config.node.js +74 -0
  49. package/dist/storage/config/config.shared.d.ts +6 -0
  50. package/dist/storage/config/config.shared.js +105 -0
  51. package/dist/storage/datastore/Datastore.d.ts +47 -0
  52. package/dist/storage/datastore/Datastore.js +525 -0
  53. package/dist/storage/datastore/datastoreClose.d.ts +12 -0
  54. package/dist/storage/datastore/datastoreClose.js +60 -0
  55. package/dist/storage/datastore/datastoreKeyDefinition.d.ts +7 -0
  56. package/dist/storage/datastore/datastoreKeyDefinition.js +60 -0
  57. package/dist/storage/datastore/datastoreLifecycle.d.ts +18 -0
  58. package/dist/storage/datastore/datastoreLifecycle.js +63 -0
  59. package/dist/storage/datastore/mutationById.d.ts +29 -0
  60. package/dist/storage/datastore/mutationById.js +71 -0
  61. package/dist/storage/drivers/IndexedDB/indexedDBBackend.d.ts +11 -0
  62. package/dist/storage/drivers/IndexedDB/indexedDBBackend.js +109 -0
  63. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.d.ts +27 -0
  64. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.js +60 -0
  65. package/dist/storage/drivers/IndexedDB/indexedDBConfig.d.ts +7 -0
  66. package/dist/storage/drivers/IndexedDB/indexedDBConfig.js +24 -0
  67. package/dist/storage/drivers/file/fileBackend.d.ts +5 -0
  68. package/dist/storage/drivers/file/fileBackend.js +168 -0
  69. package/dist/storage/drivers/file/fileBackendController.d.ts +31 -0
  70. package/dist/storage/drivers/file/fileBackendController.js +72 -0
  71. package/dist/storage/drivers/file/fileBackendSnapshot.d.ts +10 -0
  72. package/dist/storage/drivers/file/fileBackendSnapshot.js +166 -0
  73. package/dist/storage/drivers/localStorage/localStorageBackend.d.ts +10 -0
  74. package/dist/storage/drivers/localStorage/localStorageBackend.js +156 -0
  75. package/dist/storage/drivers/localStorage/localStorageBackendController.d.ts +24 -0
  76. package/dist/storage/drivers/localStorage/localStorageBackendController.js +35 -0
  77. package/dist/storage/drivers/localStorage/localStorageConfig.d.ts +10 -0
  78. package/dist/storage/drivers/localStorage/localStorageConfig.js +16 -0
  79. package/dist/storage/drivers/localStorage/localStorageLayout.d.ts +5 -0
  80. package/dist/storage/drivers/localStorage/localStorageLayout.js +29 -0
  81. package/dist/storage/drivers/opfs/opfsBackend.d.ts +12 -0
  82. package/dist/storage/drivers/opfs/opfsBackend.js +142 -0
  83. package/dist/storage/drivers/opfs/opfsBackendController.d.ts +26 -0
  84. package/dist/storage/drivers/opfs/opfsBackendController.js +44 -0
  85. package/dist/storage/drivers/syncStorage/syncStorageAdapter.d.ts +2 -0
  86. package/dist/storage/drivers/syncStorage/syncStorageAdapter.js +123 -0
  87. package/dist/storage/drivers/syncStorage/syncStorageBackend.d.ts +11 -0
  88. package/dist/storage/drivers/syncStorage/syncStorageBackend.js +169 -0
  89. package/dist/storage/drivers/syncStorage/syncStorageBackendController.d.ts +24 -0
  90. package/dist/storage/drivers/syncStorage/syncStorageBackendController.js +34 -0
  91. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.d.ts +2 -0
  92. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.js +28 -0
  93. package/dist/storage/drivers/syncStorage/syncStorageConfig.d.ts +13 -0
  94. package/dist/storage/drivers/syncStorage/syncStorageConfig.js +42 -0
  95. package/dist/storage/drivers/syncStorage/syncStorageQuota.d.ts +3 -0
  96. package/dist/storage/drivers/syncStorage/syncStorageQuota.js +45 -0
  97. package/dist/storage/record/ordering.d.ts +3 -0
  98. package/dist/storage/record/ordering.js +7 -0
  99. package/dist/types.d.ts +125 -0
  100. package/dist/types.js +1 -0
  101. package/dist/validation/metadata.d.ts +1 -0
  102. package/dist/validation/metadata.js +7 -0
  103. package/dist/validation/payload.d.ts +7 -0
  104. package/dist/validation/payload.js +135 -0
  105. package/dist/validation/typeGuards.d.ts +1 -0
  106. package/dist/validation/typeGuards.js +7 -0
  107. package/package.json +110 -0
package/dist/index.cjs ADDED
@@ -0,0 +1,2957 @@
"use strict";
// esbuild-style CommonJS interop helpers (module prologue).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Install an enumerable lazy getter on `target` for each named export in `all`.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Mirror the own properties of `from` onto `to` as live getters, skipping
// `except` and any key `to` already owns; enumerability follows the source
// descriptor (defaulting to true when no descriptor is found).
var __copyProps = (to, from, except, desc) => {
  const copyable = from && (typeof from === "object" || typeof from === "function");
  if (copyable) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) {
        continue;
      }
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !desc || desc.enumerable
      });
    }
  }
  return to;
};

// Wrap a module namespace object as a CommonJS exports object tagged with
// `__esModule` for downstream interop checks.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts — the public API surface. Exports are registered as lazy
// getters (via __export) so each binding resolves to the implementation
// defined later in this bundle only when it is actually accessed.
var index_exports = {};
var index_export_getters = {
  BinaryFormatError: () => BinaryFormatError,
  ClosedDatastoreError: () => ClosedDatastoreError,
  ConfigurationError: () => ConfigurationError,
  DatabaseLockedError: () => DatabaseLockedError,
  Datastore: () => Datastore,
  FrostpillarError: () => FrostpillarError,
  IndexCorruptionError: () => IndexCorruptionError,
  InvalidQueryRangeError: () => InvalidQueryRangeError,
  PageCorruptionError: () => PageCorruptionError,
  QuotaExceededError: () => QuotaExceededError,
  StorageEngineError: () => StorageEngineError,
  UnsupportedBackendError: () => UnsupportedBackendError,
  ValidationError: () => ValidationError,
  indexedDBDriver: () => indexedDBDriver,
  localStorageDriver: () => localStorageDriver,
  opfsDriver: () => opfsDriver,
  syncStorageDriver: () => syncStorageDriver
};
__export(index_exports, index_export_getters);
module.exports = __toCommonJS(index_exports);
// src/errors/index.ts
// Base class for every error thrown by this package. `name` reflects the
// most-derived subclass via `new.target`; an optional `cause` is carried
// through for diagnostics when an options bag is supplied.
var FrostpillarError = class extends Error {
  constructor(message, options) {
    super(message);
    this.name = new.target.name;
    if (options !== void 0) {
      this.cause = options.cause;
    }
  }
};
// Input- and configuration-level failures.
var ValidationError = class extends FrostpillarError {
};
var InvalidQueryRangeError = class extends FrostpillarError {
};
var ConfigurationError = class extends FrostpillarError {
};
var UnsupportedBackendError = class extends FrostpillarError {
};
var ClosedDatastoreError = class extends FrostpillarError {
};
// Storage-layer failures.
var StorageEngineError = class extends FrostpillarError {
};
var DatabaseLockedError = class extends StorageEngineError {
};
var BinaryFormatError = class extends StorageEngineError {
};
var PageCorruptionError = class extends StorageEngineError {
};
var IndexCorruptionError = class extends StorageEngineError {
};
var QuotaExceededError = class extends FrostpillarError {
};
// Coerce an arbitrary thrown value into a StorageEngineError, keeping the
// original value as `cause` and prefixing `fallbackMessage` for context.
var toStorageEngineError = (error, fallbackMessage) => {
  if (error instanceof StorageEngineError) {
    return error;
  }
  if (error instanceof Error) {
    return new StorageEngineError(`${fallbackMessage}: ${error.message}`, {
      cause: error
    });
  }
  return new StorageEngineError(fallbackMessage, { cause: error });
};
// Coerce an arbitrary thrown value into a plain Error instance.
var toErrorInstance = (error, fallbackMessage) => {
  return error instanceof Error ? error : new Error(fallbackMessage, { cause: error });
};
// src/storage/record/ordering.ts
// Shape an internal index entry into the public record form returned to callers.
var toPublicRecord = (entryId, key, record) => ({
  _id: entryId,
  key,
  payload: record.payload
});
// src/storage/backend/autoCommit.ts
// Fan an auto-commit failure out to every registered listener. The error is
// normalized to a StorageEngineError first; listener failures (synchronous
// throws or rejected promises) are deliberately swallowed so one bad
// listener cannot block delivery to the rest.
var emitAutoCommitErrorToListeners = (listeners, error) => {
  let storageError;
  if (error instanceof StorageEngineError) {
    storageError = error;
  } else {
    const message = error instanceof Error ? error.message : "Unknown auto-commit storage failure.";
    storageError = new StorageEngineError(message, { cause: error });
  }
  const event = {
    source: "autoCommit",
    error: storageError,
    occurredAt: Date.now()
  };
  for (const listener of listeners) {
    try {
      void Promise.resolve(listener(event)).catch(() => void 0);
    } catch {
      // Synchronous listener failure: intentionally ignored.
    }
  }
};
// src/storage/backend/asyncMutex.ts
// FIFO asynchronous mutex. `acquire()` resolves with a one-shot release
// callback. Waiters are drained with a moving head pointer, and the consumed
// prefix is compacted lazily so releases stay amortized O(1).
var AsyncMutex = class {
  constructor() {
    this.queue = [];
    this.head = 0;
    this.locked = false;
  }
  // Resolve immediately when the lock is free; otherwise enqueue the waiter.
  acquire() {
    if (this.locked) {
      return new Promise((resolve) => {
        this.queue.push(() => resolve(this.createRelease()));
      });
    }
    this.locked = true;
    return Promise.resolve(this.createRelease());
  }
  // Build an idempotent release callback that hands the lock to the next waiter.
  createRelease() {
    let released = false;
    return () => {
      if (released) return;
      released = true;
      if (this.head >= this.queue.length) {
        // No waiters: reset to the fully unlocked state.
        this.queue.length = 0;
        this.head = 0;
        this.locked = false;
        return;
      }
      const wake = this.queue[this.head];
      this.queue[this.head] = void 0;
      this.head += 1;
      // Compact once the consumed prefix is both large in absolute terms and
      // more than half of the backing array.
      if (this.head > 1024 && this.head > this.queue.length >>> 1) {
        this.queue = this.queue.slice(this.head);
        this.head = 0;
      }
      wake();
    };
  }
};
// src/storage/backend/encoding.ts
// Pure-JS UTF-8 byte counter matching Buffer.byteLength semantics: ASCII is
// 1 byte, up to U+07FF is 2, other BMP code points are 3, valid surrogate
// pairs are 4, and lone surrogates count as 3 (they encode as U+FFFD).
var computeUtf8ByteLengthJs = (value) => {
  let total = 0;
  for (let i = 0; i < value.length; i++) {
    const unit = value.charCodeAt(i);
    if (unit <= 127) {
      total += 1;
    } else if (unit <= 2047) {
      total += 2;
    } else if (unit >= 55296 && unit <= 56319) {
      // High surrogate: a valid pair consumes two code units for 4 bytes.
      const low = i + 1 < value.length ? value.charCodeAt(i + 1) : 0;
      if (low >= 56320 && low <= 57343) {
        total += 4;
        i++;
      } else {
        total += 3;
      }
    } else {
      // Lone low surrogates and all remaining BMP code points: 3 bytes.
      total += 3;
    }
  }
  return total;
};
// Prefer the native Buffer implementation when available (Node.js).
var hasBuffer = typeof Buffer !== "undefined" && typeof Buffer.byteLength === "function";
var computeUtf8ByteLength = hasBuffer ? (value) => Buffer.byteLength(value, "utf8") : computeUtf8ByteLengthJs;
// Estimate the UTF-8 byte length of JSON.stringify(value) for a string,
// including the surrounding quotes and JSON escaping rules.
var estimateJsonStringBytes = (value) => {
  let total = 2; // opening + closing quote
  for (let i = 0; i < value.length; i++) {
    const unit = value.charCodeAt(i);
    if (unit === 34 || unit === 92) {
      // `"` and `\` escape to two characters.
      total += 2;
    } else if (unit <= 31) {
      // Control characters: short escapes for \b \t \n \f \r, \uXXXX otherwise.
      const hasShortEscape = unit === 8 || unit === 9 || unit === 10 || unit === 12 || unit === 13;
      total += hasShortEscape ? 2 : 6;
    } else if (unit <= 127) {
      total += 1;
    } else if (unit <= 2047) {
      total += 2;
    } else if (unit >= 55296 && unit <= 56319) {
      // High surrogate: a valid pair emits 4 UTF-8 bytes; a lone one is
      // escaped by JSON.stringify as \uXXXX (6 bytes).
      const low = i + 1 < value.length ? value.charCodeAt(i + 1) : 0;
      if (low >= 56320 && low <= 57343) {
        total += 4;
        i++;
      } else {
        total += 6;
      }
    } else if (unit >= 56320 && unit <= 57343) {
      // Lone low surrogate: escaped as \uXXXX.
      total += 6;
    } else {
      total += 3;
    }
  }
  return total;
};
// Estimate the serialized JSON byte size of a payload value without actually
// stringifying it. `undefined` members are skipped, mirroring JSON.stringify.
var estimateObjectSizeBytes = (value) => {
  if (value === null) {
    return 4; // "null"
  }
  switch (typeof value) {
    case "boolean":
      return value ? 4 : 5;
    // "true" / "false"
    case "number":
      return String(value).length;
    case "string":
      return estimateJsonStringBytes(value);
    case "object": {
      const record = value;
      let total = 2; // braces
      let emitted = 0;
      for (const propertyKey of Object.keys(record)) {
        const propertyValue = record[propertyKey];
        if (propertyValue === void 0) {
          continue; // JSON.stringify drops undefined members
        }
        if (emitted > 0) {
          total += 1; // comma separator
        }
        total += estimateJsonStringBytes(propertyKey) + 1; // key + colon
        total += estimateObjectSizeBytes(propertyValue);
        emitted++;
      }
      return total;
    }
    default:
      // Functions, symbols, undefined contribute nothing.
      return 0;
  }
};
// Fixed allowance for the persisted record wrapper around key + payload.
var JSON_ROOT_WRAPPER_OVERHEAD = 15;
// Estimated stored size of one record: key + payload + wrapper overhead.
var estimateRecordSizeBytes = (key, payload) => estimateObjectSizeBytes(key) + estimateObjectSizeBytes(payload) + JSON_ROOT_WRAPPER_OVERHEAD;
// Estimated stored size of a record key alone.
var estimateKeySizeBytes = (key) => estimateObjectSizeBytes(key);
// src/validation/payload.ts
// Hard limits applied while validating user payloads.
var MAX_PAYLOAD_DEPTH = 64; // maximum object nesting depth
var MAX_PAYLOAD_KEY_BYTES = 1024; // per-key UTF-8 byte limit
var MAX_PAYLOAD_STRING_BYTES = 65535; // per-string UTF-8 byte limit
var MAX_PAYLOAD_KEYS_PER_OBJECT = 256; // keys allowed on a single object
var MAX_PAYLOAD_KEYS_TOTAL = 4096; // keys allowed across the whole payload
var MAX_PAYLOAD_TOTAL_BYTES = 1048576; // 1 MiB aggregate size budget
// JSON size-accounting constants used by the payload validator.
var NULL_ESTIMATION_BYTES = 4; // "null"
var JSON_KEY_COLON_OVERHEAD = 1;
var JSON_OBJECT_BRACE_OVERHEAD = 2;
var JSON_ROOT_WRAPPER_OVERHEAD2 = 15;
// True only for plain objects: non-null, non-array, and whose prototype is
// Object.prototype or null (rejects class instances, Dates, Maps, etc.).
var isPlainObject = (value) => {
  if (value === null || typeof value !== "object" || Array.isArray(value)) {
    return false;
  }
  const prototype = Object.getPrototypeOf(value);
  return prototype === Object.prototype || prototype === null;
};
// Add `bytes` to the running size estimate, enforcing the aggregate budget.
var addValidationBytes = (state, bytes) => {
  state.totalValidationBytes += bytes;
  if (state.totalValidationBytes > MAX_PAYLOAD_TOTAL_BYTES) {
    throw new ValidationError(
      `Payload aggregate validation bytes must be <= ${MAX_PAYLOAD_TOTAL_BYTES}.`
    );
  }
};
// Validate a single object key: non-blank, not a prototype-pollution vector,
// and within both the per-key byte limit and the payload-wide key budget.
var validatePayloadKey = (key, state) => {
  if (key.trim().length === 0) {
    throw new ValidationError("Payload keys must be non-empty strings.");
  }
  if (key === "__proto__" || key === "constructor" || key === "prototype") {
    throw new ValidationError(
      `Payload key "${key}" is reserved and not allowed.`
    );
  }
  if (computeUtf8ByteLength(key) > MAX_PAYLOAD_KEY_BYTES) {
    throw new ValidationError(
      `Payload key UTF-8 byte length must be <= ${MAX_PAYLOAD_KEY_BYTES}.`
    );
  }
  state.totalKeyCount += 1;
  if (state.totalKeyCount > MAX_PAYLOAD_KEYS_TOTAL) {
    throw new ValidationError(
      `Payload total key count must be <= ${MAX_PAYLOAD_KEYS_TOTAL}.`
    );
  }
  addValidationBytes(state, estimateJsonStringBytes(key) + JSON_KEY_COLON_OVERHEAD);
};
// Validate one payload value and return its cloned form. Scalars pass
// through as-is; nested plain objects are cloned recursively.
var validateAndCloneValue = (value, depth, state) => {
  if (value === null) {
    addValidationBytes(state, NULL_ESTIMATION_BYTES);
    return null;
  }
  switch (typeof value) {
    case "string": {
      if (computeUtf8ByteLength(value) > MAX_PAYLOAD_STRING_BYTES) {
        throw new ValidationError(
          `Payload string UTF-8 byte length must be <= ${MAX_PAYLOAD_STRING_BYTES}.`
        );
      }
      addValidationBytes(state, estimateJsonStringBytes(value));
      return value;
    }
    case "number": {
      if (!Number.isFinite(value)) {
        throw new ValidationError("Payload number values must be finite.");
      }
      addValidationBytes(state, String(value).length);
      return value;
    }
    case "boolean": {
      addValidationBytes(state, value ? 4 : 5);
      return value;
    }
    case "bigint": {
      throw new ValidationError("Payload bigint values are not supported.");
    }
    case "object": {
      if (Array.isArray(value)) {
        throw new ValidationError("Payload arrays are not supported.");
      }
      if (!isPlainObject(value)) {
        throw new ValidationError("Payload values must be plain objects.");
      }
      return validateAndClonePayloadObject(value, depth + 1, state);
    }
    default:
      // undefined / function / symbol
      throw new ValidationError(
        "Payload values must be string | number | boolean | null or nested object."
      );
  }
};
// Validate and clone one nesting level of a payload object. The active
// ancestor chain is tracked in a WeakSet to reject circular references, and
// the JSON braces/commas contributed by this level are accounted for.
var validateAndClonePayloadObject = (payloadObject, depth, state) => {
  const objectLevel = depth + 1;
  if (objectLevel > MAX_PAYLOAD_DEPTH) {
    throw new ValidationError(`Payload nesting depth must be <= ${MAX_PAYLOAD_DEPTH}.`);
  }
  if (state.activePath.has(payloadObject)) {
    throw new ValidationError("Circular payload references are not supported.");
  }
  const entries = Object.entries(payloadObject);
  if (entries.length > MAX_PAYLOAD_KEYS_PER_OBJECT) {
    throw new ValidationError(`Payload object key count must be <= ${MAX_PAYLOAD_KEYS_PER_OBJECT}.`);
  }
  state.activePath.add(payloadObject);
  const separatorBytes = entries.length > 1 ? entries.length - 1 : 0;
  addValidationBytes(state, JSON_OBJECT_BRACE_OVERHEAD + separatorBytes);
  const clone = {};
  for (const [entryKey, entryValue] of entries) {
    validatePayloadKey(entryKey, state);
    clone[entryKey] = validateAndCloneValue(entryValue, depth, state);
  }
  state.activePath.delete(payloadObject);
  return clone;
};
// Entry point: validate a caller-supplied payload and return a defensive
// clone plus its estimated persisted size in bytes.
var validateAndNormalizePayload = (payload) => {
  if (!isPlainObject(payload)) {
    throw new ValidationError("payload must be a non-null plain object.");
  }
  const state = {
    activePath: /* @__PURE__ */ new WeakSet(),
    totalKeyCount: 0,
    totalValidationBytes: 0
  };
  const cloned = validateAndClonePayloadObject(payload, 0, state);
  return {
    payload: cloned,
    sizeBytes: state.totalValidationBytes + JSON_ROOT_WRAPPER_OVERHEAD2
  };
};
// src/storage/backend/capacity.ts
// Enforce the configured capacity policy before admitting `encodedBytes`.
// Returns the (possibly reduced) datastore size to use as the new baseline:
// - no capacity configured: pass through unchanged;
// - "strict": reject inserts that would overflow the budget;
// - "turnover": evict oldest records until the new record fits.
var enforceCapacityPolicy = (capacityState, currentSizeBytes, encodedBytes, getRecordCount, evictOldestRecord) => {
  if (capacityState === null) {
    return currentSizeBytes;
  }
  if (encodedBytes > capacityState.maxSizeBytes) {
    // A single record larger than the whole budget can never be stored.
    throw new QuotaExceededError(
      "Record exceeds configured capacity.maxSize boundary."
    );
  }
  if (capacityState.policy === "strict") {
    if (currentSizeBytes + encodedBytes > capacityState.maxSizeBytes) {
      throw new QuotaExceededError(
        "Insert exceeds configured capacity.maxSize under strict policy."
      );
    }
    return currentSizeBytes;
  }
  // Turnover: reclaim space from the oldest records until the insert fits.
  let remainingBytes = currentSizeBytes;
  while (remainingBytes + encodedBytes > capacityState.maxSizeBytes) {
    if (getRecordCount() === 0) {
      throw new QuotaExceededError(
        "Record cannot fit in turnover policy with empty datastore."
      );
    }
    const reclaimed = evictOldestRecord();
    // Guard against a broken eviction callback reporting no progress, which
    // would otherwise spin this loop forever.
    if (!Number.isSafeInteger(reclaimed) || reclaimed <= 0) {
      throw new IndexCorruptionError(
        "Turnover eviction reported non-progressing reclaimed bytes."
      );
    }
    remainingBytes -= reclaimed;
  }
  return Math.max(0, remainingBytes);
};
// src/storage/config/config.shared.ts
// Parsers shared by every runtime config: byte sizes ("10MB"), commit
// frequencies ("500ms"), capacity and duplicate-key policies.
var BYTE_SIZE_REGEX = /^(\d+)(B|KB|MB|GB)$/;
var BYTE_SIZE_MULTIPLIER = {
  B: 1,
  KB: 1024,
  MB: 1024 * 1024,
  GB: 1024 * 1024 * 1024
};
var FREQUENCY_REGEX = /^(\d+)(ms|s|m|h)$/;
var FREQUENCY_MULTIPLIER = {
  ms: 1,
  s: 1e3,
  m: 60 * 1e3,
  h: 60 * 60 * 1e3
};
// Turn a capacity.maxSize value (positive integer or "<n><unit>" string)
// into a byte count. "backendLimit" must have been substituted earlier.
var normalizeByteSizeInput = (value) => {
  if (value === "backendLimit") {
    throw new ConfigurationError(
      'capacity.maxSize "backendLimit" must be resolved before capacity parsing.'
    );
  }
  if (typeof value === "number") {
    if (!Number.isSafeInteger(value) || value <= 0) {
      throw new ConfigurationError(
        "capacity.maxSize must be a positive safe integer."
      );
    }
    return value;
  }
  const match = BYTE_SIZE_REGEX.exec(value);
  if (match === null) {
    throw new ConfigurationError(
      "capacity.maxSize string must be <positive><B|KB|MB|GB>."
    );
  }
  const amount = Number(match[1]);
  if (!Number.isSafeInteger(amount) || amount <= 0) {
    throw new ConfigurationError(
      "capacity.maxSize must be a positive safe integer."
    );
  }
  const totalBytes = amount * BYTE_SIZE_MULTIPLIER[match[2]];
  if (!Number.isSafeInteger(totalBytes) || totalBytes <= 0) {
    throw new ConfigurationError("capacity.maxSize exceeds safe integer range.");
  }
  return totalBytes;
};
// Parse the optional capacity section; null means "no capacity limit".
var parseCapacityConfig = (capacity) => {
  if (capacity === void 0) {
    return null;
  }
  const maxSizeBytes = normalizeByteSizeInput(capacity.maxSize);
  const policy = capacity.policy ?? "strict";
  if (policy !== "strict" && policy !== "turnover") {
    throw new ConfigurationError('capacity.policy must be "strict" or "turnover".');
  }
  return { maxSizeBytes, policy };
};
// Convert a "<n><ms|s|m|h>" frequency string into milliseconds.
var parseFrequencyString = (frequency) => {
  const match = FREQUENCY_REGEX.exec(frequency);
  if (match === null) {
    throw new ConfigurationError(
      "autoCommit.frequency string must be one of: <positive>ms, <positive>s, <positive>m, <positive>h."
    );
  }
  const amount = Number(match[1]);
  if (!Number.isSafeInteger(amount) || amount <= 0) {
    throw new ConfigurationError(
      "autoCommit.frequency string amount must be a positive safe integer."
    );
  }
  const intervalMs = amount * FREQUENCY_MULTIPLIER[match[2]];
  if (!Number.isSafeInteger(intervalMs) || intervalMs <= 0) {
    throw new ConfigurationError(
      "autoCommit.frequency exceeds safe integer range."
    );
  }
  return intervalMs;
};
// Parse the optional autoCommit section. A missing or "immediate" frequency
// means commit-on-write; numbers and duration strings become scheduled
// intervals in milliseconds.
var parseAutoCommitConfig = (autoCommit) => {
  if (autoCommit?.maxPendingBytes !== void 0) {
    if (!Number.isSafeInteger(autoCommit.maxPendingBytes) || autoCommit.maxPendingBytes <= 0) {
      throw new ConfigurationError(
        "autoCommit.maxPendingBytes must be a positive safe integer."
      );
    }
  }
  const maxPendingBytes = autoCommit?.maxPendingBytes ?? null;
  const frequency = autoCommit?.frequency;
  if (frequency === void 0 || frequency === "immediate") {
    return { frequency: "immediate", intervalMs: null, maxPendingBytes };
  }
  if (typeof frequency === "number") {
    if (!Number.isSafeInteger(frequency) || frequency <= 0) {
      throw new ConfigurationError(
        "autoCommit.frequency number must be a positive safe integer."
      );
    }
    return { frequency: "scheduled", intervalMs: frequency, maxPendingBytes };
  }
  return { frequency: "scheduled", intervalMs: parseFrequencyString(frequency), maxPendingBytes };
};
// Parse the duplicate-key policy, defaulting to "allow".
var VALID_DUPLICATE_KEY_POLICIES = [
  "allow",
  "replace",
  "reject"
];
var parseDuplicateKeyConfig = (duplicateKeys) => {
  if (duplicateKeys === void 0) {
    return "allow";
  }
  if (!VALID_DUPLICATE_KEY_POLICIES.includes(duplicateKeys)) {
    throw new ConfigurationError(
      'duplicateKeys must be "allow", "replace", or "reject".'
    );
  }
  return duplicateKeys;
};
// src/storage/backend/capacityResolver.ts
// Substitute capacity.maxSize === "backendLimit" with the driver-reported
// byte limit; any other capacity config passes through untouched.
var resolveCapacityConfigWithBackendLimit = (config) => {
  if (config.capacity === void 0) {
    return void 0;
  }
  if (config.capacity.maxSize !== "backendLimit") {
    return config.capacity;
  }
  if (config.driver === void 0) {
    throw new ConfigurationError(
      'capacity.maxSize "backendLimit" requires a durable driver.'
    );
  }
  if (config.driver.resolveBackendLimitBytes === void 0) {
    throw new ConfigurationError(
      'capacity.maxSize "backendLimit" is not supported by the selected driver.'
    );
  }
  return {
    ...config.capacity,
    maxSize: config.driver.resolveBackendLimitBytes()
  };
};
// Resolve "backendLimit" (when present) and parse the result into the
// runtime capacity state.
var resolveCapacityState = (config) => parseCapacityConfig(resolveCapacityConfigWithBackendLimit(config));
// src/storage/datastore/datastoreLifecycle.ts
// Tracks open/closing/closed state plus the number of in-flight operations,
// and lets close() wait until all active operations have drained.
var DatastoreLifecycle = class {
  constructor() {
    this.closed = false;
    this.closing = false;
    this.closeInFlight = null;
    this.activeOperationCount = 0;
    this.activeOperationsDrained = null;
    this.resolveActiveOperationsDrained = null;
  }
  isClosed() {
    return this.closed;
  }
  markClosing() {
    this.closing = true;
  }
  markClosed() {
    this.closed = true;
    this.closing = false;
  }
  getCloseInFlight() {
    return this.closeInFlight;
  }
  setCloseInFlight(closeInFlight) {
    this.closeInFlight = closeInFlight;
  }
  // Reject new work once a close has started (or completed).
  ensureOpen() {
    if (this.closed || this.closing) {
      throw new ClosedDatastoreError("Datastore has been closed.");
    }
  }
  beginOperation() {
    this.ensureOpen();
    this.activeOperationCount += 1;
  }
  // Decrement the in-flight counter; when it reaches zero, resolve the
  // shared drain promise if anyone is waiting on it.
  endOperation() {
    this.activeOperationCount -= 1;
    if (this.activeOperationCount !== 0 || this.resolveActiveOperationsDrained === null) {
      return;
    }
    const notifyDrained = this.resolveActiveOperationsDrained;
    this.resolveActiveOperationsDrained = null;
    this.activeOperationsDrained = null;
    notifyDrained();
  }
  // Resolves once no operations are in flight; callers share one promise.
  waitForActiveOperationsToDrain() {
    if (this.activeOperationCount === 0) {
      return Promise.resolve();
    }
    if (this.activeOperationsDrained === null) {
      this.activeOperationsDrained = new Promise((resolve) => {
        this.resolveActiveOperationsDrained = resolve;
      });
    }
    return this.activeOperationsDrained;
  }
};
// src/storage/datastore/mutationById.ts
// Fetch a record by entry id in its public shape, or null when absent.
var getPublicRecordById = (keyIndex, entryId) => {
  const entry = keyIndex.peekById(entryId);
  return entry === null ? null : toPublicRecord(entryId, entry.key, entry.value);
};
// Merge `patch` over the target record's payload. When validation is skipped
// the size is re-estimated directly; otherwise the merged payload goes
// through full validation and the key size is added back in.
var buildMergedPayload = (targetRecord, patch, entryKey, skipValidation) => {
  const merged = { ...targetRecord.payload, ...patch };
  if (skipValidation) {
    return {
      payload: merged,
      sizeBytes: estimateRecordSizeBytes(entryKey, merged)
    };
  }
  const validated = validateAndNormalizePayload(merged);
  return {
    payload: validated.payload,
    sizeBytes: validated.sizeBytes + estimateKeySizeBytes(entryKey)
  };
};
// Apply a payload patch to the record with `options.id`. Returns an updated
// flag, the new running datastore size, and a byte delta used for durability
// signalling. Growth is bounded by capacityState.maxSizeBytes when set.
var updateRecordById = (options) => {
  const entry = options.keyIndex.peekById(options.id);
  if (entry === null) {
    return { updated: false, currentSizeBytes: options.currentSizeBytes, durabilitySignalBytes: 0 };
  }
  const targetRecord = entry.value;
  const mergedResult = buildMergedPayload(targetRecord, options.patch, entry.key, options.skipPayloadValidation);
  const encodedDelta = mergedResult.sizeBytes - targetRecord.sizeBytes;
  // Only growth is checked against the capacity ceiling; shrinking updates
  // always fit.
  const wouldOverflow = options.capacityState !== null && encodedDelta > 0 && options.currentSizeBytes + encodedDelta > options.capacityState.maxSizeBytes;
  if (wouldOverflow) {
    throw new QuotaExceededError("updateById exceeds configured capacity.maxSize boundary.");
  }
  const updatedRecord = {
    payload: mergedResult.payload,
    sizeBytes: mergedResult.sizeBytes
  };
  if (options.keyIndex.updateById(options.id, updatedRecord) === null) {
    // peekById just saw the entry, so a failed update means the index lied.
    throw new IndexCorruptionError("Record index state is inconsistent during updateById.");
  }
  return {
    updated: true,
    currentSizeBytes: Math.max(0, options.currentSizeBytes + encodedDelta),
    durabilitySignalBytes: Math.abs(encodedDelta)
  };
};
// Remove the record with `options.id`, reclaiming its bytes from the running
// datastore size.
var deleteRecordById = (options) => {
  const removed = options.keyIndex.removeById(options.id);
  if (removed === null) {
    return {
      deleted: false,
      currentSizeBytes: options.currentSizeBytes,
      durabilitySignalBytes: 0
    };
  }
  const freedBytes = removed.value.sizeBytes;
  return {
    deleted: true,
    currentSizeBytes: Math.max(0, options.currentSizeBytes - freedBytes),
    durabilitySignalBytes: freedBytes
  };
};
710
+
711
+ // src/storage/datastore/datastoreClose.ts
712
+ var readAggregateErrorConstructor = () => {
713
+ const candidate = globalThis.AggregateError;
714
+ if (typeof candidate !== "function") {
715
+ return null;
716
+ }
717
+ return candidate;
718
+ };
719
+ var createCloseAggregateError = (deferredError, closeError) => {
720
+ const aggregateErrorConstructor = readAggregateErrorConstructor();
721
+ if (aggregateErrorConstructor !== null) {
722
+ return new aggregateErrorConstructor(
723
+ [deferredError, closeError],
724
+ "Datastore close failed with multiple errors."
725
+ );
726
+ }
727
+ const fallbackError = new Error(
728
+ "Datastore close failed with multiple errors."
729
+ );
730
+ fallbackError.errors = [deferredError, closeError];
731
+ return fallbackError;
732
+ };
733
// Idempotent close entry point. A caller that observes an in-flight close
// awaits that same operation instead of starting a second one; a caller on
// an already-closed datastore returns immediately.
var closeDatastore = async (options) => {
  const { lifecycle } = options;
  if (lifecycle.isClosed()) {
    return;
  }
  const inFlight = lifecycle.getCloseInFlight();
  if (inFlight !== null) {
    // Another close already started; piggyback on it.
    await inFlight;
    return;
  }
  lifecycle.markClosing();
  // Start the close first, then publish the handle so concurrent callers can
  // await it; the handle is cleared once the close settles either way.
  const closing = performClose(options).finally(() => {
    lifecycle.setCloseInFlight(null);
  });
  lifecycle.setCloseInFlight(closing);
  await closing;
};
749
// Performs the actual datastore teardown: waits for a pending backend init
// and for active operations to drain, closes the backend, clears in-memory
// state, and finally rethrows any deferred init error and/or close error
// (merged via createCloseAggregateError when both occurred).
var performClose = async (options) => {
  // Let a still-running backend init settle before tearing anything down.
  const pendingInit = options.getPendingInit();
  if (pendingInit !== null) {
    await pendingInit;
  }
  await options.lifecycle.waitForActiveOperationsToDrain();
  // A failed init is deferred until close time; start with that error, if any.
  let deferredError = options.getPendingInitError();
  try {
    await options.getBackendController()?.close();
  } catch (error) {
    const closeError = toErrorInstance(
      error,
      "Datastore close failed with a non-Error value."
    );
    if (deferredError === null) {
      deferredError = closeError;
    } else {
      // Both an init error and a close error: surface them together.
      deferredError = createCloseAggregateError(deferredError, closeError);
    }
  }
  // Teardown always runs, even when an error will be rethrown below.
  options.setBackendController(null);
  options.setPendingInitError(null);
  options.lifecycle.markClosed();
  options.clearInMemoryState();
  if (deferredError !== null) {
    throw deferredError;
  }
};
777
+
778
+ // src/storage/btree/recordKeyIndexBTree.ts
779
+ var import_frostpillar_btree = require("@frostpillar/frostpillar-btree");
780
// Validates and clamps a user-supplied comparator result to -1 | 0 | 1.
// Throws IndexCorruptionError when the comparator returned anything other
// than a finite integer (NaN, ±Infinity, fractional values, non-numbers).
var normalizeComparatorResult = (compared) => {
  // Number.isInteger already rejects NaN, ±Infinity, fractional values and
  // non-number inputs, so the previous extra Number.isFinite check was
  // redundant and has been dropped.
  if (!Number.isInteger(compared)) {
    throw new IndexCorruptionError(
      "key comparator must return a finite integer result."
    );
  }
  if (compared === 0) {
    return 0;
  }
  return compared < 0 ? -1 : 1;
};
791
// Clamps a numeric comparator result to the canonical -1 | 0 | 1 without
// validating it; NaN handling (which falls through to 1 here) is the
// caller's concern.
var clampComparatorResult = (compared) => {
  if (compared === 0) {
    return 0;
  }
  if (compared < 0) {
    return -1;
  }
  return 1;
};
795
// Wraps a user comparator so the B-tree only ever sees -1 | 0 | 1, and a
// NaN result is surfaced as index corruption instead of silently
// misordering entries.
var buildWrappedComparator = (compareKeys) => {
  return (left, right) => {
    const raw = compareKeys(left, right);
    if (Number.isNaN(raw)) {
      throw new IndexCorruptionError("key comparator must not return NaN.");
    }
    return clampComparatorResult(raw);
  };
};
804
// Thin adapter around @frostpillar/frostpillar-btree's InMemoryBTree that
// (a) wraps the user comparator so the tree only sees -1 | 0 | 1 and NaN is
// rejected, and (b) enables entry-id lookups so records can be addressed by
// stable id (peekById/updateById/removeById) as well as by key.
var RecordKeyIndexBTree = class _RecordKeyIndexBTree {
  constructor(config) {
    const wrappedComparator = buildWrappedComparator(config.compareKeys);
    const treeConfig = {
      compareKeys: wrappedComparator,
      // Duplicate keys are allowed unless the caller opted into another policy.
      duplicateKeys: config.duplicateKeys ?? "allow",
      // Entry-id lookup backs the *ById methods below.
      enableEntryIdLookup: true
    };
    this.tree = new import_frostpillar_btree.InMemoryBTree(treeConfig);
  }
  put(key, value) {
    return this.tree.put(key, value);
  }
  putMany(entries) {
    return this.tree.putMany(entries);
  }
  peekById(entryId) {
    return this.tree.peekById(entryId);
  }
  updateById(entryId, value) {
    return this.tree.updateById(entryId, value);
  }
  removeById(entryId) {
    return this.tree.removeById(entryId);
  }
  rangeQuery(start, end) {
    return this.tree.range(start, end);
  }
  // Deletes every entry whose key falls inside [start, end], both bounds inclusive.
  deleteRange(start, end) {
    return this.tree.deleteRange(start, end, {
      lowerBound: "inclusive",
      upperBound: "inclusive"
    });
  }
  snapshot() {
    return this.tree.snapshot();
  }
  peekLast() {
    return this.tree.peekLast();
  }
  popFirst() {
    return this.tree.popFirst();
  }
  size() {
    return this.tree.size();
  }
  findFirst(key) {
    return this.tree.findFirst(key);
  }
  findLast(key) {
    return this.tree.findLast(key);
  }
  hasKey(key) {
    return this.tree.hasKey(key);
  }
  keys() {
    return this.tree.keys();
  }
  toJSON() {
    return this.tree.toJSON();
  }
  // Rehydrates an adapter from serialized tree JSON without running the
  // constructor (Object.create bypasses it). If the configured duplicate-key
  // policy differs from the one recorded in the JSON, the JSON config is
  // patched before deserialization so the restored tree follows the caller's
  // current policy.
  static fromJSON(json, config) {
    const wrappedComparator = buildWrappedComparator(config.compareKeys);
    const adapter = Object.create(_RecordKeyIndexBTree.prototype);
    const resolvedPolicy = config.duplicateKeys ?? "allow";
    const patchedJSON = resolvedPolicy !== json.config.duplicateKeys ? { ...json, config: { ...json.config, duplicateKeys: resolvedPolicy } } : json;
    adapter.tree = import_frostpillar_btree.InMemoryBTree.fromJSON(patchedJSON, wrappedComparator);
    return adapter;
  }
  clear() {
    this.tree.clear();
  }
};
877
+
878
+ // src/storage/datastore/datastoreKeyDefinition.ts
879
// Asserts that `value` is a non-empty string and returns it unchanged;
// `fieldName` is only used to build the ValidationError message.
var ensureNonEmptyStringKey = (value, fieldName) => {
  if (typeof value === "string" && value.length > 0) {
    return value;
  }
  const reason = typeof value !== "string" ? `${fieldName} must be a string.` : `${fieldName} must be a non-empty string.`;
  throw new ValidationError(reason);
};
888
// Default key behavior when no custom definition is configured: keys are
// non-empty strings ordered by the `<`/`>` relational operators (UTF-16 code
// unit order), and (de)serialization is the identity plus validation.
var DEFAULT_STRING_KEY_DEFINITION = {
  normalize: (value, fieldName) => ensureNonEmptyStringKey(value, fieldName),
  compare: (left, right) => {
    if (left < right) {
      return -1;
    }
    if (right < left) {
      return 1;
    }
    return 0;
  },
  serialize: (key) => ensureNonEmptyStringKey(key, "key"),
  deserialize: (serialized) => ensureNonEmptyStringKey(serialized, "serialized key")
};
908
// Ensures every required hook on a custom key definition is a function,
// checking in the documented order: normalize, compare, serialize, deserialize.
var validateKeyDefinition = (definition) => {
  for (const hook of ["normalize", "compare", "serialize", "deserialize"]) {
    if (typeof definition[hook] !== "function") {
      throw new ConfigurationError(`config.key.${hook} must be a function.`);
    }
  }
};
922
// Picks the key definition for a datastore: a caller-supplied one (validated
// first) or the built-in non-empty-string default.
var resolveKeyDefinition = (config) => {
  if (config.key !== void 0) {
    validateKeyDefinition(config.key);
    return config.key;
  }
  return DEFAULT_STRING_KEY_DEFINITION;
};
929
// Extracts the raw key from an insert record along with the field name it
// came from (used in validation error messages). Records without an own
// "key" property are rejected.
var readRawInsertKey = (rawRecord) => {
  if (!Object.prototype.hasOwnProperty.call(rawRecord, "key")) {
    throw new ValidationError('Record must include "key".');
  }
  return { rawKey: rawRecord.key, keyFieldName: "key" };
};
938
+
939
+ // src/storage/datastore/Datastore.ts
940
// Ordered key/value datastore backed by an in-memory B-tree index, with an
// optional durable driver. Reads go through runWithOpen (shared lifecycle
// tracking, sync fast path); all writes serialize through a single
// AsyncMutex via runWithOpenExclusive. Size accounting (currentSizeBytes)
// is only maintained when a capacity limit or a backend is configured —
// otherwise records are stored with sizeBytes: 0 to skip estimation cost.
var Datastore = class {
  // config: { key?, duplicateKeys?, capacity?, skipPayloadValidation?,
  //           driver?, autoCommit? }. A driver may init synchronously or
  // return a promise; in the async case the init error is deferred into
  // pendingInitError and surfaced by the next operation (or at close).
  constructor(config) {
    this.errorListeners = /* @__PURE__ */ new Set();
    this.keyDefinition = resolveKeyDefinition(config);
    const duplicateKeys = parseDuplicateKeyConfig(config.duplicateKeys);
    this.duplicateKeyPolicy = duplicateKeys;
    this.keyIndex = new RecordKeyIndexBTree({
      // Late-bound so a (hypothetical) keyDefinition swap is picked up.
      compareKeys: (left, right) => this.keyDefinition.compare(left, right),
      duplicateKeys
    });
    this.capacityState = resolveCapacityState(config);
    this.skipPayloadValidation = config.skipPayloadValidation === true;
    this.lifecycle = new DatastoreLifecycle();
    this.writeMutex = new AsyncMutex();
    this.currentSizeBytes = 0;
    this.backendController = null;
    this.pendingInit = null;
    this.pendingInitError = null;
    if (config.driver === void 0) {
      // autoCommit without a driver has nothing to commit to — reject early.
      if (config.autoCommit !== void 0) {
        throw new ConfigurationError(
          "autoCommit requires a durable driver."
        );
      }
      return;
    }
    const backendInit = config.driver.init({
      getSnapshot: () => ({
        treeJSON: this.keyIndex.toJSON()
      }),
      autoCommit: config.autoCommit,
      onAutoCommitError: (error) => {
        emitAutoCommitErrorToListeners(this.errorListeners, error);
      }
    });
    if (!isPromiseLike(backendInit)) {
      // Synchronous driver: state is ready before the constructor returns.
      this.applyBackendInitResult(backendInit);
      return;
    }
    // Async driver: record the in-flight init; failures are deferred to
    // pendingInitError rather than thrown from the constructor.
    this.pendingInit = Promise.resolve(backendInit).then((result) => {
      this.applyBackendInitResult(result);
    }).catch((error) => {
      this.pendingInitError = toErrorInstance(error, "Datastore backend initialization failed with a non-Error value.");
    }).finally(() => {
      this.pendingInit = null;
    });
  }
  // Inserts one record ({ key, payload }); exclusive write.
  put(record) {
    return this.runWithOpenExclusive(() => this.putSingle(record));
  }
  // Returns ALL records stored under `key` (duplicates allowed by default).
  get(key) {
    return this.runWithOpen(() => {
      const normalizedKey = this.keyDefinition.normalize(key, "key");
      return this.keyIndex.rangeQuery(normalizedKey, normalizedKey).map((e) => toPublicRecord(e.entryId, e.key, e.value));
    });
  }
  // First record under `key` in index order, or null.
  getFirst(key) {
    return this.runWithOpen(() => {
      const normalizedKey = this.keyDefinition.normalize(key, "key");
      const entry = this.keyIndex.findFirst(normalizedKey);
      if (entry === null) {
        return null;
      }
      return toPublicRecord(entry.entryId, entry.key, entry.value);
    });
  }
  // Last record under `key` in index order, or null.
  getLast(key) {
    return this.runWithOpen(() => {
      const normalizedKey = this.keyDefinition.normalize(key, "key");
      const entry = this.keyIndex.findLast(normalizedKey);
      if (entry === null) {
        return null;
      }
      return toPublicRecord(entry.entryId, entry.key, entry.value);
    });
  }
  // Deletes every record under `key`; resolves to the removed count.
  delete(key) {
    return this.runWithOpenExclusive(() => this.deleteSingle(key));
  }
  has(key) {
    return this.runWithOpen(() => {
      const normalizedKey = this.keyDefinition.normalize(key, "key");
      return this.keyIndex.hasKey(normalizedKey);
    });
  }
  getAll() {
    return this.runWithOpen(() => {
      return this.keyIndex.snapshot().map((e) => toPublicRecord(e.entryId, e.key, e.value));
    });
  }
  // Inclusive range query; throws InvalidQueryRangeError when start > end.
  getRange(start, end) {
    return this.runWithOpen(() => {
      const normalizedStart = this.keyDefinition.normalize(start, "start");
      const normalizedEnd = this.keyDefinition.normalize(end, "end");
      if (normalizeComparatorResult(this.keyDefinition.compare(normalizedStart, normalizedEnd)) > 0) {
        throw new InvalidQueryRangeError("start must be <= end.");
      }
      return this.keyIndex.rangeQuery(normalizedStart, normalizedEnd).map((e) => toPublicRecord(e.entryId, e.key, e.value));
    });
  }
  // Batch lookup. Keys are sorted, and adjacent equal keys are skipped so
  // each distinct key is queried once; results come back in sorted key order,
  // not in the caller's input order.
  getMany(keys) {
    return this.runWithOpen(() => {
      const normalizedKeys = [];
      for (const key of keys) {
        normalizedKeys.push(this.keyDefinition.normalize(key, "key"));
      }
      normalizedKeys.sort(
        (left, right) => clampComparatorResult(this.keyDefinition.compare(left, right))
      );
      const results = [];
      let lastKey = void 0;
      for (let i = 0; i < normalizedKeys.length; i += 1) {
        // Skip a key equal to the previous one (already queried).
        if (i > 0 && clampComparatorResult(this.keyDefinition.compare(normalizedKeys[i], lastKey)) === 0) {
          continue;
        }
        lastKey = normalizedKeys[i];
        const entries = this.keyIndex.rangeQuery(normalizedKeys[i], normalizedKeys[i]);
        for (const entry of entries) {
          results.push(toPublicRecord(entry.entryId, entry.key, entry.value));
        }
      }
      return results;
    });
  }
  // Batch insert. Three paths: (1) no capacity and no backend — skip size
  // accounting entirely; (2) no capacity limit, or turnover policy — insert
  // one at a time via putSingle; (3) strict capacity policy — validate the
  // whole batch up-front (all-or-nothing) via putManyStrict.
  putMany(records) {
    return this.runWithOpenExclusive(async () => {
      if (this.capacityState === null && this.backendController === null) {
        for (const record of records) {
          const { rawKey, keyFieldName } = readRawInsertKey(record);
          const normalizedKey = this.keyDefinition.normalize(rawKey, keyFieldName);
          if (this.duplicateKeyPolicy === "reject" && this.keyIndex.findFirst(normalizedKey) !== null) {
            throw new ValidationError("Duplicate key rejected: a record with this key already exists.");
          }
          const normalizedPayload = this.skipPayloadValidation ? record.payload : validateAndNormalizePayload(record.payload).payload;
          // sizeBytes is 0 here: size accounting is unused on this path.
          this.keyIndex.put(normalizedKey, { payload: normalizedPayload, sizeBytes: 0 });
        }
        return;
      }
      if (this.capacityState === null) {
        for (const record of records) {
          await this.putSingle(record);
        }
        return;
      }
      if (this.capacityState.policy === "turnover") {
        for (const record of records) {
          await this.putSingle(record);
        }
        return;
      }
      await this.putManyStrict(records);
    });
  }
  // Batch delete; resolves to the total number of removed entries.
  deleteMany(keys) {
    return this.runWithOpenExclusive(async () => {
      if (this.backendController === null) {
        // No backend: pure in-memory loop, no durability notifications.
        let totalRemoved2 = 0;
        for (const key of keys) {
          const normalizedKey = this.keyDefinition.normalize(key, "key");
          const entries = this.keyIndex.rangeQuery(normalizedKey, normalizedKey);
          if (entries.length === 0) {
            continue;
          }
          let freedBytes = 0;
          for (const entry of entries) {
            freedBytes += entry.value.sizeBytes;
          }
          totalRemoved2 += this.keyIndex.deleteRange(normalizedKey, normalizedKey);
          this.currentSizeBytes = Math.max(0, this.currentSizeBytes - freedBytes);
        }
        return totalRemoved2;
      }
      let totalRemoved = 0;
      for (const key of keys) {
        totalRemoved += await this.deleteSingle(key);
      }
      return totalRemoved;
    });
  }
  clear() {
    return this.runWithOpenExclusive(async () => {
      this.keyIndex.clear();
      this.currentSizeBytes = 0;
      await this.backendController?.handleCleared();
    });
  }
  // Total entry count (duplicates counted individually).
  count() {
    return this.runWithOpen(() => {
      return this.keyIndex.size();
    });
  }
  // Distinct keys in index order; consecutive equal keys are collapsed.
  keys() {
    return this.runWithOpen(() => {
      const distinctKeys = [];
      let lastKey = void 0;
      let isFirst = true;
      for (const key of this.keyIndex.keys()) {
        if (isFirst || clampComparatorResult(this.keyDefinition.compare(key, lastKey)) !== 0) {
          distinctKeys.push(key);
          lastKey = key;
          isFirst = false;
        }
      }
      return distinctKeys;
    });
  }
  getById(id) {
    return this.runWithOpen(() => {
      return getPublicRecordById(this.keyIndex, id);
    });
  }
  // Patches one record in place by entry id; resolves true when it existed.
  updateById(id, patch) {
    return this.runWithOpenExclusive(async () => {
      const result = updateRecordById({
        keyIndex: this.keyIndex,
        id,
        patch,
        capacityState: this.capacityState,
        currentSizeBytes: this.currentSizeBytes,
        skipPayloadValidation: this.skipPayloadValidation
      });
      if (!result.updated) {
        return false;
      }
      this.currentSizeBytes = result.currentSizeBytes;
      // NOTE(review): deletes/updates also report through
      // handleRecordAppended — it appears to be the generic "bytes changed"
      // durability signal, not insert-specific; confirm with the controller.
      await this.backendController?.handleRecordAppended(
        result.durabilitySignalBytes
      );
      return true;
    });
  }
  // Removes one record by entry id; resolves true when it existed.
  deleteById(id) {
    return this.runWithOpenExclusive(async () => {
      const result = deleteRecordById({
        keyIndex: this.keyIndex,
        id,
        currentSizeBytes: this.currentSizeBytes
      });
      if (!result.deleted) {
        return false;
      }
      this.currentSizeBytes = result.currentSizeBytes;
      await this.backendController?.handleRecordAppended(
        result.durabilitySignalBytes
      );
      return true;
    });
  }
  // Forces a durable commit now (no-op without a backend).
  commit() {
    return this.runWithOpenExclusive(async () => {
      await this.backendController?.commitNow();
    });
  }
  // Subscribes to auto-commit errors; returns an unsubscribe function.
  on(event, listener) {
    if (event !== "error") {
      throw new ValidationError('Only "error" event is supported.');
    }
    this.errorListeners.add(listener);
    return () => {
      this.off(event, listener);
    };
  }
  off(event, listener) {
    if (event !== "error") {
      throw new ValidationError('Only "error" event is supported.');
    }
    this.errorListeners.delete(listener);
  }
  // Idempotent shutdown; see closeDatastore for the concurrency contract.
  async close() {
    await closeDatastore({
      lifecycle: this.lifecycle,
      getPendingInit: () => this.pendingInit,
      getPendingInitError: () => this.pendingInitError,
      setPendingInitError: (pendingInitError) => {
        this.pendingInitError = pendingInitError;
      },
      getBackendController: () => this.backendController,
      setBackendController: (backendController) => {
        this.backendController = backendController;
      },
      clearInMemoryState: () => {
        this.keyIndex.clear();
        this.errorListeners.clear();
      }
    });
  }
  // Validates/normalizes a payload and estimates its encoded size (key bytes
  // included). With skipPayloadValidation the payload passes through as-is
  // and the whole record size is estimated directly.
  resolvePayload(record, normalizedKey) {
    if (this.skipPayloadValidation) {
      const payload = record.payload;
      return { payload, encodedBytes: estimateRecordSizeBytes(normalizedKey, payload) };
    }
    const validationResult = validateAndNormalizePayload(record.payload);
    const keyBytes = estimateKeySizeBytes(normalizedKey);
    return { payload: validationResult.payload, encodedBytes: validationResult.sizeBytes + keyBytes };
  }
  // Core single insert: duplicate policy, capacity enforcement (strict
  // rejection or turnover eviction), size accounting, backend notification.
  async putSingle(record) {
    const { rawKey, keyFieldName } = readRawInsertKey(record);
    const normalizedKey = this.keyDefinition.normalize(rawKey, keyFieldName);
    if (this.duplicateKeyPolicy === "reject" && this.keyIndex.findFirst(normalizedKey) !== null) {
      throw new ValidationError(
        "Duplicate key rejected: a record with this key already exists."
      );
    }
    if (this.capacityState === null && this.backendController === null) {
      // Fast path: no capacity and no backend means sizes are never read.
      const normalizedPayload2 = this.skipPayloadValidation ? record.payload : validateAndNormalizePayload(record.payload).payload;
      this.keyIndex.put(normalizedKey, { payload: normalizedPayload2, sizeBytes: 0 });
      return;
    }
    const { payload: normalizedPayload, encodedBytes } = this.resolvePayload(record, normalizedKey);
    if (this.capacityState === null) {
      this.keyIndex.put(normalizedKey, { payload: normalizedPayload, sizeBytes: encodedBytes });
      await this.backendController.handleRecordAppended(encodedBytes);
      return;
    }
    const persistedRecord = { payload: normalizedPayload, sizeBytes: encodedBytes };
    // A single record larger than the whole budget can never fit.
    if (encodedBytes > this.capacityState.maxSizeBytes) {
      throw new QuotaExceededError("Record exceeds configured capacity.maxSize boundary.");
    }
    if (this.duplicateKeyPolicy === "replace") {
      // Reclaim the replaced record's bytes before enforcing capacity.
      const existing = this.keyIndex.findFirst(normalizedKey);
      if (existing !== null) {
        this.currentSizeBytes = Math.max(0, this.currentSizeBytes - existing.value.sizeBytes);
        this.keyIndex.removeById(existing.entryId);
      }
    }
    this.currentSizeBytes = enforceCapacityPolicy(
      this.capacityState,
      this.currentSizeBytes,
      encodedBytes,
      () => this.keyIndex.size(),
      () => {
        // Turnover eviction callback: drop the smallest-keyed entry and
        // report how many bytes it freed.
        const evicted = this.keyIndex.popFirst();
        if (evicted === null) {
          throw new IndexCorruptionError("Record buffer reported empty state during turnover eviction.");
        }
        return evicted.value.sizeBytes;
      }
    );
    this.keyIndex.put(normalizedKey, persistedRecord);
    this.currentSizeBytes = Math.max(0, this.currentSizeBytes + encodedBytes);
    await this.backendController?.handleRecordAppended(encodedBytes);
  }
  // Strict-capacity batch insert: normalizes and sorts the batch (stable by
  // original index), pre-computes the net size delta, and rejects the WHOLE
  // batch before mutating anything if it would exceed maxSize.
  async putManyStrict(records) {
    const capacityState = this.capacityState;
    const compare = this.keyDefinition.compare;
    const tagged = [];
    for (let i = 0; i < records.length; i += 1) {
      const { rawKey, keyFieldName } = readRawInsertKey(records[i]);
      tagged.push({ idx: i, normalizedKey: this.keyDefinition.normalize(rawKey, keyFieldName), record: records[i] });
    }
    tagged.sort((a, b) => {
      const cmp = clampComparatorResult(compare(a.normalizedKey, b.normalizedKey));
      // Tie-break on original position so duplicate handling stays stable.
      return cmp !== 0 ? cmp : a.idx - b.idx;
    });
    const { prepared, totalBatchDelta } = this.buildStrictBatchEntries(tagged, compare, capacityState.maxSizeBytes);
    if (this.currentSizeBytes + totalBatchDelta > capacityState.maxSizeBytes) {
      throw new QuotaExceededError("Insert exceeds configured capacity.maxSize under strict policy.");
    }
    let effectiveTotalDelta = 0;
    let totalEncodedBytes = 0;
    for (const { normalizedKey, persistedRecord, encodedBytes, replacedBytes } of prepared) {
      // Re-check replacement at apply time: if the pre-existing entry is
      // gone by now, its bytes must not be subtracted.
      const actualReplaced = replacedBytes > 0 && this.keyIndex.findFirst(normalizedKey) === null ? 0 : replacedBytes;
      effectiveTotalDelta += encodedBytes - actualReplaced;
      totalEncodedBytes += encodedBytes;
      this.keyIndex.put(normalizedKey, persistedRecord);
    }
    this.currentSizeBytes = Math.max(0, this.currentSizeBytes + effectiveTotalDelta);
    await this.backendController?.handleRecordAppended(totalEncodedBytes);
  }
  // Dry-run pass over a key-sorted batch: validates payloads, applies the
  // duplicate policy (including duplicates WITHIN the batch, which are
  // adjacent after sorting), and accumulates the net size delta. Mutates
  // nothing; putManyStrict applies the returned entries afterwards.
  buildStrictBatchEntries(tagged, compare, maxSizeBytes) {
    const prepared = [];
    let totalBatchDelta = 0;
    for (let i = 0; i < tagged.length; i += 1) {
      const { normalizedKey, record } = tagged[i];
      const isIntraBatchDuplicate = i > 0 && clampComparatorResult(compare(tagged[i - 1].normalizedKey, normalizedKey)) === 0;
      if (this.duplicateKeyPolicy === "reject") {
        if (isIntraBatchDuplicate || this.keyIndex.findFirst(normalizedKey) !== null) {
          throw new ValidationError("Duplicate key rejected: a record with this key already exists.");
        }
      }
      const { payload: normalizedPayload, encodedBytes } = this.resolvePayload(record, normalizedKey);
      if (encodedBytes > maxSizeBytes) {
        throw new QuotaExceededError("Record exceeds configured capacity.maxSize boundary.");
      }
      let replacedBytes = 0;
      if (this.duplicateKeyPolicy === "replace" && isIntraBatchDuplicate) {
        // Later batch entry wins: undo the earlier entry's delta and inherit
        // whatever pre-existing record IT was replacing.
        const prev = prepared[prepared.length - 1];
        totalBatchDelta -= prev.encodedBytes - prev.replacedBytes;
        replacedBytes = prev.replacedBytes;
        prepared.pop();
      } else if (this.duplicateKeyPolicy === "replace") {
        const existing = this.keyIndex.findFirst(normalizedKey);
        replacedBytes = existing !== null ? existing.value.sizeBytes : 0;
      }
      const persistedRecord = { payload: normalizedPayload, sizeBytes: encodedBytes };
      totalBatchDelta += encodedBytes - replacedBytes;
      prepared.push({ normalizedKey, persistedRecord, encodedBytes, replacedBytes });
    }
    return { prepared, totalBatchDelta };
  }
  // Deletes all entries under one key, updates size accounting, and signals
  // the backend with the number of freed bytes. Returns the removed count.
  async deleteSingle(key) {
    const normalizedKey = this.keyDefinition.normalize(key, "key");
    const entries = this.keyIndex.rangeQuery(normalizedKey, normalizedKey);
    if (entries.length === 0) {
      return 0;
    }
    let freedBytes = 0;
    for (const entry of entries) {
      freedBytes += entry.value.sizeBytes;
    }
    const removedCount = this.keyIndex.deleteRange(normalizedKey, normalizedKey);
    this.currentSizeBytes = Math.max(0, this.currentSizeBytes - freedBytes);
    await this.backendController?.handleRecordAppended(freedBytes);
    return removedCount;
  }
  // P7: Synchronous fast-path — avoids async/Promise overhead for read operations
  // when no pending init exists. Always returns a Promise to callers, but the
  // operation itself runs synchronously when possible.
  runWithOpen(operation) {
    if (this.pendingInit !== null) {
      return this.pendingInit.then(() => {
        if (this.pendingInitError !== null) {
          throw this.pendingInitError;
        }
        return this.executeWithLifecycle(operation);
      });
    }
    if (this.pendingInitError !== null) {
      // Deferred async-init failure surfaces on the next operation.
      return Promise.reject(this.pendingInitError);
    }
    try {
      return Promise.resolve(this.executeWithLifecycle(operation));
    } catch (error) {
      return Promise.reject(error instanceof Error ? error : new Error(String(error)));
    }
  }
  // Brackets an operation with lifecycle begin/end so close() can wait for
  // active operations to drain; handles both sync and promise-returning ops.
  executeWithLifecycle(operation) {
    this.lifecycle.beginOperation();
    try {
      const result = operation();
      if (isPromiseLike(result)) {
        return Promise.resolve(result).then(
          (value) => {
            this.lifecycle.endOperation();
            return value;
          },
          (error) => {
            this.lifecycle.endOperation();
            throw error;
          }
        );
      }
      this.lifecycle.endOperation();
      return result;
    } catch (error) {
      this.lifecycle.endOperation();
      throw error;
    }
  }
  // Write-path wrapper: serializes operations through the write mutex.
  async runWithOpenExclusive(operation) {
    const release = await this.writeMutex.acquire();
    try {
      return await this.runWithOpen(operation);
    } finally {
      release();
    }
  }
  // Adopts the driver's init result: restores the persisted index (if any),
  // backfills legacy entries missing sizeBytes, and installs the controller.
  applyBackendInitResult(result) {
    if (result.initialTreeJSON !== null) {
      this.keyIndex = RecordKeyIndexBTree.fromJSON(
        result.initialTreeJSON,
        {
          compareKeys: (left, right) => {
            return this.keyDefinition.compare(left, right);
          },
          duplicateKeys: this.duplicateKeyPolicy
        }
      );
      this.backfillMissingSizeBytes();
    }
    this.currentSizeBytes = result.initialCurrentSizeBytes;
    this.backendController = result.controller;
  }
  // Migration aid: older snapshots may lack per-record sizeBytes; re-estimate
  // and patch those entries in place.
  backfillMissingSizeBytes() {
    for (const entry of this.keyIndex.snapshot()) {
      if (typeof entry.value.sizeBytes !== "number") {
        const patched = {
          payload: entry.value.payload,
          sizeBytes: estimateRecordSizeBytes(entry.key, entry.value.payload)
        };
        this.keyIndex.updateById(entry.entryId, patched);
      }
    }
  }
};
1434
// Duck-typed thenable detection: any non-null object or function exposing a
// callable `then` counts as promise-like.
var isPromiseLike = (value) => {
  if (value === null) {
    return false;
  }
  const valueType = typeof value;
  if (valueType !== "object" && valueType !== "function") {
    return false;
  }
  return typeof value.then === "function";
};
1440
+
1441
+ // src/storage/backend/asyncDurableAutoCommitController.ts
1442
// Coalescing auto-commit controller for async durable drivers. Tracks bytes
// written since the last commit and flushes them via executeSingleCommit
// (NOTE(review): not defined in this class — presumably supplied by a
// driver-specific subclass; confirm). Commits run one at a time through a
// single loop; "foreground" requests propagate errors to the caller while
// "background" requests report them through onAutoCommitError.
var AsyncDurableAutoCommitController = class {
  // autoCommit: { frequency: "immediate" | "scheduled" | ..., intervalMs,
  //               maxPendingBytes } — shape inferred from usage below.
  constructor(autoCommit, onAutoCommitError) {
    this.autoCommit = autoCommit;
    this.onAutoCommitError = onAutoCommitError;
    this.pendingAutoCommitBytes = 0;
    this.dirtyFromClear = false;
    this.autoCommitTimer = null;
    this.commitInFlight = null;
    this.pendingForegroundCommitRequest = false;
    this.pendingBackgroundCommitRequest = false;
    this.closed = false;
    this.startAutoCommitSchedule();
  }
  // Called after every size-changing operation with the affected byte count.
  // Immediate mode commits now; otherwise bytes accumulate until the
  // maxPendingBytes threshold forces a foreground commit.
  handleRecordAppended(encodedBytes) {
    if (this.autoCommit.frequency === "immediate") {
      return this.commitNow();
    }
    this.pendingAutoCommitBytes += encodedBytes;
    if (this.autoCommit.maxPendingBytes !== null && this.pendingAutoCommitBytes >= this.autoCommit.maxPendingBytes) {
      return this.queueCommitRequest("foreground");
    }
    return Promise.resolve();
  }
  // A clear() makes the store dirty even with zero pending bytes.
  handleCleared() {
    this.dirtyFromClear = true;
    if (this.autoCommit.frequency === "immediate") {
      return this.commitNow();
    }
    return this.queueCommitRequest("background");
  }
  // Explicit commit; awaits the commit loop and surfaces its errors.
  commitNow() {
    return this.queueCommitRequest("foreground");
  }
  // Idempotent shutdown: stop the timer, wait out an in-flight commit, run a
  // final flush if anything is still pending, then the drain hook. Flush and
  // drain errors are both reported — together via an aggregate when both fail.
  async close() {
    if (this.closed) {
      return;
    }
    this.closed = true;
    this.stopAutoCommitSchedule();
    await this.waitForCommitSettlement();
    let flushError = null;
    if (this.pendingAutoCommitBytes > 0 || this.dirtyFromClear) {
      try {
        await this.executeSingleCommit();
        this.pendingAutoCommitBytes = 0;
        this.dirtyFromClear = false;
      } catch (error) {
        flushError = toErrorInstance(
          error,
          "Final close-time flush commit failed with a non-Error value."
        );
      }
    }
    let drainError = null;
    try {
      await this.onCloseAfterDrain();
    } catch (error) {
      drainError = toErrorInstance(
        error,
        "onCloseAfterDrain failed with a non-Error value."
      );
    }
    if (flushError !== null && drainError !== null) {
      throw createCloseAggregateError2(flushError, drainError);
    }
    if (flushError !== null) {
      throw flushError;
    }
    if (drainError !== null) {
      throw drainError;
    }
  }
  getPendingAutoCommitBytes() {
    return this.pendingAutoCommitBytes;
  }
  // Post-drain hook; base implementation is a no-op for subclasses to override.
  onCloseAfterDrain() {
    return Promise.resolve();
  }
  // Resolves once any in-flight commit settles; its outcome is ignored here.
  waitForCommitSettlement() {
    if (this.commitInFlight === null) {
      return Promise.resolve();
    }
    return this.commitInFlight.then(() => void 0).catch(() => void 0);
  }
  // Records a commit request and starts the loop if idle. Foreground callers
  // await the loop (and see its errors); background callers fire-and-forget.
  queueCommitRequest(requestType) {
    if (requestType === "foreground") {
      this.pendingForegroundCommitRequest = true;
    } else {
      this.pendingBackgroundCommitRequest = true;
    }
    if (this.commitInFlight === null) {
      this.commitInFlight = this.runCommitLoop().finally(() => {
        this.commitInFlight = null;
      });
    }
    if (requestType === "background") {
      return Promise.resolve();
    }
    return this.commitInFlight;
  }
  // Single-consumer loop: drains request flags, commits, and repeats while
  // new requests arrived during the commit. Background-only requests with
  // nothing pending (and no clear) skip the commit entirely.
  async runCommitLoop() {
    let shouldContinue = true;
    while (shouldContinue) {
      const runForeground = this.pendingForegroundCommitRequest;
      const runBackground = this.pendingBackgroundCommitRequest;
      const runClear = this.dirtyFromClear;
      this.pendingForegroundCommitRequest = false;
      this.pendingBackgroundCommitRequest = false;
      this.dirtyFromClear = false;
      const shouldRunCommit = runForeground || runBackground && (this.pendingAutoCommitBytes > 0 || runClear);
      if (!shouldRunCommit) {
        shouldContinue = false;
        continue;
      }
      try {
        // Snapshot first: bytes appended DURING the commit stay pending.
        const committedPendingBytes = this.pendingAutoCommitBytes;
        await this.executeSingleCommit();
        this.pendingAutoCommitBytes = Math.max(
          0,
          this.pendingAutoCommitBytes - committedPendingBytes
        );
      } catch (error) {
        if (runClear) {
          // The clear was not persisted; mark it dirty again for a retry.
          this.dirtyFromClear = true;
        }
        if (runForeground) {
          // Foreground: the awaiting caller gets the error.
          throw toErrorInstance(
            error,
            "Foreground auto-commit failed with a non-Error value."
          );
        }
        // Background: report through the error listener channel instead.
        this.onAutoCommitError(error);
      }
      if (!this.pendingForegroundCommitRequest && !this.pendingBackgroundCommitRequest) {
        shouldContinue = false;
      }
    }
  }
  // Starts the periodic tick for "scheduled" mode; the timer is unref'd on
  // runtimes that support it so it never keeps the process alive.
  startAutoCommitSchedule() {
    if (this.autoCommit.frequency !== "scheduled" || this.autoCommit.intervalMs === null) {
      return;
    }
    this.autoCommitTimer = setInterval(() => {
      this.handleAutoCommitTick();
    }, this.autoCommit.intervalMs);
    if (typeof this.autoCommitTimer === "object" && this.autoCommitTimer !== null && "unref" in this.autoCommitTimer) {
      this.autoCommitTimer.unref();
    }
  }
  stopAutoCommitSchedule() {
    if (this.autoCommitTimer === null) {
      return;
    }
    clearInterval(this.autoCommitTimer);
    this.autoCommitTimer = null;
  }
  // Timer callback: queue a background commit only when there is work.
  handleAutoCommitTick() {
    if (this.closed) {
      return;
    }
    if (this.pendingAutoCommitBytes <= 0 && !this.dirtyFromClear) {
      return;
    }
    void this.queueCommitRequest("background");
  }
};
1608
// Resolves the platform's AggregateError constructor, or null when the
// runtime does not provide one.
var readAggregateErrorConstructor2 = () => {
  const ctor = globalThis.AggregateError;
  return typeof ctor === "function" ? ctor : null;
};
1615
// Merges the close-time flush error and drain error into one error value.
// Uses a native AggregateError when available; otherwise returns a plain
// Error with both causes attached on an `errors` array property.
var createCloseAggregateError2 = (flushError, drainError) => {
  const message = "Close failed: both final flush and drain produced errors.";
  const errors = [flushError, drainError];
  const AggregateCtor = readAggregateErrorConstructor2();
  if (AggregateCtor === null) {
    const fallbackError = new Error(message);
    fallbackError.errors = errors;
    return fallbackError;
  }
  return new AggregateCtor(errors, message);
};
1629
+
1630
+ // src/storage/drivers/localStorage/localStorageConfig.ts
1631
// Chunked-localStorage defaults: a snapshot is split into at most
// `maxChunks` pieces of `maxChunkChars` UTF-16 code units each.
var DEFAULT_LOCAL_STORAGE_MAX_CHUNK_CHARS = 32768;
var DEFAULT_LOCAL_STORAGE_MAX_CHUNKS = 64;
// Normalizes the optional localStorage driver config, filling in defaults
// and rejecting non-positive or unsafe-integer chunk settings.
var parseLocalStorageConfig = (config) => {
  const resolved = {
    keyPrefix: config?.keyPrefix ?? "frostpillar",
    databaseKey: config?.databaseKey ?? "default",
    maxChunkChars: config?.maxChunkChars ?? DEFAULT_LOCAL_STORAGE_MAX_CHUNK_CHARS,
    maxChunks: config?.maxChunks ?? DEFAULT_LOCAL_STORAGE_MAX_CHUNKS
  };
  if (!Number.isSafeInteger(resolved.maxChunkChars) || resolved.maxChunkChars <= 0) {
    throw new ConfigurationError(
      "localStorage.maxChunkChars must be a positive safe integer."
    );
  }
  if (!Number.isSafeInteger(resolved.maxChunks) || resolved.maxChunks <= 0) {
    throw new ConfigurationError(
      "localStorage.maxChunks must be a positive safe integer."
    );
  }
  return resolved;
};
1650
+
1651
// src/validation/metadata.ts
// Validates that `value` is a non-negative safe integer and returns it.
// Throws StorageEngineError naming the offending field and backend otherwise.
var parseNonNegativeSafeInteger = (value, fieldName, backendName) => {
  const isValid = typeof value === "number" && Number.isSafeInteger(value) && value >= 0;
  if (isValid) {
    return value;
  }
  throw new StorageEngineError(
    `${backendName} ${fieldName} must be a non-negative safe integer.`
  );
};
1660
+
1661
// src/storage/drivers/localStorage/localStorageLayout.ts
// Storage key under which the snapshot manifest lives.
var manifestKey = (keyPrefix, databaseKey) => `${keyPrefix}:ls:${databaseKey}:manifest`;
// Storage key for chunk `index` of snapshot generation `generation`.
var chunkKey = (keyPrefix, databaseKey, generation, index) => `${keyPrefix}:ls:${databaseKey}:g:${generation}:chunk:${index}`;
// Removes the chunk entries of `generation`. With a known chunk count the
// exact keys are removed unconditionally; with null, every possible slot up
// to maxChunks is probed and only existing entries are removed.
var cleanupGenerationChunks = (state, generation, knownChunkCount) => {
  const { adapter, keyPrefix, databaseKey, maxChunks } = state;
  if (knownChunkCount !== null) {
    for (let index = 0; index < knownChunkCount; index += 1) {
      adapter.removeItem(chunkKey(keyPrefix, databaseKey, generation, index));
    }
    return;
  }
  for (let index = 0; index < maxChunks; index += 1) {
    const candidate = chunkKey(keyPrefix, databaseKey, generation, index);
    if (adapter.getItem(candidate) !== null) {
      adapter.removeItem(candidate);
    }
  }
};
// Heuristic for browser quota errors thrown by Storage.setItem (standard
// name plus the legacy Firefox name).
var isQuotaBrowserError = (error) => {
  if (!(error instanceof Error)) {
    return false;
  }
  const { name } = error;
  return name === "QuotaExceededError" || name === "NS_ERROR_DOM_QUOTA_REACHED";
};
1692
+
1693
// src/storage/drivers/localStorage/localStorageBackend.ts
var LS_MAGIC = "FPLS_META";
var LS_VERSION = 2;
// Returns the global localStorage object, or null when it is absent or
// access throws (e.g. sandboxed contexts that deny storage access).
var detectGlobalLocalStorage = () => {
  try {
    const candidate = globalThis.localStorage;
    return candidate ?? null;
  } catch {
    return null;
  }
};
// Fresh in-memory backend state; generation/commit counters start at zero
// until a persisted snapshot is loaded.
var createLocalStorageBackendState = (adapter, keyPrefix, databaseKey, maxChunkChars, maxChunks) => ({
  adapter,
  keyPrefix,
  databaseKey,
  maxChunkChars,
  maxChunks,
  activeGeneration: 0,
  commitId: 0,
  activeChunkCount: 0
});
// Parses and validates the manifest JSON. Rejects malformed JSON, foreign or
// stale formats, and chunk counts the configured maxChunks cannot satisfy.
var parseLocalStorageManifest = (manifestRaw, maxChunks) => {
  let parsed;
  try {
    parsed = JSON.parse(manifestRaw);
  } catch {
    throw new StorageEngineError("localStorage manifest JSON is malformed.");
  }
  if (parsed.magic !== LS_MAGIC || parsed.version !== LS_VERSION) {
    throw new StorageEngineError(
      "localStorage manifest magic/version mismatch."
    );
  }
  const declaredChunks = parseNonNegativeSafeInteger(
    parsed.chunkCount,
    "manifest.chunkCount",
    "localStorage"
  );
  if (declaredChunks > maxChunks) {
    throw new StorageEngineError(
      `localStorage snapshot requires ${declaredChunks} chunks but maxChunks is ${maxChunks}.`
    );
  }
  return parsed;
};
// Reassembles the active generation's chunks into the tree JSON. Throws when
// a chunk is missing or the reassembled payload is not a plain JSON object.
var loadLocalStorageChunks = (state, activeGeneration, chunkCount) => {
  const parts = [];
  for (let index = 0; index < chunkCount; index += 1) {
    const cKey = chunkKey(
      state.keyPrefix,
      state.databaseKey,
      activeGeneration,
      index
    );
    const part = state.adapter.getItem(cKey);
    if (typeof part !== "string") {
      throw new StorageEngineError(
        `localStorage chunk "${cKey}" is missing or not a string.`
      );
    }
    parts.push(part);
  }
  const treeJson = parts.join("");
  let parsedTree;
  try {
    parsedTree = JSON.parse(treeJson);
  } catch {
    throw new StorageEngineError("localStorage chunk data JSON is malformed.");
  }
  if (parsedTree === null || typeof parsedTree !== "object" || Array.isArray(parsedTree)) {
    throw new PageCorruptionError("treeJSON must be a non-null plain object.");
  }
  return {
    treeJSON: parsedTree,
    rawJsonLength: computeUtf8ByteLength(treeJson)
  };
};
// Loads the persisted snapshot (if any) and primes the backend counters.
// Returns { treeJSON: null, currentSizeBytes: 0 } when nothing was stored.
var loadLocalStorageSnapshot = (state) => {
  const manifestRaw = state.adapter.getItem(
    manifestKey(state.keyPrefix, state.databaseKey)
  );
  if (manifestRaw === null) {
    return { treeJSON: null, currentSizeBytes: 0 };
  }
  const manifest = parseLocalStorageManifest(manifestRaw, state.maxChunks);
  const readField = (value, field) => parseNonNegativeSafeInteger(value, field, "localStorage");
  const activeGeneration = readField(manifest.activeGeneration, "manifest.activeGeneration");
  const commitId = readField(manifest.commitId, "manifest.commitId");
  const chunkCount = readField(manifest.chunkCount, "manifest.chunkCount");
  const { treeJSON, rawJsonLength } = loadLocalStorageChunks(state, activeGeneration, chunkCount);
  state.activeGeneration = activeGeneration;
  state.commitId = commitId;
  state.activeChunkCount = chunkCount;
  return { treeJSON, currentSizeBytes: rawJsonLength };
};
// Serializes treeJSON and slices it into fixed-size character chunks.
// Throws QuotaExceededError when more chunks than allowed would be needed.
var splitTreeJSONIntoChunks = (treeJSON, maxChunkChars, maxChunks, driverName) => {
  const dataJson = JSON.stringify(treeJSON);
  const chunks = [];
  for (let offset = 0; offset < dataJson.length; offset += maxChunkChars) {
    chunks.push(dataJson.slice(offset, offset + maxChunkChars));
  }
  if (chunks.length > maxChunks) {
    throw new QuotaExceededError(
      `${driverName} snapshot requires ${chunks.length} chunks but maxChunks is ${maxChunks}.`
    );
  }
  return chunks;
};
// Guards the monotonically increasing counters against leaving the safe
// integer range on the next increment.
var ensureCommitCountersSafe = (state) => {
  if (state.commitId >= Number.MAX_SAFE_INTEGER) {
    throw new StorageEngineError(
      "localStorage commitId has reached Number.MAX_SAFE_INTEGER."
    );
  }
  if (state.activeGeneration >= Number.MAX_SAFE_INTEGER) {
    throw new StorageEngineError(
      "localStorage activeGeneration has reached Number.MAX_SAFE_INTEGER."
    );
  }
};
// Computes everything a commit will write: next counters, chunked payload,
// and the serialized manifest pointing at the new generation.
var prepareLocalStorageCommit = (state, treeJSON) => {
  const nextCommitId = state.commitId + 1;
  const nextGeneration = state.activeGeneration + 1;
  const chunks = splitTreeJSONIntoChunks(
    treeJSON,
    state.maxChunkChars,
    state.maxChunks,
    "localStorage"
  );
  const manifestJson = JSON.stringify({
    magic: LS_MAGIC,
    version: LS_VERSION,
    activeGeneration: nextGeneration,
    commitId: nextCommitId,
    chunkCount: chunks.length
  });
  return { nextCommitId, nextGeneration, chunks, manifestJson };
};
// Writes the new generation's chunks, then flips the manifest to point at
// them. Maps browser quota failures to QuotaExceededError.
var writeLocalStorageCommit = (state, preparedCommit) => {
  const { nextGeneration, chunks, manifestJson } = preparedCommit;
  try {
    // Defensive sweep: drop any stale chunks left over in the target slot.
    cleanupGenerationChunks(state, nextGeneration, null);
    chunks.forEach((chunk, index) => {
      state.adapter.setItem(
        chunkKey(state.keyPrefix, state.databaseKey, nextGeneration, index),
        chunk
      );
    });
    // The manifest write is the commit point: readers only follow it.
    state.adapter.setItem(
      manifestKey(state.keyPrefix, state.databaseKey),
      manifestJson
    );
  } catch (error) {
    if (error instanceof QuotaExceededError || isQuotaBrowserError(error)) {
      throw new QuotaExceededError(
        "localStorage quota exceeded during commit."
      );
    }
    throw new StorageEngineError("localStorage write failed during commit.");
  }
};
// Full commit cycle: validate counters, write the next generation, swap the
// in-memory state over, then garbage-collect the previous generation.
var commitLocalStorageSnapshot = (state, treeJSON) => {
  ensureCommitCountersSafe(state);
  const prepared = prepareLocalStorageCommit(state, treeJSON);
  writeLocalStorageCommit(state, prepared);
  const staleGeneration = state.activeGeneration;
  const staleChunkCount = state.activeChunkCount;
  state.activeGeneration = prepared.nextGeneration;
  state.commitId = prepared.nextCommitId;
  state.activeChunkCount = prepared.chunks.length;
  cleanupGenerationChunks(state, staleGeneration, staleChunkCount);
};
1893
+
1894
// src/storage/drivers/localStorage/localStorageBackendController.ts
// Durable auto-commit controller backed by window.localStorage. Commits are
// synchronous under the hood but surface as resolved promises to satisfy the
// async controller contract.
var LocalStorageBackendController = class _LocalStorageBackendController extends AsyncDurableAutoCommitController {
  constructor(backend, autoCommit, getSnapshot, onAutoCommitError) {
    super(autoCommit, onAutoCommitError);
    this.backend = backend;
    this.getSnapshot = getSnapshot;
  }
  /**
   * Factory: detects localStorage, parses configuration, loads any persisted
   * snapshot, and returns the controller plus bootstrap data.
   * Throws UnsupportedBackendError when localStorage is unavailable.
   */
  static create(options) {
    const adapter = detectGlobalLocalStorage();
    if (adapter === null) {
      throw new UnsupportedBackendError(
        "localStorage is not available in the current runtime environment."
      );
    }
    const { keyPrefix, databaseKey, maxChunkChars, maxChunks } = parseLocalStorageConfig(options.config);
    const autoCommit = parseAutoCommitConfig(options.autoCommit);
    const backend = createLocalStorageBackendState(
      adapter,
      keyPrefix,
      databaseKey,
      maxChunkChars,
      maxChunks
    );
    const loaded = loadLocalStorageSnapshot(backend);
    return {
      controller: new _LocalStorageBackendController(
        backend,
        autoCommit,
        options.getSnapshot,
        options.onAutoCommitError
      ),
      initialTreeJSON: loaded.treeJSON,
      initialCurrentSizeBytes: loaded.currentSizeBytes
    };
  }
  // Persists the latest snapshot synchronously; resolves once written.
  executeSingleCommit() {
    const { treeJSON } = this.getSnapshot();
    commitLocalStorageSnapshot(this.backend, treeJSON);
    return Promise.resolve();
  }
};
1939
+
1940
// src/drivers/localStorage.ts
// Public driver factory for the localStorage backend.
var localStorageDriver = (options = {}) => ({
  // Boots the backend controller and returns the initial snapshot data.
  init: (callbacks) => {
    const created = LocalStorageBackendController.create({
      config: options,
      autoCommit: callbacks.autoCommit,
      getSnapshot: callbacks.getSnapshot,
      onAutoCommitError: callbacks.onAutoCommitError
    });
    return {
      controller: created.controller,
      initialTreeJSON: created.initialTreeJSON,
      initialCurrentSizeBytes: created.initialCurrentSizeBytes
    };
  },
  // Upper bound on stored characters implied by the chunking configuration.
  resolveBackendLimitBytes: () => {
    const { maxChunkChars, maxChunks } = parseLocalStorageConfig(options);
    return maxChunkChars * maxChunks;
  }
});
1962
+
1963
// src/storage/drivers/IndexedDB/indexedDBConfig.ts
// Validates that a config option is a non-empty string. Also rejects
// non-string values up front so a misconfigured option surfaces as a
// ConfigurationError instead of a TypeError from calling .trim() on a
// non-string (the original assumed `value` was always a string).
var ensureNonEmptyString = (value, optionName) => {
  if (typeof value !== "string" || value.trim().length === 0) {
    throw new ConfigurationError(`${optionName} must be a non-empty string.`);
  }
};
// Normalizes the IndexedDB driver configuration, applying defaults and
// validating the database/store names and schema version.
// Throws ConfigurationError on invalid input.
var parseIndexedDBConfig = (config) => {
  const databaseName = config?.databaseName ?? "frostpillar";
  const objectStoreName = config?.objectStoreName ?? "frostpillar";
  const version = config?.version ?? 1;
  ensureNonEmptyString(databaseName, "indexedDB.databaseName");
  ensureNonEmptyString(objectStoreName, "indexedDB.objectStoreName");
  // "_meta" is reserved for the engine's own metadata object store.
  if (objectStoreName === "_meta") {
    throw new ConfigurationError(
      'indexedDB.objectStoreName must not be "_meta" because it is reserved for internal metadata.'
    );
  }
  if (!Number.isSafeInteger(version) || version <= 0) {
    throw new ConfigurationError(
      "indexedDB.version must be a positive safe integer."
    );
  }
  return {
    databaseName,
    objectStoreName,
    version
  };
};
1991
+
1992
// src/storage/drivers/IndexedDB/indexedDBBackend.ts
var IDB_MAGIC = "FPIDB_META";
var IDB_VERSION_VALUE = 2;
var IDB_META_STORE = "_meta";
var IDB_META_KEY = "config";
// Returns the global IDBFactory, or null when indexedDB is unavailable or
// access throws.
var detectGlobalIndexedDB = () => {
  try {
    const candidate = globalThis.indexedDB;
    return candidate ?? null;
  } catch {
    return null;
  }
};
// Promisifies a single IDBRequest; rejects with StorageEngineError carrying
// the underlying request error message.
var idbRequest = (req) => new Promise((resolve, reject) => {
  req.onsuccess = (event) => resolve(event.target.result);
  req.onerror = (event) => {
    const detail = String(event.target.error?.message ?? "unknown");
    reject(new StorageEngineError(`IndexedDB request failed: ${detail}`));
  };
});
// Promisifies transaction completion; rejects with StorageEngineError.
var idbTransaction = (tx) => new Promise((resolve, reject) => {
  tx.oncomplete = () => resolve();
  tx.onerror = () => reject(new StorageEngineError("IndexedDB transaction failed."));
});
// Opens (and upgrades, if needed) the database, creating the data store and
// the reserved "_meta" store on first run / version bump.
var openIndexedDB = (factory, databaseName, objectStoreName, version) => new Promise((resolve, reject) => {
  const request = factory.open(databaseName, version);
  request.onupgradeneeded = (event) => {
    const db = event.target.result;
    if (db === null) {
      return;
    }
    for (const storeName of [objectStoreName, IDB_META_STORE]) {
      if (!db.objectStoreNames.contains(storeName)) {
        db.createObjectStore(storeName);
      }
    }
  };
  request.onsuccess = (event) => {
    const db = event.target.result;
    if (db === null) {
      reject(new StorageEngineError("IndexedDB open returned null database."));
      return;
    }
    resolve(db);
  };
  request.onerror = (event) => {
    const detail = String(event.target.error?.message ?? "unknown");
    reject(new StorageEngineError(`IndexedDB open failed: ${detail}`));
  };
});
// Loads the persisted snapshot from the "_meta" store. Returns an empty
// bootstrap result when no metadata record exists yet.
var loadIndexedDBSnapshot = async (db, _objectStoreName) => {
  const tx = db.transaction(
    [IDB_META_STORE],
    "readonly"
  );
  const txDone = idbTransaction(tx);
  const metaRaw = await idbRequest(tx.objectStore(IDB_META_STORE).get(IDB_META_KEY));
  await txDone;
  if (metaRaw === null || metaRaw === void 0) {
    return { treeJSON: null, currentSizeBytes: 0, commitId: 0 };
  }
  const meta = metaRaw;
  if (meta.magic !== IDB_MAGIC || meta.version !== IDB_VERSION_VALUE) {
    throw new StorageEngineError("IndexedDB metadata magic/version mismatch.");
  }
  const commitId = parseNonNegativeSafeInteger(
    meta.commitId,
    "meta.commitId",
    "IndexedDB"
  );
  const treeJSON = meta.treeJSON;
  if (treeJSON === null || typeof treeJSON !== "object" || Array.isArray(treeJSON)) {
    throw new PageCorruptionError("treeJSON must be a non-null plain object.");
  }
  return {
    treeJSON,
    currentSizeBytes: computeUtf8ByteLength(JSON.stringify(treeJSON)),
    commitId
  };
};
// Persists the snapshot plus commit counter as a single metadata record in
// one readwrite transaction.
var commitIndexedDBSnapshot = async (db, _objectStoreName, treeJSON, commitId) => {
  const tx = db.transaction(
    [IDB_META_STORE],
    "readwrite"
  );
  const txDone = idbTransaction(tx);
  tx.objectStore(IDB_META_STORE).put(
    {
      magic: IDB_MAGIC,
      version: IDB_VERSION_VALUE,
      commitId,
      treeJSON
    },
    IDB_META_KEY
  );
  await txDone;
};
2103
+
2104
// src/storage/drivers/IndexedDB/indexedDBBackendController.ts
// Durable auto-commit controller persisting snapshots into IndexedDB.
var IndexedDBBackendController = class _IndexedDBBackendController extends AsyncDurableAutoCommitController {
  constructor(db, objectStoreName, commitId, autoCommit, getSnapshot, onAutoCommitError) {
    super(autoCommit, onAutoCommitError);
    this.db = db;
    this.objectStoreName = objectStoreName;
    this.commitId = commitId;
    this.getSnapshot = getSnapshot;
  }
  /**
   * Async factory: detects indexedDB, opens the database, loads the stored
   * snapshot, and wires up the controller. The freshly opened database is
   * closed again if the bootstrap load fails.
   */
  static async create(options) {
    const factory = detectGlobalIndexedDB();
    if (factory === null) {
      throw new UnsupportedBackendError(
        "indexedDB is not available in the current runtime environment."
      );
    }
    const { databaseName, objectStoreName, version } = parseIndexedDBConfig(options.config);
    const autoCommit = parseAutoCommitConfig(options.autoCommit);
    const db = await openIndexedDB(factory, databaseName, objectStoreName, version);
    let loaded;
    try {
      loaded = await loadIndexedDBSnapshot(db, objectStoreName);
    } catch (error) {
      // Best effort: do not leak the connection on bootstrap failure.
      try {
        db.close();
      } catch {
      }
      throw toErrorInstance(
        error,
        "IndexedDB bootstrap failed with a non-Error value."
      );
    }
    const controller = new _IndexedDBBackendController(
      db,
      objectStoreName,
      loaded.commitId,
      autoCommit,
      options.getSnapshot,
      options.onAutoCommitError
    );
    return {
      controller,
      initialTreeJSON: loaded.treeJSON,
      initialCurrentSizeBytes: loaded.currentSizeBytes
    };
  }
  // Writes the current snapshot under the next commit id; advances the
  // in-memory counter only after the transaction completed.
  async executeSingleCommit() {
    const snapshot = this.getSnapshot();
    if (this.commitId >= Number.MAX_SAFE_INTEGER) {
      throw new StorageEngineError("IndexedDB commitId has reached Number.MAX_SAFE_INTEGER.");
    }
    const nextCommitId = this.commitId + 1;
    await commitIndexedDBSnapshot(
      this.db,
      this.objectStoreName,
      snapshot.treeJSON,
      nextCommitId
    );
    this.commitId = nextCommitId;
  }
  // Releases the database connection once all pending commits drained.
  onCloseAfterDrain() {
    this.db.close();
    return Promise.resolve();
  }
};
2170
+
2171
// src/drivers/indexedDB.ts
// Public driver factory for the IndexedDB backend.
var indexedDBDriver = (options = {}) => ({
  // Boots the backend controller and returns the initial snapshot data.
  init: async (callbacks) => {
    const created = await IndexedDBBackendController.create({
      config: options,
      autoCommit: callbacks.autoCommit,
      getSnapshot: callbacks.getSnapshot,
      onAutoCommitError: callbacks.onAutoCommitError
    });
    return {
      controller: created.controller,
      initialTreeJSON: created.initialTreeJSON,
      initialCurrentSizeBytes: created.initialCurrentSizeBytes
    };
  }
});
2189
+
2190
// src/storage/drivers/opfs/opfsBackend.ts
var OPFS_MAGIC = "FPOPFS_META";
var OPFS_VERSION_VALUE = 2;
var META_FILE = "meta.json";
var DATA_FILE_A = "data-a.json";
var DATA_FILE_B = "data-b.json";
// True for DOM "NotFoundError" exceptions (missing file/directory entry).
var isNotFoundError = (error) => error instanceof Error && error.name === "NotFoundError";
// True for non-null, non-array objects (candidate manifest shape).
var isManifestObject = (value) => typeof value === "object" && value !== null && !Array.isArray(value);
// Returns navigator.storage when OPFS is available, else null.
var detectGlobalOpfs = () => {
  try {
    const globals = globalThis;
    if (typeof globals.navigator?.storage?.getDirectory === "function") {
      return globals.navigator.storage;
    }
    return null;
  } catch {
    return null;
  }
};
// Opens (creating if needed) the engine's private directory under the OPFS
// root.
var openOpfsDirectory = async (storageRoot, directoryName) => {
  const root = await storageRoot.getDirectory();
  return root.getDirectoryHandle(directoryName, { create: true });
};
// Parses meta.json text into a validated manifest, commit id, and active
// data slot ("a" or "b"). Throws StorageEngineError on any malformed input.
var parseOpfsManifest = (metaText) => {
  let raw;
  try {
    raw = JSON.parse(metaText);
  } catch {
    throw new StorageEngineError("OPFS meta.json JSON is malformed.");
  }
  if (!isManifestObject(raw)) {
    throw new StorageEngineError("OPFS meta.json must be a JSON object.");
  }
  if (raw.magic !== OPFS_MAGIC || raw.version !== OPFS_VERSION_VALUE) {
    throw new StorageEngineError("OPFS meta.json magic/version mismatch.");
  }
  if (raw.activeData !== "a" && raw.activeData !== "b") {
    throw new StorageEngineError('OPFS meta.json activeData must be "a" or "b".');
  }
  const commitId = parseNonNegativeSafeInteger(
    raw.commitId,
    "meta.json commitId",
    "OPFS"
  );
  return { manifest: raw, commitId, activeData: raw.activeData };
};
// Reads and validates the active data file, returning the parsed tree plus
// its UTF-8 byte length.
var loadOpfsDataFile = async (dir, dataFileName) => {
  let dataText;
  try {
    const handle = await dir.getFileHandle(dataFileName, { create: false });
    const file = await handle.getFile();
    dataText = await file.text();
  } catch {
    throw new StorageEngineError(`OPFS active data file "${dataFileName}" not found.`);
  }
  let parsedTree;
  try {
    parsedTree = JSON.parse(dataText);
  } catch {
    throw new StorageEngineError("OPFS data file JSON is malformed.");
  }
  if (parsedTree === null || typeof parsedTree !== "object" || Array.isArray(parsedTree)) {
    throw new PageCorruptionError("treeJSON must be a non-null plain object.");
  }
  return {
    treeJSON: parsedTree,
    rawJsonLength: computeUtf8ByteLength(dataText)
  };
};
// Loads the committed snapshot by following meta.json to the active data
// slot. A missing meta.json means a fresh store; any other read failure is
// wrapped as StorageEngineError.
var loadOpfsSnapshot = async (dir) => {
  let metaText;
  try {
    const metaHandle = await dir.getFileHandle(META_FILE, { create: false });
    const metaFile = await metaHandle.getFile();
    metaText = await metaFile.text();
  } catch (error) {
    if (!isNotFoundError(error)) {
      throw toStorageEngineError(error, "OPFS meta.json read failed");
    }
    return {
      treeJSON: null,
      currentSizeBytes: 0,
      commitId: 0,
      activeData: "a"
    };
  }
  const { commitId, activeData } = parseOpfsManifest(metaText);
  const { treeJSON, rawJsonLength } = await loadOpfsDataFile(
    dir,
    activeData === "a" ? DATA_FILE_A : DATA_FILE_B
  );
  return { treeJSON, currentSizeBytes: rawJsonLength, commitId, activeData };
};
// A/B commit: write the full snapshot into the inactive slot, then flip
// meta.json to point at it (the meta.json swap is the commit point).
// Returns the new active slot.
var commitOpfsSnapshot = async (dir, currentActiveData, treeJSON, commitId) => {
  const nextActiveData = currentActiveData === "a" ? "b" : "a";
  const dataFileName = nextActiveData === "a" ? DATA_FILE_A : DATA_FILE_B;
  try {
    const writeFile = async (fileName, text) => {
      const handle = await dir.getFileHandle(fileName, { create: true });
      const writable = await handle.createWritable();
      await writable.write(text);
      await writable.close();
    };
    await writeFile(dataFileName, JSON.stringify(treeJSON));
    await writeFile(META_FILE, JSON.stringify({
      magic: OPFS_MAGIC,
      version: OPFS_VERSION_VALUE,
      activeData: nextActiveData,
      commitId
    }));
  } catch (error) {
    throw toStorageEngineError(error, "OPFS commit failed");
  }
  return nextActiveData;
};
2314
+
2315
// src/storage/drivers/opfs/opfsBackendController.ts
var DEFAULT_DIRECTORY_NAME = "frostpillar";
// Durable auto-commit controller persisting snapshots via OPFS A/B files.
var OpfsBackendController = class _OpfsBackendController extends AsyncDurableAutoCommitController {
  constructor(dir, activeData, commitId, autoCommit, getSnapshot, onAutoCommitError) {
    super(autoCommit, onAutoCommitError);
    this.dir = dir;
    this.activeData = activeData;
    this.commitId = commitId;
    this.getSnapshot = getSnapshot;
  }
  /**
   * Async factory: detects OPFS support, opens the storage directory, loads
   * the committed snapshot, and wires up the controller.
   * Throws UnsupportedBackendError when OPFS is unavailable.
   */
  static async create(options) {
    const storageRoot = detectGlobalOpfs();
    if (storageRoot === null) {
      throw new UnsupportedBackendError(
        "opfs (Origin Private File System) is not available in the current runtime environment."
      );
    }
    const directoryName = options.config?.directoryName ?? DEFAULT_DIRECTORY_NAME;
    const autoCommit = parseAutoCommitConfig(options.autoCommit);
    const dir = await openOpfsDirectory(storageRoot, directoryName);
    const loaded = await loadOpfsSnapshot(dir);
    const controller = new _OpfsBackendController(
      dir,
      loaded.activeData,
      loaded.commitId,
      autoCommit,
      options.getSnapshot,
      options.onAutoCommitError
    );
    return {
      controller,
      initialTreeJSON: loaded.treeJSON,
      initialCurrentSizeBytes: loaded.currentSizeBytes
    };
  }
  // Writes the snapshot to the inactive slot and flips the in-memory active
  // slot/counter only after the commit fully succeeded.
  async executeSingleCommit() {
    const snapshot = this.getSnapshot();
    if (this.commitId >= Number.MAX_SAFE_INTEGER) {
      throw new StorageEngineError("OPFS commitId has reached Number.MAX_SAFE_INTEGER.");
    }
    const nextCommitId = this.commitId + 1;
    this.activeData = await commitOpfsSnapshot(
      this.dir,
      this.activeData,
      snapshot.treeJSON,
      nextCommitId
    );
    this.commitId = nextCommitId;
  }
};
2366
+
2367
// src/drivers/opfs.ts
// Public driver factory for the OPFS backend.
var opfsDriver = (options = {}) => ({
  // Boots the backend controller and returns the initial snapshot data.
  init: async (callbacks) => {
    const created = await OpfsBackendController.create({
      config: options,
      autoCommit: callbacks.autoCommit,
      getSnapshot: callbacks.getSnapshot,
      onAutoCommitError: callbacks.onAutoCommitError
    });
    return {
      controller: created.controller,
      initialTreeJSON: created.initialTreeJSON,
      initialCurrentSizeBytes: created.initialCurrentSizeBytes
    };
  }
});
2385
+
2386
// src/storage/drivers/syncStorage/syncStorageConfig.ts
var DEFAULT_SYNC_STORAGE_MAX_TOTAL_BYTES = 102400;
// Resolves and validates only maxTotalBytes, used to derive the backend's
// byte limit without parsing the whole config.
var parseSyncStorageMaxTotalBytesForBackendLimit = (config) => {
  const maxTotalBytes = config?.maxTotalBytes ?? DEFAULT_SYNC_STORAGE_MAX_TOTAL_BYTES;
  if (!Number.isSafeInteger(maxTotalBytes) || maxTotalBytes <= 0) {
    throw new ConfigurationError(
      "syncStorage.maxTotalBytes must be a positive safe integer."
    );
  }
  return maxTotalBytes;
};
// Normalizes the extension sync-storage config, applying defaults tuned to
// chrome.storage.sync quotas and validating every numeric bound.
var parseSyncStorageConfig = (config) => {
  const keyPrefix = config?.keyPrefix ?? "frostpillar";
  const databaseKey = config?.databaseKey ?? "default";
  const maxChunkChars = config?.maxChunkChars ?? 6e3;
  const maxChunks = config?.maxChunks ?? 511;
  const maxItemBytes = config?.maxItemBytes ?? 8192;
  const maxTotalBytes = parseSyncStorageMaxTotalBytesForBackendLimit(config);
  const maxItems = config?.maxItems ?? 512;
  const requirePositiveSafeInteger = (value, optionName) => {
    if (!Number.isSafeInteger(value) || value <= 0) {
      throw new ConfigurationError(
        `syncStorage.${optionName} must be a positive safe integer.`
      );
    }
  };
  requirePositiveSafeInteger(maxChunkChars, "maxChunkChars");
  requirePositiveSafeInteger(maxChunks, "maxChunks");
  requirePositiveSafeInteger(maxItemBytes, "maxItemBytes");
  requirePositiveSafeInteger(maxItems, "maxItems");
  // One extra item slot is needed for the manifest record.
  if (maxChunks + 1 > maxItems) {
    throw new ConfigurationError(
      "syncStorage.maxChunks + 1 (manifest item) must be <= syncStorage.maxItems."
    );
  }
  return {
    keyPrefix,
    databaseKey,
    maxChunkChars,
    maxChunks,
    maxItemBytes,
    maxTotalBytes,
    maxItems
  };
};
2440
+
2441
// src/validation/typeGuards.ts
// True only for plain record objects: non-null, non-array, and whose
// prototype is Object.prototype or null (rejects class instances, Maps,
// Dates, etc.).
var isRecordObject = (value) => {
  if (typeof value !== "object" || value === null || Array.isArray(value)) {
    return false;
  }
  const proto = Object.getPrototypeOf(value);
  return proto === null || proto === Object.prototype;
};
2449
+
2450
+ // src/storage/drivers/syncStorage/syncStorageAdapter.ts
2451
+ var readChromeRuntimeError = (runtime) => {
2452
+ const runtimeMessage = runtime?.lastError?.message;
2453
+ if (runtimeMessage === void 0) {
2454
+ return null;
2455
+ }
2456
+ if (runtimeMessage.trim().length === 0) {
2457
+ return new Error("chrome.runtime.lastError is set with an empty message.");
2458
+ }
2459
+ return new Error(runtimeMessage);
2460
+ };
2461
+ var callChromeCallbackGet = (syncArea, runtime, keys) => {
2462
+ return new Promise((resolve, reject) => {
2463
+ try {
2464
+ syncArea.get(keys, (items) => {
2465
+ const runtimeError = readChromeRuntimeError(runtime);
2466
+ if (runtimeError !== null) {
2467
+ reject(runtimeError);
2468
+ return;
2469
+ }
2470
+ resolve(items);
2471
+ });
2472
+ } catch (error) {
2473
+ reject(
2474
+ toErrorInstance(error, "chrome.storage.sync.get failed with a non-Error value.")
2475
+ );
2476
+ }
2477
+ });
2478
+ };
2479
+ var callChromeCallbackSet = (syncArea, runtime, items) => {
2480
+ return new Promise((resolve, reject) => {
2481
+ try {
2482
+ syncArea.set(items, () => {
2483
+ const runtimeError = readChromeRuntimeError(runtime);
2484
+ if (runtimeError !== null) {
2485
+ reject(runtimeError);
2486
+ return;
2487
+ }
2488
+ resolve();
2489
+ });
2490
+ } catch (error) {
2491
+ reject(
2492
+ toErrorInstance(error, "chrome.storage.sync.set failed with a non-Error value.")
2493
+ );
2494
+ }
2495
+ });
2496
+ };
2497
+ var callChromeCallbackRemove = (syncArea, runtime, keys) => {
2498
+ return new Promise((resolve, reject) => {
2499
+ try {
2500
+ syncArea.remove(keys, () => {
2501
+ const runtimeError = readChromeRuntimeError(runtime);
2502
+ if (runtimeError !== null) {
2503
+ reject(runtimeError);
2504
+ return;
2505
+ }
2506
+ resolve();
2507
+ });
2508
+ } catch (error) {
2509
+ reject(
2510
+ toErrorInstance(
2511
+ error,
2512
+ "chrome.storage.sync.remove failed with a non-Error value."
2513
+ )
2514
+ );
2515
+ }
2516
+ });
2517
+ };
2518
+ var createBrowserPromiseSyncStorageAdapter = (syncArea) => {
2519
+ return {
2520
+ getItems: async (keys) => {
2521
+ return await syncArea.get(keys);
2522
+ },
2523
+ setItems: async (items) => {
2524
+ await syncArea.set(items);
2525
+ },
2526
+ removeItems: async (keys) => {
2527
+ await syncArea.remove(keys);
2528
+ }
2529
+ };
2530
+ };
2531
+ var createChromeCallbackSyncStorageAdapter = (syncArea, runtime) => {
2532
+ return {
2533
+ getItems: async (keys) => {
2534
+ return await callChromeCallbackGet(syncArea, runtime, keys);
2535
+ },
2536
+ setItems: async (items) => {
2537
+ await callChromeCallbackSet(syncArea, runtime, items);
2538
+ },
2539
+ removeItems: async (keys) => {
2540
+ await callChromeCallbackRemove(syncArea, runtime, keys);
2541
+ }
2542
+ };
2543
+ };
2544
// Structural check: true when value is a record object exposing the three
// sync-area methods (get/set/remove) as functions.
var hasSyncAreaFunctionShape = (value) => {
  if (!isRecordObject(value)) {
    return false;
  }
  const requiredMethods = ["get", "set", "remove"];
  return requiredMethods.every(
    (methodName) => typeof value[methodName] === "function"
  );
};
2550
// Firefox promise-based area check; the duck-typing is identical to the
// Chrome callback shape, so this delegates to the shared structural check.
var hasBrowserPromiseSyncArea = (value) => hasSyncAreaFunctionShape(value);
2553
// Chrome callback-based area check; shares the same structural test as the
// browser promise variant (both expose get/set/remove functions).
var hasChromeCallbackSyncArea = (value) => hasSyncAreaFunctionShape(value);
2556
// Probes globalThis for a usable extension sync-storage area and returns a
// matching adapter, or null when none is available. The promise-based
// `browser` namespace is preferred over the callback-based `chrome` one;
// any probing error is treated as "not available".
var detectGlobalSyncStorage = () => {
  try {
    const globals = globalThis;
    const browserSync = globals.browser?.storage?.sync;
    if (hasBrowserPromiseSyncArea(browserSync)) {
      return createBrowserPromiseSyncStorageAdapter(browserSync);
    }
    const chromeSync = globals.chrome?.storage?.sync;
    if (!hasChromeCallbackSyncArea(chromeSync)) {
      return null;
    }
    // runtime may legitimately be absent; the adapter tolerates null.
    return createChromeCallbackSyncStorageAdapter(
      chromeSync,
      globals.chrome?.runtime ?? null
    );
  } catch {
    return null;
  }
};
2573
+
2574
+ // src/storage/drivers/syncStorage/syncStorageChunkMaintenance.ts
2575
// Removes the chunk keys of one generation from the backing store.
// When knownChunkCount is a number, exactly that many keys are removed
// (no-op for counts <= 0). When it is null, up to state.maxChunks keys are
// probed speculatively via getItems and only the keys actually present are
// removed, so unknown/stale generations can be cleared without a count.
var cleanupGenerationChunks2 = async (state, generation, knownChunkCount, chunkKeyResolver) => {
  const keysFor = (count) =>
    Array.from({ length: count }, (_, index) => chunkKeyResolver(generation, index));
  if (knownChunkCount !== null) {
    if (knownChunkCount > 0) {
      await state.adapter.removeItems(keysFor(knownChunkCount));
    }
    return;
  }
  if (state.maxChunks <= 0) {
    return;
  }
  const speculativeKeys = keysFor(state.maxChunks);
  const maybeChunks = await state.adapter.getItems(speculativeKeys);
  // Own-property check so prototype members never count as stored chunks.
  const discoveredKeys = speculativeKeys.filter((key) =>
    Object.prototype.hasOwnProperty.call(maybeChunks, key)
  );
  if (discoveredKeys.length > 0) {
    await state.adapter.removeItems(discoveredKeys);
  }
};
2603
+
2604
+ // src/storage/drivers/syncStorage/syncStorageQuota.ts
2605
// Shared encoder for UTF-8 byte-size accounting.
var utf8Encoder = new TextEncoder();
// Computes the quota cost of one stored item as UTF-8 bytes of the key plus
// UTF-8 bytes of the JSON-serialized value. Throws StorageEngineError when
// JSON.stringify yields undefined (e.g. the value is undefined or a function).
var computeSyncStorageItemBytes = (key, value) => {
  const byteLengthOf = (text) => utf8Encoder.encode(text).byteLength;
  const valueJson = JSON.stringify(value);
  if (valueJson === void 0) {
    throw new StorageEngineError(
      `syncStorage value for key "${key}" cannot be serialized.`
    );
  }
  return byteLengthOf(key) + byteLengthOf(valueJson);
};
2615
// Heuristic: does this error indicate a storage-quota failure? True for the
// engine's own QuotaExceededError, otherwise for any Error whose name or
// message mentions a known quota keyword (case-insensitive).
var isQuotaBrowserError2 = (error) => {
  if (error instanceof QuotaExceededError) {
    return true;
  }
  if (!(error instanceof Error)) {
    return false;
  }
  return /quota|max_items|quota_bytes|quota_bytes_per_item/i.test(
    `${error.name}:${error.message}`
  );
};
2625
// Pre-flight quota validation for a pending commit (all chunk items plus the
// manifest item). Throws QuotaExceededError when the item count exceeds
// maxItems, any single item exceeds maxItemBytes, or the combined payload
// exceeds maxTotalBytes. Checks run in that order, so the cheapest failure
// is reported first.
var validateSyncStorageCommitQuota = (state, generation, chunks, manifest, resolveChunkKey, manifestStorageKey) => {
  const pendingItems = chunks.map((chunkValue, chunkIndex) => ({
    key: resolveChunkKey(generation, chunkIndex),
    value: chunkValue
  }));
  pendingItems.push({ key: manifestStorageKey, value: manifest });
  if (pendingItems.length > state.maxItems) {
    throw new QuotaExceededError(
      `syncStorage snapshot requires ${pendingItems.length} items but maxItems is ${state.maxItems}.`
    );
  }
  let totalBytes = 0;
  for (const { key, value } of pendingItems) {
    const itemBytes = computeSyncStorageItemBytes(key, value);
    if (itemBytes > state.maxItemBytes) {
      throw new QuotaExceededError(
        `syncStorage item "${key}" requires ${itemBytes} bytes but maxItemBytes is ${state.maxItemBytes}.`
      );
    }
    totalBytes += itemBytes;
  }
  if (totalBytes > state.maxTotalBytes) {
    throw new QuotaExceededError(
      `syncStorage snapshot requires ${totalBytes} bytes but maxTotalBytes is ${state.maxTotalBytes}.`
    );
  }
};
2662
+
2663
+ // src/storage/drivers/syncStorage/syncStorageBackend.ts
2664
// On-disk format identifiers embedded in every manifest; a mismatch on load
// rejects the snapshot.
var SYNC_STORAGE_MAGIC = "FPSYNC_META";
var SYNC_STORAGE_VERSION = 2;
// Storage key of the manifest item for one logical database.
var manifestKey2 = (keyPrefix, databaseKey) =>
  `${keyPrefix}:sync:${databaseKey}:manifest`;
// Storage key of chunk `index` within generation `generation`.
var chunkKey2 = (keyPrefix, databaseKey, generation, index) =>
  [keyPrefix, "sync", databaseKey, "g", generation, "chunk", index].join(":");
2672
// Builds the mutable per-backend state record. Configuration fields are
// copied verbatim; the commit counters (activeGeneration, commitId,
// activeChunkCount) start at zero and are advanced by load/commit.
var createSyncStorageBackendState = (adapter, keyPrefix, databaseKey, maxChunkChars, maxChunks, maxItemBytes, maxTotalBytes, maxItems) => ({
  adapter,
  keyPrefix,
  databaseKey,
  maxChunkChars,
  maxChunks,
  maxItemBytes,
  maxTotalBytes,
  maxItems,
  activeGeneration: 0,
  commitId: 0,
  activeChunkCount: 0
});
2687
// Validates a raw manifest value loaded from storage. Throws
// StorageEngineError when it is not an object, when magic/version do not
// match the current format, or when chunkCount is invalid or exceeds
// maxChunks. Returns the manifest unchanged on success.
var parseSyncManifest = (manifestUnknown, maxChunks) => {
  if (!isRecordObject(manifestUnknown)) {
    throw new StorageEngineError("syncStorage manifest must be an object.");
  }
  const manifest = manifestUnknown;
  const isKnownFormat =
    manifest.magic === SYNC_STORAGE_MAGIC &&
    manifest.version === SYNC_STORAGE_VERSION;
  if (!isKnownFormat) {
    throw new StorageEngineError("syncStorage manifest magic/version mismatch.");
  }
  const chunkCount = parseNonNegativeSafeInteger(
    manifest.chunkCount,
    "manifest.chunkCount",
    "syncStorage"
  );
  if (chunkCount > maxChunks) {
    throw new StorageEngineError(
      `syncStorage snapshot requires ${chunkCount} chunks but maxChunks is ${maxChunks}.`
    );
  }
  return manifest;
};
2707
// Fetches the chunk items of the active generation, concatenates them, and
// parses the result as JSON. Throws StorageEngineError when a chunk is
// missing/not a string or the concatenated JSON is malformed, and
// PageCorruptionError when the parsed value is not a plain non-null object.
// Returns the parsed tree plus the UTF-8 byte length of the raw JSON.
var loadSyncChunksAndDecodeTreeJSON = async (state, activeGeneration, chunkCount) => {
  const chunkKeys = Array.from({ length: chunkCount }, (_, index) =>
    chunkKey2(state.keyPrefix, state.databaseKey, activeGeneration, index)
  );
  // Skip the adapter round-trip entirely for an empty snapshot.
  const chunkValuesByKey =
    chunkKeys.length === 0 ? {} : await state.adapter.getItems(chunkKeys);
  const chunks = chunkKeys.map((cKey) => {
    const chunkValue = chunkValuesByKey[cKey];
    if (typeof chunkValue !== "string") {
      throw new StorageEngineError(
        `syncStorage chunk "${cKey}" is missing or not a string.`
      );
    }
    return chunkValue;
  });
  const treeJson = chunks.join("");
  let parsedTreeJSON;
  try {
    parsedTreeJSON = JSON.parse(treeJson);
  } catch {
    throw new StorageEngineError("syncStorage chunk data JSON is malformed.");
  }
  const isPlainObject =
    typeof parsedTreeJSON === "object" &&
    parsedTreeJSON !== null &&
    !Array.isArray(parsedTreeJSON);
  if (!isPlainObject) {
    throw new PageCorruptionError("treeJSON must be a non-null plain object.");
  }
  return {
    treeJSON: parsedTreeJSON,
    rawJsonLength: computeUtf8ByteLength(treeJson)
  };
};
2740
// Loads the persisted snapshot for this backend, if any.
// Returns { treeJSON: null, currentSizeBytes: 0 } when no manifest exists
// (fresh database). On success, also advances the in-memory counters on
// `state` — note the counters are only mutated AFTER the chunks decode
// successfully, so a corrupt snapshot leaves `state` untouched.
var loadSyncStorageSnapshot = async (state) => {
  const mKey = manifestKey2(state.keyPrefix, state.databaseKey);
  const manifestMap = await state.adapter.getItems([mKey]);
  const manifestUnknown = manifestMap[mKey];
  if (manifestUnknown === void 0) {
    // No manifest stored yet: empty database, nothing to decode.
    return { treeJSON: null, currentSizeBytes: 0 };
  }
  // Validates shape, magic/version, and chunkCount <= maxChunks (throws otherwise).
  const manifest = parseSyncManifest(manifestUnknown, state.maxChunks);
  const activeGeneration = parseNonNegativeSafeInteger(
    manifest.activeGeneration,
    "manifest.activeGeneration",
    "syncStorage"
  );
  const commitId = parseNonNegativeSafeInteger(
    manifest.commitId,
    "manifest.commitId",
    "syncStorage"
  );
  const chunkCount = parseNonNegativeSafeInteger(
    manifest.chunkCount,
    "manifest.chunkCount",
    "syncStorage"
  );
  // Fetch + concatenate + JSON.parse the generation's chunks (throws on
  // missing/non-string chunks or malformed JSON).
  const { treeJSON, rawJsonLength } = await loadSyncChunksAndDecodeTreeJSON(
    state,
    activeGeneration,
    chunkCount
  );
  // Size accounting uses the UTF-8 byte length of the raw JSON payload.
  const currentSizeBytes = rawJsonLength;
  state.activeGeneration = activeGeneration;
  state.commitId = commitId;
  state.activeChunkCount = chunkCount;
  return { treeJSON, currentSizeBytes };
};
2774
// Returns a (generation, index) -> storage-key resolver bound to this
// backend's prefix and database key. The state fields are read at call time.
var buildSyncChunkKeyResolver = (state) => (generation, index) =>
  chunkKey2(state.keyPrefix, state.databaseKey, generation, index);
2779
// Assembles the key/value map for one commit: the manifest under its fixed
// key plus every chunk under its next-generation chunk key.
var buildSyncCommitItems = (state, chunks, newManifest, nextGeneration) => {
  const items = {
    [manifestKey2(state.keyPrefix, state.databaseKey)]: newManifest
  };
  chunks.forEach((chunkValue, index) => {
    items[chunkKey2(state.keyPrefix, state.databaseKey, nextGeneration, index)] = chunkValue;
  });
  return items;
};
2788
// Serializes the tree to JSON and splits it into fixed-size string chunks of
// at most maxChunkChars characters each (the last chunk may be shorter; an
// empty serialization yields zero chunks). Throws QuotaExceededError when
// more than maxChunks chunks would be needed.
var splitSyncTreeJSONIntoChunks = (treeJSON, maxChunkChars, maxChunks) => {
  const dataJson = JSON.stringify(treeJSON);
  const chunks = [];
  for (let offset = 0; offset < dataJson.length; offset += maxChunkChars) {
    chunks.push(dataJson.slice(offset, offset + maxChunkChars));
  }
  if (chunks.length > maxChunks) {
    throw new QuotaExceededError(
      `syncStorage snapshot requires ${chunks.length} chunks but maxChunks is ${maxChunks}.`
    );
  }
  return chunks;
};
2801
// Guards against counter overflow before a commit: commitId and
// activeGeneration must both still be below Number.MAX_SAFE_INTEGER so that
// `+ 1` stays exact. commitId is checked first.
var ensureSyncCommitCountersSafe = (state) => {
  const assertBelowMax = (counterValue, label) => {
    if (counterValue >= Number.MAX_SAFE_INTEGER) {
      throw new StorageEngineError(
        `syncStorage ${label} has reached Number.MAX_SAFE_INTEGER.`
      );
    }
  };
  assertBelowMax(state.commitId, "commitId");
  assertBelowMax(state.activeGeneration, "activeGeneration");
};
2809
// Persists a new snapshot as generation N+1, then garbage-collects the
// previous generation. The ordering below is deliberate and crash-safety
// relevant: the manifest still pointing at the old generation is only
// replaced by the single setItems call, so a failure before/after that call
// leaves either the old or the new snapshot fully readable.
var commitSyncStorageSnapshot = async (state, treeJSON) => {
  // Refuse to commit once the counters could lose integer precision.
  ensureSyncCommitCountersSafe(state);
  const nextCommitId = state.commitId + 1;
  const nextGeneration = state.activeGeneration + 1;
  // May throw QuotaExceededError when the payload needs too many chunks.
  const chunks = splitSyncTreeJSONIntoChunks(
    treeJSON,
    state.maxChunkChars,
    state.maxChunks
  );
  const newManifest = {
    magic: SYNC_STORAGE_MAGIC,
    version: SYNC_STORAGE_VERSION,
    activeGeneration: nextGeneration,
    commitId: nextCommitId,
    chunkCount: chunks.length
  };
  const resolveChunkKey = buildSyncChunkKeyResolver(state);
  const mKey = manifestKey2(state.keyPrefix, state.databaseKey);
  // Pre-flight quota checks (item count, per-item bytes, total bytes);
  // throws before anything is written.
  validateSyncStorageCommitQuota(
    state,
    nextGeneration,
    chunks,
    newManifest,
    resolveChunkKey,
    mKey
  );
  const newSnapshotItems = buildSyncCommitItems(
    state,
    chunks,
    newManifest,
    nextGeneration
  );
  try {
    // Best-effort: clear any stale chunks left at the target generation by a
    // previous interrupted commit. Failure here is intentionally swallowed —
    // the subsequent setItems overwrites the keys that matter.
    await cleanupGenerationChunks2(state, nextGeneration, null, resolveChunkKey);
  } catch {
  }
  try {
    // Single write of chunks + manifest; this is the commit point.
    await state.adapter.setItems(newSnapshotItems);
  } catch (error) {
    // Map quota-looking failures to the typed quota error; wrap everything
    // else, preserving the original failure as `cause`.
    if (isQuotaBrowserError2(error)) {
      throw new QuotaExceededError("syncStorage quota exceeded during commit.");
    }
    throw new StorageEngineError(
      "syncStorage write failed during commit.",
      { cause: error }
    );
  }
  // The write succeeded: advance in-memory counters before cleaning up the
  // now-superseded generation.
  const previousGeneration = state.activeGeneration;
  const previousChunkCount = state.activeChunkCount;
  state.activeGeneration = nextGeneration;
  state.commitId = nextCommitId;
  state.activeChunkCount = chunks.length;
  // Remove the old generation's chunks (exact count is known here).
  await cleanupGenerationChunks2(
    state,
    previousGeneration,
    previousChunkCount,
    resolveChunkKey
  );
};
2868
+
2869
+ // src/storage/drivers/syncStorage/syncStorageBackendController.ts
2870
// Backend controller binding the sync-storage backend state to the shared
// auto-commit machinery. NOTE(review): the commit scheduling/durability
// contract lives in AsyncDurableAutoCommitController (defined elsewhere in
// this bundle); this subclass only supplies the single-commit step.
var SyncStorageBackendController = class _SyncStorageBackendController extends AsyncDurableAutoCommitController {
  constructor(backend, autoCommit, getSnapshot, onAutoCommitError) {
    super(autoCommit, onAutoCommitError);
    this.backend = backend;
    // Callback that yields the current snapshot ({ treeJSON }) at commit time.
    this.getSnapshot = getSnapshot;
  }
  // Async factory (constructors cannot await): detects the environment,
  // parses config, loads any existing snapshot, and returns the controller
  // together with the initial tree and size for the engine to adopt.
  static async create(options) {
    const adapter = detectGlobalSyncStorage();
    if (adapter === null) {
      // Neither browser.storage.sync nor chrome.storage.sync is usable here.
      throw new UnsupportedBackendError(
        "browser sync storage is not available in the current runtime environment."
      );
    }
    const syncConfig = parseSyncStorageConfig(options.config);
    const autoCommit = parseAutoCommitConfig(options.autoCommit);
    const backend = createSyncStorageBackendState(
      adapter,
      syncConfig.keyPrefix,
      syncConfig.databaseKey,
      syncConfig.maxChunkChars,
      syncConfig.maxChunks,
      syncConfig.maxItemBytes,
      syncConfig.maxTotalBytes,
      syncConfig.maxItems
    );
    // Loading also seeds backend.activeGeneration/commitId/activeChunkCount.
    const loaded = await loadSyncStorageSnapshot(backend);
    const controller = new _SyncStorageBackendController(
      backend,
      autoCommit,
      options.getSnapshot,
      options.onAutoCommitError
    );
    return {
      controller,
      initialTreeJSON: loaded.treeJSON,
      initialCurrentSizeBytes: loaded.currentSizeBytes
    };
  }
  // Hook invoked by the base class: snapshot the current tree and persist it
  // as a new generation.
  async executeSingleCommit() {
    const snapshot = this.getSnapshot();
    await commitSyncStorageSnapshot(
      this.backend,
      snapshot.treeJSON
    );
  }
};
2916
+
2917
+ // src/drivers/syncStorage.ts
2918
// Public driver factory for the browser sync-storage backend. `options` is
// the raw user config; validation happens inside the controller's create().
// init() wires the engine callbacks (autoCommit, getSnapshot,
// onAutoCommitError) into the controller and returns it together with the
// initial tree and size; resolveBackendLimitBytes() reports the configured
// total-byte ceiling.
var syncStorageDriver = (options = {}) => ({
  init: async (callbacks) => {
    const { controller, initialTreeJSON, initialCurrentSizeBytes } =
      await SyncStorageBackendController.create({
        config: options,
        autoCommit: callbacks.autoCommit,
        getSnapshot: callbacks.getSnapshot,
        onAutoCommitError: callbacks.onAutoCommitError
      });
    return { controller, initialTreeJSON, initialCurrentSizeBytes };
  },
  resolveBackendLimitBytes: () =>
    parseSyncStorageMaxTotalBytesForBackendLimit(options)
});
2938
// Annotate the CommonJS export names for ESM import in node:
// (esbuild pattern: the `0 &&` makes this dead code at runtime, but Node's
// CJS named-export detection still parses the listed identifiers.)
0 && (module.exports = {
  BinaryFormatError,
  ClosedDatastoreError,
  ConfigurationError,
  DatabaseLockedError,
  Datastore,
  FrostpillarError,
  IndexCorruptionError,
  InvalidQueryRangeError,
  PageCorruptionError,
  QuotaExceededError,
  StorageEngineError,
  UnsupportedBackendError,
  ValidationError,
  indexedDBDriver,
  localStorageDriver,
  opfsDriver,
  syncStorageDriver
});