@frostpillar/frostpillar-storage-engine 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/LICENSE +21 -0
  2. package/README-JA.md +1205 -0
  3. package/README.md +1204 -0
  4. package/dist/drivers/file.cjs +960 -0
  5. package/dist/drivers/file.d.ts +3 -0
  6. package/dist/drivers/file.js +18 -0
  7. package/dist/drivers/indexedDB.cjs +570 -0
  8. package/dist/drivers/indexedDB.d.ts +3 -0
  9. package/dist/drivers/indexedDB.js +18 -0
  10. package/dist/drivers/localStorage.cjs +668 -0
  11. package/dist/drivers/localStorage.d.ts +3 -0
  12. package/dist/drivers/localStorage.js +23 -0
  13. package/dist/drivers/opfs.cjs +550 -0
  14. package/dist/drivers/opfs.d.ts +3 -0
  15. package/dist/drivers/opfs.js +18 -0
  16. package/dist/drivers/syncStorage.cjs +898 -0
  17. package/dist/drivers/syncStorage.d.ts +3 -0
  18. package/dist/drivers/syncStorage.js +22 -0
  19. package/dist/drivers/validation.d.ts +1 -0
  20. package/dist/drivers/validation.js +8 -0
  21. package/dist/errors/index.d.ts +32 -0
  22. package/dist/errors/index.js +48 -0
  23. package/dist/frostpillar-storage-engine.min.js +1 -0
  24. package/dist/index.cjs +2957 -0
  25. package/dist/index.d.ts +7 -0
  26. package/dist/index.js +6 -0
  27. package/dist/storage/backend/asyncDurableAutoCommitController.d.ts +26 -0
  28. package/dist/storage/backend/asyncDurableAutoCommitController.js +188 -0
  29. package/dist/storage/backend/asyncMutex.d.ts +7 -0
  30. package/dist/storage/backend/asyncMutex.js +38 -0
  31. package/dist/storage/backend/autoCommit.d.ts +2 -0
  32. package/dist/storage/backend/autoCommit.js +22 -0
  33. package/dist/storage/backend/capacity.d.ts +2 -0
  34. package/dist/storage/backend/capacity.js +27 -0
  35. package/dist/storage/backend/capacityResolver.d.ts +3 -0
  36. package/dist/storage/backend/capacityResolver.js +25 -0
  37. package/dist/storage/backend/encoding.d.ts +17 -0
  38. package/dist/storage/backend/encoding.js +148 -0
  39. package/dist/storage/backend/types.d.ts +184 -0
  40. package/dist/storage/backend/types.js +1 -0
  41. package/dist/storage/btree/recordKeyIndexBTree.d.ts +39 -0
  42. package/dist/storage/btree/recordKeyIndexBTree.js +104 -0
  43. package/dist/storage/config/config.browser.d.ts +4 -0
  44. package/dist/storage/config/config.browser.js +8 -0
  45. package/dist/storage/config/config.d.ts +1 -0
  46. package/dist/storage/config/config.js +1 -0
  47. package/dist/storage/config/config.node.d.ts +4 -0
  48. package/dist/storage/config/config.node.js +74 -0
  49. package/dist/storage/config/config.shared.d.ts +6 -0
  50. package/dist/storage/config/config.shared.js +105 -0
  51. package/dist/storage/datastore/Datastore.d.ts +47 -0
  52. package/dist/storage/datastore/Datastore.js +525 -0
  53. package/dist/storage/datastore/datastoreClose.d.ts +12 -0
  54. package/dist/storage/datastore/datastoreClose.js +60 -0
  55. package/dist/storage/datastore/datastoreKeyDefinition.d.ts +7 -0
  56. package/dist/storage/datastore/datastoreKeyDefinition.js +60 -0
  57. package/dist/storage/datastore/datastoreLifecycle.d.ts +18 -0
  58. package/dist/storage/datastore/datastoreLifecycle.js +63 -0
  59. package/dist/storage/datastore/mutationById.d.ts +29 -0
  60. package/dist/storage/datastore/mutationById.js +71 -0
  61. package/dist/storage/drivers/IndexedDB/indexedDBBackend.d.ts +11 -0
  62. package/dist/storage/drivers/IndexedDB/indexedDBBackend.js +109 -0
  63. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.d.ts +27 -0
  64. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.js +60 -0
  65. package/dist/storage/drivers/IndexedDB/indexedDBConfig.d.ts +7 -0
  66. package/dist/storage/drivers/IndexedDB/indexedDBConfig.js +24 -0
  67. package/dist/storage/drivers/file/fileBackend.d.ts +5 -0
  68. package/dist/storage/drivers/file/fileBackend.js +168 -0
  69. package/dist/storage/drivers/file/fileBackendController.d.ts +31 -0
  70. package/dist/storage/drivers/file/fileBackendController.js +72 -0
  71. package/dist/storage/drivers/file/fileBackendSnapshot.d.ts +10 -0
  72. package/dist/storage/drivers/file/fileBackendSnapshot.js +166 -0
  73. package/dist/storage/drivers/localStorage/localStorageBackend.d.ts +10 -0
  74. package/dist/storage/drivers/localStorage/localStorageBackend.js +156 -0
  75. package/dist/storage/drivers/localStorage/localStorageBackendController.d.ts +24 -0
  76. package/dist/storage/drivers/localStorage/localStorageBackendController.js +35 -0
  77. package/dist/storage/drivers/localStorage/localStorageConfig.d.ts +10 -0
  78. package/dist/storage/drivers/localStorage/localStorageConfig.js +16 -0
  79. package/dist/storage/drivers/localStorage/localStorageLayout.d.ts +5 -0
  80. package/dist/storage/drivers/localStorage/localStorageLayout.js +29 -0
  81. package/dist/storage/drivers/opfs/opfsBackend.d.ts +12 -0
  82. package/dist/storage/drivers/opfs/opfsBackend.js +142 -0
  83. package/dist/storage/drivers/opfs/opfsBackendController.d.ts +26 -0
  84. package/dist/storage/drivers/opfs/opfsBackendController.js +44 -0
  85. package/dist/storage/drivers/syncStorage/syncStorageAdapter.d.ts +2 -0
  86. package/dist/storage/drivers/syncStorage/syncStorageAdapter.js +123 -0
  87. package/dist/storage/drivers/syncStorage/syncStorageBackend.d.ts +11 -0
  88. package/dist/storage/drivers/syncStorage/syncStorageBackend.js +169 -0
  89. package/dist/storage/drivers/syncStorage/syncStorageBackendController.d.ts +24 -0
  90. package/dist/storage/drivers/syncStorage/syncStorageBackendController.js +34 -0
  91. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.d.ts +2 -0
  92. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.js +28 -0
  93. package/dist/storage/drivers/syncStorage/syncStorageConfig.d.ts +13 -0
  94. package/dist/storage/drivers/syncStorage/syncStorageConfig.js +42 -0
  95. package/dist/storage/drivers/syncStorage/syncStorageQuota.d.ts +3 -0
  96. package/dist/storage/drivers/syncStorage/syncStorageQuota.js +45 -0
  97. package/dist/storage/record/ordering.d.ts +3 -0
  98. package/dist/storage/record/ordering.js +7 -0
  99. package/dist/types.d.ts +125 -0
  100. package/dist/types.js +1 -0
  101. package/dist/validation/metadata.d.ts +1 -0
  102. package/dist/validation/metadata.js +7 -0
  103. package/dist/validation/payload.d.ts +7 -0
  104. package/dist/validation/payload.js +135 -0
  105. package/dist/validation/typeGuards.d.ts +1 -0
  106. package/dist/validation/typeGuards.js +7 -0
  107. package/package.json +110 -0
@@ -0,0 +1,525 @@
1
+ /* eslint-disable max-lines */
2
+ import { ConfigurationError, IndexCorruptionError, InvalidQueryRangeError, QuotaExceededError, ValidationError, toErrorInstance, } from '../../errors/index.js';
3
+ import { toPublicRecord } from '../record/ordering.js';
4
+ import { emitAutoCommitErrorToListeners } from '../backend/autoCommit.js';
5
+ import { AsyncMutex } from '../backend/asyncMutex.js';
6
+ import { validateAndNormalizePayload } from '../../validation/payload.js';
7
+ import { enforceCapacityPolicy } from '../backend/capacity.js';
8
+ import { resolveCapacityState } from '../backend/capacityResolver.js';
9
+ import { estimateKeySizeBytes, estimateRecordSizeBytes } from '../backend/encoding.js';
10
+ import { parseDuplicateKeyConfig } from '../config/config.shared.js';
11
+ import { DatastoreLifecycle } from './datastoreLifecycle.js';
12
+ import { deleteRecordById, getPublicRecordById, updateRecordById, } from './mutationById.js';
13
+ import { closeDatastore } from './datastoreClose.js';
14
+ import { RecordKeyIndexBTree, clampComparatorResult, normalizeComparatorResult, } from '../btree/recordKeyIndexBTree.js';
15
+ import { readRawInsertKey, resolveKeyDefinition, } from './datastoreKeyDefinition.js';
16
+ export class Datastore {
17
+ errorListeners;
18
+ keyIndex;
19
+ keyDefinition;
20
+ duplicateKeyPolicy;
21
+ capacityState;
22
+ skipPayloadValidation;
23
+ lifecycle;
24
+ writeMutex;
25
+ currentSizeBytes;
26
+ backendController;
27
+ pendingInit;
28
+ pendingInitError;
29
+ constructor(config) {
30
+ this.errorListeners = new Set();
31
+ this.keyDefinition = resolveKeyDefinition(config);
32
+ const duplicateKeys = parseDuplicateKeyConfig(config.duplicateKeys);
33
+ this.duplicateKeyPolicy = duplicateKeys;
34
+ this.keyIndex = new RecordKeyIndexBTree({
35
+ compareKeys: (left, right) => this.keyDefinition.compare(left, right),
36
+ duplicateKeys,
37
+ });
38
+ this.capacityState = resolveCapacityState(config);
39
+ this.skipPayloadValidation = config.skipPayloadValidation === true;
40
+ this.lifecycle = new DatastoreLifecycle();
41
+ this.writeMutex = new AsyncMutex();
42
+ this.currentSizeBytes = 0;
43
+ this.backendController = null;
44
+ this.pendingInit = null;
45
+ this.pendingInitError = null;
46
+ if (config.driver === undefined) {
47
+ if (config.autoCommit !== undefined) {
48
+ throw new ConfigurationError('autoCommit requires a durable driver.');
49
+ }
50
+ return;
51
+ }
52
+ const backendInit = config.driver.init({
53
+ getSnapshot: () => ({
54
+ treeJSON: this.keyIndex.toJSON(),
55
+ }),
56
+ autoCommit: config.autoCommit,
57
+ onAutoCommitError: (error) => {
58
+ emitAutoCommitErrorToListeners(this.errorListeners, error);
59
+ },
60
+ });
61
+ if (!isPromiseLike(backendInit)) {
62
+ this.applyBackendInitResult(backendInit);
63
+ return;
64
+ }
65
+ this.pendingInit = Promise.resolve(backendInit)
66
+ .then((result) => { this.applyBackendInitResult(result); })
67
+ .catch((error) => {
68
+ this.pendingInitError = toErrorInstance(error, 'Datastore backend initialization failed with a non-Error value.');
69
+ })
70
+ .finally(() => {
71
+ this.pendingInit = null; // clear atomically after init settles (single-flight)
72
+ });
73
+ }
74
+ put(record) {
75
+ return this.runWithOpenExclusive(() => this.putSingle(record));
76
+ }
77
+ get(key) {
78
+ return this.runWithOpen(() => {
79
+ const normalizedKey = this.keyDefinition.normalize(key, 'key');
80
+ return this.keyIndex.rangeQuery(normalizedKey, normalizedKey).map((e) => toPublicRecord(e.entryId, e.key, e.value));
81
+ });
82
+ }
83
+ getFirst(key) {
84
+ return this.runWithOpen(() => {
85
+ const normalizedKey = this.keyDefinition.normalize(key, 'key');
86
+ const entry = this.keyIndex.findFirst(normalizedKey);
87
+ if (entry === null) {
88
+ return null;
89
+ }
90
+ return toPublicRecord(entry.entryId, entry.key, entry.value);
91
+ });
92
+ }
93
+ getLast(key) {
94
+ return this.runWithOpen(() => {
95
+ const normalizedKey = this.keyDefinition.normalize(key, 'key');
96
+ const entry = this.keyIndex.findLast(normalizedKey);
97
+ if (entry === null) {
98
+ return null;
99
+ }
100
+ return toPublicRecord(entry.entryId, entry.key, entry.value);
101
+ });
102
+ }
103
+ delete(key) {
104
+ return this.runWithOpenExclusive(() => this.deleteSingle(key));
105
+ }
106
+ has(key) {
107
+ return this.runWithOpen(() => {
108
+ const normalizedKey = this.keyDefinition.normalize(key, 'key');
109
+ return this.keyIndex.hasKey(normalizedKey);
110
+ });
111
+ }
112
+ getAll() {
113
+ return this.runWithOpen(() => {
114
+ return this.keyIndex.snapshot().map((e) => toPublicRecord(e.entryId, e.key, e.value));
115
+ });
116
+ }
117
+ getRange(start, end) {
118
+ return this.runWithOpen(() => {
119
+ const normalizedStart = this.keyDefinition.normalize(start, 'start');
120
+ const normalizedEnd = this.keyDefinition.normalize(end, 'end');
121
+ if (normalizeComparatorResult(this.keyDefinition.compare(normalizedStart, normalizedEnd)) > 0) {
122
+ throw new InvalidQueryRangeError('start must be <= end.');
123
+ }
124
+ return this.keyIndex.rangeQuery(normalizedStart, normalizedEnd).map((e) => toPublicRecord(e.entryId, e.key, e.value));
125
+ });
126
+ }
127
+ getMany(keys) {
128
+ return this.runWithOpen(() => {
129
+ const normalizedKeys = [];
130
+ for (const key of keys) {
131
+ normalizedKeys.push(this.keyDefinition.normalize(key, 'key'));
132
+ }
133
+ normalizedKeys.sort((left, right) => clampComparatorResult(this.keyDefinition.compare(left, right)));
134
+ const results = [];
135
+ let lastKey = undefined;
136
+ for (let i = 0; i < normalizedKeys.length; i += 1) {
137
+ if (i > 0 && clampComparatorResult(this.keyDefinition.compare(normalizedKeys[i], lastKey)) === 0) {
138
+ continue;
139
+ }
140
+ lastKey = normalizedKeys[i];
141
+ const entries = this.keyIndex.rangeQuery(normalizedKeys[i], normalizedKeys[i]);
142
+ for (const entry of entries) {
143
+ results.push(toPublicRecord(entry.entryId, entry.key, entry.value));
144
+ }
145
+ }
146
+ return results;
147
+ });
148
+ }
149
+ putMany(records) {
150
+ return this.runWithOpenExclusive(async () => {
151
+ // P12: Pure in-memory sync loop — no capacity, no backend, no microtask overhead
152
+ if (this.capacityState === null && this.backendController === null) {
153
+ for (const record of records) {
154
+ const { rawKey, keyFieldName } = readRawInsertKey(record);
155
+ const normalizedKey = this.keyDefinition.normalize(rawKey, keyFieldName);
156
+ if (this.duplicateKeyPolicy === 'reject' && this.keyIndex.findFirst(normalizedKey) !== null) {
157
+ throw new ValidationError('Duplicate key rejected: a record with this key already exists.');
158
+ }
159
+ const normalizedPayload = this.skipPayloadValidation
160
+ ? record.payload
161
+ : validateAndNormalizePayload(record.payload).payload;
162
+ this.keyIndex.put(normalizedKey, { payload: normalizedPayload, sizeBytes: 0 });
163
+ }
164
+ return;
165
+ }
166
+ // No capacity but has backend: need async for handleRecordAppended
167
+ if (this.capacityState === null) {
168
+ for (const record of records) {
169
+ await this.putSingle(record);
170
+ }
171
+ return;
172
+ }
173
+ // Turnover policy: per-record path (eviction is order-dependent)
174
+ if (this.capacityState.policy === 'turnover') {
175
+ for (const record of records) {
176
+ await this.putSingle(record);
177
+ }
178
+ return;
179
+ }
180
+ // Strict policy: atomic batch — prepare phase first, then insert
181
+ await this.putManyStrict(records);
182
+ });
183
+ }
184
+ deleteMany(keys) {
185
+ return this.runWithOpenExclusive(async () => {
186
+ // P12: Pure in-memory sync loop — no backend, no microtask overhead
187
+ if (this.backendController === null) {
188
+ let totalRemoved = 0;
189
+ for (const key of keys) {
190
+ const normalizedKey = this.keyDefinition.normalize(key, 'key');
191
+ const entries = this.keyIndex.rangeQuery(normalizedKey, normalizedKey);
192
+ if (entries.length === 0) {
193
+ continue;
194
+ }
195
+ let freedBytes = 0;
196
+ for (const entry of entries) {
197
+ freedBytes += entry.value.sizeBytes;
198
+ }
199
+ totalRemoved += this.keyIndex.deleteRange(normalizedKey, normalizedKey);
200
+ this.currentSizeBytes = Math.max(0, this.currentSizeBytes - freedBytes);
201
+ }
202
+ return totalRemoved;
203
+ }
204
+ let totalRemoved = 0;
205
+ for (const key of keys) {
206
+ totalRemoved += await this.deleteSingle(key);
207
+ }
208
+ return totalRemoved;
209
+ });
210
+ }
211
+ clear() {
212
+ return this.runWithOpenExclusive(async () => {
213
+ this.keyIndex.clear();
214
+ this.currentSizeBytes = 0;
215
+ await this.backendController?.handleCleared();
216
+ });
217
+ }
218
+ count() {
219
+ return this.runWithOpen(() => {
220
+ return this.keyIndex.size();
221
+ });
222
+ }
223
+ keys() {
224
+ return this.runWithOpen(() => {
225
+ const distinctKeys = [];
226
+ let lastKey = undefined;
227
+ let isFirst = true;
228
+ for (const key of this.keyIndex.keys()) {
229
+ if (isFirst || clampComparatorResult(this.keyDefinition.compare(key, lastKey)) !== 0) {
230
+ distinctKeys.push(key);
231
+ lastKey = key;
232
+ isFirst = false;
233
+ }
234
+ }
235
+ return distinctKeys;
236
+ });
237
+ }
238
+ getById(id) {
239
+ return this.runWithOpen(() => {
240
+ return getPublicRecordById(this.keyIndex, id);
241
+ });
242
+ }
243
+ updateById(id, patch) {
244
+ return this.runWithOpenExclusive(async () => {
245
+ const result = updateRecordById({
246
+ keyIndex: this.keyIndex,
247
+ id,
248
+ patch,
249
+ capacityState: this.capacityState,
250
+ currentSizeBytes: this.currentSizeBytes,
251
+ skipPayloadValidation: this.skipPayloadValidation,
252
+ });
253
+ if (!result.updated) {
254
+ return false;
255
+ }
256
+ this.currentSizeBytes = result.currentSizeBytes;
257
+ await this.backendController?.handleRecordAppended(result.durabilitySignalBytes);
258
+ return true;
259
+ });
260
+ }
261
+ deleteById(id) {
262
+ return this.runWithOpenExclusive(async () => {
263
+ const result = deleteRecordById({
264
+ keyIndex: this.keyIndex,
265
+ id,
266
+ currentSizeBytes: this.currentSizeBytes,
267
+ });
268
+ if (!result.deleted) {
269
+ return false;
270
+ }
271
+ this.currentSizeBytes = result.currentSizeBytes;
272
+ await this.backendController?.handleRecordAppended(result.durabilitySignalBytes);
273
+ return true;
274
+ });
275
+ }
276
+ commit() {
277
+ return this.runWithOpenExclusive(async () => {
278
+ await this.backendController?.commitNow();
279
+ });
280
+ }
281
+ on(event, listener) {
282
+ if (event !== 'error') {
283
+ throw new ValidationError('Only "error" event is supported.');
284
+ }
285
+ this.errorListeners.add(listener);
286
+ return () => { this.off(event, listener); };
287
+ }
288
+ off(event, listener) {
289
+ if (event !== 'error') {
290
+ throw new ValidationError('Only "error" event is supported.');
291
+ }
292
+ this.errorListeners.delete(listener);
293
+ }
294
+ async close() {
295
+ await closeDatastore({
296
+ lifecycle: this.lifecycle,
297
+ getPendingInit: () => this.pendingInit,
298
+ getPendingInitError: () => this.pendingInitError,
299
+ setPendingInitError: (pendingInitError) => {
300
+ this.pendingInitError = pendingInitError;
301
+ },
302
+ getBackendController: () => this.backendController,
303
+ setBackendController: (backendController) => {
304
+ this.backendController = backendController;
305
+ },
306
+ clearInMemoryState: () => {
307
+ this.keyIndex.clear();
308
+ this.errorListeners.clear();
309
+ },
310
+ });
311
+ }
312
+ resolvePayload(record, normalizedKey) {
313
+ if (this.skipPayloadValidation) {
314
+ const payload = record.payload;
315
+ return { payload, encodedBytes: estimateRecordSizeBytes(normalizedKey, payload) };
316
+ }
317
+ const validationResult = validateAndNormalizePayload(record.payload);
318
+ const keyBytes = estimateKeySizeBytes(normalizedKey);
319
+ return { payload: validationResult.payload, encodedBytes: validationResult.sizeBytes + keyBytes };
320
+ }
321
+ async putSingle(record) {
322
+ const { rawKey, keyFieldName } = readRawInsertKey(record);
323
+ const normalizedKey = this.keyDefinition.normalize(rawKey, keyFieldName);
324
+ // Fast-reject before expensive validation/serialization
325
+ if (this.duplicateKeyPolicy === 'reject' && this.keyIndex.findFirst(normalizedKey) !== null) {
326
+ throw new ValidationError('Duplicate key rejected: a record with this key already exists.');
327
+ }
328
+ // P5-A: Capacity-Bypass Fast Path — no capacity, no size tracking needed
329
+ if (this.capacityState === null && this.backendController === null) {
330
+ const normalizedPayload = this.skipPayloadValidation
331
+ ? record.payload
332
+ : validateAndNormalizePayload(record.payload).payload;
333
+ this.keyIndex.put(normalizedKey, { payload: normalizedPayload, sizeBytes: 0 });
334
+ return;
335
+ }
336
+ // Size computation needed (capacity or durable backend)
337
+ const { payload: normalizedPayload, encodedBytes } = this.resolvePayload(record, normalizedKey);
338
+ if (this.capacityState === null) {
339
+ // Durable but no capacity: bytes for backend signal only
340
+ this.keyIndex.put(normalizedKey, { payload: normalizedPayload, sizeBytes: encodedBytes });
341
+ await this.backendController.handleRecordAppended(encodedBytes);
342
+ return;
343
+ }
344
+ // Full enforcement path (capacity configured)
345
+ const persistedRecord = { payload: normalizedPayload, sizeBytes: encodedBytes };
346
+ if (encodedBytes > this.capacityState.maxSizeBytes) {
347
+ throw new QuotaExceededError('Record exceeds configured capacity.maxSize boundary.');
348
+ }
349
+ // For replace policy: remove the existing record before capacity enforcement
350
+ // so that the turnover eviction loop cannot evict the replacement target,
351
+ // which would cause the capacity delta to be miscalculated.
352
+ if (this.duplicateKeyPolicy === 'replace') {
353
+ const existing = this.keyIndex.findFirst(normalizedKey);
354
+ if (existing !== null) {
355
+ this.currentSizeBytes = Math.max(0, this.currentSizeBytes - existing.value.sizeBytes);
356
+ this.keyIndex.removeById(existing.entryId);
357
+ }
358
+ }
359
+ this.currentSizeBytes = enforceCapacityPolicy(this.capacityState, this.currentSizeBytes, encodedBytes, () => this.keyIndex.size(), () => {
360
+ const evicted = this.keyIndex.popFirst();
361
+ if (evicted === null) {
362
+ throw new IndexCorruptionError('Record buffer reported empty state during turnover eviction.');
363
+ }
364
+ return evicted.value.sizeBytes;
365
+ });
366
+ this.keyIndex.put(normalizedKey, persistedRecord);
367
+ this.currentSizeBytes = Math.max(0, this.currentSizeBytes + encodedBytes);
368
+ await this.backendController?.handleRecordAppended(encodedBytes);
369
+ }
370
+ async putManyStrict(records) {
371
+ const capacityState = this.capacityState;
372
+ const compare = this.keyDefinition.compare;
373
+ // Phase 1: Normalize all records and tag with original index — O(M)
374
+ const tagged = [];
375
+ for (let i = 0; i < records.length; i += 1) {
376
+ const { rawKey, keyFieldName } = readRawInsertKey(records[i]);
377
+ tagged.push({ idx: i, normalizedKey: this.keyDefinition.normalize(rawKey, keyFieldName), record: records[i] });
378
+ }
379
+ // Phase 2: Sort by (key, originalIndex) — O(M log M)
380
+ tagged.sort((a, b) => {
381
+ const cmp = clampComparatorResult(compare(a.normalizedKey, b.normalizedKey));
382
+ return cmp !== 0 ? cmp : a.idx - b.idx;
383
+ });
384
+ // Phase 3: Detect duplicates and build the deduplicated insertion list — O(M)
385
+ const { prepared, totalBatchDelta } = this.buildStrictBatchEntries(tagged, compare, capacityState.maxSizeBytes);
386
+ // Phase 4: Budget check — all-or-nothing
387
+ if (this.currentSizeBytes + totalBatchDelta > capacityState.maxSizeBytes) {
388
+ throw new QuotaExceededError('Insert exceeds configured capacity.maxSize under strict policy.');
389
+ }
390
+ // Phase 5: Insert — safe to mutate
391
+ let effectiveTotalDelta = 0;
392
+ let totalEncodedBytes = 0;
393
+ for (const { normalizedKey, persistedRecord, encodedBytes, replacedBytes } of prepared) {
394
+ const actualReplaced = replacedBytes > 0 && this.keyIndex.findFirst(normalizedKey) === null ? 0 : replacedBytes;
395
+ effectiveTotalDelta += encodedBytes - actualReplaced;
396
+ totalEncodedBytes += encodedBytes;
397
+ this.keyIndex.put(normalizedKey, persistedRecord);
398
+ }
399
+ this.currentSizeBytes = Math.max(0, this.currentSizeBytes + effectiveTotalDelta);
400
+ await this.backendController?.handleRecordAppended(totalEncodedBytes);
401
+ }
402
+ buildStrictBatchEntries(tagged, compare, maxSizeBytes) {
403
+ const prepared = [];
404
+ let totalBatchDelta = 0;
405
+ for (let i = 0; i < tagged.length; i += 1) {
406
+ const { normalizedKey, record } = tagged[i];
407
+ const isIntraBatchDuplicate = i > 0 && clampComparatorResult(compare(tagged[i - 1].normalizedKey, normalizedKey)) === 0;
408
+ if (this.duplicateKeyPolicy === 'reject') {
409
+ if (isIntraBatchDuplicate || this.keyIndex.findFirst(normalizedKey) !== null) {
410
+ throw new ValidationError('Duplicate key rejected: a record with this key already exists.');
411
+ }
412
+ }
413
+ const { payload: normalizedPayload, encodedBytes } = this.resolvePayload(record, normalizedKey);
414
+ if (encodedBytes > maxSizeBytes) {
415
+ throw new QuotaExceededError('Record exceeds configured capacity.maxSize boundary.');
416
+ }
417
+ let replacedBytes = 0;
418
+ if (this.duplicateKeyPolicy === 'replace' && isIntraBatchDuplicate) {
419
+ const prev = prepared[prepared.length - 1];
420
+ totalBatchDelta -= prev.encodedBytes - prev.replacedBytes;
421
+ replacedBytes = prev.replacedBytes;
422
+ prepared.pop();
423
+ }
424
+ else if (this.duplicateKeyPolicy === 'replace') {
425
+ const existing = this.keyIndex.findFirst(normalizedKey);
426
+ replacedBytes = existing !== null ? existing.value.sizeBytes : 0;
427
+ }
428
+ const persistedRecord = { payload: normalizedPayload, sizeBytes: encodedBytes };
429
+ totalBatchDelta += encodedBytes - replacedBytes;
430
+ prepared.push({ normalizedKey, persistedRecord, encodedBytes, replacedBytes });
431
+ }
432
+ return { prepared, totalBatchDelta };
433
+ }
434
+ async deleteSingle(key) {
435
+ const normalizedKey = this.keyDefinition.normalize(key, 'key');
436
+ const entries = this.keyIndex.rangeQuery(normalizedKey, normalizedKey);
437
+ if (entries.length === 0) {
438
+ return 0;
439
+ }
440
+ let freedBytes = 0;
441
+ for (const entry of entries) {
442
+ freedBytes += entry.value.sizeBytes;
443
+ }
444
+ const removedCount = this.keyIndex.deleteRange(normalizedKey, normalizedKey);
445
+ this.currentSizeBytes = Math.max(0, this.currentSizeBytes - freedBytes);
446
+ await this.backendController?.handleRecordAppended(freedBytes);
447
+ return removedCount;
448
+ }
449
+ // P7: Synchronous fast-path — avoids async/Promise overhead for read operations
450
+ // when no pending init exists.
451
+ runWithOpen(operation) {
452
+ if (this.pendingInit !== null) {
453
+ return this.pendingInit.then(() => {
454
+ if (this.pendingInitError !== null) {
455
+ throw this.pendingInitError;
456
+ }
457
+ return this.executeWithLifecycle(operation);
458
+ });
459
+ }
460
+ if (this.pendingInitError !== null) {
461
+ return Promise.reject(this.pendingInitError);
462
+ }
463
+ try {
464
+ return Promise.resolve(this.executeWithLifecycle(operation));
465
+ }
466
+ catch (error) {
467
+ return Promise.reject(error instanceof Error ? error : new Error(String(error)));
468
+ }
469
+ }
470
+ executeWithLifecycle(operation) {
471
+ this.lifecycle.beginOperation();
472
+ try {
473
+ const result = operation();
474
+ if (isPromiseLike(result)) {
475
+ return Promise.resolve(result).then((value) => { this.lifecycle.endOperation(); return value; }, (error) => { this.lifecycle.endOperation(); throw error; });
476
+ }
477
+ this.lifecycle.endOperation();
478
+ return result;
479
+ }
480
+ catch (error) {
481
+ this.lifecycle.endOperation();
482
+ throw error;
483
+ }
484
+ }
485
+ async runWithOpenExclusive(operation) {
486
+ const release = await this.writeMutex.acquire();
487
+ try {
488
+ return await this.runWithOpen(operation);
489
+ }
490
+ finally {
491
+ release();
492
+ }
493
+ }
494
+ applyBackendInitResult(result) {
495
+ if (result.initialTreeJSON !== null) {
496
+ this.keyIndex = RecordKeyIndexBTree.fromJSON(result.initialTreeJSON, {
497
+ compareKeys: (left, right) => {
498
+ return this.keyDefinition.compare(left, right);
499
+ },
500
+ duplicateKeys: this.duplicateKeyPolicy,
501
+ });
502
+ this.backfillMissingSizeBytes();
503
+ }
504
+ this.currentSizeBytes = result.initialCurrentSizeBytes;
505
+ this.backendController = result.controller;
506
+ }
507
+ backfillMissingSizeBytes() {
508
+ for (const entry of this.keyIndex.snapshot()) {
509
+ if (typeof entry.value.sizeBytes !== 'number') {
510
+ const patched = {
511
+ payload: entry.value.payload,
512
+ sizeBytes: estimateRecordSizeBytes(entry.key, entry.value.payload),
513
+ };
514
+ this.keyIndex.updateById(entry.entryId, patched);
515
+ }
516
+ }
517
+ }
518
+ }
519
+ const isPromiseLike = (value) => {
520
+ if ((typeof value !== 'object' && typeof value !== 'function') ||
521
+ value === null) {
522
+ return false;
523
+ }
524
+ return typeof value.then === 'function';
525
+ };
@@ -0,0 +1,12 @@
1
+ import { DatastoreLifecycle } from './datastoreLifecycle.js';
2
+ import type { DurableBackendController } from '../backend/types.js';
3
+ export interface DatastoreCloseOptions {
4
+ lifecycle: DatastoreLifecycle;
5
+ getPendingInit: () => Promise<void> | null;
6
+ getPendingInitError: () => Error | null;
7
+ setPendingInitError: (pendingInitError: Error | null) => void;
8
+ getBackendController: () => DurableBackendController | null;
9
+ setBackendController: (backendController: DurableBackendController | null) => void;
10
+ clearInMemoryState: () => void;
11
+ }
12
+ export declare const closeDatastore: (options: DatastoreCloseOptions) => Promise<void>;
@@ -0,0 +1,60 @@
1
+ import { toErrorInstance } from '../../errors/index.js';
2
+ const readAggregateErrorConstructor = () => {
3
+ const candidate = globalThis.AggregateError;
4
+ if (typeof candidate !== 'function') {
5
+ return null;
6
+ }
7
+ return candidate;
8
+ };
9
+ const createCloseAggregateError = (deferredError, closeError) => {
10
+ const aggregateErrorConstructor = readAggregateErrorConstructor();
11
+ if (aggregateErrorConstructor !== null) {
12
+ return new aggregateErrorConstructor([deferredError, closeError], 'Datastore close failed with multiple errors.');
13
+ }
14
+ const fallbackError = new Error('Datastore close failed with multiple errors.');
15
+ fallbackError.errors = [deferredError, closeError];
16
+ return fallbackError;
17
+ };
18
+ export const closeDatastore = async (options) => {
19
+ if (options.lifecycle.isClosed()) {
20
+ return;
21
+ }
22
+ const closeInFlight = options.lifecycle.getCloseInFlight();
23
+ if (closeInFlight !== null) {
24
+ await closeInFlight;
25
+ return;
26
+ }
27
+ options.lifecycle.markClosing();
28
+ const closeOperation = performClose(options).finally(() => {
29
+ options.lifecycle.setCloseInFlight(null);
30
+ });
31
+ options.lifecycle.setCloseInFlight(closeOperation);
32
+ await closeOperation;
33
+ };
34
+ const performClose = async (options) => {
35
+ const pendingInit = options.getPendingInit();
36
+ if (pendingInit !== null) {
37
+ await pendingInit;
38
+ }
39
+ await options.lifecycle.waitForActiveOperationsToDrain();
40
+ let deferredError = options.getPendingInitError();
41
+ try {
42
+ await options.getBackendController()?.close();
43
+ }
44
+ catch (error) {
45
+ const closeError = toErrorInstance(error, 'Datastore close failed with a non-Error value.');
46
+ if (deferredError === null) {
47
+ deferredError = closeError;
48
+ }
49
+ else {
50
+ deferredError = createCloseAggregateError(deferredError, closeError);
51
+ }
52
+ }
53
+ options.setBackendController(null);
54
+ options.setPendingInitError(null);
55
+ options.lifecycle.markClosed();
56
+ options.clearInMemoryState();
57
+ if (deferredError !== null) {
58
+ throw deferredError;
59
+ }
60
+ };
@@ -0,0 +1,7 @@
1
+ import type { DatastoreConfig, DatastoreKeyDefinition } from '../../types.js';
2
+ export declare const DEFAULT_STRING_KEY_DEFINITION: DatastoreKeyDefinition<string, string>;
3
+ export declare const resolveKeyDefinition: (config: DatastoreConfig) => DatastoreKeyDefinition<unknown, unknown>;
4
+ export declare const readRawInsertKey: (rawRecord: Record<string, unknown>) => {
5
+ rawKey: unknown;
6
+ keyFieldName: string;
7
+ };
@@ -0,0 +1,60 @@
1
+ import { ConfigurationError, ValidationError, } from '../../errors/index.js';
2
+ const ensureNonEmptyStringKey = (value, fieldName) => {
3
+ if (typeof value !== 'string') {
4
+ throw new ValidationError(`${fieldName} must be a string.`);
5
+ }
6
+ if (value.length === 0) {
7
+ throw new ValidationError(`${fieldName} must be a non-empty string.`);
8
+ }
9
+ return value;
10
+ };
11
+ export const DEFAULT_STRING_KEY_DEFINITION = {
12
+ normalize: (value, fieldName) => {
13
+ return ensureNonEmptyStringKey(value, fieldName);
14
+ },
15
+ compare: (left, right) => {
16
+ if (left < right) {
17
+ return -1;
18
+ }
19
+ if (left > right) {
20
+ return 1;
21
+ }
22
+ return 0;
23
+ },
24
+ serialize: (key) => {
25
+ return ensureNonEmptyStringKey(key, 'key');
26
+ },
27
+ deserialize: (serialized) => {
28
+ return ensureNonEmptyStringKey(serialized, 'serialized key');
29
+ },
30
+ };
31
+ const validateKeyDefinition = (definition) => {
32
+ if (typeof definition.normalize !== 'function') {
33
+ throw new ConfigurationError('config.key.normalize must be a function.');
34
+ }
35
+ if (typeof definition.compare !== 'function') {
36
+ throw new ConfigurationError('config.key.compare must be a function.');
37
+ }
38
+ if (typeof definition.serialize !== 'function') {
39
+ throw new ConfigurationError('config.key.serialize must be a function.');
40
+ }
41
+ if (typeof definition.deserialize !== 'function') {
42
+ throw new ConfigurationError('config.key.deserialize must be a function.');
43
+ }
44
+ };
45
+ export const resolveKeyDefinition = (config) => {
46
+ if (config.key === undefined) {
47
+ return DEFAULT_STRING_KEY_DEFINITION;
48
+ }
49
+ validateKeyDefinition(config.key);
50
+ return config.key;
51
+ };
52
+ export const readRawInsertKey = (rawRecord) => {
53
+ if (Object.prototype.hasOwnProperty.call(rawRecord, 'key')) {
54
+ return {
55
+ rawKey: rawRecord.key,
56
+ keyFieldName: 'key',
57
+ };
58
+ }
59
+ throw new ValidationError('Record must include "key".');
60
+ };
@@ -0,0 +1,18 @@
1
+ export declare class DatastoreLifecycle {
2
+ private closed;
3
+ private closing;
4
+ private closeInFlight;
5
+ private activeOperationCount;
6
+ private activeOperationsDrained;
7
+ private resolveActiveOperationsDrained;
8
+ constructor();
9
+ isClosed(): boolean;
10
+ markClosing(): void;
11
+ markClosed(): void;
12
+ getCloseInFlight(): Promise<void> | null;
13
+ setCloseInFlight(closeInFlight: Promise<void> | null): void;
14
+ ensureOpen(): void;
15
+ beginOperation(): void;
16
+ endOperation(): void;
17
+ waitForActiveOperationsToDrain(): Promise<void>;
18
+ }