@frostpillar/frostpillar-storage-engine 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/LICENSE +21 -0
  2. package/README-JA.md +1205 -0
  3. package/README.md +1204 -0
  4. package/dist/drivers/file.cjs +960 -0
  5. package/dist/drivers/file.d.ts +3 -0
  6. package/dist/drivers/file.js +18 -0
  7. package/dist/drivers/indexedDB.cjs +570 -0
  8. package/dist/drivers/indexedDB.d.ts +3 -0
  9. package/dist/drivers/indexedDB.js +18 -0
  10. package/dist/drivers/localStorage.cjs +668 -0
  11. package/dist/drivers/localStorage.d.ts +3 -0
  12. package/dist/drivers/localStorage.js +23 -0
  13. package/dist/drivers/opfs.cjs +550 -0
  14. package/dist/drivers/opfs.d.ts +3 -0
  15. package/dist/drivers/opfs.js +18 -0
  16. package/dist/drivers/syncStorage.cjs +898 -0
  17. package/dist/drivers/syncStorage.d.ts +3 -0
  18. package/dist/drivers/syncStorage.js +22 -0
  19. package/dist/drivers/validation.d.ts +1 -0
  20. package/dist/drivers/validation.js +8 -0
  21. package/dist/errors/index.d.ts +32 -0
  22. package/dist/errors/index.js +48 -0
  23. package/dist/frostpillar-storage-engine.min.js +1 -0
  24. package/dist/index.cjs +2957 -0
  25. package/dist/index.d.ts +7 -0
  26. package/dist/index.js +6 -0
  27. package/dist/storage/backend/asyncDurableAutoCommitController.d.ts +26 -0
  28. package/dist/storage/backend/asyncDurableAutoCommitController.js +188 -0
  29. package/dist/storage/backend/asyncMutex.d.ts +7 -0
  30. package/dist/storage/backend/asyncMutex.js +38 -0
  31. package/dist/storage/backend/autoCommit.d.ts +2 -0
  32. package/dist/storage/backend/autoCommit.js +22 -0
  33. package/dist/storage/backend/capacity.d.ts +2 -0
  34. package/dist/storage/backend/capacity.js +27 -0
  35. package/dist/storage/backend/capacityResolver.d.ts +3 -0
  36. package/dist/storage/backend/capacityResolver.js +25 -0
  37. package/dist/storage/backend/encoding.d.ts +17 -0
  38. package/dist/storage/backend/encoding.js +148 -0
  39. package/dist/storage/backend/types.d.ts +184 -0
  40. package/dist/storage/backend/types.js +1 -0
  41. package/dist/storage/btree/recordKeyIndexBTree.d.ts +39 -0
  42. package/dist/storage/btree/recordKeyIndexBTree.js +104 -0
  43. package/dist/storage/config/config.browser.d.ts +4 -0
  44. package/dist/storage/config/config.browser.js +8 -0
  45. package/dist/storage/config/config.d.ts +1 -0
  46. package/dist/storage/config/config.js +1 -0
  47. package/dist/storage/config/config.node.d.ts +4 -0
  48. package/dist/storage/config/config.node.js +74 -0
  49. package/dist/storage/config/config.shared.d.ts +6 -0
  50. package/dist/storage/config/config.shared.js +105 -0
  51. package/dist/storage/datastore/Datastore.d.ts +47 -0
  52. package/dist/storage/datastore/Datastore.js +525 -0
  53. package/dist/storage/datastore/datastoreClose.d.ts +12 -0
  54. package/dist/storage/datastore/datastoreClose.js +60 -0
  55. package/dist/storage/datastore/datastoreKeyDefinition.d.ts +7 -0
  56. package/dist/storage/datastore/datastoreKeyDefinition.js +60 -0
  57. package/dist/storage/datastore/datastoreLifecycle.d.ts +18 -0
  58. package/dist/storage/datastore/datastoreLifecycle.js +63 -0
  59. package/dist/storage/datastore/mutationById.d.ts +29 -0
  60. package/dist/storage/datastore/mutationById.js +71 -0
  61. package/dist/storage/drivers/IndexedDB/indexedDBBackend.d.ts +11 -0
  62. package/dist/storage/drivers/IndexedDB/indexedDBBackend.js +109 -0
  63. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.d.ts +27 -0
  64. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.js +60 -0
  65. package/dist/storage/drivers/IndexedDB/indexedDBConfig.d.ts +7 -0
  66. package/dist/storage/drivers/IndexedDB/indexedDBConfig.js +24 -0
  67. package/dist/storage/drivers/file/fileBackend.d.ts +5 -0
  68. package/dist/storage/drivers/file/fileBackend.js +168 -0
  69. package/dist/storage/drivers/file/fileBackendController.d.ts +31 -0
  70. package/dist/storage/drivers/file/fileBackendController.js +72 -0
  71. package/dist/storage/drivers/file/fileBackendSnapshot.d.ts +10 -0
  72. package/dist/storage/drivers/file/fileBackendSnapshot.js +166 -0
  73. package/dist/storage/drivers/localStorage/localStorageBackend.d.ts +10 -0
  74. package/dist/storage/drivers/localStorage/localStorageBackend.js +156 -0
  75. package/dist/storage/drivers/localStorage/localStorageBackendController.d.ts +24 -0
  76. package/dist/storage/drivers/localStorage/localStorageBackendController.js +35 -0
  77. package/dist/storage/drivers/localStorage/localStorageConfig.d.ts +10 -0
  78. package/dist/storage/drivers/localStorage/localStorageConfig.js +16 -0
  79. package/dist/storage/drivers/localStorage/localStorageLayout.d.ts +5 -0
  80. package/dist/storage/drivers/localStorage/localStorageLayout.js +29 -0
  81. package/dist/storage/drivers/opfs/opfsBackend.d.ts +12 -0
  82. package/dist/storage/drivers/opfs/opfsBackend.js +142 -0
  83. package/dist/storage/drivers/opfs/opfsBackendController.d.ts +26 -0
  84. package/dist/storage/drivers/opfs/opfsBackendController.js +44 -0
  85. package/dist/storage/drivers/syncStorage/syncStorageAdapter.d.ts +2 -0
  86. package/dist/storage/drivers/syncStorage/syncStorageAdapter.js +123 -0
  87. package/dist/storage/drivers/syncStorage/syncStorageBackend.d.ts +11 -0
  88. package/dist/storage/drivers/syncStorage/syncStorageBackend.js +169 -0
  89. package/dist/storage/drivers/syncStorage/syncStorageBackendController.d.ts +24 -0
  90. package/dist/storage/drivers/syncStorage/syncStorageBackendController.js +34 -0
  91. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.d.ts +2 -0
  92. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.js +28 -0
  93. package/dist/storage/drivers/syncStorage/syncStorageConfig.d.ts +13 -0
  94. package/dist/storage/drivers/syncStorage/syncStorageConfig.js +42 -0
  95. package/dist/storage/drivers/syncStorage/syncStorageQuota.d.ts +3 -0
  96. package/dist/storage/drivers/syncStorage/syncStorageQuota.js +45 -0
  97. package/dist/storage/record/ordering.d.ts +3 -0
  98. package/dist/storage/record/ordering.js +7 -0
  99. package/dist/types.d.ts +125 -0
  100. package/dist/types.js +1 -0
  101. package/dist/validation/metadata.d.ts +1 -0
  102. package/dist/validation/metadata.js +7 -0
  103. package/dist/validation/payload.d.ts +7 -0
  104. package/dist/validation/payload.js +135 -0
  105. package/dist/validation/typeGuards.d.ts +1 -0
  106. package/dist/validation/typeGuards.js +7 -0
  107. package/package.json +110 -0
@@ -0,0 +1,72 @@
1
+ import { existsSync } from 'node:fs';
2
+ import { ConfigurationError } from '../../../errors/index.js';
3
+ import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
4
+ import { parseAutoCommitConfig } from '../../config/config.shared.js';
5
+ import { cleanupStaleGenerationFiles, createFileBackend, releaseFileLock } from './fileBackend.js';
6
+ import { commitFileBackendSnapshot, loadFileSnapshot, writeInitialFileSnapshot, } from './fileBackendSnapshot.js';
7
+ export class FileBackendController extends AsyncDurableAutoCommitController {
8
+ backend;
9
+ getSnapshot;
10
+ testHooks;
11
+ constructor(backend, autoCommit, getSnapshot, onAutoCommitError, testHooks) {
12
+ super(autoCommit, onAutoCommitError);
13
+ this.backend = backend;
14
+ this.getSnapshot = getSnapshot;
15
+ this.testHooks = testHooks;
16
+ }
17
+ static create(options) {
18
+ validateNoLegacyTestHooks(options.config);
19
+ const autoCommit = parseAutoCommitConfig(options.autoCommit);
20
+ const backend = createFileBackend(options.config);
21
+ let initialTreeJSON = null;
22
+ let initialCurrentSizeBytes = 0;
23
+ try {
24
+ if (!existsSync(backend.sidecarPath)) {
25
+ writeInitialFileSnapshot(backend);
26
+ }
27
+ else {
28
+ const loaded = loadFileSnapshot(backend);
29
+ initialTreeJSON = loaded.treeJSON;
30
+ initialCurrentSizeBytes = loaded.currentSizeBytes;
31
+ cleanupStaleGenerationFiles(backend);
32
+ }
33
+ }
34
+ catch (error) {
35
+ if (backend.lockAcquired) {
36
+ releaseFileLock(backend);
37
+ }
38
+ throw error;
39
+ }
40
+ const controller = new FileBackendController(backend, autoCommit, options.getSnapshot, options.onAutoCommitError, normalizeTestHooks(options.testHooks));
41
+ return {
42
+ controller,
43
+ initialTreeJSON,
44
+ initialCurrentSizeBytes,
45
+ };
46
+ }
47
+ async executeSingleCommit() {
48
+ await this.testHooks?.beforeCommit?.();
49
+ const snapshot = this.getSnapshot();
50
+ commitFileBackendSnapshot(this.backend, snapshot.treeJSON);
51
+ await this.testHooks?.afterCommit?.();
52
+ }
53
+ onCloseAfterDrain() {
54
+ if (this.backend.lockAcquired) {
55
+ releaseFileLock(this.backend);
56
+ }
57
+ return Promise.resolve();
58
+ }
59
+ }
60
// Reject the legacy config.__testHooks escape hatch: test hooks must be
// passed through FileBackendController.create options instead.
const validateNoLegacyTestHooks = (config) => {
    if ('__testHooks' in config) {
        throw new ConfigurationError('config.__testHooks is not supported. Pass testHooks via FileBackendController.create options.');
    }
};
67
// Coerce an omitted hooks argument to an explicit null so the controller's
// `this.testHooks?.…` checks always see a concrete value.
const normalizeTestHooks = (testHooks) => testHooks ?? null;
@@ -0,0 +1,10 @@
1
import type { BTreeJSON } from '../../../types.js';
import type { FileBackendState } from '../../backend/types.js';
/** Result of reading a persisted snapshot back from disk. */
interface LoadedFileSnapshot {
    /** UTF-8 byte length of the serialized tree JSON as stored. */
    currentSizeBytes: number;
    /** Deserialized B-tree index payload. */
    treeJSON: BTreeJSON<unknown, unknown>;
}
/** Writes an empty generation file plus the sidecar that references it. */
export declare const writeInitialFileSnapshot: (backend: FileBackendState) => void;
/** Loads and validates the sidecar and the generation file it points at. */
export declare const loadFileSnapshot: (backend: FileBackendState) => LoadedFileSnapshot;
/** Durably commits a new generation file and repoints the sidecar to it. */
export declare const commitFileBackendSnapshot: (backend: FileBackendState, treeJSON: BTreeJSON<unknown, unknown>) => void;
export {};
@@ -0,0 +1,166 @@
1
+ import { closeSync, existsSync, fsyncSync, openSync, readFileSync, renameSync, unlinkSync, writeSync, } from 'node:fs';
2
+ import { join } from 'node:path';
3
+ import { PageCorruptionError, StorageEngineError, toStorageEngineError, } from '../../../errors/index.js';
4
+ import { computeUtf8ByteLength } from '../../backend/encoding.js';
5
+ import { RecordKeyIndexBTree } from '../../btree/recordKeyIndexBTree.js';
6
// Write `content` to `filePath` and fsync it so the data is durable before
// returning; the descriptor is closed on every path.
const writeFsync = (filePath, content) => {
    let fd = -1;
    try {
        fd = openSync(filePath, 'w');
        writeSync(fd, content, null, 'utf8');
        fsyncSync(fd);
    }
    finally {
        if (fd !== -1) {
            closeSync(fd);
        }
    }
};
16
// fsync a directory so that rename operations inside it are durable
// (POSIX: directory entries need their own fsync).
const fsyncDirectory = (dirPath) => {
    let fd = -1;
    try {
        fd = openSync(dirPath, 'r');
        fsyncSync(fd);
    }
    finally {
        if (fd !== -1) {
            closeSync(fd);
        }
    }
};
25
// Magic strings and version stamp embedded in every persisted file so that
// loadFileSnapshot can detect foreign or incompatible data.
const SIDE_CAR_MAGIC = 'FPGE_META';
const GENERATION_MAGIC = 'FPGE_DATA';
const FORMAT_VERSION = 2;
// Comparator is irrelevant for an empty tree; a constant 0 suffices.
const noOpComparator = () => 0;
// Serialized form of a brand-new, empty B-tree index, used as the payload of
// the very first generation file.
const createEmptyTreeJSON = () => {
    const tree = new RecordKeyIndexBTree({
        compareKeys: noOpComparator,
    });
    return tree.toJSON();
};
35
// Validate that a value read from the sidecar is a non-negative safe integer,
// throwing PageCorruptionError otherwise. Returns the value unchanged.
// Note: Number.isSafeInteger already returns false for every non-number
// input, so the former `typeof value !== 'number'` clause was dead code
// and has been removed.
const ensureNonNegativeSafeInteger = (value, field) => {
    if (!Number.isSafeInteger(value) || value < 0) {
        throw new PageCorruptionError(`${field} must be a non-negative safe integer.`);
    }
    return value;
};
41
// Validate the sidecar's activeDataFile field: it must be a bare file name
// of the form `<baseFileName>.g.<decimal commitId>`, with no path separators.
// Returns the validated name unchanged.
const validateActiveDataFileName = (value, baseFileName) => {
    const isNonEmptyString = typeof value === 'string' && value.length > 0;
    if (!isNonEmptyString) {
        throw new PageCorruptionError('sidecar.activeDataFile must be a non-empty string.');
    }
    if (value.includes('/') || value.includes('\\')) {
        throw new PageCorruptionError('sidecar.activeDataFile must be a file name without path separators.');
    }
    const expectedPrefix = `${baseFileName}.g.`;
    if (!value.startsWith(expectedPrefix)) {
        throw new PageCorruptionError('sidecar.activeDataFile must follow committed generation file naming.');
    }
    const suffix = value.slice(expectedPrefix.length);
    if (!/^\d+$/.test(suffix)) {
        throw new PageCorruptionError('sidecar.activeDataFile commit suffix must be an unsigned decimal integer.');
    }
    return value;
};
58
// Bootstrap a brand-new database on disk: write an empty generation file
// first, then the sidecar that points at it. Any low-level failure is
// normalized into a StorageEngineError.
export const writeInitialFileSnapshot = (backend) => {
    const activeDataPath = join(backend.directoryPath, backend.activeDataFile);
    const generation = {
        magic: GENERATION_MAGIC,
        version: FORMAT_VERSION,
        treeJSON: createEmptyTreeJSON(),
    };
    const sidecar = {
        magic: SIDE_CAR_MAGIC,
        version: FORMAT_VERSION,
        activeDataFile: backend.activeDataFile,
        commitId: backend.commitId,
    };
    try {
        // Data before metadata: the sidecar must never reference a file
        // that was not fully written.
        writeFsync(activeDataPath, JSON.stringify(generation));
        writeFsync(backend.sidecarPath, JSON.stringify(sidecar, null, 2));
    }
    catch (error) {
        throw toStorageEngineError(error, 'Failed to initialize file backend snapshot');
    }
};
79
// Validate a parsed sidecar document and copy its fields onto the backend
// state. Throws PageCorruptionError on any mismatch.
const applySidecarToBackend = (backend, parsedSidecar) => {
    const headerValid = parsedSidecar.magic === SIDE_CAR_MAGIC &&
        parsedSidecar.version === FORMAT_VERSION;
    if (!headerValid) {
        throw new PageCorruptionError('Invalid sidecar magic/version.');
    }
    backend.activeDataFile = validateActiveDataFileName(parsedSidecar.activeDataFile, backend.baseFileName);
    backend.commitId = ensureNonNegativeSafeInteger(parsedSidecar.commitId, 'sidecar.commitId');
};
87
// Read the generation file referenced by the (already-applied) sidecar,
// check its magic/version header, and measure the serialized tree size.
const loadAndValidateGenerationFile = (backend) => {
    const activeDataPath = join(backend.directoryPath, backend.activeDataFile);
    if (!existsSync(activeDataPath)) {
        throw new PageCorruptionError('Active generation file referenced by sidecar is missing.');
    }
    const parsedGeneration = JSON.parse(readFileSync(activeDataPath, 'utf8'));
    const headerValid = parsedGeneration.magic === GENERATION_MAGIC &&
        parsedGeneration.version === FORMAT_VERSION;
    if (!headerValid) {
        throw new PageCorruptionError('Invalid generation magic/version.');
    }
    return {
        generation: parsedGeneration,
        treeJsonSizeBytes: computeUtf8ByteLength(JSON.stringify(parsedGeneration.treeJSON)),
    };
};
101
// Load an existing database: parse the sidecar, apply it to the backend
// state, then load and validate the generation file it references.
export const loadFileSnapshot = (backend) => {
    try {
        const parsedSidecar = JSON.parse(readFileSync(backend.sidecarPath, 'utf8'));
        applySidecarToBackend(backend, parsedSidecar);
        const validated = loadAndValidateGenerationFile(backend);
        const treeJSON = validated.generation.treeJSON;
        const isPlainObject = typeof treeJSON === 'object' &&
            treeJSON !== null &&
            !Array.isArray(treeJSON);
        if (!isPlainObject) {
            throw new PageCorruptionError('treeJSON must be a non-null plain object.');
        }
        return { treeJSON, currentSizeBytes: validated.treeJsonSizeBytes };
    }
    catch (error) {
        // PageCorruptionError already extends StorageEngineError, so it is
        // passed through unchanged while unknown errors are normalized.
        throw toStorageEngineError(error, 'Failed to load file backend snapshot');
    }
};
120
/**
 * Durably commits a new snapshot using a write-new-generation-then-switch
 * protocol:
 *  1. write the new generation file to a .tmp path (fsynced),
 *  2. rename it into place,
 *  3. write + rename the sidecar to point at the new generation,
 *  4. fsync the directory so both renames survive a crash,
 *  5. only then update in-memory state and best-effort delete the old file.
 * A crash at any point leaves either the old or the new sidecar intact, so
 * the database always loads from a consistent generation.
 */
export const commitFileBackendSnapshot = (backend, treeJSON) => {
    // commitId is embedded in file names; refuse to overflow safe integers.
    if (backend.commitId >= Number.MAX_SAFE_INTEGER) {
        throw new StorageEngineError('File backend commitId has reached Number.MAX_SAFE_INTEGER.');
    }
    const nextCommitId = backend.commitId + 1;
    const nextActiveDataFile = `${backend.baseFileName}.g.${nextCommitId}`;
    const generationTempPath = join(backend.directoryPath, `${nextActiveDataFile}.tmp`);
    const generationPath = join(backend.directoryPath, nextActiveDataFile);
    const sidecarTempPath = `${backend.sidecarPath}.tmp`;
    const generation = {
        magic: GENERATION_MAGIC,
        version: FORMAT_VERSION,
        treeJSON,
    };
    const sidecar = {
        magic: SIDE_CAR_MAGIC,
        version: FORMAT_VERSION,
        activeDataFile: nextActiveDataFile,
        commitId: nextCommitId,
    };
    // Remember the old generation so it can be cleaned up after the switch.
    const previousActiveDataFile = backend.activeDataFile;
    try {
        writeFsync(generationTempPath, JSON.stringify(generation));
        renameSync(generationTempPath, generationPath);
        writeFsync(sidecarTempPath, JSON.stringify(sidecar, null, 2));
        renameSync(sidecarTempPath, backend.sidecarPath);
        fsyncDirectory(backend.directoryPath);
        // In-memory state is updated only after the commit is durable.
        backend.activeDataFile = nextActiveDataFile;
        backend.commitId = nextCommitId;
    }
    catch (error) {
        throw toStorageEngineError(error, 'File commit failed');
    }
    if (previousActiveDataFile !== nextActiveDataFile) {
        const previousPath = join(backend.directoryPath, previousActiveDataFile);
        try {
            if (existsSync(previousPath)) {
                unlinkSync(previousPath);
            }
        }
        catch {
            // Best-effort cleanup: failing to delete a stale generation file
            // does not compromise data integrity since the sidecar already
            // points to the new generation.
        }
    }
};
@@ -0,0 +1,10 @@
1
import type { BTreeJSON } from '../../../types.js';
import type { LocalStorageAdapter, LocalStorageBackendState } from '../../backend/types.js';
/** Result of loading a persisted snapshot from localStorage. */
export interface LoadedLocalStorageSnapshot {
    /** Deserialized tree payload, or null when no manifest exists yet. */
    treeJSON: BTreeJSON<unknown, unknown> | null;
    /** UTF-8 byte length of the serialized tree JSON as stored. */
    currentSizeBytes: number;
}
/** Returns globalThis.localStorage, or null when unavailable or access throws. */
export declare const detectGlobalLocalStorage: () => LocalStorageAdapter | null;
/** Builds the initial mutable backend state with zeroed generation counters. */
export declare const createLocalStorageBackendState: (adapter: LocalStorageAdapter, keyPrefix: string, databaseKey: string, maxChunkChars: number, maxChunks: number) => LocalStorageBackendState;
/** Loads manifest + chunks and applies the generation counters to `state`. */
export declare const loadLocalStorageSnapshot: (state: LocalStorageBackendState) => LoadedLocalStorageSnapshot;
/** Writes a new chunked generation and switches the manifest to it. */
export declare const commitLocalStorageSnapshot: (state: LocalStorageBackendState, treeJSON: BTreeJSON<unknown, unknown>) => void;
@@ -0,0 +1,156 @@
1
+ import { PageCorruptionError, QuotaExceededError, StorageEngineError, } from '../../../errors/index.js';
2
+ import { parseNonNegativeSafeInteger } from '../../../validation/metadata.js';
3
+ import { chunkKey, cleanupGenerationChunks, isQuotaBrowserError, manifestKey, } from './localStorageLayout.js';
4
+ import { computeUtf8ByteLength } from '../../backend/encoding.js';
5
// Magic/version stamp written into every localStorage manifest.
const LS_MAGIC = 'FPLS_META';
const LS_VERSION = 2;
// Returns the global localStorage object, or null when it is absent or
// merely touching it throws (e.g. storage access blocked by the browser).
export const detectGlobalLocalStorage = () => {
    try {
        const g = globalThis;
        return g.localStorage ?? null;
    }
    catch {
        return null;
    }
};
20
// Build the initial mutable backend state: configuration is copied in and
// all generation/commit counters start at zero.
export const createLocalStorageBackendState = (adapter, keyPrefix, databaseKey, maxChunkChars, maxChunks) => {
    return {
        adapter,
        keyPrefix,
        databaseKey,
        maxChunkChars,
        maxChunks,
        activeGeneration: 0,
        commitId: 0,
        activeChunkCount: 0,
    };
};
30
// Parse and validate the stored manifest document: well-formed JSON, the
// expected magic/version header, and a chunkCount within the configured cap.
const parseLocalStorageManifest = (manifestRaw, maxChunks) => {
    let manifest;
    try {
        manifest = JSON.parse(manifestRaw);
    }
    catch {
        throw new StorageEngineError('localStorage manifest JSON is malformed.');
    }
    const headerValid = manifest.magic === LS_MAGIC && manifest.version === LS_VERSION;
    if (!headerValid) {
        throw new StorageEngineError('localStorage manifest magic/version mismatch.');
    }
    const chunkCount = parseNonNegativeSafeInteger(manifest.chunkCount, 'manifest.chunkCount', 'localStorage');
    if (chunkCount > maxChunks) {
        throw new StorageEngineError(`localStorage snapshot requires ${chunkCount} chunks but maxChunks is ${maxChunks}.`);
    }
    return manifest;
};
47
// Read every chunk of the active generation, reassemble and parse the tree
// JSON, and report the UTF-8 size of the raw serialized payload.
const loadLocalStorageChunks = (state, activeGeneration, chunkCount) => {
    const pieces = [];
    for (let index = 0; index < chunkCount; index += 1) {
        const cKey = chunkKey(state.keyPrefix, state.databaseKey, activeGeneration, index);
        const stored = state.adapter.getItem(cKey);
        if (typeof stored !== 'string') {
            throw new StorageEngineError(`localStorage chunk "${cKey}" is missing or not a string.`);
        }
        pieces.push(stored);
    }
    const treeJson = pieces.join('');
    let parsedTreeJSON;
    try {
        parsedTreeJSON = JSON.parse(treeJson);
    }
    catch {
        throw new StorageEngineError('localStorage chunk data JSON is malformed.');
    }
    const isPlainObject = typeof parsedTreeJSON === 'object' &&
        parsedTreeJSON !== null &&
        !Array.isArray(parsedTreeJSON);
    if (!isPlainObject) {
        throw new PageCorruptionError('treeJSON must be a non-null plain object.');
    }
    return {
        treeJSON: parsedTreeJSON,
        rawJsonLength: computeUtf8ByteLength(treeJson),
    };
};
73
// Load the persisted snapshot: no manifest means a fresh database; otherwise
// validate the manifest, read the chunks, and only then apply the generation
// counters to the backend state.
export const loadLocalStorageSnapshot = (state) => {
    const manifestRaw = state.adapter.getItem(manifestKey(state.keyPrefix, state.databaseKey));
    if (manifestRaw === null) {
        return { treeJSON: null, currentSizeBytes: 0 };
    }
    const manifest = parseLocalStorageManifest(manifestRaw, state.maxChunks);
    const activeGeneration = parseNonNegativeSafeInteger(manifest.activeGeneration, 'manifest.activeGeneration', 'localStorage');
    const commitId = parseNonNegativeSafeInteger(manifest.commitId, 'manifest.commitId', 'localStorage');
    const chunkCount = parseNonNegativeSafeInteger(manifest.chunkCount, 'manifest.chunkCount', 'localStorage');
    const { treeJSON, rawJsonLength } = loadLocalStorageChunks(state, activeGeneration, chunkCount);
    state.activeGeneration = activeGeneration;
    state.commitId = commitId;
    state.activeChunkCount = chunkCount;
    return { treeJSON, currentSizeBytes: rawJsonLength };
};
90
// Serialize the tree and slice the JSON string into maxChunkChars-sized
// pieces; throws QuotaExceededError when more than maxChunks would be needed.
const splitTreeJSONIntoChunks = (treeJSON, maxChunkChars, maxChunks, driverName) => {
    const dataJson = JSON.stringify(treeJSON);
    const chunks = [];
    let offset = 0;
    while (offset < dataJson.length) {
        chunks.push(dataJson.slice(offset, offset + maxChunkChars));
        offset += maxChunkChars;
    }
    if (chunks.length > maxChunks) {
        throw new QuotaExceededError(`${driverName} snapshot requires ${chunks.length} chunks but maxChunks is ${maxChunks}.`);
    }
    return chunks;
};
101
// Refuse to commit once either counter would leave safe-integer range,
// since both are stored in JSON and used in key names.
const ensureCommitCountersSafe = (state) => {
    const limit = Number.MAX_SAFE_INTEGER;
    if (state.commitId >= limit) {
        throw new StorageEngineError('localStorage commitId has reached Number.MAX_SAFE_INTEGER.');
    }
    if (state.activeGeneration >= limit) {
        throw new StorageEngineError('localStorage activeGeneration has reached Number.MAX_SAFE_INTEGER.');
    }
};
109
// Precompute everything a commit will write: next counters, chunked payload,
// and the serialized manifest pointing at the new generation.
const prepareLocalStorageCommit = (state, treeJSON) => {
    const nextCommitId = state.commitId + 1;
    const nextGeneration = state.activeGeneration + 1;
    const chunks = splitTreeJSONIntoChunks(treeJSON, state.maxChunkChars, state.maxChunks, 'localStorage');
    const manifestJson = JSON.stringify({
        magic: LS_MAGIC,
        version: LS_VERSION,
        activeGeneration: nextGeneration,
        commitId: nextCommitId,
        chunkCount: chunks.length,
    });
    return { nextCommitId, nextGeneration, chunks, manifestJson };
};
127
// Write the prepared commit: clear any stale chunks at the target generation
// (a retry after a failed commit may reuse the same index), write the new
// chunks, then switch the manifest last. Quota failures are surfaced as
// QuotaExceededError; everything else becomes a StorageEngineError.
const writeLocalStorageCommit = (state, preparedCommit) => {
    const { nextGeneration, chunks, manifestJson } = preparedCommit;
    try {
        cleanupGenerationChunks(state, nextGeneration, null);
        for (let index = 0; index < chunks.length; index += 1) {
            const key = chunkKey(state.keyPrefix, state.databaseKey, nextGeneration, index);
            state.adapter.setItem(key, chunks[index]);
        }
        state.adapter.setItem(manifestKey(state.keyPrefix, state.databaseKey), manifestJson);
    }
    catch (error) {
        if (isQuotaBrowserError(error) || error instanceof QuotaExceededError) {
            throw new QuotaExceededError('localStorage quota exceeded during commit.');
        }
        throw new StorageEngineError('localStorage write failed during commit.');
    }
};
143
/**
 * Commits a snapshot to localStorage: new-generation chunks are written
 * first, the manifest switch makes them authoritative, and only afterwards
 * are the previous generation's chunks removed (best-effort).
 */
export const commitLocalStorageSnapshot = (state, treeJSON) => {
    ensureCommitCountersSafe(state);
    const preparedCommit = prepareLocalStorageCommit(state, treeJSON);
    writeLocalStorageCommit(state, preparedCommit);
    // Update state before cleanup so that a cleanup failure does not leave
    // stale generation/commitId values while storage already points to the new manifest.
    const previousGeneration = state.activeGeneration;
    const previousChunkCount = state.activeChunkCount;
    state.activeGeneration = preparedCommit.nextGeneration;
    state.commitId = preparedCommit.nextCommitId;
    state.activeChunkCount = preparedCommit.chunks.length;
    // Clean up old generation chunks after manifest switch (best-effort)
    cleanupGenerationChunks(state, previousGeneration, previousChunkCount);
};
@@ -0,0 +1,24 @@
1
import type { AutoCommitConfig, BTreeJSON, LocalStorageConfig } from '../../../types.js';
import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
import type { DurableBackendController } from '../../backend/types.js';
/** Snapshot shape the controller persists on each commit. */
export interface LocalStorageBackendControllerSnapshot {
    treeJSON: BTreeJSON<unknown, unknown>;
}
/** Options for {@link LocalStorageBackendController.create}. */
export interface LocalStorageBackendControllerCreateOptions {
    config: LocalStorageConfig;
    autoCommit?: AutoCommitConfig;
    /** Returns the current in-memory snapshot to persist. */
    getSnapshot: () => LocalStorageBackendControllerSnapshot;
    /** Invoked when a background auto-commit fails. */
    onAutoCommitError: (error: unknown) => void;
}
/** Result of {@link LocalStorageBackendController.create}. */
export interface LocalStorageBackendControllerCreateResult {
    controller: LocalStorageBackendController;
    /** Loaded tree, or null when no snapshot existed yet. */
    initialTreeJSON: BTreeJSON<unknown, unknown> | null;
    initialCurrentSizeBytes: number;
}
/** Durable auto-commit controller persisting snapshots into localStorage. */
export declare class LocalStorageBackendController extends AsyncDurableAutoCommitController implements DurableBackendController {
    private readonly backend;
    private readonly getSnapshot;
    private constructor();
    /** Detects localStorage, loads any existing snapshot, and builds the controller. */
    static create(options: LocalStorageBackendControllerCreateOptions): LocalStorageBackendControllerCreateResult;
    protected executeSingleCommit(): Promise<void>;
}
@@ -0,0 +1,35 @@
1
+ import { UnsupportedBackendError } from '../../../errors/index.js';
2
+ import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
3
+ import { parseAutoCommitConfig } from '../../config/config.shared.js';
4
+ import { parseLocalStorageConfig } from './localStorageConfig.js';
5
+ import { commitLocalStorageSnapshot, createLocalStorageBackendState, detectGlobalLocalStorage, loadLocalStorageSnapshot, } from './localStorageBackend.js';
6
/**
 * Durable auto-commit controller for the localStorage driver. Delegates
 * scheduling to AsyncDurableAutoCommitController and persists snapshots via
 * commitLocalStorageSnapshot.
 */
export class LocalStorageBackendController extends AsyncDurableAutoCommitController {
    // Mutable localStorage backend state (keys, counters, adapter).
    backend;
    // Returns the current in-memory snapshot ({ treeJSON }) to persist.
    getSnapshot;
    constructor(backend, autoCommit, getSnapshot, onAutoCommitError) {
        super(autoCommit, onAutoCommitError);
        this.backend = backend;
        this.getSnapshot = getSnapshot;
    }
    /**
     * Detects localStorage, parses configuration, loads any existing
     * snapshot, and returns the controller plus the loaded initial state.
     * Throws UnsupportedBackendError when localStorage is unavailable.
     */
    static create(options) {
        const storage = detectGlobalLocalStorage();
        if (storage === null) {
            throw new UnsupportedBackendError('localStorage is not available in the current runtime environment.');
        }
        const parsedConfig = parseLocalStorageConfig(options.config);
        const parsedAutoCommit = parseAutoCommitConfig(options.autoCommit);
        const state = createLocalStorageBackendState(storage, parsedConfig.keyPrefix, parsedConfig.databaseKey, parsedConfig.maxChunkChars, parsedConfig.maxChunks);
        const initial = loadLocalStorageSnapshot(state);
        const controller = new LocalStorageBackendController(state, parsedAutoCommit, options.getSnapshot, options.onAutoCommitError);
        return {
            controller,
            initialTreeJSON: initial.treeJSON,
            initialCurrentSizeBytes: initial.currentSizeBytes,
        };
    }
    // Persist one snapshot; localStorage writes are synchronous, so this
    // resolves immediately after the commit completes.
    executeSingleCommit() {
        commitLocalStorageSnapshot(this.backend, this.getSnapshot().treeJSON);
        return Promise.resolve();
    }
}
@@ -0,0 +1,10 @@
1
import type { LocalStorageConfig } from '../../../types.js';
/** Fully-resolved localStorage configuration with defaults applied. */
export interface ParsedLocalStorageConfig {
    /** Prefix for every storage key written by this driver. */
    keyPrefix: string;
    /** Logical database name, part of every storage key. */
    databaseKey: string;
    /** Maximum characters per stored chunk. */
    maxChunkChars: number;
    /** Maximum number of chunks a snapshot may occupy. */
    maxChunks: number;
}
export declare const DEFAULT_LOCAL_STORAGE_MAX_CHUNK_CHARS = 32768;
export declare const DEFAULT_LOCAL_STORAGE_MAX_CHUNKS = 64;
/** Applies defaults and validates the chunk limits; throws ConfigurationError on invalid values. */
export declare const parseLocalStorageConfig: (config?: LocalStorageConfig) => ParsedLocalStorageConfig;
@@ -0,0 +1,16 @@
1
+ import { ConfigurationError } from '../../../errors/index.js';
2
// Default chunking limits: 64 chunks of 32 KiB characters each.
export const DEFAULT_LOCAL_STORAGE_MAX_CHUNK_CHARS = 32768;
export const DEFAULT_LOCAL_STORAGE_MAX_CHUNKS = 64;
// Apply defaults to an optional user config and validate the numeric limits.
// Throws ConfigurationError when either limit is not a positive safe integer.
export const parseLocalStorageConfig = (config) => {
    const parsed = {
        keyPrefix: config?.keyPrefix ?? 'frostpillar',
        databaseKey: config?.databaseKey ?? 'default',
        maxChunkChars: config?.maxChunkChars ?? DEFAULT_LOCAL_STORAGE_MAX_CHUNK_CHARS,
        maxChunks: config?.maxChunks ?? DEFAULT_LOCAL_STORAGE_MAX_CHUNKS,
    };
    if (!Number.isSafeInteger(parsed.maxChunkChars) || parsed.maxChunkChars <= 0) {
        throw new ConfigurationError('localStorage.maxChunkChars must be a positive safe integer.');
    }
    if (!Number.isSafeInteger(parsed.maxChunks) || parsed.maxChunks <= 0) {
        throw new ConfigurationError('localStorage.maxChunks must be a positive safe integer.');
    }
    return parsed;
};
@@ -0,0 +1,5 @@
1
import type { LocalStorageBackendState } from '../../backend/types.js';
/** Storage key of the manifest document for a given prefix/database. */
export declare const manifestKey: (keyPrefix: string, databaseKey: string) => string;
/** Storage key of one chunk within a generation. */
export declare const chunkKey: (keyPrefix: string, databaseKey: string, generation: number, index: number) => string;
/** Removes a generation's chunks; scans up to maxChunks when the count is unknown (null). */
export declare const cleanupGenerationChunks: (state: LocalStorageBackendState, generation: number, knownChunkCount: number | null) => void;
/** True when an Error looks like a browser storage quota failure. */
export declare const isQuotaBrowserError: (error: unknown) => boolean;
@@ -0,0 +1,29 @@
1
// Storage key of the manifest document for a prefix/database pair.
export const manifestKey = (keyPrefix, databaseKey) => {
    return `${keyPrefix}:ls:${databaseKey}:manifest`;
};
// Storage key of chunk `index` within generation `generation`.
export const chunkKey = (keyPrefix, databaseKey, generation, index) => {
    return `${keyPrefix}:ls:${databaseKey}:g:${generation}:chunk:${index}`;
};
// Remove a generation's chunks. With a known chunk count the exact keys are
// removed; otherwise every possible index up to maxChunks is probed and
// removed when present.
export const cleanupGenerationChunks = (state, generation, knownChunkCount) => {
    if (knownChunkCount !== null) {
        for (let index = 0; index < knownChunkCount; index += 1) {
            state.adapter.removeItem(chunkKey(state.keyPrefix, state.databaseKey, generation, index));
        }
        return;
    }
    for (let index = 0; index < state.maxChunks; index += 1) {
        const key = chunkKey(state.keyPrefix, state.databaseKey, generation, index);
        if (state.adapter.getItem(key) !== null) {
            state.adapter.removeItem(key);
        }
    }
};
23
// Heuristic for browser storage-quota failures. Modern engines use the
// standard DOMException name 'QuotaExceededError'; legacy Firefox used
// 'NS_ERROR_DOM_QUOTA_REACHED'. Some older engines reported quota errors
// only through DOMException codes: 22 (QUOTA_EXCEEDED_ERR, old WebKit) and
// 1014 (NS_ERROR_DOM_QUOTA_REACHED in Firefox), so those are accepted too.
export const isQuotaBrowserError = (error) => {
    if (!(error instanceof Error)) {
        return false;
    }
    if (error.name === 'QuotaExceededError' ||
        error.name === 'NS_ERROR_DOM_QUOTA_REACHED') {
        return true;
    }
    const code = error.code;
    return code === 22 || code === 1014;
};
@@ -0,0 +1,12 @@
1
import type { BTreeJSON } from '../../../types.js';
import type { OpfsDirectoryHandle, OpfsStorageRoot } from '../../backend/types.js';
/** Result of loading a persisted OPFS snapshot. */
export interface LoadedOpfsSnapshot {
    /** Deserialized tree payload, or null when nothing was persisted yet. */
    treeJSON: BTreeJSON<unknown, unknown> | null;
    currentSizeBytes: number;
    commitId: number;
    /** Which of the two alternating data slots is currently active. */
    activeData: 'a' | 'b';
}
/** Returns the OPFS storage root, or null when unavailable (implementation not shown here). */
export declare const detectGlobalOpfs: () => OpfsStorageRoot | null;
/** Opens (presumably creating, per the driver's conventions — confirm in opfsBackend.js) the backing directory. */
export declare const openOpfsDirectory: (storageRoot: OpfsStorageRoot, directoryName: string) => Promise<OpfsDirectoryHandle>;
/** Loads and validates the snapshot stored in the given directory. */
export declare const loadOpfsSnapshot: (dir: OpfsDirectoryHandle) => Promise<LoadedOpfsSnapshot>;
/** Commits a snapshot into the inactive slot and returns the new active slot. */
export declare const commitOpfsSnapshot: (dir: OpfsDirectoryHandle, currentActiveData: "a" | "b", treeJSON: BTreeJSON<unknown, unknown>, commitId: number) => Promise<"a" | "b">;