@frostpillar/frostpillar-storage-engine 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/LICENSE +21 -0
  2. package/README-JA.md +1205 -0
  3. package/README.md +1204 -0
  4. package/dist/drivers/file.cjs +960 -0
  5. package/dist/drivers/file.d.ts +3 -0
  6. package/dist/drivers/file.js +18 -0
  7. package/dist/drivers/indexedDB.cjs +570 -0
  8. package/dist/drivers/indexedDB.d.ts +3 -0
  9. package/dist/drivers/indexedDB.js +18 -0
  10. package/dist/drivers/localStorage.cjs +668 -0
  11. package/dist/drivers/localStorage.d.ts +3 -0
  12. package/dist/drivers/localStorage.js +23 -0
  13. package/dist/drivers/opfs.cjs +550 -0
  14. package/dist/drivers/opfs.d.ts +3 -0
  15. package/dist/drivers/opfs.js +18 -0
  16. package/dist/drivers/syncStorage.cjs +898 -0
  17. package/dist/drivers/syncStorage.d.ts +3 -0
  18. package/dist/drivers/syncStorage.js +22 -0
  19. package/dist/drivers/validation.d.ts +1 -0
  20. package/dist/drivers/validation.js +8 -0
  21. package/dist/errors/index.d.ts +32 -0
  22. package/dist/errors/index.js +48 -0
  23. package/dist/frostpillar-storage-engine.min.js +1 -0
  24. package/dist/index.cjs +2957 -0
  25. package/dist/index.d.ts +7 -0
  26. package/dist/index.js +6 -0
  27. package/dist/storage/backend/asyncDurableAutoCommitController.d.ts +26 -0
  28. package/dist/storage/backend/asyncDurableAutoCommitController.js +188 -0
  29. package/dist/storage/backend/asyncMutex.d.ts +7 -0
  30. package/dist/storage/backend/asyncMutex.js +38 -0
  31. package/dist/storage/backend/autoCommit.d.ts +2 -0
  32. package/dist/storage/backend/autoCommit.js +22 -0
  33. package/dist/storage/backend/capacity.d.ts +2 -0
  34. package/dist/storage/backend/capacity.js +27 -0
  35. package/dist/storage/backend/capacityResolver.d.ts +3 -0
  36. package/dist/storage/backend/capacityResolver.js +25 -0
  37. package/dist/storage/backend/encoding.d.ts +17 -0
  38. package/dist/storage/backend/encoding.js +148 -0
  39. package/dist/storage/backend/types.d.ts +184 -0
  40. package/dist/storage/backend/types.js +1 -0
  41. package/dist/storage/btree/recordKeyIndexBTree.d.ts +39 -0
  42. package/dist/storage/btree/recordKeyIndexBTree.js +104 -0
  43. package/dist/storage/config/config.browser.d.ts +4 -0
  44. package/dist/storage/config/config.browser.js +8 -0
  45. package/dist/storage/config/config.d.ts +1 -0
  46. package/dist/storage/config/config.js +1 -0
  47. package/dist/storage/config/config.node.d.ts +4 -0
  48. package/dist/storage/config/config.node.js +74 -0
  49. package/dist/storage/config/config.shared.d.ts +6 -0
  50. package/dist/storage/config/config.shared.js +105 -0
  51. package/dist/storage/datastore/Datastore.d.ts +47 -0
  52. package/dist/storage/datastore/Datastore.js +525 -0
  53. package/dist/storage/datastore/datastoreClose.d.ts +12 -0
  54. package/dist/storage/datastore/datastoreClose.js +60 -0
  55. package/dist/storage/datastore/datastoreKeyDefinition.d.ts +7 -0
  56. package/dist/storage/datastore/datastoreKeyDefinition.js +60 -0
  57. package/dist/storage/datastore/datastoreLifecycle.d.ts +18 -0
  58. package/dist/storage/datastore/datastoreLifecycle.js +63 -0
  59. package/dist/storage/datastore/mutationById.d.ts +29 -0
  60. package/dist/storage/datastore/mutationById.js +71 -0
  61. package/dist/storage/drivers/IndexedDB/indexedDBBackend.d.ts +11 -0
  62. package/dist/storage/drivers/IndexedDB/indexedDBBackend.js +109 -0
  63. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.d.ts +27 -0
  64. package/dist/storage/drivers/IndexedDB/indexedDBBackendController.js +60 -0
  65. package/dist/storage/drivers/IndexedDB/indexedDBConfig.d.ts +7 -0
  66. package/dist/storage/drivers/IndexedDB/indexedDBConfig.js +24 -0
  67. package/dist/storage/drivers/file/fileBackend.d.ts +5 -0
  68. package/dist/storage/drivers/file/fileBackend.js +168 -0
  69. package/dist/storage/drivers/file/fileBackendController.d.ts +31 -0
  70. package/dist/storage/drivers/file/fileBackendController.js +72 -0
  71. package/dist/storage/drivers/file/fileBackendSnapshot.d.ts +10 -0
  72. package/dist/storage/drivers/file/fileBackendSnapshot.js +166 -0
  73. package/dist/storage/drivers/localStorage/localStorageBackend.d.ts +10 -0
  74. package/dist/storage/drivers/localStorage/localStorageBackend.js +156 -0
  75. package/dist/storage/drivers/localStorage/localStorageBackendController.d.ts +24 -0
  76. package/dist/storage/drivers/localStorage/localStorageBackendController.js +35 -0
  77. package/dist/storage/drivers/localStorage/localStorageConfig.d.ts +10 -0
  78. package/dist/storage/drivers/localStorage/localStorageConfig.js +16 -0
  79. package/dist/storage/drivers/localStorage/localStorageLayout.d.ts +5 -0
  80. package/dist/storage/drivers/localStorage/localStorageLayout.js +29 -0
  81. package/dist/storage/drivers/opfs/opfsBackend.d.ts +12 -0
  82. package/dist/storage/drivers/opfs/opfsBackend.js +142 -0
  83. package/dist/storage/drivers/opfs/opfsBackendController.d.ts +26 -0
  84. package/dist/storage/drivers/opfs/opfsBackendController.js +44 -0
  85. package/dist/storage/drivers/syncStorage/syncStorageAdapter.d.ts +2 -0
  86. package/dist/storage/drivers/syncStorage/syncStorageAdapter.js +123 -0
  87. package/dist/storage/drivers/syncStorage/syncStorageBackend.d.ts +11 -0
  88. package/dist/storage/drivers/syncStorage/syncStorageBackend.js +169 -0
  89. package/dist/storage/drivers/syncStorage/syncStorageBackendController.d.ts +24 -0
  90. package/dist/storage/drivers/syncStorage/syncStorageBackendController.js +34 -0
  91. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.d.ts +2 -0
  92. package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.js +28 -0
  93. package/dist/storage/drivers/syncStorage/syncStorageConfig.d.ts +13 -0
  94. package/dist/storage/drivers/syncStorage/syncStorageConfig.js +42 -0
  95. package/dist/storage/drivers/syncStorage/syncStorageQuota.d.ts +3 -0
  96. package/dist/storage/drivers/syncStorage/syncStorageQuota.js +45 -0
  97. package/dist/storage/record/ordering.d.ts +3 -0
  98. package/dist/storage/record/ordering.js +7 -0
  99. package/dist/types.d.ts +125 -0
  100. package/dist/types.js +1 -0
  101. package/dist/validation/metadata.d.ts +1 -0
  102. package/dist/validation/metadata.js +7 -0
  103. package/dist/validation/payload.d.ts +7 -0
  104. package/dist/validation/payload.js +135 -0
  105. package/dist/validation/typeGuards.d.ts +1 -0
  106. package/dist/validation/typeGuards.js +7 -0
  107. package/package.json +110 -0
@@ -0,0 +1,142 @@
1
+ import { PageCorruptionError, StorageEngineError, toStorageEngineError } from '../../../errors/index.js';
2
+ import { parseNonNegativeSafeInteger } from '../../../validation/metadata.js';
3
+ import { computeUtf8ByteLength } from '../../backend/encoding.js';
4
// Magic string and layout version identifying a valid OPFS manifest.
const OPFS_MAGIC = 'FPOPFS_META';
const OPFS_VERSION_VALUE = 2;
// Manifest file plus the two ping-pong data files (commits alternate A/B).
const META_FILE = 'meta.json';
const DATA_FILE_A = 'data-a.json';
const DATA_FILE_B = 'data-b.json';
9
// True when the thrown value is a DOM-style "NotFoundError" (missing file
// or directory handle); false for non-Error values.
const isNotFoundError = (error) => {
    return error instanceof Error && error.name === 'NotFoundError';
};
15
// A manifest must be a plain object: non-null, non-array.
const isManifestObject = (value) => {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === 'object';
};
18
// ---------------------------------------------------------------------------
// Global detection
// ---------------------------------------------------------------------------
/**
 * Returns `navigator.storage` when the runtime exposes OPFS
 * (`navigator.storage.getDirectory` is a function), otherwise null.
 * Never throws.
 */
export const detectGlobalOpfs = () => {
    try {
        const { navigator } = globalThis;
        if (typeof navigator?.storage?.getDirectory !== 'function') {
            return null;
        }
        return navigator.storage;
    }
    catch {
        return null;
    }
};
33
// ---------------------------------------------------------------------------
// Directory helpers
// ---------------------------------------------------------------------------
/**
 * Opens (creating when absent) the engine's directory under the OPFS root.
 */
export const openOpfsDirectory = async (storageRoot, directoryName) => {
    const rootDirectory = await storageRoot.getDirectory();
    return await rootDirectory.getDirectoryHandle(directoryName, { create: true });
};
40
// ---------------------------------------------------------------------------
// Load snapshot
// ---------------------------------------------------------------------------
/**
 * Parses and validates the raw text of meta.json.
 *
 * Throws StorageEngineError when the JSON is malformed, not an object,
 * carries an unexpected magic/version pair, has an invalid activeData
 * marker, or an invalid commitId. Returns the manifest together with the
 * validated commitId and active-side marker.
 */
const parseOpfsManifest = (metaText) => {
    let manifestRaw;
    try {
        manifestRaw = JSON.parse(metaText);
    }
    catch {
        throw new StorageEngineError('OPFS meta.json JSON is malformed.');
    }
    if (!isManifestObject(manifestRaw)) {
        throw new StorageEngineError('OPFS meta.json must be a JSON object.');
    }
    const manifest = manifestRaw;
    const identityMatches = manifest.magic === OPFS_MAGIC && manifest.version === OPFS_VERSION_VALUE;
    if (!identityMatches) {
        throw new StorageEngineError('OPFS meta.json magic/version mismatch.');
    }
    const { activeData } = manifest;
    if (activeData !== 'a' && activeData !== 'b') {
        throw new StorageEngineError('OPFS meta.json activeData must be "a" or "b".');
    }
    const commitId = parseNonNegativeSafeInteger(manifest.commitId, 'meta.json commitId', 'OPFS');
    return { manifest, commitId, activeData };
};
64
/**
 * Reads and decodes the active data file.
 *
 * Fix: the original caught every failure of the read sequence and reported
 * it as "not found", hiding real permission/IO errors and dropping the
 * underlying cause. A genuinely missing file is still reported as such;
 * any other failure is now wrapped with its cause preserved.
 *
 * Throws StorageEngineError for read/parse failures and PageCorruptionError
 * when the decoded value is not a plain object.
 */
const loadOpfsDataFile = async (dir, dataFileName) => {
    let dataText;
    try {
        const dataHandle = await dir.getFileHandle(dataFileName, { create: false });
        const dataFile = await dataHandle.getFile();
        dataText = await dataFile.text();
    }
    catch (error) {
        if (isNotFoundError(error)) {
            throw new StorageEngineError(`OPFS active data file "${dataFileName}" not found.`);
        }
        // Surface real read failures (permissions, IO) instead of mislabeling
        // them as a missing file; keep the original error as the cause.
        throw toStorageEngineError(error, `OPFS active data file "${dataFileName}" read failed`);
    }
    let parsedTreeJSON;
    try {
        parsedTreeJSON = JSON.parse(dataText);
    }
    catch {
        throw new StorageEngineError('OPFS data file JSON is malformed.');
    }
    if (typeof parsedTreeJSON !== 'object' || parsedTreeJSON === null || Array.isArray(parsedTreeJSON)) {
        throw new PageCorruptionError('treeJSON must be a non-null plain object.');
    }
    return {
        treeJSON: parsedTreeJSON,
        rawJsonLength: computeUtf8ByteLength(dataText),
    };
};
89
/**
 * Loads the persisted snapshot from an OPFS directory handle.
 *
 * A missing meta.json means a fresh store: an empty snapshot with commitId 0
 * and 'a' as the active side is returned. Any other meta read failure is
 * wrapped and rethrown. Otherwise the manifest selects which ping-pong data
 * file to decode.
 */
export const loadOpfsSnapshot = async (dir) => {
    let metaText;
    try {
        const metaHandle = await dir.getFileHandle(META_FILE, { create: false });
        const metaFile = await metaHandle.getFile();
        metaText = await metaFile.text();
    }
    catch (error) {
        if (isNotFoundError(error)) {
            // Fresh store: nothing persisted yet.
            return {
                treeJSON: null,
                currentSizeBytes: 0,
                commitId: 0,
                activeData: 'a',
            };
        }
        throw toStorageEngineError(error, 'OPFS meta.json read failed');
    }
    const { commitId, activeData } = parseOpfsManifest(metaText);
    const activeFileName = activeData === 'a' ? DATA_FILE_A : DATA_FILE_B;
    const { treeJSON, rawJsonLength } = await loadOpfsDataFile(dir, activeFileName);
    return {
        treeJSON,
        currentSizeBytes: rawJsonLength,
        commitId,
        activeData,
    };
};
113
// ---------------------------------------------------------------------------
// Commit snapshot (ping-pong)
// ---------------------------------------------------------------------------
/**
 * Writes a snapshot using the A/B ping-pong scheme: the currently inactive
 * data file is written first, then meta.json is rewritten to reference it,
 * so a crash mid-commit leaves the previous snapshot readable.
 * Returns the new active side ('a' or 'b').
 */
export const commitOpfsSnapshot = async (dir, currentActiveData, treeJSON, commitId) => {
    const nextActiveData = currentActiveData === 'a' ? 'b' : 'a';
    const dataFileName = nextActiveData === 'a' ? DATA_FILE_A : DATA_FILE_B;
    const dataJson = JSON.stringify(treeJSON);
    try {
        // Write to the inactive data file first
        const dataHandle = await dir.getFileHandle(dataFileName, { create: true });
        const dataWritable = await dataHandle.createWritable();
        await dataWritable.write(dataJson);
        await dataWritable.close();
        // Then update meta to point to the newly written file
        const manifestJson = JSON.stringify({
            magic: OPFS_MAGIC,
            version: OPFS_VERSION_VALUE,
            activeData: nextActiveData,
            commitId,
        });
        const metaHandle = await dir.getFileHandle(META_FILE, { create: true });
        const metaWritable = await metaHandle.createWritable();
        await metaWritable.write(manifestJson);
        await metaWritable.close();
    }
    catch (error) {
        throw toStorageEngineError(error, 'OPFS commit failed');
    }
    return nextActiveData;
};
@@ -0,0 +1,26 @@
1
+ import type { AutoCommitConfig, BTreeJSON, OpfsConfig } from '../../../types.js';
2
+ import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
3
+ import type { DurableBackendController } from '../../backend/types.js';
4
/** Shape of the snapshot the controller serializes on each commit. */
export interface OpfsBackendControllerSnapshot {
    treeJSON: BTreeJSON<unknown, unknown>;
}
/** Options accepted by {@link OpfsBackendController.create}. */
export interface OpfsBackendControllerCreateOptions {
    config: OpfsConfig;
    autoCommit?: AutoCommitConfig;
    /** Called at commit time to obtain the current tree to persist. */
    getSnapshot: () => OpfsBackendControllerSnapshot;
    /** Invoked when a background auto-commit fails. */
    onAutoCommitError: (error: unknown) => void;
}
/** Result of {@link OpfsBackendController.create}. */
export interface OpfsBackendControllerCreateResult {
    controller: OpfsBackendController;
    /** Previously persisted tree, or null for a fresh store. */
    initialTreeJSON: BTreeJSON<unknown, unknown> | null;
    initialCurrentSizeBytes: number;
}
/**
 * Durable OPFS-backed controller. The constructor is private: use the async
 * `create` factory, which detects OPFS support and loads any persisted
 * snapshot.
 */
export declare class OpfsBackendController extends AsyncDurableAutoCommitController implements DurableBackendController {
    private readonly dir;
    private readonly getSnapshot;
    private activeData;
    private commitId;
    private constructor();
    static create(options: OpfsBackendControllerCreateOptions): Promise<OpfsBackendControllerCreateResult>;
    protected executeSingleCommit(): Promise<void>;
}
@@ -0,0 +1,44 @@
1
+ import { StorageEngineError, UnsupportedBackendError } from '../../../errors/index.js';
2
+ import { parseAutoCommitConfig } from '../../config/config.shared.js';
3
+ import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
4
+ import { commitOpfsSnapshot, detectGlobalOpfs, loadOpfsSnapshot, openOpfsDirectory, } from './opfsBackend.js';
5
const DEFAULT_DIRECTORY_NAME = 'frostpillar';
/**
 * Durable backend controller persisting snapshots into an OPFS directory
 * via the A/B ping-pong protocol in opfsBackend.js. Construct through the
 * async `create` factory rather than the constructor.
 */
export class OpfsBackendController extends AsyncDurableAutoCommitController {
    dir;
    getSnapshot;
    activeData;
    commitId;
    constructor(dir, activeData, commitId, autoCommit, getSnapshot, onAutoCommitError) {
        super(autoCommit, onAutoCommitError);
        this.dir = dir;
        this.getSnapshot = getSnapshot;
        this.activeData = activeData;
        this.commitId = commitId;
    }
    /**
     * Detects OPFS, opens the configured directory (default "frostpillar"),
     * loads any persisted snapshot, and returns the controller together with
     * the initial tree and byte size.
     */
    static async create(options) {
        const storageRoot = detectGlobalOpfs();
        if (storageRoot === null) {
            throw new UnsupportedBackendError('opfs (Origin Private File System) is not available in the current runtime environment.');
        }
        const directoryName = options.config?.directoryName ?? DEFAULT_DIRECTORY_NAME;
        const autoCommit = parseAutoCommitConfig(options.autoCommit);
        const dir = await openOpfsDirectory(storageRoot, directoryName);
        const { treeJSON, currentSizeBytes, commitId, activeData } = await loadOpfsSnapshot(dir);
        const controller = new OpfsBackendController(dir, activeData, commitId, autoCommit, options.getSnapshot, options.onAutoCommitError);
        return {
            controller,
            initialTreeJSON: treeJSON,
            initialCurrentSizeBytes: currentSizeBytes,
        };
    }
    /** Writes one snapshot via the ping-pong protocol, advancing commitId. */
    async executeSingleCommit() {
        const snapshot = this.getSnapshot();
        if (this.commitId >= Number.MAX_SAFE_INTEGER) {
            throw new StorageEngineError('OPFS commitId has reached Number.MAX_SAFE_INTEGER.');
        }
        const nextCommitId = this.commitId + 1;
        this.activeData = await commitOpfsSnapshot(this.dir, this.activeData, snapshot.treeJSON, nextCommitId);
        this.commitId = nextCommitId;
    }
}
@@ -0,0 +1,2 @@
1
import type { SyncStorageAdapter } from '../../backend/types.js';
/**
 * Detects a usable browser sync-storage area on globalThis
 * (browser.storage.sync preferred, chrome.storage.sync as fallback) and
 * wraps it in a SyncStorageAdapter. Returns null when neither is available.
 */
export declare const detectGlobalSyncStorage: () => SyncStorageAdapter | null;
@@ -0,0 +1,123 @@
1
+ import { toErrorInstance } from '../../../errors/index.js';
2
+ import { isRecordObject } from '../../../validation/typeGuards.js';
3
/**
 * Converts a pending chrome.runtime.lastError into an Error instance.
 * Returns null when no runtime error is set; an empty/blank message is
 * replaced with an explanatory one.
 */
const readChromeRuntimeError = (runtime) => {
    const message = runtime?.lastError?.message;
    if (message === undefined) {
        return null;
    }
    const isBlank = message.trim().length === 0;
    return new Error(isBlank
        ? 'chrome.runtime.lastError is set with an empty message.'
        : message);
};
13
/**
 * Promisifies callback-style chrome.storage sync.get, surfacing
 * chrome.runtime.lastError as a rejection and normalizing synchronous
 * throws into Error instances.
 */
const callChromeCallbackGet = (syncArea, runtime, keys) => {
    return new Promise((resolve, reject) => {
        const onItems = (items) => {
            const runtimeError = readChromeRuntimeError(runtime);
            if (runtimeError === null) {
                resolve(items);
            }
            else {
                reject(runtimeError);
            }
        };
        try {
            syncArea.get(keys, onItems);
        }
        catch (error) {
            reject(toErrorInstance(error, 'chrome.storage.sync.get failed with a non-Error value.'));
        }
    });
};
30
/**
 * Promisifies callback-style chrome.storage sync.set, surfacing
 * chrome.runtime.lastError as a rejection and normalizing synchronous
 * throws into Error instances.
 */
const callChromeCallbackSet = (syncArea, runtime, items) => {
    return new Promise((resolve, reject) => {
        const onDone = () => {
            const runtimeError = readChromeRuntimeError(runtime);
            if (runtimeError === null) {
                resolve();
            }
            else {
                reject(runtimeError);
            }
        };
        try {
            syncArea.set(items, onDone);
        }
        catch (error) {
            reject(toErrorInstance(error, 'chrome.storage.sync.set failed with a non-Error value.'));
        }
    });
};
47
/**
 * Promisifies callback-style chrome.storage sync.remove, surfacing
 * chrome.runtime.lastError as a rejection and normalizing synchronous
 * throws into Error instances.
 */
const callChromeCallbackRemove = (syncArea, runtime, keys) => {
    return new Promise((resolve, reject) => {
        const onDone = () => {
            const runtimeError = readChromeRuntimeError(runtime);
            if (runtimeError === null) {
                resolve();
            }
            else {
                reject(runtimeError);
            }
        };
        try {
            syncArea.remove(keys, onDone);
        }
        catch (error) {
            reject(toErrorInstance(error, 'chrome.storage.sync.remove failed with a non-Error value.'));
        }
    });
};
64
/**
 * Wraps a WebExtension Promise-based storage area (browser.storage.sync)
 * in the engine's SyncStorageAdapter shape.
 */
const createBrowserPromiseSyncStorageAdapter = (syncArea) => {
    const getItems = async (keys) => syncArea.get(keys);
    const setItems = async (items) => {
        await syncArea.set(items);
    };
    const removeItems = async (keys) => {
        await syncArea.remove(keys);
    };
    return { getItems, setItems, removeItems };
};
77
/**
 * Wraps a callback-style chrome.storage.sync area in the engine's
 * SyncStorageAdapter shape, threading chrome.runtime through for
 * lastError inspection.
 */
const createChromeCallbackSyncStorageAdapter = (syncArea, runtime) => {
    const getItems = async (keys) => callChromeCallbackGet(syncArea, runtime, keys);
    const setItems = async (items) => {
        await callChromeCallbackSet(syncArea, runtime, items);
    };
    const removeItems = async (keys) => {
        await callChromeCallbackRemove(syncArea, runtime, keys);
    };
    return { getItems, setItems, removeItems };
};
90
// A usable sync area is a record object exposing get/set/remove functions.
const hasSyncAreaFunctionShape = (value) => {
    if (!isRecordObject(value)) {
        return false;
    }
    return ['get', 'set', 'remove'].every((method) => typeof value[method] === 'function');
};
98
// Both detection helpers currently share one structural check; the two
// names keep the Promise-API vs callback-API call sites self-describing.
const hasBrowserPromiseSyncArea = (value) => {
    return hasSyncAreaFunctionShape(value);
};
const hasChromeCallbackSyncArea = (value) => {
    return hasSyncAreaFunctionShape(value);
};
104
/**
 * Detects a usable browser sync-storage area on globalThis and returns an
 * adapter for it, or null when none is available. Never throws.
 */
export const detectGlobalSyncStorage = () => {
    try {
        const globals = globalThis;
        // Prefer the Promise API when both namespaces exist. The callback API is a
        // fallback for runtimes where only chrome.* is available.
        const browserSync = globals.browser?.storage?.sync;
        if (hasBrowserPromiseSyncArea(browserSync)) {
            return createBrowserPromiseSyncStorageAdapter(browserSync);
        }
        const chromeSync = globals.chrome?.storage?.sync;
        if (!hasChromeCallbackSyncArea(chromeSync)) {
            return null;
        }
        const runtime = globals.chrome?.runtime ?? null;
        return createChromeCallbackSyncStorageAdapter(chromeSync, runtime);
    }
    catch {
        return null;
    }
};
@@ -0,0 +1,11 @@
1
import type { BTreeJSON } from '../../../types.js';
import type { SyncStorageAdapter, SyncStorageBackendState } from '../../backend/types.js';
import { detectGlobalSyncStorage } from './syncStorageAdapter.js';
/** Result of loading a snapshot; treeJSON is null for a fresh store. */
export interface LoadedSyncStorageSnapshot {
    treeJSON: BTreeJSON<unknown, unknown> | null;
    currentSizeBytes: number;
}
export { detectGlobalSyncStorage };
/** Builds the mutable backend state; generation/commit counters start at 0. */
export declare const createSyncStorageBackendState: (adapter: SyncStorageAdapter, keyPrefix: string, databaseKey: string, maxChunkChars: number, maxChunks: number, maxItemBytes: number, maxTotalBytes: number, maxItems: number) => SyncStorageBackendState;
/** Loads the snapshot referenced by the manifest and syncs state counters. */
export declare const loadSyncStorageSnapshot: (state: SyncStorageBackendState) => Promise<LoadedSyncStorageSnapshot>;
/** Writes a new snapshot generation, then prunes the previous one. */
export declare const commitSyncStorageSnapshot: (state: SyncStorageBackendState, treeJSON: BTreeJSON<unknown, unknown>) => Promise<void>;
@@ -0,0 +1,169 @@
1
+ import { PageCorruptionError, QuotaExceededError, StorageEngineError, } from '../../../errors/index.js';
2
+ import { isRecordObject } from '../../../validation/typeGuards.js';
3
+ import { parseNonNegativeSafeInteger } from '../../../validation/metadata.js';
4
+ import { detectGlobalSyncStorage } from './syncStorageAdapter.js';
5
+ import { cleanupGenerationChunks } from './syncStorageChunkMaintenance.js';
6
+ import { isQuotaBrowserError, validateSyncStorageCommitQuota, } from './syncStorageQuota.js';
7
+ import { computeUtf8ByteLength } from '../../backend/encoding.js';
8
// Magic string and layout version identifying the syncStorage manifest item.
const SYNC_STORAGE_MAGIC = 'FPSYNC_META';
const SYNC_STORAGE_VERSION = 2;
// Storage key of the manifest item for one logical database.
const manifestKey = (keyPrefix, databaseKey) => `${keyPrefix}:sync:${databaseKey}:manifest`;
// Storage key of chunk `index` within snapshot generation `generation`.
const chunkKey = (keyPrefix, databaseKey, generation, index) => `${keyPrefix}:sync:${databaseKey}:g:${generation}:chunk:${index}`;
16
+ export { detectGlobalSyncStorage };
17
/**
 * Builds the mutable backend state for one syncStorage-backed database.
 * Generation/commit counters start at zero and are refreshed by
 * loadSyncStorageSnapshot once a persisted manifest is found.
 */
export const createSyncStorageBackendState = (adapter, keyPrefix, databaseKey, maxChunkChars, maxChunks, maxItemBytes, maxTotalBytes, maxItems) => {
    const limits = { maxChunkChars, maxChunks, maxItemBytes, maxTotalBytes, maxItems };
    return {
        adapter,
        keyPrefix,
        databaseKey,
        ...limits,
        activeGeneration: 0,
        commitId: 0,
        activeChunkCount: 0,
    };
};
32
/**
 * Validates a raw manifest value: record shape, magic/version identity, and
 * that its chunkCount is a non-negative safe integer not exceeding the
 * configured maxChunks. Returns the manifest on success.
 */
const parseSyncManifest = (manifestUnknown, maxChunks) => {
    if (!isRecordObject(manifestUnknown)) {
        throw new StorageEngineError('syncStorage manifest must be an object.');
    }
    const manifest = manifestUnknown;
    const identityMatches = manifest.magic === SYNC_STORAGE_MAGIC &&
        manifest.version === SYNC_STORAGE_VERSION;
    if (!identityMatches) {
        throw new StorageEngineError('syncStorage manifest magic/version mismatch.');
    }
    const chunkCount = parseNonNegativeSafeInteger(manifest.chunkCount, 'manifest.chunkCount', 'syncStorage');
    if (chunkCount > maxChunks) {
        throw new StorageEngineError(`syncStorage snapshot requires ${chunkCount} chunks but maxChunks is ${maxChunks}.`);
    }
    return manifest;
};
47
/**
 * Fetches every chunk of the active generation, re-joins them, and decodes
 * the tree JSON. Throws when a chunk is missing/non-string, the joined data
 * is malformed JSON, or the decoded value is not a plain object.
 */
const loadSyncChunksAndDecodeTreeJSON = async (state, activeGeneration, chunkCount) => {
    const chunkKeys = Array.from({ length: chunkCount }, (_, index) => chunkKey(state.keyPrefix, state.databaseKey, activeGeneration, index));
    const chunkValuesByKey = chunkKeys.length === 0
        ? {}
        : await state.adapter.getItems(chunkKeys);
    const chunks = chunkKeys.map((cKey) => {
        const chunkValue = chunkValuesByKey[cKey];
        if (typeof chunkValue !== 'string') {
            throw new StorageEngineError(`syncStorage chunk "${cKey}" is missing or not a string.`);
        }
        return chunkValue;
    });
    const treeJson = chunks.join('');
    let parsedTreeJSON;
    try {
        parsedTreeJSON = JSON.parse(treeJson);
    }
    catch {
        throw new StorageEngineError('syncStorage chunk data JSON is malformed.');
    }
    if (typeof parsedTreeJSON !== 'object' || parsedTreeJSON === null || Array.isArray(parsedTreeJSON)) {
        throw new PageCorruptionError('treeJSON must be a non-null plain object.');
    }
    return {
        treeJSON: parsedTreeJSON,
        rawJsonLength: computeUtf8ByteLength(treeJson),
    };
};
79
/**
 * Loads the persisted snapshot for the configured database key. A missing
 * manifest means a fresh/empty store. On success the state's
 * generation/commit/chunk counters are synchronized with the manifest.
 */
export const loadSyncStorageSnapshot = async (state) => {
    const mKey = manifestKey(state.keyPrefix, state.databaseKey);
    const manifestMap = await state.adapter.getItems([mKey]);
    const manifestUnknown = manifestMap[mKey];
    if (manifestUnknown === undefined) {
        // Fresh store: nothing persisted yet.
        return { treeJSON: null, currentSizeBytes: 0 };
    }
    const manifest = parseSyncManifest(manifestUnknown, state.maxChunks);
    const activeGeneration = parseNonNegativeSafeInteger(manifest.activeGeneration, 'manifest.activeGeneration', 'syncStorage');
    const commitId = parseNonNegativeSafeInteger(manifest.commitId, 'manifest.commitId', 'syncStorage');
    const chunkCount = parseNonNegativeSafeInteger(manifest.chunkCount, 'manifest.chunkCount', 'syncStorage');
    const { treeJSON, rawJsonLength } = await loadSyncChunksAndDecodeTreeJSON(state, activeGeneration, chunkCount);
    // Mutate state only after the whole snapshot decoded successfully.
    state.activeGeneration = activeGeneration;
    state.commitId = commitId;
    state.activeChunkCount = chunkCount;
    return { treeJSON, currentSizeBytes: rawJsonLength };
};
97
// Binds the state's key namespace into a (generation, index) -> key resolver.
const buildSyncChunkKeyResolver = (state) => {
    return (generation, index) => chunkKey(state.keyPrefix, state.databaseKey, generation, index);
};
102
/**
 * Assembles the single setItems payload for a commit: the new manifest plus
 * every chunk keyed under the next generation.
 */
const buildSyncCommitItems = (state, chunks, newManifest, nextGeneration) => {
    const items = { [manifestKey(state.keyPrefix, state.databaseKey)]: newManifest };
    chunks.forEach((chunk, index) => {
        items[chunkKey(state.keyPrefix, state.databaseKey, nextGeneration, index)] = chunk;
    });
    return items;
};
111
/**
 * Serializes the tree and splits the JSON text into maxChunkChars-sized
 * slices. Throws QuotaExceededError when more than maxChunks slices would
 * be required.
 */
const splitSyncTreeJSONIntoChunks = (treeJSON, maxChunkChars, maxChunks) => {
    const dataJson = JSON.stringify(treeJSON);
    const chunks = [];
    for (let offset = 0; offset < dataJson.length; offset += maxChunkChars) {
        chunks.push(dataJson.slice(offset, offset + maxChunkChars));
    }
    if (chunks.length > maxChunks) {
        throw new QuotaExceededError(`syncStorage snapshot requires ${chunks.length} chunks but maxChunks is ${maxChunks}.`);
    }
    return chunks;
};
122
// Guards against counter overflow before the next commit/generation ids are
// derived with `+ 1`. The commitId check runs first, then activeGeneration.
const ensureSyncCommitCountersSafe = (state) => {
    const counters = [
        ['commitId', state.commitId],
        ['activeGeneration', state.activeGeneration],
    ];
    for (const [name, value] of counters) {
        if (value >= Number.MAX_SAFE_INTEGER) {
            throw new StorageEngineError(`syncStorage ${name} has reached Number.MAX_SAFE_INTEGER.`);
        }
    }
};
130
/**
 * Commits a snapshot as a new chunk generation:
 * 1. pre-clean any stale chunks already sitting under the next generation,
 * 2. write the new chunks plus the manifest in one setItems call,
 * 3. prune the previous generation's chunks.
 *
 * Fix: the trailing cleanup of the previous generation is now best-effort,
 * consistent with the guarded pre-commit cleanup. Previously a transient
 * removeItems failure there surfaced as a commit error even though the
 * commit had already been durably written and the in-memory state updated.
 */
export const commitSyncStorageSnapshot = async (state, treeJSON) => {
    ensureSyncCommitCountersSafe(state);
    const nextCommitId = state.commitId + 1;
    const nextGeneration = state.activeGeneration + 1;
    const chunks = splitSyncTreeJSONIntoChunks(treeJSON, state.maxChunkChars, state.maxChunks);
    const newManifest = {
        magic: SYNC_STORAGE_MAGIC,
        version: SYNC_STORAGE_VERSION,
        activeGeneration: nextGeneration,
        commitId: nextCommitId,
        chunkCount: chunks.length,
    };
    const resolveChunkKey = buildSyncChunkKeyResolver(state);
    const mKey = manifestKey(state.keyPrefix, state.databaseKey);
    validateSyncStorageCommitQuota(state, nextGeneration, chunks, newManifest, resolveChunkKey, mKey);
    const newSnapshotItems = buildSyncCommitItems(state, chunks, newManifest, nextGeneration);
    // Stale chunks in the next generation are maintenance-only and uncommitted.
    // Commit write should proceed even when this cleanup fails transiently.
    try {
        await cleanupGenerationChunks(state, nextGeneration, null, resolveChunkKey);
    }
    catch {
        // Ignore cleanup-only failures and proceed with commit write.
    }
    try {
        await state.adapter.setItems(newSnapshotItems);
    }
    catch (error) {
        if (isQuotaBrowserError(error)) {
            throw new QuotaExceededError('syncStorage quota exceeded during commit.');
        }
        throw new StorageEngineError('syncStorage write failed during commit.', { cause: error });
    }
    const previousGeneration = state.activeGeneration;
    const previousChunkCount = state.activeChunkCount;
    state.activeGeneration = nextGeneration;
    state.commitId = nextCommitId;
    state.activeChunkCount = chunks.length;
    // The commit is durable at this point; a failed prune of the old
    // generation must not make the commit look failed. Stale chunks are
    // unreferenced by the manifest and invisible to readers.
    try {
        await cleanupGenerationChunks(state, previousGeneration, previousChunkCount, resolveChunkKey);
    }
    catch {
        // Ignore cleanup-only failures.
    }
};
@@ -0,0 +1,24 @@
1
import type { AutoCommitConfig, BTreeJSON, SyncStorageConfig } from '../../../types.js';
import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
import type { DurableBackendController } from '../../backend/types.js';
/** Shape of the snapshot the controller serializes on each commit. */
export interface SyncStorageBackendControllerSnapshot {
    treeJSON: BTreeJSON<unknown, unknown>;
}
/** Options accepted by {@link SyncStorageBackendController.create}. */
export interface SyncStorageBackendControllerCreateOptions {
    config: SyncStorageConfig;
    autoCommit?: AutoCommitConfig;
    /** Called at commit time to obtain the current tree to persist. */
    getSnapshot: () => SyncStorageBackendControllerSnapshot;
    /** Invoked when a background auto-commit fails. */
    onAutoCommitError: (error: unknown) => void;
}
/** Result of {@link SyncStorageBackendController.create}. */
export interface SyncStorageBackendControllerCreateResult {
    controller: SyncStorageBackendController;
    /** Previously persisted tree, or null for a fresh store. */
    initialTreeJSON: BTreeJSON<unknown, unknown> | null;
    initialCurrentSizeBytes: number;
}
/**
 * Durable controller persisting snapshots into browser sync storage. The
 * constructor is private: use the async `create` factory, which detects the
 * storage API and loads any persisted snapshot.
 */
export declare class SyncStorageBackendController extends AsyncDurableAutoCommitController implements DurableBackendController {
    private readonly backend;
    private readonly getSnapshot;
    private constructor();
    static create(options: SyncStorageBackendControllerCreateOptions): Promise<SyncStorageBackendControllerCreateResult>;
    protected executeSingleCommit(): Promise<void>;
}
@@ -0,0 +1,34 @@
1
+ import { UnsupportedBackendError } from '../../../errors/index.js';
2
+ import { parseAutoCommitConfig } from '../../config/config.shared.js';
3
+ import { parseSyncStorageConfig } from './syncStorageConfig.js';
4
+ import { AsyncDurableAutoCommitController } from '../../backend/asyncDurableAutoCommitController.js';
5
+ import { commitSyncStorageSnapshot, createSyncStorageBackendState, detectGlobalSyncStorage, loadSyncStorageSnapshot, } from './syncStorageBackend.js';
6
/**
 * Durable backend controller persisting snapshots into browser sync storage
 * via chunked generations (syncStorageBackend.js). Construct through the
 * async `create` factory rather than the constructor.
 */
export class SyncStorageBackendController extends AsyncDurableAutoCommitController {
    backend;
    getSnapshot;
    constructor(backend, autoCommit, getSnapshot, onAutoCommitError) {
        super(autoCommit, onAutoCommitError);
        this.getSnapshot = getSnapshot;
        this.backend = backend;
    }
    /**
     * Detects sync storage, parses config, loads any persisted snapshot, and
     * returns the controller together with the initial tree and byte size.
     */
    static async create(options) {
        const adapter = detectGlobalSyncStorage();
        if (adapter === null) {
            throw new UnsupportedBackendError('browser sync storage is not available in the current runtime environment.');
        }
        const parsed = parseSyncStorageConfig(options.config);
        const autoCommit = parseAutoCommitConfig(options.autoCommit);
        const backend = createSyncStorageBackendState(adapter, parsed.keyPrefix, parsed.databaseKey, parsed.maxChunkChars, parsed.maxChunks, parsed.maxItemBytes, parsed.maxTotalBytes, parsed.maxItems);
        const { treeJSON, currentSizeBytes } = await loadSyncStorageSnapshot(backend);
        const controller = new SyncStorageBackendController(backend, autoCommit, options.getSnapshot, options.onAutoCommitError);
        return {
            controller,
            initialTreeJSON: treeJSON,
            initialCurrentSizeBytes: currentSizeBytes,
        };
    }
    /** Serializes the current snapshot and commits it as a new generation. */
    async executeSingleCommit() {
        const { treeJSON } = this.getSnapshot();
        await commitSyncStorageSnapshot(this.backend, treeJSON);
    }
}
@@ -0,0 +1,2 @@
1
import type { SyncStorageBackendState } from '../../backend/types.js';
/**
 * Removes the chunk items of one snapshot generation. With a known chunk
 * count the keys are removed directly; with null, the first
 * `state.maxChunks` candidate keys are probed and only those that exist
 * are removed.
 */
export declare const cleanupGenerationChunks: (state: SyncStorageBackendState, generation: number, knownChunkCount: number | null, chunkKeyResolver: (generation: number, index: number) => string) => Promise<void>;
@@ -0,0 +1,28 @@
1
/**
 * Removes the chunk items belonging to one snapshot generation.
 *
 * When `knownChunkCount` is provided, exactly that many keys are removed
 * (nothing happens for counts <= 0). When it is null, the first
 * `state.maxChunks` candidate keys are probed via getItems and only the
 * keys actually present are removed.
 */
export const cleanupGenerationChunks = async (state, generation, knownChunkCount, chunkKeyResolver) => {
    const buildKeys = (count) => Array.from({ length: count }, (_, index) => chunkKeyResolver(generation, index));
    if (knownChunkCount !== null) {
        if (knownChunkCount <= 0) {
            return;
        }
        await state.adapter.removeItems(buildKeys(knownChunkCount));
        return;
    }
    if (state.maxChunks <= 0) {
        return;
    }
    const speculativeKeys = buildKeys(state.maxChunks);
    const maybeChunks = await state.adapter.getItems(speculativeKeys);
    const discoveredKeys = speculativeKeys.filter((key) => Object.hasOwn(maybeChunks, key));
    if (discoveredKeys.length === 0) {
        return;
    }
    await state.adapter.removeItems(discoveredKeys);
};
@@ -0,0 +1,13 @@
1
import type { SyncStorageConfig } from '../../../types.js';
/** Fully-resolved syncStorage configuration after defaults and validation. */
export interface ParsedSyncStorageConfig {
    keyPrefix: string;
    databaseKey: string;
    maxChunkChars: number;
    maxChunks: number;
    maxItemBytes: number;
    maxTotalBytes: number;
    maxItems: number;
}
/** Default total-bytes budget (100 KiB). */
export declare const DEFAULT_SYNC_STORAGE_MAX_TOTAL_BYTES = 102400;
/** Resolves/validates maxTotalBytes alone (used for backend capacity limits). */
export declare const parseSyncStorageMaxTotalBytesForBackendLimit: (config?: SyncStorageConfig) => number;
/** Parses the full syncStorage config, applying defaults and validation. */
export declare const parseSyncStorageConfig: (config?: SyncStorageConfig) => ParsedSyncStorageConfig;
@@ -0,0 +1,42 @@
1
+ import { ConfigurationError } from '../../../errors/index.js';
2
export const DEFAULT_SYNC_STORAGE_MAX_TOTAL_BYTES = 102400;
/**
 * Resolves and validates maxTotalBytes on its own; exported separately so
 * backend capacity resolution can obtain the limit without a full config
 * parse. Throws ConfigurationError for non-positive/unsafe values.
 */
export const parseSyncStorageMaxTotalBytesForBackendLimit = (config) => {
    const maxTotalBytes = config?.maxTotalBytes ?? DEFAULT_SYNC_STORAGE_MAX_TOTAL_BYTES;
    if (!Number.isSafeInteger(maxTotalBytes) || maxTotalBytes <= 0) {
        throw new ConfigurationError('syncStorage.maxTotalBytes must be a positive safe integer.');
    }
    return maxTotalBytes;
};
/**
 * Parses the syncStorage config, applying defaults and validating every
 * numeric limit as a positive safe integer. Also enforces that the manifest
 * item plus maxChunks chunk items fit within maxItems.
 */
export const parseSyncStorageConfig = (config) => {
    const parsed = {
        keyPrefix: config?.keyPrefix ?? 'frostpillar',
        databaseKey: config?.databaseKey ?? 'default',
        maxChunkChars: config?.maxChunkChars ?? 6000,
        maxChunks: config?.maxChunks ?? 511,
        maxItemBytes: config?.maxItemBytes ?? 8192,
        maxTotalBytes: parseSyncStorageMaxTotalBytesForBackendLimit(config),
        maxItems: config?.maxItems ?? 512,
    };
    // Validation order matches the documented error precedence:
    // maxChunkChars, maxChunks, maxItemBytes, maxItems.
    for (const field of ['maxChunkChars', 'maxChunks', 'maxItemBytes', 'maxItems']) {
        const value = parsed[field];
        if (!Number.isSafeInteger(value) || value <= 0) {
            throw new ConfigurationError(`syncStorage.${field} must be a positive safe integer.`);
        }
    }
    if (parsed.maxChunks + 1 > parsed.maxItems) {
        throw new ConfigurationError('syncStorage.maxChunks + 1 (manifest item) must be <= syncStorage.maxItems.');
    }
    return parsed;
};