@frostpillar/frostpillar-storage-engine 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README-JA.md +1205 -0
- package/README.md +1204 -0
- package/dist/drivers/file.cjs +960 -0
- package/dist/drivers/file.d.ts +3 -0
- package/dist/drivers/file.js +18 -0
- package/dist/drivers/indexedDB.cjs +570 -0
- package/dist/drivers/indexedDB.d.ts +3 -0
- package/dist/drivers/indexedDB.js +18 -0
- package/dist/drivers/localStorage.cjs +668 -0
- package/dist/drivers/localStorage.d.ts +3 -0
- package/dist/drivers/localStorage.js +23 -0
- package/dist/drivers/opfs.cjs +550 -0
- package/dist/drivers/opfs.d.ts +3 -0
- package/dist/drivers/opfs.js +18 -0
- package/dist/drivers/syncStorage.cjs +898 -0
- package/dist/drivers/syncStorage.d.ts +3 -0
- package/dist/drivers/syncStorage.js +22 -0
- package/dist/drivers/validation.d.ts +1 -0
- package/dist/drivers/validation.js +8 -0
- package/dist/errors/index.d.ts +32 -0
- package/dist/errors/index.js +48 -0
- package/dist/frostpillar-storage-engine.min.js +1 -0
- package/dist/index.cjs +2957 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +6 -0
- package/dist/storage/backend/asyncDurableAutoCommitController.d.ts +26 -0
- package/dist/storage/backend/asyncDurableAutoCommitController.js +188 -0
- package/dist/storage/backend/asyncMutex.d.ts +7 -0
- package/dist/storage/backend/asyncMutex.js +38 -0
- package/dist/storage/backend/autoCommit.d.ts +2 -0
- package/dist/storage/backend/autoCommit.js +22 -0
- package/dist/storage/backend/capacity.d.ts +2 -0
- package/dist/storage/backend/capacity.js +27 -0
- package/dist/storage/backend/capacityResolver.d.ts +3 -0
- package/dist/storage/backend/capacityResolver.js +25 -0
- package/dist/storage/backend/encoding.d.ts +17 -0
- package/dist/storage/backend/encoding.js +148 -0
- package/dist/storage/backend/types.d.ts +184 -0
- package/dist/storage/backend/types.js +1 -0
- package/dist/storage/btree/recordKeyIndexBTree.d.ts +39 -0
- package/dist/storage/btree/recordKeyIndexBTree.js +104 -0
- package/dist/storage/config/config.browser.d.ts +4 -0
- package/dist/storage/config/config.browser.js +8 -0
- package/dist/storage/config/config.d.ts +1 -0
- package/dist/storage/config/config.js +1 -0
- package/dist/storage/config/config.node.d.ts +4 -0
- package/dist/storage/config/config.node.js +74 -0
- package/dist/storage/config/config.shared.d.ts +6 -0
- package/dist/storage/config/config.shared.js +105 -0
- package/dist/storage/datastore/Datastore.d.ts +47 -0
- package/dist/storage/datastore/Datastore.js +525 -0
- package/dist/storage/datastore/datastoreClose.d.ts +12 -0
- package/dist/storage/datastore/datastoreClose.js +60 -0
- package/dist/storage/datastore/datastoreKeyDefinition.d.ts +7 -0
- package/dist/storage/datastore/datastoreKeyDefinition.js +60 -0
- package/dist/storage/datastore/datastoreLifecycle.d.ts +18 -0
- package/dist/storage/datastore/datastoreLifecycle.js +63 -0
- package/dist/storage/datastore/mutationById.d.ts +29 -0
- package/dist/storage/datastore/mutationById.js +71 -0
- package/dist/storage/drivers/IndexedDB/indexedDBBackend.d.ts +11 -0
- package/dist/storage/drivers/IndexedDB/indexedDBBackend.js +109 -0
- package/dist/storage/drivers/IndexedDB/indexedDBBackendController.d.ts +27 -0
- package/dist/storage/drivers/IndexedDB/indexedDBBackendController.js +60 -0
- package/dist/storage/drivers/IndexedDB/indexedDBConfig.d.ts +7 -0
- package/dist/storage/drivers/IndexedDB/indexedDBConfig.js +24 -0
- package/dist/storage/drivers/file/fileBackend.d.ts +5 -0
- package/dist/storage/drivers/file/fileBackend.js +168 -0
- package/dist/storage/drivers/file/fileBackendController.d.ts +31 -0
- package/dist/storage/drivers/file/fileBackendController.js +72 -0
- package/dist/storage/drivers/file/fileBackendSnapshot.d.ts +10 -0
- package/dist/storage/drivers/file/fileBackendSnapshot.js +166 -0
- package/dist/storage/drivers/localStorage/localStorageBackend.d.ts +10 -0
- package/dist/storage/drivers/localStorage/localStorageBackend.js +156 -0
- package/dist/storage/drivers/localStorage/localStorageBackendController.d.ts +24 -0
- package/dist/storage/drivers/localStorage/localStorageBackendController.js +35 -0
- package/dist/storage/drivers/localStorage/localStorageConfig.d.ts +10 -0
- package/dist/storage/drivers/localStorage/localStorageConfig.js +16 -0
- package/dist/storage/drivers/localStorage/localStorageLayout.d.ts +5 -0
- package/dist/storage/drivers/localStorage/localStorageLayout.js +29 -0
- package/dist/storage/drivers/opfs/opfsBackend.d.ts +12 -0
- package/dist/storage/drivers/opfs/opfsBackend.js +142 -0
- package/dist/storage/drivers/opfs/opfsBackendController.d.ts +26 -0
- package/dist/storage/drivers/opfs/opfsBackendController.js +44 -0
- package/dist/storage/drivers/syncStorage/syncStorageAdapter.d.ts +2 -0
- package/dist/storage/drivers/syncStorage/syncStorageAdapter.js +123 -0
- package/dist/storage/drivers/syncStorage/syncStorageBackend.d.ts +11 -0
- package/dist/storage/drivers/syncStorage/syncStorageBackend.js +169 -0
- package/dist/storage/drivers/syncStorage/syncStorageBackendController.d.ts +24 -0
- package/dist/storage/drivers/syncStorage/syncStorageBackendController.js +34 -0
- package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.d.ts +2 -0
- package/dist/storage/drivers/syncStorage/syncStorageChunkMaintenance.js +28 -0
- package/dist/storage/drivers/syncStorage/syncStorageConfig.d.ts +13 -0
- package/dist/storage/drivers/syncStorage/syncStorageConfig.js +42 -0
- package/dist/storage/drivers/syncStorage/syncStorageQuota.d.ts +3 -0
- package/dist/storage/drivers/syncStorage/syncStorageQuota.js +45 -0
- package/dist/storage/record/ordering.d.ts +3 -0
- package/dist/storage/record/ordering.js +7 -0
- package/dist/types.d.ts +125 -0
- package/dist/types.js +1 -0
- package/dist/validation/metadata.d.ts +1 -0
- package/dist/validation/metadata.js +7 -0
- package/dist/validation/payload.d.ts +7 -0
- package/dist/validation/payload.js +135 -0
- package/dist/validation/typeGuards.d.ts +1 -0
- package/dist/validation/typeGuards.js +7 -0
- package/package.json +110 -0
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
import type { BTreeJSON, CapacityPolicy } from '../../types.js';
/** Parsed capacity limit enforced by a storage backend. */
export interface CapacityState {
    maxSizeBytes: number;
    policy: CapacityPolicy;
}
/** Normalized auto-commit settings; interval/threshold are null when not scheduled/configured. */
export interface FileAutoCommitState {
    frequency: 'immediate' | 'scheduled';
    intervalMs: number | null;
    maxPendingBytes: number | null;
}
/** Platform-agnostic handle type for setInterval timers (Node vs browser typings differ). */
export type IntervalTimerHandle = ReturnType<typeof setInterval>;
/** Serialized tree snapshot stored in a file-backend data file. */
export interface FileGenerationSnapshot {
    magic: string;
    version: number;
    treeJSON: BTreeJSON<unknown, unknown>;
}
/** Sidecar metadata identifying the currently active data file and commit. */
export interface FileSidecarSnapshot {
    magic: string;
    version: number;
    activeDataFile: string;
    commitId: number;
}
/** Runtime state of the file backend: resolved paths, commit cursor, lock status. */
export interface FileBackendState {
    dataFilePath: string;
    directoryPath: string;
    baseFileName: string;
    sidecarPath: string;
    lockPath: string;
    activeDataFile: string;
    commitId: number;
    lockAcquired: boolean;
}
/** Controller contract implemented by durable (commit-capable) backend drivers. */
export interface DurableBackendController {
    handleRecordAppended(encodedBytes: number): Promise<void>;
    handleCleared(): Promise<void>;
    commitNow(): Promise<void>;
    close(): Promise<void>;
}
/** Minimal synchronous key/value surface (Web Storage API shape). */
export interface LocalStorageAdapter {
    getItem(key: string): string | null;
    setItem(key: string, value: string): void;
    removeItem(key: string): void;
}
/** Runtime state of the localStorage backend (chunked storage + generation/commit cursors). */
export interface LocalStorageBackendState {
    adapter: LocalStorageAdapter;
    keyPrefix: string;
    databaseKey: string;
    maxChunkChars: number;
    maxChunks: number;
    activeGeneration: number;
    commitId: number;
    activeChunkCount: number;
}
/** Manifest record describing the committed localStorage generation. */
export interface LocalStorageManifest {
    magic: string;
    version: number;
    activeGeneration: number;
    commitId: number;
    chunkCount: number;
}
/** Batch-oriented async key/value surface used by the sync-storage backend. */
export interface SyncStorageAdapter {
    getItems(keys: string[]): Promise<Record<string, unknown>>;
    setItems(items: Record<string, unknown>): Promise<void>;
    removeItems(keys: string[]): Promise<void>;
}
/** Promise-based storage-area shape (matches WebExtension-style promise APIs). */
export interface BrowserSyncStorageAreaPromiseAdapter {
    get(keys: string | string[] | null): Promise<Record<string, unknown>>;
    set(items: Record<string, unknown>): Promise<void>;
    remove(keys: string | string[]): Promise<void>;
}
/** Error object shape surfaced by callback-style runtimes. */
export interface ChromeRuntimeLastError {
    message?: string;
}
/** Subset of a chrome.runtime-like object used to read callback-API errors. */
export interface ChromeRuntimeAdapter {
    lastError?: ChromeRuntimeLastError;
}
/** Callback-based storage-area shape (chrome.storage-style API). */
export interface ChromeSyncStorageAreaCallbackAdapter {
    get(keys: string | string[] | null, callback: (items: Record<string, unknown>) => void): void;
    set(items: Record<string, unknown>, callback: () => void): void;
    remove(keys: string | string[], callback: () => void): void;
}
/** Runtime state of the sync-storage backend, including quota-related limits. */
export interface SyncStorageBackendState {
    adapter: SyncStorageAdapter;
    keyPrefix: string;
    databaseKey: string;
    maxChunkChars: number;
    maxChunks: number;
    maxItemBytes: number;
    maxTotalBytes: number;
    maxItems: number;
    activeGeneration: number;
    commitId: number;
    activeChunkCount: number;
}
/** Manifest record describing the committed sync-storage generation. */
export interface SyncStorageManifest {
    magic: string;
    version: number;
    activeGeneration: number;
    commitId: number;
    chunkCount: number;
}
/** Structural subset of an IndexedDB request object. */
export interface IDBRequestHandle<T> {
    result: T;
    onsuccess: ((event: {
        target: IDBRequestHandle<T>;
    }) => void) | null;
    onerror: ((event: {
        target: IDBRequestHandle<T>;
    }) => void) | null;
}
/** Record shape persisted in the IndexedDB object store. */
export interface IDBTreeRecord {
    magic: string;
    version: number;
    commitId: number;
    treeJSON: BTreeJSON<unknown, unknown>;
}
/** Structural subset of an IndexedDB object store. */
export interface IDBObjectStoreHandle {
    get(key: string): IDBRequestHandle<IDBTreeRecord | undefined>;
    put(value: IDBTreeRecord, key?: string): IDBRequestHandle<string>;
    clear(): IDBRequestHandle<undefined>;
}
/** Structural subset of an IndexedDB transaction. */
export interface IDBTransactionHandle {
    oncomplete: (() => void) | null;
    onerror: (() => void) | null;
    objectStore(name: string): IDBObjectStoreHandle;
}
/** Structural subset of an IndexedDB database connection. */
export interface IDBDatabaseHandle {
    close(): void;
    createObjectStore(name: string, options?: {
        keyPath?: string;
    }): IDBObjectStoreHandle;
    objectStoreNames: {
        contains(name: string): boolean;
    };
    transaction(storeNames: string[], mode: string): IDBTransactionHandle;
}
/** Structural subset of an IndexedDB open request, including upgrade events. */
export interface IDBOpenRequestHandle {
    result: IDBDatabaseHandle | null;
    error: Error | null;
    onsuccess: ((event: {
        target: IDBOpenRequestHandle;
    }) => void) | null;
    onerror: ((event: {
        target: IDBOpenRequestHandle;
    }) => void) | null;
    onupgradeneeded: ((event: {
        target: IDBOpenRequestHandle;
        oldVersion: number;
        newVersion: number;
    }) => void) | null;
}
/** Entry point for opening databases (indexedDB.open shape). */
export interface IDBFactoryAdapter {
    open(name: string, version?: number): IDBOpenRequestHandle;
}
/** Writable stream handle for an OPFS file. */
export interface OpfsFileWritable {
    write(data: string): Promise<void>;
    close(): Promise<void>;
}
/** OPFS file handle: read contents as text or open a writable stream. */
export interface OpfsFileHandle {
    getFile(): Promise<{
        text(): Promise<string>;
    }>;
    createWritable(opts?: {
        keepExistingData?: boolean;
    }): Promise<OpfsFileWritable>;
}
/** OPFS directory handle with child creation and removal. */
export interface OpfsDirectoryHandle {
    getDirectoryHandle(name: string, opts?: {
        create?: boolean;
    }): Promise<OpfsDirectoryHandle>;
    getFileHandle(name: string, opts?: {
        create?: boolean;
    }): Promise<OpfsFileHandle>;
    removeEntry(name: string): Promise<void>;
}
/** Root of the origin-private file system (navigator.storage shape). */
export interface OpfsStorageRoot {
    getDirectory(): Promise<OpfsDirectoryHandle>;
}
/** OPFS backend manifest; activeData selects which of the two data slots ('a'/'b') is current. */
export interface OpfsManifest {
    magic: string;
    version: number;
    activeData: 'a' | 'b';
    commitId: number;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// This module carries only type declarations; the empty export keeps it an ES module.
export {};
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { type BTreeEntry, type BTreeJSON, type DuplicateKeyPolicy, type EntryId } from '@frostpillar/frostpillar-btree';
export type { BTreeEntry, BTreeJSON, DuplicateKeyPolicy, EntryId };
/** Structural statistics for the underlying BTree. */
export interface RecordKeyIndexBTreeStats {
    height: number;
    leafCount: number;
    branchCount: number;
    entryCount: number;
}
/** Construction options: total-order key comparator plus an optional duplicate-key policy. */
export interface RecordKeyIndexBTreeConfig<TKey> {
    compareKeys: (left: TKey, right: TKey) => number;
    duplicateKeys?: DuplicateKeyPolicy;
}
/** Validates a comparator result (must be a finite integer) and collapses it to -1 | 0 | 1. */
export declare const normalizeComparatorResult: (compared: number) => number;
/** Collapses a comparator result to -1 | 0 | 1 without validation (hot-path variant). */
export declare const clampComparatorResult: (compared: number) => number;
/** Ordered key index over record entries, backed by an in-memory BTree with entry-id lookup. */
export declare class RecordKeyIndexBTree<TKey = unknown, TValue = unknown> {
    private readonly tree;
    constructor(config: RecordKeyIndexBTreeConfig<TKey>);
    put(key: TKey, value: TValue): EntryId;
    putMany(entries: readonly {
        key: TKey;
        value: TValue;
    }[]): EntryId[];
    peekById(entryId: EntryId): BTreeEntry<TKey, TValue> | null;
    updateById(entryId: EntryId, value: TValue): BTreeEntry<TKey, TValue> | null;
    removeById(entryId: EntryId): BTreeEntry<TKey, TValue> | null;
    rangeQuery(start: TKey, end: TKey): BTreeEntry<TKey, TValue>[];
    deleteRange(start: TKey, end: TKey): number;
    snapshot(): BTreeEntry<TKey, TValue>[];
    peekLast(): BTreeEntry<TKey, TValue> | null;
    popFirst(): BTreeEntry<TKey, TValue> | null;
    size(): number;
    findFirst(key: TKey): BTreeEntry<TKey, TValue> | null;
    findLast(key: TKey): BTreeEntry<TKey, TValue> | null;
    hasKey(key: TKey): boolean;
    keys(): IterableIterator<TKey>;
    toJSON(): BTreeJSON<TKey, TValue>;
    static fromJSON<TKey, TValue>(json: BTreeJSON<TKey, TValue>, config: RecordKeyIndexBTreeConfig<TKey>): RecordKeyIndexBTree<TKey, TValue>;
    clear(): void;
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { InMemoryBTree, } from '@frostpillar/frostpillar-btree';
|
|
2
|
+
import { IndexCorruptionError } from '../../errors/index.js';
|
|
3
|
+
// Validates a raw comparator result and collapses it to -1 | 0 | 1.
// A non-finite or non-integer result indicates a broken user comparator
// and is surfaced as index corruption.
export const normalizeComparatorResult = (compared) => {
    const isValidInteger = Number.isFinite(compared) && Number.isInteger(compared);
    if (!isValidInteger) {
        throw new IndexCorruptionError('key comparator must return a finite integer result.');
    }
    if (compared > 0) {
        return 1;
    }
    if (compared < 0) {
        return -1;
    }
    return 0;
};
|
|
12
|
+
// Fast-path variant of normalizeComparatorResult: collapses a comparator
// result to -1 | 0 | 1 with no validation overhead.
export const clampComparatorResult = (compared) => {
    if (compared !== 0) {
        return compared < 0 ? -1 : 1;
    }
    return 0;
};
|
|
18
|
+
// Wraps a user-supplied comparator so every result the tree sees is a clean
// -1 | 0 | 1. A NaN result would silently corrupt BTree ordering, so it is
// rejected eagerly.
const buildWrappedComparator = (compareKeys) => {
    return (left, right) => {
        const compared = compareKeys(left, right);
        if (Number.isNaN(compared)) {
            throw new IndexCorruptionError('key comparator must not return NaN.');
        }
        return clampComparatorResult(compared);
    };
};
|
|
28
|
+
/**
 * Ordered key index wrapping InMemoryBTree. Query and mutation methods are
 * thin delegations to the underlying tree; the caller's comparator is wrapped
 * so NaN results throw and all results are clamped to -1 | 0 | 1.
 */
export class RecordKeyIndexBTree {
    // Underlying InMemoryBTree instance (assigned in constructor or fromJSON).
    tree;
    constructor(config) {
        const wrappedComparator = buildWrappedComparator(config.compareKeys);
        const treeConfig = {
            compareKeys: wrappedComparator,
            duplicateKeys: config.duplicateKeys ?? 'allow',
            // Needed so the *ById methods can address entries directly.
            enableEntryIdLookup: true,
        };
        this.tree = new InMemoryBTree(treeConfig);
    }
    /** Inserts a single entry; returns its EntryId. */
    put(key, value) {
        return this.tree.put(key, value);
    }
    /** Inserts a batch of entries; returns the new EntryIds. */
    putMany(entries) {
        return this.tree.putMany(entries);
    }
    /** Reads an entry by id without removing it; null when absent. */
    peekById(entryId) {
        return this.tree.peekById(entryId);
    }
    /** Replaces the value of the entry with the given id; null when absent. */
    updateById(entryId, value) {
        return this.tree.updateById(entryId, value);
    }
    /** Removes the entry with the given id; null when absent. */
    removeById(entryId) {
        return this.tree.removeById(entryId);
    }
    /** Returns all entries whose keys fall in [start, end] per the tree's range(). */
    rangeQuery(start, end) {
        return this.tree.range(start, end);
    }
    /** Deletes entries in [start, end] — both bounds inclusive. */
    deleteRange(start, end) {
        return this.tree.deleteRange(start, end, {
            lowerBound: 'inclusive',
            upperBound: 'inclusive',
        });
    }
    /** Materializes all entries. */
    snapshot() {
        return this.tree.snapshot();
    }
    /** Last entry in key order without removal; null when empty. */
    peekLast() {
        return this.tree.peekLast();
    }
    /** Removes and returns the first entry in key order; null when empty. */
    popFirst() {
        return this.tree.popFirst();
    }
    /** Number of entries. */
    size() {
        return this.tree.size();
    }
    /** First entry matching key; null when absent. */
    findFirst(key) {
        return this.tree.findFirst(key);
    }
    /** Last entry matching key; null when absent. */
    findLast(key) {
        return this.tree.findLast(key);
    }
    /** True when at least one entry has the given key. */
    hasKey(key) {
        return this.tree.hasKey(key);
    }
    /** Iterator over keys in tree order. */
    keys() {
        return this.tree.keys();
    }
    /** Serializes the tree for persistence. */
    toJSON() {
        return this.tree.toJSON();
    }
    /**
     * Rehydrates an index from its JSON form. Object.create bypasses the
     * constructor (which would build an empty tree). The persisted
     * duplicate-key policy is overridden when the caller's config differs.
     */
    static fromJSON(json, config) {
        const wrappedComparator = buildWrappedComparator(config.compareKeys);
        const adapter = Object.create(RecordKeyIndexBTree.prototype);
        const resolvedPolicy = config.duplicateKeys ?? 'allow';
        const patchedJSON = resolvedPolicy !== json.config.duplicateKeys
            ? { ...json, config: { ...json.config, duplicateKeys: resolvedPolicy } }
            : json;
        adapter.tree =
            InMemoryBTree.fromJSON(patchedJSON, wrappedComparator);
        return adapter;
    }
    /** Removes all entries. */
    clear() {
        this.tree.clear();
    }
}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { FileBackendConfig } from '../../types.js';
export { parseCapacityConfig, parseAutoCommitConfig, } from './config.shared.js';
// Browser build: these declarations mirror the Node signatures, but the
// implementations throw UnsupportedBackendError (see config.browser.js).
export declare const ensureCanonicalPathWithinWorkingDirectory: (_targetPath: string, _optionName: string) => void;
export declare const resolveFileDataPath: (_config: FileBackendConfig) => string;
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { UnsupportedBackendError } from '../../errors/index.js';
export { parseCapacityConfig, parseAutoCommitConfig, } from './config.shared.js';
// Browser "core" bundle stubs: file-system path handling is Node-only, so
// both entry points fail fast with UnsupportedBackendError.
export const ensureCanonicalPathWithinWorkingDirectory = (_targetPath, _optionName) => {
    throw new UnsupportedBackendError('Path canonicalization is unavailable in browser bundle profile "core".');
};
export const resolveFileDataPath = (_config) => {
    throw new UnsupportedBackendError('File backend path resolution is unavailable in browser bundle profile "core".');
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Platform-neutral entry: re-export the shared config parsers.
export * from './config.shared.js';
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Platform-neutral entry: re-export the shared config parsers.
export * from './config.shared.js';
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { FileBackendConfig } from '../../types.js';
export { parseCapacityConfig, parseAutoCommitConfig, } from './config.shared.js';
// Node build: real implementations backed by node:fs / node:path.
export declare const ensureCanonicalPathWithinWorkingDirectory: (targetPath: string, optionName: string) => void;
export declare const resolveFileDataPath: (config: FileBackendConfig) => string;
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import { existsSync, realpathSync } from 'node:fs';
|
|
2
|
+
import { dirname, isAbsolute, join, relative, resolve } from 'node:path';
|
|
3
|
+
import { ConfigurationError } from '../../errors/index.js';
|
|
4
|
+
export { parseCapacityConfig, parseAutoCommitConfig, } from './config.shared.js';
|
|
5
|
+
// Conservative screens applied to config-supplied name fragments. Any '..'
// substring is treated as a traversal token (stricter than strictly needed).
const containsPathTraversalToken = (value) => value.includes('..');
// Detects both POSIX and Windows path separators.
const hasPathSeparator = (value) => {
    const hasForwardSlash = value.includes('/');
    return hasForwardSlash || value.includes('\\');
};
|
|
11
|
+
// True when targetPath is baseDirectory itself or located underneath it.
// A relative path that starts with '..' or is absolute escapes the base.
const isPathWithinBaseDirectory = (targetPath, baseDirectory) => {
    const relativePath = relative(baseDirectory, targetPath);
    if (relativePath === '') {
        return true;
    }
    return !relativePath.startsWith('..') && !isAbsolute(relativePath);
};
|
|
16
|
+
// Walks upward from targetPath until a path that exists on disk is found.
// Stops at the filesystem root (where dirname(p) === p) even if nothing
// exists, returning the root in that degenerate case.
const resolveNearestExistingAncestor = (targetPath) => {
    let candidate = resolve(targetPath);
    for (;;) {
        if (existsSync(candidate)) {
            return candidate;
        }
        const parent = dirname(candidate);
        if (parent === candidate) {
            return candidate;
        }
        candidate = parent;
    }
};
|
|
27
|
+
// Canonicalizes targetPath for containment checks: the deepest existing
// ancestor is resolved through symlinks (realpathSync), then the
// not-yet-existing suffix is re-appended onto the canonical base.
const resolveCanonicalPathForContainment = (targetPath) => {
    const absoluteTarget = resolve(targetPath);
    const existingAncestor = resolveNearestExistingAncestor(absoluteTarget);
    const canonicalAncestor = realpathSync(existingAncestor);
    const suffix = relative(existingAncestor, absoluteTarget);
    return resolve(join(canonicalAncestor, suffix));
};
|
|
34
|
+
// Rejects configuration paths that escape the process working directory,
// including escapes via symlinks (both sides are canonicalized first).
export const ensureCanonicalPathWithinWorkingDirectory = (targetPath, optionName) => {
    const workingDirectory = realpathSync(resolve(process.cwd()));
    const candidate = resolveCanonicalPathForContainment(targetPath);
    if (isPathWithinBaseDirectory(candidate, workingDirectory)) {
        return;
    }
    throw new ConfigurationError(`${optionName} must stay within process.cwd().`);
};
|
|
41
|
+
// Rejects config-provided name fragments that could redirect the resolved
// file outside the intended directory.
const ensureSafeFileNameFragment = (value, optionName) => {
    const isUnsafe = hasPathSeparator(value) || containsPathTraversalToken(value);
    if (isUnsafe) {
        throw new ConfigurationError(`${optionName} must not contain path separators or traversal tokens.`);
    }
};
|
|
46
|
+
/**
 * Resolves the data-file path for the file backend from its config.
 * Precedence: explicit `filePath` > `target` (either a direct path or a
 * directory + name parts) > default './frostpillar.fpdb'. Every resolved
 * path must stay inside process.cwd(); name fragments are screened for
 * separators/traversal tokens.
 */
export const resolveFileDataPath = (config) => {
    // The two addressing modes are mutually exclusive.
    if (config.filePath !== undefined && config.target !== undefined) {
        throw new ConfigurationError('filePath and target cannot be specified together.');
    }
    if (config.filePath !== undefined) {
        const resolvedFilePath = resolve(config.filePath);
        ensureCanonicalPathWithinWorkingDirectory(resolvedFilePath, 'filePath');
        return resolvedFilePath;
    }
    if (config.target === undefined) {
        // Default location is relative to cwd, so it is within cwd by construction.
        return resolve('./frostpillar.fpdb');
    }
    if (config.target.kind === 'path') {
        const resolvedFilePath = resolve(config.target.filePath);
        ensureCanonicalPathWithinWorkingDirectory(resolvedFilePath, 'target.filePath');
        return resolvedFilePath;
    }
    // Directory mode: directory is validated against cwd, then the file name
    // is assembled from screened prefix/name fragments.
    const directoryPath = resolve(config.target.directory);
    ensureCanonicalPathWithinWorkingDirectory(directoryPath, 'target.directory');
    const filePrefix = config.target.filePrefix ?? '';
    const fileName = config.target.fileName ?? 'frostpillar';
    ensureSafeFileNameFragment(filePrefix, 'target.filePrefix');
    ensureSafeFileNameFragment(fileName, 'target.fileName');
    const resolvedFilePath = resolve(join(directoryPath, `${filePrefix}${fileName}.fpdb`));
    // Defense in depth: re-check containment of the final assembled path.
    if (!isPathWithinBaseDirectory(resolvedFilePath, directoryPath)) {
        throw new ConfigurationError('Resolved file path must stay within target.directory.');
    }
    return resolvedFilePath;
};
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { AutoCommitConfig, CapacityConfig, DuplicateKeyPolicy } from '../../types.js';
import type { CapacityState, FileAutoCommitState } from '../backend/types.js';
/** Parses the optional capacity section; undefined yields null (no limit). */
export declare const parseCapacityConfig: (capacity?: CapacityConfig) => CapacityState | null;
/** Normalizes the optional autoCommit section; defaults to immediate commits. */
export declare const parseAutoCommitConfig: (autoCommit?: AutoCommitConfig) => FileAutoCommitState;
/** Validates and defaults `duplicateKeys` config. Safe for JS callers passing arbitrary values. */
export declare const parseDuplicateKeyConfig: (duplicateKeys?: DuplicateKeyPolicy) => DuplicateKeyPolicy;
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import { ConfigurationError } from '../../errors/index.js';
|
|
2
|
+
// `capacity.maxSize` string form: "<positive integer><B|KB|MB|GB>" (binary units).
const BYTE_SIZE_REGEX = /^(\d+)(B|KB|MB|GB)$/;
const BYTE_SIZE_MULTIPLIER = {
    B: 1,
    KB: 1024,
    MB: 1024 * 1024,
    GB: 1024 * 1024 * 1024,
};
// `autoCommit.frequency` string form: "<positive integer><ms|s|m|h>".
const FREQUENCY_REGEX = /^(\d+)(ms|s|m|h)$/;
const FREQUENCY_MULTIPLIER = {
    ms: 1,
    s: 1000,
    m: 60 * 1000,
    h: 60 * 60 * 1000,
};
|
|
16
|
+
// Converts a capacity.maxSize value — a positive safe integer, or a string
// of the form "<n><B|KB|MB|GB>" — into a byte count. The sentinel
// 'backendLimit' must already have been substituted by the caller.
const normalizeByteSizeInput = (value) => {
    if (value === 'backendLimit') {
        throw new ConfigurationError('capacity.maxSize "backendLimit" must be resolved before capacity parsing.');
    }
    if (typeof value === 'number') {
        if (Number.isSafeInteger(value) && value > 0) {
            return value;
        }
        throw new ConfigurationError('capacity.maxSize must be a positive safe integer.');
    }
    const parsed = BYTE_SIZE_REGEX.exec(value);
    if (parsed === null) {
        throw new ConfigurationError('capacity.maxSize string must be <positive><B|KB|MB|GB>.');
    }
    const amount = Number(parsed[1]);
    if (!Number.isSafeInteger(amount) || amount <= 0) {
        throw new ConfigurationError('capacity.maxSize must be a positive safe integer.');
    }
    const totalBytes = amount * BYTE_SIZE_MULTIPLIER[parsed[2]];
    if (!Number.isSafeInteger(totalBytes) || totalBytes <= 0) {
        throw new ConfigurationError('capacity.maxSize exceeds safe integer range.');
    }
    return totalBytes;
};
|
|
42
|
+
// Parses the optional capacity section into CapacityState; undefined means
// "no capacity limit" and yields null. The policy defaults to 'strict'.
export const parseCapacityConfig = (capacity) => {
    if (capacity === undefined) {
        return null;
    }
    const maxSizeBytes = normalizeByteSizeInput(capacity.maxSize);
    const policy = capacity.policy ?? 'strict';
    const isKnownPolicy = policy === 'strict' || policy === 'turnover';
    if (!isKnownPolicy) {
        throw new ConfigurationError('capacity.policy must be "strict" or "turnover".');
    }
    return { maxSizeBytes, policy };
};
|
|
53
|
+
// Converts an autoCommit.frequency string ("<n>ms" | "<n>s" | "<n>m" | "<n>h")
// into a millisecond interval, rejecting non-positive or unsafe values.
const parseFrequencyString = (frequency) => {
    const parsed = FREQUENCY_REGEX.exec(frequency);
    if (parsed === null) {
        throw new ConfigurationError('autoCommit.frequency string must be one of: <positive>ms, <positive>s, <positive>m, <positive>h.');
    }
    const amount = Number(parsed[1]);
    if (!Number.isSafeInteger(amount) || amount <= 0) {
        throw new ConfigurationError('autoCommit.frequency string amount must be a positive safe integer.');
    }
    const intervalMs = amount * FREQUENCY_MULTIPLIER[parsed[2]];
    if (!Number.isSafeInteger(intervalMs) || intervalMs <= 0) {
        throw new ConfigurationError('autoCommit.frequency exceeds safe integer range.');
    }
    return intervalMs;
};
|
|
70
|
+
// Normalizes the optional autoCommit section into FileAutoCommitState.
// Omitted config (or frequency 'immediate') means commit-on-every-write
// with no pending-byte threshold.
export const parseAutoCommitConfig = (autoCommit) => {
    const rawPendingBytes = autoCommit?.maxPendingBytes;
    if (rawPendingBytes !== undefined &&
        (!Number.isSafeInteger(rawPendingBytes) || rawPendingBytes <= 0)) {
        throw new ConfigurationError('autoCommit.maxPendingBytes must be a positive safe integer.');
    }
    const maxPendingBytes = rawPendingBytes ?? null;
    const frequency = autoCommit?.frequency;
    if (frequency === undefined || frequency === 'immediate') {
        return { frequency: 'immediate', intervalMs: null, maxPendingBytes };
    }
    if (typeof frequency !== 'number') {
        // Duration strings like "500ms" / "5s" are parsed into milliseconds.
        return { frequency: 'scheduled', intervalMs: parseFrequencyString(frequency), maxPendingBytes };
    }
    if (!Number.isSafeInteger(frequency) || frequency <= 0) {
        throw new ConfigurationError('autoCommit.frequency number must be a positive safe integer.');
    }
    return { frequency: 'scheduled', intervalMs: frequency, maxPendingBytes };
};
|
|
91
|
+
// Closed set of accepted duplicate-key policies.
const VALID_DUPLICATE_KEY_POLICIES = [
    'allow',
    'replace',
    'reject',
];
/** Validates and defaults `duplicateKeys` config. Safe for JS callers passing arbitrary values. */
export const parseDuplicateKeyConfig = (duplicateKeys) => {
    if (duplicateKeys === undefined) {
        return 'allow';
    }
    if (VALID_DUPLICATE_KEY_POLICIES.includes(duplicateKeys)) {
        return duplicateKeys;
    }
    throw new ConfigurationError('duplicateKeys must be "allow", "replace", or "reject".');
};
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import type { DatastoreConfig, DatastoreErrorListener, EntryId, InputRecord, KeyedRecord } from '../../types.js';
/**
 * Public async datastore facade: keyed CRUD, range queries, entry-id access,
 * explicit commit, error-listener subscription, and lifecycle (close).
 * Write operations are serialized through an internal write mutex.
 */
export declare class Datastore {
    private readonly errorListeners;
    private keyIndex;
    private readonly keyDefinition;
    private readonly duplicateKeyPolicy;
    private readonly capacityState;
    private readonly skipPayloadValidation;
    private readonly lifecycle;
    private readonly writeMutex;
    private currentSizeBytes;
    private backendController;
    private pendingInit;
    private pendingInitError;
    constructor(config: DatastoreConfig);
    put(record: InputRecord<unknown>): Promise<void>;
    /** All records matching key (may be several under the 'allow' duplicate policy). */
    get(key: unknown): Promise<KeyedRecord<unknown>[]>;
    getFirst(key: unknown): Promise<KeyedRecord<unknown> | null>;
    getLast(key: unknown): Promise<KeyedRecord<unknown> | null>;
    /** Deletes all records matching key; resolves to the removed count. */
    delete(key: unknown): Promise<number>;
    has(key: unknown): Promise<boolean>;
    getAll(): Promise<KeyedRecord<unknown>[]>;
    getRange(start: unknown, end: unknown): Promise<KeyedRecord<unknown>[]>;
    getMany(keys: unknown[]): Promise<KeyedRecord<unknown>[]>;
    putMany(records: InputRecord<unknown>[]): Promise<void>;
    deleteMany(keys: unknown[]): Promise<number>;
    clear(): Promise<void>;
    count(): Promise<number>;
    keys(): Promise<unknown[]>;
    getById(id: EntryId): Promise<KeyedRecord<unknown> | null>;
    updateById(id: EntryId, patch: Partial<KeyedRecord<unknown>['payload']>): Promise<boolean>;
    deleteById(id: EntryId): Promise<boolean>;
    /** Forces a durable commit of pending state. */
    commit(): Promise<void>;
    /** Subscribes an error listener; the returned function unsubscribes it. */
    on(event: 'error', listener: DatastoreErrorListener): () => void;
    off(event: 'error', listener: DatastoreErrorListener): void;
    close(): Promise<void>;
    private resolvePayload;
    private putSingle;
    private putManyStrict;
    private buildStrictBatchEntries;
    private deleteSingle;
    private runWithOpen;
    private executeWithLifecycle;
    private runWithOpenExclusive;
    private applyBackendInitResult;
    private backfillMissingSizeBytes;
}
|