sliftutils 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cursorrules +161 -0
- package/.eslintrc.js +38 -0
- package/.vscode/settings.json +39 -0
- package/bundler/buffer.js +2370 -0
- package/bundler/bundleEntry.ts +32 -0
- package/bundler/bundleEntryCaller.ts +8 -0
- package/bundler/bundleRequire.ts +244 -0
- package/bundler/bundleWrapper.ts +115 -0
- package/bundler/bundler.ts +72 -0
- package/bundler/flattenSourceMaps.ts +0 -0
- package/bundler/sourceMaps.ts +261 -0
- package/misc/environment.ts +11 -0
- package/misc/types.ts +3 -0
- package/misc/zip.ts +37 -0
- package/package.json +24 -0
- package/spec.txt +33 -0
- package/storage/CachedStorage.ts +32 -0
- package/storage/DelayedStorage.ts +30 -0
- package/storage/DiskCollection.ts +272 -0
- package/storage/FileFolderAPI.tsx +427 -0
- package/storage/IStorage.ts +40 -0
- package/storage/IndexedDBFileFolderAPI.ts +170 -0
- package/storage/JSONStorage.ts +35 -0
- package/storage/PendingManager.tsx +63 -0
- package/storage/PendingStorage.ts +47 -0
- package/storage/PrivateFileSystemStorage.ts +192 -0
- package/storage/StorageObservable.ts +122 -0
- package/storage/TransactionStorage.ts +485 -0
- package/storage/fileSystemPointer.ts +81 -0
- package/storage/storage.d.ts +41 -0
- package/tsconfig.json +31 -0
- package/web/DropdownCustom.tsx +150 -0
- package/web/FullscreenModal.tsx +75 -0
- package/web/GenericFormat.tsx +186 -0
- package/web/Input.tsx +350 -0
- package/web/InputLabel.tsx +288 -0
- package/web/InputPicker.tsx +158 -0
- package/web/LocalStorageParam.ts +56 -0
- package/web/SyncedController.ts +405 -0
- package/web/SyncedLoadingIndicator.tsx +37 -0
- package/web/Table.tsx +188 -0
- package/web/URLParam.ts +84 -0
- package/web/asyncObservable.ts +40 -0
- package/web/colors.tsx +14 -0
- package/web/mobxTyped.ts +29 -0
- package/web/modal.tsx +18 -0
- package/web/observer.tsx +35 -0
package/storage/TransactionStorage.ts
ADDED
@@ -0,0 +1,485 @@
import { PromiseObj, sort, throttleFunction } from "socket-function/src/misc";
import { IStorage, IStorageRaw } from "./IStorage";
import { Zip } from "../misc/zip";
import { runInSerial } from "socket-function/src/batching";
import { formatNumber, formatTime } from "socket-function/src/formatting/format";
import { setPending } from "./PendingManager";
import { isInBuild } from "../misc/environment";

/*
// Spec:
// - Zip individual large values
// - Stores a transaction log
// - Transaction log has a header, which is JSON, for things such as "zipped"
// - Transaction log uses length prefixed values, with a special 8 bytes to denote the end,
//      and 8 for the start.
// - When transaction log is iterated on, if the bytes at the end (using the length prefix)
//      don't match the end bytes OR the start bytes are wrong, we skip the start bytes,
//      and iterate until we find new start bytes that match our special bytes. Then we try
//      to read these values.
// - Each transaction entry has a byte for flags, one bit which denotes if the value is zipped or not.
// - Compresses the log after there are 3X as many entries as keys (and > 100)
//      - Both dedupes keys, and zips
// - Assumes all files in rawStorage ending with .tx are transaction logs
// - Names files like `${generation}.tx`
// - When compressing, we increment the generation, and write to a new file, and delete
//      any generations that are older than the new one
// - On reading, we read from all generation files that exist, in case some are corrupted
// - On load, loads in all transaction logs, and stores all values in a Map<string, Buffer>
// - On writes, immediately updates the in memory Map, and then writes to the transaction log
// - Caches the last transaction file name in memory
// - Makes sure all file system writes (but not Map updates) are done with fileLockSection,
//      so they never overlap.
// - Buffers pending appends in memory, so they can be written all at once (after the first one
//      is blocking in fileLockSection).

UPDATE: now we use chunks, because append is too slow.

IMPORTANT! If there are multiple writers, we clobber writes from other writers when we compress
*/


const FILE_CHUNK_SIZE = 1024 * 1024;

const FILE_ZIP_THRESHOLD = 16 * 1024 * 1024;

const ZIP_THRESHOLD = 4096;
const START_BYTES = Buffer.from([236, 49, 112, 121, 27, 127, 227, 63]);
const END_BYTES = Buffer.from([220, 111, 243, 202, 200, 79, 213, 63]);
// Delay writes, so we batch better, and thrash the disk less
const WRITE_DELAY = 500;

interface TransactionEntry {
    key: string;
    value: Buffer | undefined;
    isZipped: boolean;
    time: number;
}

interface TransactionHeader {
    zipped: boolean;
}

const fileLockSection = runInSerial(async (fnc: () => Promise<void>) => {
    await fnc();
});

const CHUNK_EXT = ".chunk";
const ourId = Date.now() + Math.random();

export class TransactionStorage implements IStorage<Buffer> {
    public cache: Map<string, TransactionEntry> = new Map();
    private currentChunk = 0;
    private currentChunkSize = 0;
    private entryCount = 0;

    private static allStorage: TransactionStorage[] = [];

    constructor(
        private rawStorage: IStorageRaw,
        private debugName: string,
        private writeDelay = WRITE_DELAY
    ) {
        TransactionStorage.allStorage.push(this);
    }
    // Helps get rid of parse errors which constantly log. Also, uses less space
    public static async compressAll() {
        for (let storage of TransactionStorage.allStorage) {
            await storage.compressTransactionLog(true);
        }
    }

    private init: Promise<void> | undefined = this.loadAllTransactions();

    private getChunk(chunk: number) { return `${chunk}_${ourId}${CHUNK_EXT}`; }

    public async get(key: string): Promise<Buffer | undefined> {
        await this.init;
        const value = this.cache.get(key);
        if (value && value.isZipped && value.value) {
            value.value = await Zip.gunzip(value.value);
            value.isZipped = false;
        }
        return value?.value;
    }

    public async set(key: string, value: Buffer): Promise<void> {
        if (this.init) await this.init;

        // Time is set on disk write, as Date.now() is too slow
        let entry: TransactionEntry = { key, value, isZipped: false, time: 0 };
        this.cache.set(key, entry);

        if (value.length >= ZIP_THRESHOLD) {
            value = await Zip.gzip(value);
            entry.value = value;
            entry.isZipped = true;
        }
        await this.pushAppend(entry);
    }

    public async remove(key: string): Promise<void> {
        if (this.init) await this.init;
        this.cache.delete(key);

        await this.pushAppend({ key, value: undefined, isZipped: false, time: 0 });
    }

    public async getInfo(key: string): Promise<{ size: number; lastModified: number } | undefined> {
        await this.init;
        const value = this.cache.get(key);
        if (!value?.value) return undefined;
        return { size: value.value.length, lastModified: value.time };
    }

    private pendingAppends: TransactionEntry[] = [];
    private extraAppends = 0;
    private pendingWrite: Promise<void> | undefined;
    async pushAppend(entry: TransactionEntry): Promise<void> {
        this.entryCount++;
        this.pendingAppends.push(entry);
        void this.updatePendingAppends();
        if (this.pendingWrite) return this.pendingWrite;
        this.pendingWrite = fileLockSection(async () => {
            // Delay to allow batching, and deduping
            await new Promise(resolve => setTimeout(resolve, this.writeDelay));
            let curAppends = this.pendingAppends;
            this.pendingAppends = [];
            this.pendingWrite = undefined;
            {
                let appendsDeduped: Map<string, TransactionEntry> = new Map();
                for (const entry of curAppends) {
                    appendsDeduped.set(entry.key, entry);
                }
                curAppends = Array.from(appendsDeduped.values());
            }
            this.extraAppends += curAppends.length;
            void this.updatePendingAppends();
            if (curAppends.length === 0) return;
            try {

                let time = Date.now();
                for (let entry of curAppends) {
                    entry.time = time;
                }

                let newSum = 0;
                let buffers: Buffer[] = [];
                for (const entry of curAppends) {
                    let buffer = this.serializeTransactionEntry(entry);
                    buffers.push(buffer);
                    newSum += buffer.length;
                }

                let newChunks = this.chunkBuffers(buffers);
                for (let chunk of newChunks) {
                    let file = this.getChunk(this.currentChunk);
                    if (!await this.rawStorage.get(file)) {
                        let { header, headerBuffer } = this.getHeader(false);
                        await this.rawStorage.set(file, headerBuffer);
                    }
                    let content = chunk.buffer;
                    await this.rawStorage.append(file, content);
                    this.currentChunkSize += content.length;
                    if (this.currentChunkSize >= FILE_CHUNK_SIZE) {
                        this.currentChunk++;
                        this.currentChunkSize = 0;
                    }
                }

                await this.compressTransactionLog();
            } finally {
                this.extraAppends -= curAppends.length;
                void this.updatePendingAppends();
            }
        });
        await this.pendingWrite;
    }

    private updatePendingAppends = throttleFunction(100, async () => {
        let appendCount = this.pendingAppends.length + this.extraAppends;
        let group = `Transaction (${this.debugName})`;
        //console.log(`Update pending appends ${group}: ${appendCount}`);
        if (!appendCount) {
            setPending(group, "");
            return;
        }
        setPending(group, `Pending appends: ${appendCount}`);
    });

    public async getKeys(): Promise<string[]> {
        if (this.init) await this.init;
        return Array.from(this.cache.keys());
    }


    private async loadAllTransactions(): Promise<void> {
        if (isInBuild()) return;
        let time = Date.now();
        const keys = await this.rawStorage.getKeys();
        const transactionFiles = keys.filter(key => key.endsWith(CHUNK_EXT));

        sort(transactionFiles, x => parseInt(x));

        let size = 0;
        for (const file of transactionFiles) {
            let chunk = parseInt(file);
            let curSize = await this.loadTransactionFile(file);
            if (chunk >= this.currentChunk) {
                this.currentChunk = chunk;
                this.currentChunkSize = curSize;
            }
            size += curSize;
        }
        time = Date.now() - time;
        if (time > 50) {
            console.log(`Loaded ${this.debugName} in ${formatTime(time)}, ${formatNumber(this.cache.size)} keys, ${formatNumber(size)}B`);
        }

        this.init = undefined;
    }

    private async loadTransactionFile(filename: string): Promise<number> {
        const fullFile = await this.rawStorage.get(filename);
        if (!fullFile) return 0;
        if (fullFile.length < 4) {
            //console.error(`Transaction in ${this.debugName} file ${filename} is too small, skipping`);
            return 0;
        }
        let headerSize = fullFile.readUInt32LE(0);
        let headerBuffer = fullFile.slice(4, 4 + headerSize);
        let header: TransactionHeader;
        try {
            header = JSON.parse(headerBuffer.toString());
        } catch (e) {
            console.error(`Failed to parse header of transaction file in ${this.debugName}, ${filename}`);
            return 0;
        }
        let content = fullFile.slice(4 + headerSize);
        if (header.zipped) {
            content = await Zip.gunzip(content);
        }

        let offset = 0;
        let entries: TransactionEntry[] = [];
        while (offset < content.length) {
            if (!content.slice(offset, offset + START_BYTES.length).equals(START_BYTES)) {
                let s = offset;
                while (offset < content.length && !content.slice(offset, offset + START_BYTES.length).equals(START_BYTES)) {
                    offset++;
                }
                let len = offset - s;
                console.warn(`Found bad bytes in ${filename}, skipping ${len} bytes at offset ${s}. Total file bytes ${content.length}, read ${entries.length} entries`);
                if (offset >= content.length) break;
            }
            let entryObj: { entry: TransactionEntry, offset: number } | undefined;
            try {
                entryObj = this.readTransactionEntry(content, offset);
            } catch (e: any) {
                if (e.message.includes("Read past end of buffer")) {
                    offset += 1;
                    continue;
                }
                throw e;
            }
            if (!entryObj) {
                console.warn(`Failed to read transaction entry in ${this.debugName}, file ${filename} at offset ${offset}, skipping bad bytes, reading remainder of file`);
                offset++;
                continue;
            }
            this.entryCount++;
            let { entry } = entryObj;
            offset = entryObj.offset;
            entries.push(entry);

            if (entry.value === undefined) {
                this.cache.delete(entry.key);
            } else {
                let prev = this.cache.get(entry.key);
                if (prev && (prev.time > entry.time)) {
                    continue;
                }
                this.cache.set(entry.key, entry);
            }
        }
        return fullFile.length;
    }

    private readTransactionEntry(buffer: Buffer, offset: number): {
        entry: TransactionEntry;
        offset: number;
    } | undefined {
        function readSlice(count: number) {
            const slice = buffer.slice(offset, offset + count);
            if (slice.length < count) throw new Error(`Read past end of buffer at offset ${offset}/${buffer.length}`);
            offset += count;
            return slice;
        }
        if (!readSlice(START_BYTES.length).equals(START_BYTES)) return undefined;

        const keyLength = readSlice(4).readUInt32LE(0);
        const valueLength = readSlice(4).readUInt32LE(0);
        const time = readSlice(8).readDoubleLE(0);
        const flags = readSlice(1).readUInt8(0);

        const key = readSlice(keyLength).toString();
        let value = readSlice(valueLength);

        if (!readSlice(END_BYTES.length).equals(END_BYTES)) return undefined;

        let isZipped = (flags & 1) === 1;
        let isDelete = (flags & 2) === 2;

        let entry: TransactionEntry = { key, value, isZipped, time };
        if (isDelete) {
            entry.value = undefined;
        }
        return { entry, offset };
    }

    // TODO: Make this directly go from TransactionEntry[] to Buffer, by pre-allocating, so it is more efficient
    private serializeTransactionEntry(entry: TransactionEntry): Buffer {
        let keyBuffer = Buffer.from(entry.key);
        const buffer = Buffer.alloc(
            START_BYTES.length + 4 + 4 + 8 + 1 + keyBuffer.length + (entry.value?.length || 0) + END_BYTES.length
        );
        let offset = 0;

        START_BYTES.copy(buffer, offset);
        offset += START_BYTES.length;

        buffer.writeUInt32LE(keyBuffer.length, offset);
        offset += 4;

        buffer.writeUInt32LE(entry.value ? entry.value.length : 0, offset);
        offset += 4;

        buffer.writeDoubleLE(entry.time, offset);
        offset += 8;

        let flags = 0;
        if (entry.isZipped) flags |= 1;
        if (entry.value === undefined) flags |= 2;
        buffer.writeUInt8(flags, offset);
        offset += 1;

        keyBuffer.copy(buffer, offset);
        offset += keyBuffer.length;

        if (entry.value) {
            entry.value.copy(buffer, offset);
            offset += entry.value.length;
        }

        END_BYTES.copy(buffer, offset);
        offset += END_BYTES.length;

        return buffer;
    }

    private getHeader(zip: boolean) {
        const header: TransactionHeader = { zipped: zip };
        let headerBuffer = Buffer.from(JSON.stringify(header));
        let headerSize = Buffer.alloc(4);
        headerSize.writeUInt32LE(headerBuffer.length, 0);
        return { header, headerBuffer: Buffer.concat([headerSize, headerBuffer]) };
    }

    private chunkBuffers(buffers: Buffer[]) {
        let newChunks: {
            buffers: Buffer[];
            size: number;
        }[] = [];
        newChunks.push({ buffers: [], size: 0 });
        for (const buffer of buffers) {
            if (newChunks[newChunks.length - 1].size + buffer.length >= FILE_CHUNK_SIZE) {
                newChunks.push({ buffers: [], size: 0 });
            }
            newChunks[newChunks.length - 1].buffers.push(buffer);
            newChunks[newChunks.length - 1].size += buffer.length;
        }
        return newChunks.map(x => ({ buffer: Buffer.concat(x.buffers), size: x.size }));
    }


    private compressing = false;
    private async compressTransactionLog(force?: boolean): Promise<void> {
        if (this.compressing) return;
        this.compressing = true;

        let existingDiskEntries = await this.rawStorage.getKeys();
        existingDiskEntries = existingDiskEntries.filter(x => x.endsWith(CHUNK_EXT));
        let compressNow = force || (
            this.entryCount > 100 && this.entryCount > this.cache.size * 3
            // NOTE: This compress check breaks down if we only have very large values, but... those
            //  don't work ANYWAYS (it is better to use one file per value instead).
            //  - Maybe we should throw, or at least warn, on sets of value > 1MB,
            //      at which point they should just use a file per value
            || existingDiskEntries.length > Math.max(10, Math.ceil(this.entryCount / 1000))
            || existingDiskEntries.length > 1000 * 10
        );
        if (!compressNow) return;
        console.log(`Compressing ${this.debugName} transaction log, ${this.entryCount} entries, ${this.cache.size} keys`);

        // Load off disk, in case there are other writes. We still race with them, but at least
        //  this reduces the race condition considerably

        sort(existingDiskEntries, x => parseInt(x));
        for (let entry of existingDiskEntries) {
            await this.loadTransactionFile(entry);
        }

        this.entryCount = this.cache.size;

        let nextStart = Math.max(...existingDiskEntries.map(x => parseInt(x))) + 1;

        let buffers: Buffer[] = [];
        for (const entry of this.cache.values()) {
            let buffer = this.serializeTransactionEntry(entry);
            buffers.push(buffer);
        }

        let newChunks = this.chunkBuffers(buffers);

        let curChunk = nextStart;
        for (let chunk of newChunks) {
            let file = this.getChunk(curChunk++);
            let content = chunk.buffer;
            let { header, headerBuffer } = this.getHeader(
                // AND, never compress the last one, otherwise we can't append to it!
                content.length >= FILE_ZIP_THRESHOLD && chunk !== newChunks[newChunks.length - 1]
            );
            if (header.zipped) {
                content = await Zip.gzip(content);
            }
            let buffer = Buffer.concat([headerBuffer, content]);
            await this.rawStorage.set(file, buffer);
        }

        // This is the ONLY time we can delete old files, as we know for sure the new file has all of our data.
        //  Any future readers won't know this, unless they write it themselves (or unless they audit it against
        //  the other generations, which is annoying).
        for (const file of existingDiskEntries) {
            await this.rawStorage.remove(file);
        }

        this.currentChunk = curChunk;
    }

    public async reset() {
        await fileLockSection(async () => {
            let existingDiskEntries = await this.rawStorage.getKeys();
            existingDiskEntries = existingDiskEntries.filter(x => x.endsWith(CHUNK_EXT));

            try {
                await Promise.allSettled(existingDiskEntries.map(x => this.rawStorage.remove(x)));
            } catch { }

            this.pendingAppends = [];
            this.cache.clear();
            this.currentChunk = 0;
            this.currentChunkSize = 0;
            this.entryCount = 0;
        });
    }
}
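TransactionStorage persists a key-to-Buffer map as an append-only, chunked transaction log on top of a raw storage backend: values of ZIP_THRESHOLD (4096) bytes or more are gzipped, writes are batched and deduped behind a short delay, and the log is periodically compacted into fresh chunk files. The sketch below is not part of the package; the in-memory backend is an assumption that only implements the methods the class is seen calling above (get, set, append, remove, getKeys), and the import path is illustrative — the real IStorageRaw contract is defined in package/storage/IStorage.ts and may differ.

// Minimal usage sketch, not part of the package. "rawStorage" is an assumed
// in-memory stand-in for an IStorageRaw implementation.
import { TransactionStorage } from "./TransactionStorage";

const files = new Map<string, Buffer>();
const rawStorage = {
    async get(key: string) { return files.get(key); },
    async set(key: string, value: Buffer) { files.set(key, value); },
    async append(key: string, value: Buffer) {
        files.set(key, Buffer.concat([files.get(key) ?? Buffer.alloc(0), value]));
    },
    async remove(key: string) { files.delete(key); },
    async getKeys() { return Array.from(files.keys()); },
};

async function demo() {
    // writeDelay of 0 flushes appends almost immediately (the default is 500ms).
    const storage = new TransactionStorage(rawStorage as any, "demo", 0);
    await storage.set("config", Buffer.from(JSON.stringify({ theme: "dark" })));
    console.log((await storage.get("config"))?.toString());
    await storage.remove("config");
}
void demo();

Because every set and remove is applied to the in-memory Map before being appended to the log, reads after the initial load never have to touch the backend.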
package/storage/fileSystemPointer.ts
ADDED
@@ -0,0 +1,81 @@
import { lazy } from "socket-function/src/caching";
import { nextId } from "socket-function/src/misc";

const objectStoreName = "fileSystemPointerDB";
const db = lazy(async () => {
    let db = indexedDB.open("fileSystemPointerDB_f298e962-bd8a-46b9-8098-25db633f4ed3", 1);
    db.addEventListener("upgradeneeded", () => {
        db.result.createObjectStore(objectStoreName, {});
    });
    await new Promise(resolve => db.addEventListener("success", resolve));
    return db.result;
});
async function getTransaction() {
    let database = await db();
    if (!database) return undefined;
    return database.transaction(objectStoreName, "readwrite").objectStore(objectStoreName);
}
async function write(key: string, value: FileSystemFileHandle | FileSystemDirectoryHandle) {
    let transaction = await getTransaction();
    if (!transaction) return;
    let req = transaction.put(value, key);
    await new Promise((resolve, reject) => {
        req.addEventListener("success", resolve);
        req.addEventListener("error", reject);
    });
}
async function read(key: string): Promise<FileSystemFileHandle | FileSystemDirectoryHandle | undefined> {
    let transaction = await getTransaction();
    if (!transaction) return;
    let req = transaction.get(key);
    await new Promise((resolve, reject) => {
        req.addEventListener("success", resolve);
        req.addEventListener("error", reject);
    });
    return req.result;
}

export type FileSystemPointer = string;
export async function storeFileSystemPointer(config: {
    mode: "read" | "readwrite";
    handle: FileSystemFileHandle | FileSystemDirectoryHandle;
}): Promise<FileSystemPointer> {
    await (config.handle as any).requestPermission({ mode: config.mode });
    let key = nextId() + "_" + config.mode;
    await write(key, config.handle);
    return key;
}
export async function deleteFileSystemPointer(pointer: FileSystemPointer) {
    let transaction = await getTransaction();
    if (!transaction) return;
    let req = transaction.delete(pointer);
    await new Promise((resolve, reject) => {
        req.addEventListener("success", resolve);
        req.addEventListener("error", reject);
    });
}

export async function getFileSystemPointer(config: {
    pointer: FileSystemPointer;
}): Promise<{
    // NOTE: We have to call requestPermission, so... user activation is required (as in,
    //  this needs to be called inside of a button).
    // IMPORTANT! In some circumstances user activation is not required (with multiple tabs,
    //  and potentially with https://developer.chrome.com/blog/persistent-permissions-for-the-file-system-access-api),
    //  so... trying to call onUserActivation immediately is a good idea (although it might throw).
    onUserActivation(modeOverride?: "read" | "readwrite"): Promise<FileSystemFileHandle | FileSystemDirectoryHandle>
} | undefined
> {
    const handle = await read(config.pointer);
    if (!handle) return;
    let mode = config.pointer.split("_").at(-1);
    return {
        async onUserActivation(modeOverride) {
            let testMode = await (handle as any).queryPermission({ mode: mode });
            if (testMode !== mode) {
                await (handle as any).requestPermission({ mode: modeOverride ?? mode });
            }
            return handle;
        }
    };
}
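fileSystemPointer persists File System Access API handles in IndexedDB so a file or directory the user picked once can be re-acquired in a later session; the returned pointer is a string key whose suffix records the requested permission mode, and onUserActivation re-requests permission when needed. A hypothetical usage sketch follows; the function names and localStorage key are made up for illustration.

// Illustrative sketch, not part of the package.
import { storeFileSystemPointer, getFileSystemPointer } from "./fileSystemPointer";

// In a click handler: pick a directory and remember a pointer to it.
async function pickAndRemember() {
    const handle = await window.showDirectoryPicker();
    const pointer = await storeFileSystemPointer({ mode: "readwrite", handle });
    localStorage.setItem("lastDirPointer", pointer);
}

// In a later session (again inside a click handler, so user activation is available):
async function reopen() {
    const pointer = localStorage.getItem("lastDirPointer");
    if (!pointer) return;
    const stored = await getFileSystemPointer({ pointer });
    if (!stored) return;
    const handle = await stored.onUserActivation();
    console.log("Re-acquired handle:", handle);
}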
package/storage/storage.d.ts
ADDED
@@ -0,0 +1,41 @@


declare module "node-forge" {
    declare type Ed25519PublicKey = {
        publicKeyBytes: Buffer;
    } & Buffer;
    declare type Ed25519PrivateKey = {
        privateKeyBytes: Buffer;
    } & Buffer;
    class ed25519 {
        static generateKeyPair(): { publicKey: Ed25519PublicKey, privateKey: Ed25519PrivateKey };
        static privateKeyToPem(key: Ed25519PrivateKey): string;
        static privateKeyFromPem(pem: string): Ed25519PrivateKey;
        static publicKeyToPem(key: Ed25519PublicKey): string;
        static publicKeyFromPem(pem: string): Ed25519PublicKey;
    }
}


interface FileSystemDirectoryHandle {
    [Symbol.asyncIterator](): AsyncIterator<[string, FileSystemFileHandle | FileSystemDirectoryHandle]>;
    requestPermission(config: { mode: "read" | "readwrite" }): Promise<void>;
}
interface FileSystemFileHandle {
    getFile(): File;
    createWritable(): FileSystemWritableFileStream;
}

interface Window {
    showSaveFilePicker(config?: {
        types: {
            description: string; accept: { [mimeType: string]: string[] }
        }[];
    }): Promise<FileSystemFileHandle>;
    showDirectoryPicker(): Promise<FileSystemDirectoryHandle>;
    showOpenFilePicker(config?: {
        types: {
            description: string; accept: { [mimeType: string]: string[] }
        }[];
    }): Promise<FileSystemFileHandle[]>;
}
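storage.d.ts supplies ambient typings for node-forge's ed25519 helpers and for the File System Access API surface used elsewhere in the package (for example in fileSystemPointer.ts above); newer TypeScript DOM libs ship some of these declarations natively. A small sketch relying only on the declarations above; the helper name is hypothetical.

// Sketch using the ambient declarations above; listDirectory is a hypothetical helper.
async function listDirectory() {
    const dir = await window.showDirectoryPicker();
    // The declared async iterator yields [name, handle] pairs.
    for await (const [name, handle] of dir) {
        console.log(name, handle);
    }
}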
package/tsconfig.json
ADDED
@@ -0,0 +1,31 @@
{
    "compilerOptions": {
        "strict": true,
        "module": "CommonJS",
        "esModuleInterop": true,
        "allowSyntheticDefaultImports": true,
        "moduleResolution": "node",
        "target": "ES2018",
        "lib": [
            "ESNext",
            "dom",
            "dom.iterable"
        ],
        "jsx": "react",
        "alwaysStrict": true,
        "jsxFactory": "preact.createElement",
        "jsxFragmentFactory": "preact.Fragment",
        "types": [
            "node",
        ],
        "experimentalDecorators": true,
        "emitDecoratorMetadata": false,
        "skipLibCheck": true,
        "inlineSourceMap": true,
        "inlineSources": true,
    },
    "exclude": [
        "node_modules",
        "*_venv",
    ]
}