@dxos/random-access-storage 0.8.4-main.1da679c → 0.8.4-main.21d9917

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,801 @@
+ import { createRequire } from 'node:module';const require = createRequire(import.meta.url);
+ import {
+ AbstractStorage,
+ Directory,
+ MemoryStorage,
+ StorageType,
+ __commonJS,
+ __require,
+ __toESM,
+ getFullPath,
+ stringDiff,
+ wrapFile
+ } from "../chunk-OW63I6TJ.mjs";
+
+ // ../../../node_modules/.pnpm/random-access-idb@1.2.2_patch_hash=207ec2404ef8b5e9e6d9de8ce83586d215ef77b94a7d0cadced03125874a1d9e/node_modules/random-access-idb/lib/blocks.js
+ var require_blocks = __commonJS({
+ "../../../node_modules/.pnpm/random-access-idb@1.2.2_patch_hash=207ec2404ef8b5e9e6d9de8ce83586d215ef77b94a7d0cadced03125874a1d9e/node_modules/random-access-idb/lib/blocks.js"(exports, module) {
+ module.exports = function(size, start, end) {
+ var result = [];
+ for (var n = Math.floor(start / size) * size; n < end; n += size) {
+ result.push({
+ block: Math.floor(n / size),
+ start: Math.max(n, start) % size,
+ end: Math.min(n + size, end) % size || size
+ });
+ }
+ return result;
+ };
+ }
+ });
+
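Editor's note: a hedged illustration (not part of the package) of what the blocks() helper above returns; the 4096-byte block size is the Store default defined later in this file, the byte range is made up.

// blocks(size, start, end) splits a byte range into block-aligned slices:
// blocks(4096, 5000, 10000) =>
// [
//   { block: 1, start: 904, end: 4096 },   // file bytes 5000..8191 (within block 1)
//   { block: 2, start: 0,   end: 1808 }    // file bytes 8192..9999 (within block 2)
// ]
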
+ // ../../../node_modules/.pnpm/random-access-idb@1.2.2_patch_hash=207ec2404ef8b5e9e6d9de8ce83586d215ef77b94a7d0cadced03125874a1d9e/node_modules/random-access-idb/index.js
+ var require_random_access_idb = __commonJS({
+ "../../../node_modules/.pnpm/random-access-idb@1.2.2_patch_hash=207ec2404ef8b5e9e6d9de8ce83586d215ef77b94a7d0cadced03125874a1d9e/node_modules/random-access-idb/index.js"(exports, module) {
+ var RandomAccess = __require("random-access-storage");
+ var inherits = __require("inherits");
+ var nextTick = __require("next-tick");
+ var once = __require("once");
+ var blocks = require_blocks();
+ var bufferFrom = __require("buffer-from");
+ var bufferAlloc = __require("buffer-alloc");
+ var DELIM2 = "\0";
+ var win = typeof window !== "undefined" ? window : typeof self !== "undefined" ? self : {};
+ module.exports = function(dbname, xopts) {
+ if (!xopts) xopts = {};
+ var idb2 = xopts.idb || (typeof win !== "undefined" ? win.indexedDB || win.mozIndexedDB || win.webkitIndexedDB || win.msIndexedDB : null);
+ if (!idb2) throw new Error("indexedDB not present and not given");
+ var db = null, dbqueue = [];
+ if (typeof idb2.open === "function") {
+ var req = idb2.open(dbname);
+ req.addEventListener("upgradeneeded", function() {
+ db = req.result;
+ db.createObjectStore("data");
+ });
+ req.addEventListener("success", function() {
+ db = req.result;
+ dbqueue.forEach(function(cb) {
+ cb(db);
+ });
+ dbqueue = null;
+ });
+ } else {
+ db = idb2;
+ }
+ function getdb(cb) {
+ if (db) nextTick(function() {
+ cb(db);
+ });
+ else dbqueue.push(cb);
+ }
+ return {
+ create: function(name, opts) {
+ if (typeof name === "object") {
+ opts = name;
+ name = opts.name;
+ }
+ if (!opts) opts = {};
+ opts.name = name;
+ return new Store(Object.assign({ db: getdb }, xopts, opts));
+ },
+ getdb
+ };
+ };
+ function Store(opts) {
+ if (!(this instanceof Store)) return new Store(opts);
+ RandomAccess.call(this);
+ if (!opts) opts = {};
+ if (typeof opts === "string") opts = { name: opts };
+ this.size = opts.size || 4096;
+ this.name = opts.name;
+ this.length = opts.length || 0;
+ this._getdb = opts.db;
+ }
+ inherits(Store, RandomAccess);
+ Store.prototype._blocks = function(i, j) {
+ return blocks(this.size, i, j);
+ };
+ Store.prototype._read = function(req) {
+ var self2 = this;
+ var buffers = [];
+ self2._store("readonly", function(err, store) {
+ if ((self2.length || 0) < req.offset + req.size) {
+ return req.callback(new Error("Could not satisfy length"));
+ }
+ if (err) return req.callback(err);
+ var offsets = self2._blocks(req.offset, req.offset + req.size);
+ var pending = offsets.length + 1;
+ var firstBlock = offsets.length > 0 ? offsets[0].block : 0;
+ var j = 0;
+ for (var i = 0; i < offsets.length; i++) (function(o) {
+ var key = self2.name + DELIM2 + o.block;
+ backify(store.get(key), function(err2, ev) {
+ if (err2) return req.callback(err2);
+ buffers[o.block - firstBlock] = ev.target.result ? bufferFrom(ev.target.result.subarray(o.start, o.end)) : bufferAlloc(o.end - o.start);
+ if (--pending === 0) req.callback(null, Buffer.concat(buffers));
+ });
+ })(offsets[i]);
+ if (--pending === 0) req.callback(null, Buffer.concat(buffers));
+ });
+ };
+ Store.prototype._write = function(req) {
+ var self2 = this;
+ self2._store("readwrite", function(err, store) {
+ if (err) return req.callback(err);
+ var offsets = self2._blocks(req.offset, req.offset + req.data.length);
+ var pending = 1;
+ var buffers = {};
+ for (var i = 0; i < offsets.length; i++) (function(o, i2) {
+ if (o.end - o.start === self2.size) return;
+ pending++;
+ var key = self2.name + DELIM2 + o.block;
+ backify(store.get(key), function(err2, ev) {
+ if (err2) return req.callback(err2);
+ buffers[i2] = bufferFrom(ev.target.result || bufferAlloc(self2.size));
+ if (--pending === 0) write(store, offsets, buffers);
+ });
+ })(offsets[i], i);
+ if (--pending === 0) write(store, offsets, buffers);
+ });
+ function write(store, offsets, buffers) {
+ var block;
+ for (var i = 0, j = 0; i < offsets.length; i++) {
+ var o = offsets[i];
+ var len = o.end - o.start;
+ if (len === self2.size) {
+ block = bufferFrom(req.data.slice(j, j + len));
+ } else {
+ block = buffers[i];
+ req.data.copy(block, o.start, j, j + len);
+ }
+ store.put(block, self2.name + DELIM2 + o.block);
+ j += len;
+ }
+ var length = Math.max(self2.length || 0, req.offset + req.data.length);
+ store.put(length, self2.name + DELIM2 + "length");
+ store.transaction.addEventListener("complete", function() {
+ self2.length = length;
+ req.callback(null);
+ });
+ store.transaction.addEventListener("error", function(err) {
+ req.callback(err);
+ });
+ }
+ };
+ Store.prototype._store = function(mode, cb) {
+ cb = once(cb);
+ var self2 = this;
+ self2._getdb(function(db) {
+ var tx = db.transaction(["data"], mode);
+ var store = tx.objectStore("data");
+ tx.addEventListener("error", cb);
+ cb(null, store);
+ });
+ };
+ Store.prototype._open = function(req) {
+ var self2 = this;
+ this._getdb(function(db) {
+ self2._store("readonly", function(err, store) {
+ backify(store.get(self2.name + DELIM2 + "length"), function(err2, ev) {
+ self2.length = ev.target.result || 0;
+ req.callback(null);
+ });
+ });
+ });
+ };
+ Store.prototype._close = function(req) {
+ this._getdb(function(db) {
+ req.callback();
+ });
+ };
+ Store.prototype._stat = function(req) {
+ var self2 = this;
+ nextTick(function() {
+ req.callback(null, { size: self2.length });
+ });
+ };
+ function backify(r, cb) {
+ r.addEventListener("success", function(ev) {
+ cb(null, ev);
+ });
+ r.addEventListener("error", cb);
+ }
+ }
+ });
+
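Editor's note: a minimal, hypothetical usage sketch of the bundled random-access-idb factory above; the database and file names are illustrative and the callback-style write/read comes from the random-access-storage interface the Store inherits.

// Not part of the package; illustrative only.
var storage = require_random_access_idb()("example-db");
var file = storage.create("example.bin");
file.write(0, Buffer.from("hello"), function (err) {
  if (err) throw err;
  file.read(0, 5, function (err, data) {
    // data is a Buffer reassembled from 4096-byte blocks stored in the
    // "data" object store under keys of the form "example.bin\0<blockIndex>".
  });
});
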
+ // src/browser/idb-storage.ts
+ var import_random_access_idb = __toESM(require_random_access_idb(), 1);
+ import { invariant } from "@dxos/invariant";
+ var __dxlog_file = "/__w/dxos/dxos/packages/common/random-access-storage/src/browser/idb-storage.ts";
+ var DELIM = "\0";
+ var IDbStorage = class extends AbstractStorage {
+ type = StorageType.IDB;
+ _db;
+ _store = "data";
+ _initialized = false;
+ _fileStorage;
+ constructor(path) {
+ super(path);
+ this._fileStorage = this._createFileStorage(path);
+ }
+ _createFileStorage(path) {
+ const database = (0, import_random_access_idb.default)(path);
+ let res;
+ this._db = new Promise((resolve) => {
+ res = resolve;
+ });
+ database.getdb(res);
+ return database.create;
+ }
+ async close() {
+ await this._closeFilesInPath("");
+ }
+ async reset() {
+ await this._closeFilesInPath("");
+ await this._remove("");
+ }
+ async _destroy() {
+ throw new Error("Unreachable");
+ }
+ _createFile(path, filename) {
+ const file = this._fileStorage(getFullPath(path, filename));
+ file.destroy = (cb) => {
+ void this._db.then((db) => {
+ const lowerBound = getFullPath(path, filename);
+ const upperBound = `${lowerBound}\uFFFF`;
+ const range = IDBKeyRange.bound(lowerBound, upperBound);
+ const transaction = db.transaction(this._store, "readwrite");
+ const objectStore = transaction.objectStore(this._store);
+ objectStore.delete(range);
+ transaction.oncomplete = () => {
+ file.destroyed = true;
+ file.unlinked = true;
+ file.closed = true;
+ cb(null);
+ };
+ transaction.onerror = () => cb(transaction.error);
+ });
+ };
+ file.deletable = true;
+ return file;
+ }
+ async _loadFiles(path) {
+ const db = await this._db;
+ invariant(db, "Database is not initialized.", {
+ F: __dxlog_file,
+ L: 85,
+ S: this,
+ A: [
+ "db",
+ "'Database is not initialized.'"
+ ]
+ });
+ const lowerBound = path;
+ const upperBound = `${path}\uFFFF`;
+ const range = IDBKeyRange.bound(lowerBound, upperBound);
+ const transaction = db.transaction(this._store);
+ const objectStore = transaction.objectStore(this._store);
+ const request = objectStore.openCursor(range);
+ return new Promise((resolve, reject) => {
+ transaction.onerror = () => {
+ reject(request.error);
+ };
+ request.onsuccess = (event) => {
+ const cursor = event.target.result;
+ if (cursor) {
+ const filename = String(cursor.key).split(DELIM)[0];
+ if (filename && !this._files.has(getFullPath(this.path, filename))) {
+ const file = this._createFile(path, filename);
+ this._files.set(getFullPath(this.path, filename), wrapFile(file, this.type));
+ }
+ cursor.continue();
+ } else {
+ resolve();
+ }
+ };
+ });
+ }
+ async _getFiles(path) {
+ if (!this._initialized) {
+ await this._loadFiles(this.path);
+ this._initialized = true;
+ }
+ return super._getFiles(path);
+ }
+ };
+
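Editor's note: a hedged sketch (not part of the package) of the IndexedDB key layout that IDbStorage inherits from the Store above, and the prefix-range trick that its destroy override and _loadFiles() rely on; fullPath is a placeholder for getFullPath(path, filename).

// One entry per 4096-byte block, plus a length record:
//   `${fullPath}\u0000<blockIndex>` -> Uint8Array
//   `${fullPath}\u0000length`       -> number (current byte length)
// Deleting or enumerating a file then reduces to a key range over the prefix:
const range = IDBKeyRange.bound(fullPath, `${fullPath}\uFFFF`);
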
+ // src/browser/web-fs.ts
+ import { EventEmitter } from "node:events";
+ import { callbackify } from "node:util";
+ import { synchronized } from "@dxos/async";
+ import { invariant as invariant2 } from "@dxos/invariant";
+ import { log } from "@dxos/log";
+ import { TimeSeriesCounter, trace } from "@dxos/tracing";
+ function _ts_decorate(decorators, target, key, desc) {
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
+ }
+ var __dxlog_file2 = "/__w/dxos/dxos/packages/common/random-access-storage/src/browser/web-fs.ts";
+ var WebFS = class {
+ path;
+ _files = /* @__PURE__ */ new Map();
+ type = StorageType.WEBFS;
+ _root;
+ constructor(path) {
+ this.path = path;
+ }
+ get size() {
+ return this._files.size;
+ }
+ _getFiles(path) {
+ const fullName = this._getFullFilename(this.path, path);
+ return new Map([
+ ...this._files.entries()
+ ].filter(([path2, file]) => {
+ return path2.includes(fullName) && !file.destroyed;
+ }));
+ }
+ async _list(path) {
+ const fullName = this._getFullFilename(path);
+ const root = await this._initialize();
+ const entries = [];
+ for await (const entry of root.keys()) {
+ if (entry.startsWith(fullName + "_") && !entry.endsWith(".crswap")) {
+ entries.push(entry.slice(fullName.length + 1));
+ }
+ }
+ return entries;
+ }
+ async _initialize() {
+ if (this._root) {
+ return this._root;
+ }
+ this._root = await navigator.storage.getDirectory();
+ invariant2(this._root, "Root is undefined", {
+ F: __dxlog_file2,
+ L: 74,
+ S: this,
+ A: [
+ "this._root",
+ "'Root is undefined'"
+ ]
+ });
+ return this._root;
+ }
+ createDirectory(sub = "") {
+ return new Directory({
+ type: this.type,
+ path: getFullPath(this.path, sub),
+ list: (path) => this._list(path),
+ getOrCreateFile: (...args) => this.getOrCreateFile(...args),
+ remove: () => this._delete(sub),
+ onFlush: async () => {
+ await Promise.all(Array.from(this._getFiles(sub)).map(([_, file]) => file.flush()));
+ }
+ });
+ }
+ getOrCreateFile(path, filename, opts) {
+ const fullName = this._getFullFilename(path, filename);
+ const existingFile = this._files.get(fullName);
+ if (existingFile) {
+ return existingFile;
+ }
+ const file = this._createFile(fullName);
+ this._files.set(fullName, file);
+ return file;
+ }
+ _createFile(fullName) {
+ return new WebFile({
+ fileName: fullName,
+ file: this._initialize().then((root) => root.getFileHandle(fullName, {
+ create: true
+ })),
+ destroy: async () => {
+ this._files.delete(fullName);
+ const root = await this._initialize();
+ return root.removeEntry(fullName);
+ }
+ });
+ }
+ async _delete(path) {
+ await Promise.all(Array.from(this._getFiles(path)).map(async ([path2, file]) => {
+ await file.destroy().catch((err) => log.warn(err, void 0, {
+ F: __dxlog_file2,
+ L: 117,
+ S: this,
+ C: (f, a) => f(...a)
+ }));
+ this._files.delete(path2);
+ }));
+ }
+ async reset() {
+ await this._initialize();
+ for await (const filename of await this._root.keys()) {
+ await this._root.removeEntry(filename, {
+ recursive: true
+ }).catch((err) => log.warn("failed to remove an entry", {
+ filename,
+ err
+ }, {
+ F: __dxlog_file2,
+ L: 127,
+ S: this,
+ C: (f, a) => f(...a)
+ }));
+ this._files.delete(filename);
+ }
+ this._root = void 0;
+ }
+ async close() {
+ await Promise.all(Array.from(this._files.values()).map((file) => {
+ return file.close().catch((e) => log.warn("failed to close a file", {
+ file: file.fileName,
+ e
+ }, {
+ F: __dxlog_file2,
+ L: 137,
+ S: this,
+ C: (f, a) => f(...a)
+ }));
+ }));
+ }
+ _getFullFilename(path, filename) {
+ if (filename) {
+ return getFullPath(path, filename).replace(/\//g, "_");
+ } else {
+ return path.replace(/\//g, "_");
+ }
+ }
+ async getDiskInfo() {
+ let used = 0;
+ const recurse = async (handle) => {
+ const promises = [];
+ for await (const entry of handle.values()) {
+ promises.push((async () => {
+ switch (entry.kind) {
+ case "file":
+ used += await entry.getFile().then((f) => f.size);
+ break;
+ case "directory":
+ await recurse(entry);
+ break;
+ }
+ })());
+ }
+ await Promise.all(promises);
+ };
+ await recurse(this._root);
+ return {
+ used
+ };
+ }
+ };
+ _ts_decorate([
+ synchronized
+ ], WebFS.prototype, "_initialize", null);
+ var WebFile = class extends EventEmitter {
+ fileName;
+ _fileHandle;
+ _destroy;
+ /**
+ * Current view of the file contents.
+ */
+ _buffer = null;
+ _loadBufferPromise = null;
+ _flushScheduled = false;
+ _flushPromise = Promise.resolve();
+ /**
+ * Used to discard unnecessary scheduled flushes.
+ * If _flushNow() is called with a lower sequence number it should early exit.
+ */
+ _flushSequence = 0;
+ //
+ // Metrics
+ //
+ _flushes = new TimeSeriesCounter();
+ _operations = new TimeSeriesCounter();
+ _reads = new TimeSeriesCounter();
+ _readBytes = new TimeSeriesCounter();
+ _writes = new TimeSeriesCounter();
+ _writeBytes = new TimeSeriesCounter();
+ get _bufferSize() {
+ return this._buffer?.length;
+ }
+ constructor({ fileName, file, destroy }) {
+ super();
+ this.fileName = fileName;
+ this._fileHandle = file;
+ this._destroy = destroy;
+ void this._loadBufferGuarded();
+ }
+ type = StorageType.WEBFS;
+ //
+ // random-access-storage library compatibility
+ //
+ // TODO(dmaretskyi): Are those all needed?
+ opened = true;
+ suspended = false;
+ closed = false;
+ unlinked = false;
+ writing = false;
+ readable = true;
+ writable = true;
+ deletable = true;
+ truncatable = true;
+ statable = true;
+ destroyed = false;
+ directory = "";
+ // TODO(dmaretskyi): is this used?
+ filename = "";
+ native = {
+ write: callbackify(this.write.bind(this)),
+ read: callbackify(this.read.bind(this)),
+ del: callbackify(this.del.bind(this)),
+ stat: callbackify(this.stat.bind(this)),
+ destroy: callbackify(this.destroy.bind(this)),
+ truncate: callbackify(this.truncate?.bind(this))
+ };
+ async _loadBuffer() {
+ const fileHandle = await this._fileHandle;
+ const file = await fileHandle.getFile();
+ this._buffer = new Uint8Array(await file.arrayBuffer());
+ }
+ async _loadBufferGuarded() {
+ await (this._loadBufferPromise ??= this._loadBuffer());
+ }
+ // Do not call directly, use _flushLater or _flushNow.
+ async _flushCache(sequence) {
+ if (this.destroyed || sequence < this._flushSequence) {
+ return;
+ }
+ this._flushSequence = sequence + 1;
+ this._flushes.inc();
+ await this._loadBufferGuarded();
+ invariant2(this._buffer, void 0, {
+ F: __dxlog_file2,
+ L: 301,
+ S: this,
+ A: [
+ "this._buffer",
+ ""
+ ]
+ });
+ const fileHandle = await this._fileHandle;
+ const writable = await fileHandle.createWritable({
+ keepExistingData: true
+ });
+ await writable.write({
+ type: "write",
+ data: this._buffer,
+ position: 0
+ });
+ await writable.close();
+ }
+ _flushLater() {
+ if (this._flushScheduled) {
+ return;
+ }
+ const sequence = this._flushSequence;
+ setTimeout(async () => {
+ await this._flushPromise;
+ this._flushScheduled = false;
+ this._flushPromise = this._flushCache(sequence).catch((err) => log.warn(err, void 0, {
+ F: __dxlog_file2,
+ L: 319,
+ S: this,
+ C: (f, a) => f(...a)
+ }));
+ });
+ this._flushScheduled = true;
+ }
+ async _flushNow() {
+ await this._flushPromise;
+ this._flushPromise = this._flushCache(this._flushSequence).catch((err) => log.warn(err, void 0, {
+ F: __dxlog_file2,
+ L: 327,
+ S: this,
+ C: (f, a) => f(...a)
+ }));
+ await this._flushPromise;
+ }
+ async read(offset, size) {
+ this.assertNotDestroyed("Read");
+ this._operations.inc();
+ this._reads.inc();
+ this._readBytes.inc(size);
+ if (!this._buffer) {
+ await this._loadBufferGuarded();
+ invariant2(this._buffer, void 0, {
+ F: __dxlog_file2,
+ L: 340,
+ S: this,
+ A: [
+ "this._buffer",
+ ""
+ ]
+ });
+ }
+ if (offset + size > this._buffer.length) {
+ throw new Error("Read out of bounds");
+ }
+ return Buffer.from(this._buffer.slice(offset, offset + size));
+ }
+ async write(offset, data) {
+ this.assertNotDestroyed("Write");
+ this._operations.inc();
+ this._writes.inc();
+ this._writeBytes.inc(data.length);
+ if (!this._buffer) {
+ await this._loadBufferGuarded();
+ invariant2(this._buffer, void 0, {
+ F: __dxlog_file2,
+ L: 360,
+ S: this,
+ A: [
+ "this._buffer",
+ ""
+ ]
+ });
+ }
+ if (offset + data.length <= this._buffer.length) {
+ this._buffer.set(data, offset);
+ } else {
+ const newCache = new Uint8Array(offset + data.length);
+ newCache.set(this._buffer);
+ newCache.set(data, offset);
+ this._buffer = newCache;
+ }
+ this._flushLater();
+ }
+ async del(offset, size) {
+ this.assertNotDestroyed("Del");
+ this._operations.inc();
+ if (offset < 0 || size <= 0) {
+ return;
+ }
+ if (!this._buffer) {
+ await this._loadBufferGuarded();
+ invariant2(this._buffer, void 0, {
+ F: __dxlog_file2,
+ L: 387,
+ S: this,
+ A: [
+ "this._buffer",
+ ""
+ ]
+ });
+ }
+ let leftoverSize = 0;
+ if (offset + size < this._buffer.length) {
+ leftoverSize = this._buffer.length - (offset + size);
+ this._buffer.set(this._buffer.slice(offset + size, offset + size + leftoverSize), offset);
+ }
+ this._buffer = this._buffer.slice(0, offset + leftoverSize);
+ this._flushLater();
+ }
+ async stat() {
+ this.assertNotDestroyed("Truncate");
+ this._operations.inc();
+ if (!this._buffer) {
+ await this._loadBufferGuarded();
+ invariant2(this._buffer, void 0, {
+ F: __dxlog_file2,
+ L: 409,
+ S: this,
+ A: [
+ "this._buffer",
+ ""
+ ]
+ });
+ }
+ return {
+ size: this._buffer.length
+ };
+ }
+ async truncate(offset) {
+ this.assertNotDestroyed("Truncate");
+ this._operations.inc();
+ if (!this._buffer) {
+ await this._loadBufferGuarded();
+ invariant2(this._buffer, void 0, {
+ F: __dxlog_file2,
+ L: 424,
+ S: this,
+ A: [
+ "this._buffer",
+ ""
+ ]
+ });
+ }
+ this._buffer = this._buffer.slice(0, offset);
+ this._flushLater();
+ }
+ async flush() {
+ this.assertNotDestroyed("Flush");
+ await this._flushNow();
+ }
+ /**
+ * It's best to avoid using this method as it doesn't really close a file.
+ * We could update the _opened flag and add a guard like for destroyed, but this would break
+ * the FileSystemFileHandle sharing required for browser tests to run, where writes are
+ * not immediately visible if using different file handles.
+ */
+ async close() {
+ await this._flushNow();
+ }
+ async destroy() {
+ if (!this.destroyed) {
+ await this._flushNow();
+ this.destroyed = true;
+ return await this._destroy();
+ }
+ }
+ assertNotDestroyed(operation) {
+ if (this.destroyed) {
+ throw new Error(`${operation} on a destroyed or closed file`);
+ }
+ }
+ };
+ _ts_decorate([
+ trace.info()
+ ], WebFile.prototype, "fileName", void 0);
+ _ts_decorate([
+ trace.metricsCounter()
+ ], WebFile.prototype, "_flushes", void 0);
+ _ts_decorate([
+ trace.metricsCounter()
+ ], WebFile.prototype, "_operations", void 0);
+ _ts_decorate([
+ trace.metricsCounter()
+ ], WebFile.prototype, "_reads", void 0);
+ _ts_decorate([
+ trace.metricsCounter()
+ ], WebFile.prototype, "_readBytes", void 0);
+ _ts_decorate([
+ trace.metricsCounter()
+ ], WebFile.prototype, "_writes", void 0);
+ _ts_decorate([
+ trace.metricsCounter()
+ ], WebFile.prototype, "_writeBytes", void 0);
+ _ts_decorate([
+ trace.info()
+ ], WebFile.prototype, "_bufferSize", null);
+ _ts_decorate([
+ synchronized
+ ], WebFile.prototype, "destroy", null);
+
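Editor's note: a hedged, simplified distillation (illustrative names, not the package API) of the write-behind scheme WebFile implements above: writes mutate an in-memory buffer synchronously, a flush is scheduled on a macrotask so bursts coalesce, and a sequence counter lets a newer completed flush invalidate an older scheduled one.

// Not part of the package; a minimal sketch assuming persist(buffer) writes to disk.
class WriteBehind {
  buffer = new Uint8Array(0);
  seq = 0;                       // bumped each time the buffer is persisted
  scheduled = false;
  chain = Promise.resolve();     // serializes flushes

  constructor(persist) {
    this.persist = persist;      // persist: (Uint8Array) => Promise<void>
  }

  write(offset, data) {
    // Patch in place, or grow the buffer, exactly as WebFile.write() does.
    if (offset + data.length <= this.buffer.length) {
      this.buffer.set(data, offset);
    } else {
      const grown = new Uint8Array(offset + data.length);
      grown.set(this.buffer);
      grown.set(data, offset);
      this.buffer = grown;
    }
    this.flushLater();
  }

  flushLater() {
    if (this.scheduled) return;  // coalesce a burst of writes into one flush
    const seqAtSchedule = this.seq;
    this.scheduled = true;
    setTimeout(() => {
      this.scheduled = false;
      this.chain = this.chain.then(() => this.flushIfStale(seqAtSchedule));
    });
  }

  async flushNow() {
    await this.chain;
    this.chain = this.flushIfStale(this.seq);
    await this.chain;
  }

  async flushIfStale(seq) {
    if (seq < this.seq) return;  // a newer flush already persisted this state
    this.seq = seq + 1;
    await this.persist(this.buffer);
  }
}
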
+ // src/browser/storage.ts
+ var createStorage = ({ type, root = "" } = {}) => {
+ if (type === void 0) {
+ return new IDbStorage(root);
+ }
+ switch (type) {
+ case StorageType.RAM: {
+ return new MemoryStorage(root);
+ }
+ case StorageType.IDB:
+ case StorageType.CHROME:
+ case StorageType.FIREFOX: {
+ return new IDbStorage(root);
+ }
+ case StorageType.WEBFS: {
+ return new WebFS(root);
+ }
+ default: {
+ throw new Error(`Invalid type: ${type}`);
+ }
+ }
+ };
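Editor's note: a hypothetical usage of the browser factory above (the root path is illustrative, not part of the package).

const storage = createStorage({ type: StorageType.WEBFS, root: "dxos/storage" });
const directory = storage.createDirectory("feeds");
// Omitting the type falls back to the IndexedDB-backed implementation:
const fallback = createStorage({ root: "dxos/storage" });
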
+ export {
+ AbstractStorage,
+ Directory,
+ MemoryStorage,
+ StorageType,
+ createStorage,
+ getFullPath,
+ stringDiff,
+ wrapFile
+ };
+ //# sourceMappingURL=index.mjs.map