@fireproof/core 0.16.6 → 0.17.0

Files changed (35)
  1. package/README.md +15 -6
  2. package/dist/browser/fireproof.cjs +79 -62
  3. package/dist/browser/fireproof.cjs.map +1 -1
  4. package/dist/browser/fireproof.d.cts +4 -4
  5. package/dist/browser/fireproof.d.ts +4 -4
  6. package/dist/browser/fireproof.global.js +730 -281
  7. package/dist/browser/fireproof.global.js.map +1 -1
  8. package/dist/browser/fireproof.js +81 -64
  9. package/dist/browser/fireproof.js.map +1 -1
  10. package/dist/browser/metafile-cjs.json +1 -1
  11. package/dist/browser/metafile-esm.json +1 -1
  12. package/dist/browser/metafile-iife.json +1 -1
  13. package/dist/memory/fireproof.cjs +79 -62
  14. package/dist/memory/fireproof.cjs.map +1 -1
  15. package/dist/memory/fireproof.d.cts +4 -4
  16. package/dist/memory/fireproof.d.ts +4 -4
  17. package/dist/memory/fireproof.global.js +730 -281
  18. package/dist/memory/fireproof.global.js.map +1 -1
  19. package/dist/memory/fireproof.js +81 -64
  20. package/dist/memory/fireproof.js.map +1 -1
  21. package/dist/memory/metafile-cjs.json +1 -1
  22. package/dist/memory/metafile-esm.json +1 -1
  23. package/dist/memory/metafile-iife.json +1 -1
  24. package/dist/node/fireproof.cjs +79 -62
  25. package/dist/node/fireproof.cjs.map +1 -1
  26. package/dist/node/fireproof.d.cts +4 -4
  27. package/dist/node/fireproof.d.ts +4 -4
  28. package/dist/node/fireproof.global.js +730 -281
  29. package/dist/node/fireproof.global.js.map +1 -1
  30. package/dist/node/fireproof.js +81 -64
  31. package/dist/node/fireproof.js.map +1 -1
  32. package/dist/node/metafile-cjs.json +1 -1
  33. package/dist/node/metafile-esm.json +1 -1
  34. package/dist/node/metafile-iife.json +1 -1
  35. package/package.json +4 -7
@@ -932,7 +932,7 @@ var Fireproof = (() => {
  this.tail = this.head;
  this.states = null;
  }
- var create14 = function create15() {
+ var create18 = function create19() {
  return util.Buffer ? function create_buffer_setup() {
  return (Writer.create = function create_buffer() {
  return new BufferWriter();
@@ -941,7 +941,7 @@ var Fireproof = (() => {
  return new Writer();
  };
  };
- Writer.create = create14();
+ Writer.create = create18();
  Writer.alloc = function alloc3(size) {
  return new util.Array(size);
  };
@@ -1087,7 +1087,7 @@ var Fireproof = (() => {
  };
  Writer._configure = function(BufferWriter_) {
  BufferWriter = BufferWriter_;
- Writer.create = create14();
+ Writer.create = create18();
  BufferWriter._configure();
  };
  }
@@ -1170,14 +1170,14 @@ var Fireproof = (() => {
  return new Reader(buffer2);
  throw Error("illegal buffer");
  };
- var create14 = function create15() {
+ var create18 = function create19() {
  return util.Buffer ? function create_buffer_setup(buffer2) {
  return (Reader.create = function create_buffer(buffer3) {
  return util.Buffer.isBuffer(buffer3) ? new BufferReader(buffer3) : create_array(buffer3);
  })(buffer2);
  } : create_array;
  };
- Reader.create = create14();
+ Reader.create = create18();
  Reader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */
  util.Array.prototype.slice;
  Reader.prototype.uint32 = /* @__PURE__ */ function read_uint32_setup() {
@@ -1345,7 +1345,7 @@ var Fireproof = (() => {
  };
  Reader._configure = function(BufferReader_) {
  BufferReader = BufferReader_;
- Reader.create = create14();
+ Reader.create = create18();
  BufferReader._configure();
  var fn = util.Long ? "toLong" : (
  /* istanbul ignore next */
@@ -1500,13 +1500,13 @@ var Fireproof = (() => {
  protobuf.util = require_minimal();
  protobuf.rpc = require_rpc();
  protobuf.roots = require_roots();
- protobuf.configure = configure5;
- function configure5() {
+ protobuf.configure = configure6;
+ function configure6() {
  protobuf.util._configure();
  protobuf.Writer._configure(protobuf.BufferWriter);
  protobuf.Reader._configure(protobuf.BufferReader);
  }
- configure5();
+ configure6();
  }
  });
 
@@ -4067,24 +4067,41 @@ var Fireproof = (() => {
  var cidSymbol = Symbol.for("@ipld/js-cid/CID");
 
  // ../../node_modules/.pnpm/multiformats@12.1.3/node_modules/multiformats/src/link.js
+ var isLink = (value) => {
+ if (value == null) {
+ return false;
+ }
+ const withSlash = (
+ /** @type {{'/'?: Uint8Array, bytes: Uint8Array}} */
+ value
+ );
+ if (withSlash["/"] != null && withSlash["/"] === withSlash.bytes) {
+ return true;
+ }
+ const withAsCID = (
+ /** @type {{'asCID'?: unknown}} */
+ value
+ );
+ if (withAsCID.asCID === value) {
+ return true;
+ }
+ return false;
+ };
  var parse = (source, base4) => CID.parse(source, base4);
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/block.js
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/block.js
  var MemoryBlockstore = class {
  /** @type {Map<string, Uint8Array>} */
  #blocks = /* @__PURE__ */ new Map();
  /**
- * @param {Array<AnyBlock>} [blocks]
+ * @param {Array<import('multiformats').Block>} [blocks]
  */
  constructor(blocks) {
  if (blocks) {
  this.#blocks = new Map(blocks.map((b) => [b.cid.toString(), b.bytes]));
  }
  }
- /**
- * @param {import('./link').AnyLink} cid
- * @returns {Promise<AnyBlock | undefined>}
- */
+ /** @type {API.BlockFetcher['get']} */
  async get(cid) {
  const bytes = this.#blocks.get(cid.toString());
  if (!bytes)
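The `isLink` predicate added above recognizes an IPLD link either by the `'/'`-is-the-same-`Uint8Array`-as-`bytes` shape or by the `asCID` self-reference. A sketch of what it accepts, assuming a `CID` from a recent multiformats release (which exposes both getters); the sample CID string is illustrative only:

    import { CID } from "multiformats/cid"

    const cid = CID.parse("bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi")
    isLink(cid)                 // true: cid["/"] === cid.bytes and cid.asCID === cid
    isLink({ "/": "bafy..." })  // false: "/" must alias the bytes, not be a string
    isLink(null)                // false: nullish values short-circuit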
@@ -4092,24 +4109,24 @@ var Fireproof = (() => {
  return { cid, bytes };
  }
  /**
- * @param {import('./link').AnyLink} cid
+ * @param {API.UnknownLink} cid
  * @param {Uint8Array} bytes
  */
  async put(cid, bytes) {
  this.#blocks.set(cid.toString(), bytes);
  }
  /**
- * @param {import('./link').AnyLink} cid
+ * @param {API.UnknownLink} cid
  * @param {Uint8Array} bytes
  */
  putSync(cid, bytes) {
  this.#blocks.set(cid.toString(), bytes);
  }
- /** @param {import('./link').AnyLink} cid */
+ /** @param {API.UnknownLink} cid */
  async delete(cid) {
  this.#blocks.delete(cid.toString());
  }
- /** @param {import('./link').AnyLink} cid */
+ /** @param {API.UnknownLink} cid */
  deleteSync(cid) {
  this.#blocks.delete(cid.toString());
  }
@@ -4120,13 +4137,13 @@ var Fireproof = (() => {
  }
  };
  var MultiBlockFetcher = class {
- /** @type {BlockFetcher[]} */
+ /** @type {API.BlockFetcher[]} */
  #fetchers;
- /** @param {BlockFetcher[]} fetchers */
+ /** @param {API.BlockFetcher[]} fetchers */
  constructor(...fetchers) {
  this.#fetchers = fetchers;
  }
- /** @param {import('./link').AnyLink} link */
+ /** @type {API.BlockFetcher['get']} */
  async get(link2) {
  for (const f of this.#fetchers) {
  const v = await f.get(link2);
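`MultiBlockFetcher.get` tries each fetcher in order and returns the first block found, which is how pail layers its in-memory additions over a backing store. A sketch using the classes above, with block creation via multiformats (illustrative values; `coldStore` is a placeholder for any other BlockFetcher):

    import * as Block from "multiformats/block"
    import * as codec from "@ipld/dag-cbor"
    import { sha256 } from "multiformats/hashes/sha2"

    const blk = await Block.encode({ value: { hello: "world" }, codec, hasher: sha256 })
    const hot = new MemoryBlockstore()
    await hot.put(blk.cid, blk.bytes)
    const fetcher = new MultiBlockFetcher(hot, coldStore)
    await fetcher.get(blk.cid)  // -> { cid, bytes } from the first store that has it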
@@ -4378,10 +4395,10 @@ var Fireproof = (() => {
  * @param {any} [value]
  * @param {number} [encodedLength]
  */
- constructor(type2, value, encodedLength) {
+ constructor(type2, value, encodedLength2) {
  this.type = type2;
  this.value = value;
- this.encodedLength = encodedLength;
+ this.encodedLength = encodedLength2;
  this.encodedBytes = void 0;
  this.byteValue = void 0;
  }
@@ -10183,6 +10200,22 @@ You can use close({ resize: true }) to resize header`);
  async commit(t, done, opts = { noLoader: false, compact: false }) {
  return this.commitQueue.enqueue(() => this._commitInternal(t, done, opts));
  }
+ async cacheTransaction(t) {
+ for await (const block of t.entries()) {
+ const sBlock = block.cid.toString();
+ if (!this.getBlockCache.has(sBlock)) {
+ this.getBlockCache.set(sBlock, block);
+ }
+ }
+ }
+ async cacheCarReader(reader) {
+ for await (const block of reader.blocks()) {
+ const sBlock = block.cid.toString();
+ if (!this.getBlockCache.has(sBlock)) {
+ this.getBlockCache.set(sBlock, block);
+ }
+ }
+ }
  async _commitInternal(t, done, opts = { noLoader: false, compact: false }) {
  await this.ready;
  const header = done;
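The two helpers added above build a per-loader block cache keyed by CID string: `cacheTransaction` walks a just-committed transaction, `cacheCarReader` walks a freshly loaded CAR. Assuming `getBlockCache` is a plain `Map` on the loader (consistent with the `has`/`set` calls here and the `get` in the `getBlock` hunk below), the resulting read path is, in sketch (hypothetical helper, not part of the bundle):

    async function readBlock(loader, cid) {
      const sCid = cid.toString()
      // 1. Any block seen by cacheTransaction/cacheCarReader is served from memory.
      if (loader.getBlockCache.has(sCid)) return loader.getBlockCache.get(sCid)
      // 2. Otherwise a CAR is loaded, all of its blocks are cached at once via
      //    cacheCarReader, and the cache is consulted again (see getBlock below).
      const reader = await loader.loadCar(loader.carLog[0])
      await loader.cacheCarReader(reader)
      return loader.getBlockCache.get(sCid)
    }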
@@ -10190,6 +10223,7 @@ You can use close({ resize: true }) to resize header`);
  let roots = await this.prepareRoots(fp, t);
  const { cid, bytes } = await this.prepareCarFile(roots[0], t, !!opts.public);
  await this.carStore.save({ cid, bytes });
+ await this.cacheTransaction(t);
  const newDbMeta = { car: cid, key: this.key || null };
  await this.remoteWAL.enqueue(newDbMeta, opts);
  await this.metaStore.save(newDbMeta);
@@ -10242,13 +10276,8 @@ You can use close({ resize: true }) to resize header`);
  }
  async *entries() {
  await this.ready;
- for (const cid of this.carLog) {
- const reader = await this.loadCar(cid);
- if (!reader)
- throw new Error(`missing car reader ${cid.toString()}`);
- for await (const block of reader.blocks()) {
- yield block;
- }
+ for (const [, block] of this.getBlockCache) {
+ yield block;
  }
  }
  async getBlock(cid) {
@@ -10263,10 +10292,9 @@ You can use close({ resize: true }) to resize header`);
  if (!reader) {
  throw new Error(`missing car reader ${carCid.toString()}`);
  }
- const block = await reader.get(cid);
- if (block) {
- return block;
- }
+ await this.cacheCarReader(reader);
+ if (this.getBlockCache.has(sCid))
+ return this.getBlockCache.get(sCid);
  throw new Error(`block not in reader: ${cid.toString()}`);
  })
  ).catch(() => void 0);
@@ -10319,7 +10347,7 @@ You can use close({ resize: true }) to resize header`);
  }
  async ensureDecryptedReader(reader) {
  const theKey = await this._getKey();
- if (!(theKey && this.ebOpts.crypto))
+ if (this.ebOpts.public || !(theKey && this.ebOpts.crypto))
  return reader;
  const { blocks, root: root2 } = await decodeEncryptedCar(this.ebOpts.crypto, theKey, reader);
  return {
@@ -10397,8 +10425,10 @@ You can use close({ resize: true }) to resize header`);
  if (this.ebOpts.autoCompact && this.loader.carLog.length > this.ebOpts.autoCompact) {
  setTimeout(() => void this.compact(), 10);
  }
- if (car)
+ if (car) {
+ this.transactions.delete(t);
  return { ...done, car };
+ }
  throw new Error("failed to commit car");
  }
  return done;
@@ -10449,13 +10479,19 @@ You can use close({ resize: true }) to resize header`);
  }
  async *entries() {
  const seen = /* @__PURE__ */ new Set();
- for (const t of this.transactions) {
- for await (const blk of t.entries()) {
- if (seen.has(blk.cid.toString()))
- continue;
- seen.add(blk.cid.toString());
+ if (this.loader) {
+ for await (const blk of this.loader.entries()) {
  yield blk;
  }
+ } else {
+ for (const t of this.transactions) {
+ for await (const blk of t.entries()) {
+ if (seen.has(blk.cid.toString()))
+ continue;
+ seen.add(blk.cid.toString());
+ yield blk;
+ }
+ }
  }
  }
  };
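With a loader attached, the blockstore's `entries()` above now streams the loader's block cache instead of re-walking every transaction (committed transactions are also dropped from `this.transactions` after commit), so the `seen` set only matters on the fallback path. In sketch, assuming a blockstore that has committed through a loader:

    for await (const blk of blockstore.entries()) {
      // yields each cached block once; Map keys are CID strings,
      // so no duplicate CIDs occur on the loader path
      console.log(blk.cid.toString())
    }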
@@ -18999,8 +19035,8 @@ ${key.data.toString("base64")}
  return await (0, import_promises.writeFile)(path, data);
  }
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/clock.js
- async function advance(blocks, head, event) {
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/clock/index.js
+ var advance = async (blocks, head, event) => {
  const events = new EventFetcher(blocks);
  const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
  if (headmap.has(event.toString()))
@@ -19022,11 +19058,11 @@ ${key.data.toString("base64")}
  }
  }
  return head.concat(event);
- }
+ };
  var EventBlock = class extends Block {
  /**
  * @param {object} config
- * @param {EventLink<T>} config.cid
+ * @param {API.EventLink<T>} config.cid
  * @param {Event} config.value
  * @param {Uint8Array} config.bytes
  * @param {string} config.prefix
@@ -19038,20 +19074,20 @@ ${key.data.toString("base64")}
  /**
  * @template T
  * @param {T} data
- * @param {EventLink<T>[]} [parents]
+ * @param {API.EventLink<T>[]} [parents]
  */
  static create(data, parents) {
  return encodeEventBlock({ data, parents: parents ?? [] });
  }
  };
  var EventFetcher = class {
- /** @param {import('./block').BlockFetcher} blocks */
+ /** @param {API.BlockFetcher} blocks */
  constructor(blocks) {
  this._blocks = blocks;
  }
  /**
- * @param {EventLink<T>} link
- * @returns {Promise<EventBlockView<T>>}
+ * @param {API.EventLink<T>} link
+ * @returns {Promise<API.EventBlockView<T>>}
  */
  async get(link2) {
  const block = await this._blocks.get(link2);
@@ -19060,15 +19096,15 @@ ${key.data.toString("base64")}
  return decodeEventBlock(block.bytes);
  }
  };
- async function encodeEventBlock(value) {
+ var encodeEventBlock = async (value) => {
  const { cid, bytes } = await encode7({ value, codec: src_exports, hasher: sha256 });
  return new Block({ cid, value, bytes });
- }
- async function decodeEventBlock(bytes) {
+ };
+ var decodeEventBlock = async (bytes) => {
  const { cid, value } = await decode11({ bytes, codec: src_exports, hasher: sha256 });
  return new Block({ cid, value, bytes });
- }
- async function contains(events, a, b) {
+ };
+ var contains = async (events, a, b) => {
  if (a.toString() === b.toString())
  return true;
  const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
@@ -19089,8 +19125,8 @@ ${key.data.toString("base64")}
  links3.push(...event.parents);
  }
  return false;
- }
- async function* vis(blocks, head, options = {}) {
+ };
+ var vis = async function* (blocks, head, options = {}) {
  const renderNodeLabel = options.renderNodeLabel ?? ((b) => shortLink(b.cid));
  const events = new EventFetcher(blocks);
  yield "digraph clock {";
@@ -19122,15 +19158,18 @@ ${key.data.toString("base64")}
  links3.push(...block.value.parents);
  }
  yield "}";
- }
+ };
  var shortLink = (l) => `${String(l).slice(0, 4)}..${String(l).slice(-4)}`;
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/shard.js
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/shard.js
+ var MaxKeyLength = 64;
+ var MaxShardSize = 512 * 1024;
+ var CID_TAG2 = new Token(Type.tag, 42);
  var ShardBlock = class extends Block {
  /**
  * @param {object} config
- * @param {ShardLink} config.cid
- * @param {Shard} config.value
+ * @param {API.ShardLink} config.cid
+ * @param {API.Shard} config.value
  * @param {Uint8Array} config.bytes
  * @param {string} config.prefix
  */
@@ -19138,94 +19177,105 @@ ${key.data.toString("base64")}
  super({ cid, value, bytes });
  this.prefix = prefix;
  }
- static create() {
- return encodeShardBlock([]);
+ /** @param {API.ShardOptions} [options] */
+ static create(options) {
+ return encodeBlock(create7(options));
  }
  };
+ var create7 = (options) => ({ entries: [], ...configure(options) });
+ var configure = (options) => ({
+ maxSize: options?.maxSize ?? MaxShardSize,
+ maxKeyLength: options?.maxKeyLength ?? MaxKeyLength
+ });
+ var withEntries = (entries3, options) => ({ ...create7(options), entries: entries3 });
  var decodeCache = /* @__PURE__ */ new WeakMap();
- async function encodeShardBlock(value, prefix) {
+ var encodeBlock = async (value, prefix) => {
  const { cid, bytes } = await encode7({ value, codec: src_exports, hasher: sha256 });
  const block = new ShardBlock({ cid, value, bytes, prefix: prefix ?? "" });
  decodeCache.set(block.bytes, block);
  return block;
- }
- async function decodeShardBlock(bytes, prefix) {
+ };
+ var decodeBlock = async (bytes, prefix) => {
  const block = decodeCache.get(bytes);
  if (block)
  return block;
  const { cid, value } = await decode11({ bytes, codec: src_exports, hasher: sha256 });
- if (!Array.isArray(value))
+ if (!isShard(value))
  throw new Error(`invalid shard: ${cid}`);
  return new ShardBlock({ cid, value, bytes, prefix: prefix ?? "" });
- }
+ };
+ var isShard = (value) => value != null && typeof value === "object" && Array.isArray(value.entries) && typeof value.maxSize === "number" && typeof value.maxKeyLength === "number";
+ var isShardLink = (value) => isLink(value) && value.code === code;
  var ShardFetcher = class {
- /** @param {import('./block').BlockFetcher} blocks */
+ /** @param {API.BlockFetcher} blocks */
  constructor(blocks) {
  this._blocks = blocks;
  }
  /**
- * @param {ShardLink} link
+ * @param {API.ShardLink} link
  * @param {string} [prefix]
- * @returns {Promise<ShardBlockView>}
+ * @returns {Promise<API.ShardBlockView>}
  */
  async get(link2, prefix = "") {
  const block = await this._blocks.get(link2);
  if (!block)
  throw new Error(`missing block: ${link2}`);
- return decodeShardBlock(block.bytes, prefix);
- }
- };
- function putEntry(target, entry) {
- if (!target.length)
- return [entry];
- const shard = [];
- for (const [i, [k, v]] of target.entries()) {
- if (entry[0] === k) {
- if (Array.isArray(entry[1])) {
- if (Array.isArray(v) && v[1] != null && entry[1][1] == null) {
- shard.push([k, [entry[1][0], v[1]]]);
+ return decodeBlock(block.bytes, prefix);
+ }
+ };
+ var putEntry = (target, newEntry) => {
+ const entries3 = [];
+ for (const [i, entry] of target.entries()) {
+ const [k, v] = entry;
+ if (newEntry[0] === k) {
+ if (Array.isArray(newEntry[1])) {
+ if (Array.isArray(v) && v[1] != null && newEntry[1][1] == null) {
+ entries3.push([k, [newEntry[1][0], v[1]]]);
  } else {
- shard.push(entry);
+ entries3.push(newEntry);
  }
  } else {
- const newEntry = Array.isArray(v) ? [k, [v[0], entry[1]]] : entry;
- shard.push(newEntry);
+ if (Array.isArray(v)) {
+ entries3.push([k, [v[0], newEntry[1]]]);
+ } else {
+ entries3.push(newEntry);
+ }
  }
  for (let j = i + 1; j < target.length; j++) {
- shard.push(target[j]);
+ entries3.push(target[j]);
  }
- return shard;
+ return entries3;
  }
- if (i === 0 && entry[0] < k) {
- shard.push(entry);
+ if (i === 0 && newEntry[0] < k) {
+ entries3.push(newEntry);
  for (let j = i; j < target.length; j++) {
- shard.push(target[j]);
+ entries3.push(target[j]);
  }
- return shard;
+ return entries3;
  }
- if (i > 0 && entry[0] > target[i - 1][0] && entry[0] < k) {
- shard.push(entry);
+ if (i > 0 && newEntry[0] > target[i - 1][0] && newEntry[0] < k) {
+ entries3.push(newEntry);
  for (let j = i; j < target.length; j++) {
- shard.push(target[j]);
+ entries3.push(target[j]);
  }
- return shard;
+ return entries3;
  }
- shard.push([k, v]);
+ entries3.push(entry);
  }
- shard.push(entry);
- return shard;
- }
- function findCommonPrefix(shard, skey) {
- const startidx = shard.findIndex(([k]) => skey === k);
+ entries3.push(newEntry);
+ return entries3;
+ };
+ var findCommonPrefix = (entries3, skey) => {
+ const startidx = entries3.findIndex(([k]) => skey === k);
  if (startidx === -1)
  throw new Error(`key not found in shard: ${skey}`);
  let i = startidx;
  let pfx;
  while (true) {
- pfx = shard[i][0].slice(0, -1);
+ pfx = entries3[i][0].slice(0, -1);
  if (pfx.length) {
  while (true) {
- const matches = shard.filter((entry) => entry[0].startsWith(pfx));
+ const matches = entries3.filter((entry) => entry[0].startsWith(pfx));
  if (matches.length > 1)
  return { prefix: pfx, matches };
  pfx = pfx.slice(0, -1);
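Note the shape change in pail 0.4.0: a shard is now an object that carries its own limits rather than a bare entry array, so `maxSize`/`maxKeyLength` travel with the data instead of being module-wide constants. Using the helpers above (bundle-local names; `someCid` is a placeholder link):

    const empty = create7()
    // -> { entries: [], maxSize: 524288, maxKeyLength: 64 }
    const small = create7({ maxSize: 1024, maxKeyLength: 16 })  // per-shard tuning
    const filled = withEntries([["key", someCid]], small)       // same limits, new entries
    // isShard(filled) === true; the old bare-array form now fails decodeBlock's check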
@@ -19234,19 +19284,50 @@ ${key.data.toString("base64")}
  }
  }
  i++;
- if (i >= shard.length) {
+ if (i >= entries3.length) {
  i = 0;
  }
  if (i === startidx) {
  return;
  }
  }
- }
+ };
+ var encodedLength = (shard) => {
+ let entriesLength = 0;
+ for (const entry of shard.entries) {
+ entriesLength += entryEncodedLength(entry);
+ }
+ const tokens = [
+ new Token(Type.map, 3),
+ new Token(Type.string, "entries"),
+ new Token(Type.array, shard.entries.length),
+ new Token(Type.string, "maxKeyLength"),
+ new Token(Type.uint, shard.maxKeyLength),
+ new Token(Type.string, "maxSize"),
+ new Token(Type.uint, shard.maxSize)
+ ];
+ return tokensToLength(tokens) + entriesLength;
+ };
+ var entryEncodedLength = (entry) => {
+ const tokens = [
+ new Token(Type.array, entry.length),
+ new Token(Type.string, entry[0])
+ ];
+ if (Array.isArray(entry[1])) {
+ tokens.push(new Token(Type.array, entry[1].length));
+ for (const link2 of entry[1]) {
+ tokens.push(CID_TAG2);
+ tokens.push(new Token(Type.bytes, { length: link2.byteLength + 1 }));
+ }
+ } else {
+ tokens.push(CID_TAG2);
+ tokens.push(new Token(Type.bytes, { length: entry[1].byteLength + 1 }));
+ }
+ return tokensToLength(tokens);
+ };
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/index.js
- var MaxKeyLength = 64;
- var MaxShardSize = 512 * 1024;
- async function put(blocks, root2, key, value, options = {}) {
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/index.js
+ var put = async (blocks, root2, key, value) => {
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  const path = await traverse(shards, rshard, key);
@@ -19254,31 +19335,40 @@ ${key.data.toString("base64")}
  const skey = key.slice(target.prefix.length);
  let entry = [skey, value];
  const additions = [];
- if (skey.length > MaxKeyLength) {
- const pfxskeys = Array.from(Array(Math.ceil(skey.length / MaxKeyLength)), (_, i) => {
- const start = i * MaxKeyLength;
+ if (skey.length > target.value.maxKeyLength) {
+ const pfxskeys = Array.from(Array(Math.ceil(skey.length / target.value.maxKeyLength)), (_, i) => {
+ const start = i * target.value.maxKeyLength;
  return {
  prefix: target.prefix + skey.slice(0, start),
- skey: skey.slice(start, start + MaxKeyLength)
+ skey: skey.slice(start, start + target.value.maxKeyLength)
  };
  });
- let child2 = await encodeShardBlock([[pfxskeys[pfxskeys.length - 1].skey, value]], pfxskeys[pfxskeys.length - 1].prefix);
+ let child2 = await encodeBlock(
+ withEntries([[pfxskeys[pfxskeys.length - 1].skey, value]], target.value),
+ pfxskeys[pfxskeys.length - 1].prefix
+ );
  additions.push(child2);
  for (let i = pfxskeys.length - 2; i > 0; i--) {
- child2 = await encodeShardBlock([[pfxskeys[i].skey, [child2.cid]]], pfxskeys[i].prefix);
+ child2 = await encodeBlock(
+ withEntries([[pfxskeys[i].skey, [child2.cid]]], target.value),
+ pfxskeys[i].prefix
+ );
  additions.push(child2);
  }
  entry = [pfxskeys[0].skey, [child2.cid]];
  }
- let shard = putEntry(target.value, entry);
- let child = await encodeShardBlock(shard, target.prefix);
- if (child.bytes.length > (options.maxShardSize ?? MaxShardSize)) {
- const common = findCommonPrefix(shard, entry[0]);
+ let shard = withEntries(putEntry(target.value.entries, entry), target.value);
+ let child = await encodeBlock(shard, target.prefix);
+ if (child.bytes.length > shard.maxSize) {
+ const common = findCommonPrefix(shard.entries, entry[0]);
  if (!common)
  throw new Error("shard limit reached");
  const { prefix, matches } = common;
- const block = await encodeShardBlock(
- matches.filter(([k]) => k !== prefix).map(([k, v]) => [k.slice(prefix.length), v]),
+ const block = await encodeBlock(
+ withEntries(
+ matches.filter(([k]) => k !== prefix).map(([k, v]) => [k.slice(prefix.length), v]),
+ shard
+ ),
  target.prefix + prefix
  );
  additions.push(block);
@@ -19292,9 +19382,9 @@ ${key.data.toString("base64")}
  } else {
  value2 = [block.cid];
  }
- shard = shard.filter((e) => matches.every((m) => e[0] !== m[0]));
- shard = putEntry(shard, [prefix, value2]);
- child = await encodeShardBlock(shard, target.prefix);
+ shard.entries = shard.entries.filter((e) => matches.every((m) => e[0] !== m[0]));
+ shard = withEntries(putEntry(shard.entries, [prefix, value2]), shard);
+ child = await encodeBlock(shard, target.prefix);
  }
  if (child.cid.toString() === target.cid.toString()) {
  return { root: root2, additions: [], removals: [] };
@@ -19303,94 +19393,104 @@ ${key.data.toString("base64")}
  for (let i = path.length - 2; i >= 0; i--) {
  const parent = path[i];
  const key2 = child.prefix.slice(parent.prefix.length);
- const value2 = parent.value.map((entry2) => {
- const [k, v] = entry2;
- if (k !== key2)
- return entry2;
- if (!Array.isArray(v))
- throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
- return (
- /** @type {import('./shard').ShardEntry} */
- v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
- );
- });
- child = await encodeShardBlock(value2, parent.prefix);
+ const value2 = withEntries(
+ parent.value.entries.map((entry2) => {
+ const [k, v] = entry2;
+ if (k !== key2)
+ return entry2;
+ if (!Array.isArray(v))
+ throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
+ return (
+ /** @type {API.ShardEntry} */
+ v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
+ );
+ }),
+ parent.value
+ );
+ child = await encodeBlock(value2, parent.prefix);
  additions.push(child);
  }
  return { root: additions[additions.length - 1].cid, additions, removals: path };
- }
- async function get2(blocks, root2, key) {
+ };
+ var get2 = async (blocks, root2, key) => {
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  const path = await traverse(shards, rshard, key);
  const target = path[path.length - 1];
  const skey = key.slice(target.prefix.length);
- const entry = target.value.find(([k]) => k === skey);
+ const entry = target.value.entries.find(([k]) => k === skey);
  if (!entry)
  return;
  return Array.isArray(entry[1]) ? entry[1][1] : entry[1];
- }
- async function del(blocks, root2, key) {
+ };
+ var del = async (blocks, root2, key) => {
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  const path = await traverse(shards, rshard, key);
  const target = path[path.length - 1];
  const skey = key.slice(target.prefix.length);
- const entryidx = target.value.findIndex(([k]) => k === skey);
+ const entryidx = target.value.entries.findIndex(([k]) => k === skey);
  if (entryidx === -1)
  return { root: root2, additions: [], removals: [] };
- const entry = target.value[entryidx];
- if (Array.isArray(entry[1]) && entry[1][1] == null)
+ const entry = target.value.entries[entryidx];
+ if (Array.isArray(entry[1]) && entry[1][1] == null) {
  return { root: root2, additions: [], removals: [] };
+ }
  const additions = [];
  const removals = [...path];
- let shard = [...target.value];
+ let shard = withEntries([...target.value.entries], target.value);
  if (Array.isArray(entry[1])) {
- shard[entryidx] = [entry[0], [entry[1][0]]];
+ shard.entries[entryidx] = [entry[0], [entry[1][0]]];
  } else {
- shard.splice(entryidx, 1);
- while (!shard.length) {
+ shard.entries.splice(entryidx, 1);
+ while (!shard.entries.length) {
  const child2 = path[path.length - 1];
  const parent = path[path.length - 2];
  if (!parent)
  break;
  path.pop();
- shard = parent.value.filter((e) => {
- if (!Array.isArray(e[1]))
- return true;
- return e[1][0].toString() !== child2.cid.toString();
- });
+ shard = withEntries(
+ parent.value.entries.filter((e) => {
+ if (!Array.isArray(e[1]))
+ return true;
+ return e[1][0].toString() !== child2.cid.toString();
+ }),
+ parent.value
+ );
  }
  }
- let child = await encodeShardBlock(shard, path[path.length - 1].prefix);
+ let child = await encodeBlock(shard, path[path.length - 1].prefix);
  additions.push(child);
  for (let i = path.length - 2; i >= 0; i--) {
  const parent = path[i];
  const key2 = child.prefix.slice(parent.prefix.length);
- const value = parent.value.map((entry2) => {
- const [k, v] = entry2;
- if (k !== key2)
- return entry2;
- if (!Array.isArray(v))
- throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
- return (
- /** @type {import('./shard').ShardEntry} */
- v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
- );
- });
- child = await encodeShardBlock(value, parent.prefix);
+ const value = withEntries(
+ parent.value.entries.map((entry2) => {
+ const [k, v] = entry2;
+ if (k !== key2)
+ return entry2;
+ if (!Array.isArray(v))
+ throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
+ return (
+ /** @type {API.ShardEntry} */
+ v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
+ );
+ }),
+ parent.value
+ );
+ child = await encodeBlock(value, parent.prefix);
  additions.push(child);
  }
  return { root: additions[additions.length - 1].cid, additions, removals };
- }
- async function* entries(blocks, root2, options = {}) {
+ };
+ var entries = async function* (blocks, root2, options = {}) {
  const { prefix } = options;
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  yield* (
- /** @returns {AsyncIterableIterator<import('./shard').ShardValueEntry>} */
+ /** @returns {AsyncIterableIterator<API.ShardValueEntry>} */
  async function* ents(shard) {
- for (const entry of shard.value) {
+ for (const entry of shard.value.entries) {
  const key = shard.prefix + entry[0];
  if (Array.isArray(entry[1])) {
  if (entry[1][1]) {
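All of the reworked operations above now read through `shard.value.entries` and thread the shard's own limits via `withEntries`; note that `put` no longer takes an options argument, since the limits live in the shard. End-to-end usage against these exact signatures, with `blocks` a MemoryBlockstore and `valueCid` a placeholder link:

    const shard = await ShardBlock.create()     // root shard with default limits
    blocks.putSync(shard.cid, shard.bytes)
    const { root, additions } = await put(blocks, shard.cid, "mykey", valueCid)
    for (const b of additions) blocks.putSync(b.cid, b.bytes)
    await get2(blocks, root, "mykey")           // -> valueCid
    const res = await del(blocks, root, "mykey")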
@@ -19416,9 +19516,9 @@ ${key.data.toString("base64")}
  }
  }
  }(rshard)
  );
- }
- async function traverse(shards, shard, key) {
- for (const [k, v] of shard.value) {
+ };
+ var traverse = async (shards, shard, key) => {
+ for (const [k, v] of shard.value.entries) {
  if (key === k)
  return [shard];
@@ -19427,16 +19527,204 @@ ${key.data.toString("base64")}
  }
  }
  return [shard];
- }
+ };
+
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/batch/shard.js
+ var create8 = (init2) => ({
+ base: init2?.base,
+ prefix: init2?.prefix ?? "",
+ entries: init2?.entries ?? [],
+ ...configure(init2)
+ });
+
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/batch/index.js
+ var Batcher = class _Batcher {
+ #committed = false;
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.BatcherShardEntry[]} init.entries The entries in this shard.
+ * @param {string} init.prefix Key prefix.
+ * @param {number} init.maxSize
+ * @param {number} init.maxKeyLength
+ * @param {API.ShardBlockView} init.base Original shard this batcher is based on.
+ */
+ constructor({ blocks, entries: entries3, prefix, maxSize, maxKeyLength, base: base4 }) {
+ this.blocks = blocks;
+ this.prefix = prefix;
+ this.entries = entries3;
+ this.base = base4;
+ this.maxSize = maxSize;
+ this.maxKeyLength = maxKeyLength;
+ }
+ /**
+ * @param {string} key The key of the value to put.
+ * @param {API.UnknownLink} value The value to put.
+ * @returns {Promise<void>}
+ */
+ async put(key, value) {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ return put2(this.blocks, this, key, value);
+ }
+ async commit() {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ this.#committed = true;
+ return commit(this);
+ }
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.ShardLink} init.link CID of the shard block.
+ * @param {string} init.prefix
+ */
+ static async create({ blocks, link: link2, prefix }) {
+ const shards = new ShardFetcher(blocks);
+ const base4 = await shards.get(link2);
+ return new _Batcher({ blocks, entries: base4.value.entries, prefix, base: base4, ...configure(base4.value) });
+ }
+ };
+ var put2 = async (blocks, shard, key, value) => {
+ const shards = new ShardFetcher(blocks);
+ const dest = await traverse2(shards, key, shard);
+ if (dest.shard !== shard) {
+ shard = dest.shard;
+ key = dest.key;
+ }
+ let entry = [key, value];
+ let batcher;
+ if (key.length > shard.maxKeyLength) {
+ const pfxskeys = Array.from(Array(Math.ceil(key.length / shard.maxKeyLength)), (_, i) => {
+ const start = i * shard.maxKeyLength;
+ return {
+ prefix: shard.prefix + key.slice(0, start),
+ key: key.slice(start, start + shard.maxKeyLength)
+ };
+ });
+ entry = [pfxskeys[pfxskeys.length - 1].key, value];
+ batcher = create8({
+ entries: [entry],
+ prefix: pfxskeys[pfxskeys.length - 1].prefix,
+ ...configure(shard)
+ });
+ for (let i = pfxskeys.length - 2; i > 0; i--) {
+ entry = [pfxskeys[i].key, [batcher]];
+ batcher = create8({
+ entries: [entry],
+ prefix: pfxskeys[i].prefix,
+ ...configure(shard)
+ });
+ }
+ entry = [pfxskeys[0].key, [batcher]];
+ }
+ shard.entries = putEntry(asShardEntries(shard.entries), asShardEntry(entry));
+ const size = encodedLength(withEntries(asShardEntries(shard.entries), shard));
+ if (size > shard.maxSize) {
+ const common = findCommonPrefix(
+ asShardEntries(shard.entries),
+ entry[0]
+ );
+ if (!common)
+ throw new Error("shard limit reached");
+ const { prefix } = common;
+ const matches = common.matches;
+ const entries3 = matches.filter((m) => m[0] !== prefix).map((m) => {
+ m = [...m];
+ m[0] = m[0].slice(prefix.length);
+ return m;
+ });
+ const batcher2 = create8({
+ entries: entries3,
+ prefix: shard.prefix + prefix,
+ ...configure(shard)
+ });
+ let value2;
+ const pfxmatch = matches.find((m) => m[0] === prefix);
+ if (pfxmatch) {
+ if (Array.isArray(pfxmatch[1])) {
+ throw new Error(`expected "${prefix}" to be a shard value but found a shard link`);
+ }
+ value2 = [batcher2, pfxmatch[1]];
+ } else {
+ value2 = [batcher2];
+ }
+ shard.entries = putEntry(
+ asShardEntries(shard.entries.filter((e) => matches.every((m) => e[0] !== m[0]))),
+ asShardEntry([prefix, value2])
+ );
+ }
+ };
+ var traverse2 = async (shards, key, shard) => {
+ for (const e of shard.entries) {
+ const [k, v] = e;
+ if (key <= k)
+ break;
+ if (key.startsWith(k) && Array.isArray(v)) {
+ if (isShardLink(v[0])) {
+ const blk = await shards.get(v[0], shard.prefix + k);
+ v[0] = create8({ base: blk, prefix: blk.prefix, ...blk.value });
+ }
+ return traverse2(shards, key.slice(k.length), v[0]);
+ }
+ }
+ return { shard, key };
+ };
+ var commit = async (shard) => {
+ const additions = [];
+ const removals = [];
+ const entries3 = [];
+ for (const entry of shard.entries) {
+ if (Array.isArray(entry[1]) && !isShardLink(entry[1][0])) {
+ const result = await commit(entry[1][0]);
+ entries3.push([
+ entry[0],
+ entry[1][1] == null ? [result.root] : [result.root, entry[1][1]]
+ ]);
+ additions.push(...result.additions);
+ removals.push(...result.removals);
+ } else {
+ entries3.push(asShardEntry(entry));
+ }
+ }
+ const block = await encodeBlock(withEntries(entries3, shard), shard.prefix);
+ additions.push(block);
+ if (shard.base && shard.base.cid.toString() === block.cid.toString()) {
+ return { root: block.cid, additions: [], removals: [] };
+ }
+ if (shard.base)
+ removals.push(shard.base);
+ return { root: block.cid, additions, removals };
+ };
+ var asShardEntries = (entries3) => (
+ /** @type {API.ShardEntry[]} */
+ entries3
+ );
+ var asShardEntry = (entry) => (
+ /** @type {API.ShardEntry} */
+ entry
+ );
+ var create9 = (blocks, root2) => Batcher.create({ blocks, link: root2, prefix: "" });
+ var BatchCommittedError = class _BatchCommittedError extends Error {
+ /**
+ * @param {string} [message]
+ * @param {ErrorOptions} [options]
+ */
+ constructor(message2, options) {
+ super(message2 ?? "batch already committed", options);
+ this.code = _BatchCommittedError.code;
+ }
+ static code = "ERR_BATCH_COMMITTED";
+ };
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/crdt.js
- async function put2(blocks, head, key, value, options) {
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/crdt/index.js
+ var put3 = async (blocks, head, key, value) => {
  const mblocks = new MemoryBlockstore();
  blocks = new MultiBlockFetcher(mblocks, blocks);
  if (!head.length) {
  const shard = await ShardBlock.create();
  mblocks.putSync(shard.cid, shard.bytes);
- const result2 = await put(blocks, shard.cid, key, value, options);
+ const result2 = await put(blocks, shard.cid, key, value);
  const data2 = { type: "put", root: result2.root, key, value };
  const event2 = await EventBlock.create(data2, head);
  head = await advance(blocks, head, event2.cid);
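The new `Batcher` above accumulates puts in memory against a base shard and only materializes blocks on `commit`, reusing `putEntry`/`findCommonPrefix` and sizing shards ahead of time with `encodedLength`; any use after commit throws `BatchCommittedError`. Usage via the `create9` helper, with placeholder links assumed:

    const batch = await create9(blocks, root)  // Batcher.create({ blocks, link: root, prefix: "" })
    await batch.put("a", cidA)
    await batch.put("b", cidB)
    const { root: newRoot, additions, removals } = await batch.commit()
    await batch.put("c", cidC)                 // throws BatchCommittedError (ERR_BATCH_COMMITTED)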
@@ -19458,10 +19746,22 @@ ${key.data.toString("base64")}
  const additions = /* @__PURE__ */ new Map();
  const removals = /* @__PURE__ */ new Map();
  for (const { value: event2 } of sorted) {
- if (!["put", "del"].includes(event2.data.type)) {
- throw new Error(`unknown event type: ${event2.data.type}`);
+ let result2;
+ if (event2.data.type === "put") {
+ result2 = await put(blocks, root2, event2.data.key, event2.data.value);
+ } else if (event2.data.type === "del") {
+ result2 = await del(blocks, root2, event2.data.key);
+ } else if (event2.data.type === "batch") {
+ const batch2 = await create9(blocks, root2);
+ for (const op of event2.data.ops) {
+ if (op.type !== "put")
+ throw new Error(`unsupported batch operation: ${op.type}`);
+ await batch2.put(op.key, op.value);
+ }
+ result2 = await batch2.commit();
+ } else {
+ throw new Error(`unknown operation: ${event2.data.type}`);
  }
- const result2 = event2.data.type === "put" ? await put(blocks, root2, event2.data.key, event2.data.value) : await del(blocks, root2, event2.data.key);
  root2 = result2.root;
  for (const a of result2.additions) {
  mblocks.putSync(a.cid, a.bytes);
@@ -19471,7 +19771,7 @@ ${key.data.toString("base64")}
  removals.set(r.cid.toString(), r);
  }
  }
- const result = await put(blocks, root2, key, value, options);
+ const result = await put(blocks, root2, key, value);
  if (result.root.toString() === root2.toString()) {
  return { root: root2, additions: [], removals: [], head };
  }
@@ -19499,8 +19799,8 @@ ${key.data.toString("base64")}
  head,
  event
  };
- }
- async function root(blocks, head) {
+ };
+ var root = async (blocks, head) => {
  if (!head.length)
  throw new Error("cannot determine root of headless clock");
  const mblocks = new MemoryBlockstore();
@@ -19520,10 +19820,22 @@ ${key.data.toString("base64")}
  const additions = /* @__PURE__ */ new Map();
  const removals = /* @__PURE__ */ new Map();
  for (const { value: event } of sorted) {
- if (!["put", "del"].includes(event.data.type)) {
- throw new Error(`unknown event type: ${event.data.type}`);
+ let result;
+ if (event.data.type === "put") {
+ result = await put(blocks, root2, event.data.key, event.data.value);
+ } else if (event.data.type === "del") {
+ result = await del(blocks, root2, event.data.key);
+ } else if (event.data.type === "batch") {
+ const batch2 = await create9(blocks, root2);
+ for (const op of event.data.ops) {
+ if (op.type !== "put")
+ throw new Error(`unsupported batch operation: ${op.type}`);
+ await batch2.put(op.key, op.value);
+ }
+ result = await batch2.commit();
+ } else {
+ throw new Error(`unknown operation: ${event.data.type}`);
  }
- const result = event.data.type === "put" ? await put(blocks, root2, event.data.key, event.data.value) : await del(blocks, root2, event.data.key);
  root2 = result.root;
  for (const a of result.additions) {
  mblocks.putSync(a.cid, a.bytes);
@@ -19544,8 +19856,8 @@ ${key.data.toString("base64")}
  additions: [...additions.values()],
  removals: [...removals.values()]
  };
- }
- async function get3(blocks, head, key) {
+ };
+ var get3 = async (blocks, head, key) => {
  if (!head.length)
  return;
  const result = await root(blocks, head);
@@ -19553,8 +19865,8 @@ ${key.data.toString("base64")}
  blocks = new MultiBlockFetcher(new MemoryBlockstore(result.additions), blocks);
  }
  return get2(blocks, result.root, key);
- }
- async function* entries2(blocks, head, options) {
+ };
+ var entries2 = async function* (blocks, head, options) {
  if (!head.length)
  return;
  const result = await root(blocks, head);
@@ -19562,8 +19874,8 @@ ${key.data.toString("base64")}
  blocks = new MultiBlockFetcher(new MemoryBlockstore(result.additions), blocks);
  }
  yield* entries(blocks, result.root, options);
- }
- async function findCommonAncestor(events, children) {
+ };
+ var findCommonAncestor = async (events, children) => {
  if (!children.length)
  return;
  const candidates = children.map((c) => [c]);
@@ -19582,14 +19894,14 @@ ${key.data.toString("base64")}
  if (!changed)
  return;
  }
- }
- async function findAncestorCandidate(events, root2) {
+ };
+ var findAncestorCandidate = async (events, root2) => {
  const { value: event } = await events.get(root2);
  if (!event.parents.length)
  return root2;
  return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents);
- }
- function findCommonString(arrays) {
+ };
+ var findCommonString = (arrays) => {
  arrays = arrays.map((a) => [...a]);
  for (const arr of arrays) {
  for (const item of arr) {
@@ -19605,9 +19917,9 @@ ${key.data.toString("base64")}
  return item;
  }
  }
- }
- async function findSortedEvents(events, head, tail) {
- if (head.length === 1 && String(head[0]) === String(tail)) {
+ };
+ var findSortedEvents = async (events, head, tail) => {
+ if (head.length === 1 && head[0].toString() === tail.toString()) {
  return [];
  }
  const weights = /* @__PURE__ */ new Map();
@@ -19632,8 +19944,8 @@ ${key.data.toString("base64")}
  }
  }
  return Array.from(buckets).sort((a, b) => b[0] - a[0]).flatMap(([, es]) => es.sort((a, b) => String(a.cid) < String(b.cid) ? -1 : 1));
- }
- async function findEvents(events, start, end, depth = 0) {
+ };
+ var findEvents = async (events, start, end, depth = 0) => {
  const event = await events.get(start);
  const acc = [{ event, depth }];
  const { parents } = event.value;
@@ -19641,7 +19953,128 @@ ${key.data.toString("base64")}
  return acc;
  const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
  return acc.concat(...rest);
- }
+ };
+
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/crdt/batch/index.js
+ var Batcher2 = class _Batcher {
+ #committed = false;
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.EventLink<API.Operation>[]} init.head Merkle clock head.
+ * @param {API.BatcherShardEntry[]} init.entries The entries in this shard.
+ * @param {string} init.prefix Key prefix.
+ * @param {number} init.maxSize
+ * @param {number} init.maxKeyLength
+ * @param {API.ShardBlockView} init.base Original shard this batcher is based on.
+ * @param {API.ShardBlockView[]} init.additions Additions to include in the committed batch.
+ * @param {API.ShardBlockView[]} init.removals Removals to include in the committed batch.
+ */
+ constructor({ blocks, head, entries: entries3, prefix, maxSize, maxKeyLength, base: base4, additions, removals }) {
+ this.blocks = blocks;
+ this.head = head;
+ this.prefix = prefix;
+ this.entries = entries3;
+ this.base = base4;
+ this.maxSize = maxSize;
+ this.maxKeyLength = maxKeyLength;
+ this.additions = additions;
+ this.removals = removals;
+ this.ops = [];
+ }
+ /**
+ * @param {string} key The key of the value to put.
+ * @param {API.UnknownLink} value The value to put.
+ * @returns {Promise<void>}
+ */
+ async put(key, value) {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ await put2(this.blocks, this, key, value);
+ this.ops.push({ type: "put", key, value });
+ }
+ async commit() {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ this.#committed = true;
+ const res = await commit(this);
+ const data = { type: "batch", ops: this.ops, root: res.root };
+ const event = await EventBlock.create(data, this.head);
+ const mblocks = new MemoryBlockstore();
+ const blocks = new MultiBlockFetcher(mblocks, this.blocks);
+ mblocks.putSync(event.cid, event.bytes);
+ const head = await advance(blocks, this.head, event.cid);
+ const additions = /* @__PURE__ */ new Map();
+ const removals = /* @__PURE__ */ new Map();
+ for (const a of this.additions) {
+ additions.set(a.cid.toString(), a);
+ }
+ for (const r of this.removals) {
+ removals.set(r.cid.toString(), r);
+ }
+ for (const a of res.additions) {
+ if (removals.has(a.cid.toString())) {
+ removals.delete(a.cid.toString());
+ }
+ additions.set(a.cid.toString(), a);
+ }
+ for (const r of res.removals) {
+ if (additions.has(r.cid.toString())) {
+ additions.delete(r.cid.toString());
+ } else {
+ removals.set(r.cid.toString(), r);
+ }
+ }
+ return {
+ head,
+ event,
+ root: res.root,
+ additions: [...additions.values()],
+ removals: [...removals.values()]
+ };
+ }
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.EventLink<API.Operation>[]} init.head Merkle clock head.
+ * @param {string} init.prefix
+ */
+ static async create({ blocks, head, prefix }) {
+ const mblocks = new MemoryBlockstore();
+ blocks = new MultiBlockFetcher(mblocks, blocks);
+ if (!head.length) {
+ const base5 = await ShardBlock.create();
+ mblocks.putSync(base5.cid, base5.bytes);
+ return new _Batcher({
+ blocks,
+ head,
+ entries: [],
+ prefix,
+ base: base5,
+ additions: [base5],
+ removals: [],
+ ...configure(base5.value)
+ });
+ }
+ const { root: root2, additions, removals } = await root(blocks, head);
+ for (const a of additions) {
+ mblocks.putSync(a.cid, a.bytes);
+ }
+ const shards = new ShardFetcher(blocks);
+ const base4 = await shards.get(root2);
+ return new _Batcher({
+ blocks,
+ head,
+ entries: base4.value.entries,
+ prefix,
+ base: base4,
+ additions,
+ removals,
+ ...configure(base4.value)
+ });
+ }
+ };
+ var create10 = (blocks, head) => Batcher2.create({ blocks, head, prefix: "" });
 
  // ../../node_modules/.pnpm/@ipld+unixfs@2.1.2/node_modules/@ipld/unixfs/src/codec.js
  var codec_exports = {};
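`Batcher2` above wires the shard batcher into the Merkle clock: `commit` encodes a single `{ type: "batch", ops, root }` event, advances the head, and reconciles additions/removals across the shard work and any root rebuild. That single event is what the replay loops in `put3`/`root` earlier unpack op by op. Usage via `create10`, with placeholder links assumed:

    const batch = await create10(blocks, head)  // Batcher2.create({ blocks, head, prefix: "" })
    await batch.put("k1", cid1)
    await batch.put("k2", cid2)
    const result = await batch.commit()
    // result.head includes the new batch event;
    // result.event.value.data -> { type: "batch", ops: [...], root: result.root }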
@@ -22449,7 +22882,7 @@ ${key.data.toString("base64")}
22449
22882
  };
22450
22883
 
22451
22884
  // ../../node_modules/.pnpm/multiformats@11.0.2/node_modules/multiformats/src/hashes/digest.js
22452
- var create7 = (code8, digest3) => {
22885
+ var create11 = (code8, digest3) => {
22453
22886
  const size = digest3.byteLength;
22454
22887
  const sizeOffset = encodingLength3(code8);
22455
22888
  const digestOffset = sizeOffset + encodingLength3(size);
@@ -22518,7 +22951,7 @@ ${key.data.toString("base64")}
22518
22951
  digest(input) {
22519
22952
  if (input instanceof Uint8Array) {
22520
22953
  const result = this.encode(input);
22521
- return result instanceof Uint8Array ? create7(this.code, result) : result.then((digest3) => create7(this.code, digest3));
22954
+ return result instanceof Uint8Array ? create11(this.code, result) : result.then((digest3) => create11(this.code, digest3));
22522
22955
  } else {
22523
22956
  throw Error("Unknown type, must be binary type");
22524
22957
  }
@@ -23059,7 +23492,7 @@ ${key.data.toString("base64")}
23059
23492
  switch (this.version) {
23060
23493
  case 0: {
23061
23494
  const { code: code8, digest: digest3 } = this.multihash;
23062
- const multihash = create7(code8, digest3);
23495
+ const multihash = create11(code8, digest3);
23063
23496
  return (
23064
23497
  /** @type {CID<Data, Format, Alg, 1>} */
23065
23498
  _CID.createV1(this.code, multihash)
@@ -23590,7 +24023,7 @@ ${key.data.toString("base64")}
23590
24023
  hasher: sha2562,
23591
24024
  linker: { createLink: CID3.createV1 }
23592
24025
  });
23593
- var configure = (config2) => ({
24026
+ var configure2 = (config2) => ({
23594
24027
  ...defaults2(),
23595
24028
  ...config2
23596
24029
  });
@@ -23599,7 +24032,7 @@ ${key.data.toString("base64")}
  name: name3,
  encode: encodeFileChunk
  };
- var create8 = ({ writer, metadata = {}, settings: settings2 = defaults2() }) => new FileWriterView(init(writer, metadata, configure(settings2)));
+ var create12 = ({ writer, metadata = {}, settings: settings2 = defaults2() }) => new FileWriterView(init(writer, metadata, configure2(settings2)));
  var write4 = async (view, bytes) => {
  await perform(view, send({ type: "write", bytes }));
  return view;
@@ -23658,7 +24091,7 @@ ${key.data.toString("base64")}
 
  // ../../node_modules/.pnpm/@ipld+unixfs@2.1.2/node_modules/@ipld/unixfs/src/directory.js
  var defaults3 = defaults2;
- var create9 = ({ writer, settings: settings2 = defaults3(), metadata = {} }) => new DirectoryWriter({
+ var create13 = ({ writer, settings: settings2 = defaults3(), metadata = {} }) => new DirectoryWriter({
  writer,
  metadata,
  settings: settings2,
@@ -23859,7 +24292,7 @@ ${key.data.toString("base64")}
  /** @type {Uint8Array} */
  murmur364.encode(bytes)
  );
- var configure4 = ({ bitWidth: bitWidth2 = 8, hash = hash64 }) => {
+ var configure5 = ({ bitWidth: bitWidth2 = 8, hash = hash64 }) => {
  const hashSize = hash(new Uint8Array()).byteLength;
  const options = { bitWidth: bitWidth2, hash, hashSize };
  const at = (path, depth) => read4(path, depth, options);
@@ -23896,7 +24329,7 @@ ${key.data.toString("base64")}
  var bitWidth = 8;
  var config = {
  bitWidth,
- Path: configure4({ bitWidth })
+ Path: configure5({ bitWidth })
  };
 
  // ../../node_modules/.pnpm/@ipld+unixfs@2.1.2/node_modules/@ipld/unixfs/src/lib.js
@@ -23920,14 +24353,14 @@ ${key.data.toString("base64")}
  */
  constructor({ writer, settings: settings2 }) {
  this.writer = writer;
- this.settings = configure(settings2);
+ this.settings = configure2(settings2);
  }
  /**
  * @template [L=unknown]
  * @param {API.WriterOptions<L|Layout>} config
  */
  createFileWriter({ settings: settings2 = this.settings, metadata } = {}) {
- return create8({
+ return create12({
  writer: this.writer,
  settings: settings2,
  metadata
@@ -23938,7 +24371,7 @@ ${key.data.toString("base64")}
  * @param {API.WriterOptions<L|Layout>} config
  */
  createDirectoryWriter({ settings: settings2 = this.settings, metadata } = {}) {
- return create9({
+ return create13({
  writer: this.writer,
  settings: settings2,
  metadata
@@ -27208,7 +27641,7 @@ ${key.data.toString("base64")}
  var import_err_code7 = __toESM(require_err_code(), 1);
 
  // ../../node_modules/.pnpm/eventemitter3@5.0.1/node_modules/eventemitter3/index.mjs
- var import_index6 = __toESM(require_eventemitter3(), 1);
+ var import_index5 = __toESM(require_eventemitter3(), 1);
 
  // ../../node_modules/.pnpm/p-timeout@6.1.2/node_modules/p-timeout/index.js
  var TimeoutError = class extends Error {
@@ -27343,7 +27776,7 @@ ${key.data.toString("base64")}
  };
 
  // ../../node_modules/.pnpm/p-queue@8.0.1/node_modules/p-queue/dist/index.js
- var PQueue = class extends import_index6.default {
+ var PQueue = class extends import_index5.default {
  #carryoverConcurrencyCount;
  #isIntervalIgnored;
  #intervalCount = 0;
@@ -27985,7 +28418,7 @@ ${key.data.toString("base64")}
 
  // src/files.ts
  var queuingStrategy = withCapacity();
- var settings = configure({
+ var settings = configure2({
  fileChunkEncoder: raw_exports,
  smallFileEncoder: raw_exports,
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
@@ -28034,7 +28467,7 @@ ${key.data.toString("base64")}
  this.#file = file;
  }
  async finalize(writer) {
- const unixfsFileWriter = create8(writer);
+ const unixfsFileWriter = create12(writer);
  await this.#file.stream().pipeTo(
  new WritableStream({
  async write(chunk) {
@@ -28047,30 +28480,43 @@ ${key.data.toString("base64")}
  };
 
  // src/crdt-helpers.ts
- async function applyBulkUpdateToCrdt(tblocks, head, updates, options) {
- let result;
- for (const update2 of updates) {
- const link2 = await writeDocContent(tblocks, update2);
- result = await put2(tblocks, head, update2.key, link2, options);
- const resRoot = result.root.toString();
- const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
- if (!isReturned) {
- const hasRoot = await tblocks.get(result.root);
- if (!hasRoot) {
- throw new Error(
- `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
- );
- result.head = head;
- }
+ function time(tag2) {
+ }
+ function timeEnd(tag2) {
+ }
+ async function applyBulkUpdateToCrdt(tblocks, head, updates) {
+ let result = null;
+ if (updates.length > 1) {
+ const batch2 = await create10(tblocks, head);
+ for (const update2 of updates) {
+ const link2 = await writeDocContent(tblocks, update2);
+ await batch2.put(update2.key, link2);
  }
- if (result.event) {
- for (const { cid, bytes } of [...result.additions, result.event]) {
- tblocks.putSync(cid, bytes);
+ result = await batch2.commit();
+ } else {
+ for (const update2 of updates) {
+ const link2 = await writeDocContent(tblocks, update2);
+ result = await put3(tblocks, head, update2.key, link2);
+ const resRoot = result.root.toString();
+ const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
+ if (!isReturned) {
+ const hasRoot = await tblocks.get(result.root);
+ if (!hasRoot) {
+ throw new Error(
+ `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
+ );
+ }
  }
- head = result.head;
  }
  }
- return { head };
+ if (!result)
+ throw new Error("Missing result");
+ if (result.event) {
+ for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
+ tblocks.putSync(cid, bytes);
+ }
+ }
+ return { head: result.head };
  }
  async function writeDocContent(blocks, update2) {
  let value;
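
This rewrite of `applyBulkUpdateToCrdt` is the substantive change in the hunk: when a transaction carries more than one update, 0.17.0 stages every key on a single pail batcher (`create10` above) and commits once, yielding one clock event for the whole batch; single updates keep the direct put path. A compact TypeScript sketch of the batched shape, with illustrative types rather than the bundle's exact ones:

    // Illustrative surface of the batcher used above.
    interface Link { toString(): string }
    interface ShardBlock { cid: Link; bytes: Uint8Array }
    interface CommitResult {
      head: Link[]
      additions: ShardBlock[]
      removals: ShardBlock[]
    }
    interface Batcher {
      put(key: string, value: Link): Promise<void>
      commit(): Promise<CommitResult>
    }

    // N updates, one commit: each put folds into shared shard mutations
    // and the commit emits a single "batch" clock event.
    async function bulkPut(batcher: Batcher, links: Map<string, Link>): Promise<CommitResult> {
      for (const [key, link] of links) {
        await batcher.put(key, link)
      }
      return batcher.commit()
    }
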
@@ -28208,37 +28654,33 @@ ${key.data.toString("base64")}
  const { value: event } = await eventsFetcher.get(link2);
  if (!event)
  continue;
- const { key, value } = event.data;
- if (keys.has(key)) {
- if (event.parents) {
- updates = await gatherUpdates(
- blocks,
- eventsFetcher,
- event.parents,
- since,
- updates,
- keys,
- didLinks,
- limit
- );
- }
- } else {
- keys.add(key);
- const docValue = await getValueFromLink(blocks, value);
- updates.push({ key, value: docValue.doc, del: docValue.del, clock: link2 });
- limit--;
- if (event.parents) {
- updates = await gatherUpdates(
- blocks,
- eventsFetcher,
- event.parents,
- since,
- updates,
- keys,
- didLinks,
- limit
- );
- }
+ const { type: type2 } = event.data;
+ let ops = [];
+ if (type2 === "batch") {
+ ops = event.data.ops;
+ } else if (type2 === "put") {
+ ops = [event.data];
+ }
+ for (let i = ops.length - 1; i >= 0; i--) {
+ const { key, value } = ops[i];
+ if (!keys.has(key)) {
+ const docValue = await getValueFromLink(blocks, value);
+ updates.push({ key, value: docValue.doc, del: docValue.del, clock: link2 });
+ limit--;
+ keys.add(key);
+ }
+ }
+ if (event.parents) {
+ updates = await gatherUpdates(
+ blocks,
+ eventsFetcher,
+ event.parents,
+ since,
+ updates,
+ keys,
+ didLinks,
+ limit
+ );
  }
  }
  return updates;
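
`gatherUpdates` changes in step: a clock event may now carry a single "put" operation or a "batch" of operations, so the walker normalizes both shapes into one ops list and scans it in reverse, letting the newest write per key win. Sketched below with illustrative payload types:

    // Illustrative event payloads: a lone put, or a batch from one commit.
    type PutOp = { type: 'put'; key: string; value: string }
    type BatchOp = { type: 'batch'; ops: PutOp[] }
    type EventData = PutOp | BatchOp

    const opsOf = (data: EventData): PutOp[] =>
      data.type === 'batch' ? data.ops : [data]

    // Newest-first scan keeps the first value seen per key, matching the
    // reversed loop in the bundled code.
    function latestPerKey(events: EventData[]): Map<string, string> {
      const seen = new Map<string, string>()
      for (const data of events) {
        const ops = opsOf(data)
        for (let i = ops.length - 1; i >= 0; i--) {
          const { key, value } = ops[i]
          if (!seen.has(key)) seen.set(key, value)
        }
      }
      return seen
    }
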
@@ -28261,25 +28703,32 @@ ${key.data.toString("base64")}
  return;
  }
  isCompacting = true;
+ time("compact head");
  for (const cid of head) {
  const bl = await blockLog.get(cid);
  if (!bl)
  throw new Error("Missing head block: " + cid.toString());
  }
- for await (const entry of getAllEntries(blockLog, head)) {
- }
- for await (const [, link2] of entries2(blockLog, head)) {
- const bl = await blockLog.get(link2);
- if (!bl)
- throw new Error("Missing entry block: " + link2.toString());
+ timeEnd("compact head");
+ time("compact all entries");
+ for await (const _entry of getAllEntries(blockLog, head)) {
  }
+ timeEnd("compact all entries");
+ time("compact clock vis");
  for await (const _line of vis(blockLog, head)) {
  }
+ timeEnd("compact clock vis");
+ time("compact root");
  const result = await root(blockLog, head);
+ timeEnd("compact root");
+ time("compact root blocks");
  for (const { cid, bytes } of [...result.additions, ...result.removals]) {
  blockLog.loggedBlocks.putSync(cid, bytes);
  }
+ timeEnd("compact root blocks");
+ time("compact changes");
  await clockChangesSince(blockLog, head, [], {});
+ timeEnd("compact changes");
  isCompacting = false;
  }
  async function getBlock(blocks, cidString) {
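
The `time`/`timeEnd` calls threaded through the compaction path bind to the empty functions introduced at the top of src/crdt-helpers.ts, so they cost nothing in the shipped bundle. One plausible way to light them up in a local build (the `DEBUG` flag is an assumption, not something the package exposes):

    // No-op timers by default; flip DEBUG to route them to console timers.
    const DEBUG = false

    function time(tag: string): void {
      if (DEBUG) console.time(tag)
    }

    function timeEnd(tag: string): void {
      if (DEBUG) console.timeEnd(tag)
    }
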
@@ -28404,7 +28853,7 @@ ${key.data.toString("base64")}
  });
  return getNode;
  };
- var create12 = ({ get: get7, cache: cache4, chunker: chunker3, list, codec, hasher, sorted, compare: compare5, ...opts }) => {
+ var create16 = ({ get: get7, cache: cache4, chunker: chunker3, list, codec, hasher, sorted, compare: compare5, ...opts }) => {
  if (!sorted)
  list = list.sort(({ key: a }, { key: b }) => compare5(a, b));
  const getNode = createGetNode2(get7, cache4, chunker3, codec, hasher, compare5, opts);
@@ -28585,12 +29034,12 @@ ${key.data.toString("base64")}
  ...classes2,
  compare: compare3
  };
- var create13 = (opts) => {
+ var create17 = (opts) => {
  opts = {
  ...defaults4,
  ...opts
  };
- return create12(opts);
+ return create16(opts);
  };
  var load3 = (opts) => {
  opts = {
@@ -28666,7 +29115,7 @@ ${key.data.toString("base64")}
  if (!inIndex.cid) {
  let returnRootBlock = null;
  let returnNode = null;
- for await (const node of await create13({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
+ for await (const node of await create17({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
  const block = await node.block;
  await tblocks.put(block.cid, block.bytes);
  returnRootBlock = block;
@@ -29140,12 +29589,12 @@ ${key.data.toString("base64")}
  }
  });
  }
- async bulk(updates, options) {
+ async bulk(updates) {
  await this.ready;
  const prevHead = [...this.clock.head];
  const meta = await this.blockstore.transaction(
  async (blocks) => {
- const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates, options);
+ const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates);
  updates = updates.map(({ key, value, del: del2, clock }) => {
  readFiles(this.blockstore, { doc: value });
  return { key, value, del: del2, clock };
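
The `bulk` signature narrows accordingly: the `options` pass-through is gone because the batched CRDT path no longer takes per-call options, so the updates array alone drives the write. An illustrative before/after of the method's shape (types are stand-ins, not the package's exported ones):

    // Stand-in types for illustration only.
    interface DocUpdate { key: string; value?: unknown; del?: boolean }
    interface ClockHead { }

    interface CrdtBefore {
      bulk(updates: DocUpdate[], options?: object): Promise<{ head: ClockHead[] }>
    }

    interface CrdtAfter {
      bulk(updates: DocUpdate[]): Promise<{ head: ClockHead[] }>
    }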