@fireproof/core 0.16.7 → 0.17.1

Files changed (35)
  1. package/README.md +15 -6
  2. package/dist/browser/fireproof.cjs +62 -56
  3. package/dist/browser/fireproof.cjs.map +1 -1
  4. package/dist/browser/fireproof.d.cts +4 -4
  5. package/dist/browser/fireproof.d.ts +4 -4
  6. package/dist/browser/fireproof.global.js +713 -275
  7. package/dist/browser/fireproof.global.js.map +1 -1
  8. package/dist/browser/fireproof.js +64 -58
  9. package/dist/browser/fireproof.js.map +1 -1
  10. package/dist/browser/metafile-cjs.json +1 -1
  11. package/dist/browser/metafile-esm.json +1 -1
  12. package/dist/browser/metafile-iife.json +1 -1
  13. package/dist/memory/fireproof.cjs +62 -56
  14. package/dist/memory/fireproof.cjs.map +1 -1
  15. package/dist/memory/fireproof.d.cts +4 -4
  16. package/dist/memory/fireproof.d.ts +4 -4
  17. package/dist/memory/fireproof.global.js +713 -275
  18. package/dist/memory/fireproof.global.js.map +1 -1
  19. package/dist/memory/fireproof.js +64 -58
  20. package/dist/memory/fireproof.js.map +1 -1
  21. package/dist/memory/metafile-cjs.json +1 -1
  22. package/dist/memory/metafile-esm.json +1 -1
  23. package/dist/memory/metafile-iife.json +1 -1
  24. package/dist/node/fireproof.cjs +62 -56
  25. package/dist/node/fireproof.cjs.map +1 -1
  26. package/dist/node/fireproof.d.cts +4 -4
  27. package/dist/node/fireproof.d.ts +4 -4
  28. package/dist/node/fireproof.global.js +713 -275
  29. package/dist/node/fireproof.global.js.map +1 -1
  30. package/dist/node/fireproof.js +64 -58
  31. package/dist/node/fireproof.js.map +1 -1
  32. package/dist/node/metafile-cjs.json +1 -1
  33. package/dist/node/metafile-esm.json +1 -1
  34. package/dist/node/metafile-iife.json +1 -1
  35. package/package.json +4 -7
@@ -925,7 +925,7 @@ var Fireproof = (() => {
  this.tail = this.head;
  this.states = null;
  }
- var create14 = function create15() {
+ var create18 = function create19() {
  return util.Buffer ? function create_buffer_setup() {
  return (Writer.create = function create_buffer() {
  return new BufferWriter();
@@ -934,7 +934,7 @@ var Fireproof = (() => {
  return new Writer();
  };
  };
- Writer.create = create14();
+ Writer.create = create18();
  Writer.alloc = function alloc3(size) {
  return new util.Array(size);
  };
@@ -1080,7 +1080,7 @@ var Fireproof = (() => {
  };
  Writer._configure = function(BufferWriter_) {
  BufferWriter = BufferWriter_;
- Writer.create = create14();
+ Writer.create = create18();
  BufferWriter._configure();
  };
  }
@@ -1163,14 +1163,14 @@ var Fireproof = (() => {
  return new Reader(buffer2);
  throw Error("illegal buffer");
  };
- var create14 = function create15() {
+ var create18 = function create19() {
  return util.Buffer ? function create_buffer_setup(buffer2) {
  return (Reader.create = function create_buffer(buffer3) {
  return util.Buffer.isBuffer(buffer3) ? new BufferReader(buffer3) : create_array(buffer3);
  })(buffer2);
  } : create_array;
  };
- Reader.create = create14();
+ Reader.create = create18();
  Reader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */
  util.Array.prototype.slice;
  Reader.prototype.uint32 = /* @__PURE__ */ function read_uint32_setup() {
@@ -1338,7 +1338,7 @@ var Fireproof = (() => {
  };
  Reader._configure = function(BufferReader_) {
  BufferReader = BufferReader_;
- Reader.create = create14();
+ Reader.create = create18();
  BufferReader._configure();
  var fn = util.Long ? "toLong" : (
  /* istanbul ignore next */
@@ -1493,13 +1493,13 @@ var Fireproof = (() => {
  protobuf.util = require_minimal();
  protobuf.rpc = require_rpc();
  protobuf.roots = require_roots();
- protobuf.configure = configure5;
- function configure5() {
+ protobuf.configure = configure6;
+ function configure6() {
  protobuf.util._configure();
  protobuf.Writer._configure(protobuf.BufferWriter);
  protobuf.Reader._configure(protobuf.BufferReader);
  }
- configure5();
+ configure6();
  }
  });
 
@@ -4060,24 +4060,41 @@ var Fireproof = (() => {
  var cidSymbol = Symbol.for("@ipld/js-cid/CID");
 
  // ../../node_modules/.pnpm/multiformats@12.1.3/node_modules/multiformats/src/link.js
+ var isLink = (value) => {
+ if (value == null) {
+ return false;
+ }
+ const withSlash = (
+ /** @type {{'/'?: Uint8Array, bytes: Uint8Array}} */
+ value
+ );
+ if (withSlash["/"] != null && withSlash["/"] === withSlash.bytes) {
+ return true;
+ }
+ const withAsCID = (
+ /** @type {{'asCID'?: unknown}} */
+ value
+ );
+ if (withAsCID.asCID === value) {
+ return true;
+ }
+ return false;
+ };
  var parse = (source, base4) => CID.parse(source, base4);
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/block.js
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/block.js
  var MemoryBlockstore = class {
  /** @type {Map<string, Uint8Array>} */
  #blocks = /* @__PURE__ */ new Map();
  /**
- * @param {Array<AnyBlock>} [blocks]
+ * @param {Array<import('multiformats').Block>} [blocks]
  */
  constructor(blocks) {
  if (blocks) {
  this.#blocks = new Map(blocks.map((b) => [b.cid.toString(), b.bytes]));
  }
  }
- /**
- * @param {import('./link').AnyLink} cid
- * @returns {Promise<AnyBlock | undefined>}
- */
+ /** @type {API.BlockFetcher['get']} */
  async get(cid) {
  const bytes = this.#blocks.get(cid.toString());
  if (!bytes)
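
Note on the hunk above: multiformats 12 adds an `isLink` duck-type check, used further down by `isShardLink`. A value counts as a CID link when its `/` property aliases its `bytes` field, or when `asCID` points back at the value itself. A small usage sketch against the published multiformats package (not this bundle):

    import { CID } from 'multiformats/cid'
    import { isLink } from 'multiformats/link'

    const cid = CID.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')
    console.log(isLink(cid))          // true: cid.asCID === cid
    console.log(isLink({ '/': 'x' })) // false: '/' must alias the bytes field
    console.log(isLink(null))         // false
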
@@ -4085,24 +4102,24 @@ var Fireproof = (() => {
  return { cid, bytes };
  }
  /**
- * @param {import('./link').AnyLink} cid
+ * @param {API.UnknownLink} cid
  * @param {Uint8Array} bytes
  */
  async put(cid, bytes) {
  this.#blocks.set(cid.toString(), bytes);
  }
  /**
- * @param {import('./link').AnyLink} cid
+ * @param {API.UnknownLink} cid
  * @param {Uint8Array} bytes
  */
  putSync(cid, bytes) {
  this.#blocks.set(cid.toString(), bytes);
  }
- /** @param {import('./link').AnyLink} cid */
+ /** @param {API.UnknownLink} cid */
  async delete(cid) {
  this.#blocks.delete(cid.toString());
  }
- /** @param {import('./link').AnyLink} cid */
+ /** @param {API.UnknownLink} cid */
  deleteSync(cid) {
  this.#blocks.delete(cid.toString());
  }
@@ -4113,13 +4130,13 @@ var Fireproof = (() => {
  }
  };
  var MultiBlockFetcher = class {
- /** @type {BlockFetcher[]} */
+ /** @type {API.BlockFetcher[]} */
  #fetchers;
- /** @param {BlockFetcher[]} fetchers */
+ /** @param {API.BlockFetcher[]} fetchers */
  constructor(...fetchers) {
  this.#fetchers = fetchers;
  }
- /** @param {import('./link').AnyLink} link */
+ /** @type {API.BlockFetcher['get']} */
  async get(link2) {
  for (const f of this.#fetchers) {
  const v = await f.get(link2);
@@ -4368,10 +4385,10 @@ var Fireproof = (() => {
  * @param {any} [value]
  * @param {number} [encodedLength]
  */
- constructor(type2, value, encodedLength) {
+ constructor(type2, value, encodedLength2) {
  this.type = type2;
  this.value = value;
- this.encodedLength = encodedLength;
+ this.encodedLength = encodedLength2;
  this.encodedBytes = void 0;
  this.byteValue = void 0;
  }
@@ -10122,6 +10139,22 @@ You can use close({ resize: true }) to resize header`);
  async commit(t, done, opts = { noLoader: false, compact: false }) {
  return this.commitQueue.enqueue(() => this._commitInternal(t, done, opts));
  }
+ async cacheTransaction(t) {
+ for await (const block of t.entries()) {
+ const sBlock = block.cid.toString();
+ if (!this.getBlockCache.has(sBlock)) {
+ this.getBlockCache.set(sBlock, block);
+ }
+ }
+ }
+ async cacheCarReader(reader) {
+ for await (const block of reader.blocks()) {
+ const sBlock = block.cid.toString();
+ if (!this.getBlockCache.has(sBlock)) {
+ this.getBlockCache.set(sBlock, block);
+ }
+ }
+ }
  async _commitInternal(t, done, opts = { noLoader: false, compact: false }) {
  await this.ready;
  const header = done;
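
Note on the hunk above: `cacheTransaction` and `cacheCarReader` feed one `getBlockCache` map keyed by CID string, so every committed transaction and every CAR file read warms the same cache that `getBlock` and `entries` consult below. A minimal standalone sketch of the pattern (hypothetical names, not Fireproof's public API):

    // Hypothetical sketch of the cache-warming pattern used above.
    class BlockCache {
      cache = new Map() // CID string -> { cid, bytes }

      // Warm the cache from any async iterable of blocks,
      // e.g. a committed transaction or a CAR reader.
      async warm(blocks) {
        for await (const block of blocks) {
          const key = block.cid.toString()
          if (!this.cache.has(key)) this.cache.set(key, block)
        }
      }

      get(cid) {
        return this.cache.get(cid.toString())
      }
    }
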
@@ -10129,6 +10162,7 @@ You can use close({ resize: true }) to resize header`);
  let roots = await this.prepareRoots(fp, t);
  const { cid, bytes } = await this.prepareCarFile(roots[0], t, !!opts.public);
  await this.carStore.save({ cid, bytes });
+ await this.cacheTransaction(t);
  const newDbMeta = { car: cid, key: this.key || null };
  await this.remoteWAL.enqueue(newDbMeta, opts);
  await this.metaStore.save(newDbMeta);
@@ -10181,13 +10215,8 @@ You can use close({ resize: true }) to resize header`);
  }
  async *entries() {
  await this.ready;
- for (const cid of this.carLog) {
- const reader = await this.loadCar(cid);
- if (!reader)
- throw new Error(`missing car reader ${cid.toString()}`);
- for await (const block of reader.blocks()) {
- yield block;
- }
+ for (const [, block] of this.getBlockCache) {
+ yield block;
  }
  }
  async getBlock(cid) {
@@ -10202,10 +10231,9 @@ You can use close({ resize: true }) to resize header`);
  if (!reader) {
  throw new Error(`missing car reader ${carCid.toString()}`);
  }
- const block = await reader.get(cid);
- if (block) {
- return block;
- }
+ await this.cacheCarReader(reader);
+ if (this.getBlockCache.has(sCid))
+ return this.getBlockCache.get(sCid);
  throw new Error(`block not in reader: ${cid.toString()}`);
  })
  ).catch(() => void 0);
@@ -10258,7 +10286,7 @@ You can use close({ resize: true }) to resize header`);
  }
  async ensureDecryptedReader(reader) {
  const theKey = await this._getKey();
- if (!(theKey && this.ebOpts.crypto))
+ if (this.ebOpts.public || !(theKey && this.ebOpts.crypto))
  return reader;
  const { blocks, root: root2 } = await decodeEncryptedCar(this.ebOpts.crypto, theKey, reader);
  return {
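
Note on the hunk above: a ledger configured as public now skips decryption outright rather than relying on a key being absent. A hedged usage sketch; the flag surfaces in the bundle as `opts.public` and `ebOpts.public`, though the exact option plumbing in 0.17.x may differ:

    import { fireproof } from '@fireproof/core'

    // Opt out of encryption: CAR files are written and read as plaintext.
    const db = fireproof('my-open-data', { public: true })
    await db.put({ _id: 'greeting', text: 'hello' })
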
@@ -10336,8 +10364,10 @@ You can use close({ resize: true }) to resize header`);
  if (this.ebOpts.autoCompact && this.loader.carLog.length > this.ebOpts.autoCompact) {
  setTimeout(() => void this.compact(), 10);
  }
- if (car)
+ if (car) {
+ this.transactions.delete(t);
  return { ...done, car };
+ }
  throw new Error("failed to commit car");
  }
  return done;
@@ -10388,13 +10418,19 @@ You can use close({ resize: true }) to resize header`);
  }
  async *entries() {
  const seen = /* @__PURE__ */ new Set();
- for (const t of this.transactions) {
- for await (const blk of t.entries()) {
- if (seen.has(blk.cid.toString()))
- continue;
- seen.add(blk.cid.toString());
+ if (this.loader) {
+ for await (const blk of this.loader.entries()) {
  yield blk;
  }
+ } else {
+ for (const t of this.transactions) {
+ for await (const blk of t.entries()) {
+ if (seen.has(blk.cid.toString()))
+ continue;
+ seen.add(blk.cid.toString());
+ yield blk;
+ }
+ }
  }
  }
  };
@@ -10945,8 +10981,8 @@ You can use close({ resize: true }) to resize header`);
  }
  };
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/clock.js
- async function advance(blocks, head, event) {
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/clock/index.js
+ var advance = async (blocks, head, event) => {
  const events = new EventFetcher(blocks);
  const headmap = new Map(head.map((cid) => [cid.toString(), cid]));
  if (headmap.has(event.toString()))
@@ -10968,11 +11004,11 @@ You can use close({ resize: true }) to resize header`);
  }
  }
  return head.concat(event);
- }
+ };
  var EventBlock = class extends Block {
  /**
  * @param {object} config
- * @param {EventLink<T>} config.cid
+ * @param {API.EventLink<T>} config.cid
  * @param {Event} config.value
  * @param {Uint8Array} config.bytes
  * @param {string} config.prefix
@@ -10984,20 +11020,20 @@ You can use close({ resize: true }) to resize header`);
  /**
  * @template T
  * @param {T} data
- * @param {EventLink<T>[]} [parents]
+ * @param {API.EventLink<T>[]} [parents]
  */
  static create(data, parents) {
  return encodeEventBlock({ data, parents: parents ?? [] });
  }
  };
  var EventFetcher = class {
- /** @param {import('./block').BlockFetcher} blocks */
+ /** @param {API.BlockFetcher} blocks */
  constructor(blocks) {
  this._blocks = blocks;
  }
  /**
- * @param {EventLink<T>} link
- * @returns {Promise<EventBlockView<T>>}
+ * @param {API.EventLink<T>} link
+ * @returns {Promise<API.EventBlockView<T>>}
  */
  async get(link2) {
  const block = await this._blocks.get(link2);
@@ -11006,15 +11042,15 @@ You can use close({ resize: true }) to resize header`);
  return decodeEventBlock(block.bytes);
  }
  };
- async function encodeEventBlock(value) {
+ var encodeEventBlock = async (value) => {
  const { cid, bytes } = await encode7({ value, codec: src_exports, hasher: sha256 });
  return new Block({ cid, value, bytes });
- }
- async function decodeEventBlock(bytes) {
+ };
+ var decodeEventBlock = async (bytes) => {
  const { cid, value } = await decode11({ bytes, codec: src_exports, hasher: sha256 });
  return new Block({ cid, value, bytes });
- }
- async function contains(events, a, b) {
+ };
+ var contains = async (events, a, b) => {
  if (a.toString() === b.toString())
  return true;
  const [{ value: aevent }, { value: bevent }] = await Promise.all([events.get(a), events.get(b)]);
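
Note on the hunks above: the clock helpers become arrow-function consts as part of vendoring @web3-storage/pail 0.4.0; behavior is unchanged, and `encodeEventBlock`/`decodeEventBlock` remain inverses. A round-trip sketch against the published package rather than this bundle (the `./clock` subpath is assumed from the bundle layout):

    import { EventBlock, decodeEventBlock } from '@web3-storage/pail/clock'

    const event = await EventBlock.create({ type: 'put', key: 'k', value: null }, [])
    const decoded = await decodeEventBlock(event.bytes)
    console.log(decoded.cid.equals(event.cid)) // true: same bytes, same CID
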
@@ -11035,8 +11071,8 @@ You can use close({ resize: true }) to resize header`);
  links3.push(...event.parents);
  }
  return false;
- }
- async function* vis(blocks, head, options = {}) {
+ };
+ var vis = async function* (blocks, head, options = {}) {
  const renderNodeLabel = options.renderNodeLabel ?? ((b) => shortLink(b.cid));
  const events = new EventFetcher(blocks);
  yield "digraph clock {";
@@ -11068,15 +11104,18 @@ You can use close({ resize: true }) to resize header`);
  links3.push(...block.value.parents);
  }
  yield "}";
- }
+ };
  var shortLink = (l) => `${String(l).slice(0, 4)}..${String(l).slice(-4)}`;
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/shard.js
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/shard.js
+ var MaxKeyLength = 64;
+ var MaxShardSize = 512 * 1024;
+ var CID_TAG2 = new Token(Type.tag, 42);
  var ShardBlock = class extends Block {
  /**
  * @param {object} config
- * @param {ShardLink} config.cid
- * @param {Shard} config.value
+ * @param {API.ShardLink} config.cid
+ * @param {API.Shard} config.value
  * @param {Uint8Array} config.bytes
  * @param {string} config.prefix
  */
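
Note on the hunk above: what were module-level constants in pail 0.3.4 become per-shard settings in 0.4.0; every shard now persists `maxSize` (default 512 KiB) and `maxKeyLength` (default 64) next to its `entries`. A sketch against the published package (the `./shard` export path is assumed from the bundle layout):

    import { ShardBlock } from '@web3-storage/pail/shard'

    // Overrides are stored inside the shard value itself.
    const root = await ShardBlock.create({ maxKeyLength: 128, maxSize: 1024 * 1024 })
    console.log(root.value.maxKeyLength) // 128
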
@@ -11084,94 +11123,105 @@ You can use close({ resize: true }) to resize header`);
  super({ cid, value, bytes });
  this.prefix = prefix;
  }
- static create() {
- return encodeShardBlock([]);
+ /** @param {API.ShardOptions} [options] */
+ static create(options) {
+ return encodeBlock(create7(options));
  }
  };
+ var create7 = (options) => ({ entries: [], ...configure(options) });
+ var configure = (options) => ({
+ maxSize: options?.maxSize ?? MaxShardSize,
+ maxKeyLength: options?.maxKeyLength ?? MaxKeyLength
+ });
+ var withEntries = (entries3, options) => ({ ...create7(options), entries: entries3 });
  var decodeCache = /* @__PURE__ */ new WeakMap();
- async function encodeShardBlock(value, prefix) {
+ var encodeBlock = async (value, prefix) => {
  const { cid, bytes } = await encode7({ value, codec: src_exports, hasher: sha256 });
  const block = new ShardBlock({ cid, value, bytes, prefix: prefix ?? "" });
  decodeCache.set(block.bytes, block);
  return block;
- }
- async function decodeShardBlock(bytes, prefix) {
+ };
+ var decodeBlock = async (bytes, prefix) => {
  const block = decodeCache.get(bytes);
  if (block)
  return block;
  const { cid, value } = await decode11({ bytes, codec: src_exports, hasher: sha256 });
- if (!Array.isArray(value))
+ if (!isShard(value))
  throw new Error(`invalid shard: ${cid}`);
  return new ShardBlock({ cid, value, bytes, prefix: prefix ?? "" });
- }
+ };
+ var isShard = (value) => value != null && typeof value === "object" && Array.isArray(value.entries) && typeof value.maxSize === "number" && typeof value.maxKeyLength === "number";
+ var isShardLink = (value) => isLink(value) && value.code === code;
  var ShardFetcher = class {
- /** @param {import('./block').BlockFetcher} blocks */
+ /** @param {API.BlockFetcher} blocks */
  constructor(blocks) {
  this._blocks = blocks;
  }
  /**
- * @param {ShardLink} link
+ * @param {API.ShardLink} link
  * @param {string} [prefix]
- * @returns {Promise<ShardBlockView>}
+ * @returns {Promise<API.ShardBlockView>}
  */
  async get(link2, prefix = "") {
  const block = await this._blocks.get(link2);
  if (!block)
  throw new Error(`missing block: ${link2}`);
- return decodeShardBlock(block.bytes, prefix);
- }
- };
- function putEntry(target, entry) {
- if (!target.length)
- return [entry];
- const shard = [];
- for (const [i, [k, v]] of target.entries()) {
- if (entry[0] === k) {
- if (Array.isArray(entry[1])) {
- if (Array.isArray(v) && v[1] != null && entry[1][1] == null) {
- shard.push([k, [entry[1][0], v[1]]]);
+ return decodeBlock(block.bytes, prefix);
+ }
+ };
+ var putEntry = (target, newEntry) => {
+ const entries3 = [];
+ for (const [i, entry] of target.entries()) {
+ const [k, v] = entry;
+ if (newEntry[0] === k) {
+ if (Array.isArray(newEntry[1])) {
+ if (Array.isArray(v) && v[1] != null && newEntry[1][1] == null) {
+ entries3.push([k, [newEntry[1][0], v[1]]]);
  } else {
- shard.push(entry);
+ entries3.push(newEntry);
  }
  } else {
- const newEntry = Array.isArray(v) ? [k, [v[0], entry[1]]] : entry;
- shard.push(newEntry);
+ if (Array.isArray(v)) {
+ entries3.push([k, [v[0], newEntry[1]]]);
+ } else {
+ entries3.push(newEntry);
+ }
  }
  for (let j = i + 1; j < target.length; j++) {
- shard.push(target[j]);
+ entries3.push(target[j]);
  }
- return shard;
+ return entries3;
  }
- if (i === 0 && entry[0] < k) {
- shard.push(entry);
+ if (i === 0 && newEntry[0] < k) {
+ entries3.push(newEntry);
  for (let j = i; j < target.length; j++) {
- shard.push(target[j]);
+ entries3.push(target[j]);
  }
- return shard;
+ return entries3;
  }
- if (i > 0 && entry[0] > target[i - 1][0] && entry[0] < k) {
- shard.push(entry);
+ if (i > 0 && newEntry[0] > target[i - 1][0] && newEntry[0] < k) {
+ entries3.push(newEntry);
  for (let j = i; j < target.length; j++) {
- shard.push(target[j]);
+ entries3.push(target[j]);
  }
- return shard;
+ return entries3;
  }
- shard.push([k, v]);
+ entries3.push(entry);
  }
- shard.push(entry);
- return shard;
- }
- function findCommonPrefix(shard, skey) {
- const startidx = shard.findIndex(([k]) => skey === k);
+ entries3.push(newEntry);
+ return entries3;
+ };
+ var findCommonPrefix = (entries3, skey) => {
+ const startidx = entries3.findIndex(([k]) => skey === k);
  if (startidx === -1)
  throw new Error(`key not found in shard: ${skey}`);
  let i = startidx;
  let pfx;
  while (true) {
- pfx = shard[i][0].slice(0, -1);
+ pfx = entries3[i][0].slice(0, -1);
  if (pfx.length) {
  while (true) {
- const matches = shard.filter((entry) => entry[0].startsWith(pfx));
+ const matches = entries3.filter((entry) => entry[0].startsWith(pfx));
  if (matches.length > 1)
  return { prefix: pfx, matches };
  pfx = pfx.slice(0, -1);
@@ -11180,19 +11230,50 @@ You can use close({ resize: true }) to resize header`);
  }
  }
  i++;
- if (i >= shard.length) {
+ if (i >= entries3.length) {
  i = 0;
  }
  if (i === startidx) {
  return;
  }
  }
- }
+ };
+ var encodedLength = (shard) => {
+ let entriesLength = 0;
+ for (const entry of shard.entries) {
+ entriesLength += entryEncodedLength(entry);
+ }
+ const tokens = [
+ new Token(Type.map, 3),
+ new Token(Type.string, "entries"),
+ new Token(Type.array, shard.entries.length),
+ new Token(Type.string, "maxKeyLength"),
+ new Token(Type.uint, shard.maxKeyLength),
+ new Token(Type.string, "maxSize"),
+ new Token(Type.uint, shard.maxSize)
+ ];
+ return tokensToLength(tokens) + entriesLength;
+ };
+ var entryEncodedLength = (entry) => {
+ const tokens = [
+ new Token(Type.array, entry.length),
+ new Token(Type.string, entry[0])
+ ];
+ if (Array.isArray(entry[1])) {
+ tokens.push(new Token(Type.array, entry[1].length));
+ for (const link2 of entry[1]) {
+ tokens.push(CID_TAG2);
+ tokens.push(new Token(Type.bytes, { length: link2.byteLength + 1 }));
+ }
+ } else {
+ tokens.push(CID_TAG2);
+ tokens.push(new Token(Type.bytes, { length: entry[1].byteLength + 1 }));
+ }
+ return tokensToLength(tokens);
+ };
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/index.js
- var MaxKeyLength = 64;
- var MaxShardSize = 512 * 1024;
- async function put(blocks, root2, key, value, options = {}) {
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/index.js
+ var put = async (blocks, root2, key, value) => {
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  const path = await traverse(shards, rshard, key);
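
Note on the hunk above: the new `encodedLength` helper predicts a shard's CBOR size from cborg tokens instead of encoding it, which lets the batcher further down enforce `maxSize` without serializing on every put. The cborg primitives it leans on are public API:

    import { Token, Type } from 'cborg'
    import { tokensToLength } from 'cborg/length'

    // Byte length of a one-entry CBOR map header plus a text key,
    // computed without actually encoding anything.
    const tokens = [new Token(Type.map, 1), new Token(Type.string, 'entries')]
    console.log(tokensToLength(tokens))
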
@@ -11200,31 +11281,40 @@ You can use close({ resize: true }) to resize header`);
  const skey = key.slice(target.prefix.length);
  let entry = [skey, value];
  const additions = [];
- if (skey.length > MaxKeyLength) {
- const pfxskeys = Array.from(Array(Math.ceil(skey.length / MaxKeyLength)), (_, i) => {
- const start = i * MaxKeyLength;
+ if (skey.length > target.value.maxKeyLength) {
+ const pfxskeys = Array.from(Array(Math.ceil(skey.length / target.value.maxKeyLength)), (_, i) => {
+ const start = i * target.value.maxKeyLength;
  return {
  prefix: target.prefix + skey.slice(0, start),
- skey: skey.slice(start, start + MaxKeyLength)
+ skey: skey.slice(start, start + target.value.maxKeyLength)
  };
  });
- let child2 = await encodeShardBlock([[pfxskeys[pfxskeys.length - 1].skey, value]], pfxskeys[pfxskeys.length - 1].prefix);
+ let child2 = await encodeBlock(
+ withEntries([[pfxskeys[pfxskeys.length - 1].skey, value]], target.value),
+ pfxskeys[pfxskeys.length - 1].prefix
+ );
  additions.push(child2);
  for (let i = pfxskeys.length - 2; i > 0; i--) {
- child2 = await encodeShardBlock([[pfxskeys[i].skey, [child2.cid]]], pfxskeys[i].prefix);
+ child2 = await encodeBlock(
+ withEntries([[pfxskeys[i].skey, [child2.cid]]], target.value),
+ pfxskeys[i].prefix
+ );
  additions.push(child2);
  }
  entry = [pfxskeys[0].skey, [child2.cid]];
  }
- let shard = putEntry(target.value, entry);
- let child = await encodeShardBlock(shard, target.prefix);
- if (child.bytes.length > (options.maxShardSize ?? MaxShardSize)) {
- const common = findCommonPrefix(shard, entry[0]);
+ let shard = withEntries(putEntry(target.value.entries, entry), target.value);
+ let child = await encodeBlock(shard, target.prefix);
+ if (child.bytes.length > shard.maxSize) {
+ const common = findCommonPrefix(shard.entries, entry[0]);
  if (!common)
  throw new Error("shard limit reached");
  const { prefix, matches } = common;
- const block = await encodeShardBlock(
- matches.filter(([k]) => k !== prefix).map(([k, v]) => [k.slice(prefix.length), v]),
+ const block = await encodeBlock(
+ withEntries(
+ matches.filter(([k]) => k !== prefix).map(([k, v]) => [k.slice(prefix.length), v]),
+ shard
+ ),
  target.prefix + prefix
  );
  additions.push(block);
@@ -11238,9 +11328,9 @@ You can use close({ resize: true }) to resize header`);
  } else {
  value2 = [block.cid];
  }
- shard = shard.filter((e) => matches.every((m) => e[0] !== m[0]));
- shard = putEntry(shard, [prefix, value2]);
- child = await encodeShardBlock(shard, target.prefix);
+ shard.entries = shard.entries.filter((e) => matches.every((m) => e[0] !== m[0]));
+ shard = withEntries(putEntry(shard.entries, [prefix, value2]), shard);
+ child = await encodeBlock(shard, target.prefix);
  }
  if (child.cid.toString() === target.cid.toString()) {
  return { root: root2, additions: [], removals: [] };
@@ -11249,94 +11339,104 @@ You can use close({ resize: true }) to resize header`);
  for (let i = path.length - 2; i >= 0; i--) {
  const parent = path[i];
  const key2 = child.prefix.slice(parent.prefix.length);
- const value2 = parent.value.map((entry2) => {
- const [k, v] = entry2;
- if (k !== key2)
- return entry2;
- if (!Array.isArray(v))
- throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
- return (
- /** @type {import('./shard').ShardEntry} */
- v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
- );
- });
- child = await encodeShardBlock(value2, parent.prefix);
+ const value2 = withEntries(
+ parent.value.entries.map((entry2) => {
+ const [k, v] = entry2;
+ if (k !== key2)
+ return entry2;
+ if (!Array.isArray(v))
+ throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
+ return (
+ /** @type {API.ShardEntry} */
+ v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
+ );
+ }),
+ parent.value
+ );
+ child = await encodeBlock(value2, parent.prefix);
  additions.push(child);
  }
  return { root: additions[additions.length - 1].cid, additions, removals: path };
- }
- async function get2(blocks, root2, key) {
+ };
+ var get2 = async (blocks, root2, key) => {
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  const path = await traverse(shards, rshard, key);
  const target = path[path.length - 1];
  const skey = key.slice(target.prefix.length);
- const entry = target.value.find(([k]) => k === skey);
+ const entry = target.value.entries.find(([k]) => k === skey);
  if (!entry)
  return;
  return Array.isArray(entry[1]) ? entry[1][1] : entry[1];
- }
- async function del(blocks, root2, key) {
+ };
+ var del = async (blocks, root2, key) => {
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  const path = await traverse(shards, rshard, key);
  const target = path[path.length - 1];
  const skey = key.slice(target.prefix.length);
- const entryidx = target.value.findIndex(([k]) => k === skey);
+ const entryidx = target.value.entries.findIndex(([k]) => k === skey);
  if (entryidx === -1)
  return { root: root2, additions: [], removals: [] };
- const entry = target.value[entryidx];
- if (Array.isArray(entry[1]) && entry[1][1] == null)
+ const entry = target.value.entries[entryidx];
+ if (Array.isArray(entry[1]) && entry[1][1] == null) {
  return { root: root2, additions: [], removals: [] };
+ }
  const additions = [];
  const removals = [...path];
- let shard = [...target.value];
+ let shard = withEntries([...target.value.entries], target.value);
  if (Array.isArray(entry[1])) {
- shard[entryidx] = [entry[0], [entry[1][0]]];
+ shard.entries[entryidx] = [entry[0], [entry[1][0]]];
  } else {
- shard.splice(entryidx, 1);
- while (!shard.length) {
+ shard.entries.splice(entryidx, 1);
+ while (!shard.entries.length) {
  const child2 = path[path.length - 1];
  const parent = path[path.length - 2];
  if (!parent)
  break;
  path.pop();
- shard = parent.value.filter((e) => {
- if (!Array.isArray(e[1]))
- return true;
- return e[1][0].toString() !== child2.cid.toString();
- });
+ shard = withEntries(
+ parent.value.entries.filter((e) => {
+ if (!Array.isArray(e[1]))
+ return true;
+ return e[1][0].toString() !== child2.cid.toString();
+ }),
+ parent.value
+ );
  }
  }
- let child = await encodeShardBlock(shard, path[path.length - 1].prefix);
+ let child = await encodeBlock(shard, path[path.length - 1].prefix);
  additions.push(child);
  for (let i = path.length - 2; i >= 0; i--) {
  const parent = path[i];
  const key2 = child.prefix.slice(parent.prefix.length);
- const value = parent.value.map((entry2) => {
- const [k, v] = entry2;
- if (k !== key2)
- return entry2;
- if (!Array.isArray(v))
- throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
- return (
- /** @type {import('./shard').ShardEntry} */
- v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
- );
- });
- child = await encodeShardBlock(value, parent.prefix);
+ const value = withEntries(
+ parent.value.entries.map((entry2) => {
+ const [k, v] = entry2;
+ if (k !== key2)
+ return entry2;
+ if (!Array.isArray(v))
+ throw new Error(`"${key2}" is not a shard link in: ${parent.cid}`);
+ return (
+ /** @type {API.ShardEntry} */
+ v[1] == null ? [k, [child.cid]] : [k, [child.cid, v[1]]]
+ );
+ }),
+ parent.value
+ );
+ child = await encodeBlock(value, parent.prefix);
  additions.push(child);
  }
  return { root: additions[additions.length - 1].cid, additions, removals };
- }
- async function* entries(blocks, root2, options = {}) {
+ };
+ var entries = async function* (blocks, root2, options = {}) {
  const { prefix } = options;
  const shards = new ShardFetcher(blocks);
  const rshard = await shards.get(root2);
  yield* (
- /** @returns {AsyncIterableIterator<import('./shard').ShardValueEntry>} */
+ /** @returns {AsyncIterableIterator<API.ShardValueEntry>} */
  async function* ents(shard) {
- for (const entry of shard.value) {
+ for (const entry of shard.value.entries) {
  const key = shard.prefix + entry[0];
  if (Array.isArray(entry[1])) {
  if (entry[1][1]) {
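
Note on the hunks above: `put`, `get`, `del`, and `entries` now read `shard.value.entries` and take their limits from the shard itself; the old `options.maxShardSize` parameter to `put` is gone. A usage sketch against the published @web3-storage/pail (the `./block` subpath is assumed from the bundle layout):

    import { ShardBlock, put, get } from '@web3-storage/pail'
    import { MemoryBlockstore } from '@web3-storage/pail/block'
    import { CID } from 'multiformats/cid'

    const blocks = new MemoryBlockstore()
    const genesis = await ShardBlock.create()
    await blocks.put(genesis.cid, genesis.bytes)

    const value = CID.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')
    const { root, additions } = await put(blocks, genesis.cid, 'mykey', value)
    for (const b of additions) await blocks.put(b.cid, b.bytes)
    console.log(await get(blocks, root, 'mykey')) // -> the stored CID
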
@@ -11362,9 +11462,9 @@ You can use close({ resize: true }) to resize header`);
  }
  }
  }(rshard)
  );
- }
- async function traverse(shards, shard, key) {
- for (const [k, v] of shard.value) {
+ };
+ var traverse = async (shards, shard, key) => {
+ for (const [k, v] of shard.value.entries) {
  if (key === k)
  return [shard];
@@ -11373,16 +11473,204 @@ You can use close({ resize: true }) to resize header`);
  }
  }
  return [shard];
- }
+ };
+
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/batch/shard.js
+ var create8 = (init2) => ({
+ base: init2?.base,
+ prefix: init2?.prefix ?? "",
+ entries: init2?.entries ?? [],
+ ...configure(init2)
+ });
+
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/batch/index.js
+ var Batcher = class _Batcher {
+ #committed = false;
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.BatcherShardEntry[]} init.entries The entries in this shard.
+ * @param {string} init.prefix Key prefix.
+ * @param {number} init.maxSize
+ * @param {number} init.maxKeyLength
+ * @param {API.ShardBlockView} init.base Original shard this batcher is based on.
+ */
+ constructor({ blocks, entries: entries3, prefix, maxSize, maxKeyLength, base: base4 }) {
+ this.blocks = blocks;
+ this.prefix = prefix;
+ this.entries = entries3;
+ this.base = base4;
+ this.maxSize = maxSize;
+ this.maxKeyLength = maxKeyLength;
+ }
+ /**
+ * @param {string} key The key of the value to put.
+ * @param {API.UnknownLink} value The value to put.
+ * @returns {Promise<void>}
+ */
+ async put(key, value) {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ return put2(this.blocks, this, key, value);
+ }
+ async commit() {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ this.#committed = true;
+ return commit(this);
+ }
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.ShardLink} init.link CID of the shard block.
+ * @param {string} init.prefix
+ */
+ static async create({ blocks, link: link2, prefix }) {
+ const shards = new ShardFetcher(blocks);
+ const base4 = await shards.get(link2);
+ return new _Batcher({ blocks, entries: base4.value.entries, prefix, base: base4, ...configure(base4.value) });
+ }
+ };
+ var put2 = async (blocks, shard, key, value) => {
+ const shards = new ShardFetcher(blocks);
+ const dest = await traverse2(shards, key, shard);
+ if (dest.shard !== shard) {
+ shard = dest.shard;
+ key = dest.key;
+ }
+ let entry = [key, value];
+ let batcher;
+ if (key.length > shard.maxKeyLength) {
+ const pfxskeys = Array.from(Array(Math.ceil(key.length / shard.maxKeyLength)), (_, i) => {
+ const start = i * shard.maxKeyLength;
+ return {
+ prefix: shard.prefix + key.slice(0, start),
+ key: key.slice(start, start + shard.maxKeyLength)
+ };
+ });
+ entry = [pfxskeys[pfxskeys.length - 1].key, value];
+ batcher = create8({
+ entries: [entry],
+ prefix: pfxskeys[pfxskeys.length - 1].prefix,
+ ...configure(shard)
+ });
+ for (let i = pfxskeys.length - 2; i > 0; i--) {
+ entry = [pfxskeys[i].key, [batcher]];
+ batcher = create8({
+ entries: [entry],
+ prefix: pfxskeys[i].prefix,
+ ...configure(shard)
+ });
+ }
+ entry = [pfxskeys[0].key, [batcher]];
+ }
+ shard.entries = putEntry(asShardEntries(shard.entries), asShardEntry(entry));
+ const size = encodedLength(withEntries(asShardEntries(shard.entries), shard));
+ if (size > shard.maxSize) {
+ const common = findCommonPrefix(
+ asShardEntries(shard.entries),
+ entry[0]
+ );
+ if (!common)
+ throw new Error("shard limit reached");
+ const { prefix } = common;
+ const matches = common.matches;
+ const entries3 = matches.filter((m) => m[0] !== prefix).map((m) => {
+ m = [...m];
+ m[0] = m[0].slice(prefix.length);
+ return m;
+ });
+ const batcher2 = create8({
+ entries: entries3,
+ prefix: shard.prefix + prefix,
+ ...configure(shard)
+ });
+ let value2;
+ const pfxmatch = matches.find((m) => m[0] === prefix);
+ if (pfxmatch) {
+ if (Array.isArray(pfxmatch[1])) {
+ throw new Error(`expected "${prefix}" to be a shard value but found a shard link`);
+ }
+ value2 = [batcher2, pfxmatch[1]];
+ } else {
+ value2 = [batcher2];
+ }
+ shard.entries = putEntry(
+ asShardEntries(shard.entries.filter((e) => matches.every((m) => e[0] !== m[0]))),
+ asShardEntry([prefix, value2])
+ );
+ }
+ };
+ var traverse2 = async (shards, key, shard) => {
+ for (const e of shard.entries) {
+ const [k, v] = e;
+ if (key <= k)
+ break;
+ if (key.startsWith(k) && Array.isArray(v)) {
+ if (isShardLink(v[0])) {
+ const blk = await shards.get(v[0], shard.prefix + k);
+ v[0] = create8({ base: blk, prefix: blk.prefix, ...blk.value });
+ }
+ return traverse2(shards, key.slice(k.length), v[0]);
+ }
+ }
+ return { shard, key };
+ };
+ var commit = async (shard) => {
+ const additions = [];
+ const removals = [];
+ const entries3 = [];
+ for (const entry of shard.entries) {
+ if (Array.isArray(entry[1]) && !isShardLink(entry[1][0])) {
+ const result = await commit(entry[1][0]);
+ entries3.push([
+ entry[0],
+ entry[1][1] == null ? [result.root] : [result.root, entry[1][1]]
+ ]);
+ additions.push(...result.additions);
+ removals.push(...result.removals);
+ } else {
+ entries3.push(asShardEntry(entry));
+ }
+ }
+ const block = await encodeBlock(withEntries(entries3, shard), shard.prefix);
+ additions.push(block);
+ if (shard.base && shard.base.cid.toString() === block.cid.toString()) {
+ return { root: block.cid, additions: [], removals: [] };
+ }
+ if (shard.base)
+ removals.push(shard.base);
+ return { root: block.cid, additions, removals };
+ };
+ var asShardEntries = (entries3) => (
+ /** @type {API.ShardEntry[]} */
+ entries3
+ );
+ var asShardEntry = (entry) => (
+ /** @type {API.ShardEntry} */
+ entry
+ );
+ var create9 = (blocks, root2) => Batcher.create({ blocks, link: root2, prefix: "" });
+ var BatchCommittedError = class _BatchCommittedError extends Error {
+ /**
+ * @param {string} [message]
+ * @param {ErrorOptions} [options]
+ */
+ constructor(message2, options) {
+ super(message2 ?? "batch already committed", options);
+ this.code = _BatchCommittedError.code;
+ }
+ static code = "ERR_BATCH_COMMITTED";
+ };
 
- // ../../node_modules/.pnpm/@alanshaw+pail@0.3.4/node_modules/@alanshaw/pail/src/crdt.js
- async function put2(blocks, head, key, value, options) {
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/crdt/index.js
+ var put3 = async (blocks, head, key, value) => {
  const mblocks = new MemoryBlockstore();
  blocks = new MultiBlockFetcher(mblocks, blocks);
  if (!head.length) {
  const shard = await ShardBlock.create();
  mblocks.putSync(shard.cid, shard.bytes);
- const result2 = await put(blocks, shard.cid, key, value, options);
+ const result2 = await put(blocks, shard.cid, key, value);
  const data2 = { type: "put", root: result2.root, key, value };
  const event2 = await EventBlock.create(data2, head);
  head = await advance(blocks, head, event2.cid);
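
Note on the batch module above: a `Batcher` accumulates puts in memory as nested batcher shards and only encodes blocks once, at `commit()`, sizing shards with `encodedLength` along the way; any call after commit throws `BatchCommittedError` (`ERR_BATCH_COMMITTED`). A hedged sketch against the published package (the `./batch` subpath is assumed from the bundle layout):

    import { ShardBlock } from '@web3-storage/pail'
    import * as Batch from '@web3-storage/pail/batch'
    import { MemoryBlockstore } from '@web3-storage/pail/block'
    import { CID } from 'multiformats/cid'

    const blocks = new MemoryBlockstore()
    const genesis = await ShardBlock.create()
    await blocks.put(genesis.cid, genesis.bytes)

    const value = CID.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')
    const batch = await Batch.create(blocks, genesis.cid)
    await batch.put('a', value)
    await batch.put('b', value)
    const { root, additions, removals } = await batch.commit()
    // A further batch.put(...) would now throw ERR_BATCH_COMMITTED.
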
@@ -11404,10 +11692,22 @@ You can use close({ resize: true }) to resize header`);
  const additions = /* @__PURE__ */ new Map();
  const removals = /* @__PURE__ */ new Map();
  for (const { value: event2 } of sorted) {
- if (!["put", "del"].includes(event2.data.type)) {
- throw new Error(`unknown event type: ${event2.data.type}`);
+ let result2;
+ if (event2.data.type === "put") {
+ result2 = await put(blocks, root2, event2.data.key, event2.data.value);
+ } else if (event2.data.type === "del") {
+ result2 = await del(blocks, root2, event2.data.key);
+ } else if (event2.data.type === "batch") {
+ const batch2 = await create9(blocks, root2);
+ for (const op of event2.data.ops) {
+ if (op.type !== "put")
+ throw new Error(`unsupported batch operation: ${op.type}`);
+ await batch2.put(op.key, op.value);
+ }
+ result2 = await batch2.commit();
+ } else {
+ throw new Error(`unknown operation: ${event2.data.type}`);
  }
- const result2 = event2.data.type === "put" ? await put(blocks, root2, event2.data.key, event2.data.value) : await del(blocks, root2, event2.data.key);
  root2 = result2.root;
  for (const a of result2.additions) {
  mblocks.putSync(a.cid, a.bytes);
@@ -11417,7 +11717,7 @@ You can use close({ resize: true }) to resize header`);
  removals.set(r.cid.toString(), r);
  }
  }
- const result = await put(blocks, root2, key, value, options);
+ const result = await put(blocks, root2, key, value);
  if (result.root.toString() === root2.toString()) {
  return { root: root2, additions: [], removals: [], head };
  }
@@ -11445,8 +11745,8 @@ You can use close({ resize: true }) to resize header`);
  head,
  event
  };
- }
- async function root(blocks, head) {
+ };
+ var root = async (blocks, head) => {
  if (!head.length)
  throw new Error("cannot determine root of headless clock");
  const mblocks = new MemoryBlockstore();
@@ -11466,10 +11766,22 @@ You can use close({ resize: true }) to resize header`);
  const additions = /* @__PURE__ */ new Map();
  const removals = /* @__PURE__ */ new Map();
  for (const { value: event } of sorted) {
- if (!["put", "del"].includes(event.data.type)) {
- throw new Error(`unknown event type: ${event.data.type}`);
+ let result;
+ if (event.data.type === "put") {
+ result = await put(blocks, root2, event.data.key, event.data.value);
+ } else if (event.data.type === "del") {
+ result = await del(blocks, root2, event.data.key);
+ } else if (event.data.type === "batch") {
+ const batch2 = await create9(blocks, root2);
+ for (const op of event.data.ops) {
+ if (op.type !== "put")
+ throw new Error(`unsupported batch operation: ${op.type}`);
+ await batch2.put(op.key, op.value);
+ }
+ result = await batch2.commit();
+ } else {
+ throw new Error(`unknown operation: ${event.data.type}`);
  }
- const result = event.data.type === "put" ? await put(blocks, root2, event.data.key, event.data.value) : await del(blocks, root2, event.data.key);
  root2 = result.root;
  for (const a of result.additions) {
  mblocks.putSync(a.cid, a.bytes);
@@ -11490,8 +11802,8 @@ You can use close({ resize: true }) to resize header`);
  additions: [...additions.values()],
  removals: [...removals.values()]
  };
- }
- async function get3(blocks, head, key) {
+ };
+ var get3 = async (blocks, head, key) => {
  if (!head.length)
  return;
  const result = await root(blocks, head);
@@ -11499,8 +11811,8 @@ You can use close({ resize: true }) to resize header`);
  blocks = new MultiBlockFetcher(new MemoryBlockstore(result.additions), blocks);
  }
  return get2(blocks, result.root, key);
- }
- async function* entries2(blocks, head, options) {
+ };
+ var entries2 = async function* (blocks, head, options) {
  if (!head.length)
  return;
  const result = await root(blocks, head);
@@ -11508,8 +11820,8 @@ You can use close({ resize: true }) to resize header`);
  blocks = new MultiBlockFetcher(new MemoryBlockstore(result.additions), blocks);
  }
  yield* entries(blocks, result.root, options);
- }
- async function findCommonAncestor(events, children) {
+ };
+ var findCommonAncestor = async (events, children) => {
  if (!children.length)
  return;
  const candidates = children.map((c) => [c]);
@@ -11528,14 +11840,14 @@ You can use close({ resize: true }) to resize header`);
  if (!changed)
  return;
  }
- }
- async function findAncestorCandidate(events, root2) {
+ };
+ var findAncestorCandidate = async (events, root2) => {
  const { value: event } = await events.get(root2);
  if (!event.parents.length)
  return root2;
  return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents);
- }
- function findCommonString(arrays) {
+ };
+ var findCommonString = (arrays) => {
  arrays = arrays.map((a) => [...a]);
  for (const arr of arrays) {
  for (const item of arr) {
@@ -11551,9 +11863,9 @@ You can use close({ resize: true }) to resize header`);
  return item;
  }
  }
- }
- async function findSortedEvents(events, head, tail) {
- if (head.length === 1 && String(head[0]) === String(tail)) {
+ };
+ var findSortedEvents = async (events, head, tail) => {
+ if (head.length === 1 && head[0].toString() === tail.toString()) {
  return [];
  }
  const weights = /* @__PURE__ */ new Map();
@@ -11578,8 +11890,8 @@ You can use close({ resize: true }) to resize header`);
  }
  }
  return Array.from(buckets).sort((a, b) => b[0] - a[0]).flatMap(([, es]) => es.sort((a, b) => String(a.cid) < String(b.cid) ? -1 : 1));
- }
- async function findEvents(events, start, end, depth = 0) {
+ };
+ var findEvents = async (events, start, end, depth = 0) => {
  const event = await events.get(start);
  const acc = [{ event, depth }];
  const { parents } = event.value;
@@ -11587,7 +11899,128 @@ You can use close({ resize: true }) to resize header`);
  return acc;
  const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)));
  return acc.concat(...rest);
- }
+ };
+
+ // ../../node_modules/.pnpm/@web3-storage+pail@0.4.0/node_modules/@web3-storage/pail/src/crdt/batch/index.js
+ var Batcher2 = class _Batcher {
+ #committed = false;
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.EventLink<API.Operation>[]} init.head Merkle clock head.
+ * @param {API.BatcherShardEntry[]} init.entries The entries in this shard.
+ * @param {string} init.prefix Key prefix.
+ * @param {number} init.maxSize
+ * @param {number} init.maxKeyLength
+ * @param {API.ShardBlockView} init.base Original shard this batcher is based on.
+ * @param {API.ShardBlockView[]} init.additions Additions to include in the committed batch.
+ * @param {API.ShardBlockView[]} init.removals Removals to include in the committed batch.
+ */
+ constructor({ blocks, head, entries: entries3, prefix, maxSize, maxKeyLength, base: base4, additions, removals }) {
+ this.blocks = blocks;
+ this.head = head;
+ this.prefix = prefix;
+ this.entries = entries3;
+ this.base = base4;
+ this.maxSize = maxSize;
+ this.maxKeyLength = maxKeyLength;
+ this.additions = additions;
+ this.removals = removals;
+ this.ops = [];
+ }
+ /**
+ * @param {string} key The key of the value to put.
+ * @param {API.UnknownLink} value The value to put.
+ * @returns {Promise<void>}
+ */
+ async put(key, value) {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ await put2(this.blocks, this, key, value);
+ this.ops.push({ type: "put", key, value });
+ }
+ async commit() {
+ if (this.#committed)
+ throw new BatchCommittedError();
+ this.#committed = true;
+ const res = await commit(this);
+ const data = { type: "batch", ops: this.ops, root: res.root };
+ const event = await EventBlock.create(data, this.head);
+ const mblocks = new MemoryBlockstore();
+ const blocks = new MultiBlockFetcher(mblocks, this.blocks);
+ mblocks.putSync(event.cid, event.bytes);
+ const head = await advance(blocks, this.head, event.cid);
+ const additions = /* @__PURE__ */ new Map();
+ const removals = /* @__PURE__ */ new Map();
+ for (const a of this.additions) {
+ additions.set(a.cid.toString(), a);
+ }
+ for (const r of this.removals) {
+ removals.set(r.cid.toString(), r);
+ }
+ for (const a of res.additions) {
+ if (removals.has(a.cid.toString())) {
+ removals.delete(a.cid.toString());
+ }
+ additions.set(a.cid.toString(), a);
+ }
+ for (const r of res.removals) {
+ if (additions.has(r.cid.toString())) {
+ additions.delete(r.cid.toString());
+ } else {
+ removals.set(r.cid.toString(), r);
+ }
+ }
+ return {
+ head,
+ event,
+ root: res.root,
+ additions: [...additions.values()],
+ removals: [...removals.values()]
+ };
+ }
+ /**
+ * @param {object} init
+ * @param {API.BlockFetcher} init.blocks Block storage.
+ * @param {API.EventLink<API.Operation>[]} init.head Merkle clock head.
+ * @param {string} init.prefix
+ */
+ static async create({ blocks, head, prefix }) {
+ const mblocks = new MemoryBlockstore();
+ blocks = new MultiBlockFetcher(mblocks, blocks);
+ if (!head.length) {
+ const base5 = await ShardBlock.create();
+ mblocks.putSync(base5.cid, base5.bytes);
+ return new _Batcher({
+ blocks,
+ head,
+ entries: [],
+ prefix,
+ base: base5,
+ additions: [base5],
+ removals: [],
+ ...configure(base5.value)
+ });
+ }
+ const { root: root2, additions, removals } = await root(blocks, head);
+ for (const a of additions) {
+ mblocks.putSync(a.cid, a.bytes);
+ }
+ const shards = new ShardFetcher(blocks);
+ const base4 = await shards.get(root2);
+ return new _Batcher({
+ blocks,
+ head,
+ entries: base4.value.entries,
+ prefix,
+ base: base4,
+ additions,
+ removals,
+ ...configure(base4.value)
+ });
+ }
+ };
+ var create10 = (blocks, head) => Batcher2.create({ blocks, head, prefix: "" });
 
  // ../../node_modules/.pnpm/@ipld+unixfs@2.1.2/node_modules/@ipld/unixfs/src/codec.js
  var codec_exports = {};
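
Note on the crdt/batch module above: this batcher wraps the shard batcher so that `commit()` encodes the pending shards, records every put in a single `{ type: "batch", ops, root }` clock event, and advances the merkle-clock head; the new `"batch"` branches in the event-replay loops earlier in this diff are what consume those events. A hedged end-to-end sketch (subpaths assumed from the bundle layout):

    import * as CRDT from '@web3-storage/pail/crdt'
    import * as CRDTBatch from '@web3-storage/pail/crdt/batch'
    import { MemoryBlockstore } from '@web3-storage/pail/block'
    import { CID } from 'multiformats/cid'

    const blocks = new MemoryBlockstore()
    const value = CID.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')

    const batch = await CRDTBatch.create(blocks, /* head */ [])
    await batch.put('a', value)
    await batch.put('b', value)
    const { head, event, additions } = await batch.commit() // one "batch" event covers both puts
    await blocks.put(event.cid, event.bytes)
    for (const b of additions) await blocks.put(b.cid, b.bytes)
    console.log(await CRDT.get(blocks, head, 'a')) // -> the stored CID
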
@@ -14392,7 +14825,7 @@ You can use close({ resize: true }) to resize header`);
  };
 
  // ../../node_modules/.pnpm/multiformats@11.0.2/node_modules/multiformats/src/hashes/digest.js
- var create7 = (code8, digest3) => {
+ var create11 = (code8, digest3) => {
  const size = digest3.byteLength;
  const sizeOffset = encodingLength3(code8);
  const digestOffset = sizeOffset + encodingLength3(size);
@@ -14461,7 +14894,7 @@ You can use close({ resize: true }) to resize header`);
  digest(input) {
  if (input instanceof Uint8Array) {
  const result = this.encode(input);
- return result instanceof Uint8Array ? create7(this.code, result) : result.then((digest3) => create7(this.code, digest3));
+ return result instanceof Uint8Array ? create11(this.code, result) : result.then((digest3) => create11(this.code, digest3));
  } else {
  throw Error("Unknown type, must be binary type");
  }
@@ -15008,7 +15441,7 @@ You can use close({ resize: true }) to resize header`);
  switch (this.version) {
  case 0: {
  const { code: code8, digest: digest3 } = this.multihash;
- const multihash = create7(code8, digest3);
+ const multihash = create11(code8, digest3);
  return (
  /** @type {CID<Data, Format, Alg, 1>} */
  _CID.createV1(this.code, multihash)
@@ -15539,7 +15972,7 @@ You can use close({ resize: true }) to resize header`);
15539
15972
  hasher: sha2562,
15540
15973
  linker: { createLink: CID3.createV1 }
15541
15974
  });
15542
- var configure = (config2) => ({
15975
+ var configure2 = (config2) => ({
15543
15976
  ...defaults2(),
15544
15977
  ...config2
15545
15978
  });
@@ -15548,7 +15981,7 @@ You can use close({ resize: true }) to resize header`);
15548
15981
  name: name3,
15549
15982
  encode: encodeFileChunk
15550
15983
  };
15551
- var create8 = ({ writer, metadata = {}, settings: settings2 = defaults2() }) => new FileWriterView(init(writer, metadata, configure(settings2)));
15984
+ var create12 = ({ writer, metadata = {}, settings: settings2 = defaults2() }) => new FileWriterView(init(writer, metadata, configure2(settings2)));
15552
15985
  var write4 = async (view, bytes) => {
15553
15986
  await perform(view, send({ type: "write", bytes }));
15554
15987
  return view;
@@ -15607,7 +16040,7 @@ You can use close({ resize: true }) to resize header`);
15607
16040
 
15608
16041
  // ../../node_modules/.pnpm/@ipld+unixfs@2.1.2/node_modules/@ipld/unixfs/src/directory.js
15609
16042
  var defaults3 = defaults2;
15610
- var create9 = ({ writer, settings: settings2 = defaults3(), metadata = {} }) => new DirectoryWriter({
16043
+ var create13 = ({ writer, settings: settings2 = defaults3(), metadata = {} }) => new DirectoryWriter({
15611
16044
  writer,
15612
16045
  metadata,
15613
16046
  settings: settings2,
@@ -15808,7 +16241,7 @@ You can use close({ resize: true }) to resize header`);
15808
16241
  /** @type {Uint8Array} */
15809
16242
  murmur364.encode(bytes)
15810
16243
  );
15811
- var configure4 = ({ bitWidth: bitWidth2 = 8, hash = hash64 }) => {
16244
+ var configure5 = ({ bitWidth: bitWidth2 = 8, hash = hash64 }) => {
15812
16245
  const hashSize = hash(new Uint8Array()).byteLength;
15813
16246
  const options = { bitWidth: bitWidth2, hash, hashSize };
15814
16247
  const at = (path, depth) => read4(path, depth, options);
@@ -15845,7 +16278,7 @@ You can use close({ resize: true }) to resize header`);
15845
16278
  var bitWidth = 8;
15846
16279
  var config = {
15847
16280
  bitWidth,
15848
- Path: configure4({ bitWidth })
16281
+ Path: configure5({ bitWidth })
15849
16282
  };
15850
16283
 
15851
16284
  // ../../node_modules/.pnpm/@ipld+unixfs@2.1.2/node_modules/@ipld/unixfs/src/lib.js
@@ -15869,14 +16302,14 @@ You can use close({ resize: true }) to resize header`);
15869
16302
  */
15870
16303
  constructor({ writer, settings: settings2 }) {
15871
16304
  this.writer = writer;
15872
- this.settings = configure(settings2);
16305
+ this.settings = configure2(settings2);
15873
16306
  }
15874
16307
  /**
15875
16308
  * @template [L=unknown]
15876
16309
  * @param {API.WriterOptions<L|Layout>} config
15877
16310
  */
15878
16311
  createFileWriter({ settings: settings2 = this.settings, metadata } = {}) {
15879
- return create8({
16312
+ return create12({
15880
16313
  writer: this.writer,
15881
16314
  settings: settings2,
15882
16315
  metadata
@@ -15887,7 +16320,7 @@ You can use close({ resize: true }) to resize header`);
15887
16320
  * @param {API.WriterOptions<L|Layout>} config
15888
16321
  */
15889
16322
  createDirectoryWriter({ settings: settings2 = this.settings, metadata } = {}) {
15890
- return create9({
16323
+ return create13({
15891
16324
  writer: this.writer,
15892
16325
  settings: settings2,
15893
16326
  metadata
@@ -19159,7 +19592,7 @@ You can use close({ resize: true }) to resize header`);
19159
19592
  var import_err_code7 = __toESM(require_err_code(), 1);
19160
19593
 
19161
19594
  // ../../node_modules/.pnpm/eventemitter3@5.0.1/node_modules/eventemitter3/index.mjs
19162
- var import_index6 = __toESM(require_eventemitter3(), 1);
19595
+ var import_index5 = __toESM(require_eventemitter3(), 1);
19163
19596
 
19164
19597
  // ../../node_modules/.pnpm/p-timeout@6.1.2/node_modules/p-timeout/index.js
19165
19598
  var TimeoutError = class extends Error {
@@ -19294,7 +19727,7 @@ You can use close({ resize: true }) to resize header`);
19294
19727
  };
19295
19728
 
19296
19729
  // ../../node_modules/.pnpm/p-queue@8.0.1/node_modules/p-queue/dist/index.js
19297
- var PQueue = class extends import_index6.default {
19730
+ var PQueue = class extends import_index5.default {
19298
19731
  #carryoverConcurrencyCount;
19299
19732
  #isIntervalIgnored;
19300
19733
  #intervalCount = 0;
@@ -19936,7 +20369,7 @@ You can use close({ resize: true }) to resize header`);
19936
20369
 
19937
20370
  // src/files.ts
19938
20371
  var queuingStrategy = withCapacity();
19939
- var settings = configure({
20372
+ var settings = configure2({
19940
20373
  fileChunkEncoder: raw_exports,
19941
20374
  smallFileEncoder: raw_exports,
19942
20375
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call
@@ -19985,7 +20418,7 @@ You can use close({ resize: true }) to resize header`);
19985
20418
  this.#file = file;
19986
20419
  }
19987
20420
  async finalize(writer) {
19988
- const unixfsFileWriter = create8(writer);
20421
+ const unixfsFileWriter = create12(writer);
19989
20422
  await this.#file.stream().pipeTo(
19990
20423
  new WritableStream({
19991
20424
  async write(chunk) {
@@ -20002,30 +20435,39 @@ You can use close({ resize: true }) to resize header`);
20002
20435
  }
20003
20436
  function timeEnd(tag2) {
20004
20437
  }
20005
- async function applyBulkUpdateToCrdt(tblocks, head, updates, options) {
20006
- let result;
20007
- for (const update2 of updates) {
20008
- const link2 = await writeDocContent(tblocks, update2);
20009
- result = await put2(tblocks, head, update2.key, link2, options);
20010
- const resRoot = result.root.toString();
20011
- const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
20012
- if (!isReturned) {
20013
- const hasRoot = await tblocks.get(result.root);
20014
- if (!hasRoot) {
20015
- throw new Error(
20016
- `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
20017
- );
20018
- result.head = head;
20019
- }
20438
+ async function applyBulkUpdateToCrdt(tblocks, head, updates) {
20439
+ let result = null;
20440
+ if (updates.length > 1) {
20441
+ const batch2 = await create10(tblocks, head);
20442
+ for (const update2 of updates) {
20443
+ const link2 = await writeDocContent(tblocks, update2);
20444
+ await batch2.put(update2.key, link2);
20020
20445
  }
20021
- if (result.event) {
20022
- for (const { cid, bytes } of [...result.additions, result.event]) {
20023
- tblocks.putSync(cid, bytes);
20446
+ result = await batch2.commit();
20447
+ } else {
20448
+ for (const update2 of updates) {
20449
+ const link2 = await writeDocContent(tblocks, update2);
20450
+ result = await put3(tblocks, head, update2.key, link2);
20451
+ const resRoot = result.root.toString();
20452
+ const isReturned = result.additions.some((a) => a.cid.toString() === resRoot);
20453
+ if (!isReturned) {
20454
+ const hasRoot = await tblocks.get(result.root);
20455
+ if (!hasRoot) {
20456
+ throw new Error(
20457
+ `missing root in additions: ${result.additions.length} ${resRoot} keys: ${updates.map((u) => u.key).toString()}`
20458
+ );
20459
+ }
20024
20460
  }
20025
- head = result.head;
20026
20461
  }
20027
20462
  }
20028
- return { head };
20463
+ if (!result)
20464
+ throw new Error("Missing result");
20465
+ if (result.event) {
20466
+ for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
20467
+ tblocks.putSync(cid, bytes);
20468
+ }
20469
+ }
20470
+ return { head: result.head };
20029
20471
  }
20030
20472
  async function writeDocContent(blocks, update2) {
20031
20473
  let value;
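De-minified, the rewritten write path above boils down to: multi-document updates go through a single batcher commit (one merkle-clock event for the whole batch) instead of one `put` per key, and committed removals are now persisted alongside the additions and the event block. A rough sketch of that control flow, reusing names from the diff (`create10` is the batcher factory shown earlier; the single-update root verification and the empty-updates guard are elided for brevity):

```js
// Sketch of the batched bulk-update flow introduced above (not the exact
// library source; error checking is omitted).
async function applyBulkUpdate(tblocks, head, updates) {
  let result;
  if (updates.length > 1) {
    const batch = await create10(tblocks, head); // one batcher over the current head
    for (const update of updates) {
      const link = await writeDocContent(tblocks, update);
      await batch.put(update.key, link);
    }
    result = await batch.commit(); // a single clock event covers every key
  } else {
    const [update] = updates;
    const link = await writeDocContent(tblocks, update);
    result = await put3(tblocks, head, update.key, link);
  }
  if (result.event) {
    // Persist net additions, net removals, and the event block itself
    // before advancing the head.
    for (const { cid, bytes } of [...result.additions, ...result.removals, result.event]) {
      tblocks.putSync(cid, bytes);
    }
  }
  return { head: result.head };
}
```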
@@ -20163,37 +20605,33 @@ You can use close({ resize: true }) to resize header`);
20163
20605
  const { value: event } = await eventsFetcher.get(link2);
20164
20606
  if (!event)
20165
20607
  continue;
20166
- const { key, value } = event.data;
20167
- if (keys.has(key)) {
20168
- if (event.parents) {
20169
- updates = await gatherUpdates(
20170
- blocks,
20171
- eventsFetcher,
20172
- event.parents,
20173
- since,
20174
- updates,
20175
- keys,
20176
- didLinks,
20177
- limit
20178
- );
20179
- }
20180
- } else {
20181
- keys.add(key);
20182
- const docValue = await getValueFromLink(blocks, value);
20183
- updates.push({ key, value: docValue.doc, del: docValue.del, clock: link2 });
20184
- limit--;
20185
- if (event.parents) {
20186
- updates = await gatherUpdates(
20187
- blocks,
20188
- eventsFetcher,
20189
- event.parents,
20190
- since,
20191
- updates,
20192
- keys,
20193
- didLinks,
20194
- limit
20195
- );
20196
- }
20608
+ const { type: type2 } = event.data;
20609
+ let ops = [];
20610
+ if (type2 === "batch") {
20611
+ ops = event.data.ops;
20612
+ } else if (type2 === "put") {
20613
+ ops = [event.data];
20614
+ }
20615
+ for (let i = ops.length - 1; i >= 0; i--) {
20616
+ const { key, value } = ops[i];
20617
+ if (!keys.has(key)) {
20618
+ const docValue = await getValueFromLink(blocks, value);
20619
+ updates.push({ key, value: docValue.doc, del: docValue.del, clock: link2 });
20620
+ limit--;
20621
+ keys.add(key);
20622
+ }
20623
+ }
20624
+ if (event.parents) {
20625
+ updates = await gatherUpdates(
20626
+ blocks,
20627
+ eventsFetcher,
20628
+ event.parents,
20629
+ since,
20630
+ updates,
20631
+ keys,
20632
+ didLinks,
20633
+ limit
20634
+ );
20197
20635
  }
20198
20636
  }
20199
20637
  return updates;
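`gatherUpdates` now has to unpack two event shapes: a plain put (`{ type: "put", key, value }`) and a batch (`{ type: "batch", ops }`). Ops are walked newest-first so the last write to a key within a batch wins, and keys already collected from more recent events are skipped. A minimal sketch of that de-duplication, using plain objects in place of fetched clock events (the `since` cut-off, the recursion into parents, and document resolution via `getValueFromLink` are all omitted):

```js
// Collect the newest op per key across a list of clock events, mirroring
// the reverse walk in gatherUpdates above. Events are assumed to arrive
// newest-first, as the parent traversal above produces them.
function latestOps(events) {
  const seen = new Set();
  const updates = [];
  for (const event of events) {
    const ops =
      event.type === 'batch' ? event.ops :
      event.type === 'put' ? [event] : [];
    for (let i = ops.length - 1; i >= 0; i--) {
      const { key, value } = ops[i];
      if (!seen.has(key)) {
        seen.add(key);
        updates.push({ key, value });
      }
    }
  }
  return updates;
}

// Within one batch the later write to 'a' shadows the earlier one:
latestOps([{ type: 'batch', ops: [{ key: 'a', value: 1 }, { key: 'a', value: 2 }] }]);
// → [{ key: 'a', value: 2 }]
```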
@@ -20366,7 +20804,7 @@ You can use close({ resize: true }) to resize header`);
20366
20804
  });
20367
20805
  return getNode;
20368
20806
  };
20369
- var create12 = ({ get: get7, cache: cache4, chunker: chunker3, list, codec, hasher, sorted, compare: compare5, ...opts }) => {
20807
+ var create16 = ({ get: get7, cache: cache4, chunker: chunker3, list, codec, hasher, sorted, compare: compare5, ...opts }) => {
20370
20808
  if (!sorted)
20371
20809
  list = list.sort(({ key: a }, { key: b }) => compare5(a, b));
20372
20810
  const getNode = createGetNode2(get7, cache4, chunker3, codec, hasher, compare5, opts);
@@ -20547,12 +20985,12 @@ You can use close({ resize: true }) to resize header`);
20547
20985
  ...classes2,
20548
20986
  compare: compare3
20549
20987
  };
20550
- var create13 = (opts) => {
20988
+ var create17 = (opts) => {
20551
20989
  opts = {
20552
20990
  ...defaults4,
20553
20991
  ...opts
20554
20992
  };
20555
- return create12(opts);
20993
+ return create16(opts);
20556
20994
  };
20557
20995
  var load3 = (opts) => {
20558
20996
  opts = {
@@ -20628,7 +21066,7 @@ You can use close({ resize: true }) to resize header`);
20628
21066
  if (!inIndex.cid) {
20629
21067
  let returnRootBlock = null;
20630
21068
  let returnNode = null;
20631
- for await (const node of await create13({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
21069
+ for await (const node of await create17({ get: makeProllyGetBlock(tblocks), list: indexEntries, ...opts })) {
20632
21070
  const block = await node.block;
20633
21071
  await tblocks.put(block.cid, block.bytes);
20634
21072
  returnRootBlock = block;
@@ -21102,12 +21540,12 @@ You can use close({ resize: true }) to resize header`);
21102
21540
  }
21103
21541
  });
21104
21542
  }
21105
- async bulk(updates, options) {
21543
+ async bulk(updates) {
21106
21544
  await this.ready;
21107
21545
  const prevHead = [...this.clock.head];
21108
21546
  const meta = await this.blockstore.transaction(
21109
21547
  async (blocks) => {
21110
- const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates, options);
21548
+ const { head } = await applyBulkUpdateToCrdt(blocks, this.clock.head, updates);
21111
21549
  updates = updates.map(({ key, value, del: del2, clock }) => {
21112
21550
  readFiles(this.blockstore, { doc: value });
21113
21551
  return { key, value, del: del2, clock };