@ipld/car 4.0.0 → 4.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +109 -0
  2. package/api.ts +22 -4
  3. package/buffer-writer +1 -0
  4. package/cjs/browser-test/common.js +3 -3
  5. package/cjs/browser-test/node-test-large.js +8 -8
  6. package/cjs/browser-test/test-buffer-writer.js +330 -0
  7. package/cjs/browser-test/test-errors.js +2 -2
  8. package/cjs/browser-test/test-indexer.js +1 -1
  9. package/cjs/browser-test/test-reader.js +2 -2
  10. package/cjs/browser-test/test-writer.js +3 -3
  11. package/cjs/lib/buffer-writer.js +161 -0
  12. package/cjs/lib/decoder.js +2 -2
  13. package/cjs/lib/encoder.js +3 -3
  14. package/cjs/lib/iterator-channel.js +1 -1
  15. package/cjs/node-test/common.js +3 -3
  16. package/cjs/node-test/node-test-large.js +8 -8
  17. package/cjs/node-test/test-buffer-writer.js +330 -0
  18. package/cjs/node-test/test-errors.js +2 -2
  19. package/cjs/node-test/test-indexer.js +1 -1
  20. package/cjs/node-test/test-reader.js +2 -2
  21. package/cjs/node-test/test-writer.js +3 -3
  22. package/esm/browser-test/test-buffer-writer.js +311 -0
  23. package/esm/browser-test/test-indexer.js +1 -1
  24. package/esm/browser-test/test-reader.js +2 -2
  25. package/esm/browser-test/test-writer.js +3 -3
  26. package/esm/lib/buffer-writer.js +126 -0
  27. package/esm/lib/encoder.js +1 -1
  28. package/esm/lib/iterator-channel.js +1 -1
  29. package/esm/node-test/test-buffer-writer.js +311 -0
  30. package/esm/node-test/test-indexer.js +1 -1
  31. package/esm/node-test/test-reader.js +2 -2
  32. package/esm/node-test/test-writer.js +3 -3
  33. package/lib/buffer-writer.js +286 -0
  34. package/lib/encoder.js +1 -1
  35. package/lib/iterator-channel.js +1 -1
  36. package/package.json +14 -4
  37. package/test/test-buffer-writer.js +256 -0
  38. package/test/test-indexer.js +1 -1
  39. package/test/test-reader.js +2 -2
  40. package/test/test-writer.js +3 -3
  41. package/tsconfig.json +1 -0
  42. package/types/api.d.ts +16 -0
  43. package/types/api.d.ts.map +1 -1
  44. package/types/lib/buffer-writer.d.ts +86 -0
  45. package/types/lib/buffer-writer.d.ts.map +1 -0
  46. package/types/test/test-buffer-writer.d.ts +2 -0
  47. package/types/test/test-buffer-writer.d.ts.map +1 -0
@@ -0,0 +1,311 @@
1
+ import * as CarBufferWriter from '../lib/buffer-writer.js';
2
+ import { CarReader } from '../lib/reader.js';
3
+ import { createHeader } from '../lib/encoder.js';
4
+ import { assert } from './common.js';
5
+ import {
6
+ CID,
7
+ varint
8
+ } from 'multiformats';
9
+ import * as CBOR from '@ipld/dag-cbor';
10
+ import {
11
+ sha256,
12
+ sha512
13
+ } from 'multiformats/hashes/sha2';
14
+ import { identity } from 'multiformats/hashes/identity';
15
+ import * as Raw from 'multiformats/codecs/raw';
16
+ import * as Block from 'multiformats/block';
17
+ describe('CarBufferWriter', () => {
18
+ const cid = CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu');
19
+ describe('calculateHeaderLength', async () => {
20
+ for (const count of [
21
+ 0,
22
+ 1,
23
+ 10,
24
+ 18,
25
+ 24,
26
+ 48,
27
+ 124,
28
+ 255,
29
+ 258,
30
+ 65536 - 1,
31
+ 65536
32
+ ]) {
33
+ it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
34
+ const roots = new Array(count).fill(cid);
35
+ const sizes = new Array(count).fill(cid.bytes.byteLength);
36
+ assert.deepEqual(CarBufferWriter.calculateHeaderLength(sizes), createHeader(roots).byteLength);
37
+ });
38
+ it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
39
+ const roots = new Array(count).fill(cid);
40
+ const rootLengths = roots.map(c => c.bytes.byteLength);
41
+ assert.deepEqual(CarBufferWriter.calculateHeaderLength(rootLengths), createHeader(roots).byteLength);
42
+ });
43
+ }
44
+ it('estimate on large CIDs', () => {
45
+ const largeCID = CID.parse(`bafkqbbac${ 'a'.repeat(416) }`);
46
+ assert.equal(CarBufferWriter.calculateHeaderLength([
47
+ cid.bytes.byteLength,
48
+ largeCID.bytes.byteLength
49
+ ]), createHeader([
50
+ cid,
51
+ largeCID
52
+ ]).byteLength);
53
+ });
54
+ it('estimate on large CIDs 2', () => {
55
+ const largeCID = CID.createV1(Raw.code, identity.digest(new Uint8Array(512).fill(1)));
56
+ assert.equal(CarBufferWriter.calculateHeaderLength([
57
+ cid.bytes.byteLength,
58
+ largeCID.bytes.byteLength
59
+ ]), createHeader([
60
+ cid,
61
+ largeCID
62
+ ]).byteLength);
63
+ });
64
+ });
65
+ describe('writer', () => {
66
+ it('estimate header and write blocks', async () => {
67
+ const headerSize = CarBufferWriter.estimateHeaderLength(1);
68
+ const dataSize = 256;
69
+ const buffer = new ArrayBuffer(headerSize + dataSize);
70
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize });
71
+ const b1 = await Block.encode({
72
+ value: { hello: 'world' },
73
+ codec: CBOR,
74
+ hasher: sha256
75
+ });
76
+ writer.write(b1);
77
+ const b2 = await Block.encode({
78
+ value: { bye: 'world' },
79
+ codec: CBOR,
80
+ hasher: sha256
81
+ });
82
+ writer.write(b2);
83
+ writer.addRoot(b1.cid);
84
+ const bytes = writer.close();
85
+ const reader = await CarReader.fromBytes(bytes);
86
+ assert.deepEqual(await reader.getRoots(), [b1.cid]);
87
+ assert.deepEqual(reader._blocks, [
88
+ {
89
+ cid: b1.cid,
90
+ bytes: b1.bytes
91
+ },
92
+ {
93
+ cid: b2.cid,
94
+ bytes: b2.bytes
95
+ }
96
+ ]);
97
+ });
98
+ it('overestimate header', async () => {
99
+ const headerSize = CarBufferWriter.estimateHeaderLength(2);
100
+ const dataSize = 256;
101
+ const buffer = new ArrayBuffer(headerSize + dataSize);
102
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize });
103
+ const b1 = await Block.encode({
104
+ value: { hello: 'world' },
105
+ codec: CBOR,
106
+ hasher: sha256
107
+ });
108
+ writer.write(b1);
109
+ const b2 = await Block.encode({
110
+ value: { bye: 'world' },
111
+ codec: CBOR,
112
+ hasher: sha256
113
+ });
114
+ writer.write(b2);
115
+ writer.addRoot(b1.cid);
116
+ assert.throws(() => writer.close(), /Header size was overestimate/);
117
+ const bytes = writer.close({ resize: true });
118
+ const reader = await CarReader.fromBytes(bytes);
119
+ assert.deepEqual(await reader.getRoots(), [b1.cid]);
120
+ assert.deepEqual(reader._blocks, [
121
+ {
122
+ cid: b1.cid,
123
+ bytes: b1.bytes
124
+ },
125
+ {
126
+ cid: b2.cid,
127
+ bytes: b2.bytes
128
+ }
129
+ ]);
130
+ });
131
+ it('underestimate header', async () => {
132
+ const headerSize = CarBufferWriter.estimateHeaderLength(2);
133
+ const dataSize = 300;
134
+ const buffer = new ArrayBuffer(headerSize + dataSize);
135
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize });
136
+ const b1 = await Block.encode({
137
+ value: { hello: 'world' },
138
+ codec: CBOR,
139
+ hasher: sha256
140
+ });
141
+ writer.write(b1);
142
+ writer.addRoot(b1.cid);
143
+ const b2 = await Block.encode({
144
+ value: { bye: 'world' },
145
+ codec: CBOR,
146
+ hasher: sha512
147
+ });
148
+ writer.write(b2);
149
+ assert.throws(() => writer.addRoot(b2.cid), /has no capacity/);
150
+ writer.addRoot(b2.cid, { resize: true });
151
+ const bytes = writer.close();
152
+ const reader = await CarReader.fromBytes(bytes);
153
+ assert.deepEqual(await reader.getRoots(), [
154
+ b1.cid,
155
+ b2.cid
156
+ ]);
157
+ assert.deepEqual(reader._blocks, [
158
+ {
159
+ cid: b1.cid,
160
+ bytes: b1.bytes
161
+ },
162
+ {
163
+ cid: b2.cid,
164
+ bytes: b2.bytes
165
+ }
166
+ ]);
167
+ });
168
+ });
169
+ it('has no space for the root', async () => {
170
+ const headerSize = CarBufferWriter.estimateHeaderLength(1);
171
+ const dataSize = 100;
172
+ const buffer = new ArrayBuffer(headerSize + dataSize);
173
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize });
174
+ const b1 = await Block.encode({
175
+ value: { hello: 'world' },
176
+ codec: CBOR,
177
+ hasher: sha256
178
+ });
179
+ writer.write(b1);
180
+ writer.addRoot(b1.cid);
181
+ const b2 = await Block.encode({
182
+ value: { bye: 'world' },
183
+ codec: CBOR,
184
+ hasher: sha256
185
+ });
186
+ writer.write(b2);
187
+ assert.throws(() => writer.addRoot(b2.cid), /Buffer has no capacity for a new root/);
188
+ assert.throws(() => writer.addRoot(b2.cid, { resize: true }), /Buffer has no capacity for a new root/);
189
+ const bytes = writer.close();
190
+ const reader = await CarReader.fromBytes(bytes);
191
+ assert.deepEqual(await reader.getRoots(), [b1.cid]);
192
+ assert.deepEqual(reader._blocks, [
193
+ {
194
+ cid: b1.cid,
195
+ bytes: b1.bytes
196
+ },
197
+ {
198
+ cid: b2.cid,
199
+ bytes: b2.bytes
200
+ }
201
+ ]);
202
+ });
203
+ it('has no space for the block', async () => {
204
+ const headerSize = CarBufferWriter.estimateHeaderLength(1);
205
+ const dataSize = 58;
206
+ const buffer = new ArrayBuffer(headerSize + dataSize);
207
+ const writer = CarBufferWriter.createWriter(buffer, { headerSize });
208
+ const b1 = await Block.encode({
209
+ value: { hello: 'world' },
210
+ codec: CBOR,
211
+ hasher: sha256
212
+ });
213
+ writer.write(b1);
214
+ writer.addRoot(b1.cid);
215
+ const b2 = await Block.encode({
216
+ value: { bye: 'world' },
217
+ codec: CBOR,
218
+ hasher: sha256
219
+ });
220
+ assert.throws(() => writer.write(b2), /Buffer has no capacity for this block/);
221
+ const bytes = writer.close();
222
+ const reader = await CarReader.fromBytes(bytes);
223
+ assert.deepEqual(await reader.getRoots(), [b1.cid]);
224
+ assert.deepEqual(reader._blocks, [{
225
+ cid: b1.cid,
226
+ bytes: b1.bytes
227
+ }]);
228
+ });
229
+ it('provide roots', async () => {
230
+ const b1 = await Block.encode({
231
+ value: { hello: 'world' },
232
+ codec: CBOR,
233
+ hasher: sha256
234
+ });
235
+ const b2 = await Block.encode({
236
+ value: { bye: 'world' },
237
+ codec: CBOR,
238
+ hasher: sha512
239
+ });
240
+ const buffer = new ArrayBuffer(300);
241
+ const writer = CarBufferWriter.createWriter(buffer, {
242
+ roots: [
243
+ b1.cid,
244
+ b2.cid
245
+ ]
246
+ });
247
+ writer.write(b1);
248
+ writer.write(b2);
249
+ const bytes = writer.close();
250
+ const reader = await CarReader.fromBytes(bytes);
251
+ assert.deepEqual(await reader.getRoots(), [
252
+ b1.cid,
253
+ b2.cid
254
+ ]);
255
+ assert.deepEqual(reader._blocks, [
256
+ {
257
+ cid: b1.cid,
258
+ bytes: b1.bytes
259
+ },
260
+ {
261
+ cid: b2.cid,
262
+ bytes: b2.bytes
263
+ }
264
+ ]);
265
+ });
266
+ it('provide large CID root', async () => {
267
+ const bytes = new Uint8Array(512).fill(1);
268
+ const b1 = await Block.encode({
269
+ value: { hello: 'world' },
270
+ codec: CBOR,
271
+ hasher: sha256
272
+ });
273
+ const b2 = {
274
+ cid: CID.createV1(Raw.code, identity.digest(bytes)),
275
+ bytes
276
+ };
277
+ const headerSize = CBOR.encode({
278
+ version: 1,
279
+ roots: [
280
+ b1.cid,
281
+ b2.cid
282
+ ]
283
+ }).byteLength;
284
+ const bodySize = CarBufferWriter.blockLength(b1) + CarBufferWriter.blockLength(b2);
285
+ const varintSize = varint.encodingLength(headerSize);
286
+ const writer = CarBufferWriter.createWriter(new ArrayBuffer(varintSize + headerSize + bodySize), {
287
+ roots: [
288
+ b1.cid,
289
+ b2.cid
290
+ ]
291
+ });
292
+ writer.write(b1);
293
+ writer.write(b2);
294
+ const car = writer.close();
295
+ const reader = await CarReader.fromBytes(car);
296
+ assert.deepEqual(await reader.getRoots(), [
297
+ b1.cid,
298
+ b2.cid
299
+ ]);
300
+ assert.deepEqual(reader._blocks, [
301
+ {
302
+ cid: b1.cid,
303
+ bytes: b1.bytes
304
+ },
305
+ {
306
+ cid: b2.cid,
307
+ bytes: b2.bytes
308
+ }
309
+ ]);
310
+ });
311
+ });
@@ -24,7 +24,7 @@ describe('CarIndexer fromBytes()', () => {
24
24
  const indexer = await CarIndexer.fromBytes(goCarV2Bytes);
25
25
  const roots = await indexer.getRoots();
26
26
  assert.strictEqual(roots.length, 1);
27
- assert(goCarV2Roots[0].equals(roots[0]));
27
+ assert.ok(goCarV2Roots[0].equals(roots[0]));
28
28
  assert.strictEqual(indexer.version, 2);
29
29
  const indexData = [];
30
30
  for await (const index of indexer) {
@@ -68,13 +68,13 @@ describe('CarReader fromBytes()', () => {
68
68
  const reader = await CarReader.fromBytes(goCarV2Bytes);
69
69
  const roots = await reader.getRoots();
70
70
  assert.strictEqual(roots.length, 1);
71
- assert(goCarV2Roots[0].equals(roots[0]));
71
+ assert.ok(goCarV2Roots[0].equals(roots[0]));
72
72
  assert.strictEqual(reader.version, 2);
73
73
  for (const {cid} of goCarV2Index) {
74
74
  const block = await reader.get(cid);
75
75
  assert.isDefined(block);
76
76
  if (block) {
77
- assert(cid.equals(block.cid));
77
+ assert.ok(cid.equals(block.cid));
78
78
  let content;
79
79
  if (cid.code === dagPb.code) {
80
80
  content = dagPb.decode(block.bytes);
@@ -193,9 +193,9 @@ describe('CarWriter', () => {
193
193
  const rawBytes = await append(0);
194
194
  const pbBytes = await append(1);
195
195
  const cborBytes = await append(2);
196
- assert(rawBytes.length > 0);
197
- assert(pbBytes.length > 0);
198
- assert(cborBytes.length > 0);
196
+ assert.ok(rawBytes.length > 0);
197
+ assert.ok(pbBytes.length > 0);
198
+ assert.ok(cborBytes.length > 0);
199
199
  const reassembled = concatBytes([
200
200
  headerBytes,
201
201
  rawBytes,
@@ -0,0 +1,286 @@
1
+ import varint from 'varint'
2
+ import { Token, Type } from 'cborg'
3
+ import { tokensToLength } from 'cborg/length'
4
+ import * as CBOR from '@ipld/dag-cbor'
5
+
6
+ /**
7
+ * @typedef {import('../api').CID} CID
8
+ * @typedef {import('../api').Block} Block
9
+ * @typedef {import('../api').CarBufferWriter} Writer
10
+ * @typedef {import('../api').CarBufferWriterOptions} Options
11
+ * @typedef {import('./coding').CarEncoder} CarEncoder
12
+ */
13
+
14
+ /**
15
+ * A simple CAR writer that writes to a pre-allocated buffer.
16
+ *
17
+ * @class
18
+ * @name CarBufferWriter
19
+ * @implements {Writer}
20
+ */
21
+ class CarBufferWriter {
22
+ /**
23
+ * @param {Uint8Array} bytes
24
+ * @param {number} headerSize
25
+ */
26
+ constructor (bytes, headerSize) {
27
+ /** @readonly */
28
+ this.bytes = bytes
29
+ this.byteOffset = headerSize
30
+
31
+ /**
32
+ * @readonly
33
+ * @type {CID[]}
34
+ */
35
+ this.roots = []
36
+ this.headerSize = headerSize
37
+ }
38
+
39
+ /**
40
+ * Add a root to this writer, to be used to create a header when the CAR is
41
+ * finalized with {@link CarBufferWriter.close `close()`}
42
+ *
43
+ * @param {CID} root
44
+ * @param {{resize?:boolean}} [options]
45
+ * @returns {CarBufferWriter}
46
+ */
47
+ addRoot (root, options) {
48
+ addRoot(this, root, options)
49
+ return this
50
+ }
51
+
52
+ /**
53
+ * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
54
+ * Throws if there is not enough capacity.
55
+ *
56
+ * @param {Block} block A `{ cid:CID, bytes:Uint8Array }` pair.
57
+ * @returns {CarBufferWriter}
58
+ */
59
+ write (block) {
60
+ addBlock(this, block)
61
+ return this
62
+ }
63
+
64
+ /**
65
+ * Finalize the CAR and return it as a `Uint8Array`.
66
+ *
67
+ * @param {object} [options]
68
+ * @param {boolean} [options.resize]
69
+ * @returns {Uint8Array}
70
+ */
71
+ close (options) {
72
+ return close(this, options)
73
+ }
74
+ }
75
+
76
+ /**
77
+ * @param {CarBufferWriter} writer
78
+ * @param {CID} root
79
+ * @param {{resize?:boolean}} [options]
80
+ */
81
+ export const addRoot = (writer, root, { resize = false } = {}) => {
82
+ const { bytes, headerSize, byteOffset, roots } = writer
83
+ writer.roots.push(root)
84
+ const size = headerLength(writer)
85
+ // If there is not enough space for the new root
86
+ if (size > headerSize) {
87
+ // Check if we root would fit if we were to resize the head.
88
+ if (size - headerSize + byteOffset < bytes.byteLength) {
89
+ // If resize is enabled resize head
90
+ if (resize) {
91
+ resizeHeader(writer, size)
92
+ // otherwise remove head and throw an error suggesting to resize
93
+ } else {
94
+ roots.pop()
95
+ throw new RangeError(`Header of size ${headerSize} has no capacity for new root ${root}.
96
+ However there is a space in the buffer and you could call addRoot(root, { resize: root }) to resize header to make a space for this root.`)
97
+ }
98
+ // If head would not fit even with resize pop new root and throw error
99
+ } else {
100
+ roots.pop()
101
+ throw new RangeError(`Buffer has no capacity for a new root ${root}`)
102
+ }
103
+ }
104
+ }
105
+
106
+ /**
107
+ * Calculates number of bytes required for storing given block in CAR. Useful in
108
+ * estimating size of an `ArrayBuffer` for the `CarBufferWriter`.
109
+ *
110
+ * @name CarBufferWriter.blockLength(Block)
111
+ * @param {Block} block
112
+ * @returns {number}
113
+ */
114
+ export const blockLength = ({ cid, bytes }) => {
115
+ const size = cid.bytes.byteLength + bytes.byteLength
116
+ return varint.encodingLength(size) + size
117
+ }
118
+
119
+ /**
120
+ * @param {CarBufferWriter} writer
121
+ * @param {Block} block
122
+ */
123
+ export const addBlock = (writer, { cid, bytes }) => {
124
+ const byteLength = cid.bytes.byteLength + bytes.byteLength
125
+ const size = varint.encode(byteLength)
126
+ if (writer.byteOffset + size.length + byteLength > writer.bytes.byteLength) {
127
+ throw new RangeError('Buffer has no capacity for this block')
128
+ } else {
129
+ writeBytes(writer, size)
130
+ writeBytes(writer, cid.bytes)
131
+ writeBytes(writer, bytes)
132
+ }
133
+ }
134
+
135
+ /**
136
+ * @param {CarBufferWriter} writer
137
+ * @param {object} [options]
138
+ * @param {boolean} [options.resize]
139
+ */
140
+ export const close = (writer, { resize = false } = {}) => {
141
+ const { roots, bytes, byteOffset, headerSize } = writer
142
+
143
+ const headerBytes = CBOR.encode({ version: 1, roots })
144
+ const varintBytes = varint.encode(headerBytes.length)
145
+
146
+ const size = varintBytes.length + headerBytes.byteLength
147
+ const offset = headerSize - size
148
+
149
+ // If header size estimate was accurate we just write header and return
150
+ // view into buffer.
151
+ if (offset === 0) {
152
+ writeHeader(writer, varintBytes, headerBytes)
153
+ return bytes.subarray(0, byteOffset)
154
+ // If header was overestimated and `{resize: true}` is passed resize header
155
+ } else if (resize) {
156
+ resizeHeader(writer, size)
157
+ writeHeader(writer, varintBytes, headerBytes)
158
+ return bytes.subarray(0, writer.byteOffset)
159
+ } else {
160
+ throw new RangeError(`Header size was overestimated.
161
+ You can use close({ resize: true }) to resize header`)
162
+ }
163
+ }
164
+
165
+ /**
166
+ * @param {CarBufferWriter} writer
167
+ * @param {number} byteLength
168
+ */
169
+ export const resizeHeader = (writer, byteLength) => {
170
+ const { bytes, headerSize } = writer
171
+ // Move data section to a new offset
172
+ bytes.set(bytes.subarray(headerSize, writer.byteOffset), byteLength)
173
+ // Update header size & byteOffset
174
+ writer.byteOffset += byteLength - headerSize
175
+ writer.headerSize = byteLength
176
+ }
177
+
178
+ /**
179
+ * @param {CarBufferWriter} writer
180
+ * @param {number[]|Uint8Array} bytes
181
+ */
182
+
183
+ const writeBytes = (writer, bytes) => {
184
+ writer.bytes.set(bytes, writer.byteOffset)
185
+ writer.byteOffset += bytes.length
186
+ }
187
+ /**
188
+ * @param {{bytes:Uint8Array}} writer
189
+ * @param {number[]} varint
190
+ * @param {Uint8Array} header
191
+ */
192
+ const writeHeader = ({ bytes }, varint, header) => {
193
+ bytes.set(varint)
194
+ bytes.set(header, varint.length)
195
+ }
196
+
197
+ const headerPreludeTokens = [
198
+ new Token(Type.map, 2),
199
+ new Token(Type.string, 'version'),
200
+ new Token(Type.uint, 1),
201
+ new Token(Type.string, 'roots')
202
+ ]
203
+
204
+ const CID_TAG = new Token(Type.tag, 42)
205
+
206
+ /**
207
+ * Calculates header size given the array of byteLength for roots.
208
+ *
209
+ * @name CarBufferWriter.calculateHeaderLength(rootLengths)
210
+ * @param {number[]} rootLengths
211
+ * @returns {number}
212
+ */
213
+ export const calculateHeaderLength = (rootLengths) => {
214
+ const tokens = [...headerPreludeTokens]
215
+ tokens.push(new Token(Type.array, rootLengths.length))
216
+ for (const rootLength of rootLengths) {
217
+ tokens.push(CID_TAG)
218
+ tokens.push(new Token(Type.bytes, { length: rootLength + 1 }))
219
+ }
220
+ const length = tokensToLength(tokens) // no options needed here because we have simple tokens
221
+ return varint.encodingLength(length) + length
222
+ }
223
+
224
+ /**
225
+ * Calculates header size given the array of roots.
226
+ *
227
+ * @name CarBufferWriter.headerLength({ roots })
228
+ * @param {object} options
229
+ * @param {CID[]} options.roots
230
+ * @returns {number}
231
+ */
232
+ export const headerLength = ({ roots }) =>
233
+ calculateHeaderLength(roots.map(cid => cid.bytes.byteLength))
234
+
235
+ /**
236
+ * Estimates header size given a count of the roots and the expected byte length
237
+ * of the root CIDs. The default length works for a standard CIDv1 with a
238
+ * single-byte multihash code, such as SHA2-256 (i.e. the most common CIDv1).
239
+ *
240
+ * @name CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength])
241
+ * @param {number} rootCount
242
+ * @param {number} [rootByteLength]
243
+ * @returns {number}
244
+ */
245
+ export const estimateHeaderLength = (rootCount, rootByteLength = 36) =>
246
+ calculateHeaderLength(new Array(rootCount).fill(rootByteLength))
247
+
248
+ /**
249
+ * Creates synchronous CAR writer that can be used to encode blocks into a given
250
+ * buffer. Optionally you could pass `byteOffset` and `byteLength` to specify a
251
+ * range inside buffer to write into. If car file is going to have `roots` you
252
+ * need to either pass them under `options.roots` (from which header size will
253
+ * be calculated) or provide `options.headerSize` to allocate required space
254
+ * in the buffer. You may also provide known `roots` and `headerSize` to
255
+ * allocate space for the roots that may not be known ahead of time.
256
+ *
257
+ * Note: Incorrect `headerSize` may lead to copying bytes inside a buffer
258
+ * which will have a negative impact on performance.
259
+ *
260
+ * @name CarBufferWriter.createWriter(buffer[, options])
261
+ * @param {ArrayBuffer} buffer
262
+ * @param {object} [options]
263
+ * @param {CID[]} [options.roots]
264
+ * @param {number} [options.byteOffset]
265
+ * @param {number} [options.byteLength]
266
+ * @param {number} [options.headerSize]
267
+ * @returns {CarBufferWriter}
268
+ */
269
+ export const createWriter = (
270
+ buffer,
271
+ {
272
+ roots = [],
273
+ byteOffset = 0,
274
+ byteLength = buffer.byteLength,
275
+ headerSize = headerLength({ roots })
276
+ } = {}
277
+ ) => {
278
+ const bytes = new Uint8Array(buffer, byteOffset, byteLength)
279
+
280
+ const writer = new CarBufferWriter(bytes, headerSize)
281
+ for (const root of roots) {
282
+ writer.addRoot(root)
283
+ }
284
+
285
+ return writer
286
+ }
package/lib/encoder.js CHANGED
@@ -59,7 +59,7 @@ function createEncoder (writer) {
59
59
  * @returns {Promise<void>}
60
60
  */
61
61
  async close () {
62
- return writer.end()
62
+ await writer.end()
63
63
  /* c8 ignore next 2 */
64
64
  // Node.js 12 c8 bug
65
65
  }
@@ -52,7 +52,7 @@ export function create () {
52
52
  ended = true
53
53
  const drainer = makeDrainer()
54
54
  outWaitResolver()
55
- return drainer
55
+ await drainer
56
56
  /* c8 ignore next 2 */
57
57
  // Node.js 12 c8 bug
58
58
  }