@ipld/car 4.0.0 → 4.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +109 -0
  2. package/api.ts +22 -4
  3. package/buffer-writer +1 -0
  4. package/cjs/browser-test/common.js +3 -3
  5. package/cjs/browser-test/node-test-large.js +8 -8
  6. package/cjs/browser-test/test-buffer-writer.js +330 -0
  7. package/cjs/browser-test/test-errors.js +2 -2
  8. package/cjs/browser-test/test-indexer.js +1 -1
  9. package/cjs/browser-test/test-reader.js +2 -2
  10. package/cjs/browser-test/test-writer.js +3 -3
  11. package/cjs/lib/buffer-writer.js +161 -0
  12. package/cjs/lib/decoder.js +2 -2
  13. package/cjs/lib/encoder.js +3 -3
  14. package/cjs/lib/iterator-channel.js +1 -1
  15. package/cjs/node-test/common.js +3 -3
  16. package/cjs/node-test/node-test-large.js +8 -8
  17. package/cjs/node-test/test-buffer-writer.js +330 -0
  18. package/cjs/node-test/test-errors.js +2 -2
  19. package/cjs/node-test/test-indexer.js +1 -1
  20. package/cjs/node-test/test-reader.js +2 -2
  21. package/cjs/node-test/test-writer.js +3 -3
  22. package/esm/browser-test/test-buffer-writer.js +311 -0
  23. package/esm/browser-test/test-indexer.js +1 -1
  24. package/esm/browser-test/test-reader.js +2 -2
  25. package/esm/browser-test/test-writer.js +3 -3
  26. package/esm/lib/buffer-writer.js +126 -0
  27. package/esm/lib/encoder.js +1 -1
  28. package/esm/lib/iterator-channel.js +1 -1
  29. package/esm/node-test/test-buffer-writer.js +311 -0
  30. package/esm/node-test/test-indexer.js +1 -1
  31. package/esm/node-test/test-reader.js +2 -2
  32. package/esm/node-test/test-writer.js +3 -3
  33. package/lib/buffer-writer.js +286 -0
  34. package/lib/encoder.js +1 -1
  35. package/lib/iterator-channel.js +1 -1
  36. package/package.json +14 -4
  37. package/test/test-buffer-writer.js +256 -0
  38. package/test/test-indexer.js +1 -1
  39. package/test/test-reader.js +2 -2
  40. package/test/test-writer.js +3 -3
  41. package/tsconfig.json +1 -0
  42. package/types/api.d.ts +16 -0
  43. package/types/api.d.ts.map +1 -1
  44. package/types/lib/buffer-writer.d.ts +86 -0
  45. package/types/lib/buffer-writer.d.ts.map +1 -0
  46. package/types/test/test-buffer-writer.d.ts +2 -0
  47. package/types/test/test-buffer-writer.d.ts.map +1 -0
package/README.md CHANGED
@@ -240,6 +240,15 @@ be directly fed to a
240
240
  * [`async CarWriter.createAppender()`](#CarWriter__createAppender)
241
241
  * [`async CarWriter.updateRootsInBytes(bytes, roots)`](#CarWriter__updateRootsInBytes)
242
242
  * [`async CarWriter.updateRootsInFile(fd, roots)`](#CarWriter__updateRootsInFile)
243
+ * [`class CarBufferWriter`](#CarBufferWriter)
244
+ * [`CarBufferWriter#addRoot(root, options)`](#CarBufferWriter_addRoot)
245
+ * [`CarBufferWriter#write(block)`](#CarBufferWriter_write)
246
+ * [`CarBufferWriter#close([options])`](#CarBufferWriter_close)
247
+ * [`CarBufferWriter.blockLength(Block)`](#CarBufferWriter__blockLength__Block__)
248
+ * [`CarBufferWriter.calculateHeaderLength(rootLengths)`](#CarBufferWriter__calculateHeaderLength__rootLengths__)
249
+ * [`CarBufferWriter.headerLength({ roots })`](#CarBufferWriter__headerLength______roots______)
250
+ * [`CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength])`](#CarBufferWriter__estimateHeaderLength__rootCount______rootByteLength____)
251
+ * [`CarBufferWriter.createWriter(buffer[, options])`](#CarBufferWriter__createWriter__buffer______options____)
243
252
  * [`async decoder.readHeader(reader)`](#async__decoder__readHeader__reader__)
244
253
  * [`async decoder.readBlockHead(reader)`](#async__decoder__readBlockHead__reader__)
245
254
  * [`decoder.createDecoder(reader)`](#decoder__createDecoder__reader__)
@@ -766,6 +775,106 @@ replaced encode as the same length as the new roots.
766
775
  This function is **only available in Node.js** and not a browser
767
776
  environment.
768
777
 
778
+ <a name="CarBufferWriter"></a>
779
+ ### `class CarBufferWriter`
780
+
781
+ A simple CAR writer that writes to a pre-allocated buffer.
782
+
783
+ <a name="CarBufferWriter_addRoot"></a>
784
+ ### `CarBufferWriter#addRoot(root, options)`
785
+
786
+ * `root` `(CID)`
787
+ * `options`
788
+
789
+ * Returns: `CarBufferWriter`
790
+
791
+ Add a root to this writer, to be used to create a header when the CAR is
792
+ finalized with [`close()`](#CarBufferWriter__close)
793
+
794
+ <a name="CarBufferWriter_write"></a>
795
+ ### `CarBufferWriter#write(block)`
796
+
797
+ * `block` `(Block)`: A `{ cid:CID, bytes:Uint8Array }` pair.
798
+
799
+ * Returns: `CarBufferWriter`
800
+
801
+ Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive.
802
+ Throws if there is not enough capacity.
803
+
804
+ <a name="CarBufferWriter_close"></a>
805
+ ### `CarBufferWriter#close([options])`
806
+
807
+ * `options` `(object, optional)`
808
+ * `options.resize` `(boolean, optional)`
809
+
810
+ * Returns: `Uint8Array`
811
+
812
+ Finalize the CAR and return it as a `Uint8Array`.
813
+
814
+ <a name="CarBufferWriter__blockLength__Block__"></a>
815
+ ### `CarBufferWriter.blockLength(Block)`
816
+
817
+ * `block` `(Block)`
818
+
819
+ * Returns: `number`
820
+
821
+ Calculates number of bytes required for storing given block in CAR. Useful in
822
+ estimating size of an `ArrayBuffer` for the `CarBufferWriter`.
823
+
824
+ <a name="CarBufferWriter__calculateHeaderLength__rootLengths__"></a>
825
+ ### `CarBufferWriter.calculateHeaderLength(rootLengths)`
826
+
827
+ * `rootLengths` `(number[])`
828
+
829
+ * Returns: `number`
830
+
831
+ Calculates header size given the array of byteLength for roots.
832
+
833
+ <a name="CarBufferWriter__headerLength______roots______"></a>
834
+ ### `CarBufferWriter.headerLength({ roots })`
835
+
836
+ * `options` `(object)`
837
+ * `options.roots` `(CID[])`
838
+
839
+ * Returns: `number`
840
+
841
+ Calculates header size given the array of roots.
842
+
843
+ <a name="CarBufferWriter__estimateHeaderLength__rootCount______rootByteLength____"></a>
844
+ ### `CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength])`
845
+
846
+ * `rootCount` `(number)`
847
+ * `rootByteLength` `(number, optional)`
848
+
849
+ * Returns: `number`
850
+
851
+ Estimates header size given a count of the roots and the expected byte length
852
+ of the root CIDs. The default length works for a standard CIDv1 with a
853
+ single-byte multihash code, such as SHA2-256 (i.e. the most common CIDv1).
854
+
855
+ <a name="CarBufferWriter__createWriter__buffer______options____"></a>
856
+ ### `CarBufferWriter.createWriter(buffer[, options])`
857
+
858
+ * `buffer` `(ArrayBuffer)`
859
+ * `options` `(object, optional)`
860
+ * `options.roots` `(CID[], optional)`
861
+ * `options.byteOffset` `(number, optional)`
862
+ * `options.byteLength` `(number, optional)`
863
+ * `options.headerSize` `(number, optional)`
864
+
865
+ * Returns: `CarBufferWriter`
866
+
867
+ Creates synchronous CAR writer that can be used to encode blocks into a given
868
+ buffer. Optionally you could pass `byteOffset` and `byteLength` to specify a
869
+ range inside buffer to write into. If the CAR file is going to have `roots` you
870
+ need to either pass them under `options.roots` (from which header size will
871
+ be calculated) or provide `options.headerSize` to allocate required space
872
+ in the buffer. You may also provide known `roots` and `headerSize` to
873
+ allocate space for the roots that may not be known ahead of time.
874
+
875
+ Note: Incorrect `headerSize` may lead to copying bytes inside a buffer
876
+ which will have a negative impact on performance.
877
+
769
878
  <a name="async__decoder__readHeader__reader__"></a>
770
879
  ### `async decoder.readHeader(reader)`
771
880
 
package/api.ts CHANGED
@@ -1,17 +1,21 @@
1
1
  import { CID } from 'multiformats/cid'
2
2
 
3
+ export type { CID }
3
4
  /* Generic types for interfacing with block storage */
4
5
 
5
- export type Block = { cid: CID, bytes: Uint8Array }
6
+ export type Block = {
7
+ cid: CID
8
+ bytes: Uint8Array
9
+ }
6
10
 
7
11
  export type BlockHeader = {
8
- cid: CID,
9
- length: number,
12
+ cid: CID
13
+ length: number
10
14
  blockLength: number
11
15
  }
12
16
 
13
17
  export type BlockIndex = BlockHeader & {
14
- offset: number,
18
+ offset: number
15
19
  blockOffset: number
16
20
  }
17
21
 
@@ -36,6 +40,20 @@ export interface BlockWriter {
36
40
  close(): Promise<void>
37
41
  }
38
42
 
43
+ export interface CarBufferWriter {
44
+ addRoot(root:CID, options?:{ resize?: boolean }):CarBufferWriter
45
+ write(block: Block): CarBufferWriter
46
+ close(options?:{ resize?: boolean }): Uint8Array
47
+ }
48
+
49
+ export interface CarBufferWriterOptions {
50
+ roots?: CID[] // defaults to []
51
+ byteOffset?: number // defaults to 0
52
+ byteLength?: number // defaults to buffer.byteLength
53
+
54
+ headerSize?: number // defaults to size needed for provided roots
55
+ }
56
+
39
57
  export interface WriterChannel {
40
58
  writer: BlockWriter
41
59
  out: AsyncIterable<Uint8Array>
package/buffer-writer ADDED
@@ -0,0 +1 @@
1
+ module.exports = require('./cjs/lib/buffer-writer.js')
@@ -5,7 +5,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
5
5
  var multiformats = require('multiformats');
6
6
  var sha2 = require('multiformats/hashes/sha2');
7
7
  var raw = require('multiformats/codecs/raw');
8
- var dagCbor = require('@ipld/dag-cbor');
8
+ var CBOR = require('@ipld/dag-cbor');
9
9
  var dagPb = require('@ipld/dag-pb');
10
10
  var chai = require('chai');
11
11
  var chaiAsPromised = require('chai-as-promised');
@@ -31,7 +31,7 @@ function _interopNamespace(e) {
31
31
  }
32
32
 
33
33
  var raw__namespace = /*#__PURE__*/_interopNamespace(raw);
34
- var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
34
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
35
35
  var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
36
36
  var chai__default = /*#__PURE__*/_interopDefaultLegacy(chai);
37
37
  var chaiAsPromised__default = /*#__PURE__*/_interopDefaultLegacy(chaiAsPromised);
@@ -101,7 +101,7 @@ async function makeData() {
101
101
  cborBlocks.push(await toBlock({
102
102
  name: b[0],
103
103
  link: b[1]
104
- }, dagCbor__namespace));
104
+ }, CBOR__namespace));
105
105
  }
106
106
  allBlocks = [
107
107
  [
@@ -4,7 +4,7 @@ var fs = require('fs');
4
4
  var stream = require('stream');
5
5
  var ipldGarbage = require('ipld-garbage');
6
6
  var varint = require('varint');
7
- var dagCbor = require('@ipld/dag-cbor');
7
+ var CBOR = require('@ipld/dag-cbor');
8
8
  var sha2 = require('multiformats/hashes/sha2');
9
9
  var cid = require('multiformats/cid');
10
10
  require('../car-browser.js');
@@ -36,7 +36,7 @@ function _interopNamespace(e) {
36
36
 
37
37
  var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
38
38
  var varint__default = /*#__PURE__*/_interopDefaultLegacy(varint);
39
- var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
39
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
40
40
 
41
41
  describe('Large CAR', () => {
42
42
  const objects = [];
@@ -45,7 +45,7 @@ describe('Large CAR', () => {
45
45
  it('create, no roots', async () => {
46
46
  const {writer, out} = writerBrowser.CarWriter.create([]);
47
47
  stream.Readable.from(out).pipe(fs__default["default"].createWriteStream('./test.car'));
48
- let offset = dagCbor__namespace.encode({
48
+ let offset = CBOR__namespace.encode({
49
49
  version: 1,
50
50
  roots: []
51
51
  }).length;
@@ -53,9 +53,9 @@ describe('Large CAR', () => {
53
53
  for (let i = 0; i < 500; i++) {
54
54
  const obj = ipldGarbage.garbage(1000);
55
55
  objects.push(obj);
56
- const bytes = dagCbor__namespace.encode(obj);
56
+ const bytes = CBOR__namespace.encode(obj);
57
57
  const hash = await sha2.sha256.digest(bytes);
58
- const cid$1 = cid.CID.create(1, dagCbor__namespace.code, hash);
58
+ const cid$1 = cid.CID.create(1, CBOR__namespace.code, hash);
59
59
  cids.push(cid$1.toString());
60
60
  const blockLength = bytes.length;
61
61
  let length = cid$1.bytes.length + blockLength;
@@ -101,7 +101,7 @@ describe('Large CAR', () => {
101
101
  let i = 0;
102
102
  for await (const {cid, bytes} of reader.blocks()) {
103
103
  common.assert.strictEqual(cid.toString(), cids[i], `cid #${ i } ${ cid } <> ${ cids[i] }`);
104
- const obj = dagCbor__namespace.decode(bytes);
104
+ const obj = CBOR__namespace.decode(bytes);
105
105
  common.assert.deepStrictEqual(obj, objects[i], `object #${ i }`);
106
106
  i++;
107
107
  }
@@ -112,7 +112,7 @@ describe('Large CAR', () => {
112
112
  let i = 0;
113
113
  for await (const {cid, bytes} of reader.blocks()) {
114
114
  common.assert.strictEqual(cid.toString(), cids[i], `cid #${ i } ${ cid } <> ${ cids[i] }`);
115
- const obj = dagCbor__namespace.decode(bytes);
115
+ const obj = CBOR__namespace.decode(bytes);
116
116
  common.assert.deepStrictEqual(obj, objects[i], `object #${ i }`);
117
117
  i++;
118
118
  }
@@ -123,7 +123,7 @@ describe('Large CAR', () => {
123
123
  let i = 0;
124
124
  for await (const {cid, bytes} of reader.blocks()) {
125
125
  common.assert.strictEqual(cid.toString(), cids[i], `cid #${ i } ${ cid } <> ${ cids[i] }`);
126
- const obj = dagCbor__namespace.decode(bytes);
126
+ const obj = CBOR__namespace.decode(bytes);
127
127
  common.assert.deepStrictEqual(obj, objects[i], `object #${ i }`);
128
128
  i++;
129
129
  }
@@ -0,0 +1,330 @@
1
+ 'use strict';
2
+
3
+ var bufferWriter = require('../lib/buffer-writer.js');
4
+ var readerBrowser = require('../lib/reader-browser.js');
5
+ var encoder = require('../lib/encoder.js');
6
+ var common = require('./common.js');
7
+ var multiformats = require('multiformats');
8
+ var CBOR = require('@ipld/dag-cbor');
9
+ var sha2 = require('multiformats/hashes/sha2');
10
+ var identity = require('multiformats/hashes/identity');
11
+ var raw = require('multiformats/codecs/raw');
12
+ var Block = require('multiformats/block');
13
+
14
+ function _interopNamespace(e) {
15
+ if (e && e.__esModule) return e;
16
+ var n = Object.create(null);
17
+ if (e) {
18
+ Object.keys(e).forEach(function (k) {
19
+ if (k !== 'default') {
20
+ var d = Object.getOwnPropertyDescriptor(e, k);
21
+ Object.defineProperty(n, k, d.get ? d : {
22
+ enumerable: true,
23
+ get: function () { return e[k]; }
24
+ });
25
+ }
26
+ });
27
+ }
28
+ n["default"] = e;
29
+ return Object.freeze(n);
30
+ }
31
+
32
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
33
+ var raw__namespace = /*#__PURE__*/_interopNamespace(raw);
34
+ var Block__namespace = /*#__PURE__*/_interopNamespace(Block);
35
+
36
+ describe('CarBufferWriter', () => {
37
+ const cid = multiformats.CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu');
38
+ describe('calculateHeaderLength', async () => {
39
+ for (const count of [
40
+ 0,
41
+ 1,
42
+ 10,
43
+ 18,
44
+ 24,
45
+ 48,
46
+ 124,
47
+ 255,
48
+ 258,
49
+ 65536 - 1,
50
+ 65536
51
+ ]) {
52
+ it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
53
+ const roots = new Array(count).fill(cid);
54
+ const sizes = new Array(count).fill(cid.bytes.byteLength);
55
+ common.assert.deepEqual(bufferWriter.calculateHeaderLength(sizes), encoder.createHeader(roots).byteLength);
56
+ });
57
+ it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
58
+ const roots = new Array(count).fill(cid);
59
+ const rootLengths = roots.map(c => c.bytes.byteLength);
60
+ common.assert.deepEqual(bufferWriter.calculateHeaderLength(rootLengths), encoder.createHeader(roots).byteLength);
61
+ });
62
+ }
63
+ it('estimate on large CIDs', () => {
64
+ const largeCID = multiformats.CID.parse(`bafkqbbac${ 'a'.repeat(416) }`);
65
+ common.assert.equal(bufferWriter.calculateHeaderLength([
66
+ cid.bytes.byteLength,
67
+ largeCID.bytes.byteLength
68
+ ]), encoder.createHeader([
69
+ cid,
70
+ largeCID
71
+ ]).byteLength);
72
+ });
73
+ it('estimate on large CIDs 2', () => {
74
+ const largeCID = multiformats.CID.createV1(raw__namespace.code, identity.identity.digest(new Uint8Array(512).fill(1)));
75
+ common.assert.equal(bufferWriter.calculateHeaderLength([
76
+ cid.bytes.byteLength,
77
+ largeCID.bytes.byteLength
78
+ ]), encoder.createHeader([
79
+ cid,
80
+ largeCID
81
+ ]).byteLength);
82
+ });
83
+ });
84
+ describe('writer', () => {
85
+ it('estimate header and write blocks', async () => {
86
+ const headerSize = bufferWriter.estimateHeaderLength(1);
87
+ const dataSize = 256;
88
+ const buffer = new ArrayBuffer(headerSize + dataSize);
89
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
90
+ const b1 = await Block__namespace.encode({
91
+ value: { hello: 'world' },
92
+ codec: CBOR__namespace,
93
+ hasher: sha2.sha256
94
+ });
95
+ writer.write(b1);
96
+ const b2 = await Block__namespace.encode({
97
+ value: { bye: 'world' },
98
+ codec: CBOR__namespace,
99
+ hasher: sha2.sha256
100
+ });
101
+ writer.write(b2);
102
+ writer.addRoot(b1.cid);
103
+ const bytes = writer.close();
104
+ const reader = await readerBrowser.CarReader.fromBytes(bytes);
105
+ common.assert.deepEqual(await reader.getRoots(), [b1.cid]);
106
+ common.assert.deepEqual(reader._blocks, [
107
+ {
108
+ cid: b1.cid,
109
+ bytes: b1.bytes
110
+ },
111
+ {
112
+ cid: b2.cid,
113
+ bytes: b2.bytes
114
+ }
115
+ ]);
116
+ });
117
+ it('overestimate header', async () => {
118
+ const headerSize = bufferWriter.estimateHeaderLength(2);
119
+ const dataSize = 256;
120
+ const buffer = new ArrayBuffer(headerSize + dataSize);
121
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
122
+ const b1 = await Block__namespace.encode({
123
+ value: { hello: 'world' },
124
+ codec: CBOR__namespace,
125
+ hasher: sha2.sha256
126
+ });
127
+ writer.write(b1);
128
+ const b2 = await Block__namespace.encode({
129
+ value: { bye: 'world' },
130
+ codec: CBOR__namespace,
131
+ hasher: sha2.sha256
132
+ });
133
+ writer.write(b2);
134
+ writer.addRoot(b1.cid);
135
+ common.assert.throws(() => writer.close(), /Header size was overestimate/);
136
+ const bytes = writer.close({ resize: true });
137
+ const reader = await readerBrowser.CarReader.fromBytes(bytes);
138
+ common.assert.deepEqual(await reader.getRoots(), [b1.cid]);
139
+ common.assert.deepEqual(reader._blocks, [
140
+ {
141
+ cid: b1.cid,
142
+ bytes: b1.bytes
143
+ },
144
+ {
145
+ cid: b2.cid,
146
+ bytes: b2.bytes
147
+ }
148
+ ]);
149
+ });
150
+ it('underestimate header', async () => {
151
+ const headerSize = bufferWriter.estimateHeaderLength(2);
152
+ const dataSize = 300;
153
+ const buffer = new ArrayBuffer(headerSize + dataSize);
154
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
155
+ const b1 = await Block__namespace.encode({
156
+ value: { hello: 'world' },
157
+ codec: CBOR__namespace,
158
+ hasher: sha2.sha256
159
+ });
160
+ writer.write(b1);
161
+ writer.addRoot(b1.cid);
162
+ const b2 = await Block__namespace.encode({
163
+ value: { bye: 'world' },
164
+ codec: CBOR__namespace,
165
+ hasher: sha2.sha512
166
+ });
167
+ writer.write(b2);
168
+ common.assert.throws(() => writer.addRoot(b2.cid), /has no capacity/);
169
+ writer.addRoot(b2.cid, { resize: true });
170
+ const bytes = writer.close();
171
+ const reader = await readerBrowser.CarReader.fromBytes(bytes);
172
+ common.assert.deepEqual(await reader.getRoots(), [
173
+ b1.cid,
174
+ b2.cid
175
+ ]);
176
+ common.assert.deepEqual(reader._blocks, [
177
+ {
178
+ cid: b1.cid,
179
+ bytes: b1.bytes
180
+ },
181
+ {
182
+ cid: b2.cid,
183
+ bytes: b2.bytes
184
+ }
185
+ ]);
186
+ });
187
+ });
188
+ it('has no space for the root', async () => {
189
+ const headerSize = bufferWriter.estimateHeaderLength(1);
190
+ const dataSize = 100;
191
+ const buffer = new ArrayBuffer(headerSize + dataSize);
192
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
193
+ const b1 = await Block__namespace.encode({
194
+ value: { hello: 'world' },
195
+ codec: CBOR__namespace,
196
+ hasher: sha2.sha256
197
+ });
198
+ writer.write(b1);
199
+ writer.addRoot(b1.cid);
200
+ const b2 = await Block__namespace.encode({
201
+ value: { bye: 'world' },
202
+ codec: CBOR__namespace,
203
+ hasher: sha2.sha256
204
+ });
205
+ writer.write(b2);
206
+ common.assert.throws(() => writer.addRoot(b2.cid), /Buffer has no capacity for a new root/);
207
+ common.assert.throws(() => writer.addRoot(b2.cid, { resize: true }), /Buffer has no capacity for a new root/);
208
+ const bytes = writer.close();
209
+ const reader = await readerBrowser.CarReader.fromBytes(bytes);
210
+ common.assert.deepEqual(await reader.getRoots(), [b1.cid]);
211
+ common.assert.deepEqual(reader._blocks, [
212
+ {
213
+ cid: b1.cid,
214
+ bytes: b1.bytes
215
+ },
216
+ {
217
+ cid: b2.cid,
218
+ bytes: b2.bytes
219
+ }
220
+ ]);
221
+ });
222
+ it('has no space for the block', async () => {
223
+ const headerSize = bufferWriter.estimateHeaderLength(1);
224
+ const dataSize = 58;
225
+ const buffer = new ArrayBuffer(headerSize + dataSize);
226
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
227
+ const b1 = await Block__namespace.encode({
228
+ value: { hello: 'world' },
229
+ codec: CBOR__namespace,
230
+ hasher: sha2.sha256
231
+ });
232
+ writer.write(b1);
233
+ writer.addRoot(b1.cid);
234
+ const b2 = await Block__namespace.encode({
235
+ value: { bye: 'world' },
236
+ codec: CBOR__namespace,
237
+ hasher: sha2.sha256
238
+ });
239
+ common.assert.throws(() => writer.write(b2), /Buffer has no capacity for this block/);
240
+ const bytes = writer.close();
241
+ const reader = await readerBrowser.CarReader.fromBytes(bytes);
242
+ common.assert.deepEqual(await reader.getRoots(), [b1.cid]);
243
+ common.assert.deepEqual(reader._blocks, [{
244
+ cid: b1.cid,
245
+ bytes: b1.bytes
246
+ }]);
247
+ });
248
+ it('provide roots', async () => {
249
+ const b1 = await Block__namespace.encode({
250
+ value: { hello: 'world' },
251
+ codec: CBOR__namespace,
252
+ hasher: sha2.sha256
253
+ });
254
+ const b2 = await Block__namespace.encode({
255
+ value: { bye: 'world' },
256
+ codec: CBOR__namespace,
257
+ hasher: sha2.sha512
258
+ });
259
+ const buffer = new ArrayBuffer(300);
260
+ const writer = bufferWriter.createWriter(buffer, {
261
+ roots: [
262
+ b1.cid,
263
+ b2.cid
264
+ ]
265
+ });
266
+ writer.write(b1);
267
+ writer.write(b2);
268
+ const bytes = writer.close();
269
+ const reader = await readerBrowser.CarReader.fromBytes(bytes);
270
+ common.assert.deepEqual(await reader.getRoots(), [
271
+ b1.cid,
272
+ b2.cid
273
+ ]);
274
+ common.assert.deepEqual(reader._blocks, [
275
+ {
276
+ cid: b1.cid,
277
+ bytes: b1.bytes
278
+ },
279
+ {
280
+ cid: b2.cid,
281
+ bytes: b2.bytes
282
+ }
283
+ ]);
284
+ });
285
+ it('provide large CID root', async () => {
286
+ const bytes = new Uint8Array(512).fill(1);
287
+ const b1 = await Block__namespace.encode({
288
+ value: { hello: 'world' },
289
+ codec: CBOR__namespace,
290
+ hasher: sha2.sha256
291
+ });
292
+ const b2 = {
293
+ cid: multiformats.CID.createV1(raw__namespace.code, identity.identity.digest(bytes)),
294
+ bytes
295
+ };
296
+ const headerSize = CBOR__namespace.encode({
297
+ version: 1,
298
+ roots: [
299
+ b1.cid,
300
+ b2.cid
301
+ ]
302
+ }).byteLength;
303
+ const bodySize = bufferWriter.blockLength(b1) + bufferWriter.blockLength(b2);
304
+ const varintSize = multiformats.varint.encodingLength(headerSize);
305
+ const writer = bufferWriter.createWriter(new ArrayBuffer(varintSize + headerSize + bodySize), {
306
+ roots: [
307
+ b1.cid,
308
+ b2.cid
309
+ ]
310
+ });
311
+ writer.write(b1);
312
+ writer.write(b2);
313
+ const car = writer.close();
314
+ const reader = await readerBrowser.CarReader.fromBytes(car);
315
+ common.assert.deepEqual(await reader.getRoots(), [
316
+ b1.cid,
317
+ b2.cid
318
+ ]);
319
+ common.assert.deepEqual(reader._blocks, [
320
+ {
321
+ cid: b1.cid,
322
+ bytes: b1.bytes
323
+ },
324
+ {
325
+ cid: b2.cid,
326
+ bytes: b2.bytes
327
+ }
328
+ ]);
329
+ });
330
+ });
@@ -1,13 +1,13 @@
1
1
  'use strict';
2
2
 
3
3
  var multiformats = require('multiformats');
4
- var dagCbor = require('@ipld/dag-cbor');
4
+ var CBOR = require('@ipld/dag-cbor');
5
5
  var varint = require('varint');
6
6
  var readerBrowser = require('../lib/reader-browser.js');
7
7
  var common = require('./common.js');
8
8
 
9
9
  function makeHeader(block) {
10
- const u = dagCbor.encode(block);
10
+ const u = CBOR.encode(block);
11
11
  const l = varint.encode(u.length);
12
12
  const u2 = new Uint8Array(u.length + l.length);
13
13
  u2.set(l, 0);
@@ -19,7 +19,7 @@ describe('CarIndexer fromBytes()', () => {
19
19
  const indexer$1 = await indexer.CarIndexer.fromBytes(common.goCarV2Bytes);
20
20
  const roots = await indexer$1.getRoots();
21
21
  common.assert.strictEqual(roots.length, 1);
22
- common.assert(common.goCarV2Roots[0].equals(roots[0]));
22
+ common.assert.ok(common.goCarV2Roots[0].equals(roots[0]));
23
23
  common.assert.strictEqual(indexer$1.version, 2);
24
24
  const indexData = [];
25
25
  for await (const index of indexer$1) {
@@ -76,13 +76,13 @@ describe('CarReader fromBytes()', () => {
76
76
  const reader = await readerBrowser.CarReader.fromBytes(common.goCarV2Bytes);
77
77
  const roots = await reader.getRoots();
78
78
  common.assert.strictEqual(roots.length, 1);
79
- common.assert(common.goCarV2Roots[0].equals(roots[0]));
79
+ common.assert.ok(common.goCarV2Roots[0].equals(roots[0]));
80
80
  common.assert.strictEqual(reader.version, 2);
81
81
  for (const {cid} of common.goCarV2Index) {
82
82
  const block = await reader.get(cid);
83
83
  common.assert.isDefined(block);
84
84
  if (block) {
85
- common.assert(cid.equals(block.cid));
85
+ common.assert.ok(cid.equals(block.cid));
86
86
  let content;
87
87
  if (cid.code === dagPb__namespace.code) {
88
88
  content = dagPb__namespace.decode(block.bytes);
@@ -182,9 +182,9 @@ describe('CarWriter', () => {
182
182
  const rawBytes = await append(0);
183
183
  const pbBytes = await append(1);
184
184
  const cborBytes = await append(2);
185
- common.assert(rawBytes.length > 0);
186
- common.assert(pbBytes.length > 0);
187
- common.assert(cborBytes.length > 0);
185
+ common.assert.ok(rawBytes.length > 0);
186
+ common.assert.ok(pbBytes.length > 0);
187
+ common.assert.ok(cborBytes.length > 0);
188
188
  const reassembled = concatBytes([
189
189
  headerBytes,
190
190
  rawBytes,