@ipld/car 4.0.0 → 4.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +109 -0
  2. package/api.ts +22 -4
  3. package/buffer-writer +1 -0
  4. package/cjs/browser-test/common.js +3 -3
  5. package/cjs/browser-test/node-test-large.js +8 -8
  6. package/cjs/browser-test/test-buffer-writer.js +330 -0
  7. package/cjs/browser-test/test-errors.js +2 -2
  8. package/cjs/browser-test/test-indexer.js +1 -1
  9. package/cjs/browser-test/test-reader.js +2 -2
  10. package/cjs/browser-test/test-writer.js +3 -3
  11. package/cjs/lib/buffer-writer.js +161 -0
  12. package/cjs/lib/decoder.js +2 -2
  13. package/cjs/lib/encoder.js +3 -3
  14. package/cjs/lib/iterator-channel.js +1 -1
  15. package/cjs/node-test/common.js +3 -3
  16. package/cjs/node-test/node-test-large.js +8 -8
  17. package/cjs/node-test/test-buffer-writer.js +330 -0
  18. package/cjs/node-test/test-errors.js +2 -2
  19. package/cjs/node-test/test-indexer.js +1 -1
  20. package/cjs/node-test/test-reader.js +2 -2
  21. package/cjs/node-test/test-writer.js +3 -3
  22. package/esm/browser-test/test-buffer-writer.js +311 -0
  23. package/esm/browser-test/test-indexer.js +1 -1
  24. package/esm/browser-test/test-reader.js +2 -2
  25. package/esm/browser-test/test-writer.js +3 -3
  26. package/esm/lib/buffer-writer.js +126 -0
  27. package/esm/lib/encoder.js +1 -1
  28. package/esm/lib/iterator-channel.js +1 -1
  29. package/esm/node-test/test-buffer-writer.js +311 -0
  30. package/esm/node-test/test-indexer.js +1 -1
  31. package/esm/node-test/test-reader.js +2 -2
  32. package/esm/node-test/test-writer.js +3 -3
  33. package/lib/buffer-writer.js +286 -0
  34. package/lib/encoder.js +1 -1
  35. package/lib/iterator-channel.js +1 -1
  36. package/package.json +14 -4
  37. package/test/test-buffer-writer.js +256 -0
  38. package/test/test-indexer.js +1 -1
  39. package/test/test-reader.js +2 -2
  40. package/test/test-writer.js +3 -3
  41. package/tsconfig.json +1 -0
  42. package/types/api.d.ts +16 -0
  43. package/types/api.d.ts.map +1 -1
  44. package/types/lib/buffer-writer.d.ts +86 -0
  45. package/types/lib/buffer-writer.d.ts.map +1 -0
  46. package/types/test/test-buffer-writer.d.ts +2 -0
  47. package/types/test/test-buffer-writer.d.ts.map +1 -0
@@ -0,0 +1,161 @@
1
+ 'use strict';
2
+
3
+ Object.defineProperty(exports, '__esModule', { value: true });
4
+
5
+ var varint = require('varint');
6
+ var cborg = require('cborg');
7
+ var length = require('cborg/length');
8
+ var CBOR = require('@ipld/dag-cbor');
9
+
10
+ function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
11
+
12
+ function _interopNamespace(e) {
13
+ if (e && e.__esModule) return e;
14
+ var n = Object.create(null);
15
+ if (e) {
16
+ Object.keys(e).forEach(function (k) {
17
+ if (k !== 'default') {
18
+ var d = Object.getOwnPropertyDescriptor(e, k);
19
+ Object.defineProperty(n, k, d.get ? d : {
20
+ enumerable: true,
21
+ get: function () { return e[k]; }
22
+ });
23
+ }
24
+ });
25
+ }
26
+ n["default"] = e;
27
+ return Object.freeze(n);
28
+ }
29
+
30
+ var varint__default = /*#__PURE__*/_interopDefaultLegacy(varint);
31
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
32
+
33
+ class CarBufferWriter {
34
+ constructor(bytes, headerSize) {
35
+ this.bytes = bytes;
36
+ this.byteOffset = headerSize;
37
+ this.roots = [];
38
+ this.headerSize = headerSize;
39
+ }
40
+ addRoot(root, options) {
41
+ addRoot(this, root, options);
42
+ return this;
43
+ }
44
+ write(block) {
45
+ addBlock(this, block);
46
+ return this;
47
+ }
48
+ close(options) {
49
+ return close(this, options);
50
+ }
51
+ }
52
+ const addRoot = (writer, root, {
53
+ resize = false
54
+ } = {}) => {
55
+ const {bytes, headerSize, byteOffset, roots} = writer;
56
+ writer.roots.push(root);
57
+ const size = headerLength(writer);
58
+ if (size > headerSize) {
59
+ if (size - headerSize + byteOffset < bytes.byteLength) {
60
+ if (resize) {
61
+ resizeHeader(writer, size);
62
+ } else {
63
+ roots.pop();
64
+ throw new RangeError(`Header of size ${ headerSize } has no capacity for new root ${ root }.
65
+ However, there is space in the buffer and you could call addRoot(root, { resize: true }) to resize header to make a space for this root.`);
66
+ }
67
+ } else {
68
+ roots.pop();
69
+ throw new RangeError(`Buffer has no capacity for a new root ${ root }`);
70
+ }
71
+ }
72
+ };
73
+ const blockLength = ({cid, bytes}) => {
74
+ const size = cid.bytes.byteLength + bytes.byteLength;
75
+ return varint__default["default"].encodingLength(size) + size;
76
+ };
77
+ const addBlock = (writer, {cid, bytes}) => {
78
+ const byteLength = cid.bytes.byteLength + bytes.byteLength;
79
+ const size = varint__default["default"].encode(byteLength);
80
+ if (writer.byteOffset + size.length + byteLength > writer.bytes.byteLength) {
81
+ throw new RangeError('Buffer has no capacity for this block');
82
+ } else {
83
+ writeBytes(writer, size);
84
+ writeBytes(writer, cid.bytes);
85
+ writeBytes(writer, bytes);
86
+ }
87
+ };
88
+ const close = (writer, {
89
+ resize = false
90
+ } = {}) => {
91
+ const {roots, bytes, byteOffset, headerSize} = writer;
92
+ const headerBytes = CBOR__namespace.encode({
93
+ version: 1,
94
+ roots
95
+ });
96
+ const varintBytes = varint__default["default"].encode(headerBytes.length);
97
+ const size = varintBytes.length + headerBytes.byteLength;
98
+ const offset = headerSize - size;
99
+ if (offset === 0) {
100
+ writeHeader(writer, varintBytes, headerBytes);
101
+ return bytes.subarray(0, byteOffset);
102
+ } else if (resize) {
103
+ resizeHeader(writer, size);
104
+ writeHeader(writer, varintBytes, headerBytes);
105
+ return bytes.subarray(0, writer.byteOffset);
106
+ } else {
107
+ throw new RangeError(`Header size was overestimated.
108
+ You can use close({ resize: true }) to resize header`);
109
+ }
110
+ };
111
+ const resizeHeader = (writer, byteLength) => {
112
+ const {bytes, headerSize} = writer;
113
+ bytes.set(bytes.subarray(headerSize, writer.byteOffset), byteLength);
114
+ writer.byteOffset += byteLength - headerSize;
115
+ writer.headerSize = byteLength;
116
+ };
117
+ const writeBytes = (writer, bytes) => {
118
+ writer.bytes.set(bytes, writer.byteOffset);
119
+ writer.byteOffset += bytes.length;
120
+ };
121
+ const writeHeader = ({bytes}, varint, header) => {
122
+ bytes.set(varint);
123
+ bytes.set(header, varint.length);
124
+ };
125
+ const headerPreludeTokens = [
126
+ new cborg.Token(cborg.Type.map, 2),
127
+ new cborg.Token(cborg.Type.string, 'version'),
128
+ new cborg.Token(cborg.Type.uint, 1),
129
+ new cborg.Token(cborg.Type.string, 'roots')
130
+ ];
131
+ const CID_TAG = new cborg.Token(cborg.Type.tag, 42);
132
+ const calculateHeaderLength = rootLengths => {
133
+ const tokens = [...headerPreludeTokens];
134
+ tokens.push(new cborg.Token(cborg.Type.array, rootLengths.length));
135
+ for (const rootLength of rootLengths) {
136
+ tokens.push(CID_TAG);
137
+ tokens.push(new cborg.Token(cborg.Type.bytes, { length: rootLength + 1 }));
138
+ }
139
+ const length$1 = length.tokensToLength(tokens);
140
+ return varint__default["default"].encodingLength(length$1) + length$1;
141
+ };
142
+ const headerLength = ({roots}) => calculateHeaderLength(roots.map(cid => cid.bytes.byteLength));
143
+ const estimateHeaderLength = (rootCount, rootByteLength = 36) => calculateHeaderLength(new Array(rootCount).fill(rootByteLength));
144
+ const createWriter = (buffer, {roots = [], byteOffset = 0, byteLength = buffer.byteLength, headerSize = headerLength({ roots })} = {}) => {
145
+ const bytes = new Uint8Array(buffer, byteOffset, byteLength);
146
+ const writer = new CarBufferWriter(bytes, headerSize);
147
+ for (const root of roots) {
148
+ writer.addRoot(root);
149
+ }
150
+ return writer;
151
+ };
152
+
153
+ exports.addBlock = addBlock;
154
+ exports.addRoot = addRoot;
155
+ exports.blockLength = blockLength;
156
+ exports.calculateHeaderLength = calculateHeaderLength;
157
+ exports.close = close;
158
+ exports.createWriter = createWriter;
159
+ exports.estimateHeaderLength = estimateHeaderLength;
160
+ exports.headerLength = headerLength;
161
+ exports.resizeHeader = resizeHeader;
@@ -5,7 +5,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
5
5
  var varint = require('varint');
6
6
  var cid = require('multiformats/cid');
7
7
  var Digest = require('multiformats/hashes/digest');
8
- var dagCbor = require('@ipld/dag-cbor');
8
+ var CBOR = require('@ipld/dag-cbor');
9
9
  var headerValidator = require('./header-validator.js');
10
10
 
11
11
  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
@@ -70,7 +70,7 @@ async function readHeader(reader, strictVersion) {
70
70
  }
71
71
  const header = await reader.exactly(length);
72
72
  reader.seek(length);
73
- const block = dagCbor.decode(header);
73
+ const block = CBOR.decode(header);
74
74
  if (!headerValidator.CarHeader(block)) {
75
75
  throw new Error('Invalid CAR header format');
76
76
  }
@@ -3,14 +3,14 @@
3
3
  Object.defineProperty(exports, '__esModule', { value: true });
4
4
 
5
5
  var varint = require('varint');
6
- var dagCbor = require('@ipld/dag-cbor');
6
+ var CBOR = require('@ipld/dag-cbor');
7
7
 
8
8
  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
9
9
 
10
10
  var varint__default = /*#__PURE__*/_interopDefaultLegacy(varint);
11
11
 
12
12
  function createHeader(roots) {
13
- const headerBytes = dagCbor.encode({
13
+ const headerBytes = CBOR.encode({
14
14
  version: 1,
15
15
  roots
16
16
  });
@@ -35,7 +35,7 @@ function createEncoder(writer) {
35
35
  }
36
36
  },
37
37
  async close() {
38
- return writer.end();
38
+ await writer.end();
39
39
  }
40
40
  };
41
41
  }
@@ -34,7 +34,7 @@ function create() {
34
34
  ended = true;
35
35
  const drainer = makeDrainer();
36
36
  outWaitResolver();
37
- return drainer;
37
+ await drainer;
38
38
  }
39
39
  };
40
40
  const iterator = {
@@ -5,7 +5,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
5
5
  var multiformats = require('multiformats');
6
6
  var sha2 = require('multiformats/hashes/sha2');
7
7
  var raw = require('multiformats/codecs/raw');
8
- var dagCbor = require('@ipld/dag-cbor');
8
+ var CBOR = require('@ipld/dag-cbor');
9
9
  var dagPb = require('@ipld/dag-pb');
10
10
  var chai = require('chai');
11
11
  var chaiAsPromised = require('chai-as-promised');
@@ -31,7 +31,7 @@ function _interopNamespace(e) {
31
31
  }
32
32
 
33
33
  var raw__namespace = /*#__PURE__*/_interopNamespace(raw);
34
- var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
34
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
35
35
  var dagPb__namespace = /*#__PURE__*/_interopNamespace(dagPb);
36
36
  var chai__default = /*#__PURE__*/_interopDefaultLegacy(chai);
37
37
  var chaiAsPromised__default = /*#__PURE__*/_interopDefaultLegacy(chaiAsPromised);
@@ -101,7 +101,7 @@ async function makeData() {
101
101
  cborBlocks.push(await toBlock({
102
102
  name: b[0],
103
103
  link: b[1]
104
- }, dagCbor__namespace));
104
+ }, CBOR__namespace));
105
105
  }
106
106
  allBlocks = [
107
107
  [
@@ -4,7 +4,7 @@ var fs = require('fs');
4
4
  var stream = require('stream');
5
5
  var ipldGarbage = require('ipld-garbage');
6
6
  var varint = require('varint');
7
- var dagCbor = require('@ipld/dag-cbor');
7
+ var CBOR = require('@ipld/dag-cbor');
8
8
  var sha2 = require('multiformats/hashes/sha2');
9
9
  var cid = require('multiformats/cid');
10
10
  require('../car.js');
@@ -36,7 +36,7 @@ function _interopNamespace(e) {
36
36
 
37
37
  var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
38
38
  var varint__default = /*#__PURE__*/_interopDefaultLegacy(varint);
39
- var dagCbor__namespace = /*#__PURE__*/_interopNamespace(dagCbor);
39
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
40
40
 
41
41
  describe('Large CAR', () => {
42
42
  const objects = [];
@@ -45,7 +45,7 @@ describe('Large CAR', () => {
45
45
  it('create, no roots', async () => {
46
46
  const {writer: writer$1, out} = writer.CarWriter.create([]);
47
47
  stream.Readable.from(out).pipe(fs__default["default"].createWriteStream('./test.car'));
48
- let offset = dagCbor__namespace.encode({
48
+ let offset = CBOR__namespace.encode({
49
49
  version: 1,
50
50
  roots: []
51
51
  }).length;
@@ -53,9 +53,9 @@ describe('Large CAR', () => {
53
53
  for (let i = 0; i < 500; i++) {
54
54
  const obj = ipldGarbage.garbage(1000);
55
55
  objects.push(obj);
56
- const bytes = dagCbor__namespace.encode(obj);
56
+ const bytes = CBOR__namespace.encode(obj);
57
57
  const hash = await sha2.sha256.digest(bytes);
58
- const cid$1 = cid.CID.create(1, dagCbor__namespace.code, hash);
58
+ const cid$1 = cid.CID.create(1, CBOR__namespace.code, hash);
59
59
  cids.push(cid$1.toString());
60
60
  const blockLength = bytes.length;
61
61
  let length = cid$1.bytes.length + blockLength;
@@ -101,7 +101,7 @@ describe('Large CAR', () => {
101
101
  let i = 0;
102
102
  for await (const {cid, bytes} of reader$1.blocks()) {
103
103
  common.assert.strictEqual(cid.toString(), cids[i], `cid #${ i } ${ cid } <> ${ cids[i] }`);
104
- const obj = dagCbor__namespace.decode(bytes);
104
+ const obj = CBOR__namespace.decode(bytes);
105
105
  common.assert.deepStrictEqual(obj, objects[i], `object #${ i }`);
106
106
  i++;
107
107
  }
@@ -112,7 +112,7 @@ describe('Large CAR', () => {
112
112
  let i = 0;
113
113
  for await (const {cid, bytes} of reader$1.blocks()) {
114
114
  common.assert.strictEqual(cid.toString(), cids[i], `cid #${ i } ${ cid } <> ${ cids[i] }`);
115
- const obj = dagCbor__namespace.decode(bytes);
115
+ const obj = CBOR__namespace.decode(bytes);
116
116
  common.assert.deepStrictEqual(obj, objects[i], `object #${ i }`);
117
117
  i++;
118
118
  }
@@ -123,7 +123,7 @@ describe('Large CAR', () => {
123
123
  let i = 0;
124
124
  for await (const {cid, bytes} of reader.blocks()) {
125
125
  common.assert.strictEqual(cid.toString(), cids[i], `cid #${ i } ${ cid } <> ${ cids[i] }`);
126
- const obj = dagCbor__namespace.decode(bytes);
126
+ const obj = CBOR__namespace.decode(bytes);
127
127
  common.assert.deepStrictEqual(obj, objects[i], `object #${ i }`);
128
128
  i++;
129
129
  }
@@ -0,0 +1,330 @@
1
+ 'use strict';
2
+
3
+ var bufferWriter = require('../lib/buffer-writer.js');
4
+ var reader = require('../lib/reader.js');
5
+ var encoder = require('../lib/encoder.js');
6
+ var common = require('./common.js');
7
+ var multiformats = require('multiformats');
8
+ var CBOR = require('@ipld/dag-cbor');
9
+ var sha2 = require('multiformats/hashes/sha2');
10
+ var identity = require('multiformats/hashes/identity');
11
+ var raw = require('multiformats/codecs/raw');
12
+ var Block = require('multiformats/block');
13
+
14
+ function _interopNamespace(e) {
15
+ if (e && e.__esModule) return e;
16
+ var n = Object.create(null);
17
+ if (e) {
18
+ Object.keys(e).forEach(function (k) {
19
+ if (k !== 'default') {
20
+ var d = Object.getOwnPropertyDescriptor(e, k);
21
+ Object.defineProperty(n, k, d.get ? d : {
22
+ enumerable: true,
23
+ get: function () { return e[k]; }
24
+ });
25
+ }
26
+ });
27
+ }
28
+ n["default"] = e;
29
+ return Object.freeze(n);
30
+ }
31
+
32
+ var CBOR__namespace = /*#__PURE__*/_interopNamespace(CBOR);
33
+ var raw__namespace = /*#__PURE__*/_interopNamespace(raw);
34
+ var Block__namespace = /*#__PURE__*/_interopNamespace(Block);
35
+
36
+ describe('CarBufferWriter', () => {
37
+ const cid = multiformats.CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu');
38
+ describe('calculateHeaderLength', async () => {
39
+ for (const count of [
40
+ 0,
41
+ 1,
42
+ 10,
43
+ 18,
44
+ 24,
45
+ 48,
46
+ 124,
47
+ 255,
48
+ 258,
49
+ 65536 - 1,
50
+ 65536
51
+ ]) {
52
+ it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
53
+ const roots = new Array(count).fill(cid);
54
+ const sizes = new Array(count).fill(cid.bytes.byteLength);
55
+ common.assert.deepEqual(bufferWriter.calculateHeaderLength(sizes), encoder.createHeader(roots).byteLength);
56
+ });
57
+ it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
58
+ const roots = new Array(count).fill(cid);
59
+ const rootLengths = roots.map(c => c.bytes.byteLength);
60
+ common.assert.deepEqual(bufferWriter.calculateHeaderLength(rootLengths), encoder.createHeader(roots).byteLength);
61
+ });
62
+ }
63
+ it('estimate on large CIDs', () => {
64
+ const largeCID = multiformats.CID.parse(`bafkqbbac${ 'a'.repeat(416) }`);
65
+ common.assert.equal(bufferWriter.calculateHeaderLength([
66
+ cid.bytes.byteLength,
67
+ largeCID.bytes.byteLength
68
+ ]), encoder.createHeader([
69
+ cid,
70
+ largeCID
71
+ ]).byteLength);
72
+ });
73
+ it('estimate on large CIDs 2', () => {
74
+ const largeCID = multiformats.CID.createV1(raw__namespace.code, identity.identity.digest(new Uint8Array(512).fill(1)));
75
+ common.assert.equal(bufferWriter.calculateHeaderLength([
76
+ cid.bytes.byteLength,
77
+ largeCID.bytes.byteLength
78
+ ]), encoder.createHeader([
79
+ cid,
80
+ largeCID
81
+ ]).byteLength);
82
+ });
83
+ });
84
+ describe('writer', () => {
85
+ it('estimate header and write blocks', async () => {
86
+ const headerSize = bufferWriter.estimateHeaderLength(1);
87
+ const dataSize = 256;
88
+ const buffer = new ArrayBuffer(headerSize + dataSize);
89
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
90
+ const b1 = await Block__namespace.encode({
91
+ value: { hello: 'world' },
92
+ codec: CBOR__namespace,
93
+ hasher: sha2.sha256
94
+ });
95
+ writer.write(b1);
96
+ const b2 = await Block__namespace.encode({
97
+ value: { bye: 'world' },
98
+ codec: CBOR__namespace,
99
+ hasher: sha2.sha256
100
+ });
101
+ writer.write(b2);
102
+ writer.addRoot(b1.cid);
103
+ const bytes = writer.close();
104
+ const reader$1 = await reader.CarReader.fromBytes(bytes);
105
+ common.assert.deepEqual(await reader$1.getRoots(), [b1.cid]);
106
+ common.assert.deepEqual(reader$1._blocks, [
107
+ {
108
+ cid: b1.cid,
109
+ bytes: b1.bytes
110
+ },
111
+ {
112
+ cid: b2.cid,
113
+ bytes: b2.bytes
114
+ }
115
+ ]);
116
+ });
117
+ it('overestimate header', async () => {
118
+ const headerSize = bufferWriter.estimateHeaderLength(2);
119
+ const dataSize = 256;
120
+ const buffer = new ArrayBuffer(headerSize + dataSize);
121
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
122
+ const b1 = await Block__namespace.encode({
123
+ value: { hello: 'world' },
124
+ codec: CBOR__namespace,
125
+ hasher: sha2.sha256
126
+ });
127
+ writer.write(b1);
128
+ const b2 = await Block__namespace.encode({
129
+ value: { bye: 'world' },
130
+ codec: CBOR__namespace,
131
+ hasher: sha2.sha256
132
+ });
133
+ writer.write(b2);
134
+ writer.addRoot(b1.cid);
135
+ common.assert.throws(() => writer.close(), /Header size was overestimate/);
136
+ const bytes = writer.close({ resize: true });
137
+ const reader$1 = await reader.CarReader.fromBytes(bytes);
138
+ common.assert.deepEqual(await reader$1.getRoots(), [b1.cid]);
139
+ common.assert.deepEqual(reader$1._blocks, [
140
+ {
141
+ cid: b1.cid,
142
+ bytes: b1.bytes
143
+ },
144
+ {
145
+ cid: b2.cid,
146
+ bytes: b2.bytes
147
+ }
148
+ ]);
149
+ });
150
+ it('underestimate header', async () => {
151
+ const headerSize = bufferWriter.estimateHeaderLength(2);
152
+ const dataSize = 300;
153
+ const buffer = new ArrayBuffer(headerSize + dataSize);
154
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
155
+ const b1 = await Block__namespace.encode({
156
+ value: { hello: 'world' },
157
+ codec: CBOR__namespace,
158
+ hasher: sha2.sha256
159
+ });
160
+ writer.write(b1);
161
+ writer.addRoot(b1.cid);
162
+ const b2 = await Block__namespace.encode({
163
+ value: { bye: 'world' },
164
+ codec: CBOR__namespace,
165
+ hasher: sha2.sha512
166
+ });
167
+ writer.write(b2);
168
+ common.assert.throws(() => writer.addRoot(b2.cid), /has no capacity/);
169
+ writer.addRoot(b2.cid, { resize: true });
170
+ const bytes = writer.close();
171
+ const reader$1 = await reader.CarReader.fromBytes(bytes);
172
+ common.assert.deepEqual(await reader$1.getRoots(), [
173
+ b1.cid,
174
+ b2.cid
175
+ ]);
176
+ common.assert.deepEqual(reader$1._blocks, [
177
+ {
178
+ cid: b1.cid,
179
+ bytes: b1.bytes
180
+ },
181
+ {
182
+ cid: b2.cid,
183
+ bytes: b2.bytes
184
+ }
185
+ ]);
186
+ });
187
+ });
188
+ it('has no space for the root', async () => {
189
+ const headerSize = bufferWriter.estimateHeaderLength(1);
190
+ const dataSize = 100;
191
+ const buffer = new ArrayBuffer(headerSize + dataSize);
192
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
193
+ const b1 = await Block__namespace.encode({
194
+ value: { hello: 'world' },
195
+ codec: CBOR__namespace,
196
+ hasher: sha2.sha256
197
+ });
198
+ writer.write(b1);
199
+ writer.addRoot(b1.cid);
200
+ const b2 = await Block__namespace.encode({
201
+ value: { bye: 'world' },
202
+ codec: CBOR__namespace,
203
+ hasher: sha2.sha256
204
+ });
205
+ writer.write(b2);
206
+ common.assert.throws(() => writer.addRoot(b2.cid), /Buffer has no capacity for a new root/);
207
+ common.assert.throws(() => writer.addRoot(b2.cid, { resize: true }), /Buffer has no capacity for a new root/);
208
+ const bytes = writer.close();
209
+ const reader$1 = await reader.CarReader.fromBytes(bytes);
210
+ common.assert.deepEqual(await reader$1.getRoots(), [b1.cid]);
211
+ common.assert.deepEqual(reader$1._blocks, [
212
+ {
213
+ cid: b1.cid,
214
+ bytes: b1.bytes
215
+ },
216
+ {
217
+ cid: b2.cid,
218
+ bytes: b2.bytes
219
+ }
220
+ ]);
221
+ });
222
+ it('has no space for the block', async () => {
223
+ const headerSize = bufferWriter.estimateHeaderLength(1);
224
+ const dataSize = 58;
225
+ const buffer = new ArrayBuffer(headerSize + dataSize);
226
+ const writer = bufferWriter.createWriter(buffer, { headerSize });
227
+ const b1 = await Block__namespace.encode({
228
+ value: { hello: 'world' },
229
+ codec: CBOR__namespace,
230
+ hasher: sha2.sha256
231
+ });
232
+ writer.write(b1);
233
+ writer.addRoot(b1.cid);
234
+ const b2 = await Block__namespace.encode({
235
+ value: { bye: 'world' },
236
+ codec: CBOR__namespace,
237
+ hasher: sha2.sha256
238
+ });
239
+ common.assert.throws(() => writer.write(b2), /Buffer has no capacity for this block/);
240
+ const bytes = writer.close();
241
+ const reader$1 = await reader.CarReader.fromBytes(bytes);
242
+ common.assert.deepEqual(await reader$1.getRoots(), [b1.cid]);
243
+ common.assert.deepEqual(reader$1._blocks, [{
244
+ cid: b1.cid,
245
+ bytes: b1.bytes
246
+ }]);
247
+ });
248
+ it('provide roots', async () => {
249
+ const b1 = await Block__namespace.encode({
250
+ value: { hello: 'world' },
251
+ codec: CBOR__namespace,
252
+ hasher: sha2.sha256
253
+ });
254
+ const b2 = await Block__namespace.encode({
255
+ value: { bye: 'world' },
256
+ codec: CBOR__namespace,
257
+ hasher: sha2.sha512
258
+ });
259
+ const buffer = new ArrayBuffer(300);
260
+ const writer = bufferWriter.createWriter(buffer, {
261
+ roots: [
262
+ b1.cid,
263
+ b2.cid
264
+ ]
265
+ });
266
+ writer.write(b1);
267
+ writer.write(b2);
268
+ const bytes = writer.close();
269
+ const reader$1 = await reader.CarReader.fromBytes(bytes);
270
+ common.assert.deepEqual(await reader$1.getRoots(), [
271
+ b1.cid,
272
+ b2.cid
273
+ ]);
274
+ common.assert.deepEqual(reader$1._blocks, [
275
+ {
276
+ cid: b1.cid,
277
+ bytes: b1.bytes
278
+ },
279
+ {
280
+ cid: b2.cid,
281
+ bytes: b2.bytes
282
+ }
283
+ ]);
284
+ });
285
+ it('provide large CID root', async () => {
286
+ const bytes = new Uint8Array(512).fill(1);
287
+ const b1 = await Block__namespace.encode({
288
+ value: { hello: 'world' },
289
+ codec: CBOR__namespace,
290
+ hasher: sha2.sha256
291
+ });
292
+ const b2 = {
293
+ cid: multiformats.CID.createV1(raw__namespace.code, identity.identity.digest(bytes)),
294
+ bytes
295
+ };
296
+ const headerSize = CBOR__namespace.encode({
297
+ version: 1,
298
+ roots: [
299
+ b1.cid,
300
+ b2.cid
301
+ ]
302
+ }).byteLength;
303
+ const bodySize = bufferWriter.blockLength(b1) + bufferWriter.blockLength(b2);
304
+ const varintSize = multiformats.varint.encodingLength(headerSize);
305
+ const writer = bufferWriter.createWriter(new ArrayBuffer(varintSize + headerSize + bodySize), {
306
+ roots: [
307
+ b1.cid,
308
+ b2.cid
309
+ ]
310
+ });
311
+ writer.write(b1);
312
+ writer.write(b2);
313
+ const car = writer.close();
314
+ const reader$1 = await reader.CarReader.fromBytes(car);
315
+ common.assert.deepEqual(await reader$1.getRoots(), [
316
+ b1.cid,
317
+ b2.cid
318
+ ]);
319
+ common.assert.deepEqual(reader$1._blocks, [
320
+ {
321
+ cid: b1.cid,
322
+ bytes: b1.bytes
323
+ },
324
+ {
325
+ cid: b2.cid,
326
+ bytes: b2.bytes
327
+ }
328
+ ]);
329
+ });
330
+ });
@@ -1,13 +1,13 @@
1
1
  'use strict';
2
2
 
3
3
  var multiformats = require('multiformats');
4
- var dagCbor = require('@ipld/dag-cbor');
4
+ var CBOR = require('@ipld/dag-cbor');
5
5
  var varint = require('varint');
6
6
  var reader = require('../lib/reader.js');
7
7
  var common = require('./common.js');
8
8
 
9
9
  function makeHeader(block) {
10
- const u = dagCbor.encode(block);
10
+ const u = CBOR.encode(block);
11
11
  const l = varint.encode(u.length);
12
12
  const u2 = new Uint8Array(u.length + l.length);
13
13
  u2.set(l, 0);
@@ -19,7 +19,7 @@ describe('CarIndexer fromBytes()', () => {
19
19
  const indexer$1 = await indexer.CarIndexer.fromBytes(common.goCarV2Bytes);
20
20
  const roots = await indexer$1.getRoots();
21
21
  common.assert.strictEqual(roots.length, 1);
22
- common.assert(common.goCarV2Roots[0].equals(roots[0]));
22
+ common.assert.ok(common.goCarV2Roots[0].equals(roots[0]));
23
23
  common.assert.strictEqual(indexer$1.version, 2);
24
24
  const indexData = [];
25
25
  for await (const index of indexer$1) {
@@ -76,13 +76,13 @@ describe('CarReader fromBytes()', () => {
76
76
  const reader$1 = await reader.CarReader.fromBytes(common.goCarV2Bytes);
77
77
  const roots = await reader$1.getRoots();
78
78
  common.assert.strictEqual(roots.length, 1);
79
- common.assert(common.goCarV2Roots[0].equals(roots[0]));
79
+ common.assert.ok(common.goCarV2Roots[0].equals(roots[0]));
80
80
  common.assert.strictEqual(reader$1.version, 2);
81
81
  for (const {cid} of common.goCarV2Index) {
82
82
  const block = await reader$1.get(cid);
83
83
  common.assert.isDefined(block);
84
84
  if (block) {
85
- common.assert(cid.equals(block.cid));
85
+ common.assert.ok(cid.equals(block.cid));
86
86
  let content;
87
87
  if (cid.code === dagPb__namespace.code) {
88
88
  content = dagPb__namespace.decode(block.bytes);
@@ -182,9 +182,9 @@ describe('CarWriter', () => {
182
182
  const rawBytes = await append(0);
183
183
  const pbBytes = await append(1);
184
184
  const cborBytes = await append(2);
185
- common.assert(rawBytes.length > 0);
186
- common.assert(pbBytes.length > 0);
187
- common.assert(cborBytes.length > 0);
185
+ common.assert.ok(rawBytes.length > 0);
186
+ common.assert.ok(pbBytes.length > 0);
187
+ common.assert.ok(cborBytes.length > 0);
188
188
  const reassembled = concatBytes([
189
189
  headerBytes,
190
190
  rawBytes,