@ipld/car 3.2.3 → 4.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +183 -2
- package/api.ts +22 -4
- package/buffer-writer +1 -0
- package/cjs/browser-test/common.js +78 -3
- package/cjs/browser-test/node-test-large.js +8 -8
- package/cjs/browser-test/test-buffer-writer.js +330 -0
- package/cjs/browser-test/test-errors.js +57 -34
- package/cjs/browser-test/test-indexer.js +12 -0
- package/cjs/browser-test/test-reader.js +83 -0
- package/cjs/lib/buffer-writer.js +161 -0
- package/cjs/lib/decoder.js +72 -15
- package/cjs/lib/encoder.js +2 -2
- package/cjs/lib/header-validator.js +29 -0
- package/cjs/lib/reader-browser.js +7 -7
- package/cjs/lib/writer-browser.js +1 -1
- package/cjs/node-test/common.js +78 -3
- package/cjs/node-test/node-test-large.js +8 -8
- package/cjs/node-test/test-buffer-writer.js +330 -0
- package/cjs/node-test/test-errors.js +57 -34
- package/cjs/node-test/test-indexer.js +12 -0
- package/cjs/node-test/test-reader.js +83 -0
- package/esm/browser-test/common.js +76 -1
- package/esm/browser-test/test-buffer-writer.js +311 -0
- package/esm/browser-test/test-errors.js +57 -33
- package/esm/browser-test/test-indexer.js +15 -0
- package/esm/browser-test/test-reader.js +90 -1
- package/esm/lib/buffer-writer.js +126 -0
- package/esm/lib/decoder.js +69 -13
- package/esm/lib/header-validator.js +23 -0
- package/esm/lib/reader-browser.js +7 -8
- package/esm/lib/writer-browser.js +1 -1
- package/esm/node-test/common.js +76 -1
- package/esm/node-test/test-buffer-writer.js +311 -0
- package/esm/node-test/test-errors.js +57 -33
- package/esm/node-test/test-indexer.js +15 -0
- package/esm/node-test/test-reader.js +90 -1
- package/examples/car-to-fixture.js +1 -4
- package/examples/dump-index.js +24 -0
- package/examples/test-examples.js +33 -0
- package/lib/buffer-writer.js +286 -0
- package/lib/coding.ts +17 -2
- package/lib/decoder.js +130 -14
- package/lib/header-validator.js +33 -0
- package/lib/header.ipldsch +6 -0
- package/lib/reader-browser.js +11 -11
- package/lib/writer-browser.js +1 -1
- package/package.json +17 -7
- package/test/_fixtures_to_js.mjs +24 -0
- package/test/common.js +49 -3
- package/test/go.carv2 +0 -0
- package/test/test-buffer-writer.js +256 -0
- package/test/test-errors.js +52 -30
- package/test/test-indexer.js +24 -1
- package/test/test-reader.js +94 -1
- package/tsconfig.json +3 -1
- package/types/api.d.ts +16 -0
- package/types/api.d.ts.map +1 -1
- package/types/lib/buffer-writer.d.ts +86 -0
- package/types/lib/buffer-writer.d.ts.map +1 -0
- package/types/lib/coding.d.ts +14 -4
- package/types/lib/coding.d.ts.map +1 -1
- package/types/lib/decoder.d.ts +38 -2
- package/types/lib/decoder.d.ts.map +1 -1
- package/types/lib/header-validator.d.ts +2 -0
- package/types/lib/header-validator.d.ts.map +1 -0
- package/types/lib/reader-browser.d.ts +15 -7
- package/types/lib/reader-browser.d.ts.map +1 -1
- package/types/test/_fixtures_to_js.d.mts +3 -0
- package/types/test/_fixtures_to_js.d.mts.map +1 -0
- package/types/test/common.d.ts +13 -0
- package/types/test/common.d.ts.map +1 -1
- package/types/test/fixtures-expectations.d.ts +63 -0
- package/types/test/fixtures-expectations.d.ts.map +1 -0
- package/types/test/fixtures.d.ts +3 -0
- package/types/test/fixtures.d.ts.map +1 -0
- package/types/test/test-buffer-writer.d.ts +2 -0
- package/types/test/test-buffer-writer.d.ts.map +1 -0
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
import * as CarBufferWriter from '../lib/buffer-writer.js';
|
|
2
|
+
import { CarReader } from '../lib/reader-browser.js';
|
|
3
|
+
import { createHeader } from '../lib/encoder.js';
|
|
4
|
+
import { assert } from './common.js';
|
|
5
|
+
import {
|
|
6
|
+
CID,
|
|
7
|
+
varint
|
|
8
|
+
} from 'multiformats';
|
|
9
|
+
import * as CBOR from '@ipld/dag-cbor';
|
|
10
|
+
import {
|
|
11
|
+
sha256,
|
|
12
|
+
sha512
|
|
13
|
+
} from 'multiformats/hashes/sha2';
|
|
14
|
+
import { identity } from 'multiformats/hashes/identity';
|
|
15
|
+
import * as Raw from 'multiformats/codecs/raw';
|
|
16
|
+
import * as Block from 'multiformats/block';
|
|
17
|
+
describe('CarBufferWriter', () => {
  const cid = CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu');
  // NOTE: the `describe` callback below was previously declared `async`;
  // mocha suite callbacks must be synchronous (it contains no `await`), so
  // the `async` keyword has been removed.
  describe('calculateHeaderLength', () => {
    // Exercise root counts around CBOR/varint length boundaries
    // (255/258 cross the 1-byte CBOR length limit, 65535/65536 the 2-byte one).
    for (const count of [
      0,
      1,
      10,
      18,
      24,
      48,
      124,
      255,
      258,
      65536 - 1,
      65536
    ]) {
      it(`calculateHeaderLength(new Array(${ count }).fill(36))`, () => {
        const roots = new Array(count).fill(cid);
        const sizes = new Array(count).fill(cid.bytes.byteLength);
        assert.deepEqual(CarBufferWriter.calculateHeaderLength(sizes), createHeader(roots).byteLength);
      });
      // Same expectation, but lengths derived from the CIDs themselves.
      // (Title fixed: it was a copy-paste duplicate of the previous test's.)
      it(`calculateHeaderLength(roots.map(c => c.bytes.byteLength)) with ${ count } roots`, () => {
        const roots = new Array(count).fill(cid);
        const rootLengths = roots.map(c => c.bytes.byteLength);
        assert.deepEqual(CarBufferWriter.calculateHeaderLength(rootLengths), createHeader(roots).byteLength);
      });
    }
    it('estimate on large CIDs', () => {
      const largeCID = CID.parse(`bafkqbbac${ 'a'.repeat(416) }`);
      assert.equal(CarBufferWriter.calculateHeaderLength([
        cid.bytes.byteLength,
        largeCID.bytes.byteLength
      ]), createHeader([
        cid,
        largeCID
      ]).byteLength);
    });
    it('estimate on large CIDs 2', () => {
      const largeCID = CID.createV1(Raw.code, identity.digest(new Uint8Array(512).fill(1)));
      assert.equal(CarBufferWriter.calculateHeaderLength([
        cid.bytes.byteLength,
        largeCID.bytes.byteLength
      ]), createHeader([
        cid,
        largeCID
      ]).byteLength);
    });
  });
  describe('writer', () => {
    it('estimate header and write blocks', async () => {
      const headerSize = CarBufferWriter.estimateHeaderLength(1);
      const dataSize = 256;
      const buffer = new ArrayBuffer(headerSize + dataSize);
      const writer = CarBufferWriter.createWriter(buffer, { headerSize });
      const b1 = await Block.encode({
        value: { hello: 'world' },
        codec: CBOR,
        hasher: sha256
      });
      writer.write(b1);
      const b2 = await Block.encode({
        value: { bye: 'world' },
        codec: CBOR,
        hasher: sha256
      });
      writer.write(b2);
      writer.addRoot(b1.cid);
      const bytes = writer.close();
      const reader = await CarReader.fromBytes(bytes);
      assert.deepEqual(await reader.getRoots(), [b1.cid]);
      assert.deepEqual(reader._blocks, [
        {
          cid: b1.cid,
          bytes: b1.bytes
        },
        {
          cid: b2.cid,
          bytes: b2.bytes
        }
      ]);
    });
    it('overestimate header', async () => {
      // Estimate space for 2 roots but only add 1: close() must refuse
      // unless asked to resize the (now too large) header region.
      const headerSize = CarBufferWriter.estimateHeaderLength(2);
      const dataSize = 256;
      const buffer = new ArrayBuffer(headerSize + dataSize);
      const writer = CarBufferWriter.createWriter(buffer, { headerSize });
      const b1 = await Block.encode({
        value: { hello: 'world' },
        codec: CBOR,
        hasher: sha256
      });
      writer.write(b1);
      const b2 = await Block.encode({
        value: { bye: 'world' },
        codec: CBOR,
        hasher: sha256
      });
      writer.write(b2);
      writer.addRoot(b1.cid);
      assert.throws(() => writer.close(), /Header size was overestimate/);
      const bytes = writer.close({ resize: true });
      const reader = await CarReader.fromBytes(bytes);
      assert.deepEqual(await reader.getRoots(), [b1.cid]);
      assert.deepEqual(reader._blocks, [
        {
          cid: b1.cid,
          bytes: b1.bytes
        },
        {
          cid: b2.cid,
          bytes: b2.bytes
        }
      ]);
    });
    it('underestimate header', async () => {
      // Estimate for 2 small roots, then add a sha512 root that doesn't fit:
      // addRoot() must refuse unless asked to resize (spare buffer exists).
      const headerSize = CarBufferWriter.estimateHeaderLength(2);
      const dataSize = 300;
      const buffer = new ArrayBuffer(headerSize + dataSize);
      const writer = CarBufferWriter.createWriter(buffer, { headerSize });
      const b1 = await Block.encode({
        value: { hello: 'world' },
        codec: CBOR,
        hasher: sha256
      });
      writer.write(b1);
      writer.addRoot(b1.cid);
      const b2 = await Block.encode({
        value: { bye: 'world' },
        codec: CBOR,
        hasher: sha512
      });
      writer.write(b2);
      assert.throws(() => writer.addRoot(b2.cid), /has no capacity/);
      writer.addRoot(b2.cid, { resize: true });
      const bytes = writer.close();
      const reader = await CarReader.fromBytes(bytes);
      assert.deepEqual(await reader.getRoots(), [
        b1.cid,
        b2.cid
      ]);
      assert.deepEqual(reader._blocks, [
        {
          cid: b1.cid,
          bytes: b1.bytes
        },
        {
          cid: b2.cid,
          bytes: b2.bytes
        }
      ]);
    });
  });
  it('has no space for the root', async () => {
    // Buffer is exactly full after two blocks, so a new root cannot be
    // accommodated even with resize.
    const headerSize = CarBufferWriter.estimateHeaderLength(1);
    const dataSize = 100;
    const buffer = new ArrayBuffer(headerSize + dataSize);
    const writer = CarBufferWriter.createWriter(buffer, { headerSize });
    const b1 = await Block.encode({
      value: { hello: 'world' },
      codec: CBOR,
      hasher: sha256
    });
    writer.write(b1);
    writer.addRoot(b1.cid);
    const b2 = await Block.encode({
      value: { bye: 'world' },
      codec: CBOR,
      hasher: sha256
    });
    writer.write(b2);
    assert.throws(() => writer.addRoot(b2.cid), /Buffer has no capacity for a new root/);
    assert.throws(() => writer.addRoot(b2.cid, { resize: true }), /Buffer has no capacity for a new root/);
    const bytes = writer.close();
    const reader = await CarReader.fromBytes(bytes);
    assert.deepEqual(await reader.getRoots(), [b1.cid]);
    assert.deepEqual(reader._blocks, [
      {
        cid: b1.cid,
        bytes: b1.bytes
      },
      {
        cid: b2.cid,
        bytes: b2.bytes
      }
    ]);
  });
  it('has no space for the block', async () => {
    const headerSize = CarBufferWriter.estimateHeaderLength(1);
    const dataSize = 58;
    const buffer = new ArrayBuffer(headerSize + dataSize);
    const writer = CarBufferWriter.createWriter(buffer, { headerSize });
    const b1 = await Block.encode({
      value: { hello: 'world' },
      codec: CBOR,
      hasher: sha256
    });
    writer.write(b1);
    writer.addRoot(b1.cid);
    const b2 = await Block.encode({
      value: { bye: 'world' },
      codec: CBOR,
      hasher: sha256
    });
    assert.throws(() => writer.write(b2), /Buffer has no capacity for this block/);
    const bytes = writer.close();
    const reader = await CarReader.fromBytes(bytes);
    assert.deepEqual(await reader.getRoots(), [b1.cid]);
    assert.deepEqual(reader._blocks, [{
      cid: b1.cid,
      bytes: b1.bytes
    }]);
  });
  it('provide roots', async () => {
    // Roots supplied up-front via createWriter options instead of addRoot().
    const b1 = await Block.encode({
      value: { hello: 'world' },
      codec: CBOR,
      hasher: sha256
    });
    const b2 = await Block.encode({
      value: { bye: 'world' },
      codec: CBOR,
      hasher: sha512
    });
    const buffer = new ArrayBuffer(300);
    const writer = CarBufferWriter.createWriter(buffer, {
      roots: [
        b1.cid,
        b2.cid
      ]
    });
    writer.write(b1);
    writer.write(b2);
    const bytes = writer.close();
    const reader = await CarReader.fromBytes(bytes);
    assert.deepEqual(await reader.getRoots(), [
      b1.cid,
      b2.cid
    ]);
    assert.deepEqual(reader._blocks, [
      {
        cid: b1.cid,
        bytes: b1.bytes
      },
      {
        cid: b2.cid,
        bytes: b2.bytes
      }
    ]);
  });
  it('provide large CID root', async () => {
    // Identity-hashed 512-byte CID: exercises exact (non-estimated) sizing of
    // header + body, computed from a real CBOR encode.
    const bytes = new Uint8Array(512).fill(1);
    const b1 = await Block.encode({
      value: { hello: 'world' },
      codec: CBOR,
      hasher: sha256
    });
    const b2 = {
      cid: CID.createV1(Raw.code, identity.digest(bytes)),
      bytes
    };
    const headerSize = CBOR.encode({
      version: 1,
      roots: [
        b1.cid,
        b2.cid
      ]
    }).byteLength;
    const bodySize = CarBufferWriter.blockLength(b1) + CarBufferWriter.blockLength(b2);
    const varintSize = varint.encodingLength(headerSize);
    const writer = CarBufferWriter.createWriter(new ArrayBuffer(varintSize + headerSize + bodySize), {
      roots: [
        b1.cid,
        b2.cid
      ]
    });
    writer.write(b1);
    writer.write(b2);
    const car = writer.close();
    const reader = await CarReader.fromBytes(car);
    assert.deepEqual(await reader.getRoots(), [
      b1.cid,
      b2.cid
    ]);
    assert.deepEqual(reader._blocks, [
      {
        cid: b1.cid,
        bytes: b1.bytes
      },
      {
        cid: b2.cid,
        bytes: b2.bytes
      }
    ]);
  });
});
|
|
@@ -4,7 +4,8 @@ import { encode as vEncode } from 'varint';
|
|
|
4
4
|
import { CarReader } from '../lib/reader-browser.js';
|
|
5
5
|
import {
|
|
6
6
|
carBytes,
|
|
7
|
-
assert
|
|
7
|
+
assert,
|
|
8
|
+
goCarV2Bytes
|
|
8
9
|
} from './common.js';
|
|
9
10
|
function makeHeader(block) {
|
|
10
11
|
const u = cbEncode(block);
|
|
@@ -26,42 +27,65 @@ describe('Misc errors', () => {
|
|
|
26
27
|
});
|
|
27
28
|
});
|
|
28
29
|
it('bad version', async () => {
|
|
29
|
-
const buf2 = bytes.fromHex('
|
|
30
|
-
assert.strictEqual(bytes.toHex(makeHeader({ version:
|
|
31
|
-
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR version:
|
|
30
|
+
const buf2 = bytes.fromHex('0aa16776657273696f6e03');
|
|
31
|
+
assert.strictEqual(bytes.toHex(makeHeader({ version: 3 })), '0aa16776657273696f6e03');
|
|
32
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR version: 3');
|
|
32
33
|
});
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
34
|
+
describe('bad header', async () => {
|
|
35
|
+
it('sanity check', async () => {
|
|
36
|
+
const buf2 = makeHeader({
|
|
37
|
+
version: 1,
|
|
38
|
+
roots: []
|
|
39
|
+
});
|
|
40
|
+
await assert.isFulfilled(CarReader.fromBytes(buf2));
|
|
37
41
|
});
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
buf2 = makeHeader({
|
|
42
|
-
version: '1',
|
|
43
|
-
roots: []
|
|
42
|
+
it('no \'version\' array', async () => {
|
|
43
|
+
const buf2 = makeHeader({ roots: [] });
|
|
44
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
44
45
|
});
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
46
|
+
it('bad \'version\' type', async () => {
|
|
47
|
+
const buf2 = makeHeader({
|
|
48
|
+
version: '1',
|
|
49
|
+
roots: []
|
|
50
|
+
});
|
|
51
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
51
52
|
});
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
53
|
+
it('no \'roots\' array', async () => {
|
|
54
|
+
const buf2 = makeHeader({ version: 1 });
|
|
55
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
56
|
+
});
|
|
57
|
+
it('bad \'roots\' type', async () => {
|
|
58
|
+
const buf2 = makeHeader({
|
|
59
|
+
version: 1,
|
|
60
|
+
roots: {}
|
|
61
|
+
});
|
|
62
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
63
|
+
});
|
|
64
|
+
it('extraneous properties', async () => {
|
|
65
|
+
const buf2 = makeHeader({
|
|
66
|
+
version: 1,
|
|
67
|
+
roots: [],
|
|
68
|
+
blip: true
|
|
69
|
+
});
|
|
70
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
71
|
+
});
|
|
72
|
+
it('not an object', async () => {
|
|
73
|
+
const buf2 = makeHeader([
|
|
74
|
+
1,
|
|
75
|
+
[]
|
|
76
|
+
]);
|
|
77
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
78
|
+
});
|
|
79
|
+
it('not an object', async () => {
|
|
80
|
+
const buf2 = makeHeader(null);
|
|
81
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
82
|
+
});
|
|
83
|
+
it('recursive v2 header', async () => {
|
|
84
|
+
const v2Header = goCarV2Bytes.slice(0, 51);
|
|
85
|
+
const buf2 = new Uint8Array(51 * 2);
|
|
86
|
+
buf2.set(v2Header, 0);
|
|
87
|
+
buf2.set(v2Header, 51);
|
|
88
|
+
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR version: 2 (expected 1)');
|
|
57
89
|
});
|
|
58
|
-
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
59
|
-
buf2 = makeHeader([
|
|
60
|
-
1,
|
|
61
|
-
[]
|
|
62
|
-
]);
|
|
63
|
-
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
64
|
-
buf2 = makeHeader(null);
|
|
65
|
-
await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format');
|
|
66
90
|
});
|
|
67
91
|
});
|
|
@@ -2,6 +2,9 @@ import { CarIndexer } from '../lib/indexer.js';
|
|
|
2
2
|
import {
|
|
3
3
|
goCarBytes,
|
|
4
4
|
goCarIndex,
|
|
5
|
+
goCarV2Bytes,
|
|
6
|
+
goCarV2Roots,
|
|
7
|
+
goCarV2Index,
|
|
5
8
|
makeIterable,
|
|
6
9
|
assert
|
|
7
10
|
} from './common.js';
|
|
@@ -17,6 +20,18 @@ describe('CarIndexer fromBytes()', () => {
|
|
|
17
20
|
}
|
|
18
21
|
assert.deepStrictEqual(indexData, goCarIndex);
|
|
19
22
|
});
|
|
23
|
+
it('v2 complete', async () => {
|
|
24
|
+
const indexer = await CarIndexer.fromBytes(goCarV2Bytes);
|
|
25
|
+
const roots = await indexer.getRoots();
|
|
26
|
+
assert.strictEqual(roots.length, 1);
|
|
27
|
+
assert(goCarV2Roots[0].equals(roots[0]));
|
|
28
|
+
assert.strictEqual(indexer.version, 2);
|
|
29
|
+
const indexData = [];
|
|
30
|
+
for await (const index of indexer) {
|
|
31
|
+
indexData.push(index);
|
|
32
|
+
}
|
|
33
|
+
assert.deepStrictEqual(indexData, goCarV2Index);
|
|
34
|
+
});
|
|
20
35
|
it('bad argument', async () => {
|
|
21
36
|
for (const arg of [
|
|
22
37
|
true,
|
|
@@ -1,12 +1,22 @@
|
|
|
1
1
|
import { CarReader } from '../lib/reader-browser.js';
|
|
2
2
|
import { CarWriter } from '../lib/writer-browser.js';
|
|
3
|
+
import {
|
|
4
|
+
bytesReader,
|
|
5
|
+
readHeader
|
|
6
|
+
} from '../lib/decoder.js';
|
|
3
7
|
import * as Block from 'multiformats/block';
|
|
4
8
|
import { sha256 } from 'multiformats/hashes/sha2';
|
|
5
9
|
import * as raw from 'multiformats/codecs/raw';
|
|
10
|
+
import { base64 } from 'multiformats/bases/base64';
|
|
11
|
+
import * as dagPb from '@ipld/dag-pb';
|
|
6
12
|
import {
|
|
7
13
|
carBytes,
|
|
8
14
|
makeIterable,
|
|
9
|
-
assert
|
|
15
|
+
assert,
|
|
16
|
+
goCarV2Bytes,
|
|
17
|
+
goCarV2Roots,
|
|
18
|
+
goCarV2Index,
|
|
19
|
+
goCarV2Contents
|
|
10
20
|
} from './common.js';
|
|
11
21
|
import {
|
|
12
22
|
verifyRoots,
|
|
@@ -15,6 +25,8 @@ import {
|
|
|
15
25
|
verifyBlocks,
|
|
16
26
|
verifyCids
|
|
17
27
|
} from './verify-store-reader.js';
|
|
28
|
+
import { data as fixtures } from './fixtures.js';
|
|
29
|
+
import { expectations as fixtureExpectations } from './fixtures-expectations.js';
|
|
18
30
|
describe('CarReader fromBytes()', () => {
|
|
19
31
|
it('complete', async () => {
|
|
20
32
|
const reader = await CarReader.fromBytes(carBytes);
|
|
@@ -52,6 +64,29 @@ describe('CarReader fromBytes()', () => {
|
|
|
52
64
|
message: 'Unexpected end of data'
|
|
53
65
|
});
|
|
54
66
|
});
|
|
67
|
+
it('v2 complete', async () => {
|
|
68
|
+
const reader = await CarReader.fromBytes(goCarV2Bytes);
|
|
69
|
+
const roots = await reader.getRoots();
|
|
70
|
+
assert.strictEqual(roots.length, 1);
|
|
71
|
+
assert(goCarV2Roots[0].equals(roots[0]));
|
|
72
|
+
assert.strictEqual(reader.version, 2);
|
|
73
|
+
for (const {cid} of goCarV2Index) {
|
|
74
|
+
const block = await reader.get(cid);
|
|
75
|
+
assert.isDefined(block);
|
|
76
|
+
if (block) {
|
|
77
|
+
assert(cid.equals(block.cid));
|
|
78
|
+
let content;
|
|
79
|
+
if (cid.code === dagPb.code) {
|
|
80
|
+
content = dagPb.decode(block.bytes);
|
|
81
|
+
} else if (cid.code === 85) {
|
|
82
|
+
content = new TextDecoder().decode(block.bytes);
|
|
83
|
+
} else {
|
|
84
|
+
assert.fail('Unexpected codec');
|
|
85
|
+
}
|
|
86
|
+
assert.deepStrictEqual(content, goCarV2Contents[cid.toString()]);
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
});
|
|
55
90
|
it('decode error - trailing null bytes', async () => {
|
|
56
91
|
const bytes = new Uint8Array(carBytes.length + 5);
|
|
57
92
|
bytes.set(carBytes);
|
|
@@ -178,4 +213,58 @@ describe('CarReader fromIterable()', () => {
|
|
|
178
213
|
message: 'Unexpected end of data'
|
|
179
214
|
});
|
|
180
215
|
});
|
|
216
|
+
it('v2 decode error - truncated', async () => {
|
|
217
|
+
const bytes = goCarV2Bytes.slice();
|
|
218
|
+
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
219
|
+
dv.setBigUint64(35, BigInt(448 - 10), true);
|
|
220
|
+
await assert.isRejected(CarReader.fromIterable(makeIterable(bytes, 64)), {
|
|
221
|
+
name: 'Error',
|
|
222
|
+
message: 'Unexpected end of data'
|
|
223
|
+
});
|
|
224
|
+
});
|
|
225
|
+
});
|
|
226
|
+
describe('Shared fixtures', () => {
|
|
227
|
+
describe('Header', () => {
|
|
228
|
+
for (const [name, {
|
|
229
|
+
version: expectedVersion,
|
|
230
|
+
err: expectedError
|
|
231
|
+
}] of Object.entries(fixtureExpectations)) {
|
|
232
|
+
it(name, async () => {
|
|
233
|
+
const data = base64.baseDecode(fixtures[name]);
|
|
234
|
+
let header;
|
|
235
|
+
try {
|
|
236
|
+
header = await readHeader(bytesReader(data));
|
|
237
|
+
} catch (err) {
|
|
238
|
+
if (expectedError != null) {
|
|
239
|
+
assert.equal(err.message, expectedError);
|
|
240
|
+
return;
|
|
241
|
+
}
|
|
242
|
+
assert.ifError(err);
|
|
243
|
+
}
|
|
244
|
+
if (expectedError != null) {
|
|
245
|
+
assert.fail(`Expected error: ${ expectedError }`);
|
|
246
|
+
}
|
|
247
|
+
assert.isDefined(header, 'did not decode header');
|
|
248
|
+
if (expectedVersion != null && header != null) {
|
|
249
|
+
assert.strictEqual(header.version, expectedVersion);
|
|
250
|
+
}
|
|
251
|
+
});
|
|
252
|
+
}
|
|
253
|
+
});
|
|
254
|
+
describe('Contents', () => {
|
|
255
|
+
for (const [name, {cids: expectedCids}] of Object.entries(fixtureExpectations)) {
|
|
256
|
+
if (expectedCids == null) {
|
|
257
|
+
continue;
|
|
258
|
+
}
|
|
259
|
+
it(name, async () => {
|
|
260
|
+
const data = base64.baseDecode(fixtures[name]);
|
|
261
|
+
const reader = await CarReader.fromBytes(data);
|
|
262
|
+
let i = 0;
|
|
263
|
+
for await (const cid of reader.cids()) {
|
|
264
|
+
assert.strictEqual(cid.toString(), expectedCids[i++]);
|
|
265
|
+
}
|
|
266
|
+
assert.strictEqual(i, expectedCids.length);
|
|
267
|
+
});
|
|
268
|
+
}
|
|
269
|
+
});
|
|
181
270
|
});
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import varint from 'varint';
|
|
2
|
+
import {
|
|
3
|
+
Token,
|
|
4
|
+
Type
|
|
5
|
+
} from 'cborg';
|
|
6
|
+
import { tokensToLength } from 'cborg/length';
|
|
7
|
+
import * as CBOR from '@ipld/dag-cbor';
|
|
8
|
+
/**
 * Writes CAR (Content Addressable aRchive) data into a caller-provided,
 * fixed-size buffer. A region of `headerSize` bytes at the front of the
 * buffer is reserved for the header, which is only written out by `close()`;
 * blocks are appended sequentially after that region.
 *
 * All methods delegate to the module-level functions of the same name and
 * return `this` (or the closed bytes) so calls can be chained.
 */
class CarBufferWriter {
  /**
   * @param {Uint8Array} bytes - Destination buffer view the CAR is written into.
   * @param {number} headerSize - Number of bytes reserved at the front for the header.
   */
  constructor(bytes, headerSize) {
    this.bytes = bytes;
    this.byteOffset = headerSize;
    this.roots = [];
    this.headerSize = headerSize;
  }
  /**
   * Adds a root CID to the header (growing the reserved region if
   * `options.resize` is set and there is spare capacity).
   */
  addRoot(root, options) {
    addRoot(this, root, options);
    return this;
  }
  /** Appends one `{ cid, bytes }` block to the buffer. */
  write(block) {
    addBlock(this, block);
    return this;
  }
  /** Writes the header and returns the finished CAR bytes. */
  close(options) {
    return close(this, options);
  }
}
|
|
27
|
+
/**
 * Adds a root CID to the writer's header. If the header no longer fits in
 * its reserved region, either grows the region (when `resize` is true and
 * the buffer has spare capacity) or rolls the root back and throws.
 *
 * Fix: the recovery hint in the error message previously told callers to
 * pass `{ resize: root }`; the option is a boolean, so it must be
 * `{ resize: true }`.
 *
 * @param {CarBufferWriter} writer
 * @param {CID} root
 * @param {{resize?: boolean}} [options]
 * @throws {RangeError} When the header (or buffer) cannot accommodate the root.
 */
export const addRoot = (writer, root, {
  resize = false
} = {}) => {
  const {bytes, headerSize, byteOffset, roots} = writer;
  writer.roots.push(root);
  const size = headerLength(writer);
  // If the header would now overflow its reserved region...
  if (size > headerSize) {
    // ...check whether the buffer itself still has room for the growth.
    // NOTE(review): `<` rejects the exact-fit case (growth filling the buffer
    // precisely); `<=` may be intended — confirm before changing.
    if (size - headerSize + byteOffset < bytes.byteLength) {
      if (resize) {
        resizeHeader(writer, size);
      } else {
        // Undo the speculative push so the writer stays consistent.
        roots.pop();
        throw new RangeError(`Header of size ${ headerSize } has no capacity for new root ${ root }.
  However there is a space in the buffer and you could call addRoot(root, { resize: true }) to resize header to make a space for this root.`);
      }
    } else {
      roots.pop();
      throw new RangeError(`Buffer has no capacity for a new root ${ root }`);
    }
  }
};
|
|
48
|
+
/**
 * Computes the number of bytes a block occupies in a CAR file: a varint
 * length prefix followed by the CID bytes and the block payload.
 *
 * @param {{cid: CID, bytes: Uint8Array}} block
 * @returns {number} Total encoded length of the block section.
 */
export const blockLength = ({cid, bytes}) => {
  const payload = cid.bytes.byteLength + bytes.byteLength;
  return varint.encodingLength(payload) + payload;
};
|
|
52
|
+
/**
 * Appends one block (varint length prefix, CID bytes, payload bytes) at the
 * writer's current offset.
 *
 * @param {CarBufferWriter} writer
 * @param {{cid: CID, bytes: Uint8Array}} block
 * @throws {RangeError} When the remaining buffer space cannot hold the block.
 */
export const addBlock = (writer, {cid, bytes}) => {
  const byteLength = cid.bytes.byteLength + bytes.byteLength;
  const size = varint.encode(byteLength);
  // Guard clause: refuse before touching the buffer so a failed write
  // leaves the writer untouched.
  if (writer.byteOffset + size.length + byteLength > writer.bytes.byteLength) {
    throw new RangeError('Buffer has no capacity for this block');
  }
  writeBytes(writer, size);
  writeBytes(writer, cid.bytes);
  writeBytes(writer, bytes);
};
|
|
63
|
+
/**
 * Finalizes the CAR: encodes the `{version: 1, roots}` header, writes it
 * (with its varint length prefix) into the reserved front region, and
 * returns a view over exactly the bytes written.
 *
 * If the reserved region turns out larger than the actual header, the
 * blocks must be shifted down first — that only happens when `resize` is
 * set; otherwise a RangeError is thrown.
 *
 * @param {CarBufferWriter} writer
 * @param {{resize?: boolean}} [options]
 * @returns {Uint8Array} The finished CAR bytes.
 * @throws {RangeError} When the reserved header region does not match and
 *   `resize` was not requested.
 */
export const close = (writer, {
  resize = false
} = {}) => {
  const {roots, bytes, byteOffset, headerSize} = writer;
  const headerBytes = CBOR.encode({
    version: 1,
    roots
  });
  const varintBytes = varint.encode(headerBytes.length);
  const size = varintBytes.length + headerBytes.byteLength;
  // Slack between the reserved region and the actual header size.
  const slack = headerSize - size;
  if (slack === 0) {
    // Perfect fit: write header in place and trim to what was written.
    writeHeader(writer, varintBytes, headerBytes);
    return bytes.subarray(0, byteOffset);
  }
  if (resize) {
    // Shrink (or adjust) the reserved region, moving blocks accordingly,
    // then write the header. Note byteOffset changed, so re-read it.
    resizeHeader(writer, size);
    writeHeader(writer, varintBytes, headerBytes);
    return bytes.subarray(0, writer.byteOffset);
  }
  throw new RangeError(`Header size was overestimated.
You can use close({ resize: true }) to resize header`);
};
|
|
86
|
+
/**
 * Changes the size of the reserved header region to `byteLength`, moving the
 * already-written block bytes so they start immediately after the new region.
 * Updates `byteOffset` and `headerSize` to match.
 *
 * @param {CarBufferWriter} writer
 * @param {number} byteLength - New header region size in bytes.
 */
export const resizeHeader = (writer, byteLength) => {
  const {bytes, headerSize} = writer;
  // Shift the block data region to its new start position. TypedArray#set
  // handles overlapping same-buffer copies correctly.
  const blockData = bytes.subarray(headerSize, writer.byteOffset);
  bytes.set(blockData, byteLength);
  writer.byteOffset += byteLength - headerSize;
  writer.headerSize = byteLength;
};
|
|
92
|
+
/**
 * Copies `chunk` into the writer's buffer at the current offset and advances
 * the offset. Capacity checks are the caller's responsibility.
 *
 * @param {CarBufferWriter} writer
 * @param {Uint8Array|number[]} chunk
 */
const writeBytes = (writer, chunk) => {
  writer.bytes.set(chunk, writer.byteOffset);
  writer.byteOffset += chunk.length;
};
|
|
96
|
+
/**
 * Writes the varint length prefix followed by the encoded header at the very
 * start of the buffer (offset 0).
 *
 * @param {{bytes: Uint8Array}} writer
 * @param {number[]} varint - Encoded varint bytes of the header length.
 * @param {Uint8Array} header - CBOR-encoded header.
 */
const writeHeader = ({bytes}, varint, header) => {
  bytes.set(varint);
  bytes.set(header, varint.length);
};
|
|
100
|
+
// CBOR tokens for the fixed part of every CAR v1 header:
// a 2-entry map { version: 1, roots: [...] }.
const headerPreludeTokens = [
  new Token(Type.map, 2),
  new Token(Type.string, 'version'),
  new Token(Type.uint, 1),
  new Token(Type.string, 'roots')
];
// CIDs are encoded in dag-cbor as tag 42.
const CID_TAG = new Token(Type.tag, 42);
/**
 * Computes the exact byte length of a CAR v1 header (including its varint
 * length prefix) for roots whose CID byte lengths are given.
 *
 * @param {number[]} rootLengths - Byte length of each root CID.
 * @returns {number} Varint prefix length + encoded header length.
 */
export const calculateHeaderLength = rootLengths => {
  const tokens = [
    ...headerPreludeTokens,
    new Token(Type.array, rootLengths.length)
  ];
  for (const rootLength of rootLengths) {
    // Each root is tag(42) wrapping a byte string of the CID plus one extra
    // byte for dag-cbor's leading 0x00 multibase identity prefix.
    tokens.push(CID_TAG, new Token(Type.bytes, { length: rootLength + 1 }));
  }
  const length = tokensToLength(tokens);
  return varint.encodingLength(length) + length;
};
|
|
117
|
+
/**
 * Exact header length for a concrete set of root CIDs.
 * @param {{roots: CID[]}} writerOrOptions
 * @returns {number}
 */
export const headerLength = ({roots}) =>
  calculateHeaderLength(roots.map(cid => cid.bytes.byteLength));
/**
 * Estimated header length for `rootCount` roots, assuming each root CID is
 * `rootByteLength` bytes (36 = typical CIDv1 with a 32-byte sha2-256 digest).
 * @param {number} rootCount
 * @param {number} [rootByteLength=36]
 * @returns {number}
 */
export const estimateHeaderLength = (rootCount, rootByteLength = 36) =>
  calculateHeaderLength(new Array(rootCount).fill(rootByteLength));
|
|
119
|
+
/**
 * Creates a `CarBufferWriter` over (a region of) the given `ArrayBuffer`.
 *
 * When `headerSize` is omitted it is derived exactly from the provided
 * `roots`; pass an explicit `headerSize` (e.g. from `estimateHeaderLength`)
 * if roots will be added later via `addRoot`.
 *
 * @param {ArrayBuffer} buffer - Backing buffer to write the CAR into.
 * @param {object} [options]
 * @param {CID[]} [options.roots=[]] - Roots known up-front.
 * @param {number} [options.byteOffset=0] - Start of the writable region.
 * @param {number} [options.byteLength=buffer.byteLength] - Size of the region.
 * @param {number} [options.headerSize] - Bytes to reserve for the header.
 * @returns {CarBufferWriter}
 */
export const createWriter = (buffer, {
  roots = [],
  byteOffset = 0,
  byteLength = buffer.byteLength,
  headerSize = headerLength({ roots })
} = {}) => {
  const bytes = new Uint8Array(buffer, byteOffset, byteLength);
  const writer = new CarBufferWriter(bytes, headerSize);
  // Register the up-front roots; headerSize already accounts for them when
  // it was derived from `roots`, so these additions cannot overflow it.
  for (const root of roots) {
    writer.addRoot(root);
  }
  return writer;
};
|