@autonomys/auto-dag-data 1.0.8 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/compression/index.d.ts.map +1 -1
- package/dist/compression/index.js +1 -1
- package/dist/encryption/index.d.ts +3 -2
- package/dist/encryption/index.d.ts.map +1 -1
- package/dist/ipld/chunker.d.ts +1 -0
- package/dist/ipld/chunker.d.ts.map +1 -1
- package/dist/ipld/chunker.js +1 -1
- package/dist/ipld/nodes.d.ts.map +1 -1
- package/dist/ipld/nodes.js +1 -0
- package/jest.config.ts +1 -0
- package/package.json +2 -2
- package/src/compression/index.ts +1 -1
- package/src/encryption/index.ts +3 -2
- package/src/ipld/chunker.ts +1 -1
- package/src/ipld/nodes.ts +1 -0
- package/tests/chunker.spec.ts +133 -4
- package/tests/compression.spec.ts +81 -0
- package/tests/encryption.spec.ts +21 -0
- package/tests/fileRetrievability.spec.ts +181 -0
- package/tests/nodes.spec.ts +24 -1
- package/tests/offchainMetadata.spec.ts +149 -0
package/dist/compression/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/compression/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAGpD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAE/C,eAAO,MAAM,sBAAsB,QAAc,CAAA;AAEjD,wBAAuB,YAAY,CACjC,IAAI,EAAE,aAAa,CAAC,MAAM,CAAC,EAC3B,EACE,KAAS,EACT,SAAkC,EAClC,SAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB;AAED,wBAAuB,cAAc,CACnC,cAAc,EAAE,aAAa,CAAC,MAAM,CAAC,EACrC,EACE,SAAkC,EAClC,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/compression/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAGpD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAE/C,eAAO,MAAM,sBAAsB,QAAc,CAAA;AAEjD,wBAAuB,YAAY,CACjC,IAAI,EAAE,aAAa,CAAC,MAAM,CAAC,EAC3B,EACE,KAAS,EACT,SAAkC,EAClC,SAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB;AAED,wBAAuB,cAAc,CACnC,cAAc,EAAE,aAAa,CAAC,MAAM,CAAC,EACrC,EACE,SAAkC,EAClC,SAAS,EACT,KAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB"}
package/dist/compression/index.js
CHANGED
@@ -64,7 +64,7 @@ export function compressFile(file_1, _a) {
 });
 }
 export function decompressFile(compressedFile_1, _a) {
-return __asyncGenerator(this, arguments, function* decompressFile_1(compressedFile, { chunkSize = COMPRESSION_CHUNK_SIZE, algorithm
+return __asyncGenerator(this, arguments, function* decompressFile_1(compressedFile, { chunkSize = COMPRESSION_CHUNK_SIZE, algorithm, level = 9, }) {
 var _b, e_2, _c, _d;
 if (algorithm !== CompressionAlgorithm.ZLIB) {
 throw new Error('Unsupported compression algorithm');
package/dist/encryption/index.d.ts
CHANGED
@@ -1,8 +1,9 @@
+import { AwaitIterable } from 'interface-store';
 import { EncryptionOptions } from '../metadata/index.js';
 import type { PickPartial } from '../utils/types.js';
 import { PasswordGenerationOptions } from './types.js';
 export declare const ENCRYPTING_CHUNK_SIZE: number;
 export declare const getKeyFromPassword: ({ password, salt }: PasswordGenerationOptions) => Promise<CryptoKey>;
-export declare const encryptFile: (file:
-export declare const decryptFile: (file:
+export declare const encryptFile: (file: AwaitIterable<Buffer>, password: string, { chunkSize, algorithm }: PickPartial<EncryptionOptions, "algorithm">) => AsyncIterable<Buffer>;
+export declare const decryptFile: (file: AwaitIterable<Buffer>, password: string, { chunkSize, algorithm }: PickPartial<EncryptionOptions, "algorithm">) => AsyncIterable<Buffer>;
 //# sourceMappingURL=index.d.ts.map
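For orientation, a minimal usage sketch of the widened encryptFile/decryptFile signatures. This is not part of the diff; it assumes the package's root exports and the EncryptionAlgorithm.AES_256_GCM value used elsewhere in the test suite.

// Sketch only: round-trip a buffer through the new AwaitIterable<Buffer> signatures.
import { encryptFile, decryptFile, EncryptionAlgorithm } from '@autonomys/auto-dag-data'

const roundTrip = async (data: Buffer, password: string): Promise<Buffer> => {
  // Any AwaitIterable<Buffer> is accepted, e.g. a plain array of buffers.
  const encrypted = encryptFile([data], password, { algorithm: EncryptionAlgorithm.AES_256_GCM })

  const encryptedChunks: Buffer[] = []
  for await (const chunk of encrypted) encryptedChunks.push(chunk)

  const decrypted = decryptFile(encryptedChunks, password, {
    algorithm: EncryptionAlgorithm.AES_256_GCM,
  })

  const plainChunks: Buffer[] = []
  for await (const chunk of decrypted) plainChunks.push(chunk)
  return Buffer.concat(plainChunks)
}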
package/dist/encryption/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/encryption/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAuB,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAE7E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,yBAAyB,EAAE,MAAM,YAAY,CAAA;AAItD,eAAO,MAAM,qBAAqB,QAAc,CAAA;AAMhD,eAAO,MAAM,kBAAkB,uBAA8B,yBAAyB,uBAyBrF,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACkC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC5F,aAAa,CAAC,MAAM,CAetB,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACiC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC3F,aAAa,CAAC,MAAM,CA+BtB,CAAA"}
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/encryption/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC/C,OAAO,EAAuB,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAE7E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,yBAAyB,EAAE,MAAM,YAAY,CAAA;AAItD,eAAO,MAAM,qBAAqB,QAAc,CAAA;AAMhD,eAAO,MAAM,kBAAkB,uBAA8B,yBAAyB,uBAyBrF,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACkC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC5F,aAAa,CAAC,MAAM,CAetB,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACiC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC3F,aAAa,CAAC,MAAM,CA+BtB,CAAA"}
package/dist/ipld/chunker.d.ts
CHANGED
@@ -10,6 +10,7 @@ type ChunkerLimits = {
 };
 type ChunkerOptions = ChunkerLimits & FileUploadOptions;
 export declare const DEFAULT_NODE_MAX_SIZE = 65535;
+export declare const MAX_NAME_SIZE = 255;
 export declare const NODE_METADATA_SIZE: number;
 export declare const DEFAULT_MAX_CHUNK_SIZE: number;
 export declare const LINK_SIZE_IN_BYTES = 40;
package/dist/ipld/chunker.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;AAEvD,eAAO,MAAM,qBAAqB,QAAQ,CAAA;
+
{"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;AAEvD,eAAO,MAAM,qBAAqB,QAAQ,CAAA;AAS1C,eAAO,MAAM,aAAa,MAAM,CAAA;AAQhC,eAAO,MAAM,kBAAkB,QAML,CAAA;AAE1B,eAAO,MAAM,sBAAsB,QAA6C,CAAA;AAEhF,eAAO,MAAM,kBAAkB,KAAK,CAAA;AACpC,eAAO,MAAM,yBAAyB,QAA0D,CAAA;AAEhG,eAAO,MAAM,uBAAuB,eACtB,cAAc,QACpB,aAAa,CAAC,MAAM,CAAC,aAChB,MAAM,aACN,MAAM,8DAMd,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAWb,CAAA;AAED,eAAO,MAAM,2BAA2B,eAC1B,cAAc,YAChB,gBAAgB,WAClB;IAAE,WAAW,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAItD,OAAO,CAAC,GAAG,CAiBb,CAAA;AA0CD,eAAO,MAAM,mCAAmC,eAClC,cAAc,UAClB,aAAa,CAAC,GAAG,CAAC,YAChB,MAAM,GAAG,SAAS,aACjB,MAAM,YACP,QAAQ,2EAMf,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAgDb,CAAA;AAED,eAAO,MAAM,yBAAyB,eACxB,cAAc,YAChB,GAAG,EAAE,QACT,MAAM,QACN,MAAM,2EAMT,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CA4Bb,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,yBAAyB,eACxB,cAAc,UAClB,aAAa,CAAC,MAAM,CAAC,YACnB,QAAQ,qBACyB;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,KAGnE,OAAO,CAAC,MAAM,CAiBhB,CAAA;AAED,eAAO,MAAM,iBAAiB,SACtB,MAAM,YACH,MAAM,KACd,MAOF,CAAA"}
package/dist/ipld/chunker.js
CHANGED
@@ -41,7 +41,7 @@ const NODE_LINK_DEPTH_SIZE = 4;
 // u64 -> 8 bytes
 const NODE_SIZE_SIZE = 8;
 // Limit at 255 string length (Mac Limit)
-const MAX_NAME_SIZE = 255;
+export const MAX_NAME_SIZE = 255;
 const END_OF_STRING_BYTE = 1;
 const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE;
 // Upload options may be amplified in the future
package/dist/ipld/nodes.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAI1E,OAAO,EAAc,MAAM,EAAE,MAAM,YAAY,CAAA;AAG/C,eAAO,MAAM,uBAAuB,SAC5B,MAAM,gBACC,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,gBACpB,MAAM,KAClB,MAcA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAChC,MAAM,gBACC,MAAM,KAClB,MAWA,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,
+
{"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAI1E,OAAO,EAAc,MAAM,EAAE,MAAM,YAAY,CAAA;AAG/C,eAAO,MAAM,uBAAuB,SAC5B,MAAM,gBACC,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,gBACpB,MAAM,KAClB,MAcA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAChC,MAAM,gBACC,MAAM,KAClB,MAWA,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,MAeF,CAAA"}
package/dist/ipld/nodes.js
CHANGED
package/jest.config.ts
CHANGED
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@autonomys/auto-dag-data",
   "packageManager": "yarn@4.1.1",
-  "version": "1.0.8",
+  "version": "1.0.9",
   "license": "MIT",
   "main": "dist/index.js",
   "repository": {
@@ -48,5 +48,5 @@
     "protons": "^7.6.0",
     "protons-runtime": "^5.5.0"
   },
-  "gitHead": "
+  "gitHead": "c8da43fe9c2ba885d1ff5e0241d9a41e2c4f3389"
 }
package/src/compression/index.ts
CHANGED
@@ -49,7 +49,7 @@ export async function* decompressFile(
   compressedFile: AwaitIterable<Buffer>,
   {
     chunkSize = COMPRESSION_CHUNK_SIZE,
-    algorithm
+    algorithm,
     level = 9,
   }: PickPartial<CompressionOptions, 'algorithm'>,
 ): AsyncIterable<Buffer> {
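A short sketch of how the corrected option destructuring is consumed (the added comma lets level be parsed alongside algorithm). This is not part of the diff; it assumes the package root exports used in the tests.

// Sketch only: compress and decompress an in-memory buffer with explicit ZLIB options.
import { compressFile, decompressFile, CompressionAlgorithm } from '@autonomys/auto-dag-data'

const roundTrip = async (data: Buffer): Promise<Buffer> => {
  const compressed = compressFile([data], { algorithm: CompressionAlgorithm.ZLIB, level: 9 })

  const compressedChunks: Buffer[] = []
  for await (const chunk of compressed) compressedChunks.push(chunk)

  // decompressFile accepts the same option shape, including the (now parsed) level field.
  const decompressed = decompressFile(compressedChunks, { algorithm: CompressionAlgorithm.ZLIB })

  const plainChunks: Buffer[] = []
  for await (const chunk of decompressed) plainChunks.push(chunk)
  return Buffer.concat(plainChunks)
}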
package/src/encryption/index.ts
CHANGED
@@ -1,5 +1,6 @@
 import { Crypto } from '@peculiar/webcrypto'
 import { randomBytes } from 'crypto'
+import { AwaitIterable } from 'interface-store'
 import { EncryptionAlgorithm, EncryptionOptions } from '../metadata/index.js'
 import { asyncByChunk } from '../utils/async.js'
 import type { PickPartial } from '../utils/types.js'
@@ -41,7 +42,7 @@ export const getKeyFromPassword = async ({ password, salt }: PasswordGenerationO
 }

 export const encryptFile = async function* (
-  file:
+  file: AwaitIterable<Buffer>,
   password: string,
   { chunkSize = ENCRYPTING_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
 ): AsyncIterable<Buffer> {
@@ -62,7 +63,7 @@ export const encryptFile = async function* (
 }

 export const decryptFile = async function* (
-  file:
+  file: AwaitIterable<Buffer>,
   password: string,
   { chunkSize = ENCRYPTED_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
 ): AsyncIterable<Buffer> {
package/src/ipld/chunker.ts
CHANGED
@@ -24,7 +24,7 @@ const NODE_LINK_DEPTH_SIZE = 4
 // u64 -> 8 bytes
 const NODE_SIZE_SIZE = 8
 // Limit at 255 string length (Mac Limit)
-const MAX_NAME_SIZE = 255
+export const MAX_NAME_SIZE = 255
 const END_OF_STRING_BYTE = 1
 const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE
 // Upload options may be amplified in the future
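With MAX_NAME_SIZE now exported, callers can pre-validate names before handing them to the chunker. A minimal sketch, not part of the diff; the import path is an assumption (the new tests import the constant from src/ipld/chunker, so a root re-export may not exist).

// Sketch only: reject over-long filenames before building the file DAG.
// Assumption: MAX_NAME_SIZE is reachable from the package root; otherwise import it from the ipld/chunker module.
import { MemoryBlockstore } from 'blockstore-core'
import { MAX_NAME_SIZE, processFileToIPLDFormat } from '@autonomys/auto-dag-data'

const uploadNamedBuffer = (buffer: Buffer, name: string) => {
  if (name.length > MAX_NAME_SIZE) {
    throw new Error(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`)
  }
  const blockstore = new MemoryBlockstore()
  return processFileToIPLDFormat(blockstore, [buffer], BigInt(buffer.length), name)
}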
package/src/ipld/nodes.ts
CHANGED
package/tests/chunker.spec.ts
CHANGED
@@ -1,14 +1,24 @@
 import { BaseBlockstore, MemoryBlockstore } from 'blockstore-core'
-import { cidOfNode, cidToString, createSingleFileIpldNode } from '../src'
 import {
+  cidOfNode,
+  cidToString,
+  createFileChunkIpldNode,
+  createSingleFileIpldNode,
+  fileBuilders,
+} from '../src'
+import {
+  DEFAULT_MAX_CHUNK_SIZE,
   LINK_SIZE_IN_BYTES,
+  MAX_NAME_SIZE,
   NODE_METADATA_SIZE,
+  processBufferToIPLDFormatFromChunks,
+  processChunksToIPLDFormat,
   processFileToIPLDFormat,
   processFolderToIPLDFormat,
   processMetadataToIPLDFormat,
 } from '../src/ipld/chunker'
-import { createNode, decodeNode, PBNode } from '../src/ipld/utils'
-import {
+import { createNode, decodeNode, encodeNode, PBNode } from '../src/ipld/utils'
+import { fileMetadata, IPLDNodeData, MetadataType, OffchainMetadata } from '../src/metadata'

 describe('chunker', () => {
   describe('file creation', () => {
@@ -88,6 +98,22 @@ describe('chunker', () => {
       })
     })

+    it('create a file with long name should throw an error', async () => {
+      const name = 'a'.repeat(MAX_NAME_SIZE + 1)
+      const blockstore = new MemoryBlockstore()
+      expect(() =>
+        processFileToIPLDFormat(blockstore, [Buffer.from('hello')], BigInt(5), name),
+      ).toThrow(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`)
+    })
+
+    it('create a file with long name from buffer should throw an error', async () => {
+      const name = 'a'.repeat(MAX_NAME_SIZE + 1)
+      const blockstore = new MemoryBlockstore()
+      await expect(
+        processBufferToIPLDFormatFromChunks(blockstore, [], name, BigInt(5), fileBuilders),
+      ).rejects.toThrow(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`)
+    })
+
     it('create a file dag with inlinks', async () => {
       const chunkLength = 1000
       const maxNodeSize = chunkLength + NODE_METADATA_SIZE
@@ -194,6 +220,89 @@ describe('chunker', () => {
       expect(rootCount).toBe(1)
       expect(inlinkCount).toBe(3)
     })
+
+    it('create a folder with long name should throw an error', async () => {
+      const name = 'a'.repeat(MAX_NAME_SIZE + 1)
+      const blockstore = new MemoryBlockstore()
+      await expect(processFolderToIPLDFormat(blockstore, [], name, BigInt(1000))).rejects.toThrow(
+        `Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`,
+      )
+    })
+  })
+
+  describe('asyncronous file creation', () => {
+    it('process chunks to IPLD format should return the leftover buffer', async () => {
+      const filename = 'test.txt'
+      const chunkSize = DEFAULT_MAX_CHUNK_SIZE
+      const chunksCount = 1.5
+      const buffer = Buffer.from(
+        Array.from({ length: chunkSize * chunksCount })
+          .map(() => Math.floor(Math.random() * 16).toString(16))
+          .join(''),
+      )
+
+      const leftoverSize = buffer.length % chunkSize
+      const blockstore = new MemoryBlockstore()
+      const leftover = await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)
+      expect(leftover.length).toBe(leftoverSize)
+    })
+
+    it('process chunks with exact chunk size len(leftover)=0', async () => {
+      const filename = 'test.txt'
+      const chunkSize = DEFAULT_MAX_CHUNK_SIZE
+      const chunksCount = 4
+      const buffer = Buffer.from(
+        Array.from({ length: chunkSize * chunksCount })
+          .map(() => Math.floor(Math.random() * 16).toString(16))
+          .join(''),
+      )
+
+      const blockstore = new MemoryBlockstore()
+      const leftover = await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)
+
+      expect(leftover.length).toBe(0)
+    })
+
+    it('process file by chunks', async () => {
+      const filename = 'test.txt'
+      const chunkSize = DEFAULT_MAX_CHUNK_SIZE
+      const chunksCount = 4.5
+      const buffer = Buffer.from(
+        Array.from({ length: chunkSize * chunksCount })
+          .map(() => Math.floor(Math.random() * 16).toString(16))
+          .join(''),
+      )
+
+      const blockstore = new MemoryBlockstore()
+      const leftover = await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)
+      const leftoverCid = createFileChunkIpldNode(leftover)
+      await blockstore.put(cidOfNode(leftoverCid), encodeNode(leftoverCid))
+
+      const mapCIDs = (async function* () {
+        for await (const { cid } of blockstore.getAll()) {
+          yield cid
+        }
+      })()
+
+      const headCID = await processBufferToIPLDFormatFromChunks(
+        blockstore,
+        mapCIDs,
+        filename,
+        BigInt(buffer.length),
+        fileBuilders,
+      )
+
+      const headNode = decodeNode(await blockstore.get(headCID))
+      expect(headNode?.Links.length).toBe(Math.ceil(chunksCount))
+      expect(cidToString(headNode?.Links[headNode.Links.length - 1].Hash)).toEqual(
+        cidToString(cidOfNode(leftoverCid)),
+      )
+      const ipldMetadata = IPLDNodeData.decode(headNode?.Data ?? new Uint8Array())
+      expect(ipldMetadata.name).toBe(filename)
+      expect(ipldMetadata.type).toBe(MetadataType.File)
+      expect(ipldMetadata.linkDepth).toBe(1)
+      expect(ipldMetadata.size!.toString()).toBe(buffer.length.toString())
+    })
   })

   describe('metadata creation', () => {
@@ -209,11 +318,31 @@ describe('chunker', () => {
       }

       const blockstore = new MemoryBlockstore()
-
+      await processMetadataToIPLDFormat(blockstore, metadata)
       const nodes = await nodesFromBlockstore(blockstore)
       expect(nodes.length).toBe(1)
     })

+    it('create a metadata dag with long name should throw an error', async () => {
+      const name = 'a'.repeat(MAX_NAME_SIZE + 1)
+      const metadata = fileMetadata(
+        cidOfNode(createNode(Buffer.from(Math.random().toString()))),
+        [
+          {
+            cid: cidToString(cidOfNode(createNode(Buffer.from(Math.random().toString())))),
+            size: BigInt(1000),
+          },
+        ],
+        BigInt(1000),
+        name,
+      )
+
+      const blockstore = new MemoryBlockstore()
+      await expect(processMetadataToIPLDFormat(blockstore, metadata)).rejects.toThrow(
+        `Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`,
+      )
+    })
+
     it('large metadata dag represented into multiple nodes', async () => {
       const metadata: OffchainMetadata = {
         type: 'file',
package/tests/compression.spec.ts
CHANGED
@@ -1,5 +1,10 @@
+import { AwaitIterable } from 'interface-store'
 import { compressFile, COMPRESSION_CHUNK_SIZE, CompressionAlgorithm, decompressFile } from '../src'

+const awaitIterable = async (it: AwaitIterable<Buffer>) => {
+  for await (const _ of it);
+}
+
 describe('compression', () => {
   it('compresses and decompresses a file with default options', async () => {
     const file = Buffer.from('hello'.repeat(1000))
@@ -55,4 +60,80 @@ describe('compression', () => {

     expect(decompressedBuffer.toString()).toBe(file.toString())
   })
+
+  it('asynchronously iterates over the compressed file for chunked compression', async () => {
+    const chunkSize = COMPRESSION_CHUNK_SIZE
+    const chunks = 5
+    const chunk = Buffer.from('hello'.repeat(chunkSize))
+    const compressed = compressFile(
+      (async function* () {
+        for (let i = 0; i < chunks; i++) {
+          yield chunk
+          await new Promise((resolve) => setTimeout(resolve, 50))
+        }
+      })(),
+      {
+        level: 9,
+        algorithm: CompressionAlgorithm.ZLIB,
+      },
+    )
+
+    await awaitIterable(compressed)
+  }, 10_000)
+
+  it('throws an error if the compression algorithm is not supported', async () => {
+    await expect(
+      awaitIterable(compressFile([Buffer.from('hello')], { algorithm: 'efwhhgfew' as any })),
+    ).rejects.toThrow('Unsupported compression algorithm')
+  })
+
+  it('throws an error if the compression level is invalid', async () => {
+    await expect(
+      awaitIterable(
+        compressFile([Buffer.from('hello')], {
+          algorithm: CompressionAlgorithm.ZLIB,
+          level: -1 as any,
+        }),
+      ),
+    ).rejects.toThrow('Invalid compression level')
+  })
+
+  it('throws an error if the chunk size is invalid', async () => {
+    await expect(
+      awaitIterable(
+        compressFile([Buffer.from('hello')], {
+          algorithm: CompressionAlgorithm.ZLIB,
+          chunkSize: 0,
+        }),
+      ),
+    ).rejects.toThrow('Invalid chunk size')
+  })
+
+  it('throws an error if the decompression algorithm is not supported', async () => {
+    await expect(
+      awaitIterable(decompressFile([Buffer.from('hello')], { algorithm: 'efwhhgfew' as any })),
+    ).rejects.toThrow('Unsupported compression algorithm')
+  })
+
+  it('throws an error if the decompression chunk size is invalid', async () => {
+    await expect(
+      awaitIterable(
+        decompressFile([Buffer.from('hello')], {
+          chunkSize: 0,
+          algorithm: CompressionAlgorithm.ZLIB,
+        }),
+      ),
+    ).rejects.toThrow('Invalid chunk size')
+  })
+
+  it('throws an error if the compression level is invalid', async () => {
+    await expect(
+      awaitIterable(
+        decompressFile([Buffer.from('hello')], {
+          level: -1 as any,
+          algorithm: CompressionAlgorithm.ZLIB,
+        }),
+      ),
+    ).rejects.toThrow('Invalid compression level')
+  })
 })
package/tests/encryption.spec.ts
CHANGED
@@ -1,5 +1,10 @@
+import { AwaitIterable } from 'interface-store'
 import { decryptFile, encryptFile, EncryptionAlgorithm } from '../src'

+const awaitIterable = async (it: AwaitIterable<Buffer>) => {
+  for await (const _ of it);
+}
+
 describe('encryption', () => {
   it('encrypts and decrypts a file with default chunk size', async () => {
     const chunk = 'hello'
@@ -101,4 +106,20 @@ describe('encryption', () => {
       decryptedBuffer = Buffer.concat([decryptedBuffer, chunk])
     }
   })
+
+  it('throws an error if the encryption algorithm is not supported', async () => {
+    await expect(
+      awaitIterable(
+        encryptFile([Buffer.from('hello')], 'password', { algorithm: 'efwhhgfew' as any }),
+      ),
+    ).rejects.toThrow('Unsupported encryption algorithm')
+  })
+
+  it('throws an error if the decryption algorithm is not supported', async () => {
+    await expect(
+      awaitIterable(
+        decryptFile([Buffer.from('hello')], 'password', { algorithm: 'efwhhgfew' as any }),
+      ),
+    ).rejects.toThrow('Unsupported encryption algorithm')
+  })
 })
package/tests/fileRetrievability.spec.ts
ADDED
@@ -0,0 +1,181 @@
import { MemoryBlockstore } from 'blockstore-core'
import {
  createChunkedFileIpldNode,
  createSingleFileIpldNode,
  decodeIPLDNodeData,
  decodeNode,
  DEFAULT_MAX_CHUNK_SIZE,
  encodeNode,
  fileBuilders,
  processBufferToIPLDFormatFromChunks,
  processChunksToIPLDFormat,
  processFileToIPLDFormat,
} from '../src'

describe('file retrievability', () => {
  it('should be able to retrieve a file', () => {
    const filename = 'test.txt'
    const buffer = Buffer.from('hello world')
    const encodedNode = encodeNode(createSingleFileIpldNode(buffer, filename))

    const decodedNode = decodeIPLDNodeData(encodedNode)

    expect(decodedNode.name).toBe(filename)
    expect(decodedNode.size!.toString()).toBe(buffer.length.toString())
    expect(Buffer.from(decodedNode.data ?? '').toString()).toBe(buffer.toString())
  })

  it('should be able to retrieve a file with chunked file', async () => {
    const filename = 'test.txt'
    const expectedChunks = 4
    const fileSize = expectedChunks * DEFAULT_MAX_CHUNK_SIZE
    const buffer = Buffer.from(
      Array.from({ length: fileSize })
        .map(() => Math.floor(Math.random() * 16).toString(16))
        .join(''),
    )
    const blockstore = new MemoryBlockstore()
    const headCID = await processFileToIPLDFormat(
      blockstore,
      [buffer],
      BigInt(buffer.length),
      filename,
    )

    const node = await blockstore.get(headCID)
    const decodedNode = decodeNode(node)

    expect(decodedNode.Links.length).toBe(expectedChunks)
    const chunks = await Promise.all(
      decodedNode.Links.map(async (e) => {
        const chunk = await blockstore.get(e.Hash)
        const decodedChunk = decodeIPLDNodeData(chunk)
        expect(decodedChunk.data).toBeDefined()
        return Buffer.from(decodedChunk.data!)
      }),
    )

    const allChunks = await Promise.all(chunks)
    const finalBuffer = Buffer.concat(allChunks)
    expect(finalBuffer.toString()).toBe(buffer.toString())
  })

  it('should be able to retrieve a file with chunked file with uneven chunk size', async () => {
    const filename = 'test.txt'
    const expectedChunks = 1.5
    const fileSize = Math.floor(expectedChunks * DEFAULT_MAX_CHUNK_SIZE)
    const buffer = Buffer.from(
      Array.from({ length: fileSize })
        .map(() => Math.floor(Math.random() * 16).toString(16))
        .join(''),
    )

    const blockstore = new MemoryBlockstore()
    const headCID = await processFileToIPLDFormat(
      blockstore,
      [buffer],
      BigInt(buffer.length),
      filename,
    )

    const node = await blockstore.get(headCID)
    const decodedNode = decodeNode(node)

    expect(decodedNode.Links.length).toBe(Math.ceil(expectedChunks))
    const chunks = await Promise.all(
      decodedNode.Links.map(async (e) => {
        const chunk = await blockstore.get(e.Hash)
        const decodedChunk = decodeIPLDNodeData(chunk)
        expect(decodedChunk.data).toBeDefined()
        return Buffer.from(decodedChunk.data!)
      }),
    )

    const allChunks = await Promise.all(chunks)
    const finalBuffer = Buffer.concat(allChunks)
    expect(finalBuffer.toString()).toBe(buffer.toString())
  })

  it('should be able to retrieve a file with chunked file with different chunk size and uneven chunk size ', async () => {
    const filename = 'test.txt'

    const expectedChunks = 2
    const chunkSize = Math.floor((DEFAULT_MAX_CHUNK_SIZE * 100) / 121)
    const fileSize = Math.floor(expectedChunks * chunkSize)
    const buffer = Buffer.from(
      Array.from({ length: fileSize })
        .map(() => Math.floor(Math.random() * 16).toString(16))
        .join(''),
    )

    const blockstore = new MemoryBlockstore()
    const headCID = await processFileToIPLDFormat(
      blockstore,
      [buffer],
      BigInt(buffer.length),
      filename,
    )

    const node = await blockstore.get(headCID)
    const decodedNode = decodeNode(node)

    expect(decodedNode.Links.length).toBe(Math.ceil(expectedChunks))
    const chunks = await Promise.all(
      decodedNode.Links.map(async (e) => {
        const chunk = await blockstore.get(e.Hash)
        const decodedChunk = decodeIPLDNodeData(chunk)
        expect(decodedChunk.data).toBeDefined()
        return Buffer.from(decodedChunk.data!)
      }),
    )

    const allChunks = await Promise.all(chunks)
    const finalBuffer = Buffer.concat(allChunks)
    expect(finalBuffer.toString()).toBe(buffer.toString())
  })

  it('should retrieve a file generated asynchronously', async () => {
    const filename = 'test.txt'
    const chunkSize = DEFAULT_MAX_CHUNK_SIZE
    const chunksCount = 50
    const buffer = Buffer.from(
      Array.from({ length: chunkSize * chunksCount })
        .map(() => Math.floor(Math.random() * 16).toString(16))
        .join(''),
    )

    const blockstore = new MemoryBlockstore()
    await processChunksToIPLDFormat(blockstore, [buffer], fileBuilders)

    const mapCIDs = (async function* () {
      for await (const { cid } of blockstore.getAll()) {
        yield cid
      }
    })()

    const headCID = await processBufferToIPLDFormatFromChunks(
      blockstore,
      mapCIDs,
      filename,
      BigInt(buffer.length),
      fileBuilders,
    )

    const node = await blockstore.get(headCID)
    const decodedNode = decodeNode(node)

    expect(decodedNode.Links.length).toBe(chunksCount)
    const chunks = await Promise.all(
      decodedNode.Links.map(async (e) => {
        const chunk = await blockstore.get(e.Hash)
        const decodedChunk = decodeIPLDNodeData(chunk)
        expect(decodedChunk.data).toBeDefined()
        return Buffer.from(decodedChunk.data!)
      }),
    )

    const allChunks = await Promise.all(chunks)
    const finalBuffer = Buffer.concat(allChunks)
    expect(finalBuffer.toString()).toBe(buffer.toString())
  })
})
package/tests/nodes.spec.ts
CHANGED
@@ -1,11 +1,14 @@
 import {
   cidOfNode,
+  cidToString,
   createChunkedFileIpldNode,
   createFileChunkIpldNode,
   createSingleFileIpldNode,
+  fileMetadata,
 } from '../src/index.js'
-import {
+import { createMetadataNode, createNode, DEFAULT_NODE_MAX_SIZE } from '../src/ipld/index.js'
 import { IPLDNodeData, MetadataType } from '../src/metadata/onchain/protobuf/OnchainMetadata.js'
+import { stringifyMetadata } from '../src/utils/metadata.js'

 describe('node creation', () => {
   describe('files nodes', () => {
@@ -77,4 +80,24 @@ describe('node creation', () => {
       expect(decoded.linkDepth).toBe(0)
     })
   })
+
+  describe('metadata nodes', () => {
+    it('metadata node | correctly params setup', () => {
+      const randomCID = cidOfNode(createNode(Buffer.from(Math.random().toString())))
+      const metadata = fileMetadata(
+        randomCID,
+        [{ cid: cidToString(randomCID), size: BigInt(1000) }],
+        BigInt(1000),
+        'test.txt',
+      )
+      const metadataSize = Buffer.from(stringifyMetadata(metadata)).length
+
+      const metadataNode = createMetadataNode(metadata)
+
+      const decoded = IPLDNodeData.decode(metadataNode.Data ?? new Uint8Array())
+      expect(decoded.type).toBe(MetadataType.Metadata)
+      expect(decoded.name).toBe('test.txt')
+      expect(decoded.size!.toString()).toBe(BigInt(metadataSize).toString())
+    })
+  })
 })
package/tests/offchainMetadata.spec.ts
ADDED
@@ -0,0 +1,149 @@
import { MemoryBlockstore } from 'blockstore-core'
import {
  childrenMetadataFromNode,
  cidOfNode,
  cidToString,
  CompressionAlgorithm,
  createFolderIpldNode,
  createNode,
  createSingleFileIpldNode,
  EncryptionAlgorithm,
  fileMetadata,
  folderMetadata,
  processFolderToIPLDFormat,
} from '../src'

export const stringifyWithBigInt = (obj: any) => {
  return JSON.stringify(obj, (key, value) => (typeof value === 'bigint' ? value.toString() : value))
}

describe('offchain metadata', () => {
  describe('file metadata', () => {
    it('matches expected structure', () => {
      const buffer = Buffer.from('hello world')
      const cid = cidOfNode(createSingleFileIpldNode(buffer, 'test.txt'))
      const chunks = [{ size: BigInt(11).valueOf(), cid: cidToString(cid) }]
      const metadata = fileMetadata(cid, chunks, BigInt(buffer.length), 'test.txt')
      expect(metadata.name).toBe('test.txt')
      expect(metadata.totalSize === BigInt(buffer.length)).toBe(true)
      expect(metadata.type).toBe('file')
      expect(metadata.dataCid).toBe(cidToString(cid))
      expect(stringifyWithBigInt(metadata.chunks)).toEqual(stringifyWithBigInt(chunks))
      expect(metadata.uploadOptions).toEqual({
        compression: undefined,
        encryption: undefined,
      })
      expect(metadata.totalChunks).toBe(1)
      expect(metadata.mimeType).toBeUndefined()
    })

    it('matches expected structure with mime type', () => {
      const buffer = Buffer.from('hello world')
      const cid = cidOfNode(createSingleFileIpldNode(buffer, 'test.txt'))
      const chunks = [{ size: BigInt(11).valueOf(), cid: cidToString(cid) }]
      const metadata = fileMetadata(cid, chunks, BigInt(buffer.length), 'test.txt', 'text/plain')
      expect(metadata.name).toBe('test.txt')
      expect(metadata.totalSize === BigInt(buffer.length)).toBe(true)
      expect(metadata.type).toBe('file')
      expect(metadata.dataCid).toBe(cidToString(cid))
      expect(stringifyWithBigInt(metadata.chunks)).toEqual(stringifyWithBigInt(chunks))
      expect(metadata.uploadOptions).toEqual({
        compression: undefined,
        encryption: undefined,
      })
      expect(metadata.mimeType).toBe('text/plain')
    })
  })

  describe('folder metadata', () => {
    it('matches expected structure', async () => {
      const CIDs = Array.from({ length: 10 }, () =>
        cidOfNode(createNode(Buffer.from(Math.random().toString()))),
      )
      const name = 'test'
      const childSize = BigInt(1000)
      const blockstore = new MemoryBlockstore()
      const folder = await processFolderToIPLDFormat(blockstore, CIDs, 'test', childSize)
      const children = CIDs.map((cid) => ({
        cid: cidToString(cid),
        totalSize: childSize,
        type: 'file' as const,
      }))

      const metadata = folderMetadata(cidToString(folder), children, name)

      const totalSize = childSize * BigInt(CIDs.length)
      expect(metadata.name).toBe(name)
      expect(metadata.totalSize.toString()).toBe(totalSize.toString())
      expect(metadata.type).toBe('folder')
      expect(metadata.dataCid).toBe(cidToString(folder))
      expect(stringifyWithBigInt(metadata.children)).toEqual(stringifyWithBigInt(children))
      expect(metadata.uploadOptions).toEqual({
        compression: undefined,
        encryption: undefined,
      })
    })

    it('matches expected structure with upload options', async () => {
      const CIDs = Array.from({ length: 10 }, () =>
        cidOfNode(createNode(Buffer.from(Math.random().toString()))),
      )
      const childSize = BigInt(1000)
      const name = 'test'
      const blockstore = new MemoryBlockstore()
      const folder = await processFolderToIPLDFormat(blockstore, CIDs, name, childSize)
      const children = CIDs.map((cid) => ({
        cid: cidToString(cid),
        totalSize: childSize,
        type: 'file' as const,
      }))

      const metadata = folderMetadata(cidToString(folder), children, name, {
        compression: {
          algorithm: CompressionAlgorithm.ZLIB,
          level: 1,
        },
        encryption: {
          algorithm: EncryptionAlgorithm.AES_256_GCM,
        },
      })

      const totalSize = childSize * BigInt(CIDs.length)
      expect(metadata.name).toBe(name)
      expect(metadata.totalSize.toString()).toBe(totalSize.toString())
      expect(metadata.type).toBe('folder')
      expect(metadata.dataCid).toBe(cidToString(folder))
      expect(stringifyWithBigInt(metadata.children)).toEqual(stringifyWithBigInt(children))
      expect(metadata.uploadOptions).toEqual({
        compression: {
          algorithm: CompressionAlgorithm.ZLIB,
          level: 1,
        },
        encryption: {
          algorithm: EncryptionAlgorithm.AES_256_GCM,
        },
      })
    })

    it('file children metadata from node', () => {
      const node = createSingleFileIpldNode(Buffer.from('hello world'), 'test.txt')
      const metadata = childrenMetadataFromNode(node)
      expect(metadata.cid).toBe(cidToString(cidOfNode(node)))
      expect(metadata.totalSize.toString()).toBe('11')
      expect(metadata.type).toBe('file')
      expect(metadata.name).toBe('test.txt')
    })

    it('folder children metadata from node', () => {
      const name = 'test'
      const size = BigInt(1000)
      const cid = cidOfNode(createFolderIpldNode([], name, 0, size))
      const node = createFolderIpldNode([cid], name, 0, size)
      const metadata = childrenMetadataFromNode(node)
      expect(metadata.cid).toBe(cidToString(cidOfNode(node)))
      expect(metadata.totalSize.toString()).toBe(size.toString())
      expect(metadata.type).toBe('folder')
      expect(metadata.name).toBe(name)
    })
  })
})