@autonomys/auto-dag-data 1.0.7 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -16
- package/dist/compression/index.d.ts.map +1 -1
- package/dist/compression/index.js +1 -1
- package/dist/encryption/index.d.ts +3 -2
- package/dist/encryption/index.d.ts.map +1 -1
- package/dist/ipld/chunker.d.ts +4 -2
- package/dist/ipld/chunker.d.ts.map +1 -1
- package/dist/ipld/chunker.js +23 -21
- package/dist/ipld/nodes.d.ts.map +1 -1
- package/dist/ipld/nodes.js +12 -11
- package/jest.config.ts +1 -0
- package/package.json +2 -2
- package/src/compression/index.ts +1 -1
- package/src/encryption/index.ts +3 -2
- package/src/ipld/chunker.ts +17 -15
- package/src/ipld/nodes.ts +12 -11
- package/tests/chunker.spec.ts +133 -4
- package/tests/compression.spec.ts +81 -0
- package/tests/encryption.spec.ts +21 -0
- package/tests/fileRetrievability.spec.ts +181 -0
- package/tests/nodes.spec.ts +25 -2
- package/tests/offchainMetadata.spec.ts +149 -0
package/README.md
CHANGED
@@ -8,7 +8,7 @@
 
 ## Overview
 
-The **Autonomys Auto
+The **Autonomys Auto Dag Data SDK** (`@autonomys/auto-dag-data`) provides utilities for creating and managing IPLD DAGs (InterPlanetary Linked Data Directed Acyclic Graphs) for files and folders. It facilitates chunking large files, handling metadata, and creating folder structures suitable for distributed storage systems like IPFS.
 
 ## Features
 
@@ -20,7 +20,7 @@ The **Autonomys Auto DAG Data SDK** (`@autonomys/auto-dag-data`) provides utilit
 
 ## Installation
 
-You can install Auto-
+You can install Auto-Dag-Data using npm or yarn:
 
 ```bash
 npm install @autonomys/auto-dag-data
@@ -36,23 +36,27 @@ yarn add @autonomys/auto-dag-data
 
 ### Creating an IPLD DAG from a File
 
-To create an IPLD DAG from a file, you can use the `
+To create an IPLD DAG from a file, you can use the `processFileToIPLDFormat` function:
 
 ```typescript
-import {
+import { processFileToIPLDFormat } from '@autonomys/auto-dag-data'
+import { MemoryBlockstore } from 'blockstore-core/memory'
 import fs from 'fs'
 
-const
+const fileStream = fs.createReadStream('path/to/your/file.txt')
+const fileSize = fs.statSync('path/to/your/file.txt').size
 
-const
+const blockstore = new MemoryBlockstore()
+const fileCID = processFileToIPLDFormat(blockstore, fileStream, totalSize, 'file.txt')
 ```
 
 ### Creating an IPLD DAG from a Folder
 
-To
+To generate an IPLD DAG from a folder, you can use the `processFolderToIPLDFormat` function:
 
 ```typescript
-import {
+import { processFolderToIPLDFormat, decodeNode } from '@autonomys/auto-dag-data'
+import { MemoryBlockstore } from 'blockstore-core/memory'
 import { CID } from 'multiformats'
 
 // Example child CIDs and folder information
@@ -60,9 +64,12 @@ const childCIDs: CID[] = [
 /* array of CIDs */
 ]
 const folderName = 'my-folder'
-const folderSize = 1024 // size in bytes
+const folderSize = 1024 // size in bytes (the sum of their children size)
 
-const
+const blockstore = new MemoryBlockstore()
+const folderCID = processFolderToIPLDFormat(blockstore, childCIDs, folderName, folderSize)
+
+const node = decodeNode(blockstore.get(folderCID))
 ```
 
 ### Working with CIDs
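The README snippets above define `fileSize` but pass `totalSize` to `processFileToIPLDFormat`, and read the folder node back with `blockstore.get(folderCID)` without awaiting it. A self-contained sketch of the same flow follows; it assumes the `process*` helpers can be awaited and that the size arguments are passed as `bigint`, matching the `totalSize: bigint` parameter in `src/ipld/chunker.ts` further down this diff.

```typescript
// Hedged end-to-end sketch based on the README examples above, not official docs.
// Assumptions: the process* helpers can be awaited, sizes are bigint, and
// decodeNode/MemoryBlockstore behave as shown in the README snippets.
import { processFileToIPLDFormat, processFolderToIPLDFormat, decodeNode } from '@autonomys/auto-dag-data'
import { MemoryBlockstore } from 'blockstore-core/memory'
import fs from 'fs'

const main = async () => {
  const blockstore = new MemoryBlockstore()

  // File DAG: stream the file and pass its total size explicitly.
  const filePath = 'path/to/your/file.txt'
  const fileStream = fs.createReadStream(filePath)
  const fileSize = BigInt(fs.statSync(filePath).size)
  const fileCID = await processFileToIPLDFormat(blockstore, fileStream, fileSize, 'file.txt')

  // Folder DAG: link the child CIDs under a named folder node.
  const folderCID = await processFolderToIPLDFormat(blockstore, [fileCID], 'my-folder', fileSize)

  // Read the encoded folder node back from the blockstore and decode it.
  const node = decodeNode(await blockstore.get(folderCID))
  console.log(node)
}

main()
```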
@@ -115,14 +122,16 @@ const metadataNode = createMetadataNode(metadata)
 ### Example: Creating a File DAG and Converting to CID
 
 ```typescript
-import {
+import { processFileToIPLDFormat } from '@autonomys/auto-dag-data'
+import { MemoryBlockstore } from 'blockstore-core/memory'
 import fs from 'fs'
 
-const
+const fileStream = fs.createReadStream('path/to/your/file.txt')
+const fileSize = fs.statSync('path/to/your/file.txt').size
 
-const
+const blockstore = new MemoryBlockstore()
+const cid = processFileToIPLDFormat(blockstore, fileStream, totalSize, 'file.txt')
 
-const cid = cidOfNode(dag.headCID)
 const cidString = cidToString(cid)
 
 console.log(`CID of the file DAG: ${cidString}`)
@@ -137,13 +146,14 @@ import {
   cidToString,
   type OffchainMetadata,
 } from '@autonomys/auto-dag-data'
+import { MemoryBlockstore } from 'blockstore-core/memory'
 import fs from 'fs'
 
 const metadata: OffchainMetadata = fs.readFileSync('path/to/your/metadata.json')
 
-const
+const blockstore = new MemoryBlockstore()
+const cid = processMetadataToIPLDFormat(blockstore, metadata)
 
-const cid = cidOfNode(dag.headCID)
 const cidString = cidToString(cid)
 
 console.log(`CID of the metadata DAG: ${cidString}`)
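In the metadata example, `fs.readFileSync` returns a `Buffer` or `string` rather than an `OffchainMetadata` object. A hedged sketch that parses the JSON first (the parsing step is an assumption about intended usage, not part of the published README):

```typescript
// Hedged sketch of the metadata example above; assumes OffchainMetadata matches the
// parsed JSON shape and that processMetadataToIPLDFormat can be awaited.
// Top-level await: run as an ES module.
import {
  processMetadataToIPLDFormat,
  cidToString,
  type OffchainMetadata,
} from '@autonomys/auto-dag-data'
import { MemoryBlockstore } from 'blockstore-core/memory'
import fs from 'fs'

const metadata: OffchainMetadata = JSON.parse(
  fs.readFileSync('path/to/your/metadata.json', 'utf-8'),
)

const blockstore = new MemoryBlockstore()
const cid = await processMetadataToIPLDFormat(blockstore, metadata)

console.log(`CID of the metadata DAG: ${cidToString(cid)}`)
```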
package/dist/compression/index.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/compression/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAGpD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAE/C,eAAO,MAAM,sBAAsB,QAAc,CAAA;AAEjD,wBAAuB,YAAY,CACjC,IAAI,EAAE,aAAa,CAAC,MAAM,CAAC,EAC3B,EACE,KAAS,EACT,SAAkC,EAClC,SAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB;AAED,wBAAuB,cAAc,CACnC,cAAc,EAAE,aAAa,CAAC,MAAM,CAAC,EACrC,EACE,SAAkC,EAClC,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/compression/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAGpD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAE/C,eAAO,MAAM,sBAAsB,QAAc,CAAA;AAEjD,wBAAuB,YAAY,CACjC,IAAI,EAAE,aAAa,CAAC,MAAM,CAAC,EAC3B,EACE,KAAS,EACT,SAAkC,EAClC,SAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB;AAED,wBAAuB,cAAc,CACnC,cAAc,EAAE,aAAa,CAAC,MAAM,CAAC,EACrC,EACE,SAAkC,EAClC,SAAS,EACT,KAAS,GACV,EAAE,WAAW,CAAC,kBAAkB,EAAE,WAAW,CAAC,GAC9C,aAAa,CAAC,MAAM,CAAC,CA6BvB"}
package/dist/compression/index.js
CHANGED

@@ -64,7 +64,7 @@ export function compressFile(file_1, _a) {
 });
 }
 export function decompressFile(compressedFile_1, _a) {
-return __asyncGenerator(this, arguments, function* decompressFile_1(compressedFile, { chunkSize = COMPRESSION_CHUNK_SIZE, algorithm
+return __asyncGenerator(this, arguments, function* decompressFile_1(compressedFile, { chunkSize = COMPRESSION_CHUNK_SIZE, algorithm, level = 9, }) {
 var _b, e_2, _c, _d;
 if (algorithm !== CompressionAlgorithm.ZLIB) {
 throw new Error('Unsupported compression algorithm');
package/dist/encryption/index.d.ts
CHANGED

@@ -1,8 +1,9 @@
+import { AwaitIterable } from 'interface-store';
 import { EncryptionOptions } from '../metadata/index.js';
 import type { PickPartial } from '../utils/types.js';
 import { PasswordGenerationOptions } from './types.js';
 export declare const ENCRYPTING_CHUNK_SIZE: number;
 export declare const getKeyFromPassword: ({ password, salt }: PasswordGenerationOptions) => Promise<CryptoKey>;
-export declare const encryptFile: (file:
-export declare const decryptFile: (file:
+export declare const encryptFile: (file: AwaitIterable<Buffer>, password: string, { chunkSize, algorithm }: PickPartial<EncryptionOptions, "algorithm">) => AsyncIterable<Buffer>;
+export declare const decryptFile: (file: AwaitIterable<Buffer>, password: string, { chunkSize, algorithm }: PickPartial<EncryptionOptions, "algorithm">) => AsyncIterable<Buffer>;
 //# sourceMappingURL=index.d.ts.map
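The declarations above show `encryptFile` and `decryptFile` now accepting any `AwaitIterable<Buffer>` and returning an `AsyncIterable<Buffer>`. A minimal round-trip sketch against that declared shape follows; the `EncryptionAlgorithm` import path and its member name are assumptions, since this diff only shows that an `algorithm` option exists.

```typescript
// Sketch of the AwaitIterable<Buffer> signatures declared above.
// Assumptions: EncryptionAlgorithm is exported from the package root and has an
// AES_256_GCM-style member; this diff does not show the accepted values.
// Top-level await: run as an ES module.
import { encryptFile, decryptFile, EncryptionAlgorithm } from '@autonomys/auto-dag-data'

// Any (async) iterable of Buffers satisfies AwaitIterable<Buffer>.
async function* chunks(): AsyncIterable<Buffer> {
  yield Buffer.from('hello ')
  yield Buffer.from('world')
}

const collect = async (iter: AsyncIterable<Buffer>): Promise<Buffer> => {
  const parts: Buffer[] = []
  for await (const part of iter) parts.push(part)
  return Buffer.concat(parts)
}

const password = 'correct horse battery staple'
const options = { algorithm: EncryptionAlgorithm.AES_256_GCM } // hypothetical member name

const ciphertext = await collect(encryptFile(chunks(), password, options))
const plaintext = await collect(decryptFile([ciphertext], password, options))
console.log(plaintext.toString()) // expected to round-trip back to 'hello world'
```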
package/dist/encryption/index.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/encryption/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAuB,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAE7E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,yBAAyB,EAAE,MAAM,YAAY,CAAA;AAItD,eAAO,MAAM,qBAAqB,QAAc,CAAA;AAMhD,eAAO,MAAM,kBAAkB,uBAA8B,yBAAyB,uBAyBrF,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACkC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC5F,aAAa,CAAC,MAAM,CAetB,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACiC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC3F,aAAa,CAAC,MAAM,CA+BtB,CAAA"}
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/encryption/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC/C,OAAO,EAAuB,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAE7E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAA;AACpD,OAAO,EAAE,yBAAyB,EAAE,MAAM,YAAY,CAAA;AAItD,eAAO,MAAM,qBAAqB,QAAc,CAAA;AAMhD,eAAO,MAAM,kBAAkB,uBAA8B,yBAAyB,uBAyBrF,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACkC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC5F,aAAa,CAAC,MAAM,CAetB,CAAA;AAED,eAAO,MAAM,WAAW,SAChB,aAAa,CAAC,MAAM,CAAC,YACjB,MAAM,4BACiC,WAAW,CAAC,iBAAiB,EAAE,WAAW,CAAC,KAC3F,aAAa,CAAC,MAAM,CA+BtB,CAAA"}
package/dist/ipld/chunker.d.ts
CHANGED
@@ -9,6 +9,8 @@ type ChunkerLimits = {
 maxLinkPerNode: number;
 };
 type ChunkerOptions = ChunkerLimits & FileUploadOptions;
+export declare const DEFAULT_NODE_MAX_SIZE = 65535;
+export declare const MAX_NAME_SIZE = 255;
 export declare const NODE_METADATA_SIZE: number;
 export declare const DEFAULT_MAX_CHUNK_SIZE: number;
 export declare const LINK_SIZE_IN_BYTES = 40;
@@ -24,8 +26,8 @@ export declare const processFolderToIPLDFormat: (blockstore: BaseBlockstore, chi
 * Process chunks to IPLD format, return the last chunk if it's not full
 * @returns the last chunk if it's not full, otherwise an empty buffer
 */
-export declare const processChunksToIPLDFormat: (blockstore: BaseBlockstore, chunks: AwaitIterable<Buffer>, builders: Builders, {
-
+export declare const processChunksToIPLDFormat: (blockstore: BaseBlockstore, chunks: AwaitIterable<Buffer>, builders: Builders, { maxChunkSize }?: {
+maxChunkSize?: number;
 }) => Promise<Buffer>;
 export declare const ensureNodeMaxSize: (node: PBNode, maxSize?: number) => PBNode;
 export {};
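With `DEFAULT_NODE_MAX_SIZE` and `MAX_NAME_SIZE` now part of the public declarations, callers can validate inputs against the same limits the chunker enforces. A small sketch, assuming these constants are re-exported from the package root (not shown in this diff):

```typescript
// Sketch using the newly exported limits; the root-level re-export is an assumption.
import { DEFAULT_NODE_MAX_SIZE, MAX_NAME_SIZE } from '@autonomys/auto-dag-data'

// Mirrors the limits visible in the declarations above: names are capped at 255
// characters and a single encoded node may not exceed 65535 bytes.
const assertUploadable = (name: string, encodedNodeSize: number): void => {
  if (name.length > MAX_NAME_SIZE) {
    throw new Error(`Name is too long: ${name.length} > ${MAX_NAME_SIZE}`)
  }
  if (encodedNodeSize > DEFAULT_NODE_MAX_SIZE) {
    throw new Error(`Node is too large: ${encodedNodeSize} > ${DEFAULT_NODE_MAX_SIZE}`)
  }
}

assertUploadable('file.txt', 1024)
```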
package/dist/ipld/chunker.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;
+
{"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;AAEvD,eAAO,MAAM,qBAAqB,QAAQ,CAAA;AAS1C,eAAO,MAAM,aAAa,MAAM,CAAA;AAQhC,eAAO,MAAM,kBAAkB,QAML,CAAA;AAE1B,eAAO,MAAM,sBAAsB,QAA6C,CAAA;AAEhF,eAAO,MAAM,kBAAkB,KAAK,CAAA;AACpC,eAAO,MAAM,yBAAyB,QAA0D,CAAA;AAEhG,eAAO,MAAM,uBAAuB,eACtB,cAAc,QACpB,aAAa,CAAC,MAAM,CAAC,aAChB,MAAM,aACN,MAAM,8DAMd,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAWb,CAAA;AAED,eAAO,MAAM,2BAA2B,eAC1B,cAAc,YAChB,gBAAgB,WAClB;IAAE,WAAW,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAItD,OAAO,CAAC,GAAG,CAiBb,CAAA;AA0CD,eAAO,MAAM,mCAAmC,eAClC,cAAc,UAClB,aAAa,CAAC,GAAG,CAAC,YAChB,MAAM,GAAG,SAAS,aACjB,MAAM,YACP,QAAQ,2EAMf,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAgDb,CAAA;AAED,eAAO,MAAM,yBAAyB,eACxB,cAAc,YAChB,GAAG,EAAE,QACT,MAAM,QACN,MAAM,2EAMT,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CA4Bb,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,yBAAyB,eACxB,cAAc,UAClB,aAAa,CAAC,MAAM,CAAC,YACnB,QAAQ,qBACyB;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,KAGnE,OAAO,CAAC,MAAM,CAiBhB,CAAA;AAED,eAAO,MAAM,iBAAiB,SACtB,MAAM,YACH,MAAM,KACd,MAOF,CAAA"}
package/dist/ipld/chunker.js
CHANGED
@@ -33,7 +33,7 @@ import { stringifyMetadata } from '../utils/metadata.js';
 import { fileBuilders, metadataBuilders } from './builders.js';
 import { createFolderInlinkIpldNode, createFolderIpldNode } from './nodes.js';
 import { chunkBuffer, encodeNode } from './utils.js';
-const DEFAULT_NODE_MAX_SIZE = 65535;
+export const DEFAULT_NODE_MAX_SIZE = 65535;
 // u8 -> 1 byte (may grow in the future but unlikely further than 255)
 const NODE_TYPE_SIZE = 1;
 // u32 -> 4 bytes
@@ -41,7 +41,7 @@ const NODE_LINK_DEPTH_SIZE = 4;
 // u64 -> 8 bytes
 const NODE_SIZE_SIZE = 8;
 // Limit at 255 string length (Mac Limit)
-const MAX_NAME_SIZE = 255;
+export const MAX_NAME_SIZE = 255;
 const END_OF_STRING_BYTE = 1;
 const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE;
 // Upload options may be amplified in the future
@@ -57,8 +57,8 @@ export const NODE_METADATA_SIZE = NODE_TYPE_SIZE +
 export const DEFAULT_MAX_CHUNK_SIZE = DEFAULT_NODE_MAX_SIZE - NODE_METADATA_SIZE;
 export const LINK_SIZE_IN_BYTES = 40;
 export const DEFAULT_MAX_LINK_PER_NODE = Math.floor(DEFAULT_MAX_CHUNK_SIZE / LINK_SIZE_IN_BYTES);
-export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, { maxNodeSize =
-maxNodeSize:
+export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, { maxNodeSize = DEFAULT_NODE_MAX_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 encryption: undefined,
 compression: undefined,
@@ -74,7 +74,7 @@ export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, {
 });
 };
 export const processMetadataToIPLDFormat = (blockstore_1, metadata_1, ...args_1) => __awaiter(void 0, [blockstore_1, metadata_1, ...args_1], void 0, function* (blockstore, metadata, limits = {
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 }) {
 if (metadata.name && metadata.name.length > MAX_NAME_SIZE) {
@@ -87,8 +87,8 @@ export const processMetadataToIPLDFormat = (blockstore_1, metadata_1, ...args_1)
 });
 })(), metadata.name, BigInt(buffer.byteLength), metadataBuilders, limits);
 });
-const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, buffer, filename, totalSize, builders, { maxNodeSize: maxNodeSize =
-maxNodeSize:
+const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, buffer, filename, totalSize, builders, { maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 encryption: undefined,
 compression: undefined,
@@ -124,8 +124,8 @@ const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize
 compression,
 });
 });
-export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, filename, totalSize, builders, { maxNodeSize: maxNodeSize =
-maxNodeSize:
+export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, filename, totalSize, builders, { maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 encryption: undefined,
 compression: undefined,
@@ -186,9 +186,9 @@ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, file
 yield blockstore.put(headCID, encodeNode(head));
 return headCID;
 });
-export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size_1, ...args_1) => __awaiter(void 0, [blockstore_1, children_1, name_1, size_1, ...args_1], void 0, function* (blockstore, children, name, size, { maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, maxNodeSize: maxNodeSize =
+export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size_1, ...args_1) => __awaiter(void 0, [blockstore_1, children_1, name_1, size_1, ...args_1], void 0, function* (blockstore, children, name, size, { maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE, compression = undefined, encryption = undefined, } = {
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 compression: undefined,
 encryption: undefined,
 }) {
@@ -221,18 +221,20 @@ export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size
 * Process chunks to IPLD format, return the last chunk if it's not full
 * @returns the last chunk if it's not full, otherwise an empty buffer
 */
-export const processChunksToIPLDFormat = (blockstore_1, chunks_1, builders_1,
-
+export const processChunksToIPLDFormat = (blockstore_1, chunks_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE } = {
+maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+}) {
+var _a, e_3, _b, _c;
 const bufferChunks = chunkBuffer(chunks, {
-maxChunkSize
+maxChunkSize,
 ignoreLastChunk: false,
 });
 try {
-for (var
-
-
-const chunk =
-if (chunk.byteLength <
+for (var _d = true, bufferChunks_2 = __asyncValues(bufferChunks), bufferChunks_2_1; bufferChunks_2_1 = yield bufferChunks_2.next(), _a = bufferChunks_2_1.done, !_a; _d = true) {
+_c = bufferChunks_2_1.value;
+_d = false;
+const chunk = _c;
+if (chunk.byteLength < maxChunkSize) {
 return chunk;
 }
 const node = builders.chunk(chunk);
@@ -243,13 +245,13 @@ export const processChunksToIPLDFormat = (blockstore_1, chunks_1, builders_1, _a
 catch (e_3_1) { e_3 = { error: e_3_1 }; }
 finally {
 try {
-if (!
+if (!_d && !_a && (_b = bufferChunks_2.return)) yield _b.call(bufferChunks_2);
 }
 finally { if (e_3) throw e_3.error; }
 }
 return Buffer.alloc(0);
 });
-export const ensureNodeMaxSize = (node, maxSize =
+export const ensureNodeMaxSize = (node, maxSize = DEFAULT_NODE_MAX_SIZE) => {
 const nodeSize = encodeNode(node).byteLength;
 if (nodeSize > maxSize) {
 throw new Error(`Node is too large to fit in a single chunk: ${nodeSize} > ${maxSize}`);
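The compiled output above pairs a whole-object default (`= { maxNodeSize: DEFAULT_NODE_MAX_SIZE, ... }`) with per-property destructuring defaults. The generic sketch below (illustrative values, not the package's real defaults) shows why both are needed: the object default only applies when the options argument is omitted entirely, while the destructuring defaults also cover a partially supplied options object.

```typescript
// Generic illustration of the default-options pattern seen in the compiled code above.
const example = (
  { maxNodeSize = 65535, maxLinkPerNode = 100 }: { maxNodeSize?: number; maxLinkPerNode?: number } = {
    maxNodeSize: 65535,
    maxLinkPerNode: 100,
  },
) => ({ maxNodeSize, maxLinkPerNode })

console.log(example())                      // both defaults applied
console.log(example({ maxNodeSize: 1024 })) // maxLinkPerNode still falls back to its default
```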
package/dist/ipld/nodes.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAI1E,OAAO,EAAc,MAAM,EAAE,MAAM,YAAY,CAAA;AAG/C,eAAO,MAAM,uBAAuB,SAC5B,MAAM,gBACC,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,gBACpB,MAAM,KAClB,MAcA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAChC,MAAM,gBACC,MAAM,KAClB,MAWA,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,
+
{"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAI1E,OAAO,EAAc,MAAM,EAAE,MAAM,YAAY,CAAA;AAG/C,eAAO,MAAM,uBAAuB,SAC5B,MAAM,gBACC,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,gBACpB,MAAM,KAClB,MAcA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAChC,MAAM,gBACC,MAAM,KAClB,MAWA,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,MAeF,CAAA"}
package/dist/ipld/nodes.js
CHANGED
@@ -1,9 +1,9 @@
 import { encodeIPLDNodeData, MetadataType } from '../metadata/onchain/index.js';
 import { stringifyMetadata } from '../utils/metadata.js';
-import {
+import { DEFAULT_NODE_MAX_SIZE, ensureNodeMaxSize } from './chunker.js';
 import { createNode } from './index.js';
 /// Creates a file chunk ipld node
-export const createFileChunkIpldNode = (data, maxNodeSize =
+export const createFileChunkIpldNode = (data, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.FileChunk,
 size: BigInt(data.length).valueOf(),
 linkDepth: 0,
@@ -12,7 +12,7 @@ export const createFileChunkIpldNode = (data, maxNodeSize = DEFAULT_MAX_CHUNK_SI
 // Creates a file ipld node
 // links: the CIDs of the file's contents
 // @todo: add the file's metadata
-export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeSize =
+export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_NODE_MAX_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.File,
 name,
 size,
@@ -21,7 +21,7 @@ export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeS
 }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
 // Creates a file ipld node
 // links: the CIDs of the file's contents
-export const createFileInlinkIpldNode = (links, size, linkDepth, maxNodeSize =
+export const createFileInlinkIpldNode = (links, size, linkDepth, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.FileInlink,
 size: BigInt(size).valueOf(),
 linkDepth,
@@ -29,7 +29,7 @@ export const createFileInlinkIpldNode = (links, size, linkDepth, maxNodeSize = D
 // Creates a file ipld node
 // links: the CIDs of the file's contents
 // @todo: add the file's metadata
-export const createSingleFileIpldNode = (data, name, uploadOptions, maxNodeSize =
+export const createSingleFileIpldNode = (data, name, uploadOptions, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.File,
 name,
 size: BigInt(data.length).valueOf(),
@@ -40,7 +40,7 @@ export const createSingleFileIpldNode = (data, name, uploadOptions, maxNodeSize
 // Creates a file ipld node
 // links: the CIDs of the file's contents
 // @todo: add the file's metadata
-export const createMetadataInlinkIpldNode = (links, size, linkDepth, maxNodeSize =
+export const createMetadataInlinkIpldNode = (links, size, linkDepth, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.FileInlink,
 size: BigInt(size).valueOf(),
 linkDepth,
@@ -55,13 +55,13 @@ export const createSingleMetadataIpldNode = (data, name) => createNode(encodeIPL
 linkDepth: 0,
 data,
 }), []);
-export const createMetadataChunkIpldNode = (data, maxNodeSize =
+export const createMetadataChunkIpldNode = (data, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.MetadataChunk,
 size: BigInt(data.length).valueOf(),
 linkDepth: 0,
 data,
 })), maxNodeSize);
-export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxNodeSize =
+export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.Metadata,
 name,
 size,
@@ -70,24 +70,25 @@ export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxN
 // Creates a folder ipld node
 // links: the CIDs of the folder's contents
 // @todo: add the folder's metadata
-export const createFolderIpldNode = (links, name, linkDepth, size, maxNodeSize =
+export const createFolderIpldNode = (links, name, linkDepth, size, maxNodeSize = DEFAULT_NODE_MAX_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.Folder,
 name,
 size,
 linkDepth,
 uploadOptions,
 }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
-export const createFolderInlinkIpldNode = (links, linkDepth, maxNodeSize =
+export const createFolderInlinkIpldNode = (links, linkDepth, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.FolderInlink,
 linkDepth,
 }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
 /// Creates a metadata ipld node
-export const createMetadataNode = (metadata, maxNodeSize =
+export const createMetadataNode = (metadata, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => {
 const data = Buffer.from(stringifyMetadata(metadata));
 return ensureNodeMaxSize(createNode(encodeIPLDNodeData({
 type: MetadataType.Metadata,
 name: metadata.name,
 linkDepth: 0,
 data,
+size: BigInt(data.length).valueOf(),
 })), maxNodeSize);
 };
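`createMetadataNode` now records a `size` equal to the stringified metadata's byte length. A hedged sketch of building a metadata node and deriving its CID with the helpers named in the README (the metadata fields shown are hypothetical):

```typescript
// Hedged sketch; the metadata fields are hypothetical placeholders, and the helper
// exports are taken from the README snippets earlier in this diff.
import { createMetadataNode, cidOfNode, cidToString } from '@autonomys/auto-dag-data'
import type { OffchainMetadata } from '@autonomys/auto-dag-data'

const metadata = {
  name: 'file.txt',
  totalSize: 1024,
} as unknown as OffchainMetadata // hypothetical fields; see the package's OffchainMetadata type

const node = createMetadataNode(metadata)
console.log(`Metadata node CID: ${cidToString(cidOfNode(node))}`)
```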
package/jest.config.ts
CHANGED
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "@autonomys/auto-dag-data",
 "packageManager": "yarn@4.1.1",
-"version": "1.0.
+"version": "1.0.9",
 "license": "MIT",
 "main": "dist/index.js",
 "repository": {
@@ -48,5 +48,5 @@
 "protons": "^7.6.0",
 "protons-runtime": "^5.5.0"
 },
-"gitHead": "
+"gitHead": "c8da43fe9c2ba885d1ff5e0241d9a41e2c4f3389"
 }
package/src/compression/index.ts
CHANGED
@@ -49,7 +49,7 @@ export async function* decompressFile(
 compressedFile: AwaitIterable<Buffer>,
 {
 chunkSize = COMPRESSION_CHUNK_SIZE,
-algorithm
+algorithm,
 level = 9,
 }: PickPartial<CompressionOptions, 'algorithm'>,
 ): AsyncIterable<Buffer> {
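The change above adds the missing comma and a `level = 9` default to `decompressFile`'s destructured options. A hedged round-trip sketch follows; `compressFile`'s exact options shape and the root-level export of `CompressionAlgorithm` are assumptions, though the compiled output earlier in this diff shows ZLIB is the only accepted algorithm.

```typescript
// Hedged round-trip sketch; compressFile's options shape mirrors decompressFile's
// here by assumption. Top-level await: run as an ES module.
import { compressFile, decompressFile, CompressionAlgorithm } from '@autonomys/auto-dag-data'

async function* source(): AsyncIterable<Buffer> {
  yield Buffer.from('some highly compressible text '.repeat(100))
}

const collect = async (iter: AsyncIterable<Buffer>): Promise<Buffer> => {
  const parts: Buffer[] = []
  for await (const part of iter) parts.push(part)
  return Buffer.concat(parts)
}

const compressed = await collect(compressFile(source(), { algorithm: CompressionAlgorithm.ZLIB }))
const restored = await collect(decompressFile([compressed], { algorithm: CompressionAlgorithm.ZLIB }))
console.log(restored.byteLength) // expected to equal the original text's byte length
```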
package/src/encryption/index.ts
CHANGED
@@ -1,5 +1,6 @@
 import { Crypto } from '@peculiar/webcrypto'
 import { randomBytes } from 'crypto'
+import { AwaitIterable } from 'interface-store'
 import { EncryptionAlgorithm, EncryptionOptions } from '../metadata/index.js'
 import { asyncByChunk } from '../utils/async.js'
 import type { PickPartial } from '../utils/types.js'
@@ -41,7 +42,7 @@ export const getKeyFromPassword = async ({ password, salt }: PasswordGenerationO
 }
 
 export const encryptFile = async function* (
-file:
+file: AwaitIterable<Buffer>,
 password: string,
 { chunkSize = ENCRYPTING_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
 ): AsyncIterable<Buffer> {
@@ -62,7 +63,7 @@ export const encryptFile = async function* (
 }
 
 export const decryptFile = async function* (
-file:
+file: AwaitIterable<Buffer>,
 password: string,
 { chunkSize = ENCRYPTED_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
 ): AsyncIterable<Buffer> {
package/src/ipld/chunker.ts
CHANGED
@@ -15,7 +15,7 @@ type ChunkerLimits = {
 
 type ChunkerOptions = ChunkerLimits & FileUploadOptions
 
-const DEFAULT_NODE_MAX_SIZE = 65535
+export const DEFAULT_NODE_MAX_SIZE = 65535
 
 // u8 -> 1 byte (may grow in the future but unlikely further than 255)
 const NODE_TYPE_SIZE = 1
@@ -24,7 +24,7 @@ const NODE_LINK_DEPTH_SIZE = 4
 // u64 -> 8 bytes
 const NODE_SIZE_SIZE = 8
 // Limit at 255 string length (Mac Limit)
-const MAX_NAME_SIZE = 255
+export const MAX_NAME_SIZE = 255
 const END_OF_STRING_BYTE = 1
 const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE
 // Upload options may be amplified in the future
@@ -51,12 +51,12 @@ export const processFileToIPLDFormat = (
 totalSize: bigint,
 filename?: string,
 {
-maxNodeSize =
+maxNodeSize = DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
 encryption = undefined,
 compression = undefined,
 }: Partial<ChunkerOptions> = {
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 encryption: undefined,
 compression: undefined,
@@ -78,7 +78,7 @@ export const processMetadataToIPLDFormat = async (
 blockstore: BaseBlockstore,
 metadata: OffchainMetadata,
 limits: { maxNodeSize: number; maxLinkPerNode: number } = {
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 },
 ): Promise<CID> => {
@@ -107,12 +107,12 @@ const processBufferToIPLDFormat = async (
 totalSize: bigint,
 builders: Builders,
 {
-maxNodeSize: maxNodeSize =
+maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
 encryption = undefined,
 compression = undefined,
 }: ChunkerOptions = {
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 encryption: undefined,
 compression: undefined,
@@ -147,12 +147,12 @@ export const processBufferToIPLDFormatFromChunks = async (
 totalSize: bigint,
 builders: Builders,
 {
-maxNodeSize: maxNodeSize =
+maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
 encryption = undefined,
 compression = undefined,
 }: Partial<ChunkerOptions> = {
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
 encryption: undefined,
 compression: undefined,
@@ -214,12 +214,12 @@ export const processFolderToIPLDFormat = async (
 size: bigint,
 {
 maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
-maxNodeSize: maxNodeSize =
+maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE,
 compression = undefined,
 encryption = undefined,
 }: Partial<ChunkerOptions> = {
 maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
-maxNodeSize:
+maxNodeSize: DEFAULT_NODE_MAX_SIZE,
 compression: undefined,
 encryption: undefined,
 },
@@ -261,15 +261,17 @@ export const processChunksToIPLDFormat = async (
 blockstore: BaseBlockstore,
 chunks: AwaitIterable<Buffer>,
 builders: Builders,
-{
+{ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE }: { maxChunkSize?: number } = {
+maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
+},
 ): Promise<Buffer> => {
 const bufferChunks = chunkBuffer(chunks, {
-maxChunkSize
+maxChunkSize,
 ignoreLastChunk: false,
 })
 
 for await (const chunk of bufferChunks) {
-if (chunk.byteLength <
+if (chunk.byteLength < maxChunkSize) {
 return chunk
 }
 
@@ -283,7 +285,7 @@ export const processChunksToIPLDFormat = async (
 
 export const ensureNodeMaxSize = (
 node: PBNode,
-maxSize: number =
+maxSize: number = DEFAULT_NODE_MAX_SIZE,
 ): PBNode => {
 const nodeSize = encodeNode(node).byteLength
 if (nodeSize > maxSize) {