@autonomys/auto-dag-data 1.0.6 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,4 +1,4 @@
1
- # Auto DAG Data
1
+ # Autonomys Auto DAG Data SDK
2
2
 
3
3
  ![Autonomys Banner](https://github.com/autonomys/auto-sdk/blob/main/.github/images/autonomys-banner.webp)
4
4
 
@@ -8,7 +8,7 @@
8
8
 
9
9
  ## Overview
10
10
 
11
- The **Autonomys Auto DAG Data SDK** (`@autonomys/auto-dag-data`) provides utilities for creating and managing IPLD DAGs (InterPlanetary Linked Data Directed Acyclic Graphs) for files and folders. It facilitates chunking large files, handling metadata, and creating folder structures suitable for distributed storage systems like IPFS.
11
+ The **Autonomys Auto DAG Data SDK** (`@autonomys/auto-dag-data`) provides utilities for creating and managing IPLD DAGs (InterPlanetary Linked Data Directed Acyclic Graphs) for files and folders. It facilitates chunking large files, handling metadata, and creating folder structures suitable for distributed storage systems like IPFS.
12
12
 
13
13
  ## Features
14
14
 
@@ -20,7 +20,7 @@ The **Autonomys Auto DAG Data SDK** (`@autonomys/auto-dag-data`) provides utilit
20
20
 
21
21
  ## Installation
22
22
 
23
- You can install Auto-DAG-Data using npm or yarn:
23
+ You can install Auto-DAG-Data using npm or yarn:
24
24
 
25
25
  ```bash
26
26
  npm install @autonomys/auto-dag-data
@@ -36,23 +36,27 @@ yarn add @autonomys/auto-dag-data
36
36
 
37
37
  ### Creating an IPLD DAG from a File
38
38
 
39
- To create an IPLD DAG from a file, you can use the `createFileIPLDDag` function:
39
+ To create an IPLD DAG from a file, you can use the `processFileToIPLDFormat` function:
40
40
 
41
41
  ```typescript
42
- import { createFileIPLDDag } from '@autonomys/auto-dag-data'
42
+ import { processFileToIPLDFormat } from '@autonomys/auto-dag-data'
43
+ import { MemoryBlockstore } from 'blockstore-core/memory'
43
44
  import fs from 'fs'
44
45
 
45
- const fileBuffer = fs.readFileSync('path/to/your/file.txt')
46
+ const fileStream = fs.createReadStream('path/to/your/file.txt')
47
+ const fileSize = fs.statSync('path/to/your/file.txt').size
46
48
 
47
- const dag = createFileIPLDDag(fileBuffer, 'file.txt')
49
+ const blockstore = new MemoryBlockstore()
50
+ const fileCID = processFileToIPLDFormat(blockstore, fileStream, fileSize, 'file.txt')
48
51
  ```
49
52
 
50
53
  ### Creating an IPLD DAG from a Folder
51
54
 
52
- To create an IPLD DAG from a folder, you can use the `createFolderIPLDDag` function:
55
+ To generate an IPLD DAG from a folder, you can use the `processFolderToIPLDFormat` function:
53
56
 
54
57
  ```typescript
55
- import { createFolderIPLDDag } from '@autonomys/auto-dag-data'
58
+ import { processFolderToIPLDFormat, decodeNode } from '@autonomys/auto-dag-data'
59
+ import { MemoryBlockstore } from 'blockstore-core/memory'
56
60
  import { CID } from 'multiformats'
57
61
 
58
62
  // Example child CIDs and folder information
@@ -60,9 +64,12 @@ const childCIDs: CID[] = [
60
64
  /* array of CIDs */
61
65
  ]
62
66
  const folderName = 'my-folder'
63
- const folderSize = 1024 // size in bytes
67
+ const folderSize = 1024 // size in bytes (the sum of the sizes of its children)
64
68
 
65
- const folderDag = createFolderIPLDDag(childCIDs, folderName, folderSize)
69
+ const blockstore = new MemoryBlockstore()
70
+ const folderCID = processFolderToIPLDFormat(blockstore, childCIDs, folderName, folderSize)
71
+
72
+ const node = decodeNode(blockstore.get(folderCID))
66
73
  ```
67
74
 
68
75
  ### Working with CIDs
@@ -115,14 +122,16 @@ const metadataNode = createMetadataNode(metadata)
115
122
  ### Example: Creating a File DAG and Converting to CID
116
123
 
117
124
  ```typescript
118
- import { createFileIPLDDag, cidOfNode, cidToString } from '@autonomys/auto-dag-data'
125
+ import { processFileToIPLDFormat, cidToString } from '@autonomys/auto-dag-data'
126
+ import { MemoryBlockstore } from 'blockstore-core/memory'
119
127
  import fs from 'fs'
120
128
 
121
- const fileBuffer = fs.readFileSync('path/to/your/file.txt')
129
+ const fileStream = fs.createReadStream('path/to/your/file.txt')
130
+ const fileSize = fs.statSync('path/to/your/file.txt').size
122
131
 
123
- const dag = createFileIPLDDag(fileBuffer, 'file.txt')
132
+ const blockstore = new MemoryBlockstore()
133
+ const cid = processFileToIPLDFormat(blockstore, fileStream, fileSize, 'file.txt')
124
134
 
125
- const cid = cidOfNode(dag.headCID)
126
135
  const cidString = cidToString(cid)
127
136
 
128
137
  console.log(`CID of the file DAG: ${cidString}`)
@@ -137,13 +146,14 @@ import {
137
146
  cidToString,
138
147
  type OffchainMetadata,
139
148
  } from '@autonomys/auto-dag-data'
149
+ import { MemoryBlockstore } from 'blockstore-core/memory'
140
150
  import fs from 'fs'
141
151
 
142
152
  const metadata: OffchainMetadata = fs.readFileSync('path/to/your/metadata.json')
143
153
 
144
- const dag = createMetadataIPLDDag(metadata)
154
+ const blockstore = new MemoryBlockstore()
155
+ const cid = processMetadataToIPLDFormat(blockstore, metadata)
145
156
 
146
- const cid = cidOfNode(dag.headCID)
147
157
  const cidString = cidToString(cid)
148
158
 
149
159
  console.log(`CID of the metadata DAG: ${cidString}`)
@@ -3,8 +3,8 @@ import { FileUploadOptions } from '../metadata/index.js';
3
3
  import { PBNode } from './index.js';
4
4
  export interface Builders {
5
5
  inlink: (links: CID[], size: number, linkDepth: number, chunkSize: number) => PBNode;
6
- chunk: (data: Buffer) => PBNode;
7
- root: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number, fileUploadOptions?: FileUploadOptions) => PBNode;
6
+ chunk: (data: Buffer, maxNodeSize?: number) => PBNode;
7
+ root: (links: CID[], size: bigint, linkDepth: number, name?: string, maxNodeSize?: number, fileUploadOptions?: FileUploadOptions) => PBNode;
8
8
  single: (data: Buffer, filename?: string, fileUploadOptions?: FileUploadOptions) => PBNode;
9
9
  }
10
10
  export declare const metadataBuilders: Builders;
@@ -1 +1 @@
1
- {"version":3,"file":"builders.d.ts","sourceRoot":"","sources":["../../src/ipld/builders.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AACxD,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAYnC,MAAM,WAAW,QAAQ;IACvB,MAAM,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,MAAM,CAAA;IACpF,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,MAAM,CAAA;IAC/B,IAAI,EAAE,CACJ,KAAK,EAAE,GAAG,EAAE,EACZ,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,MAAM,EACjB,IAAI,CAAC,EAAE,MAAM,EACb,WAAW,CAAC,EAAE,MAAM,EACpB,iBAAiB,CAAC,EAAE,iBAAiB,KAClC,MAAM,CAAA;IACX,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,KAAK,MAAM,CAAA;CAC3F;AACD,eAAO,MAAM,gBAAgB,EAAE,QAK9B,CAAA;AAED,eAAO,MAAM,YAAY,EAAE,QAK1B,CAAA"}
1
+ {"version":3,"file":"builders.d.ts","sourceRoot":"","sources":["../../src/ipld/builders.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AACxD,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAYnC,MAAM,WAAW,QAAQ;IACvB,MAAM,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,MAAM,CAAA;IACpF,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,MAAM,KAAK,MAAM,CAAA;IACrD,IAAI,EAAE,CACJ,KAAK,EAAE,GAAG,EAAE,EACZ,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,MAAM,EACjB,IAAI,CAAC,EAAE,MAAM,EACb,WAAW,CAAC,EAAE,MAAM,EACpB,iBAAiB,CAAC,EAAE,iBAAiB,KAClC,MAAM,CAAA;IACX,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,KAAK,MAAM,CAAA;CAC3F;AACD,eAAO,MAAM,gBAAgB,EAAE,QAK9B,CAAA;AAED,eAAO,MAAM,YAAY,EAAE,QAK1B,CAAA"}
@@ -5,24 +5,27 @@ import { FileUploadOptions, OffchainMetadata } from '../metadata/index.js';
5
5
  import { Builders } from './builders.js';
6
6
  import { PBNode } from './utils.js';
7
7
  type ChunkerLimits = {
8
- maxChunkSize: number;
8
+ maxNodeSize: number;
9
9
  maxLinkPerNode: number;
10
10
  };
11
11
  type ChunkerOptions = ChunkerLimits & FileUploadOptions;
12
+ export declare const DEFAULT_NODE_MAX_SIZE = 65535;
13
+ export declare const NODE_METADATA_SIZE: number;
12
14
  export declare const DEFAULT_MAX_CHUNK_SIZE: number;
15
+ export declare const LINK_SIZE_IN_BYTES = 40;
13
16
  export declare const DEFAULT_MAX_LINK_PER_NODE: number;
14
- export declare const processFileToIPLDFormat: (blockstore: BaseBlockstore, file: AwaitIterable<Buffer>, totalSize: number, filename?: string, { maxChunkSize, maxLinkPerNode, encryption, compression, }?: Partial<ChunkerOptions>) => Promise<CID>;
17
+ export declare const processFileToIPLDFormat: (blockstore: BaseBlockstore, file: AwaitIterable<Buffer>, totalSize: bigint, filename?: string, { maxNodeSize, maxLinkPerNode, encryption, compression, }?: Partial<ChunkerOptions>) => Promise<CID>;
15
18
  export declare const processMetadataToIPLDFormat: (blockstore: BaseBlockstore, metadata: OffchainMetadata, limits?: {
16
- maxChunkSize: number;
19
+ maxNodeSize: number;
17
20
  maxLinkPerNode: number;
18
21
  }) => Promise<CID>;
19
- export declare const processBufferToIPLDFormatFromChunks: (blockstore: BaseBlockstore, chunks: AwaitIterable<CID>, filename: string | undefined, totalSize: number, builders: Builders, { maxChunkSize, maxLinkPerNode, encryption, compression, }?: Partial<ChunkerOptions>) => Promise<CID>;
20
- export declare const processFolderToIPLDFormat: (blockstore: BaseBlockstore, children: CID[], name: string, size: number, { maxLinkPerNode, maxChunkSize, compression, encryption, }?: Partial<ChunkerOptions>) => Promise<CID>;
22
+ export declare const processBufferToIPLDFormatFromChunks: (blockstore: BaseBlockstore, chunks: AwaitIterable<CID>, filename: string | undefined, totalSize: bigint, builders: Builders, { maxNodeSize: maxNodeSize, maxLinkPerNode, encryption, compression, }?: Partial<ChunkerOptions>) => Promise<CID>;
23
+ export declare const processFolderToIPLDFormat: (blockstore: BaseBlockstore, children: CID[], name: string, size: bigint, { maxLinkPerNode, maxNodeSize: maxNodeSize, compression, encryption, }?: Partial<ChunkerOptions>) => Promise<CID>;
21
24
  /**
22
25
  * Process chunks to IPLD format, return the last chunk if it's not full
23
26
  * @returns the last chunk if it's not full, otherwise an empty buffer
24
27
  */
25
- export declare const processChunksToIPLDFormat: (blockstore: BaseBlockstore, chunks: AwaitIterable<Buffer>, builders: Builders, { maxChunkSize }: {
28
+ export declare const processChunksToIPLDFormat: (blockstore: BaseBlockstore, chunks: AwaitIterable<Buffer>, builders: Builders, { maxChunkSize }?: {
26
29
  maxChunkSize?: number;
27
30
  }) => Promise<Buffer>;
28
31
  export declare const ensureNodeMaxSize: (node: PBNode, maxSize?: number) => PBNode;
@@ -1 +1 @@
1
- {"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;AAEvD,eAAO,MAAM,sBAAsB,QAAY,CAAA;AAG/C,eAAO,MAAM,yBAAyB,QAAwD,CAAA;AAE9F,eAAO,MAAM,uBAAuB,eACtB,cAAc,QACpB,aAAa,CAAC,MAAM,CAAC,aAChB,MAAM,aACN,MAAM,+DAMd,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAOb,CAAA;AAED,eAAO,MAAM,2BAA2B,eAC1B,cAAc,YAChB,gBAAgB,WAClB;IAAE,YAAY,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAIvD,OAAO,CAAC,GAAG,CAab,CAAA;AAsCD,eAAO,MAAM,mCAAmC,eAClC,cAAc,UAClB,aAAa,CAAC,GAAG,CAAC,YAChB,MAAM,GAAG,SAAS,aACjB,MAAM,YACP,QAAQ,+DAMf,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CA4Cb,CAAA;AAED,eAAO,MAAM,yBAAyB,eACxB,cAAc,YAChB,GAAG,EAAE,QACT,MAAM,QACN,MAAM,+DAMT,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAwBb,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,yBAAyB,eACxB,cAAc,UAClB,aAAa,CAAC,MAAM,CAAC,YACnB,QAAQ,oBACyB;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,KACnE,OAAO,CAAC,MAAM,CAchB,CAAA;AAED,eAAO,MAAM,iBAAiB,SACtB,MAAM,YACH,MAAM,KACd,MAOF,CAAA"}
1
+ {"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;AAEvD,eAAO,MAAM,qBAAqB,QAAQ,CAAA;AAiB1C,eAAO,MAAM,kBAAkB,QAML,CAAA;AAE1B,eAAO,MAAM,sBAAsB,QAA6C,CAAA;AAEhF,eAAO,MAAM,kBAAkB,KAAK,CAAA;AACpC,eAAO,MAAM,yBAAyB,QAA0D,CAAA;AAEhG,eAAO,MAAM,uBAAuB,eACtB,cAAc,QACpB,aAAa,CAAC,MAAM,CAAC,aAChB,MAAM,aACN,MAAM,8DAMd,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAWb,CAAA;AAED,eAAO,MAAM,2BAA2B,eAC1B,cAAc,YAChB,gBAAgB,WAClB;IAAE,WAAW,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAItD,OAAO,CAAC,GAAG,CAiBb,CAAA;AA0CD,eAAO,MAAM,mCAAmC,eAClC,cAAc,UAClB,aAAa,CAAC,GAAG,CAAC,YAChB,MAAM,GAAG,SAAS,aACjB,MAAM,YACP,QAAQ,2EAMf,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAgDb,CAAA;AAED,eAAO,MAAM,yBAAyB,eACxB,cAAc,YAChB,GAAG,EAAE,QACT,MAAM,QACN,MAAM,2EAMT,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CA4Bb,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,yBAAyB,eACxB,cAAc,UAClB,aAAa,CAAC,MAAM,CAAC,YACnB,QAAQ,qBACyB;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,KAGnE,OAAO,CAAC,MAAM,CAiBhB,CAAA;AAED,eAAO,MAAM,iBAAiB,SACtB,MAAM,YACH,MAAM,KACd,MAOF,CAAA"}
@@ -29,45 +29,75 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
29
29
  };
30
30
  import { cidOfNode } from '../cid/index.js';
31
31
  import { decodeIPLDNodeData } from '../metadata/index.js';
32
+ import { stringifyMetadata } from '../utils/metadata.js';
32
33
  import { fileBuilders, metadataBuilders } from './builders.js';
33
34
  import { createFolderInlinkIpldNode, createFolderIpldNode } from './nodes.js';
34
35
  import { chunkBuffer, encodeNode } from './utils.js';
35
- export const DEFAULT_MAX_CHUNK_SIZE = 64 * 1024;
36
- const ESTIMATED_LINK_SIZE_IN_BYTES = 64;
37
- export const DEFAULT_MAX_LINK_PER_NODE = DEFAULT_MAX_CHUNK_SIZE / ESTIMATED_LINK_SIZE_IN_BYTES;
38
- export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
39
- maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
36
+ export const DEFAULT_NODE_MAX_SIZE = 65535;
37
+ // u8 -> 1 byte (may grow in the future but unlikely further than 255)
38
+ const NODE_TYPE_SIZE = 1;
39
+ // u32 -> 4 bytes
40
+ const NODE_LINK_DEPTH_SIZE = 4;
41
+ // u64 -> 8 bytes
42
+ const NODE_SIZE_SIZE = 8;
43
+ // Limit at 255 string length (Mac Limit)
44
+ const MAX_NAME_SIZE = 255;
45
+ const END_OF_STRING_BYTE = 1;
46
+ const NODE_NAME_SIZE = MAX_NAME_SIZE + END_OF_STRING_BYTE;
47
+ // Upload options may be amplified in the future
48
+ const NODE_UPLOAD_OPTIONS_SIZE = 100;
49
+ // Reserve 100 bytes for future use
50
+ const NODE_RESERVED_SIZE = 100;
51
+ export const NODE_METADATA_SIZE = NODE_TYPE_SIZE +
52
+ NODE_LINK_DEPTH_SIZE +
53
+ NODE_SIZE_SIZE +
54
+ NODE_NAME_SIZE +
55
+ NODE_RESERVED_SIZE +
56
+ NODE_UPLOAD_OPTIONS_SIZE;
57
+ export const DEFAULT_MAX_CHUNK_SIZE = DEFAULT_NODE_MAX_SIZE - NODE_METADATA_SIZE;
58
+ export const LINK_SIZE_IN_BYTES = 40;
59
+ export const DEFAULT_MAX_LINK_PER_NODE = Math.floor(DEFAULT_MAX_CHUNK_SIZE / LINK_SIZE_IN_BYTES);
60
+ export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, { maxNodeSize = DEFAULT_NODE_MAX_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
61
+ maxNodeSize: DEFAULT_NODE_MAX_SIZE,
40
62
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
41
63
  encryption: undefined,
42
64
  compression: undefined,
43
65
  }) => {
66
+ if (filename && filename.length > MAX_NAME_SIZE) {
67
+ throw new Error(`Filename is too long: ${filename.length} > ${MAX_NAME_SIZE}`);
68
+ }
44
69
  return processBufferToIPLDFormat(blockstore, file, filename, totalSize, fileBuilders, {
45
- maxChunkSize,
70
+ maxNodeSize,
46
71
  maxLinkPerNode,
47
72
  encryption,
48
73
  compression,
49
74
  });
50
75
  };
51
76
  export const processMetadataToIPLDFormat = (blockstore_1, metadata_1, ...args_1) => __awaiter(void 0, [blockstore_1, metadata_1, ...args_1], void 0, function* (blockstore, metadata, limits = {
52
- maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
77
+ maxNodeSize: DEFAULT_NODE_MAX_SIZE,
53
78
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
54
79
  }) {
55
- const buffer = Buffer.from(JSON.stringify(metadata));
56
- const name = `${metadata.name}.metadata.json`;
80
+ if (metadata.name && metadata.name.length > MAX_NAME_SIZE) {
81
+ throw new Error(`Filename is too long: ${metadata.name.length} > ${MAX_NAME_SIZE}`);
82
+ }
83
+ const buffer = Buffer.from(stringifyMetadata(metadata));
57
84
  return processBufferToIPLDFormat(blockstore, (function () {
58
85
  return __asyncGenerator(this, arguments, function* () {
59
86
  yield yield __await(buffer);
60
87
  });
61
- })(), name, buffer.byteLength, metadataBuilders, limits);
88
+ })(), metadata.name, BigInt(buffer.byteLength), metadataBuilders, limits);
62
89
  });
63
- const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, buffer, filename, totalSize, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
64
- maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
90
+ const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, buffer, filename, totalSize, builders, { maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
91
+ maxNodeSize: DEFAULT_NODE_MAX_SIZE,
65
92
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
66
93
  encryption: undefined,
67
94
  compression: undefined,
68
95
  }) {
69
96
  var _a, e_1, _b, _c;
70
- const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxChunkSize });
97
+ if (filename && filename.length > MAX_NAME_SIZE) {
98
+ throw new Error(`Filename is too long: ${filename.length} > ${MAX_NAME_SIZE}`);
99
+ }
100
+ const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxNodeSize - NODE_METADATA_SIZE });
71
101
  let CIDs = [];
72
102
  try {
73
103
  for (var _d = true, bufferChunks_1 = __asyncValues(bufferChunks), bufferChunks_1_1; bufferChunks_1_1 = yield bufferChunks_1.next(), _a = bufferChunks_1_1.done, !_a; _d = true) {
@@ -89,19 +119,22 @@ const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize
89
119
  }
90
120
  return processBufferToIPLDFormatFromChunks(blockstore, CIDs, filename, totalSize, builders, {
91
121
  maxLinkPerNode,
92
- maxChunkSize,
122
+ maxNodeSize,
93
123
  encryption,
94
124
  compression,
95
125
  });
96
126
  });
97
- export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, filename, totalSize, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
98
- maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
127
+ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, filename, totalSize, builders, { maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
128
+ maxNodeSize: DEFAULT_NODE_MAX_SIZE,
99
129
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
100
130
  encryption: undefined,
101
131
  compression: undefined,
102
132
  }) {
103
133
  var _a, chunks_2, chunks_2_1;
104
134
  var _b, e_2, _c, _d;
135
+ if (filename && filename.length > MAX_NAME_SIZE) {
136
+ throw new Error(`Filename is too long: ${filename.length} > ${MAX_NAME_SIZE}`);
137
+ }
105
138
  let chunkCount = 0;
106
139
  let CIDs = [];
107
140
  try {
@@ -137,7 +170,7 @@ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, file
137
170
  const newCIDs = [];
138
171
  for (let i = 0; i < CIDs.length; i += maxLinkPerNode) {
139
172
  const chunk = CIDs.slice(i, i + maxLinkPerNode);
140
- const node = builders.inlink(chunk, chunk.length, depth, maxChunkSize);
173
+ const node = builders.inlink(chunk, chunk.length, depth, maxNodeSize);
141
174
  const cid = cidOfNode(node);
142
175
  yield blockstore.put(cid, encodeNode(node));
143
176
  newCIDs.push(cid);
@@ -145,7 +178,7 @@ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, file
145
178
  depth++;
146
179
  CIDs = newCIDs;
147
180
  }
148
- const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize, {
181
+ const head = builders.root(CIDs, totalSize, depth, filename, maxNodeSize, {
149
182
  compression,
150
183
  encryption,
151
184
  });
@@ -153,12 +186,15 @@ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, file
153
186
  yield blockstore.put(headCID, encodeNode(head));
154
187
  return headCID;
155
188
  });
156
- export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size_1, ...args_1) => __awaiter(void 0, [blockstore_1, children_1, name_1, size_1, ...args_1], void 0, function* (blockstore, children, name, size, { maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, compression = undefined, encryption = undefined, } = {
189
+ export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size_1, ...args_1) => __awaiter(void 0, [blockstore_1, children_1, name_1, size_1, ...args_1], void 0, function* (blockstore, children, name, size, { maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, maxNodeSize: maxNodeSize = DEFAULT_NODE_MAX_SIZE, compression = undefined, encryption = undefined, } = {
157
190
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
158
- maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
191
+ maxNodeSize: DEFAULT_NODE_MAX_SIZE,
159
192
  compression: undefined,
160
193
  encryption: undefined,
161
194
  }) {
195
+ if (name.length > MAX_NAME_SIZE) {
196
+ throw new Error(`Filename is too long: ${name.length} > ${MAX_NAME_SIZE}`);
197
+ }
162
198
  let cids = children;
163
199
  let depth = 0;
164
200
  while (cids.length > maxLinkPerNode) {
@@ -173,7 +209,7 @@ export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size
173
209
  cids = newCIDs;
174
210
  depth++;
175
211
  }
176
- const node = createFolderIpldNode(cids, name, depth, size, maxChunkSize, {
212
+ const node = createFolderIpldNode(cids, name, depth, size, maxNodeSize, {
177
213
  compression,
178
214
  encryption,
179
215
  });
@@ -185,14 +221,19 @@ export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size
185
221
  * Process chunks to IPLD format, return the last chunk if it's not full
186
222
  * @returns the last chunk if it's not full, otherwise an empty buffer
187
223
  */
188
- export const processChunksToIPLDFormat = (blockstore_1, chunks_1, builders_1, _a) => __awaiter(void 0, [blockstore_1, chunks_1, builders_1, _a], void 0, function* (blockstore, chunks, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE }) {
189
- var _b, e_3, _c, _d;
190
- const bufferChunks = chunkBuffer(chunks, { maxChunkSize, ignoreLastChunk: false });
224
+ export const processChunksToIPLDFormat = (blockstore_1, chunks_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE } = {
225
+ maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
226
+ }) {
227
+ var _a, e_3, _b, _c;
228
+ const bufferChunks = chunkBuffer(chunks, {
229
+ maxChunkSize,
230
+ ignoreLastChunk: false,
231
+ });
191
232
  try {
192
- for (var _e = true, bufferChunks_2 = __asyncValues(bufferChunks), bufferChunks_2_1; bufferChunks_2_1 = yield bufferChunks_2.next(), _b = bufferChunks_2_1.done, !_b; _e = true) {
193
- _d = bufferChunks_2_1.value;
194
- _e = false;
195
- const chunk = _d;
233
+ for (var _d = true, bufferChunks_2 = __asyncValues(bufferChunks), bufferChunks_2_1; bufferChunks_2_1 = yield bufferChunks_2.next(), _a = bufferChunks_2_1.done, !_a; _d = true) {
234
+ _c = bufferChunks_2_1.value;
235
+ _d = false;
236
+ const chunk = _c;
196
237
  if (chunk.byteLength < maxChunkSize) {
197
238
  return chunk;
198
239
  }
@@ -204,13 +245,13 @@ export const processChunksToIPLDFormat = (blockstore_1, chunks_1, builders_1, _a
204
245
  catch (e_3_1) { e_3 = { error: e_3_1 }; }
205
246
  finally {
206
247
  try {
207
- if (!_e && !_b && (_c = bufferChunks_2.return)) yield _c.call(bufferChunks_2);
248
+ if (!_d && !_a && (_b = bufferChunks_2.return)) yield _b.call(bufferChunks_2);
208
249
  }
209
250
  finally { if (e_3) throw e_3.error; }
210
251
  }
211
252
  return Buffer.alloc(0);
212
253
  });
213
- export const ensureNodeMaxSize = (node, maxSize = DEFAULT_MAX_CHUNK_SIZE) => {
254
+ export const ensureNodeMaxSize = (node, maxSize = DEFAULT_NODE_MAX_SIZE) => {
214
255
  const nodeSize = encodeNode(node).byteLength;
215
256
  if (nodeSize > maxSize) {
216
257
  throw new Error(`Node is too large to fit in a single chunk: ${nodeSize} > ${maxSize}`);
@@ -1,15 +1,15 @@
1
1
  import { CID } from 'multiformats/cid';
2
2
  import { FileUploadOptions, OffchainMetadata } from '../metadata/index.js';
3
3
  import { PBNode } from './index.js';
4
- export declare const createFileChunkIpldNode: (data: Buffer) => PBNode;
5
- export declare const createChunkedFileIpldNode: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number, uploadOptions?: FileUploadOptions) => PBNode;
4
+ export declare const createFileChunkIpldNode: (data: Buffer, maxNodeSize?: number) => PBNode;
5
+ export declare const createChunkedFileIpldNode: (links: CID[], size: bigint, linkDepth: number, name?: string, maxNodeSize?: number, uploadOptions?: FileUploadOptions) => PBNode;
6
6
  export declare const createFileInlinkIpldNode: (links: CID[], size: number, linkDepth: number, maxNodeSize?: number) => PBNode;
7
- export declare const createSingleFileIpldNode: (data: Buffer, name?: string, uploadOptions?: FileUploadOptions) => PBNode;
7
+ export declare const createSingleFileIpldNode: (data: Buffer, name?: string, uploadOptions?: FileUploadOptions, maxNodeSize?: number) => PBNode;
8
8
  export declare const createMetadataInlinkIpldNode: (links: CID[], size: number, linkDepth: number, maxNodeSize?: number) => PBNode;
9
9
  export declare const createSingleMetadataIpldNode: (data: Buffer, name?: string) => PBNode;
10
- export declare const createMetadataChunkIpldNode: (data: Buffer) => PBNode;
11
- export declare const createChunkedMetadataIpldNode: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number) => PBNode;
12
- export declare const createFolderIpldNode: (links: CID[], name: string, linkDepth: number, size: number, maxNodeSize?: number, uploadOptions?: FileUploadOptions) => PBNode;
10
+ export declare const createMetadataChunkIpldNode: (data: Buffer, maxNodeSize?: number) => PBNode;
11
+ export declare const createChunkedMetadataIpldNode: (links: CID[], size: bigint, linkDepth: number, name?: string, maxNodeSize?: number) => PBNode;
12
+ export declare const createFolderIpldNode: (links: CID[], name: string, linkDepth: number, size: bigint, maxNodeSize?: number, uploadOptions?: FileUploadOptions) => PBNode;
13
13
  export declare const createFolderInlinkIpldNode: (links: CID[], linkDepth: number, maxNodeSize?: number) => PBNode;
14
14
  export declare const createMetadataNode: (metadata: OffchainMetadata, maxNodeSize?: number) => PBNode;
15
15
  //# sourceMappingURL=nodes.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAG1E,OAAO,EAAc,MAAM,EAAE,MAAM,YAAY,CAAA;AAG/C,eAAO,MAAM,uBAAuB,SAAU,MAAM,KAAG,MASpD,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,KAChC,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAAU,MAAM,KAAG,MAQxD,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,MAcF,CAAA"}
1
+ {"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAI1E,OAAO,EAAc,MAAM,EAAE,MAAM,YAAY,CAAA;AAG/C,eAAO,MAAM,uBAAuB,SAC5B,MAAM,gBACC,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,gBACpB,MAAM,KAClB,MAcA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAChC,MAAM,gBACC,MAAM,KAClB,MAWA,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,MAcF,CAAA"}
@@ -1,17 +1,18 @@
1
1
  import { encodeIPLDNodeData, MetadataType } from '../metadata/onchain/index.js';
2
- import { DEFAULT_MAX_CHUNK_SIZE, ensureNodeMaxSize } from './chunker.js';
2
+ import { stringifyMetadata } from '../utils/metadata.js';
3
+ import { DEFAULT_NODE_MAX_SIZE, ensureNodeMaxSize } from './chunker.js';
3
4
  import { createNode } from './index.js';
4
5
  /// Creates a file chunk ipld node
5
- export const createFileChunkIpldNode = (data) => createNode(encodeIPLDNodeData({
6
+ export const createFileChunkIpldNode = (data, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
6
7
  type: MetadataType.FileChunk,
7
- size: data.length,
8
+ size: BigInt(data.length).valueOf(),
8
9
  linkDepth: 0,
9
10
  data,
10
- }), []);
11
+ }), []), maxNodeSize);
11
12
  // Creates a file ipld node
12
13
  // links: the CIDs of the file's contents
13
14
  // @todo: add the file's metadata
14
- export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
15
+ export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_NODE_MAX_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
15
16
  type: MetadataType.File,
16
17
  name,
17
18
  size,
@@ -20,28 +21,28 @@ export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeS
20
21
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
21
22
  // Creates a file ipld node
22
23
  // links: the CIDs of the file's contents
23
- export const createFileInlinkIpldNode = (links, size, linkDepth, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
24
+ export const createFileInlinkIpldNode = (links, size, linkDepth, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
24
25
  type: MetadataType.FileInlink,
25
- size,
26
+ size: BigInt(size).valueOf(),
26
27
  linkDepth,
27
28
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
28
29
  // Creates a file ipld node
29
30
  // links: the CIDs of the file's contents
30
31
  // @todo: add the file's metadata
31
- export const createSingleFileIpldNode = (data, name, uploadOptions) => createNode(encodeIPLDNodeData({
32
+ export const createSingleFileIpldNode = (data, name, uploadOptions, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
32
33
  type: MetadataType.File,
33
34
  name,
34
- size: data.length,
35
+ size: BigInt(data.length).valueOf(),
35
36
  linkDepth: 0,
36
37
  data,
37
38
  uploadOptions,
38
- }), []);
39
+ }), []), maxNodeSize);
39
40
  // Creates a file ipld node
40
41
  // links: the CIDs of the file's contents
41
42
  // @todo: add the file's metadata
42
- export const createMetadataInlinkIpldNode = (links, size, linkDepth, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
43
+ export const createMetadataInlinkIpldNode = (links, size, linkDepth, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
43
44
  type: MetadataType.FileInlink,
44
- size,
45
+ size: BigInt(size).valueOf(),
45
46
  linkDepth,
46
47
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
47
48
  // Creates a file ipld node
@@ -50,17 +51,17 @@ export const createMetadataInlinkIpldNode = (links, size, linkDepth, maxNodeSize
50
51
  export const createSingleMetadataIpldNode = (data, name) => createNode(encodeIPLDNodeData({
51
52
  type: MetadataType.Metadata,
52
53
  name,
53
- size: data.length,
54
+ size: BigInt(data.length).valueOf(),
54
55
  linkDepth: 0,
55
56
  data,
56
57
  }), []);
57
- export const createMetadataChunkIpldNode = (data) => createNode(encodeIPLDNodeData({
58
+ export const createMetadataChunkIpldNode = (data, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
58
59
  type: MetadataType.MetadataChunk,
59
- size: data.length,
60
+ size: BigInt(data.length).valueOf(),
60
61
  linkDepth: 0,
61
62
  data,
62
- }));
63
- export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
63
+ })), maxNodeSize);
64
+ export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
64
65
  type: MetadataType.Metadata,
65
66
  name,
66
67
  size,
@@ -69,20 +70,20 @@ export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxN
69
70
  // Creates a folder ipld node
70
71
  // links: the CIDs of the folder's contents
71
72
  // @todo: add the folder's metadata
72
- export const createFolderIpldNode = (links, name, linkDepth, size, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
73
+ export const createFolderIpldNode = (links, name, linkDepth, size, maxNodeSize = DEFAULT_NODE_MAX_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
73
74
  type: MetadataType.Folder,
74
75
  name,
75
76
  size,
76
77
  linkDepth,
77
78
  uploadOptions,
78
79
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
79
- export const createFolderInlinkIpldNode = (links, linkDepth, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
80
+ export const createFolderInlinkIpldNode = (links, linkDepth, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
80
81
  type: MetadataType.FolderInlink,
81
82
  linkDepth,
82
83
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
83
84
  /// Creates a metadata ipld node
84
- export const createMetadataNode = (metadata, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => {
85
- const data = Buffer.from(JSON.stringify(metadata));
85
+ export const createMetadataNode = (metadata, maxNodeSize = DEFAULT_NODE_MAX_SIZE) => {
86
+ const data = Buffer.from(stringifyMetadata(metadata));
86
87
  return ensureNodeMaxSize(createNode(encodeIPLDNodeData({
87
88
  type: MetadataType.Metadata,
88
89
  name: metadata.name,
@@ -5,14 +5,14 @@ export type OffchainFileMetadata = {
5
5
  dataCid: string;
6
6
  name?: string;
7
7
  mimeType?: string;
8
- totalSize: number;
8
+ totalSize: bigint;
9
9
  totalChunks: number;
10
10
  chunks: ChunkInfo[];
11
11
  uploadOptions?: FileUploadOptions;
12
12
  };
13
13
  export interface ChunkInfo {
14
- size: number;
14
+ size: bigint;
15
15
  cid: string;
16
16
  }
17
- export declare const fileMetadata: (headCID: CID, chunks: ChunkInfo[], totalSize: number, name?: string | null, mimeType?: string | null, uploadOptions?: FileUploadOptions) => OffchainFileMetadata;
17
+ export declare const fileMetadata: (headCID: CID, chunks: ChunkInfo[], totalSize: bigint, name?: string | null, mimeType?: string | null, uploadOptions?: FileUploadOptions) => OffchainFileMetadata;
18
18
  //# sourceMappingURL=file.d.ts.map
@@ -5,13 +5,13 @@ interface ChildrenMetadata {
5
5
  type: 'folder' | 'file';
6
6
  name?: string;
7
7
  cid: string;
8
- totalSize: number;
8
+ totalSize: bigint;
9
9
  }
10
10
  export type OffchainFolderMetadata = {
11
11
  type: 'folder';
12
12
  dataCid: string;
13
13
  name?: string;
14
- totalSize: number;
14
+ totalSize: bigint;
15
15
  totalFiles: number;
16
16
  children: ChildrenMetadata[];
17
17
  uploadOptions: FileUploadOptions;
@@ -9,7 +9,7 @@ export const childrenMetadataFromNode = (node) => {
9
9
  return {
10
10
  type: ipldData.type === MetadataType.File ? 'file' : 'folder',
11
11
  cid: cidToString(cidOfNode(node)),
12
- totalSize: (_a = ipldData.size) !== null && _a !== void 0 ? _a : 0,
12
+ totalSize: (_a = ipldData.size) !== null && _a !== void 0 ? _a : BigInt(0).valueOf(),
13
13
  name: ipldData.name,
14
14
  };
15
15
  };
@@ -17,7 +17,7 @@ export const folderMetadata = (cid, children, name, uploadOptions = {}) => {
17
17
  cid = typeof cid === 'string' ? cid : cidToString(cid);
18
18
  return {
19
19
  dataCid: cid,
20
- totalSize: children.reduce((acc, child) => acc + child.totalSize, 0),
20
+ totalSize: children.reduce((acc, child) => acc + child.totalSize, BigInt(0).valueOf()),
21
21
  totalFiles: children.length,
22
22
  children,
23
23
  type: 'folder',
@@ -3,7 +3,7 @@ import type { Uint8ArrayList } from 'uint8arraylist';
3
3
  export interface IPLDNodeData {
4
4
  type: MetadataType;
5
5
  linkDepth: number;
6
- size?: number;
6
+ size?: bigint;
7
7
  name?: string;
8
8
  data?: Uint8Array;
9
9
  uploadOptions?: FileUploadOptions;
@@ -23,7 +23,7 @@ export var IPLDNodeData;
23
23
  }
24
24
  if (obj.size != null) {
25
25
  w.uint32(24);
26
- w.int32(obj.size);
26
+ w.int64(obj.size);
27
27
  }
28
28
  if (obj.name != null) {
29
29
  w.uint32(34);
@@ -59,7 +59,7 @@ export var IPLDNodeData;
59
59
  break;
60
60
  }
61
61
  case 3: {
62
- obj.size = reader.int32();
62
+ obj.size = reader.int64();
63
63
  break;
64
64
  }
65
65
  case 4: {
@@ -0,0 +1,3 @@
1
+ import { OffchainMetadata } from '../metadata/index.js';
2
+ export declare const stringifyMetadata: (metadata: OffchainMetadata) => string;
3
+ //# sourceMappingURL=metadata.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"metadata.d.ts","sourceRoot":"","sources":["../../src/utils/metadata.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAEvD,eAAO,MAAM,iBAAiB,aAAc,gBAAgB,KAAG,MAG5D,CAAA"}
@@ -0,0 +1 @@
1
+ export const stringifyMetadata = (metadata) => JSON.stringify(metadata, (_, v) => typeof v === 'bigint' || v instanceof BigInt ? v.toString() : v);