@autonomys/auto-drive 0.7.2 → 0.7.4

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -1,10 +1,11 @@
  import { CID } from 'multiformats/cid';
  import { PBNode } from '../ipld/index.js';
+ import { FileUploadOptions } from '../metadata/index.js';
  export interface Builders {
  inlink: (links: CID[], size: number, linkDepth: number, chunkSize: number) => PBNode;
  chunk: (data: Buffer) => PBNode;
- root: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number) => PBNode;
- single: (data: Buffer, filename?: string) => PBNode;
+ root: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number, fileUploadOptions?: FileUploadOptions) => PBNode;
+ single: (data: Buffer, filename?: string, fileUploadOptions?: FileUploadOptions) => PBNode;
  }
  export declare const metadataBuilders: Builders;
  export declare const fileBuilders: Builders;
@@ -1 +1 @@
- {"version":3,"file":"builders.d.ts","sourceRoot":"","sources":["../../src/ipld/builders.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAA;AAYzC,MAAM,WAAW,QAAQ;IACvB,MAAM,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,MAAM,CAAA;IACpF,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,MAAM,CAAA;IAC/B,IAAI,EAAE,CACJ,KAAK,EAAE,GAAG,EAAE,EACZ,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,MAAM,EACjB,IAAI,CAAC,EAAE,MAAM,EACb,WAAW,CAAC,EAAE,MAAM,KACjB,MAAM,CAAA;IACX,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,CAAA;CACpD;AACD,eAAO,MAAM,gBAAgB,EAAE,QAK9B,CAAA;AAED,eAAO,MAAM,YAAY,EAAE,QAK1B,CAAA"}
+ {"version":3,"file":"builders.d.ts","sourceRoot":"","sources":["../../src/ipld/builders.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAA;AACzC,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAA;AAYxD,MAAM,WAAW,QAAQ;IACvB,MAAM,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,MAAM,CAAA;IACpF,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,MAAM,CAAA;IAC/B,IAAI,EAAE,CACJ,KAAK,EAAE,GAAG,EAAE,EACZ,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,MAAM,EACjB,IAAI,CAAC,EAAE,MAAM,EACb,WAAW,CAAC,EAAE,MAAM,EACpB,iBAAiB,CAAC,EAAE,iBAAiB,KAClC,MAAM,CAAA;IACX,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,KAAK,MAAM,CAAA;CAC3F;AACD,eAAO,MAAM,gBAAgB,EAAE,QAK9B,CAAA;AAED,eAAO,MAAM,YAAY,EAAE,QAK1B,CAAA"}
@@ -1,26 +1,23 @@
  import type { BaseBlockstore } from 'blockstore-core';
  import type { AwaitIterable } from 'interface-store';
  import { CID } from 'multiformats';
- import { OffchainMetadata } from '../metadata/index.js';
+ import { FileUploadOptions, OffchainMetadata } from '../metadata/index.js';
  import { Builders } from './builders.js';
  import { PBNode } from './utils.js';
- export declare const DEFAULT_MAX_CHUNK_SIZE: number;
- export declare const DEFAULT_MAX_LINK_PER_NODE: number;
- export declare const processFileToIPLDFormat: (blockstore: BaseBlockstore, file: AwaitIterable<Buffer>, totalSize: number, filename?: string, { maxChunkSize, maxLinkPerNode }?: {
+ type ChunkerLimits = {
  maxChunkSize: number;
  maxLinkPerNode: number;
- }) => Promise<CID>;
+ };
+ type ChunkerOptions = ChunkerLimits & FileUploadOptions;
+ export declare const DEFAULT_MAX_CHUNK_SIZE: number;
+ export declare const DEFAULT_MAX_LINK_PER_NODE: number;
+ export declare const processFileToIPLDFormat: (blockstore: BaseBlockstore, file: AwaitIterable<Buffer>, totalSize: number, filename?: string, { maxChunkSize, maxLinkPerNode, encryption, compression, }?: Partial<ChunkerOptions>) => Promise<CID>;
  export declare const processMetadataToIPLDFormat: (blockstore: BaseBlockstore, metadata: OffchainMetadata, limits?: {
  maxChunkSize: number;
  maxLinkPerNode: number;
  }) => Promise<CID>;
- export declare const processBufferToIPLDFormatFromChunks: (blockstore: BaseBlockstore, chunks: AwaitIterable<CID>, filename: string | undefined, totalSize: number, builders: Builders, { maxLinkPerNode, maxChunkSize }?: {
- maxLinkPerNode: number;
- maxChunkSize: number;
- }) => Promise<CID>;
- export declare const processFolderToIPLDFormat: (blockstore: BaseBlockstore, children: CID[], name: string, size: number, { maxLinkPerNode }?: {
- maxLinkPerNode: number;
- }) => Promise<CID>;
+ export declare const processBufferToIPLDFormatFromChunks: (blockstore: BaseBlockstore, chunks: AwaitIterable<CID>, filename: string | undefined, totalSize: number, builders: Builders, { maxChunkSize, maxLinkPerNode, encryption, compression, }?: Partial<ChunkerOptions>) => Promise<CID>;
+ export declare const processFolderToIPLDFormat: (blockstore: BaseBlockstore, children: CID[], name: string, size: number, { maxLinkPerNode, maxChunkSize, compression, encryption, }?: Partial<ChunkerOptions>) => Promise<CID>;
  /**
  * Process chunks to IPLD format, return the last chunk if it's not full
  * @returns the last chunk if it's not full, otherwise an empty buffer
@@ -29,4 +26,5 @@ export declare const processChunksToIPLDFormat: (blockstore: BaseBlockstore, chu
  maxChunkSize?: number;
  }) => Promise<Buffer>;
  export declare const ensureNodeMaxSize: (node: PBNode, maxSize?: number) => PBNode;
+ export {};
  //# sourceMappingURL=chunker.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAC3E,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,eAAO,MAAM,sBAAsB,QAAY,CAAA;AAG/C,eAAO,MAAM,yBAAyB,QAAwD,CAAA;AAE9F,eAAO,MAAM,uBAAuB,eACtB,cAAc,QACpB,aAAa,CAAC,MAAM,CAAC,aAChB,MAAM,aACN,MAAM,qCACiB;IAAE,YAAY,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAIjF,OAAO,CAAC,GAAG,CAKb,CAAA;AAED,eAAO,MAAM,2BAA2B,eAC1B,cAAc,YAChB,gBAAgB,WAClB;IAAE,YAAY,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAIvD,OAAO,CAAC,GAAG,CAab,CAAA;AA6BD,eAAO,MAAM,mCAAmC,eAClC,cAAc,UAClB,aAAa,CAAC,GAAG,CAAC,YAChB,MAAM,GAAG,SAAS,aACjB,MAAM,YACP,QAAQ,qCACgB;IAAE,cAAc,EAAE,MAAM,CAAC;IAAC,YAAY,EAAE,MAAM,CAAA;CAAE,KAIjF,OAAO,CAAC,GAAG,CAsCb,CAAA;AAED,eAAO,MAAM,yBAAyB,eACxB,cAAc,YAChB,GAAG,EAAE,QACT,MAAM,QACN,MAAM,uBACQ;IAAE,cAAc,EAAE,MAAM,CAAA;CAAE,KAC7C,OAAO,CAAC,GAAG,CAqBb,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,yBAAyB,eACxB,cAAc,UAClB,aAAa,CAAC,MAAM,CAAC,YACnB,QAAQ,oBACyB;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,KACnE,OAAO,CAAC,MAAM,CAchB,CAAA;AAED,eAAO,MAAM,iBAAiB,SACtB,MAAM,YACH,MAAM,KACd,MAOF,CAAA"}
+ {"version":3,"file":"chunker.d.ts","sourceRoot":"","sources":["../../src/ipld/chunker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AACrD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AACpD,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAsB,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,QAAQ,EAAkC,MAAM,eAAe,CAAA;AAExE,OAAO,EAA2B,MAAM,EAAE,MAAM,YAAY,CAAA;AAE5D,KAAK,aAAa,GAAG;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,cAAc,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,KAAK,cAAc,GAAG,aAAa,GAAG,iBAAiB,CAAA;AAEvD,eAAO,MAAM,sBAAsB,QAAY,CAAA;AAG/C,eAAO,MAAM,yBAAyB,QAAwD,CAAA;AAE9F,eAAO,MAAM,uBAAuB,eACtB,cAAc,QACpB,aAAa,CAAC,MAAM,CAAC,aAChB,MAAM,aACN,MAAM,+DAMd,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAOb,CAAA;AAED,eAAO,MAAM,2BAA2B,eAC1B,cAAc,YAChB,gBAAgB,WAClB;IAAE,YAAY,EAAE,MAAM,CAAC;IAAC,cAAc,EAAE,MAAM,CAAA;CAAE,KAIvD,OAAO,CAAC,GAAG,CAab,CAAA;AAsCD,eAAO,MAAM,mCAAmC,eAClC,cAAc,UAClB,aAAa,CAAC,GAAG,CAAC,YAChB,MAAM,GAAG,SAAS,aACjB,MAAM,YACP,QAAQ,+DAMf,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CA4Cb,CAAA;AAED,eAAO,MAAM,yBAAyB,eACxB,cAAc,YAChB,GAAG,EAAE,QACT,MAAM,QACN,MAAM,+DAMT,OAAO,CAAC,cAAc,CAAC,KAMzB,OAAO,CAAC,GAAG,CAwBb,CAAA;AAED;;;GAGG;AACH,eAAO,MAAM,yBAAyB,eACxB,cAAc,UAClB,aAAa,CAAC,MAAM,CAAC,YACnB,QAAQ,oBACyB;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,KACnE,OAAO,CAAC,MAAM,CAchB,CAAA;AAED,eAAO,MAAM,iBAAiB,SACtB,MAAM,YACH,MAAM,KACd,MAOF,CAAA"}
@@ -35,13 +35,17 @@ import { chunkBuffer, encodeNode } from './utils.js';
35
35
  export const DEFAULT_MAX_CHUNK_SIZE = 64 * 1024;
36
36
  const ESTIMATED_LINK_SIZE_IN_BYTES = 64;
37
37
  export const DEFAULT_MAX_LINK_PER_NODE = DEFAULT_MAX_CHUNK_SIZE / ESTIMATED_LINK_SIZE_IN_BYTES;
38
- export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, { maxChunkSize, maxLinkPerNode } = {
38
+ export const processFileToIPLDFormat = (blockstore, file, totalSize, filename, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
39
39
  maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
40
40
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
41
+ encryption: undefined,
42
+ compression: undefined,
41
43
  }) => {
42
44
  return processBufferToIPLDFormat(blockstore, file, filename, totalSize, fileBuilders, {
43
45
  maxChunkSize,
44
46
  maxLinkPerNode,
47
+ encryption,
48
+ compression,
45
49
  });
46
50
  };
47
51
  export const processMetadataToIPLDFormat = (blockstore_1, metadata_1, ...args_1) => __awaiter(void 0, [blockstore_1, metadata_1, ...args_1], void 0, function* (blockstore, metadata, limits = {
@@ -56,12 +60,14 @@ export const processMetadataToIPLDFormat = (blockstore_1, metadata_1, ...args_1)
56
60
  });
57
61
  })(), name, buffer.byteLength, metadataBuilders, limits);
58
62
  });
59
- const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, buffer, filename, totalSize, builders, { maxChunkSize, maxLinkPerNode } = {
63
+ const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, buffer_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, buffer, filename, totalSize, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
60
64
  maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
61
65
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
66
+ encryption: undefined,
67
+ compression: undefined,
62
68
  }) {
63
69
  var _a, e_1, _b, _c;
64
- const bufferChunks = chunkBuffer(buffer, { maxChunkSize });
70
+ const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxChunkSize });
65
71
  let CIDs = [];
66
72
  try {
67
73
  for (var _d = true, bufferChunks_1 = __asyncValues(bufferChunks), bufferChunks_1_1; bufferChunks_1_1 = yield bufferChunks_1.next(), _a = bufferChunks_1_1.done, !_a; _d = true) {
@@ -84,11 +90,15 @@ const processBufferToIPLDFormat = (blockstore_1, buffer_1, filename_1, totalSize
84
90
  return processBufferToIPLDFormatFromChunks(blockstore, CIDs, filename, totalSize, builders, {
85
91
  maxLinkPerNode,
86
92
  maxChunkSize,
93
+ encryption,
94
+ compression,
87
95
  });
88
96
  });
89
- export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, filename, totalSize, builders, { maxLinkPerNode, maxChunkSize } = {
90
- maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
97
+ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1) => __awaiter(void 0, [blockstore_1, chunks_1, filename_1, totalSize_1, builders_1, ...args_1], void 0, function* (blockstore, chunks, filename, totalSize, builders, { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, encryption = undefined, compression = undefined, } = {
91
98
  maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
99
+ maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
100
+ encryption: undefined,
101
+ compression: undefined,
92
102
  }) {
93
103
  var _a, chunks_2, chunks_2_1;
94
104
  var _b, e_2, _c, _d;
@@ -114,7 +124,10 @@ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, file
114
124
  const nodeBytes = yield blockstore.get(CIDs[0]);
115
125
  yield blockstore.delete(CIDs[0]);
116
126
  const data = decodeIPLDNodeData(nodeBytes);
117
- const singleNode = builders.single(Buffer.from(data.data), filename);
127
+ const singleNode = builders.single(Buffer.from(data.data), filename, {
128
+ compression,
129
+ encryption,
130
+ });
118
131
  yield blockstore.put(cidOfNode(singleNode), encodeNode(singleNode));
119
132
  const headCID = cidOfNode(singleNode);
120
133
  return headCID;
@@ -132,12 +145,20 @@ export const processBufferToIPLDFormatFromChunks = (blockstore_1, chunks_1, file
132
145
  depth++;
133
146
  CIDs = newCIDs;
134
147
  }
135
- const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize);
148
+ const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize, {
149
+ compression,
150
+ encryption,
151
+ });
136
152
  const headCID = cidOfNode(head);
137
153
  yield blockstore.put(headCID, encodeNode(head));
138
154
  return headCID;
139
155
  });
140
- export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size_1, ...args_1) => __awaiter(void 0, [blockstore_1, children_1, name_1, size_1, ...args_1], void 0, function* (blockstore, children, name, size, { maxLinkPerNode } = { maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE }) {
156
+ export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size_1, ...args_1) => __awaiter(void 0, [blockstore_1, children_1, name_1, size_1, ...args_1], void 0, function* (blockstore, children, name, size, { maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE, maxChunkSize = DEFAULT_MAX_CHUNK_SIZE, compression = undefined, encryption = undefined, } = {
157
+ maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
158
+ maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
159
+ compression: undefined,
160
+ encryption: undefined,
161
+ }) {
141
162
  let cids = children;
142
163
  let depth = 0;
143
164
  while (cids.length > maxLinkPerNode) {
@@ -152,7 +173,10 @@ export const processFolderToIPLDFormat = (blockstore_1, children_1, name_1, size
152
173
  cids = newCIDs;
153
174
  depth++;
154
175
  }
155
- const node = createFolderIpldNode(cids, name, depth, size);
176
+ const node = createFolderIpldNode(cids, name, depth, size, maxChunkSize, {
177
+ compression,
178
+ encryption,
179
+ });
156
180
  const cid = cidOfNode(node);
157
181
  yield blockstore.put(cid, encodeNode(node));
158
182
  return cid;
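The hunk above also shows `processFolderToIPLDFormat` forwarding `maxChunkSize` and the upload options into `createFolderIpldNode`, so a folder's root node is size-checked and tagged the same way file roots are. A sketch of building a folder over previously uploaded files (same export-path assumption as the earlier examples):

```ts
import { MemoryBlockstore } from 'blockstore-core'
import { CID } from 'multiformats'
// Assumed re-exports from the package root; adjust to the dist paths otherwise.
import {
  processFileToIPLDFormat,
  processFolderToIPLDFormat,
  CompressionAlgorithm,
} from '@autonomys/auto-drive'

const blockstore = new MemoryBlockstore()
const options = { compression: { algorithm: CompressionAlgorithm.ZLIB } }

// Upload two small files first; their head CIDs become the folder's children.
const a = Buffer.from('file a')
const b = Buffer.from('file b')
const children: CID[] = [
  await processFileToIPLDFormat(blockstore, [a], a.length, 'a.txt', options),
  await processFileToIPLDFormat(blockstore, [b], b.length, 'b.txt', options),
]

// The folder root node now records the same options (and respects maxChunkSize).
const folderCid = await processFolderToIPLDFormat(blockstore, children, 'my-folder', a.length + b.length, options)
console.log(folderCid.toString())
```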
@@ -1,15 +1,15 @@
  import { CID } from 'multiformats/cid';
  import { PBNode } from '../ipld/index.js';
- import { OffchainMetadata } from '../metadata/index.js';
+ import { FileUploadOptions, OffchainMetadata } from '../metadata/index.js';
  export declare const createFileChunkIpldNode: (data: Buffer) => PBNode;
- export declare const createChunkedFileIpldNode: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number) => PBNode;
+ export declare const createChunkedFileIpldNode: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number, uploadOptions?: FileUploadOptions) => PBNode;
  export declare const createFileInlinkIpldNode: (links: CID[], size: number, linkDepth: number, maxNodeSize?: number) => PBNode;
- export declare const createSingleFileIpldNode: (data: Buffer, name?: string) => PBNode;
+ export declare const createSingleFileIpldNode: (data: Buffer, name?: string, uploadOptions?: FileUploadOptions) => PBNode;
  export declare const createMetadataInlinkIpldNode: (links: CID[], size: number, linkDepth: number, maxNodeSize?: number) => PBNode;
  export declare const createSingleMetadataIpldNode: (data: Buffer, name?: string) => PBNode;
  export declare const createMetadataChunkIpldNode: (data: Buffer) => PBNode;
  export declare const createChunkedMetadataIpldNode: (links: CID[], size: number, linkDepth: number, name?: string, maxNodeSize?: number) => PBNode;
- export declare const createFolderIpldNode: (links: CID[], name: string, linkDepth: number, size: number, maxNodeSize?: number) => PBNode;
+ export declare const createFolderIpldNode: (links: CID[], name: string, linkDepth: number, size: number, maxNodeSize?: number, uploadOptions?: FileUploadOptions) => PBNode;
  export declare const createFolderInlinkIpldNode: (links: CID[], linkDepth: number, maxNodeSize?: number) => PBNode;
  export declare const createMetadataNode: (metadata: OffchainMetadata, maxNodeSize?: number) => PBNode;
  //# sourceMappingURL=nodes.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAc,MAAM,EAAE,MAAM,kBAAkB,CAAA;AACrD,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAKvD,eAAO,MAAM,uBAAuB,SAAU,MAAM,KAAG,MASpD,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAAU,MAAM,SAAS,MAAM,KAAG,MAUpE,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAAU,MAAM,KAAG,MAQxD,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,KAClB,MAYA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,MAcF,CAAA"}
+ {"version":3,"file":"nodes.d.ts","sourceRoot":"","sources":["../../src/ipld/nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,kBAAkB,CAAA;AACtC,OAAO,EAAc,MAAM,EAAE,MAAM,kBAAkB,CAAA;AACrD,OAAO,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAA;AAK1E,eAAO,MAAM,uBAAuB,SAAU,MAAM,KAAG,MASpD,CAAA;AAKH,eAAO,MAAM,yBAAyB,UAC7B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAGH,eAAO,MAAM,wBAAwB,UAC5B,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,wBAAwB,SAC7B,MAAM,SACL,MAAM,kBACG,iBAAiB,KAChC,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,UAChC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,gBACJ,MAAM,KAClB,MAWA,CAAA;AAKH,eAAO,MAAM,4BAA4B,SAAU,MAAM,SAAS,MAAM,KAAG,MAUxE,CAAA;AAEH,eAAO,MAAM,2BAA2B,SAAU,MAAM,KAAG,MAQxD,CAAA;AAEH,eAAO,MAAM,6BAA6B,UACjC,GAAG,EAAE,QACN,MAAM,aACD,MAAM,SACV,MAAM,gBACA,MAAM,KAClB,MAYA,CAAA;AAKH,eAAO,MAAM,oBAAoB,UACxB,GAAG,EAAE,QACN,MAAM,aACD,MAAM,QACX,MAAM,gBACC,MAAM,kBACH,iBAAiB,KAChC,MAaA,CAAA;AAEH,eAAO,MAAM,0BAA0B,UAC9B,GAAG,EAAE,aACD,MAAM,gBACJ,MAAM,KAClB,MAUA,CAAA;AAGH,eAAO,MAAM,kBAAkB,aACnB,gBAAgB,gBACb,MAAM,KAClB,MAcF,CAAA"}
@@ -11,11 +11,12 @@ export const createFileChunkIpldNode = (data) => createNode(encodeIPLDNodeData({
11
11
  // Creates a file ipld node
12
12
  // links: the CIDs of the file's contents
13
13
  // @todo: add the file's metadata
14
- export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
14
+ export const createChunkedFileIpldNode = (links, size, linkDepth, name, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
15
15
  type: MetadataType.File,
16
16
  name,
17
17
  size,
18
18
  linkDepth,
19
+ uploadOptions,
19
20
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
20
21
  // Creates a file ipld node
21
22
  // links: the CIDs of the file's contents
@@ -27,12 +28,13 @@ export const createFileInlinkIpldNode = (links, size, linkDepth, maxNodeSize = D
27
28
  // Creates a file ipld node
28
29
  // links: the CIDs of the file's contents
29
30
  // @todo: add the file's metadata
30
- export const createSingleFileIpldNode = (data, name) => createNode(encodeIPLDNodeData({
31
+ export const createSingleFileIpldNode = (data, name, uploadOptions) => createNode(encodeIPLDNodeData({
31
32
  type: MetadataType.File,
32
33
  name,
33
34
  size: data.length,
34
35
  linkDepth: 0,
35
36
  data,
37
+ uploadOptions,
36
38
  }), []);
37
39
  // Creates a file ipld node
38
40
  // links: the CIDs of the file's contents
@@ -67,11 +69,12 @@ export const createChunkedMetadataIpldNode = (links, size, linkDepth, name, maxN
67
69
  // Creates a folder ipld node
68
70
  // links: the CIDs of the folder's contents
69
71
  // @todo: add the folder's metadata
70
- export const createFolderIpldNode = (links, name, linkDepth, size, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
72
+ export const createFolderIpldNode = (links, name, linkDepth, size, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE, uploadOptions) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
71
73
  type: MetadataType.Folder,
72
74
  name,
73
75
  size,
74
76
  linkDepth,
77
+ uploadOptions,
75
78
  }), links.map((cid) => ({ Hash: cid }))), maxNodeSize);
76
79
  export const createFolderInlinkIpldNode = (links, linkDepth, maxNodeSize = DEFAULT_MAX_CHUNK_SIZE) => ensureNodeMaxSize(createNode(encodeIPLDNodeData({
77
80
  type: MetadataType.FolderInlink,
@@ -1,4 +1,5 @@
  import { CID } from 'multiformats';
+ import { FileUploadOptions } from '../../index.js';
  export type OffchainFileMetadata = {
  type: 'file';
  dataCid: string;
@@ -7,10 +8,11 @@ export type OffchainFileMetadata = {
  totalSize: number;
  totalChunks: number;
  chunks: ChunkInfo[];
+ uploadOptions?: FileUploadOptions;
  };
  export interface ChunkInfo {
  size: number;
  cid: string;
  }
- export declare const fileMetadata: (headCID: CID, chunks: ChunkInfo[], totalSize: number, name?: string | null, mimeType?: string | null) => OffchainFileMetadata;
+ export declare const fileMetadata: (headCID: CID, chunks: ChunkInfo[], totalSize: number, name?: string | null, mimeType?: string | null, uploadOptions?: FileUploadOptions) => OffchainFileMetadata;
  //# sourceMappingURL=file.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../../src/metadata/offchain/file.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAGlC,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,MAAM,EAAE,SAAS,EAAE,CAAA;CACpB,CAAA;AAED,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,GAAG,EAAE,MAAM,CAAA;CACZ;AAED,eAAO,MAAM,YAAY,YACd,GAAG,UACJ,SAAS,EAAE,aACR,MAAM,SACV,MAAM,GAAG,IAAI,aACT,MAAM,GAAG,IAAI,KACvB,oBAUF,CAAA"}
+ {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../../src/metadata/offchain/file.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAClC,OAAO,EAAe,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAE/D,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,MAAM,EAAE,SAAS,EAAE,CAAA;IACnB,aAAa,CAAC,EAAE,iBAAiB,CAAA;CAClC,CAAA;AAED,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,GAAG,EAAE,MAAM,CAAA;CACZ;AAED,eAAO,MAAM,YAAY,YACd,GAAG,UACJ,SAAS,EAAE,aACR,MAAM,SACV,MAAM,GAAG,IAAI,aACT,MAAM,GAAG,IAAI,kBACT,iBAAiB,KAI/B,oBAWF,CAAA"}
@@ -1,5 +1,8 @@
1
1
  import { cidToString } from '../../index.js';
2
- export const fileMetadata = (headCID, chunks, totalSize, name, mimeType) => {
2
+ export const fileMetadata = (headCID, chunks, totalSize, name, mimeType, uploadOptions = {
3
+ compression: undefined,
4
+ encryption: undefined,
5
+ }) => {
3
6
  return {
4
7
  type: 'file',
5
8
  dataCid: cidToString(headCID),
@@ -8,5 +11,6 @@ export const fileMetadata = (headCID, chunks, totalSize, name, mimeType) => {
8
11
  totalSize,
9
12
  totalChunks: chunks.length,
10
13
  chunks,
14
+ uploadOptions,
11
15
  };
12
16
  };
@@ -1,5 +1,6 @@
  import { CID } from 'multiformats';
  import { PBNode } from '../../ipld/index.js';
+ import { FileUploadOptions } from '../onchain/index.js';
  interface ChildrenMetadata {
  type: 'folder' | 'file';
  name?: string;
@@ -13,8 +14,9 @@ export type OffchainFolderMetadata = {
  totalSize: number;
  totalFiles: number;
  children: ChildrenMetadata[];
+ uploadOptions: FileUploadOptions;
  };
  export declare const childrenMetadataFromNode: (node: PBNode) => ChildrenMetadata;
- export declare const folderMetadata: (cid: CID | string, children: ChildrenMetadata[], name?: string | null) => OffchainFolderMetadata;
+ export declare const folderMetadata: (cid: CID | string, children: ChildrenMetadata[], name?: string | null, uploadOptions?: FileUploadOptions) => OffchainFolderMetadata;
  export {};
  //# sourceMappingURL=folder.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"folder.d.ts","sourceRoot":"","sources":["../../../src/metadata/offchain/folder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAA;AAG5C,UAAU,gBAAgB;IACxB,IAAI,EAAE,QAAQ,GAAG,MAAM,CAAA;IACvB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,SAAS,EAAE,MAAM,CAAA;CAClB;AAED,MAAM,MAAM,sBAAsB,GAAG;IACnC,IAAI,EAAE,QAAQ,CAAA;IACd,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;CAC7B,CAAA;AAED,eAAO,MAAM,wBAAwB,SAAU,MAAM,KAAG,gBAYvD,CAAA;AAED,eAAO,MAAM,cAAc,QACpB,GAAG,GAAG,MAAM,YACP,gBAAgB,EAAE,SACrB,MAAM,GAAG,IAAI,KACnB,sBAWF,CAAA"}
+ {"version":3,"file":"folder.d.ts","sourceRoot":"","sources":["../../../src/metadata/offchain/folder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAElC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAA;AAC5C,OAAO,EAAE,iBAAiB,EAA8B,MAAM,qBAAqB,CAAA;AAEnF,UAAU,gBAAgB;IACxB,IAAI,EAAE,QAAQ,GAAG,MAAM,CAAA;IACvB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,SAAS,EAAE,MAAM,CAAA;CAClB;AAED,MAAM,MAAM,sBAAsB,GAAG;IACnC,IAAI,EAAE,QAAQ,CAAA;IACd,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;IAC5B,aAAa,EAAE,iBAAiB,CAAA;CACjC,CAAA;AAED,eAAO,MAAM,wBAAwB,SAAU,MAAM,KAAG,gBAYvD,CAAA;AAED,eAAO,MAAM,cAAc,QACpB,GAAG,GAAG,MAAM,YACP,gBAAgB,EAAE,SACrB,MAAM,GAAG,IAAI,kBACL,iBAAiB,KAC/B,sBAYF,CAAA"}
@@ -13,7 +13,7 @@ export const childrenMetadataFromNode = (node) => {
13
13
  name: ipldData.name,
14
14
  };
15
15
  };
16
- export const folderMetadata = (cid, children, name) => {
16
+ export const folderMetadata = (cid, children, name, uploadOptions = {}) => {
17
17
  cid = typeof cid === 'string' ? cid : cidToString(cid);
18
18
  return {
19
19
  dataCid: cid,
@@ -22,5 +22,6 @@ export const folderMetadata = (cid, children, name) => {
22
22
  children,
23
23
  type: 'folder',
24
24
  name: name !== null && name !== void 0 ? name : undefined,
25
+ uploadOptions,
25
26
  };
26
27
  };
@@ -6,6 +6,7 @@ export interface IPLDNodeData {
  size?: number;
  name?: string;
  data?: Uint8Array;
+ uploadOptions?: FileUploadOptions;
  }
  export declare namespace IPLDNodeData {
  const codec: () => Codec<IPLDNodeData>;
@@ -25,4 +26,44 @@ export declare enum MetadataType {
  export declare namespace MetadataType {
  const codec: () => Codec<MetadataType>;
  }
+ export interface FileUploadOptions {
+ compression?: CompressionOptions;
+ encryption?: EncryptionOptions;
+ }
+ export declare namespace FileUploadOptions {
+ const codec: () => Codec<FileUploadOptions>;
+ const encode: (obj: Partial<FileUploadOptions>) => Uint8Array;
+ const decode: (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions<FileUploadOptions>) => FileUploadOptions;
+ }
+ export interface CompressionOptions {
+ algorithm: CompressionAlgorithm;
+ level?: number;
+ chunkSize?: number;
+ }
+ export declare namespace CompressionOptions {
+ const codec: () => Codec<CompressionOptions>;
+ const encode: (obj: Partial<CompressionOptions>) => Uint8Array;
+ const decode: (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions<CompressionOptions>) => CompressionOptions;
+ }
+ export interface EncryptionOptions {
+ algorithm: EncryptionAlgorithm;
+ chunkSize?: number;
+ }
+ export declare namespace EncryptionOptions {
+ const codec: () => Codec<EncryptionOptions>;
+ const encode: (obj: Partial<EncryptionOptions>) => Uint8Array;
+ const decode: (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions<EncryptionOptions>) => EncryptionOptions;
+ }
+ export declare enum CompressionAlgorithm {
+ ZLIB = "ZLIB"
+ }
+ export declare namespace CompressionAlgorithm {
+ const codec: () => Codec<CompressionAlgorithm>;
+ }
+ export declare enum EncryptionAlgorithm {
+ AES_256_GCM = "AES_256_GCM"
+ }
+ export declare namespace EncryptionAlgorithm {
+ const codec: () => Codec<EncryptionAlgorithm>;
+ }
  //# sourceMappingURL=OnchainMetadata.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"OnchainMetadata.d.ts","sourceRoot":"","sources":["../../../../src/metadata/onchain/protobuf/OnchainMetadata.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,KAAK,KAAK,EAAiB,KAAK,aAAa,EAAuC,MAAM,iBAAiB,CAAA;AACpH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAA;AAEpD,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,YAAY,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,UAAU,CAAA;CAClB;AAED,yBAAiB,YAAY,CAAC;IAGrB,MAAM,KAAK,QAAO,KAAK,CAAC,YAAY,CA+E1C,CAAA;IAEM,MAAM,MAAM,QAAS,OAAO,CAAC,YAAY,CAAC,KAAG,UAEnD,CAAA;IAEM,MAAM,MAAM,QAAS,UAAU,GAAG,cAAc,SAAS,aAAa,CAAC,YAAY,CAAC,KAAG,YAE7F,CAAA;CACF;AAED,oBAAY,YAAY;IACtB,IAAI,SAAS;IACb,UAAU,eAAe;IACzB,SAAS,cAAc;IACvB,MAAM,WAAW;IACjB,YAAY,iBAAiB;IAC7B,QAAQ,aAAa;IACrB,cAAc,mBAAmB;IACjC,aAAa,kBAAkB;CAChC;AAaD,yBAAiB,YAAY,CAAC;IACrB,MAAM,KAAK,QAAO,KAAK,CAAC,YAAY,CAE1C,CAAA;CACF"}
+ {"version":3,"file":"OnchainMetadata.d.ts","sourceRoot":"","sources":["../../../../src/metadata/onchain/protobuf/OnchainMetadata.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,KAAK,KAAK,EAAiB,KAAK,aAAa,EAAuC,MAAM,iBAAiB,CAAA;AACpH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAA;AAEpD,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,YAAY,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,UAAU,CAAA;IACjB,aAAa,CAAC,EAAE,iBAAiB,CAAA;CAClC;AAED,yBAAiB,YAAY,CAAC;IAGrB,MAAM,KAAK,QAAO,KAAK,CAAC,YAAY,CA0F1C,CAAA;IAEM,MAAM,MAAM,QAAS,OAAO,CAAC,YAAY,CAAC,KAAG,UAEnD,CAAA;IAEM,MAAM,MAAM,QAAS,UAAU,GAAG,cAAc,SAAS,aAAa,CAAC,YAAY,CAAC,KAAG,YAE7F,CAAA;CACF;AAED,oBAAY,YAAY;IACtB,IAAI,SAAS;IACb,UAAU,eAAe;IACzB,SAAS,cAAc;IACvB,MAAM,WAAW;IACjB,YAAY,iBAAiB;IAC7B,QAAQ,aAAa;IACrB,cAAc,mBAAmB;IACjC,aAAa,kBAAkB;CAChC;AAaD,yBAAiB,YAAY,CAAC;IACrB,MAAM,KAAK,QAAO,KAAK,CAAC,YAAY,CAE1C,CAAA;CACF;AACD,MAAM,WAAW,iBAAiB;IAChC,WAAW,CAAC,EAAE,kBAAkB,CAAA;IAChC,UAAU,CAAC,EAAE,iBAAiB,CAAA;CAC/B;AAED,yBAAiB,iBAAiB,CAAC;IAG1B,MAAM,KAAK,QAAO,KAAK,CAAC,iBAAiB,CAqD/C,CAAA;IAEM,MAAM,MAAM,QAAS,OAAO,CAAC,iBAAiB,CAAC,KAAG,UAExD,CAAA;IAEM,MAAM,MAAM,QAAS,UAAU,GAAG,cAAc,SAAS,aAAa,CAAC,iBAAiB,CAAC,KAAG,iBAElG,CAAA;CACF;AAED,MAAM,WAAW,kBAAkB;IACjC,SAAS,EAAE,oBAAoB,CAAA;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,SAAS,CAAC,EAAE,MAAM,CAAA;CACnB;AAED,yBAAiB,kBAAkB,CAAC;IAG3B,MAAM,KAAK,QAAO,KAAK,CAAC,kBAAkB,CA4DhD,CAAA;IAEM,MAAM,MAAM,QAAS,OAAO,CAAC,kBAAkB,CAAC,KAAG,UAEzD,CAAA;IAEM,MAAM,MAAM,QAAS,UAAU,GAAG,cAAc,SAAS,aAAa,CAAC,kBAAkB,CAAC,KAAG,kBAEnG,CAAA;CACF;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,mBAAmB,CAAA;IAC9B,SAAS,CAAC,EAAE,MAAM,CAAA;CACnB;AAED,yBAAiB,iBAAiB,CAAC;IAG1B,MAAM,KAAK,QAAO,KAAK,CAAC,iBAAiB,CAmD/C,CAAA;IAEM,MAAM,MAAM,QAAS,OAAO,CAAC,iBAAiB,CAAC,KAAG,UAExD,CAAA;IAEM,MAAM,MAAM,QAAS,UAAU,GAAG,cAAc,SAAS,aAAa,CAAC,iBAAiB,CAAC,KAAG,iBAElG,CAAA;CACF;AAED,oBAAY,oBAAoB;IAC9B,IAAI,SAAS;CACd;AAMD,yBAAiB,oBAAoB,CAAC;IAC7B,MAAM,KAAK,QAAO,KAAK,CAAC,oBAAoB,CAElD,CAAA;CACF;AACD,oBAAY,mBAAmB;IAC7B,WAAW,gBAAgB;CAC5B;AAMD,yBAAiB,mBAAmB,CAAC;IAC5B,MAAM,KAAK,QAAO,KAAK,CAAC,mBAAmB,CAEjD,CAAA;CACF"}
@@ -33,10 +33,15 @@ export var IPLDNodeData;
33
33
  w.uint32(42);
34
34
  w.bytes(obj.data);
35
35
  }
36
+ if (obj.uploadOptions != null) {
37
+ w.uint32(50);
38
+ FileUploadOptions.codec().encode(obj.uploadOptions, w);
39
+ }
36
40
  if (opts.lengthDelimited !== false) {
37
41
  w.ldelim();
38
42
  }
39
43
  }, (reader, length, opts = {}) => {
44
+ var _a;
40
45
  const obj = {
41
46
  type: MetadataType.File,
42
47
  linkDepth: 0
@@ -65,6 +70,12 @@ export var IPLDNodeData;
65
70
  obj.data = reader.bytes();
66
71
  break;
67
72
  }
73
+ case 6: {
74
+ obj.uploadOptions = FileUploadOptions.codec().decode(reader, reader.uint32(), {
75
+ limits: (_a = opts.limits) === null || _a === void 0 ? void 0 : _a.uploadOptions
76
+ });
77
+ break;
78
+ }
68
79
  default: {
69
80
  reader.skipType(tag & 7);
70
81
  break;
@@ -110,3 +121,202 @@ var __MetadataTypeValues;
110
121
  return enumeration(__MetadataTypeValues);
111
122
  };
112
123
  })(MetadataType || (MetadataType = {}));
124
+ export var FileUploadOptions;
125
+ (function (FileUploadOptions) {
126
+ let _codec;
127
+ FileUploadOptions.codec = () => {
128
+ if (_codec == null) {
129
+ _codec = message((obj, w, opts = {}) => {
130
+ if (opts.lengthDelimited !== false) {
131
+ w.fork();
132
+ }
133
+ if (obj.compression != null) {
134
+ w.uint32(10);
135
+ CompressionOptions.codec().encode(obj.compression, w);
136
+ }
137
+ if (obj.encryption != null) {
138
+ w.uint32(18);
139
+ EncryptionOptions.codec().encode(obj.encryption, w);
140
+ }
141
+ if (opts.lengthDelimited !== false) {
142
+ w.ldelim();
143
+ }
144
+ }, (reader, length, opts = {}) => {
145
+ var _a, _b;
146
+ const obj = {};
147
+ const end = length == null ? reader.len : reader.pos + length;
148
+ while (reader.pos < end) {
149
+ const tag = reader.uint32();
150
+ switch (tag >>> 3) {
151
+ case 1: {
152
+ obj.compression = CompressionOptions.codec().decode(reader, reader.uint32(), {
153
+ limits: (_a = opts.limits) === null || _a === void 0 ? void 0 : _a.compression
154
+ });
155
+ break;
156
+ }
157
+ case 2: {
158
+ obj.encryption = EncryptionOptions.codec().decode(reader, reader.uint32(), {
159
+ limits: (_b = opts.limits) === null || _b === void 0 ? void 0 : _b.encryption
160
+ });
161
+ break;
162
+ }
163
+ default: {
164
+ reader.skipType(tag & 7);
165
+ break;
166
+ }
167
+ }
168
+ }
169
+ return obj;
170
+ });
171
+ }
172
+ return _codec;
173
+ };
174
+ FileUploadOptions.encode = (obj) => {
175
+ return encodeMessage(obj, FileUploadOptions.codec());
176
+ };
177
+ FileUploadOptions.decode = (buf, opts) => {
178
+ return decodeMessage(buf, FileUploadOptions.codec(), opts);
179
+ };
180
+ })(FileUploadOptions || (FileUploadOptions = {}));
181
+ export var CompressionOptions;
182
+ (function (CompressionOptions) {
183
+ let _codec;
184
+ CompressionOptions.codec = () => {
185
+ if (_codec == null) {
186
+ _codec = message((obj, w, opts = {}) => {
187
+ if (opts.lengthDelimited !== false) {
188
+ w.fork();
189
+ }
190
+ if (obj.algorithm != null && __CompressionAlgorithmValues[obj.algorithm] !== 0) {
191
+ w.uint32(8);
192
+ CompressionAlgorithm.codec().encode(obj.algorithm, w);
193
+ }
194
+ if (obj.level != null) {
195
+ w.uint32(16);
196
+ w.int32(obj.level);
197
+ }
198
+ if (obj.chunkSize != null) {
199
+ w.uint32(24);
200
+ w.int32(obj.chunkSize);
201
+ }
202
+ if (opts.lengthDelimited !== false) {
203
+ w.ldelim();
204
+ }
205
+ }, (reader, length, opts = {}) => {
206
+ const obj = {
207
+ algorithm: CompressionAlgorithm.ZLIB
208
+ };
209
+ const end = length == null ? reader.len : reader.pos + length;
210
+ while (reader.pos < end) {
211
+ const tag = reader.uint32();
212
+ switch (tag >>> 3) {
213
+ case 1: {
214
+ obj.algorithm = CompressionAlgorithm.codec().decode(reader);
215
+ break;
216
+ }
217
+ case 2: {
218
+ obj.level = reader.int32();
219
+ break;
220
+ }
221
+ case 3: {
222
+ obj.chunkSize = reader.int32();
223
+ break;
224
+ }
225
+ default: {
226
+ reader.skipType(tag & 7);
227
+ break;
228
+ }
229
+ }
230
+ }
231
+ return obj;
232
+ });
233
+ }
234
+ return _codec;
235
+ };
236
+ CompressionOptions.encode = (obj) => {
237
+ return encodeMessage(obj, CompressionOptions.codec());
238
+ };
239
+ CompressionOptions.decode = (buf, opts) => {
240
+ return decodeMessage(buf, CompressionOptions.codec(), opts);
241
+ };
242
+ })(CompressionOptions || (CompressionOptions = {}));
243
+ export var EncryptionOptions;
244
+ (function (EncryptionOptions) {
245
+ let _codec;
246
+ EncryptionOptions.codec = () => {
247
+ if (_codec == null) {
248
+ _codec = message((obj, w, opts = {}) => {
249
+ if (opts.lengthDelimited !== false) {
250
+ w.fork();
251
+ }
252
+ if (obj.algorithm != null && __EncryptionAlgorithmValues[obj.algorithm] !== 0) {
253
+ w.uint32(8);
254
+ EncryptionAlgorithm.codec().encode(obj.algorithm, w);
255
+ }
256
+ if (obj.chunkSize != null) {
257
+ w.uint32(16);
258
+ w.int32(obj.chunkSize);
259
+ }
260
+ if (opts.lengthDelimited !== false) {
261
+ w.ldelim();
262
+ }
263
+ }, (reader, length, opts = {}) => {
264
+ const obj = {
265
+ algorithm: EncryptionAlgorithm.AES_256_GCM
266
+ };
267
+ const end = length == null ? reader.len : reader.pos + length;
268
+ while (reader.pos < end) {
269
+ const tag = reader.uint32();
270
+ switch (tag >>> 3) {
271
+ case 1: {
272
+ obj.algorithm = EncryptionAlgorithm.codec().decode(reader);
273
+ break;
274
+ }
275
+ case 2: {
276
+ obj.chunkSize = reader.int32();
277
+ break;
278
+ }
279
+ default: {
280
+ reader.skipType(tag & 7);
281
+ break;
282
+ }
283
+ }
284
+ }
285
+ return obj;
286
+ });
287
+ }
288
+ return _codec;
289
+ };
290
+ EncryptionOptions.encode = (obj) => {
291
+ return encodeMessage(obj, EncryptionOptions.codec());
292
+ };
293
+ EncryptionOptions.decode = (buf, opts) => {
294
+ return decodeMessage(buf, EncryptionOptions.codec(), opts);
295
+ };
296
+ })(EncryptionOptions || (EncryptionOptions = {}));
297
+ export var CompressionAlgorithm;
298
+ (function (CompressionAlgorithm) {
299
+ CompressionAlgorithm["ZLIB"] = "ZLIB";
300
+ })(CompressionAlgorithm || (CompressionAlgorithm = {}));
301
+ var __CompressionAlgorithmValues;
302
+ (function (__CompressionAlgorithmValues) {
303
+ __CompressionAlgorithmValues[__CompressionAlgorithmValues["ZLIB"] = 0] = "ZLIB";
304
+ })(__CompressionAlgorithmValues || (__CompressionAlgorithmValues = {}));
305
+ (function (CompressionAlgorithm) {
306
+ CompressionAlgorithm.codec = () => {
307
+ return enumeration(__CompressionAlgorithmValues);
308
+ };
309
+ })(CompressionAlgorithm || (CompressionAlgorithm = {}));
310
+ export var EncryptionAlgorithm;
311
+ (function (EncryptionAlgorithm) {
312
+ EncryptionAlgorithm["AES_256_GCM"] = "AES_256_GCM";
313
+ })(EncryptionAlgorithm || (EncryptionAlgorithm = {}));
314
+ var __EncryptionAlgorithmValues;
315
+ (function (__EncryptionAlgorithmValues) {
316
+ __EncryptionAlgorithmValues[__EncryptionAlgorithmValues["AES_256_GCM"] = 0] = "AES_256_GCM";
317
+ })(__EncryptionAlgorithmValues || (__EncryptionAlgorithmValues = {}));
318
+ (function (EncryptionAlgorithm) {
319
+ EncryptionAlgorithm.codec = () => {
320
+ return enumeration(__EncryptionAlgorithmValues);
321
+ };
322
+ })(EncryptionAlgorithm || (EncryptionAlgorithm = {}));
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@autonomys/auto-drive",
  "packageManager": "yarn@4.1.1",
- "version": "0.7.2",
+ "version": "0.7.4",
  "license": "MIT",
  "main": "dist/index.js",
  "repository": {
@@ -41,5 +41,5 @@
  "protons": "^7.6.0",
  "protons-runtime": "^5.5.0"
  },
- "gitHead": "2c7b1be8b6c2f8fbbfc5bcc6aa6cb9b1f363ce83"
+ "gitHead": "67a6e877c7516104bc82807e91f5fc193372b671"
  }
@@ -1,5 +1,6 @@
1
1
  import { CID } from 'multiformats/cid'
2
2
  import { PBNode } from '../ipld/index.js'
3
+ import { FileUploadOptions } from '../metadata/index.js'
3
4
  import {
4
5
  createChunkedFileIpldNode,
5
6
  createChunkedMetadataIpldNode,
@@ -20,8 +21,9 @@ export interface Builders {
20
21
  linkDepth: number,
21
22
  name?: string,
22
23
  maxNodeSize?: number,
24
+ fileUploadOptions?: FileUploadOptions,
23
25
  ) => PBNode
24
- single: (data: Buffer, filename?: string) => PBNode
26
+ single: (data: Buffer, filename?: string, fileUploadOptions?: FileUploadOptions) => PBNode
25
27
  }
26
28
  export const metadataBuilders: Builders = {
27
29
  inlink: createMetadataInlinkIpldNode,
@@ -2,11 +2,18 @@ import type { BaseBlockstore } from 'blockstore-core'
2
2
  import type { AwaitIterable } from 'interface-store'
3
3
  import { CID } from 'multiformats'
4
4
  import { cidOfNode } from '../cid/index.js'
5
- import { decodeIPLDNodeData, OffchainMetadata } from '../metadata/index.js'
5
+ import { decodeIPLDNodeData, FileUploadOptions, OffchainMetadata } from '../metadata/index.js'
6
6
  import { Builders, fileBuilders, metadataBuilders } from './builders.js'
7
7
  import { createFolderInlinkIpldNode, createFolderIpldNode } from './nodes.js'
8
8
  import { chunkBuffer, encodeNode, PBNode } from './utils.js'
9
9
 
10
+ type ChunkerLimits = {
11
+ maxChunkSize: number
12
+ maxLinkPerNode: number
13
+ }
14
+
15
+ type ChunkerOptions = ChunkerLimits & FileUploadOptions
16
+
10
17
  export const DEFAULT_MAX_CHUNK_SIZE = 64 * 1024
11
18
 
12
19
  const ESTIMATED_LINK_SIZE_IN_BYTES = 64
@@ -17,14 +24,23 @@ export const processFileToIPLDFormat = (
17
24
  file: AwaitIterable<Buffer>,
18
25
  totalSize: number,
19
26
  filename?: string,
20
- { maxChunkSize, maxLinkPerNode }: { maxChunkSize: number; maxLinkPerNode: number } = {
27
+ {
28
+ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
29
+ maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
30
+ encryption = undefined,
31
+ compression = undefined,
32
+ }: Partial<ChunkerOptions> = {
21
33
  maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
22
34
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
35
+ encryption: undefined,
36
+ compression: undefined,
23
37
  },
24
38
  ): Promise<CID> => {
25
39
  return processBufferToIPLDFormat(blockstore, file, filename, totalSize, fileBuilders, {
26
40
  maxChunkSize,
27
41
  maxLinkPerNode,
42
+ encryption,
43
+ compression,
28
44
  })
29
45
  }
30
46
 
@@ -56,12 +72,19 @@ const processBufferToIPLDFormat = async (
56
72
  filename: string | undefined,
57
73
  totalSize: number,
58
74
  builders: Builders,
59
- { maxChunkSize, maxLinkPerNode }: { maxChunkSize: number; maxLinkPerNode: number } = {
75
+ {
76
+ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
77
+ maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
78
+ encryption = undefined,
79
+ compression = undefined,
80
+ }: ChunkerOptions = {
60
81
  maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
61
82
  maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
83
+ encryption: undefined,
84
+ compression: undefined,
62
85
  },
63
86
  ): Promise<CID> => {
64
- const bufferChunks = chunkBuffer(buffer, { maxChunkSize })
87
+ const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxChunkSize })
65
88
 
66
89
  let CIDs: CID[] = []
67
90
  for await (const chunk of bufferChunks) {
@@ -74,6 +97,8 @@ const processBufferToIPLDFormat = async (
74
97
  return processBufferToIPLDFormatFromChunks(blockstore, CIDs, filename, totalSize, builders, {
75
98
  maxLinkPerNode,
76
99
  maxChunkSize,
100
+ encryption,
101
+ compression,
77
102
  })
78
103
  }
79
104
 
@@ -83,9 +108,16 @@ export const processBufferToIPLDFormatFromChunks = async (
83
108
  filename: string | undefined,
84
109
  totalSize: number,
85
110
  builders: Builders,
86
- { maxLinkPerNode, maxChunkSize }: { maxLinkPerNode: number; maxChunkSize: number } = {
87
- maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
111
+ {
112
+ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
113
+ maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
114
+ encryption = undefined,
115
+ compression = undefined,
116
+ }: Partial<ChunkerOptions> = {
88
117
  maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
118
+ maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
119
+ encryption: undefined,
120
+ compression: undefined,
89
121
  },
90
122
  ): Promise<CID> => {
91
123
  let chunkCount = 0
@@ -99,7 +131,10 @@ export const processBufferToIPLDFormatFromChunks = async (
99
131
  const nodeBytes = await blockstore.get(CIDs[0])
100
132
  await blockstore.delete(CIDs[0])
101
133
  const data = decodeIPLDNodeData(nodeBytes)
102
- const singleNode = builders.single(Buffer.from(data.data!), filename)
134
+ const singleNode = builders.single(Buffer.from(data.data!), filename, {
135
+ compression,
136
+ encryption,
137
+ })
103
138
  await blockstore.put(cidOfNode(singleNode), encodeNode(singleNode))
104
139
  const headCID = cidOfNode(singleNode)
105
140
 
@@ -120,7 +155,10 @@ export const processBufferToIPLDFormatFromChunks = async (
120
155
  depth++
121
156
  CIDs = newCIDs
122
157
  }
123
- const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize)
158
+ const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize, {
159
+ compression,
160
+ encryption,
161
+ })
124
162
  const headCID = cidOfNode(head)
125
163
  await blockstore.put(headCID, encodeNode(head))
126
164
 
@@ -132,7 +170,17 @@ export const processFolderToIPLDFormat = async (
132
170
  children: CID[],
133
171
  name: string,
134
172
  size: number,
135
- { maxLinkPerNode }: { maxLinkPerNode: number } = { maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE },
173
+ {
174
+ maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
175
+ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
176
+ compression = undefined,
177
+ encryption = undefined,
178
+ }: Partial<ChunkerOptions> = {
179
+ maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
180
+ maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
181
+ compression: undefined,
182
+ encryption: undefined,
183
+ },
136
184
  ): Promise<CID> => {
137
185
  let cids = children
138
186
  let depth = 0
@@ -149,7 +197,10 @@ export const processFolderToIPLDFormat = async (
149
197
  depth++
150
198
  }
151
199
 
152
- const node = createFolderIpldNode(cids, name, depth, size)
200
+ const node = createFolderIpldNode(cids, name, depth, size, maxChunkSize, {
201
+ compression,
202
+ encryption,
203
+ })
153
204
  const cid = cidOfNode(node)
154
205
  await blockstore.put(cid, encodeNode(node))
155
206
 
package/src/ipld/nodes.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import { CID } from 'multiformats/cid'
2
2
  import { createNode, PBNode } from '../ipld/index.js'
3
- import { OffchainMetadata } from '../metadata/index.js'
3
+ import { FileUploadOptions, OffchainMetadata } from '../metadata/index.js'
4
4
  import { encodeIPLDNodeData, MetadataType } from '../metadata/onchain/index.js'
5
5
  import { DEFAULT_MAX_CHUNK_SIZE, ensureNodeMaxSize } from './chunker.js'
6
6
 
@@ -25,6 +25,7 @@ export const createChunkedFileIpldNode = (
25
25
  linkDepth: number,
26
26
  name?: string,
27
27
  maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
28
+ uploadOptions?: FileUploadOptions,
28
29
  ): PBNode =>
29
30
  ensureNodeMaxSize(
30
31
  createNode(
@@ -33,6 +34,7 @@ export const createChunkedFileIpldNode = (
33
34
  name,
34
35
  size,
35
36
  linkDepth,
37
+ uploadOptions,
36
38
  }),
37
39
  links.map((cid) => ({ Hash: cid })),
38
40
  ),
@@ -61,7 +63,11 @@ export const createFileInlinkIpldNode = (
61
63
  // Creates a file ipld node
62
64
  // links: the CIDs of the file's contents
63
65
  // @todo: add the file's metadata
64
- export const createSingleFileIpldNode = (data: Buffer, name?: string): PBNode =>
66
+ export const createSingleFileIpldNode = (
67
+ data: Buffer,
68
+ name?: string,
69
+ uploadOptions?: FileUploadOptions,
70
+ ): PBNode =>
65
71
  createNode(
66
72
  encodeIPLDNodeData({
67
73
  type: MetadataType.File,
@@ -69,6 +75,7 @@ export const createSingleFileIpldNode = (data: Buffer, name?: string): PBNode =>
69
75
  size: data.length,
70
76
  linkDepth: 0,
71
77
  data,
78
+ uploadOptions,
72
79
  }),
73
80
  [],
74
81
  )
@@ -148,6 +155,7 @@ export const createFolderIpldNode = (
148
155
  linkDepth: number,
149
156
  size: number,
150
157
  maxNodeSize: number = DEFAULT_MAX_CHUNK_SIZE,
158
+ uploadOptions?: FileUploadOptions,
151
159
  ): PBNode =>
152
160
  ensureNodeMaxSize(
153
161
  createNode(
@@ -156,6 +164,7 @@ export const createFolderIpldNode = (
156
164
  name,
157
165
  size,
158
166
  linkDepth,
167
+ uploadOptions,
159
168
  }),
160
169
  links.map((cid) => ({ Hash: cid })),
161
170
  ),
@@ -1,5 +1,5 @@
1
1
  import { CID } from 'multiformats'
2
- import { cidToString } from '../../index.js'
2
+ import { cidToString, FileUploadOptions } from '../../index.js'
3
3
 
4
4
  export type OffchainFileMetadata = {
5
5
  type: 'file'
@@ -9,6 +9,7 @@ export type OffchainFileMetadata = {
9
9
  totalSize: number
10
10
  totalChunks: number
11
11
  chunks: ChunkInfo[]
12
+ uploadOptions?: FileUploadOptions
12
13
  }
13
14
 
14
15
  export interface ChunkInfo {
@@ -22,6 +23,10 @@ export const fileMetadata = (
22
23
  totalSize: number,
23
24
  name?: string | null,
24
25
  mimeType?: string | null,
26
+ uploadOptions: FileUploadOptions = {
27
+ compression: undefined,
28
+ encryption: undefined,
29
+ },
25
30
  ): OffchainFileMetadata => {
26
31
  return {
27
32
  type: 'file',
@@ -31,5 +36,6 @@ export const fileMetadata = (
31
36
  totalSize,
32
37
  totalChunks: chunks.length,
33
38
  chunks,
39
+ uploadOptions,
34
40
  }
35
41
  }
@@ -1,7 +1,7 @@
1
1
  import { CID } from 'multiformats'
2
2
  import { cidOfNode, cidToString } from '../../cid/index.js'
3
3
  import { PBNode } from '../../ipld/index.js'
4
- import { IPLDNodeData, MetadataType } from '../onchain/index.js'
4
+ import { FileUploadOptions, IPLDNodeData, MetadataType } from '../onchain/index.js'
5
5
 
6
6
  interface ChildrenMetadata {
7
7
  type: 'folder' | 'file'
@@ -17,6 +17,7 @@ export type OffchainFolderMetadata = {
17
17
  totalSize: number
18
18
  totalFiles: number
19
19
  children: ChildrenMetadata[]
20
+ uploadOptions: FileUploadOptions
20
21
  }
21
22
 
22
23
  export const childrenMetadataFromNode = (node: PBNode): ChildrenMetadata => {
@@ -37,6 +38,7 @@ export const folderMetadata = (
37
38
  cid: CID | string,
38
39
  children: ChildrenMetadata[],
39
40
  name?: string | null,
41
+ uploadOptions: FileUploadOptions = {},
40
42
  ): OffchainFolderMetadata => {
41
43
  cid = typeof cid === 'string' ? cid : cidToString(cid)
42
44
 
@@ -47,5 +49,6 @@ export const folderMetadata = (
47
49
  children,
48
50
  type: 'folder',
49
51
  name: name ?? undefined,
52
+ uploadOptions,
50
53
  }
51
54
  }
@@ -6,6 +6,7 @@ message IPLDNodeData {
  optional int32 size = 3;
  optional string name = 4;
  optional bytes data = 5;
+ optional FileUploadOptions uploadOptions = 6;
  }
  
  // MetadataType defines the possible types of metadata.
@@ -19,3 +20,27 @@ enum MetadataType {
  MetadataInlink = 6;
  MetadataChunk = 7;
  }
+
+ message FileUploadOptions {
+ optional CompressionOptions compression = 1;
+ optional EncryptionOptions encryption = 2;
+ }
+
+ message CompressionOptions {
+ CompressionAlgorithm algorithm = 1;
+ optional int32 level = 2;
+ optional int32 chunkSize = 3;
+ }
+
+ message EncryptionOptions {
+ EncryptionAlgorithm algorithm = 1;
+ optional int32 chunkSize = 2;
+ }
+
+ enum CompressionAlgorithm {
+ ZLIB = 0;
+ }
+
+ enum EncryptionAlgorithm {
+ AES_256_GCM = 0;
+ }
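For reference, the new `uploadOptions = 6` field in the schema is what produces the `w.uint32(50)` call in the generated writer: a length-delimited protobuf field is tagged `(fieldNumber << 3) | 2`. A small sketch of that arithmetic:

```ts
// Protobuf tag = (field number << 3) | wire type; wire type 2 = length-delimited.
const tag = (fieldNumber: number, wireType: number): number => (fieldNumber << 3) | wireType

console.log(tag(6, 2)) // 50 -> IPLDNodeData.uploadOptions
console.log(tag(1, 2)) // 10 -> FileUploadOptions.compression
console.log(tag(2, 2)) // 18 -> FileUploadOptions.encryption
```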
@@ -13,6 +13,7 @@ export interface IPLDNodeData {
13
13
  size?: number
14
14
  name?: string
15
15
  data?: Uint8Array
16
+ uploadOptions?: FileUploadOptions
16
17
  }
17
18
 
18
19
  export namespace IPLDNodeData {
@@ -50,6 +51,11 @@ export namespace IPLDNodeData {
50
51
  w.bytes(obj.data)
51
52
  }
52
53
 
54
+ if (obj.uploadOptions != null) {
55
+ w.uint32(50)
56
+ FileUploadOptions.codec().encode(obj.uploadOptions, w)
57
+ }
58
+
53
59
  if (opts.lengthDelimited !== false) {
54
60
  w.ldelim()
55
61
  }
@@ -85,6 +91,12 @@ export namespace IPLDNodeData {
85
91
  obj.data = reader.bytes()
86
92
  break
87
93
  }
94
+ case 6: {
95
+ obj.uploadOptions = FileUploadOptions.codec().decode(reader, reader.uint32(), {
96
+ limits: opts.limits?.uploadOptions
97
+ })
98
+ break
99
+ }
88
100
  default: {
89
101
  reader.skipType(tag & 7)
90
102
  break
@@ -135,3 +147,251 @@ export namespace MetadataType {
135
147
  return enumeration<MetadataType>(__MetadataTypeValues)
136
148
  }
137
149
  }
150
+ export interface FileUploadOptions {
151
+ compression?: CompressionOptions
152
+ encryption?: EncryptionOptions
153
+ }
154
+
155
+ export namespace FileUploadOptions {
156
+ let _codec: Codec<FileUploadOptions>
157
+
158
+ export const codec = (): Codec<FileUploadOptions> => {
159
+ if (_codec == null) {
160
+ _codec = message<FileUploadOptions>((obj, w, opts = {}) => {
161
+ if (opts.lengthDelimited !== false) {
162
+ w.fork()
163
+ }
164
+
165
+ if (obj.compression != null) {
166
+ w.uint32(10)
167
+ CompressionOptions.codec().encode(obj.compression, w)
168
+ }
169
+
170
+ if (obj.encryption != null) {
171
+ w.uint32(18)
172
+ EncryptionOptions.codec().encode(obj.encryption, w)
173
+ }
174
+
175
+ if (opts.lengthDelimited !== false) {
176
+ w.ldelim()
177
+ }
178
+ }, (reader, length, opts = {}) => {
179
+ const obj: any = {}
180
+
181
+ const end = length == null ? reader.len : reader.pos + length
182
+
183
+ while (reader.pos < end) {
184
+ const tag = reader.uint32()
185
+
186
+ switch (tag >>> 3) {
187
+ case 1: {
188
+ obj.compression = CompressionOptions.codec().decode(reader, reader.uint32(), {
189
+ limits: opts.limits?.compression
190
+ })
191
+ break
192
+ }
193
+ case 2: {
194
+ obj.encryption = EncryptionOptions.codec().decode(reader, reader.uint32(), {
195
+ limits: opts.limits?.encryption
196
+ })
197
+ break
198
+ }
199
+ default: {
200
+ reader.skipType(tag & 7)
201
+ break
202
+ }
203
+ }
204
+ }
205
+
206
+ return obj
207
+ })
208
+ }
209
+
210
+ return _codec
211
+ }
212
+
213
+ export const encode = (obj: Partial<FileUploadOptions>): Uint8Array => {
214
+ return encodeMessage(obj, FileUploadOptions.codec())
215
+ }
216
+
217
+ export const decode = (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions<FileUploadOptions>): FileUploadOptions => {
218
+ return decodeMessage(buf, FileUploadOptions.codec(), opts)
219
+ }
220
+ }
221
+
222
+ export interface CompressionOptions {
223
+ algorithm: CompressionAlgorithm
224
+ level?: number
225
+ chunkSize?: number
226
+ }
227
+
228
+ export namespace CompressionOptions {
229
+ let _codec: Codec<CompressionOptions>
230
+
231
+ export const codec = (): Codec<CompressionOptions> => {
232
+ if (_codec == null) {
233
+ _codec = message<CompressionOptions>((obj, w, opts = {}) => {
234
+ if (opts.lengthDelimited !== false) {
235
+ w.fork()
236
+ }
237
+
238
+ if (obj.algorithm != null && __CompressionAlgorithmValues[obj.algorithm] !== 0) {
239
+ w.uint32(8)
240
+ CompressionAlgorithm.codec().encode(obj.algorithm, w)
241
+ }
242
+
243
+ if (obj.level != null) {
244
+ w.uint32(16)
245
+ w.int32(obj.level)
246
+ }
247
+
248
+ if (obj.chunkSize != null) {
249
+ w.uint32(24)
250
+ w.int32(obj.chunkSize)
251
+ }
252
+
253
+ if (opts.lengthDelimited !== false) {
254
+ w.ldelim()
255
+ }
256
+ }, (reader, length, opts = {}) => {
257
+ const obj: any = {
258
+ algorithm: CompressionAlgorithm.ZLIB
259
+ }
260
+
261
+ const end = length == null ? reader.len : reader.pos + length
262
+
263
+ while (reader.pos < end) {
264
+ const tag = reader.uint32()
265
+
266
+ switch (tag >>> 3) {
267
+ case 1: {
268
+ obj.algorithm = CompressionAlgorithm.codec().decode(reader)
269
+ break
270
+ }
271
+ case 2: {
272
+ obj.level = reader.int32()
273
+ break
274
+ }
275
+ case 3: {
276
+ obj.chunkSize = reader.int32()
277
+ break
278
+ }
279
+ default: {
280
+ reader.skipType(tag & 7)
281
+ break
282
+ }
283
+ }
284
+ }
285
+
286
+ return obj
287
+ })
288
+ }
289
+
290
+ return _codec
291
+ }
292
+
293
+ export const encode = (obj: Partial<CompressionOptions>): Uint8Array => {
294
+ return encodeMessage(obj, CompressionOptions.codec())
295
+ }
296
+
297
+ export const decode = (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions<CompressionOptions>): CompressionOptions => {
298
+ return decodeMessage(buf, CompressionOptions.codec(), opts)
299
+ }
300
+ }
301
+
302
+ export interface EncryptionOptions {
303
+ algorithm: EncryptionAlgorithm
304
+ chunkSize?: number
305
+ }
306
+
307
+ export namespace EncryptionOptions {
308
+ let _codec: Codec<EncryptionOptions>
309
+
310
+ export const codec = (): Codec<EncryptionOptions> => {
311
+ if (_codec == null) {
312
+ _codec = message<EncryptionOptions>((obj, w, opts = {}) => {
313
+ if (opts.lengthDelimited !== false) {
314
+ w.fork()
315
+ }
316
+
317
+ if (obj.algorithm != null && __EncryptionAlgorithmValues[obj.algorithm] !== 0) {
318
+ w.uint32(8)
319
+ EncryptionAlgorithm.codec().encode(obj.algorithm, w)
320
+ }
321
+
322
+ if (obj.chunkSize != null) {
323
+ w.uint32(16)
324
+ w.int32(obj.chunkSize)
325
+ }
326
+
327
+ if (opts.lengthDelimited !== false) {
328
+ w.ldelim()
329
+ }
330
+ }, (reader, length, opts = {}) => {
331
+ const obj: any = {
332
+ algorithm: EncryptionAlgorithm.AES_256_GCM
333
+ }
334
+
335
+ const end = length == null ? reader.len : reader.pos + length
336
+
337
+ while (reader.pos < end) {
338
+ const tag = reader.uint32()
339
+
340
+ switch (tag >>> 3) {
341
+ case 1: {
342
+ obj.algorithm = EncryptionAlgorithm.codec().decode(reader)
343
+ break
344
+ }
345
+ case 2: {
346
+ obj.chunkSize = reader.int32()
347
+ break
348
+ }
349
+ default: {
350
+ reader.skipType(tag & 7)
351
+ break
352
+ }
353
+ }
354
+ }
355
+
356
+ return obj
357
+ })
358
+ }
359
+
360
+ return _codec
361
+ }
362
+
363
+ export const encode = (obj: Partial<EncryptionOptions>): Uint8Array => {
364
+ return encodeMessage(obj, EncryptionOptions.codec())
365
+ }
366
+
367
+ export const decode = (buf: Uint8Array | Uint8ArrayList, opts?: DecodeOptions<EncryptionOptions>): EncryptionOptions => {
368
+ return decodeMessage(buf, EncryptionOptions.codec(), opts)
369
+ }
370
+ }
371
+
372
+ export enum CompressionAlgorithm {
373
+ ZLIB = 'ZLIB'
374
+ }
375
+
376
+ enum __CompressionAlgorithmValues {
377
+ ZLIB = 0
378
+ }
379
+
380
+ export namespace CompressionAlgorithm {
381
+ export const codec = (): Codec<CompressionAlgorithm> => {
382
+ return enumeration<CompressionAlgorithm>(__CompressionAlgorithmValues)
383
+ }
384
+ }
385
+ export enum EncryptionAlgorithm {
386
+ AES_256_GCM = 'AES_256_GCM'
387
+ }
388
+
389
+ enum __EncryptionAlgorithmValues {
390
+ AES_256_GCM = 0
391
+ }
392
+
393
+ export namespace EncryptionAlgorithm {
394
+ export const codec = (): Codec<EncryptionAlgorithm> => {
395
+ return enumeration<EncryptionAlgorithm>(__EncryptionAlgorithmValues)
396
+ }
397
+ }