@autonomys/auto-dag-data 0.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153) hide show
  1. package/LICENSE +18 -0
  2. package/README.md +167 -0
  3. package/dist/cid/index.d.ts +9 -0
  4. package/dist/cid/index.d.ts.map +1 -0
  5. package/dist/cid/index.js +20 -0
  6. package/dist/compression/index.d.ts +7 -0
  7. package/dist/compression/index.d.ts.map +1 -0
  8. package/dist/compression/index.js +106 -0
  9. package/dist/compression/types.d.ts +9 -0
  10. package/dist/compression/types.d.ts.map +1 -0
  11. package/dist/compression/types.js +1 -0
  12. package/dist/encryption/index.d.ts +8 -0
  13. package/dist/encryption/index.d.ts.map +1 -0
  14. package/dist/encryption/index.js +121 -0
  15. package/dist/encryption/types.d.ts +5 -0
  16. package/dist/encryption/types.d.ts.map +1 -0
  17. package/dist/encryption/types.js +1 -0
  18. package/dist/index.d.ts +6 -0
  19. package/dist/index.d.ts.map +1 -0
  20. package/dist/index.js +5 -0
  21. package/dist/ipld/blockstore/base.d.ts +9 -0
  22. package/dist/ipld/blockstore/base.d.ts.map +1 -0
  23. package/dist/ipld/blockstore/base.js +1 -0
  24. package/dist/ipld/blockstore/index.d.ts +3 -0
  25. package/dist/ipld/blockstore/index.d.ts.map +1 -0
  26. package/dist/ipld/blockstore/index.js +2 -0
  27. package/dist/ipld/blockstore/memory.d.ts +13 -0
  28. package/dist/ipld/blockstore/memory.d.ts.map +1 -0
  29. package/dist/ipld/blockstore/memory.js +57 -0
  30. package/dist/ipld/builders.d.ts +12 -0
  31. package/dist/ipld/builders.d.ts.map +1 -0
  32. package/dist/ipld/builders.js +13 -0
  33. package/dist/ipld/chunker.d.ts +30 -0
  34. package/dist/ipld/chunker.d.ts.map +1 -0
  35. package/dist/ipld/chunker.js +219 -0
  36. package/dist/ipld/index.d.ts +5 -0
  37. package/dist/ipld/index.d.ts.map +1 -0
  38. package/dist/ipld/index.js +4 -0
  39. package/dist/ipld/nodes.d.ts +15 -0
  40. package/dist/ipld/nodes.d.ts.map +1 -0
  41. package/dist/ipld/nodes.js +92 -0
  42. package/dist/ipld/utils.d.ts +8 -0
  43. package/dist/ipld/utils.d.ts.map +1 -0
  44. package/dist/ipld/utils.js +50 -0
  45. package/dist/metadata/index.d.ts +3 -0
  46. package/dist/metadata/index.d.ts.map +1 -0
  47. package/dist/metadata/index.js +2 -0
  48. package/dist/metadata/offchain/base.d.ts +4 -0
  49. package/dist/metadata/offchain/base.d.ts.map +1 -0
  50. package/dist/metadata/offchain/base.js +1 -0
  51. package/dist/metadata/offchain/file.d.ts +18 -0
  52. package/dist/metadata/offchain/file.d.ts.map +1 -0
  53. package/dist/metadata/offchain/file.js +16 -0
  54. package/dist/metadata/offchain/folder.d.ts +22 -0
  55. package/dist/metadata/offchain/folder.d.ts.map +1 -0
  56. package/dist/metadata/offchain/folder.js +27 -0
  57. package/dist/metadata/offchain/index.d.ts +4 -0
  58. package/dist/metadata/offchain/index.d.ts.map +1 -0
  59. package/dist/metadata/offchain/index.js +3 -0
  60. package/dist/metadata/onchain/index.d.ts +3 -0
  61. package/dist/metadata/onchain/index.d.ts.map +1 -0
  62. package/dist/metadata/onchain/index.js +2 -0
  63. package/dist/metadata/onchain/protobuf/OnchainMetadata.d.ts +69 -0
  64. package/dist/metadata/onchain/protobuf/OnchainMetadata.d.ts.map +1 -0
  65. package/dist/metadata/onchain/protobuf/OnchainMetadata.js +322 -0
  66. package/dist/metadata/onchain/utils.d.ts +4 -0
  67. package/dist/metadata/onchain/utils.d.ts.map +1 -0
  68. package/dist/metadata/onchain/utils.js +12 -0
  69. package/dist/src/cid/index.d.ts +9 -0
  70. package/dist/src/cid/index.d.ts.map +1 -0
  71. package/dist/src/cid/index.js +20 -0
  72. package/dist/src/index.d.ts +4 -0
  73. package/dist/src/index.d.ts.map +1 -0
  74. package/dist/src/index.js +3 -0
  75. package/dist/src/ipld/builders.d.ts +11 -0
  76. package/dist/src/ipld/builders.d.ts.map +1 -0
  77. package/dist/src/ipld/builders.js +13 -0
  78. package/dist/src/ipld/chunker.d.ts +22 -0
  79. package/dist/src/ipld/chunker.d.ts.map +1 -0
  80. package/dist/src/ipld/chunker.js +144 -0
  81. package/dist/src/ipld/index.d.ts +4 -0
  82. package/dist/src/ipld/index.d.ts.map +1 -0
  83. package/dist/src/ipld/index.js +3 -0
  84. package/dist/src/ipld/nodes.d.ts +15 -0
  85. package/dist/src/ipld/nodes.d.ts.map +1 -0
  86. package/dist/src/ipld/nodes.js +89 -0
  87. package/dist/src/ipld/utils.d.ts +5 -0
  88. package/dist/src/ipld/utils.d.ts.map +1 -0
  89. package/dist/src/ipld/utils.js +51 -0
  90. package/dist/src/metadata/index.d.ts +3 -0
  91. package/dist/src/metadata/index.d.ts.map +1 -0
  92. package/dist/src/metadata/index.js +2 -0
  93. package/dist/src/metadata/offchain/base.d.ts +4 -0
  94. package/dist/src/metadata/offchain/base.d.ts.map +1 -0
  95. package/dist/src/metadata/offchain/base.js +1 -0
  96. package/dist/src/metadata/offchain/file.d.ts +16 -0
  97. package/dist/src/metadata/offchain/file.d.ts.map +1 -0
  98. package/dist/src/metadata/offchain/file.js +19 -0
  99. package/dist/src/metadata/offchain/folder.d.ts +17 -0
  100. package/dist/src/metadata/offchain/folder.d.ts.map +1 -0
  101. package/dist/src/metadata/offchain/folder.js +10 -0
  102. package/dist/src/metadata/offchain/index.d.ts +4 -0
  103. package/dist/src/metadata/offchain/index.d.ts.map +1 -0
  104. package/dist/src/metadata/offchain/index.js +3 -0
  105. package/dist/src/metadata/onchain/index.d.ts +3 -0
  106. package/dist/src/metadata/onchain/index.d.ts.map +1 -0
  107. package/dist/src/metadata/onchain/index.js +2 -0
  108. package/dist/src/metadata/onchain/protobuf/OnchainMetadata.d.ts +28 -0
  109. package/dist/src/metadata/onchain/protobuf/OnchainMetadata.d.ts.map +1 -0
  110. package/dist/src/metadata/onchain/protobuf/OnchainMetadata.js +112 -0
  111. package/dist/src/metadata/onchain/utils.d.ts +4 -0
  112. package/dist/src/metadata/onchain/utils.d.ts.map +1 -0
  113. package/dist/src/metadata/onchain/utils.js +12 -0
  114. package/dist/utils/async.d.ts +3 -0
  115. package/dist/utils/async.d.ts.map +1 -0
  116. package/dist/utils/async.js +48 -0
  117. package/dist/utils/index.d.ts +2 -0
  118. package/dist/utils/index.d.ts.map +1 -0
  119. package/dist/utils/index.js +1 -0
  120. package/dist/utils/types.d.ts +2 -0
  121. package/dist/utils/types.d.ts.map +1 -0
  122. package/dist/utils/types.js +1 -0
  123. package/jest.config.ts +17 -0
  124. package/package.json +46 -0
  125. package/src/cid/index.ts +26 -0
  126. package/src/compression/index.ts +84 -0
  127. package/src/compression/types.ts +11 -0
  128. package/src/encryption/index.ts +99 -0
  129. package/src/encryption/types.ts +4 -0
  130. package/src/index.ts +5 -0
  131. package/src/ipld/builders.ts +40 -0
  132. package/src/ipld/chunker.ts +245 -0
  133. package/src/ipld/index.ts +4 -0
  134. package/src/ipld/nodes.ts +208 -0
  135. package/src/ipld/utils.ts +21 -0
  136. package/src/metadata/index.ts +2 -0
  137. package/src/metadata/offchain/base.ts +4 -0
  138. package/src/metadata/offchain/file.ts +41 -0
  139. package/src/metadata/offchain/folder.ts +54 -0
  140. package/src/metadata/offchain/index.ts +3 -0
  141. package/src/metadata/onchain/index.ts +2 -0
  142. package/src/metadata/onchain/protobuf/OnchainMetadata.proto +46 -0
  143. package/src/metadata/onchain/protobuf/OnchainMetadata.ts +397 -0
  144. package/src/metadata/onchain/utils.ts +15 -0
  145. package/src/utils/async.ts +20 -0
  146. package/src/utils/index.ts +1 -0
  147. package/src/utils/types.ts +1 -0
  148. package/tests/chunker.spec.ts +294 -0
  149. package/tests/cid.spec.ts +20 -0
  150. package/tests/compression.spec.ts +58 -0
  151. package/tests/encryption.spec.ts +67 -0
  152. package/tests/nodes.spec.ts +74 -0
  153. package/tsconfig.json +14 -0
@@ -0,0 +1,48 @@
1
/**
 * Re-chunk an async (or sync) iterable of Buffers into fixed-size chunks.
 *
 * Yields Buffers of exactly `chunkSize` bytes; input chunk boundaries are
 * ignored. Any trailing bytes shorter than `chunkSize` are yielded as a final
 * short chunk unless `ignoreLastChunk` is true.
 *
 * (Native async-generator form of the previously downleveled TS emit —
 * identical observable behavior.)
 */
export const asyncByChunk = async function* (iterable, chunkSize, ignoreLastChunk = false) {
  // Bytes carried over between input pieces until a full output chunk exists.
  let accumulated = Buffer.alloc(0)

  for await (const chunk of iterable) {
    accumulated = Buffer.concat([accumulated, chunk])
    // Emit every complete chunk currently held in the accumulator.
    while (accumulated.length >= chunkSize) {
      yield accumulated.subarray(0, chunkSize)
      accumulated = accumulated.subarray(chunkSize)
    }
  }

  // Flush the short tail unless the caller opted out.
  if (accumulated.length > 0 && !ignoreLastChunk) {
    yield accumulated
  }
}
@@ -0,0 +1,2 @@
1
+ export * from './async.js';
2
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAA"}
@@ -0,0 +1 @@
1
+ export * from './async.js';
@@ -0,0 +1,2 @@
1
+ export type PickPartial<T, K extends keyof T> = Pick<T, K> & Partial<T>;
2
+ //# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/utils/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,WAAW,CAAC,CAAC,EAAE,CAAC,SAAS,MAAM,CAAC,IAAI,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAA"}
@@ -0,0 +1 @@
1
+ export {};
package/jest.config.ts ADDED
@@ -0,0 +1,17 @@
1
// Jest configuration. CommonJS style (require/module.exports) so Jest can
// load it directly; builds on ts-jest's ESM preset because the package is
// published with "type": "module".
const { createDefaultEsmPreset } = require('ts-jest')

module.exports = {
  ...createDefaultEsmPreset(),
  // Treat TypeScript sources as native ES modules.
  extensionsToTreatAsEsm: ['.ts'],
  // Sources import with explicit '.js' extensions (ESM convention); map those
  // back to the extensionless specifier so Jest resolves the .ts files.
  moduleNameMapper: {
    '^(\\.{1,2}/.*)\\.js$': '$1',
  },
  transform: {
    // Compile .ts/.tsx through ts-jest with ESM output enabled.
    '^.+\\.tsx?$': [
      'ts-jest',
      {
        useESM: true,
      },
    ],
  },
}
package/package.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "name": "@autonomys/auto-dag-data",
3
+ "packageManager": "yarn@4.1.1",
4
+ "version": "0.8.2",
5
+ "license": "MIT",
6
+ "main": "dist/index.js",
7
+ "repository": {
8
+ "type": "git",
9
+ "url": "https://github.com/autonomys/auto-sdk"
10
+ },
11
+ "author": {
12
+ "name": "Autonomys",
13
+ "url": "https://www.autonomys.net"
14
+ },
15
+ "type": "module",
16
+ "scripts": {
17
+ "build": "tsc",
18
+ "pb": "yarn protons src/metadata/onchain/protobuf/OnchainMetadata.proto -o src/metadata/onchain/protobuf",
19
+ "clean": "rm -rf dist",
20
+ "format": "prettier --write \"src/**/*.ts\"",
21
+ "test": "yarn node --experimental-vm-modules $(yarn bin jest)"
22
+ },
23
+ "exports": {
24
+ ".": "./dist/index.js",
25
+ "./protobuf": "./dist/metadata/onchain/protobuf/OnchainMetadata.js"
26
+ },
27
+ "devDependencies": {
28
+ "@types/jest": "^29.5.13",
29
+ "blockstore-core": "^5.0.2",
30
+ "interface-store": "^6.0.2",
31
+ "jest": "^29.7.0",
32
+ "protobufjs": "^7.4.0",
33
+ "ts-jest": "^29.2.5",
34
+ "typescript": "^5.6.2"
35
+ },
36
+ "dependencies": {
37
+ "@ipld/dag-pb": "^4.1.2",
38
+ "blake3": "1.1.0",
39
+ "fflate": "^0.8.2",
40
+ "multiformats": "^13.2.2",
41
+ "protobufjs": "^7.4.0",
42
+ "protons": "^7.6.0",
43
+ "protons-runtime": "^5.5.0"
44
+ },
45
+ "gitHead": "027bc98a4e2191c6ce374b3db807a7ae67b7dd10"
46
+ }
@@ -0,0 +1,26 @@
1
+ import { hash } from 'blake3'
2
+ import * as base32 from 'multiformats/bases/base32'
3
+ import { CID } from 'multiformats/cid'
4
+ import * as raw from 'multiformats/codecs/raw'
5
+ import { create } from 'multiformats/hashes/digest'
6
+ import { encodeNode, PBNode } from '../ipld/utils.js'
7
+
8
+ export const BLAKE3_CODE = 0x1f
9
+
10
+ export const cidOfNode = (node: PBNode) => {
11
+ return cidFromBlakeHash(hash(encodeNode(node)))
12
+ }
13
+
14
+ export const cidToString = (cid: CID) => {
15
+ return cid.toString(base32.base32)
16
+ }
17
+
18
+ export const stringToCid = (str: string) => {
19
+ return CID.parse(str, base32.base32)
20
+ }
21
+
22
+ export const cidFromBlakeHash = (hash: Buffer) => {
23
+ return CID.create(1, raw.code, create(BLAKE3_CODE, hash))
24
+ }
25
+
26
+ export const blake3HashFromCid = (cid: CID) => cid.multihash.digest
@@ -0,0 +1,84 @@
1
+ import { Unzlib, Zlib } from 'fflate'
2
+ import type { AwaitIterable } from 'interface-store'
3
+ import { CompressionAlgorithm } from '../metadata/index.js'
4
+ import { asyncByChunk } from '../utils/async.js'
5
+ import type { PickPartial } from '../utils/types.js'
6
+ import { CompressionOptions } from './types.js'
7
+
8
+ export const COMPRESSION_CHUNK_SIZE = 1024 * 1024
9
+
10
+ export async function* compressFile(
11
+ file: AwaitIterable<Buffer>,
12
+ {
13
+ level = 9,
14
+ chunkSize = COMPRESSION_CHUNK_SIZE,
15
+ algorithm,
16
+ }: PickPartial<CompressionOptions, 'algorithm'>,
17
+ ): AsyncIterable<Buffer> {
18
+ if (algorithm !== CompressionAlgorithm.ZLIB) {
19
+ throw new Error('Unsupported compression algorithm')
20
+ }
21
+ if (level < 0 || level > 9) {
22
+ throw new Error('Invalid compression level')
23
+ }
24
+ if (chunkSize <= 0) {
25
+ throw new Error('Invalid chunk size')
26
+ }
27
+
28
+ const zlib = new Zlib({ level })
29
+ const compressedChunks: Buffer[] = []
30
+
31
+ zlib.ondata = (chunk) => {
32
+ compressedChunks.push(Buffer.from(chunk))
33
+ }
34
+
35
+ for await (const chunk of asyncByChunk(file, chunkSize)) {
36
+ zlib.push(chunk, false)
37
+ while (compressedChunks.length > 0) {
38
+ yield compressedChunks.shift()!
39
+ }
40
+ }
41
+
42
+ zlib.push(new Uint8Array(), true)
43
+ while (compressedChunks.length > 0) {
44
+ yield compressedChunks.shift()!
45
+ }
46
+ }
47
+
48
+ export async function* decompressFile(
49
+ compressedFile: AwaitIterable<Buffer>,
50
+ {
51
+ chunkSize = COMPRESSION_CHUNK_SIZE,
52
+ algorithm = CompressionAlgorithm.ZLIB,
53
+ level = 9,
54
+ }: PickPartial<CompressionOptions, 'algorithm'>,
55
+ ): AsyncIterable<Buffer> {
56
+ if (algorithm !== CompressionAlgorithm.ZLIB) {
57
+ throw new Error('Unsupported compression algorithm')
58
+ }
59
+ if (chunkSize <= 0) {
60
+ throw new Error('Invalid chunk size')
61
+ }
62
+ if (level < 0 || level > 9) {
63
+ throw new Error('Invalid compression level')
64
+ }
65
+
66
+ const unzlib = new Unzlib()
67
+ const decompressedChunks: Buffer[] = []
68
+
69
+ unzlib.ondata = (chunk) => {
70
+ decompressedChunks.push(Buffer.from(chunk))
71
+ }
72
+
73
+ for await (const chunk of asyncByChunk(compressedFile, chunkSize)) {
74
+ unzlib.push(chunk, false)
75
+ while (decompressedChunks.length > 0) {
76
+ yield decompressedChunks.shift()!
77
+ }
78
+ }
79
+
80
+ unzlib.push(new Uint8Array(), true)
81
+ while (decompressedChunks.length > 0) {
82
+ yield decompressedChunks.shift()!
83
+ }
84
+ }
@@ -0,0 +1,11 @@
1
import { CompressionAlgorithm } from '../metadata/index.js'

// Zlib compression level: 0 = store only, 9 = maximum compression.
export type CompressionLevel = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9

// Options accepted by compressFile/decompressFile when using ZLIB.
export type ZLibOptions = {
  algorithm: CompressionAlgorithm.ZLIB
  // Compression level forwarded to fflate's Zlib.
  level: CompressionLevel
  // Size in bytes of the slices read from the input stream.
  chunkSize: number
}

// ZLIB is currently the only supported algorithm.
export type CompressionOptions = ZLibOptions
@@ -0,0 +1,99 @@
1
+ import { Crypto } from '@peculiar/webcrypto'
2
+ import { randomBytes } from 'crypto'
3
+ import { EncryptionAlgorithm, EncryptionOptions } from '../metadata/index.js'
4
+ import { asyncByChunk } from '../utils/async.js'
5
+ import type { PickPartial } from '../utils/types.js'
6
+ import { PasswordGenerationOptions } from './types.js'
7
+
8
// WebCrypto implementation instantiated explicitly (works in Node without
// depending on a globalThis.crypto polyfill).
const crypto = new Crypto()

// Plaintext chunk size used when encrypting (1 MiB).
export const ENCRYPTING_CHUNK_SIZE = 1024 * 1024
// AES-GCM initialization vector length in bytes, prepended to each chunk.
const IV_SIZE = 16
// AES-GCM authentication tag length in bytes, appended by the cipher.
const TAG_SIZE = 16
// On-the-wire size of one full encrypted chunk: plaintext + IV prefix + GCM tag.
const ENCRYPTED_CHUNK_SIZE = ENCRYPTING_CHUNK_SIZE + IV_SIZE + TAG_SIZE
// Random salt length (bytes) emitted at the head of the encrypted stream.
const SALT_SIZE = 32
15
+
16
+ export const getKeyFromPassword = async ({ password, salt }: PasswordGenerationOptions) => {
17
+ const encoder = new TextEncoder()
18
+ const saltHash =
19
+ typeof salt === 'string' ? await crypto.subtle.digest('SHA-256', encoder.encode(salt)) : salt
20
+
21
+ const keyMaterial = await crypto.subtle.importKey(
22
+ 'raw',
23
+ encoder.encode(password),
24
+ 'PBKDF2',
25
+ false,
26
+ ['deriveBits', 'deriveKey'],
27
+ )
28
+
29
+ return crypto.subtle.deriveKey(
30
+ {
31
+ name: 'PBKDF2',
32
+ salt: saltHash,
33
+ iterations: 100000,
34
+ hash: 'SHA-256',
35
+ },
36
+ keyMaterial,
37
+ { name: 'AES-GCM', length: 256 },
38
+ false,
39
+ ['encrypt', 'decrypt'],
40
+ )
41
+ }
42
+
43
+ export const encryptFile = async function* (
44
+ file: AsyncIterable<Buffer>,
45
+ password: string,
46
+ { chunkSize = ENCRYPTING_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
47
+ ): AsyncIterable<Buffer> {
48
+ if (algorithm !== EncryptionAlgorithm.AES_256_GCM) {
49
+ throw new Error('Unsupported encryption algorithm')
50
+ }
51
+
52
+ const salt = randomBytes(SALT_SIZE)
53
+ const key = await getKeyFromPassword({ password, salt })
54
+
55
+ yield salt
56
+
57
+ for await (const chunk of asyncByChunk(file, chunkSize)) {
58
+ const iv = crypto.getRandomValues(new Uint8Array(IV_SIZE))
59
+ const encrypted = await crypto.subtle.encrypt({ name: 'AES-GCM', iv }, key, chunk)
60
+ yield Buffer.concat([Buffer.from(iv), Buffer.from(encrypted)])
61
+ }
62
+ }
63
+
64
+ export const decryptFile = async function* (
65
+ file: AsyncIterable<Buffer>,
66
+ password: string,
67
+ { chunkSize = ENCRYPTED_CHUNK_SIZE, algorithm }: PickPartial<EncryptionOptions, 'algorithm'>,
68
+ ): AsyncIterable<Buffer> {
69
+ if (algorithm !== EncryptionAlgorithm.AES_256_GCM) {
70
+ throw new Error('Unsupported encryption algorithm')
71
+ }
72
+
73
+ let key: CryptoKey | undefined = undefined
74
+ let chunks = Buffer.alloc(0)
75
+ for await (const chunk of file) {
76
+ chunks = Buffer.concat([chunks, chunk])
77
+
78
+ if (chunks.length >= SALT_SIZE && !key) {
79
+ const salt = chunks.subarray(0, 32)
80
+ key = await getKeyFromPassword({ password, salt })
81
+ chunks = chunks.subarray(SALT_SIZE)
82
+ }
83
+
84
+ while (key && chunks.length >= chunkSize) {
85
+ const iv = chunks.subarray(0, IV_SIZE)
86
+ const encryptedChunk = chunk.subarray(IV_SIZE, chunkSize)
87
+ const decrypted = await crypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, encryptedChunk)
88
+ chunks = chunks.subarray(chunkSize)
89
+ yield Buffer.from(decrypted)
90
+ }
91
+ }
92
+
93
+ if (key && chunks.length > 0) {
94
+ const iv = chunks.subarray(0, IV_SIZE)
95
+ const encryptedChunk = chunks.subarray(IV_SIZE, chunkSize)
96
+ const decrypted = await crypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, encryptedChunk)
97
+ yield Buffer.from(decrypted)
98
+ }
99
+ }
@@ -0,0 +1,4 @@
1
// Inputs for deriving an encryption key from a user password.
export type PasswordGenerationOptions = {
  // The user-supplied password.
  password: string
  // PBKDF2 salt: string salts are SHA-256 hashed first, binary salts used as-is.
  salt: string | Uint8Array
}
package/src/index.ts ADDED
@@ -0,0 +1,5 @@
1
+ export * from './cid/index.js'
2
+ export * from './compression/index.js'
3
+ export * from './encryption/index.js'
4
+ export * from './ipld/index.js'
5
+ export * from './metadata/index.js'
@@ -0,0 +1,40 @@
1
+ import { CID } from 'multiformats/cid'
2
+ import { FileUploadOptions } from '../metadata/index.js'
3
+ import { PBNode } from './index.js'
4
+ import {
5
+ createChunkedFileIpldNode,
6
+ createChunkedMetadataIpldNode,
7
+ createFileChunkIpldNode,
8
+ createFileInlinkIpldNode,
9
+ createMetadataChunkIpldNode,
10
+ createMetadataInlinkIpldNode,
11
+ createSingleFileIpldNode,
12
+ createSingleMetadataIpldNode,
13
+ } from './nodes.js'
14
+
15
/**
 * Factory set for building the IPLD nodes of one DAG flavor (file content vs
 * metadata). The chunker pipeline picks the right builder per tree position.
 */
export interface Builders {
  // Intermediate node linking up to maxLinkPerNode children at linkDepth.
  inlink: (links: CID[], size: number, linkDepth: number, chunkSize: number) => PBNode
  // Leaf node holding one raw data chunk.
  chunk: (data: Buffer) => PBNode
  // Root node of a multi-chunk DAG.
  root: (
    links: CID[],
    size: number,
    linkDepth: number,
    name?: string,
    maxNodeSize?: number,
    fileUploadOptions?: FileUploadOptions,
  ) => PBNode
  // Root node for content that fits entirely in a single chunk.
  single: (data: Buffer, filename?: string, fileUploadOptions?: FileUploadOptions) => PBNode
}
// Builders for metadata DAGs.
export const metadataBuilders: Builders = {
  inlink: createMetadataInlinkIpldNode,
  chunk: createMetadataChunkIpldNode,
  root: createChunkedMetadataIpldNode,
  single: createSingleMetadataIpldNode,
}

// Builders for file-content DAGs.
export const fileBuilders: Builders = {
  inlink: createFileInlinkIpldNode,
  chunk: createFileChunkIpldNode,
  root: createChunkedFileIpldNode,
  single: createSingleFileIpldNode,
}
@@ -0,0 +1,245 @@
1
+ import type { BaseBlockstore } from 'blockstore-core'
2
+ import type { AwaitIterable } from 'interface-store'
3
+ import { CID } from 'multiformats'
4
+ import { cidOfNode } from '../cid/index.js'
5
+ import { decodeIPLDNodeData, FileUploadOptions, OffchainMetadata } from '../metadata/index.js'
6
+ import { Builders, fileBuilders, metadataBuilders } from './builders.js'
7
+ import { createFolderInlinkIpldNode, createFolderIpldNode } from './nodes.js'
8
+ import { chunkBuffer, encodeNode, PBNode } from './utils.js'
9
+
10
// Structural limits for DAG construction.
type ChunkerLimits = {
  // Maximum bytes of raw data per leaf chunk node.
  maxChunkSize: number
  // Maximum child links per intermediate node.
  maxLinkPerNode: number
}

// Limits plus optional encryption/compression metadata recorded on the root.
type ChunkerOptions = ChunkerLimits & FileUploadOptions

// Default leaf chunk size: 64 KiB.
export const DEFAULT_MAX_CHUNK_SIZE = 64 * 1024

// Rough upper bound for one encoded link; keeps link nodes near maxChunkSize.
const ESTIMATED_LINK_SIZE_IN_BYTES = 64
export const DEFAULT_MAX_LINK_PER_NODE = DEFAULT_MAX_CHUNK_SIZE / ESTIMATED_LINK_SIZE_IN_BYTES
21
+
22
+ export const processFileToIPLDFormat = (
23
+ blockstore: BaseBlockstore,
24
+ file: AwaitIterable<Buffer>,
25
+ totalSize: number,
26
+ filename?: string,
27
+ {
28
+ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
29
+ maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
30
+ encryption = undefined,
31
+ compression = undefined,
32
+ }: Partial<ChunkerOptions> = {
33
+ maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
34
+ maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
35
+ encryption: undefined,
36
+ compression: undefined,
37
+ },
38
+ ): Promise<CID> => {
39
+ return processBufferToIPLDFormat(blockstore, file, filename, totalSize, fileBuilders, {
40
+ maxChunkSize,
41
+ maxLinkPerNode,
42
+ encryption,
43
+ compression,
44
+ })
45
+ }
46
+
47
/**
 * Serialize offchain metadata to JSON and store it as an IPLD DAG.
 * The DAG is named `<metadata.name>.metadata.json`.
 *
 * @returns CID of the metadata DAG root
 */
export const processMetadataToIPLDFormat = async (
  blockstore: BaseBlockstore,
  metadata: OffchainMetadata,
  limits: { maxChunkSize: number; maxLinkPerNode: number } = {
    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
    maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
  },
): Promise<CID> => {
  const buffer = Buffer.from(JSON.stringify(metadata))
  const name = `${metadata.name}.metadata.json`
  // Wrap the single buffer in an async generator to reuse the stream pipeline.
  return processBufferToIPLDFormat(
    blockstore,
    (async function* () {
      yield buffer
    })(),
    name,
    buffer.byteLength,
    metadataBuilders,
    limits,
  )
}
68
+
69
/**
 * Shared pipeline: split a buffer stream into leaf chunk nodes, store each in
 * the blockstore, then delegate tree assembly to
 * processBufferToIPLDFormatFromChunks.
 */
const processBufferToIPLDFormat = async (
  blockstore: BaseBlockstore,
  buffer: AwaitIterable<Buffer>,
  filename: string | undefined,
  totalSize: number,
  builders: Builders,
  {
    maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
    maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
    encryption = undefined,
    compression = undefined,
  }: ChunkerOptions = {
    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
    maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
    encryption: undefined,
    compression: undefined,
  },
): Promise<CID> => {
  const bufferChunks = chunkBuffer(buffer, { maxChunkSize: maxChunkSize })

  // Store one leaf node per chunk, keeping the CIDs in input order.
  let CIDs: CID[] = []
  for await (const chunk of bufferChunks) {
    const node = builders.chunk(chunk)
    const cid = cidOfNode(node)
    await blockstore.put(cid, encodeNode(node))
    CIDs.push(cid)
  }

  return processBufferToIPLDFormatFromChunks(blockstore, CIDs, filename, totalSize, builders, {
    maxLinkPerNode,
    maxChunkSize,
    encryption,
    compression,
  })
}
104
+
105
+ export const processBufferToIPLDFormatFromChunks = async (
106
+ blockstore: BaseBlockstore,
107
+ chunks: AwaitIterable<CID>,
108
+ filename: string | undefined,
109
+ totalSize: number,
110
+ builders: Builders,
111
+ {
112
+ maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
113
+ maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
114
+ encryption = undefined,
115
+ compression = undefined,
116
+ }: Partial<ChunkerOptions> = {
117
+ maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
118
+ maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
119
+ encryption: undefined,
120
+ compression: undefined,
121
+ },
122
+ ): Promise<CID> => {
123
+ let chunkCount = 0
124
+ let CIDs: CID[] = []
125
+ for await (const chunk of chunks) {
126
+ CIDs.push(chunk)
127
+ chunkCount++
128
+ }
129
+
130
+ if (CIDs.length === 1) {
131
+ const nodeBytes = await blockstore.get(CIDs[0])
132
+ await blockstore.delete(CIDs[0])
133
+ const data = decodeIPLDNodeData(nodeBytes)
134
+ const singleNode = builders.single(Buffer.from(data.data!), filename, {
135
+ compression,
136
+ encryption,
137
+ })
138
+ await blockstore.put(cidOfNode(singleNode), encodeNode(singleNode))
139
+ const headCID = cidOfNode(singleNode)
140
+
141
+ return headCID
142
+ }
143
+
144
+ let depth = 1
145
+ while (CIDs.length > maxLinkPerNode) {
146
+ const newCIDs: CID[] = []
147
+ for (let i = 0; i < CIDs.length; i += maxLinkPerNode) {
148
+ const chunk = CIDs.slice(i, i + maxLinkPerNode)
149
+
150
+ const node = builders.inlink(chunk, chunk.length, depth, maxChunkSize)
151
+ const cid = cidOfNode(node)
152
+ await blockstore.put(cid, encodeNode(node))
153
+ newCIDs.push(cid)
154
+ }
155
+ depth++
156
+ CIDs = newCIDs
157
+ }
158
+ const head = builders.root(CIDs, totalSize, depth, filename, maxChunkSize, {
159
+ compression,
160
+ encryption,
161
+ })
162
+ const headCID = cidOfNode(head)
163
+ await blockstore.put(headCID, encodeNode(head))
164
+
165
+ return headCID
166
+ }
167
+
168
/**
 * Build a folder DAG from child CIDs: children are grouped into folder-inlink
 * nodes, maxLinkPerNode at a time, until they fit under one folder root.
 *
 * @param children - CIDs of the folder's files/subfolders
 * @param name - folder name recorded on the root node
 * @param size - total folder size in bytes, recorded on the root node
 * @returns CID of the folder root node
 */
export const processFolderToIPLDFormat = async (
  blockstore: BaseBlockstore,
  children: CID[],
  name: string,
  size: number,
  {
    maxLinkPerNode = DEFAULT_MAX_LINK_PER_NODE,
    maxChunkSize = DEFAULT_MAX_CHUNK_SIZE,
    compression = undefined,
    encryption = undefined,
  }: Partial<ChunkerOptions> = {
    maxLinkPerNode: DEFAULT_MAX_LINK_PER_NODE,
    maxChunkSize: DEFAULT_MAX_CHUNK_SIZE,
    compression: undefined,
    encryption: undefined,
  },
): Promise<CID> => {
  let cids = children
  let depth = 0
  // Collapse one tree level per iteration until the links fit in one node.
  while (cids.length > maxLinkPerNode) {
    const newCIDs: CID[] = []
    for (let i = 0; i < cids.length; i += maxLinkPerNode) {
      const chunk = cids.slice(i, i + maxLinkPerNode)
      const node = createFolderInlinkIpldNode(chunk, depth)
      const cid = cidOfNode(node)
      await blockstore.put(cid, encodeNode(node))
      newCIDs.push(cid)
    }
    cids = newCIDs
    depth++
  }

  const node = createFolderIpldNode(cids, name, depth, size, maxChunkSize, {
    compression,
    encryption,
  })
  const cid = cidOfNode(node)
  await blockstore.put(cid, encodeNode(node))

  return cid
}
209
+
210
/**
 * Store full chunks from a stream as IPLD leaf nodes; stop at the first chunk
 * smaller than maxChunkSize and return it to the caller unstored, so it can
 * be combined with later data (supports streaming/resumable uploads).
 *
 * @returns the trailing partial chunk, or an empty buffer if every chunk was full
 */
export const processChunksToIPLDFormat = async (
  blockstore: BaseBlockstore,
  chunks: AwaitIterable<Buffer>,
  builders: Builders,
  { maxChunkSize = DEFAULT_MAX_CHUNK_SIZE }: { maxChunkSize?: number },
): Promise<Buffer> => {
  const bufferChunks = chunkBuffer(chunks, { maxChunkSize, ignoreLastChunk: false })

  for await (const chunk of bufferChunks) {
    // A short chunk can only be the last one; return it without storing.
    if (chunk.byteLength < maxChunkSize) {
      return chunk
    }

    const node = builders.chunk(chunk)
    const cid = cidOfNode(node)
    await blockstore.put(cid, encodeNode(node))
  }

  return Buffer.alloc(0)
}
234
+
235
+ export const ensureNodeMaxSize = (
236
+ node: PBNode,
237
+ maxSize: number = DEFAULT_MAX_CHUNK_SIZE,
238
+ ): PBNode => {
239
+ const nodeSize = encodeNode(node).byteLength
240
+ if (nodeSize > maxSize) {
241
+ throw new Error(`Node is too large to fit in a single chunk: ${nodeSize} > ${maxSize}`)
242
+ }
243
+
244
+ return node
245
+ }
@@ -0,0 +1,4 @@
1
+ export * from './builders.js'
2
+ export * from './chunker.js'
3
+ export * from './nodes.js'
4
+ export * from './utils.js'