@snaha/swarm-id 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +431 -0
- package/dist/chunk/bmt.d.ts +17 -0
- package/dist/chunk/bmt.d.ts.map +1 -0
- package/dist/chunk/cac.d.ts +18 -0
- package/dist/chunk/cac.d.ts.map +1 -0
- package/dist/chunk/constants.d.ts +10 -0
- package/dist/chunk/constants.d.ts.map +1 -0
- package/dist/chunk/encrypted-cac.d.ts +48 -0
- package/dist/chunk/encrypted-cac.d.ts.map +1 -0
- package/dist/chunk/encryption.d.ts +86 -0
- package/dist/chunk/encryption.d.ts.map +1 -0
- package/dist/chunk/index.d.ts +6 -0
- package/dist/chunk/index.d.ts.map +1 -0
- package/dist/index.d.ts +46 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/proxy/act/act.d.ts +78 -0
- package/dist/proxy/act/act.d.ts.map +1 -0
- package/dist/proxy/act/crypto.d.ts +44 -0
- package/dist/proxy/act/crypto.d.ts.map +1 -0
- package/dist/proxy/act/grantee-list.d.ts +82 -0
- package/dist/proxy/act/grantee-list.d.ts.map +1 -0
- package/dist/proxy/act/history.d.ts +183 -0
- package/dist/proxy/act/history.d.ts.map +1 -0
- package/dist/proxy/act/index.d.ts +104 -0
- package/dist/proxy/act/index.d.ts.map +1 -0
- package/dist/proxy/chunking-encrypted.d.ts +14 -0
- package/dist/proxy/chunking-encrypted.d.ts.map +1 -0
- package/dist/proxy/chunking.d.ts +15 -0
- package/dist/proxy/chunking.d.ts.map +1 -0
- package/dist/proxy/download-data.d.ts +16 -0
- package/dist/proxy/download-data.d.ts.map +1 -0
- package/dist/proxy/feed-manifest.d.ts +62 -0
- package/dist/proxy/feed-manifest.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/async-finder.d.ts +77 -0
- package/dist/proxy/feeds/epochs/async-finder.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/epoch.d.ts +88 -0
- package/dist/proxy/feeds/epochs/epoch.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/finder.d.ts +67 -0
- package/dist/proxy/feeds/epochs/finder.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/index.d.ts +35 -0
- package/dist/proxy/feeds/epochs/index.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/test-utils.d.ts +93 -0
- package/dist/proxy/feeds/epochs/test-utils.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/types.d.ts +109 -0
- package/dist/proxy/feeds/epochs/types.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/updater.d.ts +68 -0
- package/dist/proxy/feeds/epochs/updater.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/utils.d.ts +22 -0
- package/dist/proxy/feeds/epochs/utils.d.ts.map +1 -0
- package/dist/proxy/feeds/index.d.ts +5 -0
- package/dist/proxy/feeds/index.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/async-finder.d.ts +14 -0
- package/dist/proxy/feeds/sequence/async-finder.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/finder.d.ts +17 -0
- package/dist/proxy/feeds/sequence/finder.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/index.d.ts +23 -0
- package/dist/proxy/feeds/sequence/index.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/types.d.ts +80 -0
- package/dist/proxy/feeds/sequence/types.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/updater.d.ts +26 -0
- package/dist/proxy/feeds/sequence/updater.d.ts.map +1 -0
- package/dist/proxy/index.d.ts +6 -0
- package/dist/proxy/index.d.ts.map +1 -0
- package/dist/proxy/manifest-builder.d.ts +183 -0
- package/dist/proxy/manifest-builder.d.ts.map +1 -0
- package/dist/proxy/mantaray-encrypted.d.ts +27 -0
- package/dist/proxy/mantaray-encrypted.d.ts.map +1 -0
- package/dist/proxy/mantaray.d.ts +26 -0
- package/dist/proxy/mantaray.d.ts.map +1 -0
- package/dist/proxy/types.d.ts +29 -0
- package/dist/proxy/types.d.ts.map +1 -0
- package/dist/proxy/upload-data.d.ts +17 -0
- package/dist/proxy/upload-data.d.ts.map +1 -0
- package/dist/proxy/upload-encrypted-data.d.ts +103 -0
- package/dist/proxy/upload-encrypted-data.d.ts.map +1 -0
- package/dist/schemas.d.ts +240 -0
- package/dist/schemas.d.ts.map +1 -0
- package/dist/storage/debounced-uploader.d.ts +62 -0
- package/dist/storage/debounced-uploader.d.ts.map +1 -0
- package/dist/storage/utilization-store.d.ts +108 -0
- package/dist/storage/utilization-store.d.ts.map +1 -0
- package/dist/swarm-id-auth.d.ts +74 -0
- package/dist/swarm-id-auth.d.ts.map +1 -0
- package/dist/swarm-id-auth.js +2 -0
- package/dist/swarm-id-auth.js.map +1 -0
- package/dist/swarm-id-client.d.ts +878 -0
- package/dist/swarm-id-client.d.ts.map +1 -0
- package/dist/swarm-id-client.js +2 -0
- package/dist/swarm-id-client.js.map +1 -0
- package/dist/swarm-id-proxy.d.ts +236 -0
- package/dist/swarm-id-proxy.d.ts.map +1 -0
- package/dist/swarm-id-proxy.js +2 -0
- package/dist/swarm-id-proxy.js.map +1 -0
- package/dist/swarm-id.esm.js +2 -0
- package/dist/swarm-id.esm.js.map +1 -0
- package/dist/swarm-id.umd.js +2 -0
- package/dist/swarm-id.umd.js.map +1 -0
- package/dist/sync/index.d.ts +9 -0
- package/dist/sync/index.d.ts.map +1 -0
- package/dist/sync/key-derivation.d.ts +25 -0
- package/dist/sync/key-derivation.d.ts.map +1 -0
- package/dist/sync/restore-account.d.ts +28 -0
- package/dist/sync/restore-account.d.ts.map +1 -0
- package/dist/sync/serialization.d.ts +16 -0
- package/dist/sync/serialization.d.ts.map +1 -0
- package/dist/sync/store-interfaces.d.ts +53 -0
- package/dist/sync/store-interfaces.d.ts.map +1 -0
- package/dist/sync/sync-account.d.ts +44 -0
- package/dist/sync/sync-account.d.ts.map +1 -0
- package/dist/sync/types.d.ts +13 -0
- package/dist/sync/types.d.ts.map +1 -0
- package/dist/test-fixtures.d.ts +17 -0
- package/dist/test-fixtures.d.ts.map +1 -0
- package/dist/types-BD_VkNn0.js +2 -0
- package/dist/types-BD_VkNn0.js.map +1 -0
- package/dist/types-lJCaT-50.js +2 -0
- package/dist/types-lJCaT-50.js.map +1 -0
- package/dist/types.d.ts +2157 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/utils/account-payload.d.ts +94 -0
- package/dist/utils/account-payload.d.ts.map +1 -0
- package/dist/utils/account-state-snapshot.d.ts +38 -0
- package/dist/utils/account-state-snapshot.d.ts.map +1 -0
- package/dist/utils/backup-encryption.d.ts +127 -0
- package/dist/utils/backup-encryption.d.ts.map +1 -0
- package/dist/utils/batch-utilization.d.ts +432 -0
- package/dist/utils/batch-utilization.d.ts.map +1 -0
- package/dist/utils/constants.d.ts +11 -0
- package/dist/utils/constants.d.ts.map +1 -0
- package/dist/utils/hex.d.ts +17 -0
- package/dist/utils/hex.d.ts.map +1 -0
- package/dist/utils/key-derivation.d.ts +92 -0
- package/dist/utils/key-derivation.d.ts.map +1 -0
- package/dist/utils/storage-managers.d.ts +65 -0
- package/dist/utils/storage-managers.d.ts.map +1 -0
- package/dist/utils/swarm-id-export.d.ts +24 -0
- package/dist/utils/swarm-id-export.d.ts.map +1 -0
- package/dist/utils/ttl.d.ts +49 -0
- package/dist/utils/ttl.d.ts.map +1 -0
- package/dist/utils/url.d.ts +41 -0
- package/dist/utils/url.d.ts.map +1 -0
- package/dist/utils/versioned-storage.d.ts +131 -0
- package/dist/utils/versioned-storage.d.ts.map +1 -0
- package/package.json +78 -0
- package/src/chunk/bmt.test.ts +217 -0
- package/src/chunk/bmt.ts +57 -0
- package/src/chunk/cac.test.ts +214 -0
- package/src/chunk/cac.ts +65 -0
- package/src/chunk/constants.ts +18 -0
- package/src/chunk/encrypted-cac.test.ts +385 -0
- package/src/chunk/encrypted-cac.ts +131 -0
- package/src/chunk/encryption.test.ts +352 -0
- package/src/chunk/encryption.ts +300 -0
- package/src/chunk/index.ts +47 -0
- package/src/index.ts +430 -0
- package/src/proxy/act/act.test.ts +278 -0
- package/src/proxy/act/act.ts +158 -0
- package/src/proxy/act/bee-compat.test.ts +948 -0
- package/src/proxy/act/crypto.test.ts +436 -0
- package/src/proxy/act/crypto.ts +376 -0
- package/src/proxy/act/grantee-list.test.ts +393 -0
- package/src/proxy/act/grantee-list.ts +239 -0
- package/src/proxy/act/history.test.ts +360 -0
- package/src/proxy/act/history.ts +413 -0
- package/src/proxy/act/index.test.ts +748 -0
- package/src/proxy/act/index.ts +853 -0
- package/src/proxy/chunking-encrypted.ts +95 -0
- package/src/proxy/chunking.ts +65 -0
- package/src/proxy/download-data.ts +448 -0
- package/src/proxy/feed-manifest.ts +174 -0
- package/src/proxy/feeds/epochs/async-finder.ts +372 -0
- package/src/proxy/feeds/epochs/epoch.test.ts +249 -0
- package/src/proxy/feeds/epochs/epoch.ts +181 -0
- package/src/proxy/feeds/epochs/finder.ts +282 -0
- package/src/proxy/feeds/epochs/index.ts +73 -0
- package/src/proxy/feeds/epochs/integration.test.ts +1336 -0
- package/src/proxy/feeds/epochs/test-utils.ts +274 -0
- package/src/proxy/feeds/epochs/types.ts +128 -0
- package/src/proxy/feeds/epochs/updater.ts +192 -0
- package/src/proxy/feeds/epochs/utils.ts +62 -0
- package/src/proxy/feeds/index.ts +5 -0
- package/src/proxy/feeds/sequence/async-finder.ts +31 -0
- package/src/proxy/feeds/sequence/finder.ts +73 -0
- package/src/proxy/feeds/sequence/index.ts +54 -0
- package/src/proxy/feeds/sequence/integration.test.ts +966 -0
- package/src/proxy/feeds/sequence/types.ts +103 -0
- package/src/proxy/feeds/sequence/updater.ts +71 -0
- package/src/proxy/index.ts +5 -0
- package/src/proxy/manifest-builder.test.ts +427 -0
- package/src/proxy/manifest-builder.ts +679 -0
- package/src/proxy/mantaray-encrypted.ts +78 -0
- package/src/proxy/mantaray.ts +104 -0
- package/src/proxy/types.ts +32 -0
- package/src/proxy/upload-data.ts +189 -0
- package/src/proxy/upload-encrypted-data.ts +658 -0
- package/src/schemas.ts +299 -0
- package/src/storage/debounced-uploader.ts +192 -0
- package/src/storage/utilization-store.ts +397 -0
- package/src/swarm-id-client.test.ts +99 -0
- package/src/swarm-id-client.ts +3095 -0
- package/src/swarm-id-proxy.ts +3891 -0
- package/src/sync/index.ts +28 -0
- package/src/sync/restore-account.ts +90 -0
- package/src/sync/serialization.ts +39 -0
- package/src/sync/store-interfaces.ts +62 -0
- package/src/sync/sync-account.test.ts +302 -0
- package/src/sync/sync-account.ts +396 -0
- package/src/sync/types.ts +11 -0
- package/src/test-fixtures.ts +109 -0
- package/src/types.ts +1651 -0
- package/src/utils/account-state-snapshot.test.ts +595 -0
- package/src/utils/account-state-snapshot.ts +94 -0
- package/src/utils/backup-encryption.test.ts +442 -0
- package/src/utils/backup-encryption.ts +352 -0
- package/src/utils/batch-utilization.ts +1309 -0
- package/src/utils/constants.ts +20 -0
- package/src/utils/hex.ts +27 -0
- package/src/utils/key-derivation.ts +197 -0
- package/src/utils/storage-managers.ts +365 -0
- package/src/utils/ttl.ts +129 -0
- package/src/utils/url.test.ts +136 -0
- package/src/utils/url.ts +71 -0
- package/src/utils/versioned-storage.ts +323 -0
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import { Reference, Span } from "@ethersphere/bee-js"
|
|
2
|
+
import { calculateChunkAddress, newChunkEncrypter } from "../chunk"
|
|
3
|
+
import { Binary } from "cafe-utility"
|
|
4
|
+
|
|
5
|
+
// Constants for encrypted chunking.
export const CHUNK_SIZE = 4096 // Swarm chunk payload capacity in bytes
export const ENCRYPTED_REFS_PER_CHUNK = 64 // 4096 / 64 = 64 encrypted refs per intermediate chunk
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Build encrypted merkle tree from chunk references
|
|
11
|
+
* Adapted from bee-js encrypted-chunk-stream.ts
|
|
12
|
+
* Returns root reference (64 bytes: 32-byte address + 32-byte encryption key)
|
|
13
|
+
*/
|
|
14
|
+
export async function buildEncryptedMerkleTree(
|
|
15
|
+
encryptedChunks: Array<{
|
|
16
|
+
address: Uint8Array
|
|
17
|
+
key: Uint8Array
|
|
18
|
+
span: bigint
|
|
19
|
+
}>,
|
|
20
|
+
onChunk: (encryptedChunkData: Uint8Array) => Promise<void>,
|
|
21
|
+
): Promise<Reference> {
|
|
22
|
+
// Single chunk case
|
|
23
|
+
if (encryptedChunks.length === 1) {
|
|
24
|
+
// Return 64-byte reference: address + key
|
|
25
|
+
const ref = new Uint8Array(64)
|
|
26
|
+
ref.set(encryptedChunks[0].address, 0)
|
|
27
|
+
ref.set(encryptedChunks[0].key, 32)
|
|
28
|
+
|
|
29
|
+
return new Reference(ref)
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// Multi-chunk case: build intermediate chunks
|
|
33
|
+
// Each intermediate chunk can hold 64 references (64 bytes each = 4096 bytes)
|
|
34
|
+
const intermediateChunks: Array<{
|
|
35
|
+
address: Uint8Array
|
|
36
|
+
key: Uint8Array
|
|
37
|
+
span: bigint
|
|
38
|
+
}> = []
|
|
39
|
+
|
|
40
|
+
for (let i = 0; i < encryptedChunks.length; i += ENCRYPTED_REFS_PER_CHUNK) {
|
|
41
|
+
const refs = encryptedChunks.slice(
|
|
42
|
+
i,
|
|
43
|
+
Math.min(i + ENCRYPTED_REFS_PER_CHUNK, encryptedChunks.length),
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
// Calculate total span from all children (this is the total data size, not refs size!)
|
|
47
|
+
const totalSpan = refs.reduce((sum, ref) => sum + ref.span, 0n)
|
|
48
|
+
|
|
49
|
+
// Build intermediate chunk payload containing all 64-byte references
|
|
50
|
+
// IMPORTANT: Pad to 4096 bytes with zeros BEFORE encryption
|
|
51
|
+
// This ensures that after decryption, the unused bytes are zeros,
|
|
52
|
+
// allowing ChunkPayloadSize to correctly determine the actual payload size
|
|
53
|
+
const payload = new Uint8Array(4096) // Pre-filled with zeros
|
|
54
|
+
refs.forEach((ref, idx) => {
|
|
55
|
+
payload.set(ref.address, idx * 64)
|
|
56
|
+
payload.set(ref.key, idx * 64 + 32)
|
|
57
|
+
})
|
|
58
|
+
|
|
59
|
+
// Create chunk with correct span (total data size) + payload
|
|
60
|
+
const spanBytes = Span.fromBigInt(totalSpan).toUint8Array()
|
|
61
|
+
const chunkData = Binary.concatBytes(spanBytes, payload)
|
|
62
|
+
|
|
63
|
+
// Encrypt the chunk ONCE to get address and key
|
|
64
|
+
const encrypter = newChunkEncrypter()
|
|
65
|
+
const { key, encryptedSpan, encryptedData } =
|
|
66
|
+
encrypter.encryptChunk(chunkData)
|
|
67
|
+
const encryptedChunkData = Binary.concatBytes(encryptedSpan, encryptedData)
|
|
68
|
+
|
|
69
|
+
// Calculate address from encrypted chunk
|
|
70
|
+
const address = await calculateChunkAddress(encryptedChunkData)
|
|
71
|
+
|
|
72
|
+
// Pass the ENCRYPTED chunk data to callback for upload
|
|
73
|
+
// This ensures the uploaded chunk has the same address we calculated
|
|
74
|
+
await onChunk(encryptedChunkData)
|
|
75
|
+
|
|
76
|
+
// Store reference with address, key, and span
|
|
77
|
+
intermediateChunks.push({
|
|
78
|
+
address: address.toUint8Array(),
|
|
79
|
+
key,
|
|
80
|
+
span: totalSpan,
|
|
81
|
+
})
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// Recursively build tree if we have more than one intermediate chunk
|
|
85
|
+
if (intermediateChunks.length > 1) {
|
|
86
|
+
return buildEncryptedMerkleTree(intermediateChunks, onChunk)
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Return root reference (64 bytes)
|
|
90
|
+
const rootRef = new Uint8Array(64)
|
|
91
|
+
rootRef.set(intermediateChunks[0].address, 0)
|
|
92
|
+
rootRef.set(intermediateChunks[0].key, 32)
|
|
93
|
+
|
|
94
|
+
return new Reference(rootRef)
|
|
95
|
+
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { Reference } from "@ethersphere/bee-js"
|
|
2
|
+
import { makeContentAddressedChunk, type ContentAddressedChunk } from "../chunk"
|
|
3
|
+
import type { ChunkReference } from "./types"
|
|
4
|
+
|
|
5
|
+
// Constants
export const CHUNK_SIZE = 4096 // Swarm chunk payload capacity in bytes
export const REFS_PER_CHUNK = 64 // 4096 / 64 = 64 refs per intermediate chunk
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Split data into 4096-byte chunks
|
|
11
|
+
*/
|
|
12
|
+
export function splitDataIntoChunks(data: Uint8Array): Uint8Array[] {
|
|
13
|
+
const chunks: Uint8Array[] = []
|
|
14
|
+
for (let i = 0; i < data.length; i += CHUNK_SIZE) {
|
|
15
|
+
chunks.push(data.slice(i, Math.min(i + CHUNK_SIZE, data.length)))
|
|
16
|
+
}
|
|
17
|
+
return chunks
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Build merkle tree from chunk references
|
|
22
|
+
* Returns root reference (32 bytes)
|
|
23
|
+
*/
|
|
24
|
+
export async function buildMerkleTree(
|
|
25
|
+
chunkRefs: ChunkReference[],
|
|
26
|
+
onIntermediateChunk: (chunk: ContentAddressedChunk) => Promise<void>,
|
|
27
|
+
): Promise<Reference> {
|
|
28
|
+
// Single chunk - return direct reference
|
|
29
|
+
if (chunkRefs.length === 1) {
|
|
30
|
+
return new Reference(chunkRefs[0].address)
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
// Build intermediate level
|
|
34
|
+
const intermediateRefs: ChunkReference[] = []
|
|
35
|
+
|
|
36
|
+
for (let i = 0; i < chunkRefs.length; i += REFS_PER_CHUNK) {
|
|
37
|
+
const refs = chunkRefs.slice(
|
|
38
|
+
i,
|
|
39
|
+
Math.min(i + REFS_PER_CHUNK, chunkRefs.length),
|
|
40
|
+
)
|
|
41
|
+
|
|
42
|
+
// Build payload with 32-byte references
|
|
43
|
+
const payload = new Uint8Array(refs.length * 32)
|
|
44
|
+
refs.forEach((ref, idx) => {
|
|
45
|
+
payload.set(ref.address, idx * 32)
|
|
46
|
+
})
|
|
47
|
+
|
|
48
|
+
// Create intermediate chunk
|
|
49
|
+
const chunk = makeContentAddressedChunk(payload)
|
|
50
|
+
intermediateRefs.push({
|
|
51
|
+
address: chunk.address.toUint8Array(),
|
|
52
|
+
})
|
|
53
|
+
|
|
54
|
+
// Upload intermediate chunk
|
|
55
|
+
await onIntermediateChunk(chunk)
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Recursively build tree if needed
|
|
59
|
+
if (intermediateRefs.length > 1) {
|
|
60
|
+
return buildMerkleTree(intermediateRefs, onIntermediateChunk)
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Return root reference
|
|
64
|
+
return new Reference(intermediateRefs[0].address)
|
|
65
|
+
}
|
|
@@ -0,0 +1,448 @@
|
|
|
1
|
+
import {
|
|
2
|
+
EthAddress,
|
|
3
|
+
Identifier,
|
|
4
|
+
Reference,
|
|
5
|
+
Signature,
|
|
6
|
+
} from "@ethersphere/bee-js"
|
|
7
|
+
import type {
|
|
8
|
+
Bee,
|
|
9
|
+
BeeRequestOptions,
|
|
10
|
+
DownloadOptions,
|
|
11
|
+
} from "@ethersphere/bee-js"
|
|
12
|
+
import {
|
|
13
|
+
calculateChunkAddress,
|
|
14
|
+
decryptChunkData as decryptChunk,
|
|
15
|
+
DEFAULT_DOWNLOAD_CONCURRENCY,
|
|
16
|
+
ENCRYPTED_REF_SIZE,
|
|
17
|
+
IDENTIFIER_SIZE,
|
|
18
|
+
MAX_PAYLOAD_SIZE,
|
|
19
|
+
SOC_HEADER_SIZE,
|
|
20
|
+
SPAN_SIZE,
|
|
21
|
+
UNENCRYPTED_REF_SIZE,
|
|
22
|
+
} from "../chunk"
|
|
23
|
+
import { Binary } from "cafe-utility"
|
|
24
|
+
import type { UploadProgress } from "./types"
|
|
25
|
+
import type { SingleOwnerChunk } from "../types"
|
|
26
|
+
import { hexToUint8Array } from "../utils/hex"
|
|
27
|
+
|
|
28
|
+
function readSpan(spanBytes: Uint8Array): number {
|
|
29
|
+
const view = new DataView(
|
|
30
|
+
spanBytes.buffer,
|
|
31
|
+
spanBytes.byteOffset,
|
|
32
|
+
spanBytes.byteLength,
|
|
33
|
+
)
|
|
34
|
+
return Number(view.getBigUint64(0, true))
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
function makeSocAddress(identifier: Identifier, owner: EthAddress): Reference {
|
|
38
|
+
return new Reference(
|
|
39
|
+
Binary.keccak256(
|
|
40
|
+
Binary.concatBytes(identifier.toUint8Array(), owner.toUint8Array()),
|
|
41
|
+
),
|
|
42
|
+
)
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
 * Parses raw SOC bytes into a SingleOwnerChunk, verifying both the owner's
 * signature and that the identifier/owner pair hashes to the expected
 * SOC address. When an encryption key is supplied, the CAC portion is
 * decrypted and the plaintext chunk is rebuilt for callers.
 *
 * @param data - raw SOC bytes: identifier || signature || CAC data
 * @param address - the SOC address this chunk was fetched from
 * @param expectedOwner - owner address the signature must recover to
 * @param encryptionKey - optional key for encrypted SOC payloads
 * @throws Error on owner mismatch, address mismatch, or an implausible span
 */
function makeSingleOwnerChunkFromData(
  data: Uint8Array,
  address: Reference,
  expectedOwner: EthAddress,
  encryptionKey?: Uint8Array,
): SingleOwnerChunk {
  // SOC layout: identifier (IDENTIFIER_SIZE) || signature || CAC payload.
  const identifier = new Identifier(data.slice(0, IDENTIFIER_SIZE))
  const signature = Signature.fromSlice(data, IDENTIFIER_SIZE)
  const cacData = data.slice(SOC_HEADER_SIZE)
  // NOTE(review): calculateChunkAddress is awaited elsewhere in this package;
  // here its result is used synchronously — confirm it is not async.
  const cacAddress = calculateChunkAddress(cacData)
  // The signed digest is identifier || CAC address; recovering the signer
  // yields the owner that produced this chunk.
  const digest = Binary.concatBytes(
    identifier.toUint8Array(),
    cacAddress.toUint8Array(),
  )
  const recoveredOwner = signature.recoverPublicKey(digest).address()

  if (!recoveredOwner.equals(expectedOwner)) {
    throw new Error("SOC owner mismatch")
  }

  // Recompute the SOC address from identifier + recovered owner and check it
  // matches the address this chunk was fetched from.
  const socAddress = makeSocAddress(identifier, recoveredOwner)
  if (!Binary.equals(address.toUint8Array(), socAddress.toUint8Array())) {
    throw new Error("SOC data does not match given address")
  }

  let spanBytes: Uint8Array
  let payload: Uint8Array
  let rebuiltData: Uint8Array

  if (encryptionKey) {
    // Encrypted SOC: decrypt the CAC part, then rebuild the plaintext chunk
    // (identifier || signature || span || payload) for callers.
    const decrypted = decryptChunk(encryptionKey, cacData)
    spanBytes = decrypted.slice(0, SPAN_SIZE)
    const span = readSpan(spanBytes)
    payload = decrypted.slice(SPAN_SIZE, SPAN_SIZE + span)
    rebuiltData = Binary.concatBytes(
      identifier.toUint8Array(),
      signature.toUint8Array(),
      spanBytes,
      payload,
    )
  } else {
    // Plaintext SOC: span and payload sit directly after the SOC header.
    spanBytes = data.slice(SOC_HEADER_SIZE, SOC_HEADER_SIZE + SPAN_SIZE)
    const span = readSpan(spanBytes)
    // A plausible span never exceeds one chunk; a larger value usually means
    // the payload is still encrypted and a key should have been supplied.
    if (span > 4096) {
      throw new Error(
        "SOC payload length is invalid; this chunk likely requires decryption",
      )
    }
    payload = data.slice(
      SOC_HEADER_SIZE + SPAN_SIZE,
      SOC_HEADER_SIZE + SPAN_SIZE + span,
    )
    rebuiltData = data
  }

  const span = readSpan(spanBytes)

  return {
    data: rebuiltData,
    identifier: identifier.toHex(),
    signature: signature.toHex(),
    span,
    payload,
    address: address.toHex(),
    owner: recoveredOwner.toHex(),
  }
}
|
|
112
|
+
|
|
113
|
+
/**
 * Represents a reference with optional encryption key
 */
interface ChunkRef {
  // 32-byte chunk address
  address: Uint8Array
  // 32-byte decryption key; present only for encrypted references
  encryptionKey?: Uint8Array
}
|
|
120
|
+
|
|
121
|
+
/**
|
|
122
|
+
* Parse a reference string into address and optional encryption key
|
|
123
|
+
*/
|
|
124
|
+
function parseReference(reference: string): ChunkRef {
|
|
125
|
+
const refBytes = hexToUint8Array(reference)
|
|
126
|
+
|
|
127
|
+
if (refBytes.length === UNENCRYPTED_REF_SIZE) {
|
|
128
|
+
return { address: refBytes }
|
|
129
|
+
} else if (refBytes.length === ENCRYPTED_REF_SIZE) {
|
|
130
|
+
return {
|
|
131
|
+
address: refBytes.slice(0, UNENCRYPTED_REF_SIZE),
|
|
132
|
+
encryptionKey: refBytes.slice(UNENCRYPTED_REF_SIZE),
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
throw new Error(
|
|
137
|
+
`Invalid reference length: ${refBytes.length}, expected ${UNENCRYPTED_REF_SIZE} or ${ENCRYPTED_REF_SIZE}`,
|
|
138
|
+
)
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
/**
|
|
142
|
+
* Extract references from an intermediate chunk payload
|
|
143
|
+
*/
|
|
144
|
+
function extractReferences(
|
|
145
|
+
payload: Uint8Array,
|
|
146
|
+
totalSpan: number,
|
|
147
|
+
isEncrypted: boolean,
|
|
148
|
+
): ChunkRef[] {
|
|
149
|
+
const refs: ChunkRef[] = []
|
|
150
|
+
const refSize = isEncrypted ? ENCRYPTED_REF_SIZE : UNENCRYPTED_REF_SIZE
|
|
151
|
+
|
|
152
|
+
// Calculate actual number of refs based on payload content
|
|
153
|
+
// For encrypted, refs are 64 bytes each; for unencrypted, 32 bytes each
|
|
154
|
+
const maxRefs = Math.floor(payload.length / refSize)
|
|
155
|
+
|
|
156
|
+
// Calculate expected number of refs based on span
|
|
157
|
+
// Each leaf chunk can hold up to MAX_PAYLOAD_SIZE bytes
|
|
158
|
+
const expectedLeafChunks = Math.ceil(totalSpan / MAX_PAYLOAD_SIZE)
|
|
159
|
+
|
|
160
|
+
// We need to determine how many refs are actually in this chunk
|
|
161
|
+
// For intermediate chunks at higher levels, the number of children varies
|
|
162
|
+
const numRefs = Math.min(maxRefs, expectedLeafChunks)
|
|
163
|
+
|
|
164
|
+
for (let i = 0; i < numRefs; i++) {
|
|
165
|
+
const offset = i * refSize
|
|
166
|
+
if (offset + refSize > payload.length) break
|
|
167
|
+
|
|
168
|
+
const address = payload.slice(offset, offset + UNENCRYPTED_REF_SIZE)
|
|
169
|
+
|
|
170
|
+
// Check if this is a zero address (padding)
|
|
171
|
+
if (address.every((b) => b === 0)) break
|
|
172
|
+
|
|
173
|
+
if (isEncrypted) {
|
|
174
|
+
const key = payload.slice(
|
|
175
|
+
offset + UNENCRYPTED_REF_SIZE,
|
|
176
|
+
offset + ENCRYPTED_REF_SIZE,
|
|
177
|
+
)
|
|
178
|
+
refs.push({ address, encryptionKey: key })
|
|
179
|
+
} else {
|
|
180
|
+
refs.push({ address })
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
return refs
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
/**
|
|
188
|
+
* Download and process a single chunk
|
|
189
|
+
*/
|
|
190
|
+
async function downloadAndProcessChunk(
|
|
191
|
+
bee: Bee,
|
|
192
|
+
ref: ChunkRef,
|
|
193
|
+
requestOptions?: BeeRequestOptions,
|
|
194
|
+
): Promise<{ span: number; payload: Uint8Array }> {
|
|
195
|
+
const addressHex = Binary.uint8ArrayToHex(ref.address)
|
|
196
|
+
const rawChunk = await bee.downloadChunk(
|
|
197
|
+
addressHex,
|
|
198
|
+
undefined,
|
|
199
|
+
requestOptions,
|
|
200
|
+
)
|
|
201
|
+
|
|
202
|
+
let chunkData: Uint8Array
|
|
203
|
+
if (ref.encryptionKey) {
|
|
204
|
+
chunkData = decryptChunk(ref.encryptionKey, rawChunk)
|
|
205
|
+
} else {
|
|
206
|
+
chunkData = rawChunk
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
const span = readSpan(chunkData.slice(0, SPAN_SIZE))
|
|
210
|
+
const payload = chunkData.slice(SPAN_SIZE)
|
|
211
|
+
|
|
212
|
+
return { span, payload }
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
/**
 * Recursively join chunks to reconstruct data
 * Uses parallel fetching for performance
 *
 * Children of intermediate chunks are fetched in ordered batches of
 * `concurrency`; `onChunkDownloaded` fires once per chunk downloaded,
 * for progress reporting.
 */
async function joinChunks(
  bee: Bee,
  ref: ChunkRef,
  isEncrypted: boolean,
  concurrency: number,
  onChunkDownloaded: () => void,
  requestOptions?: BeeRequestOptions,
): Promise<Uint8Array> {
  const { span, payload } = await downloadAndProcessChunk(
    bee,
    ref,
    requestOptions,
  )
  onChunkDownloaded()

  // Leaf chunk: data is in payload
  // (a span no larger than one chunk means there are no children)
  if (span <= MAX_PAYLOAD_SIZE) {
    return payload.slice(0, span)
  }

  // Intermediate chunk: contains references to other chunks
  const childRefs = extractReferences(payload, span, isEncrypted)

  if (childRefs.length === 0) {
    throw new Error("No valid references found in intermediate chunk")
  }

  // Download children in parallel with concurrency limit
  const results: Uint8Array[] = new Array(childRefs.length)

  // Process in batches respecting concurrency limit
  for (let i = 0; i < childRefs.length; i += concurrency) {
    const batch = childRefs.slice(
      i,
      Math.min(i + concurrency, childRefs.length),
    )
    const batchResults = await Promise.all(
      batch.map((childRef) =>
        joinChunks(
          bee,
          childRef,
          isEncrypted,
          concurrency,
          onChunkDownloaded,
          requestOptions,
        ),
      ),
    )
    // Write results back at their original positions to keep data order.
    for (let j = 0; j < batchResults.length; j++) {
      results[i + j] = batchResults[j]
    }
  }

  // Concatenate all results
  const totalLength = results.reduce((sum, r) => sum + r.length, 0)
  const result = new Uint8Array(totalLength)
  let offset = 0
  for (const chunk of results) {
    result.set(chunk, offset)
    offset += chunk.length
  }

  return result
}
|
|
283
|
+
|
|
284
|
+
/**
|
|
285
|
+
* Estimate total chunks for progress tracking
|
|
286
|
+
* This is an approximation based on span
|
|
287
|
+
*/
|
|
288
|
+
function estimateTotalChunks(span: number): number {
|
|
289
|
+
if (span <= MAX_PAYLOAD_SIZE) {
|
|
290
|
+
return 1
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
// Number of leaf chunks
|
|
294
|
+
const leafChunks = Math.ceil(span / MAX_PAYLOAD_SIZE)
|
|
295
|
+
|
|
296
|
+
// For a full binary tree with 64-way branching, intermediate chunks
|
|
297
|
+
// This is a rough estimate - actual count depends on tree structure
|
|
298
|
+
let intermediateChunks = 0
|
|
299
|
+
let level = leafChunks
|
|
300
|
+
while (level > 1) {
|
|
301
|
+
const branchingFactor = 64 // REFS_PER_CHUNK
|
|
302
|
+
level = Math.ceil(level / branchingFactor)
|
|
303
|
+
intermediateChunks += level
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
return leafChunks + intermediateChunks
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
/**
 * Download data using only the chunk API
 * This ensures encrypted data remains encrypted during transmission and avoids metadata leakage
 *
 * Supports both:
 * - Regular references (64 hex chars = 32 bytes)
 * - Encrypted references (128 hex chars = 64 bytes: 32-byte address + 32-byte encryption key)
 *
 * @param bee - Bee client used for raw chunk downloads
 * @param reference - hex reference string (plain or encrypted form)
 * @param _options - accepted for API compatibility; currently unused
 * @param onProgress - optional callback receiving estimated progress updates
 * @param requestOptions - per-request options forwarded to the Bee client
 * @returns the reassembled (and, where applicable, decrypted) data
 */
export async function downloadDataWithChunkAPI(
  bee: Bee,
  reference: string,
  _options?: DownloadOptions,
  onProgress?: (progress: UploadProgress) => void,
  requestOptions?: BeeRequestOptions,
): Promise<Uint8Array> {
  // A 64-byte reference carries the decryption key alongside the address.
  const rootRef = parseReference(reference)
  const isEncrypted = rootRef.encryptionKey !== undefined

  // First, download root chunk to get span for progress estimation
  const { span, payload } = await downloadAndProcessChunk(
    bee,
    rootRef,
    requestOptions,
  )

  // For leaf chunks (small data), return immediately
  if (span <= MAX_PAYLOAD_SIZE) {
    if (onProgress) {
      onProgress({ total: 1, processed: 1 })
    }
    return payload.slice(0, span)
  }

  // Estimate total chunks for progress tracking
  const estimatedTotal = estimateTotalChunks(span)
  let processedChunks = 1 // Already downloaded root

  // Shared by all recursive joinChunks calls below.
  const onChunkDownloaded = () => {
    processedChunks++
    if (onProgress) {
      onProgress({ total: estimatedTotal, processed: processedChunks })
    }
  }

  // Report initial progress
  if (onProgress) {
    onProgress({ total: estimatedTotal, processed: 1 })
  }

  // Extract child references and join recursively
  const childRefs = extractReferences(payload, span, isEncrypted)

  if (childRefs.length === 0) {
    throw new Error("No valid references found in root chunk")
  }

  // Download children in parallel
  // (same ordered batching scheme as joinChunks, applied to the root's children)
  const results: Uint8Array[] = new Array(childRefs.length)

  for (let i = 0; i < childRefs.length; i += DEFAULT_DOWNLOAD_CONCURRENCY) {
    const batch = childRefs.slice(
      i,
      Math.min(i + DEFAULT_DOWNLOAD_CONCURRENCY, childRefs.length),
    )
    const batchResults = await Promise.all(
      batch.map((childRef) =>
        joinChunks(
          bee,
          childRef,
          isEncrypted,
          DEFAULT_DOWNLOAD_CONCURRENCY,
          onChunkDownloaded,
          requestOptions,
        ),
      ),
    )
    // Write results back at their original positions to keep data order.
    for (let j = 0; j < batchResults.length; j++) {
      results[i + j] = batchResults[j]
    }
  }

  // Concatenate all results
  const totalLength = results.reduce((sum, r) => sum + r.length, 0)
  const result = new Uint8Array(totalLength)
  let offset = 0
  for (const chunk of results) {
    result.set(chunk, offset)
    offset += chunk.length
  }

  // Final progress update
  // (reports the actual processed count, which may differ from the estimate)
  if (onProgress) {
    onProgress({ total: processedChunks, processed: processedChunks })
  }

  return result
}
|
|
406
|
+
|
|
407
|
+
export async function downloadSOC(
|
|
408
|
+
bee: Bee,
|
|
409
|
+
owner: string | Uint8Array | EthAddress,
|
|
410
|
+
identifier: string | Uint8Array | Identifier,
|
|
411
|
+
requestOptions?: BeeRequestOptions,
|
|
412
|
+
): Promise<SingleOwnerChunk> {
|
|
413
|
+
const ownerAddress = new EthAddress(owner)
|
|
414
|
+
const id = new Identifier(identifier)
|
|
415
|
+
const socAddress = makeSocAddress(id, ownerAddress)
|
|
416
|
+
|
|
417
|
+
const data = await bee.downloadChunk(
|
|
418
|
+
socAddress.toHex(),
|
|
419
|
+
undefined,
|
|
420
|
+
requestOptions,
|
|
421
|
+
)
|
|
422
|
+
|
|
423
|
+
return makeSingleOwnerChunkFromData(data, socAddress, ownerAddress)
|
|
424
|
+
}
|
|
425
|
+
|
|
426
|
+
export async function downloadEncryptedSOC(
|
|
427
|
+
bee: Bee,
|
|
428
|
+
owner: string | Uint8Array | EthAddress,
|
|
429
|
+
identifier: string | Uint8Array | Identifier,
|
|
430
|
+
encryptionKey: string | Uint8Array,
|
|
431
|
+
requestOptions?: BeeRequestOptions,
|
|
432
|
+
): Promise<SingleOwnerChunk> {
|
|
433
|
+
const ownerAddress = new EthAddress(owner)
|
|
434
|
+
const id = new Identifier(identifier)
|
|
435
|
+
const socAddress = makeSocAddress(id, ownerAddress)
|
|
436
|
+
const keyBytes =
|
|
437
|
+
typeof encryptionKey === "string"
|
|
438
|
+
? hexToUint8Array(encryptionKey)
|
|
439
|
+
: encryptionKey
|
|
440
|
+
|
|
441
|
+
const data = await bee.downloadChunk(
|
|
442
|
+
socAddress.toHex(),
|
|
443
|
+
undefined,
|
|
444
|
+
requestOptions,
|
|
445
|
+
)
|
|
446
|
+
|
|
447
|
+
return makeSingleOwnerChunkFromData(data, socAddress, ownerAddress, keyBytes)
|
|
448
|
+
}
|