@snaha/swarm-id 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +431 -0
- package/dist/chunk/bmt.d.ts +17 -0
- package/dist/chunk/bmt.d.ts.map +1 -0
- package/dist/chunk/cac.d.ts +18 -0
- package/dist/chunk/cac.d.ts.map +1 -0
- package/dist/chunk/constants.d.ts +10 -0
- package/dist/chunk/constants.d.ts.map +1 -0
- package/dist/chunk/encrypted-cac.d.ts +48 -0
- package/dist/chunk/encrypted-cac.d.ts.map +1 -0
- package/dist/chunk/encryption.d.ts +86 -0
- package/dist/chunk/encryption.d.ts.map +1 -0
- package/dist/chunk/index.d.ts +6 -0
- package/dist/chunk/index.d.ts.map +1 -0
- package/dist/index.d.ts +46 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/proxy/act/act.d.ts +78 -0
- package/dist/proxy/act/act.d.ts.map +1 -0
- package/dist/proxy/act/crypto.d.ts +44 -0
- package/dist/proxy/act/crypto.d.ts.map +1 -0
- package/dist/proxy/act/grantee-list.d.ts +82 -0
- package/dist/proxy/act/grantee-list.d.ts.map +1 -0
- package/dist/proxy/act/history.d.ts +183 -0
- package/dist/proxy/act/history.d.ts.map +1 -0
- package/dist/proxy/act/index.d.ts +104 -0
- package/dist/proxy/act/index.d.ts.map +1 -0
- package/dist/proxy/chunking-encrypted.d.ts +14 -0
- package/dist/proxy/chunking-encrypted.d.ts.map +1 -0
- package/dist/proxy/chunking.d.ts +15 -0
- package/dist/proxy/chunking.d.ts.map +1 -0
- package/dist/proxy/download-data.d.ts +16 -0
- package/dist/proxy/download-data.d.ts.map +1 -0
- package/dist/proxy/feed-manifest.d.ts +62 -0
- package/dist/proxy/feed-manifest.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/async-finder.d.ts +77 -0
- package/dist/proxy/feeds/epochs/async-finder.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/epoch.d.ts +88 -0
- package/dist/proxy/feeds/epochs/epoch.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/finder.d.ts +67 -0
- package/dist/proxy/feeds/epochs/finder.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/index.d.ts +35 -0
- package/dist/proxy/feeds/epochs/index.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/test-utils.d.ts +93 -0
- package/dist/proxy/feeds/epochs/test-utils.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/types.d.ts +109 -0
- package/dist/proxy/feeds/epochs/types.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/updater.d.ts +68 -0
- package/dist/proxy/feeds/epochs/updater.d.ts.map +1 -0
- package/dist/proxy/feeds/epochs/utils.d.ts +22 -0
- package/dist/proxy/feeds/epochs/utils.d.ts.map +1 -0
- package/dist/proxy/feeds/index.d.ts +5 -0
- package/dist/proxy/feeds/index.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/async-finder.d.ts +14 -0
- package/dist/proxy/feeds/sequence/async-finder.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/finder.d.ts +17 -0
- package/dist/proxy/feeds/sequence/finder.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/index.d.ts +23 -0
- package/dist/proxy/feeds/sequence/index.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/types.d.ts +80 -0
- package/dist/proxy/feeds/sequence/types.d.ts.map +1 -0
- package/dist/proxy/feeds/sequence/updater.d.ts +26 -0
- package/dist/proxy/feeds/sequence/updater.d.ts.map +1 -0
- package/dist/proxy/index.d.ts +6 -0
- package/dist/proxy/index.d.ts.map +1 -0
- package/dist/proxy/manifest-builder.d.ts +183 -0
- package/dist/proxy/manifest-builder.d.ts.map +1 -0
- package/dist/proxy/mantaray-encrypted.d.ts +27 -0
- package/dist/proxy/mantaray-encrypted.d.ts.map +1 -0
- package/dist/proxy/mantaray.d.ts +26 -0
- package/dist/proxy/mantaray.d.ts.map +1 -0
- package/dist/proxy/types.d.ts +29 -0
- package/dist/proxy/types.d.ts.map +1 -0
- package/dist/proxy/upload-data.d.ts +17 -0
- package/dist/proxy/upload-data.d.ts.map +1 -0
- package/dist/proxy/upload-encrypted-data.d.ts +103 -0
- package/dist/proxy/upload-encrypted-data.d.ts.map +1 -0
- package/dist/schemas.d.ts +240 -0
- package/dist/schemas.d.ts.map +1 -0
- package/dist/storage/debounced-uploader.d.ts +62 -0
- package/dist/storage/debounced-uploader.d.ts.map +1 -0
- package/dist/storage/utilization-store.d.ts +108 -0
- package/dist/storage/utilization-store.d.ts.map +1 -0
- package/dist/swarm-id-auth.d.ts +74 -0
- package/dist/swarm-id-auth.d.ts.map +1 -0
- package/dist/swarm-id-auth.js +2 -0
- package/dist/swarm-id-auth.js.map +1 -0
- package/dist/swarm-id-client.d.ts +878 -0
- package/dist/swarm-id-client.d.ts.map +1 -0
- package/dist/swarm-id-client.js +2 -0
- package/dist/swarm-id-client.js.map +1 -0
- package/dist/swarm-id-proxy.d.ts +236 -0
- package/dist/swarm-id-proxy.d.ts.map +1 -0
- package/dist/swarm-id-proxy.js +2 -0
- package/dist/swarm-id-proxy.js.map +1 -0
- package/dist/swarm-id.esm.js +2 -0
- package/dist/swarm-id.esm.js.map +1 -0
- package/dist/swarm-id.umd.js +2 -0
- package/dist/swarm-id.umd.js.map +1 -0
- package/dist/sync/index.d.ts +9 -0
- package/dist/sync/index.d.ts.map +1 -0
- package/dist/sync/key-derivation.d.ts +25 -0
- package/dist/sync/key-derivation.d.ts.map +1 -0
- package/dist/sync/restore-account.d.ts +28 -0
- package/dist/sync/restore-account.d.ts.map +1 -0
- package/dist/sync/serialization.d.ts +16 -0
- package/dist/sync/serialization.d.ts.map +1 -0
- package/dist/sync/store-interfaces.d.ts +53 -0
- package/dist/sync/store-interfaces.d.ts.map +1 -0
- package/dist/sync/sync-account.d.ts +44 -0
- package/dist/sync/sync-account.d.ts.map +1 -0
- package/dist/sync/types.d.ts +13 -0
- package/dist/sync/types.d.ts.map +1 -0
- package/dist/test-fixtures.d.ts +17 -0
- package/dist/test-fixtures.d.ts.map +1 -0
- package/dist/types-BD_VkNn0.js +2 -0
- package/dist/types-BD_VkNn0.js.map +1 -0
- package/dist/types-lJCaT-50.js +2 -0
- package/dist/types-lJCaT-50.js.map +1 -0
- package/dist/types.d.ts +2157 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/utils/account-payload.d.ts +94 -0
- package/dist/utils/account-payload.d.ts.map +1 -0
- package/dist/utils/account-state-snapshot.d.ts +38 -0
- package/dist/utils/account-state-snapshot.d.ts.map +1 -0
- package/dist/utils/backup-encryption.d.ts +127 -0
- package/dist/utils/backup-encryption.d.ts.map +1 -0
- package/dist/utils/batch-utilization.d.ts +432 -0
- package/dist/utils/batch-utilization.d.ts.map +1 -0
- package/dist/utils/constants.d.ts +11 -0
- package/dist/utils/constants.d.ts.map +1 -0
- package/dist/utils/hex.d.ts +17 -0
- package/dist/utils/hex.d.ts.map +1 -0
- package/dist/utils/key-derivation.d.ts +92 -0
- package/dist/utils/key-derivation.d.ts.map +1 -0
- package/dist/utils/storage-managers.d.ts +65 -0
- package/dist/utils/storage-managers.d.ts.map +1 -0
- package/dist/utils/swarm-id-export.d.ts +24 -0
- package/dist/utils/swarm-id-export.d.ts.map +1 -0
- package/dist/utils/ttl.d.ts +49 -0
- package/dist/utils/ttl.d.ts.map +1 -0
- package/dist/utils/url.d.ts +41 -0
- package/dist/utils/url.d.ts.map +1 -0
- package/dist/utils/versioned-storage.d.ts +131 -0
- package/dist/utils/versioned-storage.d.ts.map +1 -0
- package/package.json +78 -0
- package/src/chunk/bmt.test.ts +217 -0
- package/src/chunk/bmt.ts +57 -0
- package/src/chunk/cac.test.ts +214 -0
- package/src/chunk/cac.ts +65 -0
- package/src/chunk/constants.ts +18 -0
- package/src/chunk/encrypted-cac.test.ts +385 -0
- package/src/chunk/encrypted-cac.ts +131 -0
- package/src/chunk/encryption.test.ts +352 -0
- package/src/chunk/encryption.ts +300 -0
- package/src/chunk/index.ts +47 -0
- package/src/index.ts +430 -0
- package/src/proxy/act/act.test.ts +278 -0
- package/src/proxy/act/act.ts +158 -0
- package/src/proxy/act/bee-compat.test.ts +948 -0
- package/src/proxy/act/crypto.test.ts +436 -0
- package/src/proxy/act/crypto.ts +376 -0
- package/src/proxy/act/grantee-list.test.ts +393 -0
- package/src/proxy/act/grantee-list.ts +239 -0
- package/src/proxy/act/history.test.ts +360 -0
- package/src/proxy/act/history.ts +413 -0
- package/src/proxy/act/index.test.ts +748 -0
- package/src/proxy/act/index.ts +853 -0
- package/src/proxy/chunking-encrypted.ts +95 -0
- package/src/proxy/chunking.ts +65 -0
- package/src/proxy/download-data.ts +448 -0
- package/src/proxy/feed-manifest.ts +174 -0
- package/src/proxy/feeds/epochs/async-finder.ts +372 -0
- package/src/proxy/feeds/epochs/epoch.test.ts +249 -0
- package/src/proxy/feeds/epochs/epoch.ts +181 -0
- package/src/proxy/feeds/epochs/finder.ts +282 -0
- package/src/proxy/feeds/epochs/index.ts +73 -0
- package/src/proxy/feeds/epochs/integration.test.ts +1336 -0
- package/src/proxy/feeds/epochs/test-utils.ts +274 -0
- package/src/proxy/feeds/epochs/types.ts +128 -0
- package/src/proxy/feeds/epochs/updater.ts +192 -0
- package/src/proxy/feeds/epochs/utils.ts +62 -0
- package/src/proxy/feeds/index.ts +5 -0
- package/src/proxy/feeds/sequence/async-finder.ts +31 -0
- package/src/proxy/feeds/sequence/finder.ts +73 -0
- package/src/proxy/feeds/sequence/index.ts +54 -0
- package/src/proxy/feeds/sequence/integration.test.ts +966 -0
- package/src/proxy/feeds/sequence/types.ts +103 -0
- package/src/proxy/feeds/sequence/updater.ts +71 -0
- package/src/proxy/index.ts +5 -0
- package/src/proxy/manifest-builder.test.ts +427 -0
- package/src/proxy/manifest-builder.ts +679 -0
- package/src/proxy/mantaray-encrypted.ts +78 -0
- package/src/proxy/mantaray.ts +104 -0
- package/src/proxy/types.ts +32 -0
- package/src/proxy/upload-data.ts +189 -0
- package/src/proxy/upload-encrypted-data.ts +658 -0
- package/src/schemas.ts +299 -0
- package/src/storage/debounced-uploader.ts +192 -0
- package/src/storage/utilization-store.ts +397 -0
- package/src/swarm-id-client.test.ts +99 -0
- package/src/swarm-id-client.ts +3095 -0
- package/src/swarm-id-proxy.ts +3891 -0
- package/src/sync/index.ts +28 -0
- package/src/sync/restore-account.ts +90 -0
- package/src/sync/serialization.ts +39 -0
- package/src/sync/store-interfaces.ts +62 -0
- package/src/sync/sync-account.test.ts +302 -0
- package/src/sync/sync-account.ts +396 -0
- package/src/sync/types.ts +11 -0
- package/src/test-fixtures.ts +109 -0
- package/src/types.ts +1651 -0
- package/src/utils/account-state-snapshot.test.ts +595 -0
- package/src/utils/account-state-snapshot.ts +94 -0
- package/src/utils/backup-encryption.test.ts +442 -0
- package/src/utils/backup-encryption.ts +352 -0
- package/src/utils/batch-utilization.ts +1309 -0
- package/src/utils/constants.ts +20 -0
- package/src/utils/hex.ts +27 -0
- package/src/utils/key-derivation.ts +197 -0
- package/src/utils/storage-managers.ts +365 -0
- package/src/utils/ttl.ts +129 -0
- package/src/utils/url.test.ts +136 -0
- package/src/utils/url.ts +71 -0
- package/src/utils/versioned-storage.ts +323 -0
|
@@ -0,0 +1,1336 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Integration tests for epoch feeds
|
|
3
|
+
*
|
|
4
|
+
* Based on the Go implementation tests from bee/pkg/feeds/epochs
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"
|
|
8
|
+
import { Binary } from "cafe-utility"
|
|
9
|
+
import { PrivateKey } from "@ethersphere/bee-js"
|
|
10
|
+
import { SyncEpochFinder } from "./finder"
|
|
11
|
+
import { AsyncEpochFinder } from "./async-finder"
|
|
12
|
+
import { BasicEpochUpdater } from "./updater"
|
|
13
|
+
import { EpochIndex, MAX_LEVEL } from "./epoch"
|
|
14
|
+
import type { EpochUpdateHints, EpochUpdateResult } from "./types"
|
|
15
|
+
import {
|
|
16
|
+
MockBee,
|
|
17
|
+
MockChunkStore,
|
|
18
|
+
createTestSigner,
|
|
19
|
+
createTestTopic,
|
|
20
|
+
createTestReference,
|
|
21
|
+
createMockStamper,
|
|
22
|
+
mockFetch,
|
|
23
|
+
} from "./test-utils"
|
|
24
|
+
|
|
25
|
+
const SPAN_SIZE = 8
|
|
26
|
+
const ENCRYPTION_KEY_SIZE = 32
|
|
27
|
+
|
|
28
|
+
function createTestReference64(seed: number): Uint8Array {
|
|
29
|
+
const ref = new Uint8Array(64)
|
|
30
|
+
const view = new DataView(ref.buffer)
|
|
31
|
+
view.setBigUint64(0, BigInt(seed), false)
|
|
32
|
+
// Ensure second half is non-zero so truncation bugs are caught.
|
|
33
|
+
for (let i = 32; i < 64; i++) {
|
|
34
|
+
ref[i] = (seed + i) & 0xff
|
|
35
|
+
}
|
|
36
|
+
return ref
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
class CountingMockBee extends MockBee {
|
|
40
|
+
public downloadCalls = 0
|
|
41
|
+
|
|
42
|
+
override async downloadChunk(reference: string): Promise<Uint8Array> {
|
|
43
|
+
this.downloadCalls++
|
|
44
|
+
const lower = reference.toLowerCase()
|
|
45
|
+
if (!this.getStore().has(lower)) {
|
|
46
|
+
throw new Error("Request failed with status code 500")
|
|
47
|
+
}
|
|
48
|
+
return this.getStore().get(lower)
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
class MixedErrorMockBee extends CountingMockBee {
|
|
53
|
+
override async downloadChunk(reference: string): Promise<Uint8Array> {
|
|
54
|
+
this.downloadCalls++
|
|
55
|
+
const lower = reference.toLowerCase()
|
|
56
|
+
if (this.getStore().has(lower)) {
|
|
57
|
+
return this.getStore().get(lower)
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const selector = lower.charCodeAt(0) % 3
|
|
61
|
+
if (selector === 0) {
|
|
62
|
+
throw new Error("Request failed with status code 404")
|
|
63
|
+
}
|
|
64
|
+
if (selector === 1) {
|
|
65
|
+
throw new Error("Request failed with status code 500")
|
|
66
|
+
}
|
|
67
|
+
throw new Error("timeout of 2000ms exceeded")
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
async function putEpochSoc(
|
|
72
|
+
store: MockChunkStore,
|
|
73
|
+
signer: ReturnType<typeof createTestSigner>,
|
|
74
|
+
topic: ReturnType<typeof createTestTopic>,
|
|
75
|
+
epoch: EpochIndex,
|
|
76
|
+
payload: Uint8Array,
|
|
77
|
+
): Promise<void> {
|
|
78
|
+
const epochHash = await epoch.marshalBinary()
|
|
79
|
+
const identifier = Binary.keccak256(
|
|
80
|
+
Binary.concatBytes(topic.toUint8Array(), epochHash),
|
|
81
|
+
)
|
|
82
|
+
const span = new Uint8Array(SPAN_SIZE)
|
|
83
|
+
new DataView(span.buffer).setBigUint64(0, BigInt(payload.length), true)
|
|
84
|
+
const contentHash = Binary.keccak256(Binary.concatBytes(span, payload))
|
|
85
|
+
const toSign = Binary.concatBytes(identifier, contentHash)
|
|
86
|
+
const signature = signer.sign(toSign)
|
|
87
|
+
const socData = Binary.concatBytes(
|
|
88
|
+
identifier,
|
|
89
|
+
signature.toUint8Array(),
|
|
90
|
+
span,
|
|
91
|
+
payload,
|
|
92
|
+
)
|
|
93
|
+
const owner = signer.publicKey().address().toUint8Array()
|
|
94
|
+
const address = Binary.keccak256(Binary.concatBytes(identifier, owner))
|
|
95
|
+
await store.put(Binary.uint8ArrayToHex(address), socData)
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
function payloadWithTimestamp(
|
|
99
|
+
timestamp: bigint,
|
|
100
|
+
reference: Uint8Array,
|
|
101
|
+
): Uint8Array {
|
|
102
|
+
// v1 format: span(8) + timestamp(8) + reference(32)
|
|
103
|
+
// Span (8 bytes, little-endian) - required for Bee /bzz/ compatibility
|
|
104
|
+
const span = new Uint8Array(8)
|
|
105
|
+
new DataView(span.buffer).setBigUint64(0, BigInt(reference.length), true)
|
|
106
|
+
|
|
107
|
+
// Timestamp (8 bytes, big-endian)
|
|
108
|
+
const ts = new Uint8Array(8)
|
|
109
|
+
new DataView(ts.buffer).setBigUint64(0, timestamp, false)
|
|
110
|
+
|
|
111
|
+
return Binary.concatBytes(span, ts, reference)
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
/**
 * Mock uploadEncryptedSOC to store unencrypted SOC data in the mock store.
 *
 * The real function encrypts the payload, but the finder expects to read
 * unencrypted SOC data (identifier + signature + span + payload).
 * This mock bypasses encryption so the finder can parse the data directly.
 *
 * NOTE(review): vitest hoists vi.mock above the file body, so the factory
 * must not read outer module state at factory time — hence Binary is
 * re-imported dynamically here. SPAN_SIZE / ENCRYPTION_KEY_SIZE are only
 * read when the mocked function is *called* (during tests, after module
 * init), so capturing them in the closure is safe.
 */
vi.mock("../../upload-encrypted-data", async (importOriginal) => {
  // Keep every other export of the real module intact.
  const mod =
    await importOriginal<typeof import("../../upload-encrypted-data")>()
  const { Binary } = await import("cafe-utility")

  return {
    ...mod,
    // Same signature as the real uploadEncryptedSOC; the stamper is unused
    // because nothing is actually uploaded.
    uploadEncryptedSOC: async (
      bee: any,
      _stamper: any,
      signer: any,
      identifier: any,
      data: Uint8Array,
    ) => {
      // Create span (little-endian uint64 of data length)
      const span = new Uint8Array(SPAN_SIZE)
      const spanView = new DataView(span.buffer)
      spanView.setBigUint64(0, BigInt(data.length), true)

      // Sign: hash(identifier + hash(span + data))
      const contentHash = Binary.keccak256(Binary.concatBytes(span, data))
      const toSign = Binary.concatBytes(identifier.toUint8Array(), contentHash)
      const signature = signer.sign(toSign)

      // Build SOC data: identifier(32) + signature(65) + span(8) + payload
      const socData = Binary.concatBytes(
        identifier.toUint8Array(),
        signature.toUint8Array(),
        span,
        data,
      )

      // Calculate SOC address: Keccak256(identifier + owner)
      const owner = signer.publicKey().address()
      const socAddress = Binary.keccak256(
        Binary.concatBytes(identifier.toUint8Array(), owner.toUint8Array()),
      )

      // Store directly in mock store (bypassing fetch/encryption)
      const store = bee.getStore()
      const reference = Binary.uint8ArrayToHex(socAddress)
      await store.put(reference, socData)

      // Return the same shape as the real function; the encryption key is
      // all zeros because no encryption happened.
      return {
        socAddress,
        encryptionKey: new Uint8Array(ENCRYPTION_KEY_SIZE),
        tagUid: 0,
      }
    },
  }
})
|
|
172
|
+
|
|
173
|
+
describe("Epoch Feeds Integration", () => {
|
|
174
|
+
let store: MockChunkStore
|
|
175
|
+
let bee: MockBee
|
|
176
|
+
let signer: ReturnType<typeof createTestSigner>
|
|
177
|
+
let topic: ReturnType<typeof createTestTopic>
|
|
178
|
+
let stamper: ReturnType<typeof createMockStamper>
|
|
179
|
+
|
|
180
|
+
beforeEach(() => {
|
|
181
|
+
store = new MockChunkStore()
|
|
182
|
+
bee = new MockBee(store)
|
|
183
|
+
signer = createTestSigner()
|
|
184
|
+
topic = createTestTopic()
|
|
185
|
+
stamper = createMockStamper()
|
|
186
|
+
mockFetch(store, signer.publicKey().address())
|
|
187
|
+
})
|
|
188
|
+
|
|
189
|
+
afterEach(() => {
|
|
190
|
+
store.clear()
|
|
191
|
+
})
|
|
192
|
+
|
|
193
|
+
describe("Basic Updater and Finder", () => {
|
|
194
|
+
it("should return undefined when no updates exist", async () => {
|
|
195
|
+
const owner = signer.publicKey().address()
|
|
196
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
197
|
+
|
|
198
|
+
const result = await finder.findAt(100n, 0n)
|
|
199
|
+
expect(result).toBeUndefined()
|
|
200
|
+
})
|
|
201
|
+
|
|
202
|
+
it("should store and retrieve first update", async () => {
|
|
203
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
204
|
+
const owner = updater.getOwner()
|
|
205
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
206
|
+
|
|
207
|
+
// Create update
|
|
208
|
+
const at = 100n
|
|
209
|
+
const reference = createTestReference(1)
|
|
210
|
+
|
|
211
|
+
const result = await updater.update(at, reference, stamper)
|
|
212
|
+
|
|
213
|
+
// Should return epoch info for next update
|
|
214
|
+
expect(result.epoch).toBeDefined()
|
|
215
|
+
expect(result.epoch.level).toBe(MAX_LEVEL) // First update uses root epoch
|
|
216
|
+
expect(result.timestamp).toBe(at)
|
|
217
|
+
|
|
218
|
+
// Find at same time
|
|
219
|
+
const found = await finder.findAt(at, 0n)
|
|
220
|
+
expect(found).toBeDefined()
|
|
221
|
+
expect(found).toHaveLength(32)
|
|
222
|
+
})
|
|
223
|
+
|
|
224
|
+
it("should find update at any timestamp via root epoch", async () => {
|
|
225
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
226
|
+
const owner = updater.getOwner()
|
|
227
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
228
|
+
|
|
229
|
+
const at = 100n
|
|
230
|
+
const reference = createTestReference(1)
|
|
231
|
+
|
|
232
|
+
await updater.update(at, reference, stamper)
|
|
233
|
+
|
|
234
|
+
// Root epoch is findable at the upload timestamp
|
|
235
|
+
expect(await finder.findAt(at, 0n)).toEqual(reference)
|
|
236
|
+
// Root epoch is also findable at any FUTURE timestamp
|
|
237
|
+
expect(await finder.findAt(1000n, 0n)).toEqual(reference)
|
|
238
|
+
expect(await finder.findAt(999999n, 0n)).toEqual(reference)
|
|
239
|
+
})
|
|
240
|
+
|
|
241
|
+
it("should not find update before it was created", async () => {
|
|
242
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
243
|
+
const owner = updater.getOwner()
|
|
244
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
245
|
+
|
|
246
|
+
const at = 100n
|
|
247
|
+
const reference = createTestReference(1)
|
|
248
|
+
|
|
249
|
+
await updater.update(at, reference, stamper)
|
|
250
|
+
|
|
251
|
+
// Try to find at earlier time
|
|
252
|
+
const result = await finder.findAt(50n, 0n)
|
|
253
|
+
expect(result).toBeUndefined()
|
|
254
|
+
})
|
|
255
|
+
|
|
256
|
+
it("should return epoch hints for stateless operation", async () => {
|
|
257
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
258
|
+
const at = 100n
|
|
259
|
+
const reference = createTestReference(1)
|
|
260
|
+
|
|
261
|
+
const result = await updater.update(at, reference, stamper)
|
|
262
|
+
|
|
263
|
+
// First update should use root epoch
|
|
264
|
+
expect(result.epoch.start).toBe(0n)
|
|
265
|
+
expect(result.epoch.level).toBe(MAX_LEVEL)
|
|
266
|
+
expect(result.timestamp).toBe(at)
|
|
267
|
+
expect(result.socAddress).toBeDefined()
|
|
268
|
+
expect(result.socAddress.length).toBe(32)
|
|
269
|
+
})
|
|
270
|
+
})
|
|
271
|
+
|
|
272
|
+
describe("Multiple Updates (Stateless Epoch Calculation)", () => {
|
|
273
|
+
it("should use different epochs when hints are provided", async () => {
|
|
274
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
275
|
+
const owner = updater.getOwner()
|
|
276
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
277
|
+
|
|
278
|
+
const ref1 = createTestReference(1)
|
|
279
|
+
const ref2 = createTestReference(2)
|
|
280
|
+
|
|
281
|
+
// First update - no hints, uses root epoch
|
|
282
|
+
const result1 = await updater.update(10n, ref1, stamper)
|
|
283
|
+
expect(result1.epoch.level).toBe(MAX_LEVEL)
|
|
284
|
+
|
|
285
|
+
// Second update with hints - uses child epoch
|
|
286
|
+
const hints: EpochUpdateHints = {
|
|
287
|
+
lastEpoch: result1.epoch,
|
|
288
|
+
lastTimestamp: result1.timestamp,
|
|
289
|
+
}
|
|
290
|
+
const result2 = await updater.update(20n, ref2, stamper, undefined, hints)
|
|
291
|
+
|
|
292
|
+
// Second update should use a child epoch (lower level)
|
|
293
|
+
expect(result2.epoch.level).toBeLessThan(MAX_LEVEL)
|
|
294
|
+
|
|
295
|
+
// Both updates should be findable
|
|
296
|
+
expect(await finder.findAt(10n, 0n)).toEqual(ref1)
|
|
297
|
+
expect(await finder.findAt(20n, 0n)).toEqual(ref2)
|
|
298
|
+
})
|
|
299
|
+
|
|
300
|
+
it("should find correct update at each timestamp with hints", async () => {
|
|
301
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
302
|
+
const owner = updater.getOwner()
|
|
303
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
304
|
+
|
|
305
|
+
const updates: {
|
|
306
|
+
at: bigint
|
|
307
|
+
ref: Uint8Array
|
|
308
|
+
result: EpochUpdateResult
|
|
309
|
+
}[] = []
|
|
310
|
+
let hints: EpochUpdateHints | undefined
|
|
311
|
+
|
|
312
|
+
// Create multiple updates with proper hints
|
|
313
|
+
for (let i = 0; i < 5; i++) {
|
|
314
|
+
const at = BigInt((i + 1) * 10)
|
|
315
|
+
const reference = createTestReference(i)
|
|
316
|
+
const result = await updater.update(
|
|
317
|
+
at,
|
|
318
|
+
reference,
|
|
319
|
+
stamper,
|
|
320
|
+
undefined,
|
|
321
|
+
hints,
|
|
322
|
+
)
|
|
323
|
+
updates.push({ at, ref: reference, result })
|
|
324
|
+
hints = {
|
|
325
|
+
lastEpoch: result.epoch,
|
|
326
|
+
lastTimestamp: result.timestamp,
|
|
327
|
+
}
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
// All updates should be findable at their respective timestamps
|
|
331
|
+
for (const { at, ref } of updates) {
|
|
332
|
+
const found = await finder.findAt(at, 0n)
|
|
333
|
+
expect(found).toEqual(ref)
|
|
334
|
+
}
|
|
335
|
+
|
|
336
|
+
// Query at latest time should return latest update
|
|
337
|
+
const latestUpdate = updates[updates.length - 1]
|
|
338
|
+
expect(await finder.findAt(latestUpdate.at + 100n, 0n)).toEqual(
|
|
339
|
+
latestUpdate.ref,
|
|
340
|
+
)
|
|
341
|
+
})
|
|
342
|
+
|
|
343
|
+
it("should handle sparse updates with hints", async () => {
|
|
344
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
345
|
+
const owner = updater.getOwner()
|
|
346
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
347
|
+
|
|
348
|
+
const ref1 = createTestReference(1)
|
|
349
|
+
const ref2 = createTestReference(2)
|
|
350
|
+
const ref3 = createTestReference(3)
|
|
351
|
+
|
|
352
|
+
// First update
|
|
353
|
+
const result1 = await updater.update(10n, ref1, stamper)
|
|
354
|
+
|
|
355
|
+
// Second update with hints
|
|
356
|
+
const result2 = await updater.update(1000n, ref2, stamper, undefined, {
|
|
357
|
+
lastEpoch: result1.epoch,
|
|
358
|
+
lastTimestamp: result1.timestamp,
|
|
359
|
+
})
|
|
360
|
+
|
|
361
|
+
// Third update with hints
|
|
362
|
+
await updater.update(100000n, ref3, stamper, undefined, {
|
|
363
|
+
lastEpoch: result2.epoch,
|
|
364
|
+
lastTimestamp: result2.timestamp,
|
|
365
|
+
})
|
|
366
|
+
|
|
367
|
+
// All updates should be findable
|
|
368
|
+
expect(await finder.findAt(10n, 0n)).toEqual(ref1)
|
|
369
|
+
expect(await finder.findAt(1000n, 0n)).toEqual(ref2)
|
|
370
|
+
expect(await finder.findAt(100000n, 0n)).toEqual(ref3)
|
|
371
|
+
})
|
|
372
|
+
|
|
373
|
+
it("should overwrite at root epoch when no hints provided", async () => {
|
|
374
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
375
|
+
const owner = updater.getOwner()
|
|
376
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
377
|
+
|
|
378
|
+
const ref1 = createTestReference(1)
|
|
379
|
+
const ref2 = createTestReference(2)
|
|
380
|
+
|
|
381
|
+
// Both updates without hints - both use root epoch
|
|
382
|
+
await updater.update(10n, ref1, stamper)
|
|
383
|
+
await updater.update(20n, ref2, stamper) // No hints, overwrites
|
|
384
|
+
|
|
385
|
+
// Only ref2 findable (ref1 was overwritten)
|
|
386
|
+
expect(await finder.findAt(20n, 0n)).toEqual(ref2)
|
|
387
|
+
expect(await finder.findAt(100n, 0n)).toEqual(ref2)
|
|
388
|
+
})
|
|
389
|
+
})
|
|
390
|
+
|
|
391
|
+
describe("Auto-Lookup (No Hints Required)", () => {
|
|
392
|
+
it("should auto-lookup and use different epochs for sequential updates", async () => {
|
|
393
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
394
|
+
const owner = updater.getOwner()
|
|
395
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
396
|
+
|
|
397
|
+
const ref1 = createTestReference(1)
|
|
398
|
+
const ref2 = createTestReference(2)
|
|
399
|
+
const ref3 = createTestReference(3)
|
|
400
|
+
|
|
401
|
+
// All updates without hints - updater should auto-lookup
|
|
402
|
+
const result1 = await updater.update(10n, ref1, stamper)
|
|
403
|
+
expect(result1.epoch.level).toBe(MAX_LEVEL) // First uses root
|
|
404
|
+
|
|
405
|
+
const result2 = await updater.update(20n, ref2, stamper) // No hints
|
|
406
|
+
expect(result2.epoch.level).toBeLessThan(MAX_LEVEL) // Auto-lookup finds ref1, uses child
|
|
407
|
+
|
|
408
|
+
const result3 = await updater.update(30n, ref3, stamper) // No hints
|
|
409
|
+
expect(result3.epoch.level).toBeLessThan(result2.epoch.level) // Auto-lookup finds ref2, uses grandchild
|
|
410
|
+
|
|
411
|
+
// All three updates should be findable at their respective timestamps
|
|
412
|
+
expect(await finder.findAt(10n, 0n)).toEqual(ref1)
|
|
413
|
+
expect(await finder.findAt(20n, 0n)).toEqual(ref2)
|
|
414
|
+
expect(await finder.findAt(30n, 0n)).toEqual(ref3)
|
|
415
|
+
})
|
|
416
|
+
|
|
417
|
+
it("should preserve all updates with auto-lookup at fixed intervals", async () => {
|
|
418
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
419
|
+
const owner = updater.getOwner()
|
|
420
|
+
const finder = new AsyncEpochFinder(bee as any, topic, owner)
|
|
421
|
+
|
|
422
|
+
const interval = 10n
|
|
423
|
+
const count = 5
|
|
424
|
+
const updates: { at: bigint; ref: Uint8Array }[] = []
|
|
425
|
+
|
|
426
|
+
// Create updates at fixed intervals WITHOUT hints
|
|
427
|
+
for (let i = 0; i < count; i++) {
|
|
428
|
+
const at = BigInt(i + 1) * interval
|
|
429
|
+
const reference = createTestReference(i)
|
|
430
|
+
updates.push({ at, ref: reference })
|
|
431
|
+
await updater.update(at, reference, stamper) // No hints - auto-lookup
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
// All updates should be findable
|
|
435
|
+
for (const { at, ref } of updates) {
|
|
436
|
+
const found = await finder.findAt(at, 0n)
|
|
437
|
+
expect(found).toEqual(ref)
|
|
438
|
+
}
|
|
439
|
+
})
|
|
440
|
+
|
|
441
|
+
it("should auto-lookup with sparse timestamps", async () => {
|
|
442
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
443
|
+
const owner = updater.getOwner()
|
|
444
|
+
const finder = new AsyncEpochFinder(bee as any, topic, owner)
|
|
445
|
+
|
|
446
|
+
const ref1 = createTestReference(1)
|
|
447
|
+
const ref2 = createTestReference(2)
|
|
448
|
+
const ref3 = createTestReference(3)
|
|
449
|
+
|
|
450
|
+
// Sparse updates without hints
|
|
451
|
+
await updater.update(10n, ref1, stamper)
|
|
452
|
+
await updater.update(1000n, ref2, stamper)
|
|
453
|
+
await updater.update(100000n, ref3, stamper)
|
|
454
|
+
|
|
455
|
+
// All should be findable
|
|
456
|
+
expect(await finder.findAt(10n, 0n)).toEqual(ref1)
|
|
457
|
+
expect(await finder.findAt(1000n, 0n)).toEqual(ref2)
|
|
458
|
+
expect(await finder.findAt(100000n, 0n)).toEqual(ref3)
|
|
459
|
+
})
|
|
460
|
+
|
|
461
|
+
it("should have different SOC addresses for each update with auto-lookup", async () => {
|
|
462
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
463
|
+
|
|
464
|
+
const ref1 = createTestReference(1)
|
|
465
|
+
const ref2 = createTestReference(2)
|
|
466
|
+
const ref3 = createTestReference(3)
|
|
467
|
+
|
|
468
|
+
// All updates without hints - should get different SOC addresses
|
|
469
|
+
const result1 = await updater.update(10n, ref1, stamper)
|
|
470
|
+
const result2 = await updater.update(20n, ref2, stamper)
|
|
471
|
+
const result3 = await updater.update(30n, ref3, stamper)
|
|
472
|
+
|
|
473
|
+
// Verify SOC addresses are different
|
|
474
|
+
const addr1 = Binary.uint8ArrayToHex(result1.socAddress)
|
|
475
|
+
const addr2 = Binary.uint8ArrayToHex(result2.socAddress)
|
|
476
|
+
const addr3 = Binary.uint8ArrayToHex(result3.socAddress)
|
|
477
|
+
|
|
478
|
+
expect(addr1).not.toBe(addr2)
|
|
479
|
+
expect(addr2).not.toBe(addr3)
|
|
480
|
+
expect(addr1).not.toBe(addr3)
|
|
481
|
+
})
|
|
482
|
+
|
|
483
|
+
it("should work correctly when first update is at timestamp 0", async () => {
|
|
484
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
485
|
+
const owner = updater.getOwner()
|
|
486
|
+
const finder = new AsyncEpochFinder(bee as any, topic, owner)
|
|
487
|
+
|
|
488
|
+
const ref0 = createTestReference(10)
|
|
489
|
+
const ref1 = createTestReference(11)
|
|
490
|
+
|
|
491
|
+
// First update at timestamp 0, second at timestamp 1, both without hints
|
|
492
|
+
await updater.update(0n, ref0, stamper)
|
|
493
|
+
await updater.update(1n, ref1, stamper)
|
|
494
|
+
|
|
495
|
+
// Both should be findable
|
|
496
|
+
expect(await finder.findAt(0n, 0n)).toEqual(ref0)
|
|
497
|
+
expect(await finder.findAt(1n, 0n)).toEqual(ref1)
|
|
498
|
+
})
|
|
499
|
+
|
|
500
|
+
it("should work with 64-byte references and auto-lookup", async () => {
|
|
501
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
502
|
+
const owner = updater.getOwner()
|
|
503
|
+
const finder = new AsyncEpochFinder(bee as any, topic, owner)
|
|
504
|
+
|
|
505
|
+
const ref64a = createTestReference64(1)
|
|
506
|
+
const ref64b = createTestReference64(2)
|
|
507
|
+
|
|
508
|
+
// Both without hints
|
|
509
|
+
await updater.update(100n, ref64a, stamper)
|
|
510
|
+
await updater.update(200n, ref64b, stamper)
|
|
511
|
+
|
|
512
|
+
// Both should be findable with correct 64-byte length
|
|
513
|
+
const found1 = await finder.findAt(100n, 0n)
|
|
514
|
+
const found2 = await finder.findAt(200n, 0n)
|
|
515
|
+
|
|
516
|
+
expect(found1).toHaveLength(64)
|
|
517
|
+
expect(found2).toHaveLength(64)
|
|
518
|
+
expect(found1).toEqual(ref64a)
|
|
519
|
+
expect(found2).toEqual(ref64b)
|
|
520
|
+
})
|
|
521
|
+
})
|
|
522
|
+
|
|
523
|
+
describe("Fixed Intervals (With Hints)", () => {
|
|
524
|
+
it("should preserve all updates at fixed intervals when hints used", async () => {
|
|
525
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
526
|
+
const owner = updater.getOwner()
|
|
527
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
528
|
+
|
|
529
|
+
const interval = 10n
|
|
530
|
+
const count = 10 // Reduced for test performance
|
|
531
|
+
const updates: { at: bigint; ref: Uint8Array }[] = []
|
|
532
|
+
let hints: EpochUpdateHints | undefined
|
|
533
|
+
|
|
534
|
+
// Create updates at fixed intervals with hints
|
|
535
|
+
for (let i = 0; i < count; i++) {
|
|
536
|
+
const at = BigInt(i + 1) * interval
|
|
537
|
+
const reference = createTestReference(i)
|
|
538
|
+
updates.push({ at, ref: reference })
|
|
539
|
+
const result = await updater.update(
|
|
540
|
+
at,
|
|
541
|
+
reference,
|
|
542
|
+
stamper,
|
|
543
|
+
undefined,
|
|
544
|
+
hints,
|
|
545
|
+
)
|
|
546
|
+
hints = {
|
|
547
|
+
lastEpoch: result.epoch,
|
|
548
|
+
lastTimestamp: result.timestamp,
|
|
549
|
+
}
|
|
550
|
+
}
|
|
551
|
+
|
|
552
|
+
// All updates should be findable
|
|
553
|
+
for (const { at, ref } of updates) {
|
|
554
|
+
const found = await finder.findAt(at, 0n)
|
|
555
|
+
expect(found).toEqual(ref)
|
|
556
|
+
}
|
|
557
|
+
})
|
|
558
|
+
})
|
|
559
|
+
|
|
560
|
+
describe("Random Intervals (With Hints)", () => {
|
|
561
|
+
it("should preserve all updates at random intervals when hints used", async () => {
|
|
562
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
563
|
+
const owner = updater.getOwner()
|
|
564
|
+
const finder = new SyncEpochFinder(bee as any, topic, owner)
|
|
565
|
+
|
|
566
|
+
const updates: { at: bigint; ref: Uint8Array }[] = []
|
|
567
|
+
let current = 0n
|
|
568
|
+
let hints: EpochUpdateHints | undefined
|
|
569
|
+
|
|
570
|
+
// Create random updates with hints
|
|
571
|
+
for (let i = 0; i < 10; i++) {
|
|
572
|
+
current += BigInt(Math.floor(Math.random() * 100) + 1)
|
|
573
|
+
const reference = createTestReference(i)
|
|
574
|
+
updates.push({ at: current, ref: reference })
|
|
575
|
+
const result = await updater.update(
|
|
576
|
+
current,
|
|
577
|
+
reference,
|
|
578
|
+
stamper,
|
|
579
|
+
undefined,
|
|
580
|
+
hints,
|
|
581
|
+
)
|
|
582
|
+
hints = {
|
|
583
|
+
lastEpoch: result.epoch,
|
|
584
|
+
lastTimestamp: result.timestamp,
|
|
585
|
+
}
|
|
586
|
+
}
|
|
587
|
+
|
|
588
|
+
// All updates should be findable
|
|
589
|
+
for (const { at, ref } of updates) {
|
|
590
|
+
const found = await finder.findAt(at, 0n)
|
|
591
|
+
expect(found).toEqual(ref)
|
|
592
|
+
}
|
|
593
|
+
})
|
|
594
|
+
})
|
|
595
|
+
|
|
596
|
+
// Verifies AsyncEpochFinder against updates written with chained
// EpochUpdateHints (hints skip the updater's lookup phase).
describe("Async Finder (With Hints)", () => {
  it("should work with async finder (basic)", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const finder = new AsyncEpochFinder(bee as any, topic, owner)

    const at = 100n
    const reference = createTestReference(1)

    // Single hint-less update.
    await updater.update(at, reference, stamper)

    // Findable at upload time
    expect(await finder.findAt(at, 0n)).toEqual(reference)
    // Findable at future time
    expect(await finder.findAt(1000n, 0n)).toEqual(reference)
  })

  it("should work with async finder (multiple updates with hints)", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const finder = new AsyncEpochFinder(bee as any, topic, owner)

    const updates: { at: bigint; ref: Uint8Array }[] = []
    let hints: EpochUpdateHints | undefined

    // Create multiple updates with hints
    // (hints are chained from each update's returned epoch/timestamp).
    for (let i = 0; i < 5; i++) {
      const at = BigInt((i + 1) * 10)
      const reference = createTestReference(i)
      updates.push({ at, ref: reference })
      const result = await updater.update(
        at,
        reference,
        stamper,
        undefined, // options slot deliberately unused here
        hints,
      )
      hints = {
        lastEpoch: result.epoch,
        lastTimestamp: result.timestamp,
      }
    }

    // All updates should be findable
    for (const { at, ref } of updates) {
      const found = await finder.findAt(at, 0n)
      expect(found).toEqual(ref)
    }
  })

  it("should work with async finder (sparse updates with hints)", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const finder = new AsyncEpochFinder(bee as any, topic, owner)

    const ref1 = createTestReference(1)
    const ref2 = createTestReference(2)
    const ref3 = createTestReference(3)

    // Widely spaced timestamps (10 -> 1000 -> 100000); each later update
    // passes the previous result as its hint.
    const result1 = await updater.update(10n, ref1, stamper)
    const result2 = await updater.update(1000n, ref2, stamper, undefined, {
      lastEpoch: result1.epoch,
      lastTimestamp: result1.timestamp,
    })
    await updater.update(100000n, ref3, stamper, undefined, {
      lastEpoch: result2.epoch,
      lastTimestamp: result2.timestamp,
    })

    // All updates should be findable
    expect(await finder.findAt(10n, 0n)).toEqual(ref1)
    expect(await finder.findAt(1000n, 0n)).toEqual(ref2)
    expect(await finder.findAt(100000n, 0n)).toEqual(ref3)
  })
})
|
|
671
|
+
|
|
672
|
+
describe("Error Handling", () => {
|
|
673
|
+
it("should reject reference with wrong length", async () => {
|
|
674
|
+
const updater = new BasicEpochUpdater(bee as any, topic, signer)
|
|
675
|
+
|
|
676
|
+
const wrongRef = new Uint8Array(16) // Wrong size
|
|
677
|
+
await expect(updater.update(100n, wrongRef, stamper)).rejects.toThrow(
|
|
678
|
+
"Reference must be 32 or 64 bytes",
|
|
679
|
+
)
|
|
680
|
+
})
|
|
681
|
+
})
|
|
682
|
+
|
|
683
|
+
// Core correctness invariants checked against both finder implementations:
// hinted multi-update retrievability, `after`-hint consistency, boundary
// timestamps, same-timestamp overwrite, topic/owner isolation, 64-byte
// roundtrips, and bounded-probe misses.
describe("Correctness Core Regression Matrix", () => {
  it("finds all updates with hints for sync and async finders", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const ref100 = createTestReference(100)
    const ref200 = createTestReference(200)
    const ref300 = createTestReference(300)

    // Multiple updates with hints - all should be findable
    const result1 = await updater.update(100n, ref100, stamper)
    const result2 = await updater.update(200n, ref200, stamper, undefined, {
      lastEpoch: result1.epoch,
      lastTimestamp: result1.timestamp,
    })
    await updater.update(300n, ref300, stamper, undefined, {
      lastEpoch: result2.epoch,
      lastTimestamp: result2.timestamp,
    })

    // Both finder flavors must agree.
    const finders = [
      new SyncEpochFinder(bee as any, topic, owner),
      new AsyncEpochFinder(bee as any, topic, owner),
    ]

    // All updates should be findable at their respective timestamps
    for (const finder of finders) {
      expect(await finder.findAt(100n, 0n)).toEqual(ref100)
      expect(await finder.findAt(200n, 0n)).toEqual(ref200)
      expect(await finder.findAt(300n, 0n)).toEqual(ref300)
      // Latest update findable at any future timestamp
      expect(await finder.findAt(1000n, 0n)).toEqual(ref300)
    }
  })

  it("returns consistent results regardless of after hint", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const ref100 = createTestReference(100)
    const ref200 = createTestReference(200)

    // Updates with hints
    const result1 = await updater.update(100n, ref100, stamper)
    await updater.update(200n, ref200, stamper, undefined, {
      lastEpoch: result1.epoch,
      lastTimestamp: result1.timestamp,
    })

    const finders = [
      new SyncEpochFinder(bee as any, topic, owner),
      new AsyncEpochFinder(bee as any, topic, owner),
    ]

    // Results should be consistent regardless of after hint
    // (the second findAt argument; 300n is ahead of the queried 200n).
    for (const finder of finders) {
      const withoutHint = await finder.findAt(200n, 0n)
      const withAheadHint = await finder.findAt(200n, 300n)
      expect(withoutHint).toEqual(ref200)
      expect(withAheadHint).toEqual(withoutHint)
    }
  })

  it("handles boundary timestamps 0 and 1 with hints", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const ref0 = createTestReference(10)
    const ref1 = createTestReference(11)

    // Both updates with hints - both should be findable
    const result0 = await updater.update(0n, ref0, stamper)
    await updater.update(1n, ref1, stamper, undefined, {
      lastEpoch: result0.epoch,
      lastTimestamp: result0.timestamp,
    })

    const syncFinder = new SyncEpochFinder(bee as any, topic, owner)
    const asyncFinder = new AsyncEpochFinder(bee as any, topic, owner)

    // Both should be findable
    expect(await syncFinder.findAt(0n, 0n)).toEqual(ref0)
    expect(await syncFinder.findAt(1n, 0n)).toEqual(ref1)
    expect(await asyncFinder.findAt(0n, 0n)).toEqual(ref0)
    expect(await asyncFinder.findAt(1n, 0n)).toEqual(ref1)
  })

  it("uses deterministic last-write-wins for same timestamp updates without hints", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    // Two writes at the same timestamp: the second replaces the first.
    await updater.update(100n, createTestReference(1), stamper)
    const ref2 = createTestReference(2)
    await updater.update(100n, ref2, stamper) // No hints - overwrites
    const finder = new AsyncEpochFinder(
      bee as any,
      topic,
      signer.publicKey().address(),
    )
    expect(await finder.findAt(100n, 0n)).toEqual(ref2)
  })

  it("isolates by topic for same owner", async () => {
    const topicA = createTestTopic("topic-a")
    const topicB = createTestTopic("topic-b")
    const updaterA = new BasicEpochUpdater(bee as any, topicA, signer)
    const updaterB = new BasicEpochUpdater(bee as any, topicB, signer)
    const owner = signer.publicKey().address()
    const refA = createTestReference(9001)
    const refB = createTestReference(9002)

    // Each topic has its own epoch tree
    await updaterA.update(100n, refA, stamper)
    await updaterB.update(100n, refB, stamper)

    const finderA = new AsyncEpochFinder(bee as any, topicA, owner)
    const finderB = new AsyncEpochFinder(bee as any, topicB, owner)
    expect(await finderA.findAt(100n, 0n)).toEqual(refA)
    expect(await finderB.findAt(100n, 0n)).toEqual(refB)
  })

  it("isolates by owner for same topic and timestamp", async () => {
    const signerA = createTestSigner()
    const signerB = new PrivateKey(
      "9a4ce1ef8d14b7864ea3f1ecfcb39f937ce4a45f47f4d7d02f6b76f1f3ab2c11",
    )
    const refA = createTestReference(5001)
    const refB = createTestReference(5002)
    const at = 100n

    // Write top-level epoch chunks directly for each owner to avoid mockFetch
    // owner coupling and assert true owner isolation.
    const payloadA = payloadWithTimestamp(at, refA)
    const payloadB = payloadWithTimestamp(at, refB)
    await putEpochSoc(store, signerA, topic, new EpochIndex(0n, 32), payloadA)
    await putEpochSoc(store, signerB, topic, new EpochIndex(0n, 32), payloadB)

    const finderA = new AsyncEpochFinder(
      bee as any,
      topic,
      signerA.publicKey().address(),
    )
    const finderB = new AsyncEpochFinder(
      bee as any,
      topic,
      signerB.publicKey().address(),
    )
    expect(await finderA.findAt(at, 0n)).toEqual(refA)
    expect(await finderB.findAt(at, 0n)).toEqual(refB)
  })

  it("roundtrips 64-byte references without truncation", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const ref64 = createTestReference64(42)

    await updater.update(100n, ref64, stamper)

    const finder = new AsyncEpochFinder(bee as any, topic, owner)
    const got64 = await finder.findAt(100n, 0n)
    expect(got64).toBeDefined()
    expect(got64).toHaveLength(64)
    expect(got64).toEqual(ref64)

    // Test 32-byte reference on separate topic
    const topic32 = createTestTopic("topic-32")
    const updater32 = new BasicEpochUpdater(bee as any, topic32, signer)
    const ref32 = createTestReference(24)
    await updater32.update(200n, ref32, stamper)

    const finder32 = new AsyncEpochFinder(bee as any, topic32, owner)
    const got32 = await finder32.findAt(200n, 0n)
    expect(got32).toBeDefined()
    expect(got32).toHaveLength(32)
    expect(got32).toEqual(ref32)
  })

  it("latest update findable at any future timestamp with hints", async () => {
    const updater = new BasicEpochUpdater(bee as any, topic, signer)
    const owner = updater.getOwner()
    const ref100 = createTestReference(100)
    const ref150 = createTestReference(150)

    // Two updates with hints
    const result1 = await updater.update(100n, ref100, stamper)
    await updater.update(150n, ref150, stamper, undefined, {
      lastEpoch: result1.epoch,
      lastTimestamp: result1.timestamp,
    })

    const finder = new AsyncEpochFinder(bee as any, topic, owner)
    // Both findable at their timestamps
    expect(await finder.findAt(100n, 0n)).toEqual(ref100)
    expect(await finder.findAt(150n, 0n)).toEqual(ref150)
    // Latest findable at any future timestamp
    expect(await finder.findAt(1000n, 0n)).toEqual(ref150)
  })

  it("bounded fallback returns miss when nearest valid leaf is outside window", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 2000n
    const farAt = 1500n // 500 before the query — outside the fallback window
    const farRef = createTestReference(3333)
    // 2^63 is a deliberately bogus far-future timestamp ("poison") that makes
    // ancestor chunks invalid for the queried time and forces descent.
    const poisonTimestamp = 2n ** 63n
    const poisonPayload = payloadWithTimestamp(
      poisonTimestamp,
      createTestReference(4444),
    )

    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      poisonPayload,
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      poisonPayload,
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(farAt, 0),
      payloadWithTimestamp(farAt, farRef),
    )

    const finder = new AsyncEpochFinder(failingBee as any, topic, owner)
    const result = await finder.findAt(at, 0n)
    expect(result).toBeUndefined()
    // Probe count must stay bounded even on a miss.
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(220)
  })
})
|
|
916
|
+
|
|
917
|
+
// Stress scenarios: mixed network failures, hand-poisoned ancestor chunks
// (far-future timestamps written directly to the store), power-of-two
// timestamp boundaries, and isolation under poison.
describe("Epoch Correctness Stress Matrix", () => {
  it("keeps probes bounded with mixed 404/500/timeout failures", async () => {
    const mixedBee = new MixedErrorMockBee(store)
    const owner = signer.publicKey().address()
    const finder = new AsyncEpochFinder(mixedBee as any, topic, owner)

    const result = await finder.findAt(1771362000n, 0n)
    expect(result).toBeUndefined()
    // Even with heterogeneous failures, probe count must stay bounded.
    expect(mixedBee.downloadCalls).toBeLessThanOrEqual(80)
  })

  it("resolves poisoned root with valid intermediate and leaf data", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 1771362100n
    const expected = createTestReference(6100)

    // Root (level 32) and next level (31) carry a bogus 2^63 timestamp;
    // only the leaf at level 0 holds the real update.
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(1)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      payloadWithTimestamp(2n ** 63n, createTestReference(2)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(at, 0),
      payloadWithTimestamp(at, expected),
    )

    // Both finder flavors must descend past the poison and agree.
    const asyncFinder = new AsyncEpochFinder(failingBee as any, topic, owner)
    const syncFinder = new SyncEpochFinder(failingBee as any, topic, owner)
    expect(await asyncFinder.findAt(at, 0n)).toEqual(expected)
    expect(await syncFinder.findAt(at, 0n)).toEqual(expected)
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(120)
  })

  it("maintains expected behavior across power-of-two timestamp boundaries", async () => {
    const owner = signer.publicKey().address()
    const finder = new AsyncEpochFinder(bee as any, topic, owner)

    // 2^20 boundary: epoch subtree membership changes here.
    const before = (1n << 20n) - 1n
    const at = 1n << 20n
    const after = (1n << 20n) + 1n

    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(before, 0),
      payloadWithTimestamp(before, createTestReference(7001)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(at, 0),
      payloadWithTimestamp(at, createTestReference(7002)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(after, 0),
      payloadWithTimestamp(after, createTestReference(7003)),
    )

    expect(await finder.findAt(before, 0n)).toEqual(createTestReference(7001))
    expect(await finder.findAt(at, 0n)).toEqual(createTestReference(7002))
    expect(await finder.findAt(after, 0n)).toEqual(createTestReference(7003))
  })

  it("returns inside-window previous leaf and misses outside-window one under poison", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 3000n
    const insideAt = at - 64n // close enough to fall in the fallback window
    const outsideAt = at - 1000n // too far back — expected to miss
    const insideRef = createTestReference(8001)
    const outsideRef = createTestReference(8002)

    // Poison the two top epoch levels with a far-future timestamp.
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(3)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      payloadWithTimestamp(2n ** 63n, createTestReference(4)),
    )

    // Inside bounded window should be found.
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(insideAt, 0),
      payloadWithTimestamp(insideAt, insideRef),
    )
    const finderInside = new AsyncEpochFinder(failingBee as any, topic, owner)
    expect(await finderInside.findAt(at, 0n)).toEqual(insideRef)

    // Separate topic: outside window should miss.
    const topic2 = createTestTopic("outside-window-topic")
    await putEpochSoc(
      store,
      signer,
      topic2,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(5)),
    )
    await putEpochSoc(
      store,
      signer,
      topic2,
      new EpochIndex(0n, 31),
      payloadWithTimestamp(2n ** 63n, createTestReference(6)),
    )
    await putEpochSoc(
      store,
      signer,
      topic2,
      new EpochIndex(outsideAt, 0),
      payloadWithTimestamp(outsideAt, outsideRef),
    )
    const finderOutside = new AsyncEpochFinder(
      failingBee as any,
      topic2,
      owner,
    )
    expect(await finderOutside.findAt(at, 0n)).toBeUndefined()
  })

  it("preserves owner/topic isolation under poisoned ancestors", async () => {
    const failingBee = new CountingMockBee(store)
    const signerA = createTestSigner()
    const signerB = new PrivateKey(
      "7f6c8f5de489c56ba40b494a26d0c6dd0c05fc4f0d37fe2f217af6e9ac7b1a01",
    )
    const topicA = createTestTopic("iso-a")
    const topicB = createTestTopic("iso-b")
    const at = 1771363000n
    const refAA = createTestReference(9101)
    const refBB = createTestReference(9202)

    // Each (owner, topic) pair gets its own poisoned root and a valid leaf.
    await putEpochSoc(
      store,
      signerA,
      topicA,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(7)),
    )
    await putEpochSoc(
      store,
      signerB,
      topicB,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(8)),
    )
    await putEpochSoc(
      store,
      signerA,
      topicA,
      new EpochIndex(at, 0),
      payloadWithTimestamp(at, refAA),
    )
    await putEpochSoc(
      store,
      signerB,
      topicB,
      new EpochIndex(at, 0),
      payloadWithTimestamp(at, refBB),
    )

    const finderAA = new AsyncEpochFinder(
      failingBee as any,
      topicA,
      signerA.publicKey().address(),
    )
    const finderBB = new AsyncEpochFinder(
      failingBee as any,
      topicB,
      signerB.publicKey().address(),
    )
    expect(await finderAA.findAt(at, 0n)).toEqual(refAA)
    expect(await finderBB.findAt(at, 0n)).toEqual(refBB)
  })
})
|
|
1119
|
+
|
|
1120
|
+
// Lookups under hostile conditions: failing downloads and poisoned ancestor
// chunks. Asserts both correctness (right leaf found) and cost (download
// probe counts bounded near the epoch-tree depth).
describe("Pathological Network Conditions", () => {
  it("keeps lookup probes bounded when many epoch chunks fail with 500", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const finder = new AsyncEpochFinder(failingBee as any, topic, owner)

    // Nothing was written: the lookup misses, but must not probe excessively.
    await finder.findAt(1771360835n, 0n)

    // Bound should stay near tree depth (log2 range), not explode.
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(MAX_LEVEL + 2)
  })

  it("finds a valid leaf update even when upper epochs contain poisoned timestamps", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 1771360835n
    const reference = createTestReference(999)

    // Poison ancestors with far-future timestamps to force descent.
    const poisonTimestamp = 2n ** 63n
    const poisonPayload = payloadWithTimestamp(
      poisonTimestamp,
      createTestReference(111),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      poisonPayload,
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      poisonPayload,
    )

    // Write the expected leaf update at the exact timestamp.
    const leafPayload = payloadWithTimestamp(at, reference)
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(at, 0),
      leafPayload,
    )

    const finder = new AsyncEpochFinder(failingBee as any, topic, owner)
    const result = await finder.findAt(at, 0n)

    expect(result).toBeDefined()
    expect(result).toEqual(reference)
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(80)
  })

  it("sync finder also finds exact leaf under poisoned ancestors", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 1771360999n
    const reference = createTestReference(321)

    // Same poisoning setup as above, exercised via SyncEpochFinder.
    const poisonTimestamp = 2n ** 63n
    const poisonPayload = payloadWithTimestamp(
      poisonTimestamp,
      createTestReference(777),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      poisonPayload,
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      poisonPayload,
    )

    const leafPayload = payloadWithTimestamp(at, reference)
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(at, 0),
      leafPayload,
    )

    const finder = new SyncEpochFinder(failingBee as any, topic, owner)
    const result = await finder.findAt(at, 0n)

    expect(result).toBeDefined()
    expect(result).toEqual(reference)
    // Sync strategy gets a slightly looser probe budget than async.
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(90)
  })

  it("async finder can return previous update between two leaf updates when ancestors are poisoned", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const firstAt = 1771361000n
    const secondAt = 1771361100n
    const queryAt = 1771361050n // strictly between the two leaf updates
    const firstRef = createTestReference(1001)
    const secondRef = createTestReference(1002)

    // Poison the top two epoch levels to force a leaf-level resolution.
    const poisonTimestamp = 2n ** 63n
    const poisonPayload = payloadWithTimestamp(
      poisonTimestamp,
      createTestReference(888),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      poisonPayload,
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      poisonPayload,
    )

    const firstLeaf = payloadWithTimestamp(firstAt, firstRef)
    const secondLeaf = payloadWithTimestamp(secondAt, secondRef)
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(firstAt, 0),
      firstLeaf,
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(secondAt, 0),
      secondLeaf,
    )

    // Query between the two: the earlier update is the correct answer.
    const finder = new AsyncEpochFinder(failingBee as any, topic, owner)
    const result = await finder.findAt(queryAt, 0n)

    expect(result).toBeDefined()
    expect(result).toEqual(firstRef)
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(120)
  })

  it("keeps probes bounded for upload read-back style lookup with after=at", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 1771362340n

    // Poison upper epochs so traversal sees invalid ancestors and many misses.
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(1101)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      payloadWithTimestamp(2n ** 63n, createTestReference(1102)),
    )

    // after == at mimics the post-upload read-back pattern; no leaf exists.
    const finder = new AsyncEpochFinder(failingBee as any, topic, owner)
    const result = await finder.findAt(at, at)
    expect(result).toBeUndefined()
    // Bound should stay near tree depth for upload read-back checks.
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(MAX_LEVEL + 6)
  })

  it("returns exact timestamp update under poison with upload read-back hint", async () => {
    const failingBee = new CountingMockBee(store)
    const owner = signer.publicKey().address()
    const at = 1771362340n
    const expected = createTestReference64(1201) // 64-byte reference variant

    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 32),
      payloadWithTimestamp(2n ** 63n, createTestReference(1202)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(0n, 31),
      payloadWithTimestamp(2n ** 63n, createTestReference(1203)),
    )
    await putEpochSoc(
      store,
      signer,
      topic,
      new EpochIndex(at, 0),
      payloadWithTimestamp(at, expected),
    )

    // With the read-back hint (after == at) the exact leaf must be returned.
    const finder = new AsyncEpochFinder(failingBee as any, topic, owner)
    const result = await finder.findAt(at, at)
    expect(result).toEqual(expected)
    expect(failingBee.downloadCalls).toBeLessThanOrEqual(MAX_LEVEL + 6)
  })
})
|
|
1336
|
+
})
|