@unlink-xyz/core 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.json +4 -0
- package/account/zkAccount.test.ts +316 -0
- package/account/zkAccount.ts +222 -0
- package/clients/broadcaster.ts +67 -0
- package/clients/http.ts +94 -0
- package/clients/indexer.ts +150 -0
- package/config.ts +39 -0
- package/core.ts +17 -0
- package/dist/account/railgun-imports-prototype.d.ts +12 -0
- package/dist/account/railgun-imports-prototype.d.ts.map +1 -0
- package/dist/account/railgun-imports-prototype.js +30 -0
- package/dist/clients/indexer.d.ts.map +1 -1
- package/dist/clients/indexer.js +1 -1
- package/dist/state/hydrator.d.ts +16 -0
- package/dist/state/hydrator.d.ts.map +1 -0
- package/dist/state/hydrator.js +18 -0
- package/dist/state/job-store.d.ts +12 -0
- package/dist/state/job-store.d.ts.map +1 -0
- package/dist/state/job-store.js +118 -0
- package/dist/state/jobs.d.ts +50 -0
- package/dist/state/jobs.d.ts.map +1 -0
- package/dist/state/jobs.js +1 -0
- package/dist/state.d.ts +83 -0
- package/dist/state.d.ts.map +1 -0
- package/dist/state.js +171 -0
- package/dist/transactions/deposit.d.ts +0 -2
- package/dist/transactions/deposit.d.ts.map +1 -1
- package/dist/transactions/deposit.js +5 -9
- package/dist/transactions/note-sync.d.ts.map +1 -1
- package/dist/transactions/note-sync.js +1 -1
- package/dist/transactions/shield.d.ts +5 -0
- package/dist/transactions/shield.d.ts.map +1 -0
- package/dist/transactions/shield.js +93 -0
- package/dist/transactions/transact.d.ts +0 -5
- package/dist/transactions/transact.d.ts.map +1 -1
- package/dist/transactions/transact.js +2 -2
- package/dist/transactions/utils.d.ts +10 -0
- package/dist/transactions/utils.d.ts.map +1 -0
- package/dist/transactions/utils.js +17 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/dist/utils/time.d.ts +2 -0
- package/dist/utils/time.d.ts.map +1 -0
- package/dist/utils/time.js +3 -0
- package/dist/utils/witness.d.ts +11 -0
- package/dist/utils/witness.d.ts.map +1 -0
- package/dist/utils/witness.js +19 -0
- package/errors.ts +20 -0
- package/index.ts +17 -0
- package/key-derivation/babyjubjub.ts +11 -0
- package/key-derivation/bech32.test.ts +90 -0
- package/key-derivation/bech32.ts +124 -0
- package/key-derivation/bip32.ts +56 -0
- package/key-derivation/bip39.ts +76 -0
- package/key-derivation/bytes.ts +118 -0
- package/key-derivation/hash.ts +13 -0
- package/key-derivation/index.ts +7 -0
- package/key-derivation/wallet-node.ts +155 -0
- package/keys.ts +47 -0
- package/package.json +4 -5
- package/prover/config.ts +104 -0
- package/prover/index.ts +1 -0
- package/prover/prover.integration.test.ts +162 -0
- package/prover/prover.test.ts +309 -0
- package/prover/prover.ts +405 -0
- package/prover/registry.test.ts +90 -0
- package/prover/registry.ts +82 -0
- package/schema.ts +17 -0
- package/setup-artifacts.sh +57 -0
- package/state/index.ts +2 -0
- package/state/merkle/hydrator.ts +69 -0
- package/state/merkle/index.ts +12 -0
- package/state/merkle/merkle-tree.test.ts +50 -0
- package/state/merkle/merkle-tree.ts +163 -0
- package/state/store/ciphertext-store.ts +28 -0
- package/state/store/index.ts +24 -0
- package/state/store/job-store.ts +162 -0
- package/state/store/jobs.ts +64 -0
- package/state/store/leaf-store.ts +39 -0
- package/state/store/note-store.ts +177 -0
- package/state/store/nullifier-store.ts +39 -0
- package/state/store/records.ts +61 -0
- package/state/store/root-store.ts +34 -0
- package/state/store/store.ts +25 -0
- package/state.test.ts +235 -0
- package/storage/index.ts +3 -0
- package/storage/indexeddb.test.ts +99 -0
- package/storage/indexeddb.ts +235 -0
- package/storage/memory.test.ts +59 -0
- package/storage/memory.ts +93 -0
- package/transactions/deposit.test.ts +160 -0
- package/transactions/deposit.ts +227 -0
- package/transactions/index.ts +20 -0
- package/transactions/note-sync.test.ts +155 -0
- package/transactions/note-sync.ts +452 -0
- package/transactions/reconcile.ts +73 -0
- package/transactions/transact.test.ts +451 -0
- package/transactions/transact.ts +811 -0
- package/transactions/types.ts +141 -0
- package/tsconfig.json +14 -0
- package/types/global.d.ts +15 -0
- package/types.ts +24 -0
- package/utils/async.ts +15 -0
- package/utils/bigint.ts +34 -0
- package/utils/crypto.test.ts +69 -0
- package/utils/crypto.ts +58 -0
- package/utils/json-codec.ts +38 -0
- package/utils/polling.ts +6 -0
- package/utils/signature.ts +16 -0
- package/utils/validators.test.ts +64 -0
- package/utils/validators.ts +86 -0
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
import { CoreError } from "../../errors.js";
|
|
2
|
+
import { keys } from "../../keys.js";
|
|
3
|
+
import type { BatchOp, Storage } from "../../types.js";
|
|
4
|
+
import {
|
|
5
|
+
decodeJson,
|
|
6
|
+
encodeJson,
|
|
7
|
+
getJson,
|
|
8
|
+
type JsonRecord,
|
|
9
|
+
} from "../../utils/json-codec.js";
|
|
10
|
+
import {
|
|
11
|
+
ensureBigIntString,
|
|
12
|
+
ensureChainId,
|
|
13
|
+
ensureMpk,
|
|
14
|
+
ensurePositiveInt,
|
|
15
|
+
} from "../../utils/validators.js";
|
|
16
|
+
import type { NoteInsert, NoteRecord } from "../store/records.js";
|
|
17
|
+
|
|
18
|
+
/** Sentinel zero-length byte payload used as the value for set-like index keys. */
export const emptyBytes = (): Uint8Array => new Uint8Array();
|
|
19
|
+
|
|
20
|
+
export function createNoteStore(storage: Storage) {
|
|
21
|
+
const persistNote = async (record: NoteRecord) => {
|
|
22
|
+
ensureChainId(record.chainId);
|
|
23
|
+
ensurePositiveInt("note index", record.index);
|
|
24
|
+
ensureMpk(record.mpk);
|
|
25
|
+
ensureBigIntString("note value", record.value);
|
|
26
|
+
if (record.spentAt !== undefined) {
|
|
27
|
+
ensurePositiveInt("note spentAt", record.spentAt);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
const noteKey = keys.note(record.chainId, record.index);
|
|
31
|
+
const previous = await getJson<NoteRecord>(storage, noteKey);
|
|
32
|
+
|
|
33
|
+
const ops: BatchOp[] = [
|
|
34
|
+
{ put: [noteKey, encodeJson(record as JsonRecord)] },
|
|
35
|
+
];
|
|
36
|
+
|
|
37
|
+
if (previous && previous.spentAt === undefined) {
|
|
38
|
+
ops.push({
|
|
39
|
+
del: keys.unspent(previous.chainId, previous.mpk, previous.index),
|
|
40
|
+
});
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
if (record.spentAt === undefined) {
|
|
44
|
+
// idx:notes:unspent functions as a set, so we only persist the sentinel bytes.
|
|
45
|
+
ops.push({
|
|
46
|
+
put: [
|
|
47
|
+
keys.unspent(record.chainId, record.mpk, record.index),
|
|
48
|
+
emptyBytes(),
|
|
49
|
+
],
|
|
50
|
+
});
|
|
51
|
+
} else {
|
|
52
|
+
ops.push({
|
|
53
|
+
del: keys.unspent(record.chainId, record.mpk, record.index),
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
await storage.batch(ops);
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
const store = {
|
|
61
|
+
/**
|
|
62
|
+
* Upsert a note record and maintain the unspent-note index for fast balance queries.
|
|
63
|
+
* If the note already exists and is marked as spent, preserve the spentAt field.
|
|
64
|
+
*/
|
|
65
|
+
async putNote(note: NoteInsert) {
|
|
66
|
+
const noteKey = keys.note(note.chainId, note.index);
|
|
67
|
+
const existing = await getJson<NoteRecord>(storage, noteKey);
|
|
68
|
+
if (existing?.spentAt !== undefined) {
|
|
69
|
+
// Preserve spentAt if note was already marked as spent
|
|
70
|
+
await persistNote({ ...note, spentAt: existing.spentAt });
|
|
71
|
+
} else {
|
|
72
|
+
await persistNote({ ...note });
|
|
73
|
+
}
|
|
74
|
+
},
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Fetch a note by (chainId, index) if it exists in local storage.
|
|
78
|
+
*/
|
|
79
|
+
async getNote(chainId: number, index: number) {
|
|
80
|
+
ensureChainId(chainId);
|
|
81
|
+
ensurePositiveInt("note index", index);
|
|
82
|
+
return getJson<NoteRecord>(storage, keys.note(chainId, index));
|
|
83
|
+
},
|
|
84
|
+
|
|
85
|
+
/**
|
|
86
|
+
* List notes matching optional filters (by chainId, mpk) and spent state.
|
|
87
|
+
*/
|
|
88
|
+
async listNotes(
|
|
89
|
+
options: {
|
|
90
|
+
chainId?: number;
|
|
91
|
+
mpk?: string;
|
|
92
|
+
token?: string;
|
|
93
|
+
includeSpent?: boolean;
|
|
94
|
+
} = {},
|
|
95
|
+
) {
|
|
96
|
+
const { chainId, mpk, token, includeSpent = true } = options;
|
|
97
|
+
if (chainId !== undefined) {
|
|
98
|
+
ensureChainId(chainId);
|
|
99
|
+
}
|
|
100
|
+
if (mpk !== undefined) {
|
|
101
|
+
ensureMpk(mpk);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
const prefix = chainId !== undefined ? `notes:${chainId}:` : "notes:";
|
|
105
|
+
const entries = await storage.iter({ prefix });
|
|
106
|
+
const filtered = entries
|
|
107
|
+
.map(({ value }) => decodeJson<NoteRecord>(value))
|
|
108
|
+
.filter(
|
|
109
|
+
(note) =>
|
|
110
|
+
(chainId === undefined || note.chainId === chainId) &&
|
|
111
|
+
(mpk === undefined || note.mpk === mpk) &&
|
|
112
|
+
(token === undefined || note.token === token) &&
|
|
113
|
+
(includeSpent || note.spentAt === undefined),
|
|
114
|
+
);
|
|
115
|
+
filtered.sort((a, b) => a.index - b.index);
|
|
116
|
+
return filtered;
|
|
117
|
+
},
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Mark a cached note as spent by setting its timestamp and re-indexing metadata.
|
|
121
|
+
*/
|
|
122
|
+
async markNoteSpent(chainId: number, index: number, spentAt = Date.now()) {
|
|
123
|
+
ensureChainId(chainId);
|
|
124
|
+
ensurePositiveInt("note index", index);
|
|
125
|
+
ensurePositiveInt("note spentAt", spentAt);
|
|
126
|
+
const existing = await store.getNote(chainId, index);
|
|
127
|
+
if (!existing) {
|
|
128
|
+
throw new CoreError("note not found");
|
|
129
|
+
}
|
|
130
|
+
if (existing.spentAt === spentAt) {
|
|
131
|
+
return existing;
|
|
132
|
+
}
|
|
133
|
+
const updated: NoteRecord = { ...existing, spentAt };
|
|
134
|
+
await persistNote(updated);
|
|
135
|
+
return updated;
|
|
136
|
+
},
|
|
137
|
+
|
|
138
|
+
/**
|
|
139
|
+
* Drop the spent marker from a cached note, returning it to the unspent index.
|
|
140
|
+
*/
|
|
141
|
+
async markNoteUnspent(chainId: number, index: number) {
|
|
142
|
+
ensureChainId(chainId);
|
|
143
|
+
ensurePositiveInt("note index", index);
|
|
144
|
+
const existing = await store.getNote(chainId, index);
|
|
145
|
+
if (!existing) {
|
|
146
|
+
throw new CoreError("note not found");
|
|
147
|
+
}
|
|
148
|
+
if (existing.spentAt === undefined) {
|
|
149
|
+
return existing;
|
|
150
|
+
}
|
|
151
|
+
const updated: NoteRecord = { ...existing };
|
|
152
|
+
delete updated.spentAt;
|
|
153
|
+
await persistNote(updated);
|
|
154
|
+
return updated;
|
|
155
|
+
},
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Aggregate unspent note balances per token for a given master public key.
|
|
159
|
+
*/
|
|
160
|
+
async getZkBalances(mpk: string, options: { chainId?: number } = {}) {
|
|
161
|
+
ensureMpk(mpk);
|
|
162
|
+
const notes = await store.listNotes({
|
|
163
|
+
chainId: options.chainId,
|
|
164
|
+
mpk,
|
|
165
|
+
includeSpent: false,
|
|
166
|
+
});
|
|
167
|
+
const totals: Record<string, bigint> = {};
|
|
168
|
+
for (const note of notes) {
|
|
169
|
+
const amount = BigInt(note.value);
|
|
170
|
+
totals[note.token] = (totals[note.token] ?? 0n) + amount;
|
|
171
|
+
}
|
|
172
|
+
return totals;
|
|
173
|
+
},
|
|
174
|
+
};
|
|
175
|
+
|
|
176
|
+
return store;
|
|
177
|
+
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { keys } from "../../keys.js";
|
|
2
|
+
import type { Storage } from "../../types.js";
|
|
3
|
+
import { getJson, putJson } from "../../utils/json-codec.js";
|
|
4
|
+
import { ensureChainId } from "../../utils/validators.js";
|
|
5
|
+
import type { NullifierRecord } from "../store/records.js";
|
|
6
|
+
|
|
7
|
+
export function createNullifierStore(storage: Storage) {
|
|
8
|
+
return {
|
|
9
|
+
/**
|
|
10
|
+
* Record a nullifier observation so later syncs can de-duplicate work.
|
|
11
|
+
*/
|
|
12
|
+
async putNullifier(nullifier: NullifierRecord) {
|
|
13
|
+
ensureChainId(nullifier.chainId);
|
|
14
|
+
await putJson(
|
|
15
|
+
storage,
|
|
16
|
+
keys.nullifier(nullifier.chainId, nullifier.nullifier),
|
|
17
|
+
nullifier,
|
|
18
|
+
);
|
|
19
|
+
},
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Lookup a previously observed nullifier by value.
|
|
23
|
+
*/
|
|
24
|
+
async getNullifier(chainId: number, value: string) {
|
|
25
|
+
ensureChainId(chainId);
|
|
26
|
+
return getJson<NullifierRecord>(storage, keys.nullifier(chainId, value));
|
|
27
|
+
},
|
|
28
|
+
|
|
29
|
+
/**
|
|
30
|
+
* Count all nullifiers stored locally for a given chain.
|
|
31
|
+
*/
|
|
32
|
+
async countNullifiers(chainId: number): Promise<number> {
|
|
33
|
+
ensureChainId(chainId);
|
|
34
|
+
const prefix = `nullifiers:${chainId}:`;
|
|
35
|
+
const entries = await storage.iter({ prefix });
|
|
36
|
+
return entries.length;
|
|
37
|
+
},
|
|
38
|
+
};
|
|
39
|
+
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
/**
 * Locally cached metadata for a deposit note we consider owned.
 */
export type NoteRecord = {
  chainId: number;
  /** Leaf index within the on-chain tree (zero-based, monotonic). */
  index: number;
  /** Canonical asset identifier, typically the ERC-20 address lowercased. */
  token: string;
  /** Amount expressed as an unsigned BigInt serialized to a base-10 string. */
  value: string;
  /** Commitment stored on-chain (Poseidon(note)). */
  commitment: string;
  /** Note public key derived from the master public key. */
  npk: string;
  /**
   * Master public key for the account (Poseidon(spendingPk.public, nullifyingKey)).
   * Acts as the stable account identifier when grouping notes locally.
   */
  mpk: string;
  /** Randomizer used when committing the note. */
  random: string;
  /** Nullifier that will be revealed when the note is spent. */
  nullifier: string;
  /**
   * Optional timestamp when the note was marked as spent.
   * Absence of the field (not null/undefined assignment) is what marks a
   * note as unspent; stores `delete` this field when un-spending.
   */
  spentAt?: number;
};

/** A note as supplied by writers: a NoteRecord that carries no spent marker yet. */
// prettier-ignore
export type NoteInsert = Omit<NoteRecord, 'spentAt'>;

/**
 * Details about a nullifier observed on-chain, whether or not we own the note.
 */
export type NullifierRecord = {
  chainId: number;
  /** Nullifier value (expected to match NoteRecord.nullifier). */
  nullifier: string;
  /** Leaf index of the corresponding note when known. */
  noteIndex?: number;
};

/**
 * Public information about a leaf commitment inserted into the pool's tree.
 */
export type LeafRecord = {
  chainId: number;
  /** Leaf index inside the tree (zero-based). */
  index: number;
  /** Commitment value stored in the tree. */
  commitment: string;
};

/**
 * Historical merkle root snapshot used for proof construction.
 */
export type RootRecord = {
  chainId: number;
  /** Merkle root value committed on-chain. */
  root: string;
};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { CoreError } from "../../errors.js";
|
|
2
|
+
import { keys, validateKey } from "../../keys.js";
|
|
3
|
+
import type { Storage } from "../../types.js";
|
|
4
|
+
import { getJson, putJson } from "../../utils/json-codec.js";
|
|
5
|
+
import { ensureChainId } from "../../utils/validators.js";
|
|
6
|
+
import type { RootRecord } from "../store/records.js";
|
|
7
|
+
|
|
8
|
+
export function createRootStore(storage: Storage) {
|
|
9
|
+
return {
|
|
10
|
+
/**
|
|
11
|
+
* Persist a historical root snapshot for proof generation.
|
|
12
|
+
*/
|
|
13
|
+
async putRoot(root: RootRecord) {
|
|
14
|
+
ensureChainId(root.chainId);
|
|
15
|
+
if (typeof root.root !== "string" || root.root.length === 0) {
|
|
16
|
+
throw new CoreError("root value must be a non-empty string");
|
|
17
|
+
}
|
|
18
|
+
const key = keys.root(root.chainId, root.root);
|
|
19
|
+
validateKey(key);
|
|
20
|
+
await putJson(storage, key, root);
|
|
21
|
+
},
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Retrieve a root snapshot by Merkle root value.
|
|
25
|
+
*/
|
|
26
|
+
async getRoot(chainId: number, value: string) {
|
|
27
|
+
ensureChainId(chainId);
|
|
28
|
+
if (typeof value !== "string" || value.length === 0) {
|
|
29
|
+
throw new CoreError("root value must be a non-empty string");
|
|
30
|
+
}
|
|
31
|
+
return getJson<RootRecord>(storage, keys.root(chainId, value));
|
|
32
|
+
},
|
|
33
|
+
};
|
|
34
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import type { Storage } from "../../types.js";
|
|
2
|
+
import { createCiphertextStore } from "./ciphertext-store.js";
|
|
3
|
+
import { createJobStore } from "./job-store.js";
|
|
4
|
+
import { createLeafStore } from "./leaf-store.js";
|
|
5
|
+
import { createNoteStore } from "./note-store.js";
|
|
6
|
+
import { createNullifierStore } from "./nullifier-store.js";
|
|
7
|
+
import { createRootStore } from "./root-store.js";
|
|
8
|
+
|
|
9
|
+
export function createStateStore(storage: Storage) {
|
|
10
|
+
const notes = createNoteStore(storage);
|
|
11
|
+
const nullifiers = createNullifierStore(storage);
|
|
12
|
+
const leaves = createLeafStore(storage);
|
|
13
|
+
const roots = createRootStore(storage);
|
|
14
|
+
const ciphertexts = createCiphertextStore(storage);
|
|
15
|
+
const jobs = createJobStore(storage);
|
|
16
|
+
|
|
17
|
+
return {
|
|
18
|
+
...notes,
|
|
19
|
+
...nullifiers,
|
|
20
|
+
...leaves,
|
|
21
|
+
...roots,
|
|
22
|
+
...ciphertexts,
|
|
23
|
+
...jobs,
|
|
24
|
+
};
|
|
25
|
+
}
|
package/state.test.ts
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
import "fake-indexeddb/auto";
|
|
2
|
+
|
|
3
|
+
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
|
4
|
+
|
|
5
|
+
import { createStateStore } from "./state/index.js";
|
|
6
|
+
import {
|
|
7
|
+
createIndexedDbStorage,
|
|
8
|
+
createMemoryStorage,
|
|
9
|
+
} from "./storage/index.js";
|
|
10
|
+
import type { Storage } from "./types.js";
|
|
11
|
+
|
|
12
|
+
type StoreFactory = {
|
|
13
|
+
name: string;
|
|
14
|
+
setup(): Promise;
|
|
15
|
+
};
|
|
16
|
+
|
|
17
|
+
async function deleteIndexeddbDatabase(name: string) {
|
|
18
|
+
await new Promise<void>((resolve, reject) => {
|
|
19
|
+
const request = indexedDB.deleteDatabase(name);
|
|
20
|
+
request.onsuccess = () => resolve();
|
|
21
|
+
request.onerror = () =>
|
|
22
|
+
reject(request.error ?? new Error("failed to delete indexedDB"));
|
|
23
|
+
request.onblocked = () =>
|
|
24
|
+
reject(new Error("indexedDB deletion was blocked"));
|
|
25
|
+
});
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
const factories: StoreFactory[] = [
|
|
29
|
+
{
|
|
30
|
+
name: "memory",
|
|
31
|
+
async setup() {
|
|
32
|
+
const storage = createMemoryStorage();
|
|
33
|
+
await storage.open();
|
|
34
|
+
return { storage };
|
|
35
|
+
},
|
|
36
|
+
},
|
|
37
|
+
{
|
|
38
|
+
name: "indexeddb",
|
|
39
|
+
async setup() {
|
|
40
|
+
const dbName = `unlink-state-test-${Math.random().toString(6)}`;
|
|
41
|
+
await deleteIndexeddbDatabase(dbName).catch(() => {});
|
|
42
|
+
const storage = createIndexedDbStorage({ name: dbName });
|
|
43
|
+
await storage.open();
|
|
44
|
+
return {
|
|
45
|
+
storage,
|
|
46
|
+
cleanup: () => deleteIndexeddbDatabase(dbName).catch(() => {}),
|
|
47
|
+
};
|
|
48
|
+
},
|
|
49
|
+
},
|
|
50
|
+
];
|
|
51
|
+
|
|
52
|
+
describe.each(factories)("state store (%s)", ({ name, setup }) => {
|
|
53
|
+
let storage: Storage;
|
|
54
|
+
let cleanup: (() => Promise) | undefined;
|
|
55
|
+
let state: ReturnType;
|
|
56
|
+
|
|
57
|
+
beforeEach(async () => {
|
|
58
|
+
const prepared = await setup();
|
|
59
|
+
storage = prepared.storage;
|
|
60
|
+
cleanup = prepared.cleanup;
|
|
61
|
+
state = createStateStore(storage);
|
|
62
|
+
});
|
|
63
|
+
|
|
64
|
+
afterEach(async () => {
|
|
65
|
+
await cleanup?.();
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
it(`stores and loads note records [${name}]`, async () => {
|
|
69
|
+
const inserted = {
|
|
70
|
+
chainId: 1,
|
|
71
|
+
index: 5,
|
|
72
|
+
token: "0xToken",
|
|
73
|
+
value: "42",
|
|
74
|
+
commitment: "0xcommitment",
|
|
75
|
+
npk: "0xnpk",
|
|
76
|
+
mpk: "0xmpk",
|
|
77
|
+
random: "0xrandom",
|
|
78
|
+
nullifier: "0xnullifier",
|
|
79
|
+
};
|
|
80
|
+
|
|
81
|
+
await state.putNote(inserted);
|
|
82
|
+
|
|
83
|
+
const fetched = await state.getNote(1, 5);
|
|
84
|
+
expect(fetched).toEqual(inserted);
|
|
85
|
+
});
|
|
86
|
+
|
|
87
|
+
it(`stores and loads nullifier records [${name}]`, async () => {
|
|
88
|
+
const inserted = {
|
|
89
|
+
chainId: 1,
|
|
90
|
+
nullifier: "0xdeadbeef",
|
|
91
|
+
noteIndex: 12,
|
|
92
|
+
};
|
|
93
|
+
|
|
94
|
+
await state.putNullifier(inserted);
|
|
95
|
+
|
|
96
|
+
const fetched = await state.getNullifier(1, "0xdeadbeef");
|
|
97
|
+
expect(fetched).toEqual(inserted);
|
|
98
|
+
});
|
|
99
|
+
|
|
100
|
+
it(`stores and loads leaf records [${name}]`, async () => {
|
|
101
|
+
const inserted = {
|
|
102
|
+
chainId: 137,
|
|
103
|
+
index: 2,
|
|
104
|
+
commitment: "0xleaf",
|
|
105
|
+
};
|
|
106
|
+
|
|
107
|
+
await state.putLeaf(inserted);
|
|
108
|
+
|
|
109
|
+
const fetched = await state.getLeaf(137, 2);
|
|
110
|
+
expect(fetched).toEqual(inserted);
|
|
111
|
+
});
|
|
112
|
+
|
|
113
|
+
it(`stores and loads root records [${name}]`, async () => {
|
|
114
|
+
const inserted = {
|
|
115
|
+
chainId: 10,
|
|
116
|
+
root: "0xroot",
|
|
117
|
+
};
|
|
118
|
+
|
|
119
|
+
await state.putRoot(inserted);
|
|
120
|
+
|
|
121
|
+
const fetched = await state.getRoot(10, "0xroot");
|
|
122
|
+
expect(fetched).toEqual(inserted);
|
|
123
|
+
});
|
|
124
|
+
|
|
125
|
+
it(`stores ciphertext payloads [${name}]`, async () => {
|
|
126
|
+
const payload = new Uint8Array([1, 2, 3, 4]);
|
|
127
|
+
|
|
128
|
+
await state.putCiphertext(1, 7, payload);
|
|
129
|
+
|
|
130
|
+
const fetched = await state.getCiphertext(1, 7);
|
|
131
|
+
expect(fetched).not.toBeNull();
|
|
132
|
+
expect(fetched).toEqual(payload);
|
|
133
|
+
|
|
134
|
+
if (fetched) {
|
|
135
|
+
fetched[0] = 99;
|
|
136
|
+
const second = await state.getCiphertext(1, 7);
|
|
137
|
+
expect(second).toEqual(payload);
|
|
138
|
+
}
|
|
139
|
+
});
|
|
140
|
+
|
|
141
|
+
it(`lists notes by account and spent status [${name}]`, async () => {
|
|
142
|
+
const mpkA = "0xmpkA";
|
|
143
|
+
const mpkB = "0xmpkB";
|
|
144
|
+
|
|
145
|
+
const base = {
|
|
146
|
+
chainId: 1,
|
|
147
|
+
token: "0xToken",
|
|
148
|
+
value: "5",
|
|
149
|
+
commitment: "0xcommitment",
|
|
150
|
+
npk: "0xnpk",
|
|
151
|
+
random: "0xrandom",
|
|
152
|
+
} as const;
|
|
153
|
+
|
|
154
|
+
await state.putNote({
|
|
155
|
+
...base,
|
|
156
|
+
index: 1,
|
|
157
|
+
nullifier: "0xnullifier1",
|
|
158
|
+
mpk: mpkA,
|
|
159
|
+
});
|
|
160
|
+
await state.putNote({
|
|
161
|
+
...base,
|
|
162
|
+
index: 2,
|
|
163
|
+
nullifier: "0xnullifier2",
|
|
164
|
+
mpk: mpkA,
|
|
165
|
+
});
|
|
166
|
+
await state.putNote({
|
|
167
|
+
...base,
|
|
168
|
+
index: 3,
|
|
169
|
+
nullifier: "0xnullifier3",
|
|
170
|
+
mpk: mpkB,
|
|
171
|
+
});
|
|
172
|
+
|
|
173
|
+
await state.markNoteSpent(1, 2, 123);
|
|
174
|
+
|
|
175
|
+
const allForA = await state.listNotes({ mpk: mpkA });
|
|
176
|
+
expect(allForA.map((n) => n.index).sort((a, b) => a - b)).toEqual([1, 2]);
|
|
177
|
+
|
|
178
|
+
const activeForA = await state.listNotes({
|
|
179
|
+
mpk: mpkA,
|
|
180
|
+
includeSpent: false,
|
|
181
|
+
});
|
|
182
|
+
expect(activeForA.map((n) => n.index)).toEqual([1]);
|
|
183
|
+
|
|
184
|
+
const allForB = await state.listNotes({ mpk: mpkB });
|
|
185
|
+
expect(allForB).toHaveLength(1);
|
|
186
|
+
expect(allForB[0]?.index).toBe(3);
|
|
187
|
+
});
|
|
188
|
+
|
|
189
|
+
it(`aggregates zk balances per asset [${name}]`, async () => {
|
|
190
|
+
const mpk = "0xmpkBalance";
|
|
191
|
+
|
|
192
|
+
const note = (index: number, overrides: Partial = {}) => ({
|
|
193
|
+
chainId: 1,
|
|
194
|
+
index,
|
|
195
|
+
token: "0xTokenA",
|
|
196
|
+
value: `${index * 10}`,
|
|
197
|
+
commitment: `0xcommit-${index}`,
|
|
198
|
+
npk: `0xnpk-${index}`,
|
|
199
|
+
mpk,
|
|
200
|
+
random: `0xrandom-${index}`,
|
|
201
|
+
nullifier: `0xnullifier-${index}`,
|
|
202
|
+
...overrides,
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
await state.putNote(note(1, { value: "100" }));
|
|
206
|
+
await state.putNote(note(2, { value: "50" }));
|
|
207
|
+
await state.putNote(
|
|
208
|
+
note(3, { token: "0xTokenB", value: "5", nullifier: "0xnullifier-b" }),
|
|
209
|
+
);
|
|
210
|
+
await state.putNote(
|
|
211
|
+
note(4, {
|
|
212
|
+
mpk: "0xother",
|
|
213
|
+
nullifier: "0xnullifier-other",
|
|
214
|
+
value: "999",
|
|
215
|
+
}),
|
|
216
|
+
);
|
|
217
|
+
|
|
218
|
+
await state.markNoteSpent(1, 2, 456);
|
|
219
|
+
|
|
220
|
+
const balances = await state.getZkBalances(mpk, { chainId: 1 });
|
|
221
|
+
expect(balances).toEqual({
|
|
222
|
+
"0xTokenA": 100n,
|
|
223
|
+
"0xTokenB": 5n,
|
|
224
|
+
});
|
|
225
|
+
|
|
226
|
+
await state.markNoteUnspent(1, 2);
|
|
227
|
+
const balancesAfterReopen = await state.getZkBalances(mpk, {
|
|
228
|
+
chainId: 1,
|
|
229
|
+
});
|
|
230
|
+
expect(balancesAfterReopen).toEqual({
|
|
231
|
+
"0xTokenA": 150n,
|
|
232
|
+
"0xTokenB": 5n,
|
|
233
|
+
});
|
|
234
|
+
});
|
|
235
|
+
});
|
package/storage/indexeddb.test.ts
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import "fake-indexeddb/auto";
|
|
2
|
+
|
|
3
|
+
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
|
4
|
+
|
|
5
|
+
import { CoreError, KeyValidationError } from "../errors.js";
|
|
6
|
+
import { MAX_KEY_LEN } from "../keys.js";
|
|
7
|
+
import type { Storage } from "../types.js";
|
|
8
|
+
import { createIndexedDbStorage } from "./indexeddb.js";
|
|
9
|
+
|
|
10
|
+
// Shared UTF-8 encoder for turning string fixtures into stored bytes.
const encoder = new TextEncoder();
// Reassigned in beforeEach below so each test gets an isolated database name.
let dbName = "unlink-core-test";
|
|
12
|
+
|
|
13
|
+
function deleteDatabase(name: string) {
|
|
14
|
+
return new Promise<void>((resolve, reject) => {
|
|
15
|
+
const request = indexedDB.deleteDatabase(name);
|
|
16
|
+
request.onsuccess = () => resolve();
|
|
17
|
+
request.onerror = () =>
|
|
18
|
+
reject(request.error ?? new Error("failed to delete indexedDB"));
|
|
19
|
+
request.onblocked = () =>
|
|
20
|
+
reject(new Error("indexedDB deletion was blocked"));
|
|
21
|
+
});
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
// Integration tests for the IndexedDB storage backend (backed by
// fake-indexeddb in node). Each test opens a fresh, randomly-named database.
describe("createIndexedDbStorage", () => {
  let storage: Storage;

  beforeEach(async () => {
    // NOTE(review): toString(6) renders the suffix in base 6 — possibly a
    // typo for the idiomatic toString(36); harmless, it only needs to be
    // unique-ish per run.
    dbName = `unlink-core-test-${Math.random().toString(6)}`;
    await deleteDatabase(dbName).catch(() => {});
    storage = createIndexedDbStorage({ name: dbName });
    await storage.open();
  });

  afterEach(async () => {
    await deleteDatabase(dbName).catch(() => {});
  });

  it("returns null when key is missing", async () => {
    const value = await storage.get("meta:missing");
    expect(value).toBeNull();
  });

  it("roundtrips put/get values", async () => {
    await storage.put("meta:key", encoder.encode("value"));
    const value = await storage.get("meta:key");
    expect(value).toEqual(encoder.encode("value"));
  });

  it("roundtrips schema version", async () => {
    await storage.setSchemaVersion(3);
    const version = await storage.getSchemaVersion();
    expect(version).toBe(3);
  });

  it("rejects iter calls when start bound exceeds end bound", async () => {
    await expect(
      storage.iter({ start: "notes:z", end: "notes:a" }),
    ).rejects.toBeInstanceOf(CoreError);
  });

  it("supports prefix, limit, and reverse iteration", async () => {
    await storage.put("notes:a", encoder.encode("1"));
    await storage.put("notes:b", encoder.encode("2"));
    await storage.put("notes:c", encoder.encode("3"));
    // Different prefix: must be excluded from the "notes:" scans below.
    await storage.put("meta:other", encoder.encode("x"));

    const forward = await storage.iter({
      prefix: "notes:",
      limit: 2,
    });
    expect(forward.map((p) => p.key)).toEqual(["notes:a", "notes:b"]);

    const reverse = await storage.iter({
      prefix: "notes:",
      reverse: true,
      limit: 1,
    });
    expect(reverse.map((p) => p.key)).toEqual(["notes:c"]);
  });

  it("keeps batch atomic when validation fails", async () => {
    await storage.put("meta:ok", encoder.encode("1"));

    // One invalid key anywhere in the batch must abort the whole batch,
    // including the earlier, individually-valid operations.
    const oversizedKey = "a".repeat(MAX_KEY_LEN + 1);
    await expect(
      storage.batch([
        { del: "meta:ok" },
        { put: ["notes:new", encoder.encode("value")] },
        { put: [oversizedKey, encoder.encode("bad")] },
      ]),
    ).rejects.toBeInstanceOf(KeyValidationError);

    // The del must not have been applied...
    const original = await storage.get("meta:ok");
    expect(original).toEqual(encoder.encode("1"));

    // ...and neither must the valid put.
    const missing = await storage.get("notes:new");
    expect(missing).toBeNull();
  });
});
|