@unlink-xyz/core 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.json +4 -0
- package/account/zkAccount.test.ts +316 -0
- package/account/zkAccount.ts +222 -0
- package/clients/broadcaster.ts +67 -0
- package/clients/http.ts +94 -0
- package/clients/indexer.ts +150 -0
- package/config.ts +39 -0
- package/core.ts +17 -0
- package/dist/account/railgun-imports-prototype.d.ts +12 -0
- package/dist/account/railgun-imports-prototype.d.ts.map +1 -0
- package/dist/account/railgun-imports-prototype.js +30 -0
- package/dist/clients/indexer.d.ts.map +1 -1
- package/dist/clients/indexer.js +1 -1
- package/dist/state/hydrator.d.ts +16 -0
- package/dist/state/hydrator.d.ts.map +1 -0
- package/dist/state/hydrator.js +18 -0
- package/dist/state/job-store.d.ts +12 -0
- package/dist/state/job-store.d.ts.map +1 -0
- package/dist/state/job-store.js +118 -0
- package/dist/state/jobs.d.ts +50 -0
- package/dist/state/jobs.d.ts.map +1 -0
- package/dist/state/jobs.js +1 -0
- package/dist/state.d.ts +83 -0
- package/dist/state.d.ts.map +1 -0
- package/dist/state.js +171 -0
- package/dist/transactions/deposit.d.ts +0 -2
- package/dist/transactions/deposit.d.ts.map +1 -1
- package/dist/transactions/deposit.js +5 -9
- package/dist/transactions/note-sync.d.ts.map +1 -1
- package/dist/transactions/note-sync.js +1 -1
- package/dist/transactions/shield.d.ts +5 -0
- package/dist/transactions/shield.d.ts.map +1 -0
- package/dist/transactions/shield.js +93 -0
- package/dist/transactions/transact.d.ts +0 -5
- package/dist/transactions/transact.d.ts.map +1 -1
- package/dist/transactions/transact.js +2 -2
- package/dist/transactions/utils.d.ts +10 -0
- package/dist/transactions/utils.d.ts.map +1 -0
- package/dist/transactions/utils.js +17 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/dist/utils/time.d.ts +2 -0
- package/dist/utils/time.d.ts.map +1 -0
- package/dist/utils/time.js +3 -0
- package/dist/utils/witness.d.ts +11 -0
- package/dist/utils/witness.d.ts.map +1 -0
- package/dist/utils/witness.js +19 -0
- package/errors.ts +20 -0
- package/index.ts +17 -0
- package/key-derivation/babyjubjub.ts +11 -0
- package/key-derivation/bech32.test.ts +90 -0
- package/key-derivation/bech32.ts +124 -0
- package/key-derivation/bip32.ts +56 -0
- package/key-derivation/bip39.ts +76 -0
- package/key-derivation/bytes.ts +118 -0
- package/key-derivation/hash.ts +13 -0
- package/key-derivation/index.ts +7 -0
- package/key-derivation/wallet-node.ts +155 -0
- package/keys.ts +47 -0
- package/package.json +4 -5
- package/prover/config.ts +104 -0
- package/prover/index.ts +1 -0
- package/prover/prover.integration.test.ts +162 -0
- package/prover/prover.test.ts +309 -0
- package/prover/prover.ts +405 -0
- package/prover/registry.test.ts +90 -0
- package/prover/registry.ts +82 -0
- package/schema.ts +17 -0
- package/setup-artifacts.sh +57 -0
- package/state/index.ts +2 -0
- package/state/merkle/hydrator.ts +69 -0
- package/state/merkle/index.ts +12 -0
- package/state/merkle/merkle-tree.test.ts +50 -0
- package/state/merkle/merkle-tree.ts +163 -0
- package/state/store/ciphertext-store.ts +28 -0
- package/state/store/index.ts +24 -0
- package/state/store/job-store.ts +162 -0
- package/state/store/jobs.ts +64 -0
- package/state/store/leaf-store.ts +39 -0
- package/state/store/note-store.ts +177 -0
- package/state/store/nullifier-store.ts +39 -0
- package/state/store/records.ts +61 -0
- package/state/store/root-store.ts +34 -0
- package/state/store/store.ts +25 -0
- package/state.test.ts +235 -0
- package/storage/index.ts +3 -0
- package/storage/indexeddb.test.ts +99 -0
- package/storage/indexeddb.ts +235 -0
- package/storage/memory.test.ts +59 -0
- package/storage/memory.ts +93 -0
- package/transactions/deposit.test.ts +160 -0
- package/transactions/deposit.ts +227 -0
- package/transactions/index.ts +20 -0
- package/transactions/note-sync.test.ts +155 -0
- package/transactions/note-sync.ts +452 -0
- package/transactions/reconcile.ts +73 -0
- package/transactions/transact.test.ts +451 -0
- package/transactions/transact.ts +811 -0
- package/transactions/types.ts +141 -0
- package/tsconfig.json +14 -0
- package/types/global.d.ts +15 -0
- package/types.ts +24 -0
- package/utils/async.ts +15 -0
- package/utils/bigint.ts +34 -0
- package/utils/crypto.test.ts +69 -0
- package/utils/crypto.ts +58 -0
- package/utils/json-codec.ts +38 -0
- package/utils/polling.ts +6 -0
- package/utils/signature.ts +16 -0
- package/utils/validators.test.ts +64 -0
- package/utils/validators.ts +86 -0
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* IndexedDB-backed key/value storage for Unlink Core.
|
|
3
|
+
*
|
|
4
|
+
* What this is
|
|
5
|
+
* - A durable, transactional KV store implemented on top of the browser's IndexedDB.
|
|
6
|
+
* - Two object stores: "kv" for app data, "meta" for schema/versioning.
|
|
7
|
+
*
|
|
8
|
+
* Why we implement it
|
|
9
|
+
* - We need persistent client-side state (notes, leaves, nullifiers, cfg, etc.) that survives reloads
|
|
10
|
+
* and works in both web apps and browser extensions without introducing a new backend.
|
|
11
|
+
*
|
|
12
|
+
* When to use
|
|
13
|
+
* - Browser UIs and extensions (MV2/MV3) where persistence is required.
|
|
14
|
+
* - Any environment with a real IndexedDB (tests use `fake-indexeddb`).
|
|
15
|
+
*
|
|
16
|
+
* Trade-offs
|
|
17
|
+
* - 👍 Durable, quota-managed, non-blocking, transactional writes (atomic batch).
|
|
18
|
+
* - 👍 Works offline; no server dependency.
|
|
19
|
+
* - ⚠️ Not natively available in Node/SSR; needs a polyfill for tests only.
|
|
20
|
+
*
|
|
21
|
+
*/
|
|
22
|
+
|
|
23
|
+
import { CoreError } from "../errors.js";
|
|
24
|
+
import { validateKey } from "../keys.js";
|
|
25
|
+
import type { BatchOp, Bytes, IterOptions, KvPair, Storage } from "../types.js";
|
|
26
|
+
|
|
27
|
+
const DEFAULT_DB_NAME = "unlink-core";
|
|
28
|
+
const DB_VERSION = 1;
|
|
29
|
+
const KV_STORE = "kv";
|
|
30
|
+
const META_STORE = "meta";
|
|
31
|
+
const SCHEMA_KEY = "schema_version";
|
|
32
|
+
|
|
33
|
+
// Always hand out a fresh Uint8Array so stored bytes can never be mutated in place.
const copy = (value: Bytes): Uint8Array => Uint8Array.from(value);
|
|
34
|
+
|
|
35
|
+
function assertIndexedDb(): IDBFactory {
|
|
36
|
+
if (typeof indexedDB === "undefined") {
|
|
37
|
+
throw new CoreError("indexedDB is not available in this environment");
|
|
38
|
+
}
|
|
39
|
+
return indexedDB;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export type IndexedDbOptions = {
|
|
43
|
+
name?: string;
|
|
44
|
+
};
|
|
45
|
+
|
|
46
|
+
// Translate IndexedDB's event-based transaction API into a promise that resolves
|
|
47
|
+
// when the transaction completes or rejects on failure/abort.
|
|
48
|
+
function txDone(tx: IDBTransaction): Promise<void> {
|
|
49
|
+
return new Promise<void>((resolve, reject) => {
|
|
50
|
+
tx.oncomplete = () => resolve();
|
|
51
|
+
tx.onerror = () =>
|
|
52
|
+
reject(tx.error ?? new CoreError("indexedDB transaction failed"));
|
|
53
|
+
tx.onabort = () =>
|
|
54
|
+
reject(tx.error ?? new CoreError("indexedDB transaction aborted"));
|
|
55
|
+
});
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function wrapRequest<T>(request: IDBRequest<T>): Promise<T> {
|
|
59
|
+
return new Promise<T>((resolve, reject) => {
|
|
60
|
+
request.onsuccess = () => resolve(request.result);
|
|
61
|
+
request.onerror = () =>
|
|
62
|
+
reject(request.error ?? new CoreError("indexedDB request failed"));
|
|
63
|
+
});
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
// Coerce whatever IndexedDB gives us back into our Bytes type.
|
|
67
|
+
function asBytes(raw: unknown): Bytes {
|
|
68
|
+
if (raw instanceof Uint8Array) return copy(raw);
|
|
69
|
+
if (raw instanceof ArrayBuffer) return copy(new Uint8Array(raw));
|
|
70
|
+
if (ArrayBuffer.isView(raw)) {
|
|
71
|
+
const view = raw as ArrayBufferView;
|
|
72
|
+
const buf = view.buffer.slice(
|
|
73
|
+
view.byteOffset,
|
|
74
|
+
view.byteOffset + view.byteLength,
|
|
75
|
+
);
|
|
76
|
+
return copy(new Uint8Array(buf));
|
|
77
|
+
}
|
|
78
|
+
throw new CoreError("indexedDB value is not binary data");
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// IndexedDB-backed implementation implementing the storage API.
|
|
82
|
+
export function createIndexedDbStorage(opts: IndexedDbOptions = {}): Storage {
|
|
83
|
+
const name = opts.name ?? DEFAULT_DB_NAME;
|
|
84
|
+
let db: IDBDatabase | null = null;
|
|
85
|
+
|
|
86
|
+
// Lazy-open the database so we only touch IndexedDB when needed.
|
|
87
|
+
async function ensureOpen(): Promise<IDBDatabase> {
|
|
88
|
+
if (db) return db;
|
|
89
|
+
|
|
90
|
+
// Guard for environments without IndexedDB (unit tests or SSR).
|
|
91
|
+
const idb: IDBFactory = assertIndexedDb();
|
|
92
|
+
|
|
93
|
+
const request = idb.open(name, DB_VERSION);
|
|
94
|
+
// Versioned schema: create one store for kv entries and one for metadata.
|
|
95
|
+
request.onupgradeneeded = () => {
|
|
96
|
+
const upgradeDb = request.result;
|
|
97
|
+
if (!upgradeDb.objectStoreNames.contains(KV_STORE)) {
|
|
98
|
+
upgradeDb.createObjectStore(KV_STORE);
|
|
99
|
+
}
|
|
100
|
+
if (!upgradeDb.objectStoreNames.contains(META_STORE)) {
|
|
101
|
+
upgradeDb.createObjectStore(META_STORE);
|
|
102
|
+
}
|
|
103
|
+
};
|
|
104
|
+
db = await new Promise<IDBDatabase>((resolve, reject) => {
|
|
105
|
+
request.onsuccess = () => resolve(request.result);
|
|
106
|
+
request.onerror = () =>
|
|
107
|
+
reject(request.error ?? new CoreError("indexedDB open failed"));
|
|
108
|
+
request.onblocked = () =>
|
|
109
|
+
reject(new CoreError("indexedDB open request was blocked"));
|
|
110
|
+
});
|
|
111
|
+
return db!;
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// Start a transaction and give callers both the store and a completion promise.
|
|
115
|
+
const getStore = async (
|
|
116
|
+
storeName: string,
|
|
117
|
+
mode: IDBTransactionMode,
|
|
118
|
+
): Promise<{ store: IDBObjectStore; done: Promise<void> }> => {
|
|
119
|
+
const database = await ensureOpen();
|
|
120
|
+
const tx = database.transaction(storeName, mode);
|
|
121
|
+
const done = txDone(tx);
|
|
122
|
+
return { store: tx.objectStore(storeName), done };
|
|
123
|
+
};
|
|
124
|
+
|
|
125
|
+
return {
|
|
126
|
+
async open() {
|
|
127
|
+
await ensureOpen();
|
|
128
|
+
},
|
|
129
|
+
|
|
130
|
+
async get(key: string) {
|
|
131
|
+
validateKey(key);
|
|
132
|
+
const { store, done } = await getStore(KV_STORE, "readonly");
|
|
133
|
+
const result = await wrapRequest(store.get(key));
|
|
134
|
+
await done;
|
|
135
|
+
if (result === undefined) return null;
|
|
136
|
+
return asBytes(result);
|
|
137
|
+
},
|
|
138
|
+
|
|
139
|
+
async put(key: string, value: Bytes) {
|
|
140
|
+
validateKey(key);
|
|
141
|
+
const { store, done } = await getStore(KV_STORE, "readwrite");
|
|
142
|
+
await wrapRequest(store.put(copy(value), key)); // copy avoids shared mutation; revisit if this becomes costly
|
|
143
|
+
await done;
|
|
144
|
+
},
|
|
145
|
+
|
|
146
|
+
async delete(key: string) {
|
|
147
|
+
validateKey(key);
|
|
148
|
+
const { store, done } = await getStore(KV_STORE, "readwrite");
|
|
149
|
+
await wrapRequest(store.delete(key));
|
|
150
|
+
await done;
|
|
151
|
+
},
|
|
152
|
+
|
|
153
|
+
async batch(ops: BatchOp[]) {
|
|
154
|
+
// Execute the batch within a single transaction to preserve atomicity.
|
|
155
|
+
const database = await ensureOpen();
|
|
156
|
+
const tx = database.transaction(KV_STORE, "readwrite");
|
|
157
|
+
const store = tx.objectStore(KV_STORE);
|
|
158
|
+
const done = txDone(tx);
|
|
159
|
+
try {
|
|
160
|
+
// Replay the batch within a single transaction; validation failure aborts everything.
|
|
161
|
+
for (const op of ops) {
|
|
162
|
+
if (op.put) {
|
|
163
|
+
const [key, value] = op.put;
|
|
164
|
+
validateKey(key);
|
|
165
|
+
store.put(copy(value), key); // copy avoids shared mutation; revisit if this becomes costly
|
|
166
|
+
}
|
|
167
|
+
if (op.del) {
|
|
168
|
+
validateKey(op.del);
|
|
169
|
+
store.delete(op.del);
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
await done;
|
|
173
|
+
} catch (err) {
|
|
174
|
+
try {
|
|
175
|
+
tx.abort();
|
|
176
|
+
} catch (_) {
|
|
177
|
+
/* noop */
|
|
178
|
+
}
|
|
179
|
+
await done.catch(() => {});
|
|
180
|
+
throw err;
|
|
181
|
+
}
|
|
182
|
+
},
|
|
183
|
+
|
|
184
|
+
async iter(options: IterOptions = {}) {
|
|
185
|
+
if (options.start && options.end && options.start > options.end) {
|
|
186
|
+
throw new CoreError("iter start bound must not exceed end bound");
|
|
187
|
+
}
|
|
188
|
+
// Pull everything into memory for now. TODO: stream w/ cursor when needed for perf
|
|
189
|
+
const database = await ensureOpen();
|
|
190
|
+
const tx = database.transaction(KV_STORE, "readonly");
|
|
191
|
+
const store = tx.objectStore(KV_STORE);
|
|
192
|
+
const done = txDone(tx);
|
|
193
|
+
const [keys, values] = await Promise.all([
|
|
194
|
+
wrapRequest<IDBValidKey[]>(store.getAllKeys()),
|
|
195
|
+
wrapRequest<unknown[]>(store.getAll()),
|
|
196
|
+
]);
|
|
197
|
+
await done;
|
|
198
|
+
|
|
199
|
+
const pairs: KvPair[] = keys.map((key, idx) => {
|
|
200
|
+
if (typeof key !== "string") {
|
|
201
|
+
throw new CoreError("encountered non-string key in indexedDB");
|
|
202
|
+
}
|
|
203
|
+
const raw = values[idx];
|
|
204
|
+
return { key, value: asBytes(raw) };
|
|
205
|
+
});
|
|
206
|
+
|
|
207
|
+
let filtered = pairs.filter(({ key }) => {
|
|
208
|
+
if (options.prefix && !key.startsWith(options.prefix)) return false;
|
|
209
|
+
if (options.start && key < options.start) return false;
|
|
210
|
+
if (options.end && key > options.end) return false;
|
|
211
|
+
return true;
|
|
212
|
+
});
|
|
213
|
+
|
|
214
|
+
filtered.sort((a, b) => a.key.localeCompare(b.key));
|
|
215
|
+
if (options.reverse) filtered = filtered.reverse();
|
|
216
|
+
if (options.limit) filtered = filtered.slice(0, options.limit);
|
|
217
|
+
return filtered;
|
|
218
|
+
},
|
|
219
|
+
|
|
220
|
+
async getSchemaVersion() {
|
|
221
|
+
const { store, done } = await getStore(META_STORE, "readonly");
|
|
222
|
+
const value = await wrapRequest<number | undefined>(
|
|
223
|
+
store.get(SCHEMA_KEY),
|
|
224
|
+
);
|
|
225
|
+
await done;
|
|
226
|
+
return value ?? 0;
|
|
227
|
+
},
|
|
228
|
+
|
|
229
|
+
async setSchemaVersion(version) {
|
|
230
|
+
const { store, done } = await getStore(META_STORE, "readwrite");
|
|
231
|
+
await wrapRequest(store.put(version, SCHEMA_KEY));
|
|
232
|
+
await done;
|
|
233
|
+
},
|
|
234
|
+
};
|
|
235
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { beforeEach, describe, expect, it } from "vitest";
|
|
2
|
+
|
|
3
|
+
import { CoreError, KeyValidationError } from "../errors.js";
|
|
4
|
+
import { MAX_KEY_LEN } from "../keys.js";
|
|
5
|
+
import type { Storage } from "../types.js";
|
|
6
|
+
import { createMemoryStorage } from "./memory.js";
|
|
7
|
+
|
|
8
|
+
const encoder = new TextEncoder();
|
|
9
|
+
|
|
10
|
+
describe("createMemoryStorage", () => {
|
|
11
|
+
let storage: Storage;
|
|
12
|
+
|
|
13
|
+
beforeEach(async () => {
|
|
14
|
+
storage = createMemoryStorage();
|
|
15
|
+
await storage.open();
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
it("returns null when key is missing", async () => {
|
|
19
|
+
const value = await storage.get("meta:missing");
|
|
20
|
+
expect(value).toBeNull();
|
|
21
|
+
});
|
|
22
|
+
|
|
23
|
+
it("roundtrips put/get values", async () => {
|
|
24
|
+
await storage.put("meta:key", encoder.encode("value"));
|
|
25
|
+
const value = await storage.get("meta:key");
|
|
26
|
+
expect(value).toEqual(encoder.encode("value"));
|
|
27
|
+
});
|
|
28
|
+
|
|
29
|
+
it("roundtrips schema version", async () => {
|
|
30
|
+
await storage.setSchemaVersion(2);
|
|
31
|
+
const version = await storage.getSchemaVersion();
|
|
32
|
+
expect(version).toBe(2);
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
it("rejects iter calls when start bound exceeds end bound", async () => {
|
|
36
|
+
await expect(
|
|
37
|
+
storage.iter({ start: "notes:z", end: "notes:a" }),
|
|
38
|
+
).rejects.toBeInstanceOf(CoreError);
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
it("keeps batch atomic when validation fails", async () => {
|
|
42
|
+
await storage.put("meta:ok", encoder.encode("1"));
|
|
43
|
+
|
|
44
|
+
const oversizedKey = "a".repeat(MAX_KEY_LEN + 1);
|
|
45
|
+
await expect(
|
|
46
|
+
storage.batch([
|
|
47
|
+
{ del: "meta:ok" },
|
|
48
|
+
{ put: ["notes:new", encoder.encode("value")] },
|
|
49
|
+
{ put: [oversizedKey, encoder.encode("bad")] },
|
|
50
|
+
]),
|
|
51
|
+
).rejects.toBeInstanceOf(KeyValidationError);
|
|
52
|
+
|
|
53
|
+
const original = await storage.get("meta:ok");
|
|
54
|
+
expect(original).toEqual(encoder.encode("1"));
|
|
55
|
+
|
|
56
|
+
const missing = await storage.get("notes:new");
|
|
57
|
+
expect(missing).toBeNull();
|
|
58
|
+
});
|
|
59
|
+
});
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* In-memory key/value storage for Unlink Core.
|
|
3
|
+
*
|
|
4
|
+
* What this is
|
|
5
|
+
* - A minimal, process-local KV store backed by a `Map<string, Uint8Array>`.
|
|
6
|
+
*
|
|
7
|
+
* Why we implement it
|
|
8
|
+
* - Fast, dependency-free storage for unit tests, demos, and ephemeral runs.
|
|
9
|
+
* - Useful as a reference driver for the Storage interface.
|
|
10
|
+
*
|
|
11
|
+
* When to use
|
|
12
|
+
* - Unit tests (deterministic, zero I/O).
|
|
13
|
+
* - CLI/Node scripts that do not need persistence beyond process lifetime.
|
|
14
|
+
* - Prototyping or sandboxing Core logic.
|
|
15
|
+
*
|
|
16
|
+
* Trade-offs
|
|
17
|
+
* - 👍 Extremely fast; zero external APIs; simple to reason about.
|
|
18
|
+
* - 👍 Great for isolation in tests; easy to snapshot/clone.
|
|
19
|
+
* - ⚠️ No durability (data is lost on process exit).
|
|
20
|
+
* - ⚠️ Per-process only; no cross-tab or reload survival.
|
|
21
|
+
*
|
|
22
|
+
*/
|
|
23
|
+
|
|
24
|
+
import { CoreError } from "../errors.js";
|
|
25
|
+
import { validateKey } from "../keys.js";
|
|
26
|
+
import { BatchOp, Bytes, IterOptions, KvPair, Storage } from "../types.js";
|
|
27
|
+
|
|
28
|
+
/** Defensive copy: the store must never share a live buffer with callers. */
function copy(value: Bytes): Uint8Array {
  return new Uint8Array(value);
}
|
|
29
|
+
|
|
30
|
+
export function createMemoryStorage(): Storage {
|
|
31
|
+
const data = new Map<string, Bytes>();
|
|
32
|
+
let schema = 0;
|
|
33
|
+
|
|
34
|
+
return {
|
|
35
|
+
async open() {},
|
|
36
|
+
async get(k) {
|
|
37
|
+
validateKey(k);
|
|
38
|
+
return data.has(k) ? copy(data.get(k)!) : null;
|
|
39
|
+
},
|
|
40
|
+
async put(k, v) {
|
|
41
|
+
validateKey(k);
|
|
42
|
+
data.set(k, copy(v));
|
|
43
|
+
},
|
|
44
|
+
async delete(k) {
|
|
45
|
+
validateKey(k);
|
|
46
|
+
data.delete(k);
|
|
47
|
+
},
|
|
48
|
+
async batch(ops: BatchOp[]) {
|
|
49
|
+
const draft = new Map(data);
|
|
50
|
+
for (const op of ops) {
|
|
51
|
+
if (op.put) {
|
|
52
|
+
const [k, v] = op.put;
|
|
53
|
+
validateKey(k);
|
|
54
|
+
draft.set(k, copy(v));
|
|
55
|
+
}
|
|
56
|
+
if (op.del) {
|
|
57
|
+
validateKey(op.del);
|
|
58
|
+
draft.delete(op.del);
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
data.clear();
|
|
62
|
+
for (const [k, v] of draft.entries()) {
|
|
63
|
+
data.set(k, copy(v));
|
|
64
|
+
}
|
|
65
|
+
},
|
|
66
|
+
async iter(o: IterOptions = {}): Promise<KvPair[]> {
|
|
67
|
+
if (o.start && o.end && o.start > o.end) {
|
|
68
|
+
throw new CoreError("iter start bound must not exceed end bound");
|
|
69
|
+
}
|
|
70
|
+
const all = [...data.entries()].map(([key, value]) => ({
|
|
71
|
+
key,
|
|
72
|
+
value: copy(value),
|
|
73
|
+
}));
|
|
74
|
+
let ks = all
|
|
75
|
+
.filter(
|
|
76
|
+
({ key }) =>
|
|
77
|
+
(!o.prefix || key.startsWith(o.prefix)) &&
|
|
78
|
+
(!o.start || key >= o.start) &&
|
|
79
|
+
(!o.end || key <= o.end),
|
|
80
|
+
)
|
|
81
|
+
.sort((a, b) => a.key.localeCompare(b.key));
|
|
82
|
+
if (o.reverse) ks.reverse();
|
|
83
|
+
if (o.limit) ks = ks.slice(0, o.limit);
|
|
84
|
+
return ks;
|
|
85
|
+
},
|
|
86
|
+
async getSchemaVersion() {
|
|
87
|
+
return schema;
|
|
88
|
+
},
|
|
89
|
+
async setSchemaVersion(n: number) {
|
|
90
|
+
schema = n;
|
|
91
|
+
},
|
|
92
|
+
};
|
|
93
|
+
}
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import { eddsa, poseidon } from "@railgun-community/circomlibjs";
|
|
2
|
+
import { Interface } from "ethers";
|
|
3
|
+
import { beforeEach, describe, expect, it } from "vitest";
|
|
4
|
+
|
|
5
|
+
import {
|
|
6
|
+
createMockFetch,
|
|
7
|
+
createMockIndexer,
|
|
8
|
+
} from "../../test-utils/mock-indexer.js";
|
|
9
|
+
import { ZkAccount } from "../account/zkAccount.js";
|
|
10
|
+
import { ByteLength, ByteUtils } from "../key-derivation/bytes.js";
|
|
11
|
+
import { createStateStore } from "../state/index.js";
|
|
12
|
+
import { createMemoryStorage } from "../storage/index.js";
|
|
13
|
+
import { createDepositClient, DEPOSIT_ABI } from "./deposit.js";
|
|
14
|
+
|
|
15
|
+
// Shared fixture addresses: chain, pool contract, EOA depositor, ERC-20 token.
const chainId = 1;
const poolAddress = "0x1111111111111111111111111111111111111111";
const depositor = "0x2222222222222222222222222222222222222222";
const token = "0x3333333333333333333333333333333333333333";

// Deterministic key material: a fixed 32-byte spending key, its EdDSA public
// key derived via circomlibjs, and an arbitrary nullifying key.
const mockPrivateKey = Buffer.from(new Uint8Array(32).fill(1));
const mockPublicKey = eddsa.prv2pub(mockPrivateKey);
const mockNullifyingKey = 987n;
// Master public key: poseidon over (pubkey[0], pubkey[1], nullifying key).
const mockMpk = poseidon([
  mockPublicKey[0],
  mockPublicKey[1],
  mockNullifyingKey,
]);
// Fully-populated account fixture; viewing keys are inert fill bytes since
// these tests never decrypt anything with them.
const mockZkAccount: ZkAccount = {
  spendingKeyPair: {
    privateKey: mockPrivateKey,
    pubkey: mockPublicKey,
  },
  viewingKeyPair: {
    privateKey: Buffer.from(new Uint8Array(32).fill(2)),
    pubkey: Buffer.from(new Uint8Array(32).fill(3)),
  },
  nullifyingKey: mockNullifyingKey,
  masterPublicKey: mockMpk,
};

// ethers Interface used to decode the calldata produced by the client.
const depositInterface = new Interface(DEPOSIT_ABI);
|
|
42
|
+
|
|
43
|
+
// NOTE(review): this suite is currently disabled via describe.skip — confirm
// whether that is intentional before relying on its coverage.
describe.skip("deposit client", () => {
  let state: ReturnType<typeof createStateStore>;
  let client: ReturnType<typeof createDepositClient>;
  let fetchMock: ReturnType<typeof createMockFetch>;
  let indexer: ReturnType<typeof createMockIndexer>;

  // Fresh in-memory state, mock indexer, and deposit client per test.
  beforeEach(async () => {
    const storage = createMemoryStorage();
    await storage.open();
    state = createStateStore(storage);
    indexer = createMockIndexer();
    fetchMock = createMockFetch(indexer);
    client = createDepositClient(state, { fetch: fetchMock });
  });

  it("builds calldata, enqueues relay, and syncs state via indexer", async () => {
    const request = {
      zkAccount: mockZkAccount,
      chainId,
      poolAddress,
      depositor,
      note: {
        mpk: 7n,
        random: 8n,
        amount: 5n,
        token,
      },
    } as const;

    // Recompute the note commitment independently of the client:
    // npk = poseidon(mpk, random); commitment = poseidon(npk, token, amount).
    const expectedNpk = poseidon([request.note.mpk, request.note.random]);
    const expectedCommitment = poseidon([
      expectedNpk,
      BigInt(token),
      request.note.amount,
    ]);
    const expectedCommitmentHex = ByteUtils.nToHex(
      expectedCommitment,
      ByteLength.UINT_256,
      true,
    );

    const relay = await client.request(request);
    expect(relay.commitment).toBe(expectedCommitmentHex);
    // Simulate the chain confirming the deposit, then sync it into state.
    indexer.publish(chainId, relay.commitment);
    const result = await client.syncPendingDeposit(relay.relayId);

    expect(result.chainId).toBe(chainId);
    expect(result.index).toBe(0);
    expect(result.commitment).toBe(expectedCommitmentHex);
    expect(result.root).toMatch(/^0x[0-9a-f]+$/);

    // The relay calldata must decode back to the original deposit arguments.
    const decoded = depositInterface.decodeFunctionData(
      "deposit",
      relay.calldata,
    );
    expect(decoded[0]).toBe(depositor);
    expect(decoded[1][0].npk).toBe(expectedNpk);
    expect(decoded[1][0].amount).toBe(request.note.amount);
    expect(decoded[1][0].token).toBe(token);

    // The synced leaf and root must be persisted in the state store.
    const storedLeaf = await state.getLeaf(chainId, result.index);
    expect(storedLeaf?.commitment).toBe(result.commitment);

    const storedRoot = await state.getRoot(chainId, result.root);
    expect(storedRoot?.root).toBe(result.root);
  });

  it("tracks indexes per chain independently", async () => {
    // Two deposits on chainId, one on chain 10; indexes must be per-chain.
    const firstNote = { mpk: 1n, random: 2n, amount: 2n, token };
    const firstRequest = await client.request({
      zkAccount: mockZkAccount,
      chainId,
      poolAddress,
      depositor,
      note: firstNote,
    });
    indexer.publish(chainId, firstRequest.commitment);
    const first = await client.syncPendingDeposit(firstRequest.relayId);

    const secondNote = { mpk: 3n, random: 4n, amount: 4n, token };
    const secondRequest = await client.request({
      zkAccount: mockZkAccount,
      chainId,
      poolAddress,
      depositor,
      note: secondNote,
    });
    indexer.publish(chainId, secondRequest.commitment);
    const second = await client.syncPendingDeposit(secondRequest.relayId);

    const otherNote = { mpk: 5n, random: 6n, amount: 6n, token };
    const otherRequest = await client.request({
      zkAccount: mockZkAccount,
      chainId: 10,
      poolAddress,
      depositor,
      note: otherNote,
    });
    indexer.publish(10, otherRequest.commitment);
    const otherChain = await client.syncPendingDeposit(otherRequest.relayId);

    expect(first.index).toBe(0);
    expect(second.index).toBe(1);
    // A different chain starts from index 0 again.
    expect(otherChain.index).toBe(0);
  });

  it("rejects invalid chain ids", async () => {
    await expect(
      client.request({
        zkAccount: mockZkAccount,
        chainId: 0,
        poolAddress,
        depositor,
        note: { mpk: 1n, random: 1n, amount: 1n, token },
      }),
    ).rejects.toThrow("chainId must be a positive integer");
  });
});
|