@breeztech/breez-sdk-spark 0.12.2-dev1 → 0.12.2-dev2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -4
- package/breez-sdk-spark.tgz +0 -0
- package/bundler/breez_sdk_spark_wasm.d.ts +731 -601
- package/bundler/breez_sdk_spark_wasm_bg.js +151 -54
- package/bundler/breez_sdk_spark_wasm_bg.wasm +0 -0
- package/bundler/breez_sdk_spark_wasm_bg.wasm.d.ts +7 -5
- package/bundler/storage/index.js +65 -43
- package/deno/breez_sdk_spark_wasm.d.ts +731 -601
- package/deno/breez_sdk_spark_wasm.js +141 -54
- package/deno/breez_sdk_spark_wasm_bg.wasm +0 -0
- package/deno/breez_sdk_spark_wasm_bg.wasm.d.ts +7 -5
- package/nodejs/breez_sdk_spark_wasm.d.ts +731 -601
- package/nodejs/breez_sdk_spark_wasm.js +151 -54
- package/nodejs/breez_sdk_spark_wasm_bg.wasm +0 -0
- package/nodejs/breez_sdk_spark_wasm_bg.wasm.d.ts +7 -5
- package/nodejs/index.js +16 -2
- package/nodejs/package.json +1 -0
- package/nodejs/postgres-storage/index.cjs +42 -31
- package/nodejs/postgres-storage/migrations.cjs +24 -0
- package/nodejs/postgres-token-store/errors.cjs +13 -0
- package/nodejs/postgres-token-store/index.cjs +857 -0
- package/nodejs/postgres-token-store/migrations.cjs +163 -0
- package/nodejs/postgres-token-store/package.json +9 -0
- package/nodejs/postgres-tree-store/index.cjs +12 -2
- package/nodejs/storage/index.cjs +19 -28
- package/nodejs/storage/migrations.cjs +18 -0
- package/package.json +1 -1
- package/web/breez_sdk_spark_wasm.d.ts +738 -606
- package/web/breez_sdk_spark_wasm.js +141 -54
- package/web/breez_sdk_spark_wasm_bg.wasm +0 -0
- package/web/breez_sdk_spark_wasm_bg.wasm.d.ts +7 -5
- package/web/storage/index.js +65 -43
|
@@ -0,0 +1,857 @@
|
|
|
1
|
+
/**
 * CommonJS implementation for Node.js PostgreSQL Token Store
 */

let pg;
try {
  // Prefer resolving "pg" from the consuming application's dependency tree
  // (require.main) so the app's own installed version is used.
  const mainModule = require.main;
  pg = mainModule ? mainModule.require("pg") : require("pg");
} catch (error) {
  try {
    // Fall back to this package's own module resolution.
    pg = require("pg");
  } catch (fallbackError) {
    throw new Error(
      `pg not found. Please install it in your project: npm install pg@^8.18.0\n` +
        `Original error: ${error.message}\nFallback error: ${fallbackError.message}`
    );
  }
}
+
|
|
24
|
+
const { TokenStoreError } = require("./errors.cjs");
|
|
25
|
+
const { TokenStoreMigrationManager } = require("./migrations.cjs");
|
|
26
|
+
|
|
27
|
+
// Advisory lock key used to serialize token store write operations across
// processes. Mirrors the Rust constant TOKEN_STORE_WRITE_LOCK_KEY
// (0x746F_6B65_6E53_5452), written here in decimal string form because the
// bigint value exceeds Number.MAX_SAFE_INTEGER.
const TOKEN_STORE_WRITE_LOCK_KEY = "8390042714201347154";

// How long spent-output markers are retained (5 minutes). Keeping them
// briefly lets multiple concurrent SDK instances observe recent spends;
// during setTokensOutputs, markers older than refresh_timestamp are ignored.
const SPENT_MARKER_CLEANUP_THRESHOLD_MS = 300000;
class PostgresTokenStore {
  /**
   * @param {import('pg').Pool} pool - Connection pool used for every query.
   * @param {object|null} [logger] - Optional logger forwarded to the migration manager.
   */
  constructor(pool, logger = null) {
    this.pool = pool;
    this.logger = logger;
  }

  /**
   * Initialize the database (run migrations).
   * @returns {Promise<PostgresTokenStore>} this, so callers can chain.
   * @throws {TokenStoreError} when migration fails.
   */
  async initialize() {
    try {
      const migrationManager = new TokenStoreMigrationManager(this.logger);
      await migrationManager.migrate(this.pool);
      return this;
    } catch (error) {
      throw new TokenStoreError(
        `Failed to initialize PostgreSQL token store: ${error.message}`,
        error
      );
    }
  }

  /**
   * Close the pool. Idempotent: subsequent calls are no-ops.
   */
  async close() {
    if (this.pool) {
      await this.pool.end();
      this.pool = null;
    }
  }

  /**
   * Run a function inside a transaction holding the token-store advisory lock.
   * Commits on success; rolls back (best-effort) and rethrows on failure.
   * The client is always released back to the pool.
   * @param {function(import('pg').PoolClient): Promise<T>} fn
   * @returns {Promise<T>}
   * @template T
   */
  async _withWriteTransaction(fn) {
    const client = await this.pool.connect();
    try {
      await client.query("BEGIN");
      // TOKEN_STORE_WRITE_LOCK_KEY is a module-level constant (never user
      // input), so direct interpolation is safe; it is kept as a string
      // because the bigint key exceeds Number.MAX_SAFE_INTEGER.
      await client.query(`SELECT pg_advisory_xact_lock(${TOKEN_STORE_WRITE_LOCK_KEY})`);
      const result = await fn(client);
      await client.query("COMMIT");
      return result;
    } catch (error) {
      await client.query("ROLLBACK").catch(() => {});
      throw error;
    } finally {
      client.release();
    }
  }

  // ===== TokenOutputStore Methods =====

  /**
   * Set the full set of token outputs, reconciling reservations.
   * The whole update is skipped when a swap reservation is active or a swap
   * completed at/after the refresh start (the refresh snapshot would be stale).
   * @param {Array<{metadata: Object, outputs: Array}>} tokenOutputs
   * @param {number} refreshStartedAtMs - Milliseconds since epoch when the refresh started
   * @throws {TokenStoreError}
   */
  async setTokensOutputs(tokenOutputs, refreshStartedAtMs) {
    try {
      const refreshTimestamp = new Date(refreshStartedAtMs);

      await this._withWriteTransaction(async (client) => {
        // Skip if swap is active or completed during this refresh
        const swapCheckResult = await client.query(
          `SELECT
             EXISTS(SELECT 1 FROM token_reservations WHERE purpose = 'Swap') AS has_active_swap,
             COALESCE((SELECT last_completed_at >= $1 FROM token_swap_status WHERE id = 1), FALSE) AS swap_completed`,
          [refreshTimestamp]
        );
        const { has_active_swap, swap_completed } = swapCheckResult.rows[0];
        if (has_active_swap || swap_completed) {
          return;
        }

        // Clean up spent markers older than the retention threshold
        const cleanupCutoff = new Date(refreshTimestamp.getTime() - SPENT_MARKER_CLEANUP_THRESHOLD_MS);
        await client.query(
          "DELETE FROM token_spent_outputs WHERE spent_at < $1",
          [cleanupCutoff]
        );

        // Get recent spent output IDs (spent_at >= refresh_timestamp); these
        // were spent after the refresh snapshot was taken and must not be
        // re-inserted below.
        const spentResult = await client.query(
          "SELECT output_id FROM token_spent_outputs WHERE spent_at >= $1",
          [refreshTimestamp]
        );
        const spentIds = new Set(spentResult.rows.map((r) => r.output_id));

        // Delete non-reserved outputs added BEFORE the refresh started
        await client.query(
          "DELETE FROM token_outputs WHERE reservation_id IS NULL AND added_at < $1",
          [refreshTimestamp]
        );

        // Build a set of all incoming output IDs for reconciliation
        const incomingOutputIds = new Set();
        for (const to of tokenOutputs) {
          for (const o of to.outputs) {
            incomingOutputIds.add(o.output.id);
          }
        }

        // Reconcile reservations: find reserved outputs that no longer exist
        const reservedRows = await client.query(
          `SELECT r.id, o.id AS output_id
           FROM token_reservations r
           JOIN token_outputs o ON o.reservation_id = r.id`
        );

        // Group reserved outputs by reservation ID
        const reservationOutputs = new Map();
        for (const row of reservedRows.rows) {
          if (!reservationOutputs.has(row.id)) {
            reservationOutputs.set(row.id, []);
          }
          reservationOutputs.get(row.id).push(row.output_id);
        }

        // A reservation with no surviving outputs is dropped wholesale;
        // otherwise only its vanished outputs are removed from it.
        const reservationsToDelete = [];
        const outputsToRemoveFromReservation = [];
        for (const [reservationId, outputIds] of reservationOutputs) {
          const validIds = outputIds.filter((id) => incomingOutputIds.has(id));
          if (validIds.length === 0) {
            reservationsToDelete.push(reservationId);
          } else {
            for (const id of outputIds) {
              if (!incomingOutputIds.has(id)) {
                outputsToRemoveFromReservation.push(id);
              }
            }
          }
        }

        // Delete outputs whose reservations are being removed entirely
        if (reservationsToDelete.length > 0) {
          await client.query(
            "DELETE FROM token_outputs WHERE reservation_id = ANY($1)",
            [reservationsToDelete]
          );
          await client.query(
            "DELETE FROM token_reservations WHERE id = ANY($1)",
            [reservationsToDelete]
          );
        }

        // Delete individual reserved outputs that no longer exist
        if (outputsToRemoveFromReservation.length > 0) {
          await client.query(
            "DELETE FROM token_outputs WHERE id = ANY($1)",
            [outputsToRemoveFromReservation]
          );

          // Check if any reservations are now empty
          const emptyReservations = await client.query(
            `SELECT r.id FROM token_reservations r
             LEFT JOIN token_outputs o ON o.reservation_id = r.id
             WHERE o.id IS NULL`
          );
          const emptyIds = emptyReservations.rows.map((r) => r.id);
          if (emptyIds.length > 0) {
            await client.query(
              "DELETE FROM token_reservations WHERE id = ANY($1)",
              [emptyIds]
            );
          }
        }

        // Collect IDs of currently reserved outputs (that survived reconciliation)
        const reservedOutputIdsResult = await client.query(
          "SELECT id FROM token_outputs WHERE reservation_id IS NOT NULL"
        );
        const reservedOutputIds = new Set(
          reservedOutputIdsResult.rows.map((r) => r.id)
        );

        // Delete metadata for tokens with no remaining outputs
        await this._deleteOrphanMetadata(client);

        // Insert new metadata and outputs, excluding spent and reserved
        for (const to of tokenOutputs) {
          await this._upsertMetadata(client, to.metadata);

          for (const output of to.outputs) {
            if (reservedOutputIds.has(output.output.id) || spentIds.has(output.output.id)) {
              continue;
            }
            await this._insertSingleOutput(
              client,
              to.metadata.identifier,
              output
            );
          }
        }
      });
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to set token outputs: ${error.message}`,
        error
      );
    }
  }

  /**
   * List all token outputs grouped by status.
   * Tokens that have metadata but no outputs (via the LEFT JOIN) are returned
   * with all three arrays empty.
   * @returns {Promise<Array<{metadata: Object, available: Array, reservedForPayment: Array, reservedForSwap: Array}>>}
   * @throws {TokenStoreError}
   */
  async listTokensOutputs() {
    try {
      const result = await this.pool.query(
        `SELECT m.identifier, m.issuer_public_key, m.name, m.ticker, m.decimals,
                m.max_supply, m.is_freezable, m.creation_entity_public_key,
                o.id AS output_id, o.owner_public_key, o.revocation_commitment,
                o.withdraw_bond_sats, o.withdraw_relative_block_locktime,
                o.token_public_key, o.token_amount, o.token_identifier,
                o.prev_tx_hash, o.prev_tx_vout, o.reservation_id,
                r.purpose
         FROM token_metadata m
         LEFT JOIN token_outputs o ON o.token_identifier = m.identifier
         LEFT JOIN token_reservations r ON o.reservation_id = r.id
         ORDER BY m.identifier, o.token_amount::NUMERIC ASC`
      );

      const map = new Map();

      for (const row of result.rows) {
        if (!map.has(row.identifier)) {
          map.set(row.identifier, this._newEntry(this._metadataFromRow(row)));
        }

        // A metadata-only row (no joined output) still creates the entry above.
        if (!row.output_id) {
          continue;
        }

        this._pushByPurpose(map.get(row.identifier), row.purpose, this._outputFromRow(row));
      }

      return Array.from(map.values());
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to list token outputs: ${error.message}`,
        error
      );
    }
  }

  /**
   * Get token outputs for a specific token by filter.
   * @param {{type: string, identifier?: string, issuerPublicKey?: string}} filter
   * @returns {Promise<{metadata: Object, available: Array, reservedForPayment: Array, reservedForSwap: Array}>}
   * @throws {TokenStoreError} on unknown filter type or when no token matches.
   */
  async getTokenOutputs(filter) {
    try {
      let whereClause;
      let param;

      // whereClause is chosen from a fixed set of literals (never user input),
      // and the filter value itself is always passed as a bind parameter.
      if (filter.type === "identifier") {
        whereClause = "m.identifier = $1";
        param = filter.identifier;
      } else if (filter.type === "issuerPublicKey") {
        whereClause = "m.issuer_public_key = $1";
        param = filter.issuerPublicKey;
      } else {
        throw new TokenStoreError(`Unknown filter type: ${filter.type}`);
      }

      const result = await this.pool.query(
        `SELECT m.identifier, m.issuer_public_key, m.name, m.ticker, m.decimals,
                m.max_supply, m.is_freezable, m.creation_entity_public_key,
                o.id AS output_id, o.owner_public_key, o.revocation_commitment,
                o.withdraw_bond_sats, o.withdraw_relative_block_locktime,
                o.token_public_key, o.token_amount, o.token_identifier,
                o.prev_tx_hash, o.prev_tx_vout, o.reservation_id,
                r.purpose
         FROM token_metadata m
         LEFT JOIN token_outputs o ON o.token_identifier = m.identifier
         LEFT JOIN token_reservations r ON o.reservation_id = r.id
         WHERE ${whereClause}
         ORDER BY o.token_amount::NUMERIC ASC`,
        [param]
      );

      if (result.rows.length === 0) {
        throw new TokenStoreError("Token outputs not found");
      }

      const entry = this._newEntry(this._metadataFromRow(result.rows[0]));

      for (const row of result.rows) {
        // Metadata-only rows (no joined output) contribute nothing.
        if (!row.output_id) {
          continue;
        }
        this._pushByPurpose(entry, row.purpose, this._outputFromRow(row));
      }

      return entry;
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to get token outputs: ${error.message}`,
        error
      );
    }
  }

  /**
   * Insert token outputs (upsert metadata, insert outputs with ON CONFLICT DO NOTHING).
   * Any spent markers for the inserted IDs are cleared first: the output has
   * returned to our ownership.
   * @param {{metadata: Object, outputs: Array}} tokenOutputs
   * @throws {TokenStoreError}
   */
  async insertTokenOutputs(tokenOutputs) {
    try {
      const client = await this.pool.connect();
      try {
        await client.query("BEGIN");

        await this._upsertMetadata(client, tokenOutputs.metadata);

        // Remove inserted output IDs from spent markers (output returned to us)
        const outputIds = tokenOutputs.outputs.map((o) => o.output.id);
        if (outputIds.length > 0) {
          await client.query(
            "DELETE FROM token_spent_outputs WHERE output_id = ANY($1)",
            [outputIds]
          );
        }

        for (const output of tokenOutputs.outputs) {
          await this._insertSingleOutput(
            client,
            tokenOutputs.metadata.identifier,
            output
          );
        }

        await client.query("COMMIT");
      } catch (error) {
        await client.query("ROLLBACK").catch(() => {});
        throw error;
      } finally {
        client.release();
      }
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to insert token outputs: ${error.message}`,
        error
      );
    }
  }

  /**
   * Reserve token outputs for a payment or swap.
   * @param {string} tokenIdentifier
   * @param {{type: string, value: number}} target - minTotalValue or maxOutputCount
   * @param {string} purpose - "Payment" or "Swap"
   * @param {Array|null} preferredOutputs - When given, selection is restricted to these outputs.
   * @param {string|null} selectionStrategy - "SmallestFirst" (default) or "LargestFirst"
   * @returns {Promise<{id: string, tokenOutputs: {metadata: Object, outputs: Array}}>}
   * @throws {TokenStoreError} on invalid target, unknown token, or insufficient funds.
   */
  async reserveTokenOutputs(
    tokenIdentifier,
    target,
    purpose,
    preferredOutputs,
    selectionStrategy
  ) {
    try {
      return await this._withWriteTransaction(async (client) => {
        // Validate target
        if (target.type === "minTotalValue" && (!target.value || target.value === "0")) {
          throw new TokenStoreError(
            "Amount to reserve must be greater than zero"
          );
        }
        if (target.type === "maxOutputCount" && (!target.value || target.value === 0)) {
          throw new TokenStoreError(
            "Count to reserve must be greater than zero"
          );
        }

        // Get metadata
        const metadataResult = await client.query(
          "SELECT * FROM token_metadata WHERE identifier = $1",
          [tokenIdentifier]
        );

        if (metadataResult.rows.length === 0) {
          throw new TokenStoreError(
            `Token outputs not found for identifier: ${tokenIdentifier}`
          );
        }

        const metadata = this._metadataFromRow(metadataResult.rows[0]);

        // Get available (non-reserved) outputs
        const outputRows = await client.query(
          `SELECT o.id AS output_id, o.owner_public_key, o.revocation_commitment,
                  o.withdraw_bond_sats, o.withdraw_relative_block_locktime,
                  o.token_public_key, o.token_amount, o.token_identifier,
                  o.prev_tx_hash, o.prev_tx_vout
           FROM token_outputs o
           WHERE o.token_identifier = $1 AND o.reservation_id IS NULL`,
          [tokenIdentifier]
        );

        let outputs = outputRows.rows.map((row) => this._outputFromRow(row));

        // Filter by preferred if provided
        if (preferredOutputs && preferredOutputs.length > 0) {
          const preferredIds = new Set(
            preferredOutputs.map((p) => p.output.id)
          );
          outputs = outputs.filter((o) => preferredIds.has(o.output.id));
        }

        // Select outputs based on target
        let selectedOutputs;

        if (target.type === "minTotalValue") {
          // Amounts are handled as BigInt: they may exceed Number.MAX_SAFE_INTEGER.
          const amount = BigInt(target.value);

          // Check sufficiency up front
          const totalAvailable = outputs.reduce(
            (sum, o) => sum + BigInt(o.output.tokenAmount),
            0n
          );
          if (totalAvailable < amount) {
            throw new TokenStoreError("InsufficientFunds");
          }

          // Prefer a single exact-amount output to minimize change
          const exactMatch = outputs.find(
            (o) => BigInt(o.output.tokenAmount) === amount
          );
          if (exactMatch) {
            selectedOutputs = [exactMatch];
          } else {
            // Greedy accumulation in strategy order until the amount is covered
            this._sortByStrategy(outputs, selectionStrategy);

            selectedOutputs = [];
            let remaining = amount;
            for (const output of outputs) {
              if (remaining <= 0n) break;
              selectedOutputs.push(output);
              remaining -= BigInt(output.output.tokenAmount);
            }
            if (remaining > 0n) {
              throw new TokenStoreError("InsufficientFunds");
            }
          }
        } else if (target.type === "maxOutputCount") {
          // Take up to `count` outputs in strategy order
          this._sortByStrategy(outputs, selectionStrategy);
          selectedOutputs = outputs.slice(0, target.value);
        } else {
          throw new TokenStoreError(`Unknown target type: ${target.type}`);
        }

        // Create reservation
        const reservationId = this._generateId();

        await client.query(
          "INSERT INTO token_reservations (id, purpose) VALUES ($1, $2)",
          [reservationId, purpose]
        );

        // Set reservation_id on selected outputs
        const selectedIds = selectedOutputs.map((o) => o.output.id);
        if (selectedIds.length > 0) {
          await client.query(
            "UPDATE token_outputs SET reservation_id = $1 WHERE id = ANY($2)",
            [reservationId, selectedIds]
          );
        }

        return {
          id: reservationId,
          tokenOutputs: {
            metadata,
            outputs: selectedOutputs,
          },
        };
      });
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to reserve token outputs: ${error.message}`,
        error
      );
    }
  }

  /**
   * Cancel a reservation, releasing reserved outputs back to available.
   * @param {string} id - Reservation ID
   * @throws {TokenStoreError}
   */
  async cancelReservation(id) {
    try {
      await this._withWriteTransaction(async (client) => {
        // Clear reservation_id from outputs
        await client.query(
          "UPDATE token_outputs SET reservation_id = NULL WHERE reservation_id = $1",
          [id]
        );

        // Delete the reservation
        await client.query(
          "DELETE FROM token_reservations WHERE id = $1",
          [id]
        );
      });
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to cancel reservation '${id}': ${error.message}`,
        error
      );
    }
  }

  /**
   * Finalize a reservation: mark its outputs as spent, delete them, and
   * clean up. A non-existing reservation is a silent no-op. Swap reservations
   * additionally update token_swap_status.last_completed_at, which causes
   * concurrent setTokensOutputs refreshes to be skipped.
   * @param {string} id - Reservation ID
   * @throws {TokenStoreError}
   */
  async finalizeReservation(id) {
    try {
      await this._withWriteTransaction(async (client) => {
        // Get reservation purpose
        const reservationResult = await client.query(
          "SELECT purpose FROM token_reservations WHERE id = $1",
          [id]
        );
        if (reservationResult.rows.length === 0) {
          return; // Non-existing reservation
        }
        const isSwap = reservationResult.rows[0].purpose === "Swap";

        // Get reserved output IDs and mark them as spent
        const reservedOutputsResult = await client.query(
          "SELECT id FROM token_outputs WHERE reservation_id = $1",
          [id]
        );
        const reservedOutputIds = reservedOutputsResult.rows.map((r) => r.id);

        if (reservedOutputIds.length > 0) {
          await client.query(
            `INSERT INTO token_spent_outputs (output_id)
             SELECT * FROM UNNEST($1::text[])
             ON CONFLICT DO NOTHING`,
            [reservedOutputIds]
          );
        }

        // Delete reserved outputs
        await client.query(
          "DELETE FROM token_outputs WHERE reservation_id = $1",
          [id]
        );

        // Delete the reservation
        await client.query(
          "DELETE FROM token_reservations WHERE id = $1",
          [id]
        );

        // If this was a swap reservation, update last_completed_at
        if (isSwap) {
          await client.query(
            "UPDATE token_swap_status SET last_completed_at = NOW() WHERE id = 1"
          );
        }

        // Clean up orphaned metadata
        await this._deleteOrphanMetadata(client);
      });
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to finalize reservation '${id}': ${error.message}`,
        error
      );
    }
  }

  /**
   * Get the current database server time as milliseconds since epoch.
   * Uses the server clock (not the local one) so timestamps are consistent
   * with spent_at/added_at values written by NOW() in SQL.
   * @returns {Promise<number>}
   * @throws {TokenStoreError}
   */
  async now() {
    try {
      const result = await this.pool.query("SELECT NOW()");
      return result.rows[0].now.getTime();
    } catch (error) {
      if (error instanceof TokenStoreError) throw error;
      throw new TokenStoreError(
        `Failed to get current time: ${error.message}`,
        error
      );
    }
  }

  // ===== Private Helpers =====

  /**
   * Generate a unique reservation ID (UUIDv4). Prefers the Web Crypto global;
   * falls back to a Math.random-based v4 generator on older runtimes
   * (sufficient here: the ID is an opaque key, not a security token).
   */
  _generateId() {
    if (typeof crypto !== "undefined" && crypto.randomUUID) {
      return crypto.randomUUID();
    }
    return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
      const r = (Math.random() * 16) | 0;
      const v = c === "x" ? r : (r & 0x3) | 0x8;
      return v.toString(16);
    });
  }

  /**
   * Create an empty grouped-outputs entry for a token.
   */
  _newEntry(metadata) {
    return {
      metadata,
      available: [],
      reservedForPayment: [],
      reservedForSwap: [],
    };
  }

  /**
   * Push an output into the entry bucket matching its reservation purpose.
   * Outputs without a reservation (purpose null/undefined) are available.
   */
  _pushByPurpose(entry, purpose, output) {
    if (purpose === "Payment") {
      entry.reservedForPayment.push(output);
    } else if (purpose === "Swap") {
      entry.reservedForSwap.push(output);
    } else {
      entry.available.push(output);
    }
  }

  /**
   * Sort outputs in place by token amount per the selection strategy.
   * BigInt comparison keeps amounts beyond Number.MAX_SAFE_INTEGER exact;
   * converting the difference with Number() preserves its sign even if the
   * magnitude overflows to Infinity, which is all sort() needs.
   * @param {Array} outputs
   * @param {string|null} selectionStrategy - "LargestFirst"; anything else means SmallestFirst.
   */
  _sortByStrategy(outputs, selectionStrategy) {
    const direction = selectionStrategy === "LargestFirst" ? -1 : 1;
    outputs.sort(
      (a, b) =>
        direction * Number(BigInt(a.output.tokenAmount) - BigInt(b.output.tokenAmount))
    );
  }

  /**
   * Delete metadata rows for tokens that no longer have any outputs.
   */
  async _deleteOrphanMetadata(client) {
    await client.query(
      `DELETE FROM token_metadata
       WHERE identifier NOT IN (
         SELECT DISTINCT token_identifier FROM token_outputs
       )`
    );
  }

  /**
   * Upsert token metadata keyed by identifier.
   */
  async _upsertMetadata(client, metadata) {
    await client.query(
      `INSERT INTO token_metadata
        (identifier, issuer_public_key, name, ticker, decimals, max_supply,
         is_freezable, creation_entity_public_key)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
       ON CONFLICT (identifier) DO UPDATE SET
         issuer_public_key = EXCLUDED.issuer_public_key,
         name = EXCLUDED.name,
         ticker = EXCLUDED.ticker,
         decimals = EXCLUDED.decimals,
         max_supply = EXCLUDED.max_supply,
         is_freezable = EXCLUDED.is_freezable,
         creation_entity_public_key = EXCLUDED.creation_entity_public_key`,
      [
        metadata.identifier,
        metadata.issuerPublicKey,
        metadata.name,
        metadata.ticker,
        metadata.decimals,
        metadata.maxSupply,
        metadata.isFreezable,
        metadata.creationEntityPublicKey || null,
      ]
    );
  }

  /**
   * Insert a single output; duplicate IDs are silently ignored.
   */
  async _insertSingleOutput(client, tokenIdentifier, output) {
    await client.query(
      `INSERT INTO token_outputs
        (id, token_identifier, owner_public_key, revocation_commitment,
         withdraw_bond_sats, withdraw_relative_block_locktime,
         token_public_key, token_amount, prev_tx_hash, prev_tx_vout, added_at)
       VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
       ON CONFLICT (id) DO NOTHING`,
      [
        output.output.id,
        tokenIdentifier,
        output.output.ownerPublicKey,
        output.output.revocationCommitment,
        output.output.withdrawBondSats,
        output.output.withdrawRelativeBlockLocktime,
        output.output.tokenPublicKey || null,
        output.output.tokenAmount,
        output.prevTxHash,
        output.prevTxVout,
      ]
    );
  }

  /**
   * Parse a TokenMetadata from a database row (snake_case -> camelCase).
   */
  _metadataFromRow(row) {
    return {
      identifier: row.identifier,
      issuerPublicKey: row.issuer_public_key,
      name: row.name,
      ticker: row.ticker,
      decimals: row.decimals,
      maxSupply: row.max_supply,
      isFreezable: row.is_freezable,
      creationEntityPublicKey: row.creation_entity_public_key || null,
    };
  }

  /**
   * Parse a TokenOutputWithPrevOut from a database row. Bond sats and the
   * relative locktime are coerced to Number; token_amount is passed through
   * unchanged (kept as a string so large amounts stay exact).
   */
  _outputFromRow(row) {
    return {
      output: {
        id: row.output_id,
        ownerPublicKey: row.owner_public_key,
        revocationCommitment: row.revocation_commitment,
        withdrawBondSats: Number(row.withdraw_bond_sats),
        withdrawRelativeBlockLocktime: Number(
          row.withdraw_relative_block_locktime
        ),
        tokenPublicKey: row.token_public_key || null,
        tokenIdentifier: row.token_identifier || row.identifier,
        tokenAmount: row.token_amount,
      },
      prevTxHash: row.prev_tx_hash,
      prevTxVout: row.prev_tx_vout,
    };
  }
}
822
|
+
|
|
823
|
+
/**
 * Create a PostgresTokenStore instance from a config object.
 *
 * @param {object} config - PostgreSQL configuration
 * @param {string} config.connectionString - PostgreSQL connection string
 * @param {number} config.maxPoolSize - Maximum number of connections in the pool
 * @param {number} config.createTimeoutSecs - Timeout in seconds for establishing a new connection
 * @param {number} config.recycleTimeoutSecs - Timeout in seconds before recycling an idle connection
 * @param {object} [logger] - Optional logger
 * @returns {Promise<PostgresTokenStore>}
 */
async function createPostgresTokenStore(config, logger = null) {
  // pg.Pool speaks milliseconds; the config speaks seconds.
  const poolOptions = {
    connectionString: config.connectionString,
    max: config.maxPoolSize,
    connectionTimeoutMillis: config.createTimeoutSecs * 1000,
    idleTimeoutMillis: config.recycleTimeoutSecs * 1000,
  };
  return createPostgresTokenStoreWithPool(new pg.Pool(poolOptions), logger);
}
|
+
|
|
844
|
+
/**
 * Create a PostgresTokenStore instance from an existing pg.Pool.
 *
 * @param {pg.Pool} pool - An existing connection pool
 * @param {object} [logger] - Optional logger
 * @returns {Promise<PostgresTokenStore>}
 */
async function createPostgresTokenStoreWithPool(pool, logger = null) {
  // initialize() runs migrations and resolves to the store itself.
  return new PostgresTokenStore(pool, logger).initialize();
}
|
856
|
+
|
|
857
|
+
// Public API: the store class, the two async factory helpers, and the
// TokenStoreError type (imported from ./errors.cjs) re-exported so consumers
// can run instanceof checks on thrown errors.
module.exports = { PostgresTokenStore, createPostgresTokenStore, createPostgresTokenStoreWithPool, TokenStoreError };
|