@arkade-os/sdk 0.3.1-alpha.0 → 0.3.1-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/musig2/index.js +2 -1
- package/dist/cjs/musig2/nonces.js +4 -0
- package/dist/cjs/providers/ark.js +10 -8
- package/dist/cjs/providers/expoArk.js +5 -5
- package/dist/cjs/providers/expoIndexer.js +3 -3
- package/dist/cjs/providers/expoUtils.js +124 -0
- package/dist/cjs/providers/utils.js +0 -122
- package/dist/cjs/tree/signingSession.js +38 -4
- package/dist/cjs/utils/unknownFields.js +2 -2
- package/dist/cjs/wallet/wallet.js +16 -8
- package/dist/esm/musig2/index.js +1 -1
- package/dist/esm/musig2/nonces.js +3 -0
- package/dist/esm/providers/ark.js +10 -8
- package/dist/esm/providers/expoArk.js +1 -1
- package/dist/esm/providers/expoIndexer.js +1 -1
- package/dist/esm/providers/expoUtils.js +87 -0
- package/dist/esm/providers/utils.js +0 -87
- package/dist/esm/tree/signingSession.js +38 -4
- package/dist/esm/utils/unknownFields.js +2 -2
- package/dist/esm/wallet/wallet.js +16 -8
- package/dist/types/index.d.ts +2 -2
- package/dist/types/musig2/index.d.ts +1 -1
- package/dist/types/musig2/nonces.d.ts +1 -0
- package/dist/types/providers/ark.d.ts +7 -5
- package/dist/types/providers/expoUtils.d.ts +18 -0
- package/dist/types/providers/utils.d.ts +0 -18
- package/dist/types/tree/signingSession.d.ts +8 -3
- package/dist/types/utils/unknownFields.d.ts +2 -2
- package/dist/types/wallet/wallet.d.ts +1 -1
- package/package.json +1 -1
package/dist/cjs/musig2/index.js
CHANGED

@@ -1,8 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.aggregateKeys = exports.sign = exports.PartialSig = exports.generateNonces = void 0;
+exports.aggregateKeys = exports.sign = exports.PartialSig = exports.aggregateNonces = exports.generateNonces = void 0;
 var nonces_1 = require("./nonces");
 Object.defineProperty(exports, "generateNonces", { enumerable: true, get: function () { return nonces_1.generateNonces; } });
+Object.defineProperty(exports, "aggregateNonces", { enumerable: true, get: function () { return nonces_1.aggregateNonces; } });
 var sign_1 = require("./sign");
 Object.defineProperty(exports, "PartialSig", { enumerable: true, get: function () { return sign_1.PartialSig; } });
 Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sign_1.sign; } });
package/dist/cjs/musig2/nonces.js
CHANGED

@@ -34,6 +34,7 @@ var __importStar = (this && this.__importStar) || (function () {
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateNonces = generateNonces;
+exports.aggregateNonces = aggregateNonces;
 const musig = __importStar(require("@scure/btc-signer/musig2.js"));
 /**
  * Generates a pair of public and secret nonces for MuSig2 signing
@@ -42,3 +43,6 @@ function generateNonces(publicKey) {
     const nonces = musig.nonceGen(publicKey);
     return { secNonce: nonces.secret, pubNonce: nonces.public };
 }
+function aggregateNonces(pubNonces) {
+    return musig.nonceAggregate(pubNonces);
+}
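Together these hunks add client-side MuSig2 nonce aggregation: `aggregateNonces` is a thin wrapper over `musig.nonceAggregate` from `@scure/btc-signer/musig2.js`, exported next to the existing `generateNonces`. A minimal sketch of how the two helpers compose; the root-level import path is an assumption (this diff only shows them on the internal musig2 module), and the keys are placeholders:

// Sketch only: how the two nonce helpers compose.
// The import path is an assumption; the diff shows them exported from the internal musig2 module.
import { generateNonces, aggregateNonces } from "@arkade-os/sdk"; // assumed re-export

// Placeholder 32-byte x-only public keys; real keys come from each cosigner's identity.
const aliceKey = new Uint8Array(32).fill(1);
const bobKey = new Uint8Array(32).fill(2);

const alice = generateNonces(aliceKey); // { secNonce, pubNonce } - secNonce never leaves its owner
const bob = generateNonces(bobKey);

// Any party can combine the *public* nonces once every cosigner has shared one.
const aggregated = aggregateNonces([alice.pubNonce, bob.pubNonce]);
console.log("aggregated public nonce:", aggregated);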
package/dist/cjs/providers/ark.js
CHANGED

@@ -11,7 +11,7 @@ var SettlementEventType;
     SettlementEventType["BatchFinalized"] = "batch_finalized";
     SettlementEventType["BatchFailed"] = "batch_failed";
     SettlementEventType["TreeSigningStarted"] = "tree_signing_started";
-    SettlementEventType["
+    SettlementEventType["TreeNonces"] = "tree_nonces";
     SettlementEventType["TreeTx"] = "tree_tx";
     SettlementEventType["TreeSignature"] = "tree_signature";
 })(SettlementEventType || (exports.SettlementEventType = SettlementEventType = {}));
@@ -390,10 +390,16 @@ class RestArkProvider {
         }
         // Check for TreeNoncesAggregated event
         if (data.treeNoncesAggregated) {
+            // skip treeNoncesAggregated event, deprecated
+            return null;
+        }
+        if (data.treeNonces) {
             return {
-                type: SettlementEventType.
-                id: data.
-
+                type: SettlementEventType.TreeNonces,
+                id: data.treeNonces.id,
+                topic: data.treeNonces.topic,
+                txid: data.treeNonces.txid,
+                nonces: decodeMusig2Nonces(data.treeNonces.nonces), // pubkey -> public nonce
             };
         }
         // Check for TreeTx event
@@ -423,10 +429,6 @@ class RestArkProvider {
             signature: data.treeSignature.signature,
         };
     }
-    // TODO: Handle TreeNoncesEvent when implemented server-side
-    if (data.treeNonces) {
-        return null;
-    }
     // Skip heartbeat events
     if (data.heartbeat) {
         return null;
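The provider now emits a dedicated `tree_nonces` settlement event (id, topic, txid, plus the decoded pubkey-to-nonce map) and silently drops the deprecated `treeNoncesAggregated` message. A hedged sketch of consuming it from the event stream; `RestArkProvider` and `SettlementEventType` are public exports, while the server URL and topics are placeholders:

// Sketch: iterate settlement events and react to the new "tree_nonces" event.
import { RestArkProvider, SettlementEventType } from "@arkade-os/sdk";

async function watchNonces(signal: AbortSignal) {
    const provider = new RestArkProvider("https://ark.example.com"); // placeholder URL
    for await (const event of provider.getEventStream(signal, [])) {
        if (event.type === SettlementEventType.TreeNonces) {
            // event.nonces maps an x-only cosigner pubkey (hex) to its public nonce
            console.log(`nonces for tx ${event.txid}:`, event.nonces.size);
        }
    }
}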
package/dist/cjs/providers/expoArk.js
CHANGED

@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ExpoArkProvider = void 0;
 const ark_1 = require("./ark");
-const
+const expoUtils_1 = require("./expoUtils");
 /**
  * Expo-compatible Ark provider implementation using expo/fetch for SSE support.
  * This provider works specifically in React Native/Expo environments where
@@ -21,14 +21,14 @@ class ExpoArkProvider extends ark_1.RestArkProvider {
         super(serverUrl);
     }
     async *getEventStream(signal, topics) {
-        const expoFetch = await (0,
+        const expoFetch = await (0, expoUtils_1.getExpoFetch)();
         const url = `${this.serverUrl}/v1/batch/events`;
         const queryParams = topics.length > 0
             ? `?${topics.map((topic) => `topics=${encodeURIComponent(topic)}`).join("&")}`
             : "";
         while (!signal?.aborted) {
             try {
-                yield* (0,
+                yield* (0, expoUtils_1.sseStreamIterator)(url + queryParams, signal, expoFetch, {}, (data) => {
                     // Handle different response structures
                     // v8 mesh API might wrap in {result: ...} or send directly
                     const eventData = data.result || data;
@@ -55,11 +55,11 @@ class ExpoArkProvider extends ark_1.RestArkProvider {
         }
     }
     async *getTransactionsStream(signal) {
-        const expoFetch = await (0,
+        const expoFetch = await (0, expoUtils_1.getExpoFetch)();
         const url = `${this.serverUrl}/v1/txs`;
         while (!signal?.aborted) {
             try {
-                yield* (0,
+                yield* (0, expoUtils_1.sseStreamIterator)(url, signal, expoFetch, {}, (data) => {
                     return this.parseTransactionNotification(data.result);
                 });
             }
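The Expo provider keeps its public surface but now sources `getExpoFetch` and `sseStreamIterator` from the new `expoUtils` module. A usage sketch with an `AbortController`; the import path and server URL are placeholders, since the diff does not show how `ExpoArkProvider` is exposed from the package entry points:

// Sketch: stream settlement events in an Expo app and stop on unmount.
import { ExpoArkProvider } from "@arkade-os/sdk"; // assumed export path; adjust to your setup

const provider = new ExpoArkProvider("https://ark.example.com"); // placeholder URL
const controller = new AbortController();

void (async () => {
    for await (const event of provider.getEventStream(controller.signal, [])) {
        console.log("settlement event:", event.type);
    }
})();

// later, e.g. in a cleanup handler:
controller.abort();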
package/dist/cjs/providers/expoIndexer.js
CHANGED

@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.ExpoIndexerProvider = void 0;
 const indexer_1 = require("./indexer");
 const ark_1 = require("./ark");
-const
+const expoUtils_1 = require("./expoUtils");
 // Helper function to convert Vtxo to VirtualCoin (same as in indexer.ts)
 function convertVtxo(vtxo) {
     return {
@@ -53,7 +53,7 @@ class ExpoIndexerProvider extends indexer_1.RestIndexerProvider {
     // Detect if we're running in React Native/Expo environment
     const isReactNative = typeof navigator !== "undefined" &&
         navigator.product === "ReactNative";
-    const expoFetch = await (0,
+    const expoFetch = await (0, expoUtils_1.getExpoFetch)().catch((error) => {
         // In React Native/Expo, expo/fetch is required for proper streaming support
         if (isReactNative) {
             throw new Error("expo/fetch is unavailable in React Native environment. " +
@@ -65,7 +65,7 @@ class ExpoIndexerProvider extends indexer_1.RestIndexerProvider {
     const url = `${this.serverUrl}/v1/indexer/script/subscription/${subscriptionId}`;
     while (!abortSignal.aborted) {
         try {
-            yield* (0,
+            yield* (0, expoUtils_1.sseStreamIterator)(url, abortSignal, expoFetch, { "Content-Type": "application/json" }, (data) => {
                 // Handle new v8 proto format with heartbeat or event
                 if (data.heartbeat !== undefined) {
                     // Skip heartbeat messages
package/dist/cjs/providers/expoUtils.js
CHANGED

@@ -0,0 +1,124 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getExpoFetch = getExpoFetch;
+exports.sseStreamIterator = sseStreamIterator;
+/**
+ * Dynamically imports expo/fetch with fallback to standard fetch.
+ * @returns A fetch function suitable for SSE streaming
+ */
+async function getExpoFetch(options) {
+    const requireExpo = options?.requireExpo ?? false;
+    try {
+        const expoFetchModule = await Promise.resolve().then(() => __importStar(require("expo/fetch")));
+        console.debug("Using expo/fetch for streaming");
+        return expoFetchModule.fetch;
+    }
+    catch (error) {
+        if (requireExpo) {
+            throw new Error("expo/fetch is unavailable in this environment. " +
+                "Please ensure expo/fetch is installed and properly configured.");
+        }
+        console.warn("Using standard fetch instead of expo/fetch. " +
+            "Streaming may not be fully supported in some environments.", error);
+        return fetch;
+    }
+}
+/**
+ * Generic SSE stream processor using fetch API with ReadableStream.
+ * Handles SSE format parsing, buffer management, and abort signals.
+ *
+ * @param url - The SSE endpoint URL
+ * @param abortSignal - Signal to abort the stream
+ * @param fetchFn - Fetch function to use (defaults to standard fetch)
+ * @param headers - Additional headers to send
+ * @param parseData - Function to parse and yield data from SSE events
+ */
+async function* sseStreamIterator(url, abortSignal, fetchFn, headers, parseData) {
+    const fetchController = new AbortController();
+    const cleanup = () => fetchController.abort();
+    abortSignal?.addEventListener("abort", cleanup, { once: true });
+    try {
+        const response = await fetchFn(url, {
+            headers: {
+                Accept: "text/event-stream",
+                ...headers,
+            },
+            signal: fetchController.signal,
+        });
+        if (!response.ok) {
+            throw new Error(`Unexpected status ${response.status} when fetching SSE stream`);
+        }
+        if (!response.body) {
+            throw new Error("Response body is null");
+        }
+        const reader = response.body.getReader();
+        const decoder = new TextDecoder();
+        let buffer = "";
+        while (!abortSignal?.aborted) {
+            const { done, value } = await reader.read();
+            if (done) {
+                break;
+            }
+            buffer += decoder.decode(value, { stream: true });
+            const lines = buffer.split("\n");
+            for (let i = 0; i < lines.length - 1; i++) {
+                const line = lines[i].trim();
+                if (!line)
+                    continue;
+                if (line.startsWith("data:")) {
+                    const jsonStr = line.substring(5).trim();
+                    if (!jsonStr)
+                        continue;
+                    try {
+                        const data = JSON.parse(jsonStr);
+                        const parsed = parseData(data);
+                        if (parsed !== null) {
+                            yield parsed;
+                        }
+                    }
+                    catch (parseError) {
+                        console.error("Failed to parse SSE data:", parseError);
+                        throw parseError;
+                    }
+                }
+            }
+            buffer = lines[lines.length - 1];
+        }
+    }
+    finally {
+        abortSignal?.removeEventListener("abort", cleanup);
+    }
+}
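The new `expoUtils` module centralizes the helpers that previously lived in `utils.js` (removed below): `getExpoFetch` picks expo/fetch when available and falls back to the global fetch, and `sseStreamIterator<T>(url, abortSignal, fetchFn, headers, parseData)` handles the SSE read/parse loop, with `parseData` returning `null` to skip a payload. A sketch of driving it directly; the deep import path and endpoint are illustrative only, since `expoUtils` is an internal provider module:

// Sketch: consume an SSE endpoint with the helpers added in expoUtils.
// The import path below is purely illustrative; expoUtils is internal and may not be public API.
import { getExpoFetch, sseStreamIterator } from "@arkade-os/sdk/dist/esm/providers/expoUtils.js"; // assumed path

async function readTxStream(controller: AbortController) {
    const fetchFn = await getExpoFetch(); // falls back to global fetch outside Expo
    for await (const msg of sseStreamIterator(
        "https://ark.example.com/v1/txs", // placeholder endpoint
        controller.signal,
        fetchFn,
        {},                               // extra headers, merged with Accept: text/event-stream
        (data) => data ?? null,           // return null to skip an SSE payload
    )) {
        console.log("sse payload:", msg);
    }
}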
package/dist/cjs/providers/utils.js
CHANGED

@@ -1,41 +1,6 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.eventSourceIterator = eventSourceIterator;
-exports.getExpoFetch = getExpoFetch;
-exports.sseStreamIterator = sseStreamIterator;
 async function* eventSourceIterator(eventSource) {
     const messageQueue = [];
     const errorQueue = [];
@@ -93,90 +58,3 @@ async function* eventSourceIterator(eventSource) {
         eventSource.removeEventListener("error", errorHandler);
     }
 }
-/**
- * Dynamically imports expo/fetch with fallback to standard fetch.
- * @returns A fetch function suitable for SSE streaming
- */
-async function getExpoFetch(options) {
-    const requireExpo = options?.requireExpo ?? false;
-    try {
-        const expoFetchModule = await Promise.resolve().then(() => __importStar(require("expo/fetch")));
-        console.debug("Using expo/fetch for streaming");
-        return expoFetchModule.fetch;
-    }
-    catch (error) {
-        if (requireExpo) {
-            throw new Error("expo/fetch is unavailable in this environment. " +
-                "Please ensure expo/fetch is installed and properly configured.");
-        }
-        console.warn("Using standard fetch instead of expo/fetch. " +
-            "Streaming may not be fully supported in some environments.", error);
-        return fetch;
-    }
-}
-/**
- * Generic SSE stream processor using fetch API with ReadableStream.
- * Handles SSE format parsing, buffer management, and abort signals.
- *
- * @param url - The SSE endpoint URL
- * @param abortSignal - Signal to abort the stream
- * @param fetchFn - Fetch function to use (defaults to standard fetch)
- * @param headers - Additional headers to send
- * @param parseData - Function to parse and yield data from SSE events
- */
-async function* sseStreamIterator(url, abortSignal, fetchFn, headers, parseData) {
-    const fetchController = new AbortController();
-    const cleanup = () => fetchController.abort();
-    abortSignal?.addEventListener("abort", cleanup, { once: true });
-    try {
-        const response = await fetchFn(url, {
-            headers: {
-                Accept: "text/event-stream",
-                ...headers,
-            },
-            signal: fetchController.signal,
-        });
-        if (!response.ok) {
-            throw new Error(`Unexpected status ${response.status} when fetching SSE stream`);
-        }
-        if (!response.body) {
-            throw new Error("Response body is null");
-        }
-        const reader = response.body.getReader();
-        const decoder = new TextDecoder();
-        let buffer = "";
-        while (!abortSignal?.aborted) {
-            const { done, value } = await reader.read();
-            if (done) {
-                break;
-            }
-            buffer += decoder.decode(value, { stream: true });
-            const lines = buffer.split("\n");
-            for (let i = 0; i < lines.length - 1; i++) {
-                const line = lines[i].trim();
-                if (!line)
-                    continue;
-                if (line.startsWith("data:")) {
-                    const jsonStr = line.substring(5).trim();
-                    if (!jsonStr)
-                        continue;
-                    try {
-                        const data = JSON.parse(jsonStr);
-                        const parsed = parseData(data);
-                        if (parsed !== null) {
-                            yield parsed;
-                        }
-                    }
-                    catch (parseError) {
-                        console.error("Failed to parse SSE data:", parseError);
-                        throw parseError;
-                    }
-                }
-            }
-            buffer = lines[lines.length - 1];
-        }
-    }
-    finally {
-        abortSignal?.removeEventListener("abort", cleanup);
-    }
-}
package/dist/cjs/tree/signingSession.js
CHANGED

@@ -77,10 +77,44 @@ class TreeSignerSession {
         }
         return publicNonces;
     }
-    async
-        if (this.
-            throw
-        this.aggregateNonces
+    async aggregatedNonces(txid, noncesByPubkey) {
+        if (!this.graph)
+            throw exports.ErrMissingVtxoGraph;
+        if (!this.aggregateNonces) {
+            this.aggregateNonces = new Map();
+        }
+        if (!this.myNonces) {
+            await this.getNonces(); // generate nonces if not generated yet
+        }
+        if (this.aggregateNonces.has(txid)) {
+            return {
+                hasAllNonces: this.aggregateNonces.size === this.myNonces?.size,
+            };
+        }
+        const myNonce = this.myNonces.get(txid);
+        if (!myNonce)
+            throw new Error(`missing nonce for txid ${txid}`);
+        const myPublicKey = await this.getPublicKey();
+        // set my nonce to not rely on server
+        noncesByPubkey.set(base_1.hex.encode(myPublicKey.subarray(1)), myNonce);
+        const tx = this.graph.find(txid);
+        if (!tx)
+            throw new Error(`missing tx for txid ${txid}`);
+        const cosigners = (0, unknownFields_1.getArkPsbtFields)(tx.root, 0, unknownFields_1.CosignerPublicKey).map((c) => base_1.hex.encode(c.key.subarray(1)) // xonly pubkey
+        );
+        const pubNonces = [];
+        for (const cosigner of cosigners) {
+            const nonce = noncesByPubkey.get(cosigner);
+            if (!nonce) {
+                throw new Error(`missing nonce for cosigner ${cosigner}`);
+            }
+            pubNonces.push(nonce.pubNonce);
+        }
+        const aggregateNonce = musig2.aggregateNonces(pubNonces);
+        this.aggregateNonces.set(txid, { pubNonce: aggregateNonce });
+        return {
+            hasAllNonces: this.aggregateNonces.size === this.myNonces?.size,
+        };
     }
     async sign() {
         if (!this.graph)
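`TreeSignerSession.aggregatedNonces` merges the nonces received for one tree transaction (inserting the session's own nonce first, then aggregating across the cosigners listed in the PSBT's cosigner fields) and reports whether every transaction the session will sign now has an aggregate nonce. A sketch of the call pattern a driver would follow, using only types the package now exports:

// Sketch: drive the new aggregatedNonces/sign flow for one signer session.
import type { SignerSession, TreeNonces, TreePartialSigs } from "@arkade-os/sdk";

async function onTreeNonces(
    session: SignerSession,
    txid: string,
    noncesByPubkey: TreeNonces,
): Promise<TreePartialSigs | null> {
    // Merge this transaction's nonces; hasAllNonces flips once every txid has an aggregate.
    const { hasAllNonces } = await session.aggregatedNonces(txid, noncesByPubkey);
    if (!hasAllNonces) return null; // keep waiting for the remaining tree transactions
    return session.sign();          // partial signatures, ready to be submitted
}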
package/dist/cjs/utils/unknownFields.js
CHANGED

@@ -51,9 +51,9 @@ var ArkPsbtFieldKey;
 })(ArkPsbtFieldKey || (exports.ArkPsbtFieldKey = ArkPsbtFieldKey = {}));
 /**
  * ArkPsbtFieldKeyType is the type of the ark psbt field key.
- * Every ark psbt field has key type
+ * Every ark psbt field has key type 222.
  */
-exports.ArkPsbtFieldKeyType =
+exports.ArkPsbtFieldKeyType = 222;
 /**
  * setArkPsbtField appends a new unknown field to the input at inputIndex
  *
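Both builds now pin `ArkPsbtFieldKeyType` to 222 (0xde), the key type under which Ark-specific PSBT unknown fields are stored. A small sketch of filtering unknown fields by that constant; the shape of the `unknown` array is an assumption borrowed from common PSBT tooling, not something this diff defines:

// Sketch: keep only Ark fields (key type 222) from a PSBT input's unknown key/value pairs.
import { ArkPsbtFieldKeyType } from "@arkade-os/sdk"; // exported constant, now equal to 222

// Assumed layout: an array of { key: { type, key }, value } entries on the PSBT input.
function arkFieldsOnly(unknown: { key: { type: number; key: Uint8Array }; value: Uint8Array }[]) {
    return unknown.filter((field) => field.key.type === ArkPsbtFieldKeyType);
}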
package/dist/cjs/wallet/wallet.js
CHANGED

@@ -546,13 +546,13 @@ class Wallet {
                     if (!hasOffchainOutputs) {
                         // if there are no offchain outputs, we don't have to handle musig2 tree signatures
                         // we can directly advance to the finalization step
-                        step = ark_1.SettlementEventType.
+                        step = ark_1.SettlementEventType.TreeNonces;
                     }
                 }
                 break;
             case ark_1.SettlementEventType.TreeTx:
                 if (step !== ark_1.SettlementEventType.BatchStarted &&
-                    step !== ark_1.SettlementEventType.
+                    step !== ark_1.SettlementEventType.TreeNonces) {
                     continue;
                 }
                 // index 0 = vtxo tree
@@ -568,7 +568,7 @@ class Wallet {
                 }
                 break;
             case ark_1.SettlementEventType.TreeSignature:
-                if (step !== ark_1.SettlementEventType.
+                if (step !== ark_1.SettlementEventType.TreeNonces) {
                     continue;
                 }
                 if (!hasOffchainOutputs) {
@@ -610,7 +610,7 @@ class Wallet {
                 break;
             // the musig2 nonces of the vtxo tree transactions are generated
             // the server expects now the partial musig2 signatures
-            case ark_1.SettlementEventType.
+            case ark_1.SettlementEventType.TreeNonces:
                 if (step !== ark_1.SettlementEventType.TreeSigningStarted) {
                     continue;
                 }
@@ -618,14 +618,18 @@ class Wallet {
                 if (!session) {
                     throw new Error("Signing session not set");
                 }
-                await this.
+                const signed = await this.handleSettlementTreeNoncesEvent(event, session);
+                if (signed) {
+                    step = event.type;
+                }
+                break;
             }
                 step = event.type;
                 break;
             // the vtxo tree is signed, craft, sign and submit forfeit transactions
             // if any boarding utxos are involved, the settlement tx is also signed
             case ark_1.SettlementEventType.BatchFinalization:
-                if (step !== ark_1.SettlementEventType.
+                if (step !== ark_1.SettlementEventType.TreeNonces) {
                     continue;
                 }
                 if (!this.forfeitOutputScript) {
@@ -771,11 +775,15 @@ class Wallet {
         const nonces = await session.getNonces();
         await this.arkProvider.submitTreeNonces(event.id, pubkey, nonces);
     }
-    async
-        session.
+    async handleSettlementTreeNoncesEvent(event, session) {
+        const { hasAllNonces } = await session.aggregatedNonces(event.txid, event.nonces);
+        // wait to receive and aggregate all nonces before sending signatures
+        if (!hasAllNonces)
+            return false;
         const signatures = await session.sign();
         const pubkey = base_1.hex.encode(await session.getPublicKey());
         await this.arkProvider.submitTreeSignatures(event.id, pubkey, signatures);
+        return true;
     }
     async handleSettlementFinalizationEvent(event, inputs, forfeitOutputScript, connectorsGraph) {
         // the signed forfeits transactions to submit
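In the wallet's settlement loop, `tree_nonces` replaces the old aggregated-nonces step: the private `handleSettlementTreeNoncesEvent` returns `false` until the session reports `hasAllNonces`, and only then are the partial signatures submitted and the step advanced. A condensed sketch of that ordering, with the handler callback standing in for the private method:

// Sketch of the ordering enforced above, not the actual Wallet implementation.
import { SettlementEventType, type SettlementEvent } from "@arkade-os/sdk";

async function settleNoncesPhase(
    events: AsyncIterable<SettlementEvent>,
    handleTreeNonces: (e: SettlementEvent) => Promise<boolean>, // placeholder for the private handler
) {
    let step: SettlementEventType = SettlementEventType.TreeSigningStarted;
    for await (const event of events) {
        if (event.type !== SettlementEventType.TreeNonces) continue;
        if (step !== SettlementEventType.TreeSigningStarted) continue;
        if (await handleTreeNonces(event)) {
            step = event.type; // advance only after nonces for every tree tx were aggregated
        }
    }
    return step;
}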
package/dist/esm/musig2/index.js
CHANGED

package/dist/esm/providers/ark.js
CHANGED

@@ -7,7 +7,7 @@ export var SettlementEventType;
     SettlementEventType["BatchFinalized"] = "batch_finalized";
     SettlementEventType["BatchFailed"] = "batch_failed";
     SettlementEventType["TreeSigningStarted"] = "tree_signing_started";
-    SettlementEventType["
+    SettlementEventType["TreeNonces"] = "tree_nonces";
     SettlementEventType["TreeTx"] = "tree_tx";
     SettlementEventType["TreeSignature"] = "tree_signature";
 })(SettlementEventType || (SettlementEventType = {}));
@@ -386,10 +386,16 @@ export class RestArkProvider {
         }
         // Check for TreeNoncesAggregated event
         if (data.treeNoncesAggregated) {
+            // skip treeNoncesAggregated event, deprecated
+            return null;
+        }
+        if (data.treeNonces) {
             return {
-                type: SettlementEventType.
-                id: data.
-
+                type: SettlementEventType.TreeNonces,
+                id: data.treeNonces.id,
+                topic: data.treeNonces.topic,
+                txid: data.treeNonces.txid,
+                nonces: decodeMusig2Nonces(data.treeNonces.nonces), // pubkey -> public nonce
             };
         }
         // Check for TreeTx event
@@ -419,10 +425,6 @@ export class RestArkProvider {
             signature: data.treeSignature.signature,
         };
     }
-    // TODO: Handle TreeNoncesEvent when implemented server-side
-    if (data.treeNonces) {
-        return null;
-    }
     // Skip heartbeat events
     if (data.heartbeat) {
         return null;
package/dist/esm/providers/expoArk.js
CHANGED

@@ -1,5 +1,5 @@
 import { RestArkProvider, isFetchTimeoutError, } from './ark.js';
-import { getExpoFetch, sseStreamIterator } from './
+import { getExpoFetch, sseStreamIterator } from './expoUtils.js';
 /**
  * Expo-compatible Ark provider implementation using expo/fetch for SSE support.
  * This provider works specifically in React Native/Expo environments where
package/dist/esm/providers/expoIndexer.js
CHANGED

@@ -1,6 +1,6 @@
 import { RestIndexerProvider } from './indexer.js';
 import { isFetchTimeoutError } from './ark.js';
-import { getExpoFetch, sseStreamIterator } from './
+import { getExpoFetch, sseStreamIterator } from './expoUtils.js';
 // Helper function to convert Vtxo to VirtualCoin (same as in indexer.ts)
 function convertVtxo(vtxo) {
     return {
package/dist/esm/providers/expoUtils.js
CHANGED

@@ -0,0 +1,87 @@
+/**
+ * Dynamically imports expo/fetch with fallback to standard fetch.
+ * @returns A fetch function suitable for SSE streaming
+ */
+export async function getExpoFetch(options) {
+    const requireExpo = options?.requireExpo ?? false;
+    try {
+        const expoFetchModule = await import("expo/fetch");
+        console.debug("Using expo/fetch for streaming");
+        return expoFetchModule.fetch;
+    }
+    catch (error) {
+        if (requireExpo) {
+            throw new Error("expo/fetch is unavailable in this environment. " +
+                "Please ensure expo/fetch is installed and properly configured.");
+        }
+        console.warn("Using standard fetch instead of expo/fetch. " +
+            "Streaming may not be fully supported in some environments.", error);
+        return fetch;
+    }
+}
+/**
+ * Generic SSE stream processor using fetch API with ReadableStream.
+ * Handles SSE format parsing, buffer management, and abort signals.
+ *
+ * @param url - The SSE endpoint URL
+ * @param abortSignal - Signal to abort the stream
+ * @param fetchFn - Fetch function to use (defaults to standard fetch)
+ * @param headers - Additional headers to send
+ * @param parseData - Function to parse and yield data from SSE events
+ */
+export async function* sseStreamIterator(url, abortSignal, fetchFn, headers, parseData) {
+    const fetchController = new AbortController();
+    const cleanup = () => fetchController.abort();
+    abortSignal?.addEventListener("abort", cleanup, { once: true });
+    try {
+        const response = await fetchFn(url, {
+            headers: {
+                Accept: "text/event-stream",
+                ...headers,
+            },
+            signal: fetchController.signal,
+        });
+        if (!response.ok) {
+            throw new Error(`Unexpected status ${response.status} when fetching SSE stream`);
+        }
+        if (!response.body) {
+            throw new Error("Response body is null");
+        }
+        const reader = response.body.getReader();
+        const decoder = new TextDecoder();
+        let buffer = "";
+        while (!abortSignal?.aborted) {
+            const { done, value } = await reader.read();
+            if (done) {
+                break;
+            }
+            buffer += decoder.decode(value, { stream: true });
+            const lines = buffer.split("\n");
+            for (let i = 0; i < lines.length - 1; i++) {
+                const line = lines[i].trim();
+                if (!line)
+                    continue;
+                if (line.startsWith("data:")) {
+                    const jsonStr = line.substring(5).trim();
+                    if (!jsonStr)
+                        continue;
+                    try {
+                        const data = JSON.parse(jsonStr);
+                        const parsed = parseData(data);
+                        if (parsed !== null) {
+                            yield parsed;
+                        }
+                    }
+                    catch (parseError) {
+                        console.error("Failed to parse SSE data:", parseError);
+                        throw parseError;
+                    }
+                }
+            }
+            buffer = lines[lines.length - 1];
+        }
+    }
+    finally {
+        abortSignal?.removeEventListener("abort", cleanup);
+    }
+}
package/dist/esm/providers/utils.js
CHANGED

@@ -55,90 +55,3 @@ export async function* eventSourceIterator(eventSource) {
         eventSource.removeEventListener("error", errorHandler);
     }
 }
-/**
- * Dynamically imports expo/fetch with fallback to standard fetch.
- * @returns A fetch function suitable for SSE streaming
- */
-export async function getExpoFetch(options) {
-    const requireExpo = options?.requireExpo ?? false;
-    try {
-        const expoFetchModule = await import("expo/fetch");
-        console.debug("Using expo/fetch for streaming");
-        return expoFetchModule.fetch;
-    }
-    catch (error) {
-        if (requireExpo) {
-            throw new Error("expo/fetch is unavailable in this environment. " +
-                "Please ensure expo/fetch is installed and properly configured.");
-        }
-        console.warn("Using standard fetch instead of expo/fetch. " +
-            "Streaming may not be fully supported in some environments.", error);
-        return fetch;
-    }
-}
-/**
- * Generic SSE stream processor using fetch API with ReadableStream.
- * Handles SSE format parsing, buffer management, and abort signals.
- *
- * @param url - The SSE endpoint URL
- * @param abortSignal - Signal to abort the stream
- * @param fetchFn - Fetch function to use (defaults to standard fetch)
- * @param headers - Additional headers to send
- * @param parseData - Function to parse and yield data from SSE events
- */
-export async function* sseStreamIterator(url, abortSignal, fetchFn, headers, parseData) {
-    const fetchController = new AbortController();
-    const cleanup = () => fetchController.abort();
-    abortSignal?.addEventListener("abort", cleanup, { once: true });
-    try {
-        const response = await fetchFn(url, {
-            headers: {
-                Accept: "text/event-stream",
-                ...headers,
-            },
-            signal: fetchController.signal,
-        });
-        if (!response.ok) {
-            throw new Error(`Unexpected status ${response.status} when fetching SSE stream`);
-        }
-        if (!response.body) {
-            throw new Error("Response body is null");
-        }
-        const reader = response.body.getReader();
-        const decoder = new TextDecoder();
-        let buffer = "";
-        while (!abortSignal?.aborted) {
-            const { done, value } = await reader.read();
-            if (done) {
-                break;
-            }
-            buffer += decoder.decode(value, { stream: true });
-            const lines = buffer.split("\n");
-            for (let i = 0; i < lines.length - 1; i++) {
-                const line = lines[i].trim();
-                if (!line)
-                    continue;
-                if (line.startsWith("data:")) {
-                    const jsonStr = line.substring(5).trim();
-                    if (!jsonStr)
-                        continue;
-                    try {
-                        const data = JSON.parse(jsonStr);
-                        const parsed = parseData(data);
-                        if (parsed !== null) {
-                            yield parsed;
-                        }
-                    }
-                    catch (parseError) {
-                        console.error("Failed to parse SSE data:", parseError);
-                        throw parseError;
-                    }
-                }
-            }
-            buffer = lines[lines.length - 1];
-        }
-    }
-    finally {
-        abortSignal?.removeEventListener("abort", cleanup);
-    }
-}
package/dist/esm/tree/signingSession.js
CHANGED

@@ -40,10 +40,44 @@ export class TreeSignerSession {
         }
         return publicNonces;
     }
-    async
-        if (this.
-            throw
-        this.aggregateNonces
+    async aggregatedNonces(txid, noncesByPubkey) {
+        if (!this.graph)
+            throw ErrMissingVtxoGraph;
+        if (!this.aggregateNonces) {
+            this.aggregateNonces = new Map();
+        }
+        if (!this.myNonces) {
+            await this.getNonces(); // generate nonces if not generated yet
+        }
+        if (this.aggregateNonces.has(txid)) {
+            return {
+                hasAllNonces: this.aggregateNonces.size === this.myNonces?.size,
+            };
+        }
+        const myNonce = this.myNonces.get(txid);
+        if (!myNonce)
+            throw new Error(`missing nonce for txid ${txid}`);
+        const myPublicKey = await this.getPublicKey();
+        // set my nonce to not rely on server
+        noncesByPubkey.set(hex.encode(myPublicKey.subarray(1)), myNonce);
+        const tx = this.graph.find(txid);
+        if (!tx)
+            throw new Error(`missing tx for txid ${txid}`);
+        const cosigners = getArkPsbtFields(tx.root, 0, CosignerPublicKey).map((c) => hex.encode(c.key.subarray(1)) // xonly pubkey
+        );
+        const pubNonces = [];
+        for (const cosigner of cosigners) {
+            const nonce = noncesByPubkey.get(cosigner);
+            if (!nonce) {
+                throw new Error(`missing nonce for cosigner ${cosigner}`);
+            }
+            pubNonces.push(nonce.pubNonce);
+        }
+        const aggregateNonce = musig2.aggregateNonces(pubNonces);
+        this.aggregateNonces.set(txid, { pubNonce: aggregateNonce });
+        return {
+            hasAllNonces: this.aggregateNonces.size === this.myNonces?.size,
+        };
     }
     async sign() {
         if (!this.graph)
package/dist/esm/utils/unknownFields.js
CHANGED

@@ -13,9 +13,9 @@ export var ArkPsbtFieldKey;
 })(ArkPsbtFieldKey || (ArkPsbtFieldKey = {}));
 /**
  * ArkPsbtFieldKeyType is the type of the ark psbt field key.
- * Every ark psbt field has key type
+ * Every ark psbt field has key type 222.
  */
-export const ArkPsbtFieldKeyType =
+export const ArkPsbtFieldKeyType = 222;
 /**
  * setArkPsbtField appends a new unknown field to the input at inputIndex
  *
package/dist/esm/wallet/wallet.js
CHANGED

@@ -509,13 +509,13 @@ export class Wallet {
                     if (!hasOffchainOutputs) {
                         // if there are no offchain outputs, we don't have to handle musig2 tree signatures
                         // we can directly advance to the finalization step
-                        step = SettlementEventType.
+                        step = SettlementEventType.TreeNonces;
                     }
                 }
                 break;
             case SettlementEventType.TreeTx:
                 if (step !== SettlementEventType.BatchStarted &&
-                    step !== SettlementEventType.
+                    step !== SettlementEventType.TreeNonces) {
                     continue;
                 }
                 // index 0 = vtxo tree
@@ -531,7 +531,7 @@ export class Wallet {
                 }
                 break;
             case SettlementEventType.TreeSignature:
-                if (step !== SettlementEventType.
+                if (step !== SettlementEventType.TreeNonces) {
                     continue;
                 }
                 if (!hasOffchainOutputs) {
@@ -573,7 +573,7 @@ export class Wallet {
                 break;
             // the musig2 nonces of the vtxo tree transactions are generated
             // the server expects now the partial musig2 signatures
-            case SettlementEventType.
+            case SettlementEventType.TreeNonces:
                 if (step !== SettlementEventType.TreeSigningStarted) {
                     continue;
                 }
@@ -581,14 +581,18 @@ export class Wallet {
                 if (!session) {
                     throw new Error("Signing session not set");
                 }
-                await this.
+                const signed = await this.handleSettlementTreeNoncesEvent(event, session);
+                if (signed) {
+                    step = event.type;
+                }
+                break;
             }
                 step = event.type;
                 break;
             // the vtxo tree is signed, craft, sign and submit forfeit transactions
             // if any boarding utxos are involved, the settlement tx is also signed
             case SettlementEventType.BatchFinalization:
-                if (step !== SettlementEventType.
+                if (step !== SettlementEventType.TreeNonces) {
                     continue;
                 }
                 if (!this.forfeitOutputScript) {
@@ -734,11 +738,15 @@ export class Wallet {
         const nonces = await session.getNonces();
         await this.arkProvider.submitTreeNonces(event.id, pubkey, nonces);
     }
-    async
-        session.
+    async handleSettlementTreeNoncesEvent(event, session) {
+        const { hasAllNonces } = await session.aggregatedNonces(event.txid, event.nonces);
+        // wait to receive and aggregate all nonces before sending signatures
+        if (!hasAllNonces)
+            return false;
         const signatures = await session.sign();
         const pubkey = hex.encode(await session.getPublicKey());
         await this.arkProvider.submitTreeSignatures(event.id, pubkey, signatures);
+        return true;
     }
     async handleSettlementFinalizationEvent(event, inputs, forfeitOutputScript, connectorsGraph) {
         // the signed forfeits transactions to submit
package/dist/types/index.d.ts
CHANGED
|
@@ -18,7 +18,7 @@ import { Worker } from "./wallet/serviceWorker/worker";
 import { Request } from "./wallet/serviceWorker/request";
 import { Response } from "./wallet/serviceWorker/response";
 import { ESPLORA_URL, EsploraProvider, OnchainProvider, ExplorerTransaction } from "./providers/onchain";
-import { RestArkProvider, ArkProvider, SettlementEvent, SettlementEventType, ArkInfo, SignedIntent, Output, TxNotification, BatchFinalizationEvent, BatchFinalizedEvent, BatchFailedEvent, TreeSigningStartedEvent,
+import { RestArkProvider, ArkProvider, SettlementEvent, SettlementEventType, ArkInfo, SignedIntent, Output, TxNotification, BatchFinalizationEvent, BatchFinalizedEvent, BatchFailedEvent, TreeSigningStartedEvent, TreeNoncesEvent, BatchStartedEvent, TreeTxEvent, TreeSignatureEvent, ScheduledSession } from "./providers/ark";
 import { CLTVMultisigTapscript, ConditionCSVMultisigTapscript, ConditionMultisigTapscript, CSVMultisigTapscript, decodeTapscript, MultisigTapscript, TapscriptType, ArkTapscript, RelativeTimelock } from "./script/tapscript";
 import { hasBoardingTxExpired, buildOffchainTx, verifyTapscriptSignatures, ArkTxInput, OffchainTx } from "./utils/arkTransaction";
 import { VtxoTaprootTree, ConditionWitness, getArkPsbtFields, setArkPsbtField, ArkPsbtFieldCoder, ArkPsbtFieldKey, ArkPsbtFieldKeyType, CosignerPublicKey, VtxoTreeExpiry } from "./utils/unknownFields";
@@ -33,4 +33,4 @@ import { Unroll } from "./wallet/unroll";
 import { WalletRepositoryImpl } from "./repositories/walletRepository";
 import { ContractRepositoryImpl } from "./repositories/contractRepository";
 export { Wallet, SingleKey, OnchainWallet, Ramps, VtxoManager, ESPLORA_URL, EsploraProvider, RestArkProvider, RestIndexerProvider, ArkAddress, DefaultVtxo, VtxoScript, VHTLC, TxType, IndexerTxType, ChainTxType, SettlementEventType, setupServiceWorker, Worker, ServiceWorkerWallet, Request, Response, decodeTapscript, MultisigTapscript, CSVMultisigTapscript, ConditionCSVMultisigTapscript, ConditionMultisigTapscript, CLTVMultisigTapscript, ArkPsbtFieldKey, ArkPsbtFieldKeyType, setArkPsbtField, getArkPsbtFields, CosignerPublicKey, VtxoTreeExpiry, VtxoTaprootTree, ConditionWitness, buildOffchainTx, verifyTapscriptSignatures, waitForIncomingFunds, hasBoardingTxExpired, ArkNote, networks, WalletRepositoryImpl, ContractRepositoryImpl, Intent, TxTree, P2A, Unroll, Transaction, };
-export type { Identity, IWallet, WalletConfig, ProviderClass, ArkTransaction, Coin, ExtendedCoin, ExtendedVirtualCoin, WalletBalance, SendBitcoinParams, Recipient, SettleParams, Status, VirtualStatus, Outpoint, VirtualCoin, TxKey, TapscriptType, ArkTxInput, OffchainTx, TapLeaves, IncomingFunds, IndexerProvider, PageResponse, Batch, ChainTx, CommitmentTx, TxHistoryRecord, Vtxo, VtxoChain, Tx, OnchainProvider, ArkProvider, SettlementEvent, ArkInfo, SignedIntent, Output, TxNotification, ExplorerTransaction, BatchFinalizationEvent, BatchFinalizedEvent, BatchFailedEvent, TreeSigningStartedEvent,
+export type { Identity, IWallet, WalletConfig, ProviderClass, ArkTransaction, Coin, ExtendedCoin, ExtendedVirtualCoin, WalletBalance, SendBitcoinParams, Recipient, SettleParams, Status, VirtualStatus, Outpoint, VirtualCoin, TxKey, TapscriptType, ArkTxInput, OffchainTx, TapLeaves, IncomingFunds, IndexerProvider, PageResponse, Batch, ChainTx, CommitmentTx, TxHistoryRecord, Vtxo, VtxoChain, Tx, OnchainProvider, ArkProvider, SettlementEvent, ArkInfo, SignedIntent, Output, TxNotification, ExplorerTransaction, BatchFinalizationEvent, BatchFinalizedEvent, BatchFailedEvent, TreeSigningStartedEvent, TreeNoncesEvent, BatchStartedEvent, TreeTxEvent, TreeSignatureEvent, ScheduledSession, PaginationOptions, SubscriptionResponse, SubscriptionHeartbeat, SubscriptionEvent, Network, NetworkName, ArkTapscript, RelativeTimelock, EncodedVtxoScript, TapLeafScript, SignerSession, TreeNonces, TreePartialSigs, GetVtxosFilter, Nonces, PartialSig, ArkPsbtFieldCoder, TxTreeNode, AnchorBumper, };

package/dist/types/providers/ark.d.ts
CHANGED

@@ -11,7 +11,7 @@ export declare enum SettlementEventType {
     BatchFinalized = "batch_finalized",
     BatchFailed = "batch_failed",
     TreeSigningStarted = "tree_signing_started",
-
+    TreeNonces = "tree_nonces",
     TreeTx = "tree_tx",
     TreeSignature = "tree_signature"
 }
@@ -36,10 +36,12 @@ export type TreeSigningStartedEvent = {
     cosignersPublicKeys: string[];
     unsignedCommitmentTx: string;
 };
-export type
-    type: SettlementEventType.
+export type TreeNoncesEvent = {
+    type: SettlementEventType.TreeNonces;
     id: string;
-
+    topic: string[];
+    txid: string;
+    nonces: TreeNonces;
 };
 export type BatchStartedEvent = {
     type: SettlementEventType.BatchStarted;
@@ -62,7 +64,7 @@ export type TreeSignatureEvent = {
     txid: string;
     signature: string;
 };
-export type SettlementEvent = BatchFinalizationEvent | BatchFinalizedEvent | BatchFailedEvent | TreeSigningStartedEvent |
+export type SettlementEvent = BatchFinalizationEvent | BatchFinalizedEvent | BatchFailedEvent | TreeSigningStartedEvent | TreeNoncesEvent | BatchStartedEvent | TreeTxEvent | TreeSignatureEvent;
 export interface ScheduledSession {
     duration: bigint;
     fees: FeeInfo;

package/dist/types/providers/expoUtils.d.ts
CHANGED

@@ -0,0 +1,18 @@
+/**
+ * Dynamically imports expo/fetch with fallback to standard fetch.
+ * @returns A fetch function suitable for SSE streaming
+ */
+export declare function getExpoFetch(options?: {
+    requireExpo?: boolean;
+}): Promise<typeof fetch>;
+/**
+ * Generic SSE stream processor using fetch API with ReadableStream.
+ * Handles SSE format parsing, buffer management, and abort signals.
+ *
+ * @param url - The SSE endpoint URL
+ * @param abortSignal - Signal to abort the stream
+ * @param fetchFn - Fetch function to use (defaults to standard fetch)
+ * @param headers - Additional headers to send
+ * @param parseData - Function to parse and yield data from SSE events
+ */
+export declare function sseStreamIterator<T>(url: string, abortSignal: AbortSignal, fetchFn: typeof fetch, headers: Record<string, string>, parseData: (data: any) => T | null): AsyncGenerator<T, void, unknown>;

package/dist/types/providers/utils.d.ts
CHANGED

@@ -1,19 +1 @@
 export declare function eventSourceIterator(eventSource: EventSource): AsyncGenerator<MessageEvent, void, unknown>;
-/**
- * Dynamically imports expo/fetch with fallback to standard fetch.
- * @returns A fetch function suitable for SSE streaming
- */
-export declare function getExpoFetch(options?: {
-    requireExpo?: boolean;
-}): Promise<typeof fetch>;
-/**
- * Generic SSE stream processor using fetch API with ReadableStream.
- * Handles SSE format parsing, buffer management, and abort signals.
- *
- * @param url - The SSE endpoint URL
- * @param abortSignal - Signal to abort the stream
- * @param fetchFn - Fetch function to use (defaults to standard fetch)
- * @param headers - Additional headers to send
- * @param parseData - Function to parse and yield data from SSE events
- */
-export declare function sseStreamIterator<T>(url: string, abortSignal: AbortSignal, fetchFn: typeof fetch, headers: Record<string, string>, parseData: (data: any) => T | null): AsyncGenerator<T, void, unknown>;

package/dist/types/tree/signingSession.d.ts
CHANGED

@@ -2,13 +2,16 @@ import * as musig2 from "../musig2";
 import { TxTree } from "./txTree";
 export declare const ErrMissingVtxoGraph: Error;
 export declare const ErrMissingAggregateKey: Error;
-export type
+export type Musig2PublicNonce = Pick<musig2.Nonces, "pubNonce">;
+export type TreeNonces = Map<string, Musig2PublicNonce>;
 export type TreePartialSigs = Map<string, musig2.PartialSig>;
 export interface SignerSession {
     getPublicKey(): Promise<Uint8Array>;
     init(tree: TxTree, scriptRoot: Uint8Array, rootInputAmount: bigint): Promise<void>;
     getNonces(): Promise<TreeNonces>;
-
+    aggregatedNonces(txid: string, noncesByPubkey: TreeNonces): Promise<{
+        hasAllNonces: boolean;
+    }>;
     sign(): Promise<TreePartialSigs>;
 }
 export declare class TreeSignerSession implements SignerSession {
@@ -24,7 +27,9 @@ export declare class TreeSignerSession implements SignerSession {
     init(tree: TxTree, scriptRoot: Uint8Array, rootInputAmount: bigint): Promise<void>;
     getPublicKey(): Promise<Uint8Array>;
     getNonces(): Promise<TreeNonces>;
-
+    aggregatedNonces(txid: string, noncesByPubkey: TreeNonces): Promise<{
+        hasAllNonces: boolean;
+    }>;
     sign(): Promise<TreePartialSigs>;
     private generateNonces;
     private signPartial;

package/dist/types/utils/unknownFields.d.ts
CHANGED

@@ -11,9 +11,9 @@ export declare enum ArkPsbtFieldKey {
 }
 /**
  * ArkPsbtFieldKeyType is the type of the ark psbt field key.
- * Every ark psbt field has key type
+ * Every ark psbt field has key type 222.
  */
-export declare const ArkPsbtFieldKeyType =
+export declare const ArkPsbtFieldKeyType = 222;
 /**
  * ArkPsbtFieldCoder is the coder for the ark psbt fields.
  * each type has its own coder.

package/dist/types/wallet/wallet.d.ts
CHANGED

@@ -91,7 +91,7 @@ export declare class Wallet implements IWallet {
     notifyIncomingFunds(eventCallback: (coins: IncomingFunds) => void): Promise<() => void>;
     private handleBatchStartedEvent;
     private handleSettlementSigningEvent;
-    private
+    private handleSettlementTreeNoncesEvent;
     private handleSettlementFinalizationEvent;
     private makeRegisterIntentSignature;
     private makeDeleteIntentSignature;