@1sat/wallet-toolbox 0.0.6 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/OneSatWallet.d.ts +40 -17
  2. package/dist/OneSatWallet.js +956 -0
  3. package/dist/errors.js +11 -0
  4. package/dist/index.d.ts +0 -2
  5. package/dist/index.js +12 -93764
  6. package/dist/indexers/Bsv21Indexer.js +232 -0
  7. package/dist/indexers/CosignIndexer.js +25 -0
  8. package/dist/indexers/FundIndexer.js +64 -0
  9. package/dist/indexers/InscriptionIndexer.js +115 -0
  10. package/dist/indexers/LockIndexer.js +42 -0
  11. package/dist/indexers/MapIndexer.js +62 -0
  12. package/dist/indexers/OpNSIndexer.js +38 -0
  13. package/dist/indexers/OrdLockIndexer.js +63 -0
  14. package/dist/indexers/OriginIndexer.js +240 -0
  15. package/dist/indexers/Outpoint.js +53 -0
  16. package/dist/indexers/SigmaIndexer.js +133 -0
  17. package/dist/indexers/index.js +13 -0
  18. package/dist/indexers/parseAddress.js +24 -0
  19. package/dist/indexers/types.js +18 -0
  20. package/dist/services/OneSatServices.d.ts +12 -4
  21. package/dist/services/OneSatServices.js +231 -0
  22. package/dist/services/client/ArcadeClient.js +107 -0
  23. package/dist/services/client/BaseClient.js +125 -0
  24. package/dist/services/client/BeefClient.js +33 -0
  25. package/dist/services/client/Bsv21Client.js +65 -0
  26. package/dist/services/client/ChaintracksClient.js +175 -0
  27. package/dist/services/client/OrdfsClient.js +122 -0
  28. package/dist/services/client/OwnerClient.js +123 -0
  29. package/dist/services/client/TxoClient.js +85 -0
  30. package/dist/services/client/index.js +8 -0
  31. package/dist/services/types.js +5 -0
  32. package/dist/signers/ReadOnlySigner.js +47 -0
  33. package/dist/sync/IndexedDbSyncQueue.js +355 -0
  34. package/dist/sync/SqliteSyncQueue.js +197 -0
  35. package/dist/sync/index.js +3 -0
  36. package/dist/sync/types.js +4 -0
  37. package/package.json +5 -5
package/dist/indexers/OriginIndexer.js
@@ -0,0 +1,240 @@
+ import { HttpError } from "../errors";
+ import { parseAddress } from "./parseAddress";
+ import { Indexer, } from "./types";
+ export class OriginIndexer extends Indexer {
+ owners;
+ network;
+ services;
+ tag = "origin";
+ name = "Origins";
+ constructor(owners, network, services) {
+ super(owners, network);
+ this.owners = owners;
+ this.network = network;
+ this.services = services;
+ }
+ /**
+ * Parse identifies 1-sat ordinal outputs and extracts basic data.
+ * Origin tracking (determining if transfer vs new) is done in summarize()
+ * since it requires cross-output and cross-input context.
+ */
+ async parse(txo) {
+ const satoshis = BigInt(txo.output.satoshis || 0);
+ // Only parse 1-satoshi outputs, exclude BSV-20 tokens
+ if (satoshis !== 1n)
+ return;
+ const insc = txo.data.insc?.data;
+ if (insc?.file?.type === "application/bsv-20")
+ return;
+ // Parse the address
+ const script = txo.output.lockingScript;
+ const address = parseAddress(script, 0, this.network);
+ // Start with placeholder origin - will be populated in summarize()
+ const origin = {
+ outpoint: "", // Will be set in summarize()
+ nonce: 0,
+ sigma: txo.data.sigma?.data,
+ };
+ // Merge current output's MAP data
+ const currentMap = txo.data.map?.data;
+ if (currentMap) {
+ origin.map = { ...currentMap };
+ }
+ // If current output has inscription, use it
+ if (insc) {
+ origin.insc = insc;
+ }
+ return {
+ data: origin,
+ tags: [], // Tags will be added in summarize() once origin is determined
+ owner: address && this.owners.has(address) ? address : undefined,
+ basket: "1sat",
+ };
+ }
+ /**
+ * Summarize determines origin tracking (transfer vs new origin) and
+ * fetches metadata from OrdFS for transfers.
+ */
+ async summarize(ctx) {
+ // First, calculate satoshi positions and determine origins for all outputs
+ await this.resolveOrigins(ctx);
+ // Now compute balance summary
+ let balance = 0;
+ let hasTag = false;
+ let icon;
+ let id = "";
+ // Check inputs
+ for (const spend of ctx.spends) {
+ if (spend.data[this.tag]) {
+ const origin = spend.data[this.tag].data;
+ if (spend.owner && this.owners.has(spend.owner)) {
+ hasTag = true;
+ balance--;
+ if (!icon && origin?.insc?.file?.type.startsWith("image/")) {
+ icon = origin?.outpoint;
+ id = origin.map?.name || "";
+ }
+ }
+ }
+ }
+ // Check outputs
+ for (const txo of ctx.txos) {
+ if (txo.data[this.tag]) {
+ if (txo.owner && this.owners.has(txo.owner)) {
+ hasTag = true;
+ balance++;
+ const origin = txo.data.origin?.data;
+ if (!icon && origin?.insc?.file?.type.startsWith("image/")) {
+ icon = origin?.outpoint;
+ }
+ }
+ // Clear file content before saving - content is loaded locally but shouldn't be persisted
+ const origin = txo.data[this.tag].data;
+ if (origin?.insc?.file) {
+ origin.insc.file.content = [];
+ }
+ }
+ }
+ if (hasTag) {
+ return {
+ id,
+ amount: balance,
+ icon,
+ };
+ }
+ }
+ /**
+ * Resolve origins for all 1-sat outputs in the transaction.
+ * This determines whether each is a new origin or a transfer.
+ */
+ async resolveOrigins(ctx) {
+ // Calculate satoshi positions for all outputs
+ const satPositions = [];
+ let cumulative = 0n;
+ for (const txo of ctx.txos) {
+ satPositions.push(cumulative);
+ cumulative += BigInt(txo.output.satoshis || 0);
+ }
+ // Process each output that has origin data
+ for (let vout = 0; vout < ctx.txos.length; vout++) {
+ const txo = ctx.txos[vout];
+ const originData = txo.data[this.tag];
+ if (!originData)
+ continue;
+ const origin = originData.data;
+ const outSat = satPositions[vout];
+ // Track accumulated input satoshis to find which input contains our satoshi
+ let satsIn = 0n;
+ let sourceOutpoint;
+ for (const spend of ctx.spends) {
+ const spendSatoshis = BigInt(spend.output.satoshis || 0);
+ // Check if this input's satoshi range contains our output's satoshi
+ if (satsIn === outSat && spendSatoshis === 1n) {
+ sourceOutpoint = spend.outpoint.toString();
+ break;
+ }
+ satsIn += spendSatoshis;
+ // If we've passed our satoshi position, this is a new origin
+ if (satsIn > outSat) {
+ break;
+ }
+ }
+ if (sourceOutpoint) {
+ // Transfer - fetch metadata from OrdFS
+ try {
+ const metadata = await this.services.ordfs.getMetadata(sourceOutpoint, 0);
+ origin.outpoint = metadata.origin || sourceOutpoint;
+ origin.nonce = metadata.sequence + 1;
+ // Merge inherited map with current
+ if (metadata.map) {
+ origin.map = { ...metadata.map, ...(origin.map || {}) };
+ }
+ // If no inscription on current output, use metadata from source
+ // and potentially fetch text content
+ if (!origin.insc) {
+ origin.insc = {
+ file: {
+ hash: "",
+ size: metadata.contentLength,
+ type: metadata.contentType,
+ content: [],
+ },
+ };
+ // Fetch text content if it qualifies
+ const contentType = metadata.contentType.toLowerCase();
+ const isTextContent = contentType.startsWith("text/") ||
+ contentType === "application/json";
+ if (isTextContent && metadata.contentLength <= 1000) {
+ try {
+ const { data } = await this.services.ordfs.getContent(origin.outpoint || sourceOutpoint);
+ if (data) {
+ originData.content = new TextDecoder().decode(data);
+ }
+ }
+ catch {
+ // Ignore content fetch errors
+ }
+ }
+ }
+ }
+ catch (e) {
+ if (e instanceof HttpError && e.status === 404) {
+ // Source outpoint not found in OrdFS - treat as new origin
+ origin.outpoint = txo.outpoint.toString();
+ }
+ else {
+ throw e;
+ }
+ }
+ }
+ else {
+ // New origin
+ origin.outpoint = txo.outpoint.toString();
+ }
+ // Validate parent if inscription claims one
+ const insc = txo.data.insc?.data;
+ if (insc?.parent) {
+ try {
+ const metadata = await this.services.ordfs.getMetadata(txo.outpoint.toString(), 0);
+ if (metadata.parent !== insc.parent) {
+ if (origin.insc) {
+ origin.insc.parent = undefined;
+ }
+ }
+ }
+ catch (e) {
+ if (e instanceof HttpError && e.status === 404) {
+ // Can't verify parent claim - remove it
+ if (origin.insc) {
+ origin.insc.parent = undefined;
+ }
+ }
+ else {
+ throw e;
+ }
+ }
+ }
+ // Clear large file content to save space
+ if (origin.insc?.file?.size && origin.insc.file.size > 4096) {
+ origin.insc.file.content = [];
+ }
+ // Now add tags since origin is determined
+ if (txo.owner && this.owners.has(txo.owner)) {
+ originData.tags.push(`origin:${origin.outpoint || ""}`);
+ if (origin.insc?.file?.type) {
+ const fullType = origin.insc.file.type;
+ const baseType = fullType.split(";")[0].trim();
+ const category = baseType.split("/")[0];
+ originData.tags.push(`type:${category}`);
+ originData.tags.push(`type:${baseType}`);
+ }
+ // Extract name from map data
+ const name = (origin.map?.name ??
+ origin.map?.subTypeData?.name);
+ if (name) {
+ originData.tags.push(`name:${name}`);
+ }
+ }
+ }
+ }
+ }
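
Worth noting for readers of resolveOrigins above: the transfer-vs-new-origin decision reduces to matching an output's cumulative satoshi offset against the inputs' cumulative offsets, where only an exact match against a 1-sat input counts as a transfer. A minimal standalone sketch of that rule (the SatRange shape is illustrative, not a package type):

// Hedged sketch of the matching rule used in OriginIndexer.resolveOrigins.
// satoshis values are bigint, as in the compiled source.
interface SatRange { outpoint: string; satoshis: bigint }

function findSourceOutpoint(spends: SatRange[], outSat: bigint): string | undefined {
  let satsIn = 0n;
  for (const spend of spends) {
    // A 1-sat input starting exactly at the output's offset means this output is a transfer.
    if (satsIn === outSat && spend.satoshis === 1n) return spend.outpoint;
    satsIn += spend.satoshis;
    // Once we pass the output's offset without a match, it is a new origin.
    if (satsIn > outSat) break;
  }
  return undefined; // undefined => treat the output as a new origin
}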
package/dist/indexers/Outpoint.js
@@ -0,0 +1,53 @@
+ import { Utils } from "@bsv/sdk";
+ export class Outpoint {
+ txid;
+ vout;
+ constructor(txidOrOutpoint, vout) {
+ if (vout !== undefined) {
+ this.vout = vout;
+ if (typeof txidOrOutpoint === "string") {
+ this.txid = txidOrOutpoint;
+ }
+ else if (Array.isArray(txidOrOutpoint)) {
+ this.txid = Utils.toHex(txidOrOutpoint);
+ }
+ else {
+ throw new Error("Invalid Outpoint");
+ }
+ }
+ else if (Array.isArray(txidOrOutpoint)) {
+ const reader = new Utils.Reader(txidOrOutpoint);
+ this.txid = Utils.toHex(reader.read(32).reverse());
+ this.vout = reader.readInt32LE();
+ }
+ else if (typeof txidOrOutpoint === "string") {
+ this.txid = txidOrOutpoint.substring(0, 64);
+ this.vout = Number.parseInt(txidOrOutpoint.substring(65), 10);
+ }
+ else if (typeof txidOrOutpoint === "object") {
+ this.txid = txidOrOutpoint.txid;
+ this.vout = txidOrOutpoint.vout;
+ }
+ else {
+ throw new Error("Invalid Outpoint");
+ }
+ }
+ toString() {
+ return `${this.txid}_${this.vout}`;
+ }
+ toBinary() {
+ const writer = new Utils.Writer();
+ writer.write(Utils.toArray(this.txid, "hex").reverse());
+ writer.writeUInt32LE(this.vout);
+ return writer.toArray();
+ }
+ toBEBinary() {
+ const writer = new Utils.Writer();
+ writer.write(Utils.toArray(this.txid, "hex"));
+ writer.writeUInt32BE(this.vout);
+ return writer.toArray();
+ }
+ toJSON() {
+ return this.toString();
+ }
+ }
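
Outpoint accepts a txid plus vout, a 36-byte binary outpoint, a "txid_vout" string, or an object with txid/vout fields. A short hedged usage sketch (root re-export assumed, based on dist/indexers/index.js):

import { Outpoint } from "@1sat/wallet-toolbox"; // export path assumed

const op = new Outpoint("ab".repeat(32), 1);
op.toString();   // "abab..._1" (txid and vout joined by "_")
op.toBinary();   // 36 bytes: reversed (little-endian) txid followed by UInt32LE vout
op.toBEBinary(); // big-endian variant: txid bytes as-is, UInt32BE vout
JSON.stringify({ op }); // toJSON() serializes to the "txid_vout" string form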
package/dist/indexers/SigmaIndexer.js
@@ -0,0 +1,133 @@
+ import { BSM, BigNumber, Hash, OP, Script, Signature, Utils } from "@bsv/sdk";
+ import { Indexer, } from "./types";
+ export class SigmaIndexer extends Indexer {
+ owners;
+ network;
+ tag = "sigma";
+ name = "Sigma";
+ constructor(owners = new Set(), network = "mainnet") {
+ super(owners, network);
+ this.owners = owners;
+ this.network = network;
+ }
+ /**
+ * Parse extracts raw sigma protocol data without validation.
+ * Validation requires ctx.spends and is done in summarize().
+ */
+ async parse(txo) {
+ const script = txo.output.lockingScript;
+ const vout = txo.outpoint.vout;
+ let retPos = 0;
+ const sigmas = [];
+ for (let i = retPos + 1; i < script.chunks.length; i++) {
+ const chunk = script.chunks[i];
+ if (!retPos && chunk.op === OP.OP_RETURN) {
+ retPos = i;
+ continue;
+ }
+ if (!retPos || chunk.data?.length !== 1 || chunk.data[0] !== 0x7c) {
+ continue;
+ }
+ if (Utils.toUTF8(script.chunks[++i]?.data || []) !== "SIGMA") {
+ continue;
+ }
+ const sigma = {
+ algorithm: script.chunks[++i]?.data
+ ? Utils.toUTF8(script.chunks[i].data || [])
+ : "",
+ address: script.chunks[++i]?.data
+ ? Utils.toUTF8(script.chunks[i].data || [])
+ : "",
+ signature: script.chunks[++i]?.data || [],
+ vin: script.chunks[++i]?.data
+ ? Number.parseInt(Utils.toUTF8(script.chunks[i].data || []))
+ : -1,
+ valid: false, // Will be validated in summarize()
+ };
+ // Use vout as default vin if not specified
+ if (sigma.vin === -1)
+ sigma.vin = vout;
+ sigmas.push(sigma);
+ }
+ if (!sigmas.length)
+ return;
+ return { data: sigmas, tags: [] };
+ }
+ /**
+ * Validate all sigma signatures against ctx.spends.
+ */
+ async summarize(ctx, _isBroadcasted) {
+ for (const txo of ctx.txos) {
+ const sigmaData = txo.data[this.tag];
+ if (!sigmaData)
+ continue;
+ const sigmas = sigmaData.data;
+ const script = txo.output.lockingScript;
+ for (const sigma of sigmas) {
+ // Find the dataPos by re-scanning for this sigma
+ const dataPos = this.findSigmaDataPos(script, sigma);
+ if (dataPos === -1)
+ continue;
+ // Get the spend for this sigma's vin
+ const spend = ctx.spends[sigma.vin];
+ if (!spend)
+ continue;
+ const bw = new Utils.Writer();
+ bw.write(Utils.toArray(spend.outpoint.txid, "hex"));
+ bw.writeUInt32LE(spend.outpoint.vout);
+ const inputHash = Hash.sha256(bw.toArray());
+ const dataScript = new Script();
+ dataScript.chunks = script.chunks.slice(0, dataPos);
+ const outputHash = Hash.sha256(dataScript.toBinary());
+ const msgHash = Hash.sha256(inputHash.concat(outputHash));
+ const signature = Signature.fromCompact(sigma.signature);
+ for (let recovery = 0; recovery < 4; recovery++) {
+ try {
+ const publicKey = signature.RecoverPublicKey(recovery, new BigNumber(BSM.magicHash(msgHash)));
+ const sigFitsPubkey = BSM.verify(msgHash, signature, publicKey);
+ const pubkeyAddress = publicKey.toAddress(this.network === "mainnet" ? "mainnet" : "testnet");
+ if (sigFitsPubkey && pubkeyAddress === sigma.address) {
+ sigma.valid = true;
+ break;
+ }
+ }
+ catch {
+ // try next recovery
+ }
+ }
+ }
+ }
+ return undefined;
+ }
+ /**
+ * Find the data position for a sigma in the script (position before the pipe separator).
+ */
+ findSigmaDataPos(script, targetSigma) {
+ let retPos = 0;
+ for (let i = 1; i < script.chunks.length; i++) {
+ const chunk = script.chunks[i];
+ if (!retPos && chunk.op === OP.OP_RETURN) {
+ retPos = i;
+ continue;
+ }
+ if (!retPos || chunk.data?.length !== 1 || chunk.data[0] !== 0x7c) {
+ continue;
+ }
+ if (Utils.toUTF8(script.chunks[i + 1]?.data || []) !== "SIGMA") {
+ continue;
+ }
+ const dataPos = i;
+ // Check if this is the target sigma by comparing address and signature
+ const address = script.chunks[i + 3]?.data
+ ? Utils.toUTF8(script.chunks[i + 3].data || [])
+ : "";
+ const sig = script.chunks[i + 4]?.data || [];
+ if (address === targetSigma.address &&
+ sig.length === targetSigma.signature.length &&
+ sig.every((b, idx) => b === targetSigma.signature[idx])) {
+ return dataPos;
+ }
+ }
+ return -1;
+ }
+ }
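
The validation loop in summarize above recovers a public key from the compact signature and checks it against a layered hash of the signed input's outpoint and the locking script up to the "|" separator. A condensed sketch of just that message-hash construction, using the same @bsv/sdk calls as the compiled source (dataPos and the spend outpoint come from the surrounding transaction context):

import { Hash, Script, Utils } from "@bsv/sdk";

// Sketch of the digest SigmaIndexer verifies with BSM.verify.
function sigmaMessageHash(spendTxid: string, spendVout: number, script: Script, dataPos: number): number[] {
  const bw = new Utils.Writer();
  bw.write(Utils.toArray(spendTxid, "hex"));
  bw.writeUInt32LE(spendVout);
  const inputHash = Hash.sha256(bw.toArray());       // hash of the signed input's outpoint
  const prefix = new Script();
  prefix.chunks = script.chunks.slice(0, dataPos);   // script up to the "|" separator
  const outputHash = Hash.sha256(prefix.toBinary());
  return Hash.sha256(inputHash.concat(outputHash));  // combined message digest
}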
package/dist/indexers/index.js
@@ -0,0 +1,13 @@
+ export { Indexer, } from "./types";
+ export { Outpoint } from "./Outpoint";
+ export { parseAddress } from "./parseAddress";
+ export { FundIndexer } from "./FundIndexer";
+ export { LockIndexer } from "./LockIndexer";
+ export { InscriptionIndexer, } from "./InscriptionIndexer";
+ export { SigmaIndexer } from "./SigmaIndexer";
+ export { MapIndexer } from "./MapIndexer";
+ export { OriginIndexer } from "./OriginIndexer";
+ export { Bsv21Indexer, deriveFundAddress } from "./Bsv21Indexer";
+ export { OrdLockIndexer, Listing } from "./OrdLockIndexer";
+ export { OpNSIndexer } from "./OpNSIndexer";
+ export { CosignIndexer } from "./CosignIndexer";
package/dist/indexers/parseAddress.js
@@ -0,0 +1,24 @@
+ import { OP, Utils } from "@bsv/sdk";
+ /**
+ * Parse a P2PKH address from a locking script
+ * @param script - The locking script to parse
+ * @param offset - Chunk offset (default 0)
+ * @param network - Network type for address encoding
+ * @returns Base58Check encoded address or empty string if not P2PKH
+ */
+ export function parseAddress(script, offset = 0, network = "mainnet") {
+ if (script.chunks[0 + offset]?.op !== OP.OP_DUP)
+ return "";
+ if (script.chunks[1 + offset]?.op !== OP.OP_HASH160)
+ return "";
+ if (script.chunks[2 + offset]?.data?.length !== 20)
+ return "";
+ if (script.chunks[3 + offset]?.op !== OP.OP_EQUALVERIFY)
+ return "";
+ if (script.chunks[4 + offset]?.op !== OP.OP_CHECKSIG)
+ return "";
+ const data = script.chunks[2 + offset].data;
+ if (!data)
+ return "";
+ return Utils.toBase58Check(data, network === "mainnet" ? [0] : [111]);
+ }
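
parseAddress only matches the canonical five-chunk P2PKH template at the given chunk offset and returns an empty string for anything else. A quick hedged usage sketch (root re-export of parseAddress assumed, per dist/indexers/index.js):

import { Script } from "@bsv/sdk";
import { parseAddress } from "@1sat/wallet-toolbox"; // export path assumed

// OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
const p2pkh = Script.fromHex("76a914" + "00".repeat(20) + "88ac");
parseAddress(p2pkh, 0, "mainnet"); // Base58Check address with version byte 0x00
parseAddress(p2pkh, 0, "testnet"); // same hash160, version byte 0x6f (111)
parseAddress(Script.fromASM("OP_RETURN"), 0, "mainnet"); // "" (not P2PKH)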
package/dist/indexers/types.js
@@ -0,0 +1,18 @@
+ /**
+ * Base indexer class that all indexers extend
+ */
+ export class Indexer {
+ owners;
+ network;
+ constructor(owners = new Set(), network = "mainnet") {
+ this.owners = owners;
+ this.network = network;
+ }
+ /**
+ * Post-parse phase with full transaction context.
+ * Used for cross-output/cross-input validation and transaction-level summarization.
+ */
+ async summarize(_ctx, _isBroadcasted) {
+ return undefined;
+ }
+ }
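
The base class above defines the two-phase contract the indexers in this release follow: parse() runs per output, summarize() runs once per transaction with full spend/output context. A hedged skeleton of a custom indexer built on it (the Txo and context shapes are assumptions, shown as any; root re-export of Indexer assumed):

import { Indexer } from "@1sat/wallet-toolbox"; // export path assumed

class ExampleIndexer extends Indexer {
  tag = "example";   // storage tag, mirroring tag values like "origin" and "sigma" above
  name = "Example";

  async parse(txo: any) {
    // Per-output pass: inspect txo.output.lockingScript and return
    // { data, tags, owner?, basket? } or undefined when the output is not relevant.
    return undefined;
  }

  async summarize(ctx: any, _isBroadcasted?: boolean) {
    // Transaction-level pass: cross-check ctx.spends and ctx.txos, return a summary or undefined.
    return undefined;
  }
}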
package/dist/services/OneSatServices.d.ts
@@ -1,8 +1,16 @@
  import { Beef, Transaction } from "@bsv/sdk";
- import type { WalletStorageManager } from "@bsv/wallet-toolbox/mobile";
- import type { BlockHeader, GetMerklePathResult, GetRawTxResult, GetScriptHashHistoryResult, GetStatusForTxidsResult, GetUtxoStatusOutputFormat, GetUtxoStatusResult, PostBeefResult, ServicesCallHistory, WalletServices } from "@bsv/wallet-toolbox/mobile/out/src/sdk/WalletServices.interfaces";
- import type { Chain } from "@bsv/wallet-toolbox/mobile/out/src/sdk/types";
- import type { TableOutput } from "@bsv/wallet-toolbox/mobile/out/src/storage/schema/tables/TableOutput";
+ import type { TableOutput, WalletStorageManager, sdk as toolboxSdk } from "@bsv/wallet-toolbox";
+ type Chain = toolboxSdk.Chain;
+ type BlockHeader = toolboxSdk.BlockHeader;
+ type GetMerklePathResult = toolboxSdk.GetMerklePathResult;
+ type GetRawTxResult = toolboxSdk.GetRawTxResult;
+ type GetScriptHashHistoryResult = toolboxSdk.GetScriptHashHistoryResult;
+ type GetStatusForTxidsResult = toolboxSdk.GetStatusForTxidsResult;
+ type GetUtxoStatusOutputFormat = toolboxSdk.GetUtxoStatusOutputFormat;
+ type GetUtxoStatusResult = toolboxSdk.GetUtxoStatusResult;
+ type PostBeefResult = toolboxSdk.PostBeefResult;
+ type ServicesCallHistory = toolboxSdk.ServicesCallHistory;
+ type WalletServices = toolboxSdk.WalletServices;
  import { ArcadeClient, BeefClient, Bsv21Client, ChaintracksClient, OrdfsClient, OwnerClient, TxoClient } from "./client";
  import type { Capability, SyncOutput } from "./types";
  export type { SyncOutput };
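
The OneSatServices.d.ts change above swaps deep @bsv/wallet-toolbox/mobile/out/src paths for the package's sdk namespace export. Consumers that previously copied those deep imports can alias the same types the same way, for example:

import type { sdk as toolboxSdk } from "@bsv/wallet-toolbox";

type Chain = toolboxSdk.Chain;
type WalletServices = toolboxSdk.WalletServices;
type PostBeefResult = toolboxSdk.PostBeefResult;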