@1sat/wallet-toolbox 0.0.21 → 0.0.22
- package/dist/api/sweep/index.d.ts +9 -2
- package/dist/api/sweep/index.js +154 -1
- package/dist/api/sweep/types.d.ts +23 -0
- package/dist/backup/FileBackupProvider.d.ts +96 -0
- package/dist/backup/FileBackupProvider.js +184 -0
- package/dist/backup/FileRestoreReader.d.ts +58 -0
- package/dist/backup/FileRestoreReader.js +88 -0
- package/dist/backup/index.d.ts +5 -0
- package/dist/backup/index.js +4 -0
- package/dist/backup/types.d.ts +31 -0
- package/dist/backup/types.js +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +2 -0
- package/package.json +4 -2
package/dist/api/sweep/index.d.ts
CHANGED
@@ -5,7 +5,7 @@
  */
 import type { OneSatContext, Skill } from "../skills/types";
 import type { IndexedOutput } from "../../services/types";
-import type { SweepBsvRequest, SweepBsvResponse, SweepInput } from "./types";
+import type { SweepBsvRequest, SweepBsvResponse, SweepInput, SweepOrdinalsRequest, SweepOrdinalsResponse } from "./types";
 export * from "./types";
 /**
  * Prepare sweep inputs from IndexedOutput objects by fetching locking scripts.
@@ -20,4 +20,11 @@ export declare function prepareSweepInputs(ctx: OneSatContext, utxos: IndexedOut
  * value is swept (minus fees).
  */
 export declare const sweepBsv: Skill<SweepBsvRequest, SweepBsvResponse>;
-
+/**
+ * Sweep ordinals from external inputs into the destination wallet.
+ *
+ * Each input is expected to be a 1-sat ordinal output. Each ordinal is
+ * transferred to a derived address using the wallet's key derivation.
+ */
+export declare const sweepOrdinals: Skill<SweepOrdinalsRequest, SweepOrdinalsResponse>;
+export declare const sweepSkills: (Skill<SweepBsvRequest, SweepBsvResponse> | Skill<SweepOrdinalsRequest, SweepOrdinalsResponse>)[];

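For orientation, the declaration file now exposes both skills through `sweepSkills`. A minimal sketch of how a consumer could enumerate them; the root re-export path is an assumption, since the `dist/index.*` hunks are not shown in this diff, and each skill's `meta` fields are taken from the `index.js` hunk below.

```typescript
// Sketch only: assumes sweepSkills is re-exported from the package root.
import { sweepSkills } from "@1sat/wallet-toolbox";

for (const skill of sweepSkills) {
  // meta.name / meta.description are defined on both sweepBsv and sweepOrdinals.
  console.log(`${skill.meta.name}: ${skill.meta.description}`);
}
```
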
package/dist/api/sweep/index.js
CHANGED
@@ -217,5 +217,158 @@ export const sweepBsv = {
         }
     },
 };
+/**
+ * Sweep ordinals from external inputs into the destination wallet.
+ *
+ * Each input is expected to be a 1-sat ordinal output. Each ordinal is
+ * transferred to a derived address using the wallet's key derivation.
+ */
+export const sweepOrdinals = {
+    meta: {
+        name: "sweepOrdinals",
+        description: "Sweep ordinals from external wallet (via WIF) into the connected wallet",
+        category: "sweep",
+        requiresServices: true,
+        inputSchema: {
+            type: "object",
+            properties: {
+                inputs: {
+                    type: "array",
+                    description: "Ordinal UTXOs to sweep",
+                    items: {
+                        type: "object",
+                        properties: {
+                            outpoint: { type: "string", description: "Outpoint (txid_vout)" },
+                            satoshis: { type: "integer", description: "Satoshis (should be 1)" },
+                            lockingScript: { type: "string", description: "Locking script hex" },
+                            contentType: { type: "string", description: "Content type from metadata" },
+                            origin: { type: "string", description: "Origin outpoint" },
+                        },
+                        required: ["outpoint", "satoshis", "lockingScript"],
+                    },
+                },
+                wif: {
+                    type: "string",
+                    description: "WIF private key controlling the inputs",
+                },
+            },
+            required: ["inputs", "wif"],
+        },
+    },
+    async execute(ctx, request) {
+        if (!ctx.services) {
+            return { error: "services-required" };
+        }
+        try {
+            const { inputs, wif } = request;
+            if (!inputs || inputs.length === 0) {
+                return { error: "no-inputs" };
+            }
+            // Parse WIF
+            const privateKey = PrivateKey.fromWif(wif);
+            // Fetch BEEF for all input transactions and merge them
+            const txids = [...new Set(inputs.map((i) => i.outpoint.split("_")[0]))];
+            console.log(`[sweepOrdinals] Fetching BEEF for ${txids.length} transactions`);
+            const firstBeef = await ctx.services.getBeefForTxid(txids[0]);
+            for (let i = 1; i < txids.length; i++) {
+                const additionalBeef = await ctx.services.getBeefForTxid(txids[i]);
+                firstBeef.mergeBeef(additionalBeef);
+            }
+            console.log(`[sweepOrdinals] Merged BEEF valid=${firstBeef.isValid()}, txs=${firstBeef.txs.length}`);
+            // Build input descriptors
+            const inputDescriptors = inputs.map((input) => {
+                const [txid, voutStr] = input.outpoint.split("_");
+                return {
+                    outpoint: `${txid}.${voutStr}`,
+                    inputDescription: `Ordinal ${input.origin ?? input.outpoint}`,
+                    unlockingScriptLength: 108,
+                    sequenceNumber: 0xffffffff,
+                };
+            });
+            // Build outputs - one per ordinal, each 1 sat to derived address
+            const outputs = [];
+            for (const input of inputs) {
+                // Derive a unique public key for this ordinal
+                const pubKeyResult = await ctx.wallet.getPublicKey({
+                    protocolID: [1, "ordinal"],
+                    keyID: input.outpoint,
+                    forSelf: true,
+                });
+                if (!pubKeyResult.publicKey) {
+                    return { error: `Failed to derive key for ${input.outpoint}` };
+                }
+                // Create P2PKH locking script from derived public key
+                const derivedAddress = pubKeyResult.publicKey;
+                const lockingScript = new P2PKH().lock(derivedAddress);
+                outputs.push({
+                    lockingScript: lockingScript.toHex(),
+                    satoshis: 1,
+                    outputDescription: `Ordinal ${input.origin ?? input.outpoint}`,
+                    basket: "1sat",
+                    tags: ["insc", "origin"],
+                });
+            }
+            const beefData = firstBeef.toBinary();
+            // Create action to get signable transaction
+            const createResult = await ctx.wallet.createAction({
+                description: `Sweep ${inputs.length} ordinal${inputs.length !== 1 ? "s" : ""}`,
+                inputBEEF: beefData,
+                inputs: inputDescriptors,
+                outputs,
+                options: { signAndProcess: false },
+            });
+            if ("error" in createResult && createResult.error) {
+                return { error: String(createResult.error) };
+            }
+            if (!createResult.signableTransaction) {
+                return { error: "no-signable-transaction" };
+            }
+            // Sign each input with our external key
+            const tx = Transaction.fromBEEF(createResult.signableTransaction.tx);
+            console.log(`[sweepOrdinals] Transaction has ${tx.inputs.length} inputs, ${tx.outputs.length} outputs`);
+            // Build a set of outpoints we control
+            const ourOutpoints = new Set(inputs.map((input) => {
+                const [txid, vout] = input.outpoint.split("_");
+                return `${txid}.${vout}`;
+            }));
+            // Set up P2PKH unlocker on each input we control
+            for (let i = 0; i < tx.inputs.length; i++) {
+                const txInput = tx.inputs[i];
+                const inputOutpoint = `${txInput.sourceTXID}.${txInput.sourceOutputIndex}`;
+                if (ourOutpoints.has(inputOutpoint)) {
+                    const p2pkh = new P2PKH();
+                    txInput.unlockingScriptTemplate = p2pkh.unlock(privateKey, "all", true);
+                }
+            }
+            await tx.sign();
+            // Extract unlocking scripts for signAction
+            const spends = {};
+            for (let i = 0; i < tx.inputs.length; i++) {
+                const txInput = tx.inputs[i];
+                const inputOutpoint = `${txInput.sourceTXID}.${txInput.sourceOutputIndex}`;
+                if (ourOutpoints.has(inputOutpoint)) {
+                    spends[i] = { unlockingScript: txInput.unlockingScript?.toHex() ?? "" };
+                }
+            }
+            // Complete the action with our signatures
+            const signResult = await ctx.wallet.signAction({
+                reference: createResult.signableTransaction.reference,
+                spends,
+            });
+            if ("error" in signResult) {
+                return { error: String(signResult.error) };
+            }
+            return {
+                txid: signResult.txid,
+                beef: signResult.tx ? Array.from(signResult.tx) : undefined,
+            };
+        }
+        catch (error) {
+            return {
+                error: error instanceof Error ? error.message : "unknown-error",
+            };
+        }
+    },
+};
 // Export skills array for registry
-export const sweepSkills = [sweepBsv];
+export const sweepSkills = [sweepBsv, sweepOrdinals];

package/dist/api/sweep/types.d.ts
CHANGED
@@ -28,3 +28,26 @@ export interface SweepBsvResponse {
     /** Error message if failed */
     error?: string;
 }
+/** Input for ordinal sweep operations */
+export interface SweepOrdinalInput extends SweepInput {
+    /** Content type from ordfs metadata */
+    contentType?: string;
+    /** Origin outpoint for tracking */
+    origin?: string;
+}
+/** Request to sweep ordinals */
+export interface SweepOrdinalsRequest {
+    /** Ordinal UTXOs to sweep */
+    inputs: SweepOrdinalInput[];
+    /** WIF private key controlling the inputs */
+    wif: string;
+}
+/** Response from ordinal sweep operation */
+export interface SweepOrdinalsResponse {
+    /** Transaction ID if successful */
+    txid?: string;
+    /** BEEF (transaction with validity proof) */
+    beef?: number[];
+    /** Error message if failed */
+    error?: string;
+}

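Taken together, the new skill and types suggest the call pattern below. This is a sketch under assumptions: the package-root re-export and the placeholder outpoint, script, and WIF values are not from the diff, and `ctx` is a `OneSatContext` with a connected wallet and services whose construction is out of scope here.

```typescript
// Sketch only: import path and placeholder values are assumptions.
import { sweepOrdinals, type SweepOrdinalsRequest } from "@1sat/wallet-toolbox";

async function sweepExternalOrdinals(ctx: any /* OneSatContext with wallet + services */) {
  const request: SweepOrdinalsRequest = {
    inputs: [
      {
        outpoint: "<txid>_0",            // txid_vout of a 1-sat ordinal output
        satoshis: 1,
        lockingScript: "<locking script hex>",
        origin: "<txid>_0",              // optional: origin outpoint for tracking
      },
    ],
    wif: "<WIF private key controlling the inputs>",
  };

  const result = await sweepOrdinals.execute(ctx, request);
  if (result.error) {
    console.error("sweep failed:", result.error);
  } else {
    console.log("swept in tx", result.txid);
  }
}
```

Internally (per the `index.js` hunk above), the skill merges BEEF for every source transaction, derives a per-ordinal destination key via `getPublicKey({ protocolID: [1, "ordinal"], keyID: outpoint })`, builds the transaction with `createAction({ signAndProcess: false })`, signs the external inputs with the supplied WIF, and completes it with `signAction`.
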
package/dist/backup/FileBackupProvider.d.ts
ADDED
@@ -0,0 +1,96 @@
+import { Zip, ZipDeflate } from "fflate";
+import type { sdk } from "@bsv/wallet-toolbox-mobile/out/src/index.client.js";
+import type { TableSettings } from "@bsv/wallet-toolbox-mobile/out/src/storage/schema/tables/TableSettings.js";
+import type { TableSyncState } from "@bsv/wallet-toolbox-mobile/out/src/storage/schema/tables/TableSyncState.js";
+import type { TableUser } from "@bsv/wallet-toolbox-mobile/out/src/storage/schema/tables/TableUser.js";
+type AuthId = sdk.AuthId;
+type ProcessSyncChunkResult = sdk.ProcessSyncChunkResult;
+type RequestSyncChunkArgs = sdk.RequestSyncChunkArgs;
+type SyncChunk = sdk.SyncChunk;
+type WalletStorageProvider = sdk.WalletStorageProvider;
+type WalletServices = sdk.WalletServices;
+/**
+ * FileBackupProvider implements WalletStorageProvider to receive sync chunks
+ * during wallet export. It encodes each chunk with MessagePack and streams
+ * it directly to a fflate Zip instance.
+ *
+ * Usage:
+ * 1. Create fflate.Zip with streaming callback
+ * 2. Create FileBackupProvider with the zip instance
+ * 3. Call storage.syncToWriter(auth, provider)
+ * 4. Provider receives processSyncChunk() calls and writes to zip
+ * 5. Call provider.getChunkCount() to get chunk count for manifest
+ *
+ * @example
+ * ```typescript
+ * import { Zip } from 'fflate';
+ *
+ * const chunks: Uint8Array[] = [];
+ * const zip = new Zip((err, data, final) => {
+ *   if (err) throw err;
+ *   chunks.push(data);
+ *   if (final) {
+ *     const blob = new Blob(chunks, { type: 'application/zip' });
+ *     // Download or save blob
+ *   }
+ * });
+ *
+ * const provider = new FileBackupProvider(zip, storage.getSettings(), identityKey);
+ * await storage.syncToWriter(auth, provider);
+ * console.log(`Exported ${provider.getChunkCount()} chunks`);
+ * ```
+ */
+export declare class FileBackupProvider implements WalletStorageProvider {
+    private zip;
+    private chunkCount;
+    private settings;
+    private identityKey;
+    constructor(zip: Zip, settings: TableSettings, identityKey: string);
+    /**
+     * Get the number of chunks written. Call after sync completes.
+     */
+    getChunkCount(): number;
+    /**
+     * Add a file to the zip archive with compression.
+     * Creates a ZipDeflate stream and pushes the data in one chunk.
+     */
+    private addFileToZip;
+    isStorageProvider(): boolean;
+    setServices(_v: WalletServices): void;
+    findOrInsertSyncStateAuth(_auth: AuthId, _storageIdentityKey: string, _storageName: string): Promise<{
+        syncState: TableSyncState;
+        isNew: boolean;
+    }>;
+    setActive(_auth: AuthId, _newActiveStorageIdentityKey: string): Promise<number>;
+    getSyncChunk(_args: RequestSyncChunkArgs): Promise<SyncChunk>;
+    /**
+     * Receives sync chunks from WalletStorageManager.syncToWriter().
+     * Encodes each chunk with MessagePack and adds to the zip.
+     */
+    processSyncChunk(_args: RequestSyncChunkArgs, chunk: SyncChunk): Promise<ProcessSyncChunkResult>;
+    makeAvailable(): Promise<TableSettings>;
+    migrate(_storageName: string, _storageIdentityKey: string): Promise<string>;
+    destroy(): Promise<void>;
+    findOrInsertUser(identityKey: string): Promise<{
+        user: TableUser;
+        isNew: boolean;
+    }>;
+    abortAction(_auth: AuthId, _args: unknown): Promise<never>;
+    createAction(_auth: AuthId, _args: unknown): Promise<never>;
+    processAction(_auth: AuthId, _args: unknown): Promise<never>;
+    internalizeAction(_auth: AuthId, _args: unknown): Promise<never>;
+    insertCertificateAuth(_auth: AuthId, _certificate: unknown): Promise<number>;
+    relinquishCertificate(_auth: AuthId, _args: unknown): Promise<number>;
+    relinquishOutput(_auth: AuthId, _args: unknown): Promise<number>;
+    isAvailable(): boolean;
+    getServices(): WalletServices;
+    getSettings(): TableSettings;
+    findCertificatesAuth(_auth: AuthId, _args: unknown): Promise<never[]>;
+    findOutputBasketsAuth(_auth: AuthId, _args: unknown): Promise<never[]>;
+    findOutputsAuth(_auth: AuthId, _args: unknown): Promise<never[]>;
+    findProvenTxReqs(_args: unknown): Promise<never[]>;
+    listActions(_auth: AuthId, _vargs: unknown): Promise<never>;
+    listCertificates(_auth: AuthId, _vargs: unknown): Promise<never>;
+    listOutputs(_auth: AuthId, _vargs: unknown): Promise<never>;
+}
+export { Zip, ZipDeflate };

package/dist/backup/FileBackupProvider.js
ADDED
@@ -0,0 +1,184 @@
+import { encode } from "@msgpack/msgpack";
+import { Zip, ZipDeflate } from "fflate";
+/**
+ * FileBackupProvider implements WalletStorageProvider to receive sync chunks
+ * during wallet export. It encodes each chunk with MessagePack and streams
+ * it directly to a fflate Zip instance.
+ *
+ * Usage:
+ * 1. Create fflate.Zip with streaming callback
+ * 2. Create FileBackupProvider with the zip instance
+ * 3. Call storage.syncToWriter(auth, provider)
+ * 4. Provider receives processSyncChunk() calls and writes to zip
+ * 5. Call provider.getChunkCount() to get chunk count for manifest
+ *
+ * @example
+ * ```typescript
+ * import { Zip } from 'fflate';
+ *
+ * const chunks: Uint8Array[] = [];
+ * const zip = new Zip((err, data, final) => {
+ *   if (err) throw err;
+ *   chunks.push(data);
+ *   if (final) {
+ *     const blob = new Blob(chunks, { type: 'application/zip' });
+ *     // Download or save blob
+ *   }
+ * });
+ *
+ * const provider = new FileBackupProvider(zip, storage.getSettings(), identityKey);
+ * await storage.syncToWriter(auth, provider);
+ * console.log(`Exported ${provider.getChunkCount()} chunks`);
+ * ```
+ */
+export class FileBackupProvider {
+    zip;
+    chunkCount = 0;
+    settings;
+    identityKey;
+    constructor(zip, settings, identityKey) {
+        this.zip = zip;
+        this.settings = settings;
+        this.identityKey = identityKey;
+    }
+    /**
+     * Get the number of chunks written. Call after sync completes.
+     */
+    getChunkCount() {
+        return this.chunkCount;
+    }
+    /**
+     * Add a file to the zip archive with compression.
+     * Creates a ZipDeflate stream and pushes the data in one chunk.
+     */
+    addFileToZip(filename, data) {
+        const deflate = new ZipDeflate(filename, { level: 6 });
+        this.zip.add(deflate);
+        deflate.push(data, true); // true = final chunk
+    }
+    // WalletStorageProvider interface methods
+    isStorageProvider() {
+        return false;
+    }
+    setServices(_v) {
+        // Not needed for backup
+    }
+    // WalletStorageSync interface methods
+    async findOrInsertSyncStateAuth(_auth, _storageIdentityKey, _storageName) {
+        throw new Error("Not supported: findOrInsertSyncStateAuth");
+    }
+    async setActive(_auth, _newActiveStorageIdentityKey) {
+        throw new Error("Not supported: setActive");
+    }
+    async getSyncChunk(_args) {
+        throw new Error("Not supported: getSyncChunk - this is a write-only provider");
+    }
+    /**
+     * Receives sync chunks from WalletStorageManager.syncToWriter().
+     * Encodes each chunk with MessagePack and adds to the zip.
+     */
+    async processSyncChunk(_args, chunk) {
+        const chunkName = `chunk-${String(this.chunkCount).padStart(4, "0")}.bin`;
+        const encoded = encode(chunk);
+        // Add chunk to zip with compression using ZipDeflate
+        this.addFileToZip(chunkName, new Uint8Array(encoded));
+        this.chunkCount++;
+        // Check if this chunk has any data - if all arrays are empty or undefined, we're done
+        const hasData = (chunk.provenTxs && chunk.provenTxs.length > 0) ||
+            (chunk.provenTxReqs && chunk.provenTxReqs.length > 0) ||
+            (chunk.outputBaskets && chunk.outputBaskets.length > 0) ||
+            (chunk.txLabels && chunk.txLabels.length > 0) ||
+            (chunk.outputTags && chunk.outputTags.length > 0) ||
+            (chunk.transactions && chunk.transactions.length > 0) ||
+            (chunk.txLabelMaps && chunk.txLabelMaps.length > 0) ||
+            (chunk.commissions && chunk.commissions.length > 0) ||
+            (chunk.outputs && chunk.outputs.length > 0) ||
+            (chunk.outputTagMaps && chunk.outputTagMaps.length > 0) ||
+            (chunk.certificates && chunk.certificates.length > 0) ||
+            (chunk.certificateFields && chunk.certificateFields.length > 0);
+        return {
+            done: !hasData,
+            maxUpdated_at: undefined,
+            updates: 0,
+            inserts: 0,
+        };
+    }
+    // WalletStorageWriter interface methods (throw not supported)
+    async makeAvailable() {
+        return this.settings;
+    }
+    async migrate(_storageName, _storageIdentityKey) {
+        throw new Error("Not supported: migrate");
+    }
+    async destroy() {
+        throw new Error("Not supported: destroy");
+    }
+    async findOrInsertUser(identityKey) {
+        // Return a mock user for backup purposes
+        const now = new Date();
+        return {
+            user: {
+                userId: 1,
+                identityKey: identityKey || this.identityKey,
+                activeStorage: this.settings.storageIdentityKey || "",
+                created_at: now,
+                updated_at: now,
+            },
+            isNew: false,
+        };
+    }
+    async abortAction(_auth, _args) {
+        throw new Error("Not supported: abortAction");
+    }
+    async createAction(_auth, _args) {
+        throw new Error("Not supported: createAction");
+    }
+    async processAction(_auth, _args) {
+        throw new Error("Not supported: processAction");
+    }
+    async internalizeAction(_auth, _args) {
+        throw new Error("Not supported: internalizeAction");
+    }
+    async insertCertificateAuth(_auth, _certificate) {
+        throw new Error("Not supported: insertCertificateAuth");
+    }
+    async relinquishCertificate(_auth, _args) {
+        throw new Error("Not supported: relinquishCertificate");
+    }
+    async relinquishOutput(_auth, _args) {
+        throw new Error("Not supported: relinquishOutput");
+    }
+    // WalletStorageReader interface methods (throw not supported)
+    isAvailable() {
+        return true;
+    }
+    getServices() {
+        throw new Error("Not supported: getServices");
+    }
+    getSettings() {
+        return this.settings;
+    }
+    async findCertificatesAuth(_auth, _args) {
+        throw new Error("Not supported: findCertificatesAuth");
+    }
+    async findOutputBasketsAuth(_auth, _args) {
+        throw new Error("Not supported: findOutputBasketsAuth");
+    }
+    async findOutputsAuth(_auth, _args) {
+        throw new Error("Not supported: findOutputsAuth");
+    }
+    async findProvenTxReqs(_args) {
+        throw new Error("Not supported: findProvenTxReqs");
+    }
+    async listActions(_auth, _vargs) {
+        throw new Error("Not supported: listActions");
+    }
+    async listCertificates(_auth, _vargs) {
+        throw new Error("Not supported: listCertificates");
+    }
+    async listOutputs(_auth, _vargs) {
+        throw new Error("Not supported: listOutputs");
+    }
+}
+// Re-export Zip and ZipDeflate for convenience
+export { Zip, ZipDeflate };

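A minimal end-to-end export sketch based on the JSDoc above, extended to write the `manifest.json` entry that `FileRestoreReader` later expects. Assumptions: the `storage` object is a wallet-toolbox WalletStorageManager exposing `syncToWriter` and `getSettings`, `auth`/`identityKey`/`chain` come from the host application, the `"@1sat/wallet-toolbox/backup"` import path is a guess at the re-export location, and the manifest shape follows `backup/types.d.ts`.

```typescript
import { Zip, ZipDeflate, strToU8 } from "fflate";
import { FileBackupProvider, type BackupManifest } from "@1sat/wallet-toolbox/backup"; // path assumed

async function exportBackup(
  storage: any,            // WalletStorageManager (assumed)
  auth: any,               // sdk.AuthId for the active user (assumed)
  identityKey: string,
  chain: "main" | "test"
): Promise<Blob> {
  const parts: Uint8Array[] = [];
  const zip = new Zip((err, data) => {
    if (err) throw err;
    parts.push(data); // collect the streamed zip bytes
  });

  // Streams every sync chunk into the archive as chunk-0000.bin, chunk-0001.bin, ...
  const provider = new FileBackupProvider(zip, storage.getSettings(), identityKey);
  await storage.syncToWriter(auth, provider);

  // Manifest describing the backup (shape defined in backup/types.d.ts).
  const manifest: BackupManifest = {
    version: 1,
    createdAt: new Date().toISOString(),
    chain,
    identityKey,
    chunkCount: provider.getChunkCount(),
  };
  const entry = new ZipDeflate("manifest.json", { level: 6 });
  zip.add(entry);
  entry.push(strToU8(JSON.stringify(manifest)), true);

  zip.end(); // finalize; ZipDeflate is synchronous, so `parts` is complete here
  return new Blob(parts, { type: "application/zip" });
}
```

Note that, per `processSyncChunk` above, the provider writes every chunk it receives and only reports `done` once a chunk arrives with no rows, so the final `chunk-NNNN.bin` is expected to be essentially empty.
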
package/dist/backup/FileRestoreReader.d.ts
ADDED
@@ -0,0 +1,58 @@
+import type { Unzipped } from "fflate";
+import type { sdk } from "@bsv/wallet-toolbox-mobile/out/src/index.client.js";
+import type { TableSettings } from "@bsv/wallet-toolbox-mobile/out/src/storage/schema/tables/TableSettings.js";
+import type { BackupManifest } from "./types";
+type RequestSyncChunkArgs = sdk.RequestSyncChunkArgs;
+type SyncChunk = sdk.SyncChunk;
+type WalletStorageSyncReader = sdk.WalletStorageSyncReader;
+/**
+ * FileRestoreReader implements WalletStorageSyncReader to serve sync chunks
+ * from a previously exported backup ZIP file during wallet restore.
+ *
+ * Usage:
+ * 1. Unzip the backup file with fflate.unzip()
+ * 2. Read and parse manifest.json
+ * 3. Create FileRestoreReader with unzipped data and manifest
+ * 4. Call storage.syncFromReader(identityKey, reader)
+ * 5. Reader serves chunks sequentially via getSyncChunk()
+ *
+ * @example
+ * ```typescript
+ * import { unzip } from 'fflate';
+ *
+ * const zipData = new Uint8Array(await file.arrayBuffer());
+ * const unzipped = await new Promise<Unzipped>((resolve, reject) => {
+ *   unzip(zipData, (err, data) => err ? reject(err) : resolve(data));
+ * });
+ *
+ * const manifest = JSON.parse(
+ *   new TextDecoder().decode(unzipped['manifest.json'])
+ * );
+ *
+ * const reader = new FileRestoreReader(unzipped, manifest);
+ * await storage.syncFromReader(manifest.identityKey, reader);
+ * ```
+ */
+export declare class FileRestoreReader implements WalletStorageSyncReader {
+    private unzipped;
+    private manifest;
+    private currentChunkIndex;
+    private settings;
+    constructor(unzipped: Unzipped, manifest: BackupManifest);
+    /**
+     * Get the backup manifest.
+     */
+    getManifest(): BackupManifest;
+    /**
+     * Reset the reader to start from the beginning.
+     * Call this if you need to re-read the backup.
+     */
+    reset(): void;
+    makeAvailable(): Promise<TableSettings>;
+    /**
+     * Returns sync chunks sequentially from the backup.
+     * When all chunks are consumed, returns an empty chunk to signal completion.
+     */
+    getSyncChunk(args: RequestSyncChunkArgs): Promise<SyncChunk>;
+}
+export {};

package/dist/backup/FileRestoreReader.js
ADDED
@@ -0,0 +1,88 @@
+import { decode } from "@msgpack/msgpack";
+/**
+ * FileRestoreReader implements WalletStorageSyncReader to serve sync chunks
+ * from a previously exported backup ZIP file during wallet restore.
+ *
+ * Usage:
+ * 1. Unzip the backup file with fflate.unzip()
+ * 2. Read and parse manifest.json
+ * 3. Create FileRestoreReader with unzipped data and manifest
+ * 4. Call storage.syncFromReader(identityKey, reader)
+ * 5. Reader serves chunks sequentially via getSyncChunk()
+ *
+ * @example
+ * ```typescript
+ * import { unzip } from 'fflate';
+ *
+ * const zipData = new Uint8Array(await file.arrayBuffer());
+ * const unzipped = await new Promise<Unzipped>((resolve, reject) => {
+ *   unzip(zipData, (err, data) => err ? reject(err) : resolve(data));
+ * });
+ *
+ * const manifest = JSON.parse(
+ *   new TextDecoder().decode(unzipped['manifest.json'])
+ * );
+ *
+ * const reader = new FileRestoreReader(unzipped, manifest);
+ * await storage.syncFromReader(manifest.identityKey, reader);
+ * ```
+ */
+export class FileRestoreReader {
+    unzipped;
+    manifest;
+    currentChunkIndex = 0;
+    settings;
+    constructor(unzipped, manifest) {
+        this.unzipped = unzipped;
+        this.manifest = manifest;
+        // Create settings from manifest
+        this.settings = {
+            created_at: new Date(manifest.createdAt),
+            updated_at: new Date(manifest.createdAt),
+            storageIdentityKey: `backup-${manifest.identityKey.slice(0, 16)}`,
+            storageName: "FileBackup",
+            chain: manifest.chain,
+            dbtype: "IndexedDB",
+            maxOutputScript: 256,
+        };
+    }
+    /**
+     * Get the backup manifest.
+     */
+    getManifest() {
+        return this.manifest;
+    }
+    /**
+     * Reset the reader to start from the beginning.
+     * Call this if you need to re-read the backup.
+     */
+    reset() {
+        this.currentChunkIndex = 0;
+    }
+    // WalletStorageSyncReader interface methods
+    async makeAvailable() {
+        return this.settings;
+    }
+    /**
+     * Returns sync chunks sequentially from the backup.
+     * When all chunks are consumed, returns an empty chunk to signal completion.
+     */
+    async getSyncChunk(args) {
+        if (this.currentChunkIndex >= this.manifest.chunkCount) {
+            // Return empty chunk to signal completion
+            return {
+                fromStorageIdentityKey: this.settings.storageIdentityKey,
+                toStorageIdentityKey: args.toStorageIdentityKey,
+                userIdentityKey: this.manifest.identityKey,
+            };
+        }
+        const chunkName = `chunk-${String(this.currentChunkIndex).padStart(4, "0")}.bin`;
+        const chunkData = this.unzipped[chunkName];
+        if (!chunkData) {
+            throw new Error(`Missing chunk file: ${chunkName}`);
+        }
+        const chunk = decode(chunkData);
+        this.currentChunkIndex++;
+        return chunk;
+    }
+}

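The restore path mirrors the @example above; this sketch only adds a basic manifest sanity check before syncing. Assumptions: `storage` is the destination wallet's WalletStorageManager exposing `syncFromReader`, and the `"@1sat/wallet-toolbox/backup"` import path is a guess at the re-export location.

```typescript
import { unzip, type Unzipped } from "fflate";
import { FileRestoreReader, type BackupManifest } from "@1sat/wallet-toolbox/backup"; // path assumed

async function restoreBackup(storage: any, zipBytes: Uint8Array): Promise<void> {
  // Unzip the backup archive in memory.
  const unzipped = await new Promise<Unzipped>((resolve, reject) => {
    unzip(zipBytes, (err, data) => (err ? reject(err) : resolve(data)));
  });

  // manifest.json describes the chunk files (shape from backup/types.d.ts).
  const manifest: BackupManifest = JSON.parse(
    new TextDecoder().decode(unzipped["manifest.json"])
  );
  if (manifest.version !== 1) {
    throw new Error(`Unsupported backup version: ${manifest.version}`);
  }

  // Serves chunk-0000.bin ... chunk-NNNN.bin sequentially into the destination storage.
  const reader = new FileRestoreReader(unzipped, manifest);
  await storage.syncFromReader(manifest.identityKey, reader);
}
```
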
package/dist/backup/index.d.ts
ADDED
@@ -0,0 +1,5 @@
+export { FileBackupProvider, Zip, ZipDeflate } from "./FileBackupProvider";
+export { FileRestoreReader } from "./FileRestoreReader";
+export type { BackupManifest, BackupProgressEvent, BackupProgressCallback, } from "./types";
+export { unzip } from "fflate";
+export type { Unzipped } from "fflate";

package/dist/backup/types.d.ts
ADDED
@@ -0,0 +1,31 @@
+import type { sdk } from "@bsv/wallet-toolbox-mobile/out/src/index.client.js";
+type Chain = sdk.Chain;
+/**
+ * Manifest stored in the backup ZIP file describing the backup contents.
+ */
+export interface BackupManifest {
+    /** Backup format version */
+    version: 1;
+    /** ISO timestamp when backup was created */
+    createdAt: string;
+    /** Network chain (main or test) */
+    chain: Chain;
+    /** Wallet identity public key */
+    identityKey: string;
+    /** Number of sync chunks in the backup */
+    chunkCount: number;
+}
+/**
+ * Progress events emitted during backup/restore operations.
+ */
+export interface BackupProgressEvent {
+    stage: "preparing" | "exporting" | "downloading" | "uploading" | "importing" | "complete";
+    message: string;
+    /** Optional progress percentage (0-100) */
+    progress?: number;
+}
+/**
+ * Callback type for progress updates during backup/restore.
+ */
+export type BackupProgressCallback = (event: BackupProgressEvent) => void;
+export {};

package/dist/backup/types.js
ADDED
@@ -0,0 +1 @@
+export {};

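Nothing in this diff consumes the progress types directly (the higher-level backup/restore helpers presumably do); a trivial illustration of the callback shape, with the emitted events being hypothetical stand-ins and the import path assumed:

```typescript
import type { BackupProgressCallback, BackupProgressEvent } from "@1sat/wallet-toolbox/backup"; // path assumed

// Reports coarse progress to whatever UI is listening.
const onProgress: BackupProgressCallback = (event: BackupProgressEvent) => {
  const pct = event.progress !== undefined ? ` (${event.progress}%)` : "";
  console.log(`[backup] ${event.stage}: ${event.message}${pct}`);
};

// A caller driving an export might emit events like these around the steps shown earlier.
onProgress({ stage: "preparing", message: "Collecting wallet settings" });
onProgress({ stage: "exporting", message: "Writing sync chunks", progress: 50 });
onProgress({ stage: "complete", message: "Backup finished", progress: 100 });
```
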
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@1sat/wallet-toolbox",
-  "version": "0.0.21",
+  "version": "0.0.22",
   "description": "BSV wallet library extending @bsv/wallet-toolbox with 1Sat Ordinals protocol support",
   "author": "1Sat Team",
   "license": "MIT",
@@ -39,7 +39,9 @@
   "dependencies": {
     "@bopen-io/templates": "^1.1.6",
     "@bsv/sdk": "^1.10.1",
-    "
+    "@msgpack/msgpack": "^3.1.3",
+    "buffer": "^6.0.3",
+    "fflate": "^0.8.2"
   },
   "peerDependencies": {
     "@bsv/wallet-toolbox-mobile": "npm:@bopen-io/wallet-toolbox-mobile@^1.7.20-idb-fix.15"