@bsv/wallet-toolbox 1.6.2 → 1.6.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/docs/client.md +315 -2500
- package/docs/services.md +15 -4
- package/docs/wallet.md +315 -2500
- package/mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.d.ts.map +1 -1
- package/mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js +1 -2
- package/mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js.map +1 -1
- package/mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.d.ts.map +1 -1
- package/mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js +2 -3
- package/mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js.map +1 -1
- package/mobile/package-lock.json +2 -2
- package/mobile/package.json +1 -1
- package/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.d.ts.map +1 -1
- package/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js +1 -2
- package/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js.map +1 -1
- package/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.d.ts.map +1 -1
- package/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js +2 -3
- package/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js.map +1 -1
- package/out/src/storage/__test/getBeefForTransaction.test.js.map +1 -1
- package/out/test/storage/idb/update.test.js +1 -1
- package/out/test/storage/idb/update.test.js.map +1 -1
- package/out/tsconfig.all.tsbuildinfo +1 -1
- package/package.json +1 -1
- package/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.ts +1 -2
- package/src/services/chaintracker/chaintracks/util/BulkFileDataManager.ts +2 -3
- package/src/storage/__test/getBeefForTransaction.test.ts +2 -5
- package/test/storage/idb/update.test.ts +1 -1
- package/mobile/out/src/Setup.d.ts +0 -267
- package/mobile/out/src/Setup.d.ts.map +0 -1
- package/mobile/out/src/Setup.js +0 -408
- package/mobile/out/src/Setup.js.map +0 -1
- package/mobile/out/src/storage/StorageKnex.d.ts +0 -179
- package/mobile/out/src/storage/StorageKnex.d.ts.map +0 -1
- package/mobile/out/src/storage/StorageKnex.js +0 -1215
- package/mobile/out/src/storage/StorageKnex.js.map +0 -1
- package/mobile/out/src/storage/methods/listActionsKnex.d.ts +0 -6
- package/mobile/out/src/storage/methods/listActionsKnex.d.ts.map +0 -1
- package/mobile/out/src/storage/methods/listActionsKnex.js +0 -198
- package/mobile/out/src/storage/methods/listActionsKnex.js.map +0 -1
- package/mobile/out/src/storage/methods/listOutputsKnex.d.ts +0 -6
- package/mobile/out/src/storage/methods/listOutputsKnex.d.ts.map +0 -1
- package/mobile/out/src/storage/methods/listOutputsKnex.js +0 -241
- package/mobile/out/src/storage/methods/listOutputsKnex.js.map +0 -1
- package/mobile/out/src/storage/methods/purgeData.d.ts +0 -4
- package/mobile/out/src/storage/methods/purgeData.d.ts.map +0 -1
- package/mobile/out/src/storage/methods/purgeData.js +0 -207
- package/mobile/out/src/storage/methods/purgeData.js.map +0 -1
- package/mobile/out/src/storage/methods/reviewStatus.d.ts +0 -20
- package/mobile/out/src/storage/methods/reviewStatus.d.ts.map +0 -1
- package/mobile/out/src/storage/methods/reviewStatus.js +0 -84
- package/mobile/out/src/storage/methods/reviewStatus.js.map +0 -1
- package/mobile/out/src/storage/schema/KnexMigrations.d.ts +0 -39
- package/mobile/out/src/storage/schema/KnexMigrations.d.ts.map +0 -1
- package/mobile/out/src/storage/schema/KnexMigrations.js +0 -410
- package/mobile/out/src/storage/schema/KnexMigrations.js.map +0 -1
- package/mobile/out/test/utils/TestUtilsWalletStorage.d.ts +0 -522
- package/mobile/out/test/utils/TestUtilsWalletStorage.d.ts.map +0 -1
- package/mobile/out/test/utils/TestUtilsWalletStorage.js +0 -1956
- package/mobile/out/test/utils/TestUtilsWalletStorage.js.map +0 -1
|
@@ -1,1956 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
-
if (k2 === undefined) k2 = k;
|
|
4
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
-
}
|
|
8
|
-
Object.defineProperty(o, k2, desc);
|
|
9
|
-
}) : (function(o, m, k, k2) {
|
|
10
|
-
if (k2 === undefined) k2 = k;
|
|
11
|
-
o[k2] = m[k];
|
|
12
|
-
}));
|
|
13
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
-
}) : function(o, v) {
|
|
16
|
-
o["default"] = v;
|
|
17
|
-
});
|
|
18
|
-
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
-
var ownKeys = function(o) {
|
|
20
|
-
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
-
var ar = [];
|
|
22
|
-
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
-
return ar;
|
|
24
|
-
};
|
|
25
|
-
return ownKeys(o);
|
|
26
|
-
};
|
|
27
|
-
return function (mod) {
|
|
28
|
-
if (mod && mod.__esModule) return mod;
|
|
29
|
-
var result = {};
|
|
30
|
-
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
-
__setModuleDefault(result, mod);
|
|
32
|
-
return result;
|
|
33
|
-
};
|
|
34
|
-
})();
|
|
35
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
-
};
|
|
38
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
-
exports.normalizeDate = exports.triggerForeignKeyConstraintError = exports.triggerUniqueConstraintError = exports.logUniqueConstraintError = exports.setLogging = exports.validateUpdateTime = exports.verifyValues = exports.updateTable = exports.logger = exports._tu = exports.TestUtilsWalletStorage = void 0;
|
|
40
|
-
exports.expectToThrowWERR = expectToThrowWERR;
|
|
41
|
-
exports.cleanUnsentTransactionsUsingAbort = cleanUnsentTransactionsUsingAbort;
|
|
42
|
-
exports.cleanUnsignedTransactionsUsingAbort = cleanUnsignedTransactionsUsingAbort;
|
|
43
|
-
exports.cleanUnprocessedTransactionsUsingAbort = cleanUnprocessedTransactionsUsingAbort;
|
|
44
|
-
exports.logTransaction = logTransaction;
|
|
45
|
-
exports.logOutput = logOutput;
|
|
46
|
-
exports.logInput = logInput;
|
|
47
|
-
exports.logBasket = logBasket;
|
|
48
|
-
const dotenv = __importStar(require("dotenv"));
|
|
49
|
-
const path_1 = __importDefault(require("path"));
|
|
50
|
-
const fs_1 = require("fs");
|
|
51
|
-
const knex_1 = require("knex");
|
|
52
|
-
const sdk_1 = require("@bsv/sdk");
|
|
53
|
-
const StorageIdb_1 = require("../../src/storage/StorageIdb");
|
|
54
|
-
const Setup_1 = require("../../src/Setup");
|
|
55
|
-
const StorageKnex_1 = require("../../src/storage/StorageKnex");
|
|
56
|
-
const Services_1 = require("../../src/services/Services");
|
|
57
|
-
const WERR_errors_1 = require("../../src/sdk/WERR_errors");
|
|
58
|
-
const WalletStorageManager_1 = require("../../src/storage/WalletStorageManager");
|
|
59
|
-
const Monitor_1 = require("../../src/monitor/Monitor");
|
|
60
|
-
const PrivilegedKeyManager_1 = require("../../src/sdk/PrivilegedKeyManager");
|
|
61
|
-
const Wallet_1 = require("../../src/Wallet");
|
|
62
|
-
const StorageClient_1 = require("../../src/storage/remoting/StorageClient");
|
|
63
|
-
const utilityHelpers_1 = require("../../src/utility/utilityHelpers");
|
|
64
|
-
const WalletError_1 = require("../../src/sdk/WalletError");
|
|
65
|
-
const StorageSyncReader_1 = require("../../src/storage/StorageSyncReader");
|
|
66
|
-
const utilityHelpers_noBuffer_1 = require("../../src/utility/utilityHelpers.noBuffer");
|
|
67
|
-
const ScriptTemplateBRC29_1 = require("../../src/utility/ScriptTemplateBRC29");
|
|
68
|
-
dotenv.config();
|
|
69
|
-
const localMySqlConnection = process.env.MYSQL_CONNECTION || '';
|
|
70
|
-
class TestUtilsWalletStorage {
|
|
71
|
-
/**
|
|
72
|
-
* @param chain
|
|
73
|
-
* @returns true if .env has truthy idenityKey, idenityKey2 values for chain
|
|
74
|
-
*/
|
|
75
|
-
static noEnv(chain) {
|
|
76
|
-
try {
|
|
77
|
-
Setup_1.Setup.getEnv(chain);
|
|
78
|
-
return false;
|
|
79
|
-
}
|
|
80
|
-
catch (_a) {
|
|
81
|
-
return true;
|
|
82
|
-
}
|
|
83
|
-
}
|
|
84
|
-
/**
|
|
85
|
-
* @param chain
|
|
86
|
-
* @returns true if .env is not valid for chain or testIdentityKey or testFilePath are undefined or empty.
|
|
87
|
-
*/
|
|
88
|
-
static noTestEnv(chain) {
|
|
89
|
-
try {
|
|
90
|
-
const env = _tu.getEnv(chain);
|
|
91
|
-
return !env.testIdentityKey || !env.testFilePath;
|
|
92
|
-
}
|
|
93
|
-
catch (_a) {
|
|
94
|
-
return true;
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
static getEnvFlags(chain) {
|
|
98
|
-
const logTests = !!process.env.LOGTESTS;
|
|
99
|
-
const runMySQL = !!process.env.RUNMYSQL;
|
|
100
|
-
const runSlowTests = !!process.env.RUNSLOWTESTS;
|
|
101
|
-
return {
|
|
102
|
-
chain,
|
|
103
|
-
runMySQL,
|
|
104
|
-
runSlowTests,
|
|
105
|
-
logTests
|
|
106
|
-
};
|
|
107
|
-
}
|
|
108
|
-
static getEnv(chain) {
|
|
109
|
-
const flagsEnv = _tu.getEnvFlags(chain);
|
|
110
|
-
// Identity keys of the lead maintainer of this repo...
|
|
111
|
-
const identityKey = (chain === 'main' ? process.env.MY_MAIN_IDENTITY : process.env.MY_TEST_IDENTITY) || '';
|
|
112
|
-
const filePath = chain === 'main' ? process.env.MY_MAIN_FILEPATH : process.env.MY_TEST_FILEPATH;
|
|
113
|
-
const identityKey2 = (chain === 'main' ? process.env.MY_MAIN_IDENTITY2 : process.env.MY_TEST_IDENTITY2) || '';
|
|
114
|
-
const testIdentityKey = chain === 'main' ? process.env.TEST_MAIN_IDENTITY : process.env.TEST_TEST_IDENTITY;
|
|
115
|
-
const testFilePath = chain === 'main' ? process.env.TEST_MAIN_FILEPATH : process.env.TEST_TEST_FILEPATH;
|
|
116
|
-
const cloudMySQLConnection = chain === 'main' ? process.env.MAIN_CLOUD_MYSQL_CONNECTION : process.env.TEST_CLOUD_MYSQL_CONNECTION;
|
|
117
|
-
const DEV_KEYS = process.env.DEV_KEYS || '{}';
|
|
118
|
-
const taalApiKey = (chain === 'main' ? process.env.MAIN_TAAL_API_KEY : process.env.TEST_TAAL_API_KEY) || '';
|
|
119
|
-
const bitailsApiKey = (chain === 'main' ? process.env.MAIN_BITAILS_API_KEY : process.env.TEST_BITAILS_API_KEY) || '';
|
|
120
|
-
const whatsonchainApiKey = (chain === 'main' ? process.env.MAIN_WHATSONCHAIN_API_KEY : process.env.TEST_WHATSONCHAIN_API_KEY) || '';
|
|
121
|
-
return {
|
|
122
|
-
...flagsEnv,
|
|
123
|
-
identityKey,
|
|
124
|
-
identityKey2,
|
|
125
|
-
taalApiKey,
|
|
126
|
-
bitailsApiKey,
|
|
127
|
-
whatsonchainApiKey,
|
|
128
|
-
devKeys: JSON.parse(DEV_KEYS),
|
|
129
|
-
filePath,
|
|
130
|
-
testIdentityKey,
|
|
131
|
-
testFilePath,
|
|
132
|
-
cloudMySQLConnection
|
|
133
|
-
};
|
|
134
|
-
}
|
|
135
|
-
static async createMainReviewSetup() {
|
|
136
|
-
const env = _tu.getEnv('main');
|
|
137
|
-
if (!env.cloudMySQLConnection)
|
|
138
|
-
throw new WERR_errors_1.WERR_INVALID_PARAMETER('env.cloudMySQLConnection', 'valid');
|
|
139
|
-
const knex = Setup_1.Setup.createMySQLKnex(env.cloudMySQLConnection);
|
|
140
|
-
const storage = new StorageKnex_1.StorageKnex({
|
|
141
|
-
chain: env.chain,
|
|
142
|
-
knex: knex,
|
|
143
|
-
commissionSatoshis: 0,
|
|
144
|
-
commissionPubKeyHex: undefined,
|
|
145
|
-
feeModel: { model: 'sat/kb', value: 1 }
|
|
146
|
-
});
|
|
147
|
-
const servicesOptions = Services_1.Services.createDefaultOptions(env.chain);
|
|
148
|
-
if (env.whatsonchainApiKey)
|
|
149
|
-
servicesOptions.whatsOnChainApiKey = env.whatsonchainApiKey;
|
|
150
|
-
const services = new Services_1.Services(servicesOptions);
|
|
151
|
-
storage.setServices(services);
|
|
152
|
-
await storage.makeAvailable();
|
|
153
|
-
return { env, storage, services };
|
|
154
|
-
}
|
|
155
|
-
static async createNoSendP2PKHTestOutpoint(address, satoshis, noSendChange, wallet) {
|
|
156
|
-
return await _tu.createNoSendP2PKHTestOutpoints(1, address, satoshis, noSendChange, wallet);
|
|
157
|
-
}
|
|
158
|
-
static async createNoSendP2PKHTestOutpoints(count, address, satoshis, noSendChange, wallet) {
|
|
159
|
-
const outputs = [];
|
|
160
|
-
for (let i = 0; i < count; i++) {
|
|
161
|
-
outputs.push({
|
|
162
|
-
basket: `test-p2pkh-output-${i}`,
|
|
163
|
-
satoshis,
|
|
164
|
-
lockingScript: _tu.getLockP2PKH(address).toHex(),
|
|
165
|
-
outputDescription: `p2pkh ${i}`
|
|
166
|
-
});
|
|
167
|
-
}
|
|
168
|
-
const createArgs = {
|
|
169
|
-
description: `to ${address}`,
|
|
170
|
-
outputs,
|
|
171
|
-
options: {
|
|
172
|
-
noSendChange,
|
|
173
|
-
randomizeOutputs: false,
|
|
174
|
-
signAndProcess: false,
|
|
175
|
-
noSend: true
|
|
176
|
-
}
|
|
177
|
-
};
|
|
178
|
-
const cr = await wallet.createAction(createArgs);
|
|
179
|
-
noSendChange = cr.noSendChange;
|
|
180
|
-
expect(cr.noSendChange).toBeTruthy();
|
|
181
|
-
expect(cr.sendWithResults).toBeUndefined();
|
|
182
|
-
expect(cr.tx).toBeUndefined();
|
|
183
|
-
expect(cr.txid).toBeUndefined();
|
|
184
|
-
expect(cr.signableTransaction).toBeTruthy();
|
|
185
|
-
const st = cr.signableTransaction;
|
|
186
|
-
expect(st.reference).toBeTruthy();
|
|
187
|
-
// const tx = Transaction.fromAtomicBEEF(st.tx) // Transaction doesn't support V2 Beef yet.
|
|
188
|
-
const atomicBeef = sdk_1.Beef.fromBinary(st.tx);
|
|
189
|
-
const tx = atomicBeef.txs[atomicBeef.txs.length - 1].tx;
|
|
190
|
-
for (const input of tx.inputs) {
|
|
191
|
-
expect(atomicBeef.findTxid(input.sourceTXID)).toBeTruthy();
|
|
192
|
-
}
|
|
193
|
-
// Spending authorization check happens here...
|
|
194
|
-
//expect(st.amount > 242 && st.amount < 300).toBe(true)
|
|
195
|
-
// sign and complete
|
|
196
|
-
const signArgs = {
|
|
197
|
-
reference: st.reference,
|
|
198
|
-
spends: {},
|
|
199
|
-
options: {
|
|
200
|
-
returnTXIDOnly: true,
|
|
201
|
-
noSend: true
|
|
202
|
-
}
|
|
203
|
-
};
|
|
204
|
-
const sr = await wallet.signAction(signArgs);
|
|
205
|
-
let txid = sr.txid;
|
|
206
|
-
// Update the noSendChange txid to final signed value.
|
|
207
|
-
noSendChange = noSendChange.map(op => `${txid}.${op.split('.')[1]}`);
|
|
208
|
-
return { noSendChange, txid, cr, sr };
|
|
209
|
-
}
|
|
210
|
-
static getKeyPair(priv) {
|
|
211
|
-
if (priv === undefined)
|
|
212
|
-
priv = sdk_1.PrivateKey.fromRandom();
|
|
213
|
-
else if (typeof priv === 'string')
|
|
214
|
-
priv = new sdk_1.PrivateKey(priv, 'hex');
|
|
215
|
-
const pub = sdk_1.PublicKey.fromPrivateKey(priv);
|
|
216
|
-
const address = pub.toAddress();
|
|
217
|
-
return { privateKey: priv, publicKey: pub, address };
|
|
218
|
-
}
|
|
219
|
-
static getLockP2PKH(address) {
|
|
220
|
-
const p2pkh = new sdk_1.P2PKH();
|
|
221
|
-
const lock = p2pkh.lock(address);
|
|
222
|
-
return lock;
|
|
223
|
-
}
|
|
224
|
-
static getUnlockP2PKH(priv, satoshis) {
|
|
225
|
-
const p2pkh = new sdk_1.P2PKH();
|
|
226
|
-
const lock = _tu.getLockP2PKH(_tu.getKeyPair(priv).address);
|
|
227
|
-
// Prepare to pay with SIGHASH_ALL and without ANYONE_CAN_PAY.
|
|
228
|
-
// In otherwords:
|
|
229
|
-
// - all outputs must remain in the current order, amount and locking scripts.
|
|
230
|
-
// - all inputs must remain from the current outpoints and sequence numbers.
|
|
231
|
-
// (unlock scripts are never signed)
|
|
232
|
-
const unlock = p2pkh.unlock(priv, 'all', false, satoshis, lock);
|
|
233
|
-
return unlock;
|
|
234
|
-
}
|
|
235
|
-
static async createWalletOnly(args) {
|
|
236
|
-
args.chain || (args.chain = 'test');
|
|
237
|
-
args.rootKeyHex || (args.rootKeyHex = '1'.repeat(64));
|
|
238
|
-
const rootKey = sdk_1.PrivateKey.fromHex(args.rootKeyHex);
|
|
239
|
-
const identityKey = rootKey.toPublicKey().toString();
|
|
240
|
-
const keyDeriver = new sdk_1.CachedKeyDeriver(rootKey);
|
|
241
|
-
const chain = args.chain;
|
|
242
|
-
const storage = new WalletStorageManager_1.WalletStorageManager(identityKey, args.active, args.backups);
|
|
243
|
-
if (storage.canMakeAvailable())
|
|
244
|
-
await storage.makeAvailable();
|
|
245
|
-
const env = _tu.getEnv(args.chain);
|
|
246
|
-
const serviceOptions = Services_1.Services.createDefaultOptions(env.chain);
|
|
247
|
-
serviceOptions.taalApiKey = env.taalApiKey;
|
|
248
|
-
serviceOptions.arcConfig.apiKey = env.taalApiKey;
|
|
249
|
-
serviceOptions.bitailsApiKey = env.bitailsApiKey;
|
|
250
|
-
serviceOptions.whatsOnChainApiKey = env.whatsonchainApiKey;
|
|
251
|
-
const services = new Services_1.Services(serviceOptions);
|
|
252
|
-
const monopts = Monitor_1.Monitor.createDefaultWalletMonitorOptions(chain, storage, services);
|
|
253
|
-
const monitor = new Monitor_1.Monitor(monopts);
|
|
254
|
-
monitor.addDefaultTasks();
|
|
255
|
-
let privilegedKeyManager = undefined;
|
|
256
|
-
if (args.privKeyHex) {
|
|
257
|
-
const privKey = sdk_1.PrivateKey.fromString(args.privKeyHex);
|
|
258
|
-
privilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async () => privKey);
|
|
259
|
-
}
|
|
260
|
-
const wallet = new Wallet_1.Wallet({
|
|
261
|
-
chain,
|
|
262
|
-
keyDeriver,
|
|
263
|
-
storage,
|
|
264
|
-
services,
|
|
265
|
-
monitor,
|
|
266
|
-
privilegedKeyManager
|
|
267
|
-
});
|
|
268
|
-
const r = {
|
|
269
|
-
rootKey,
|
|
270
|
-
identityKey,
|
|
271
|
-
keyDeriver,
|
|
272
|
-
chain,
|
|
273
|
-
storage,
|
|
274
|
-
services,
|
|
275
|
-
monitor,
|
|
276
|
-
wallet
|
|
277
|
-
};
|
|
278
|
-
return r;
|
|
279
|
-
}
|
|
280
|
-
/**
|
|
281
|
-
* Creates a wallet with both local sqlite and cloud stores, the local store is left active.
|
|
282
|
-
*
|
|
283
|
-
* Requires a valid .env file with chain matching testIdentityKey and testFilePath properties valid.
|
|
284
|
-
* Or `args` with those properties.
|
|
285
|
-
*
|
|
286
|
-
* Verifies wallet has at least 1000 satoshis in at least 10 change utxos.
|
|
287
|
-
*
|
|
288
|
-
* @param chain
|
|
289
|
-
*
|
|
290
|
-
* @returns {TestWalletNoSetup}
|
|
291
|
-
*/
|
|
292
|
-
static async createTestWallet(args) {
|
|
293
|
-
let chain;
|
|
294
|
-
let rootKeyHex;
|
|
295
|
-
let filePath;
|
|
296
|
-
let addLocalBackup = false;
|
|
297
|
-
let setActiveClient = false;
|
|
298
|
-
let useMySQLConnectionForClient = false;
|
|
299
|
-
if (typeof args === 'string') {
|
|
300
|
-
chain = args;
|
|
301
|
-
const env = _tu.getEnv(chain);
|
|
302
|
-
if (!env.testIdentityKey || !env.testFilePath) {
|
|
303
|
-
throw new WERR_errors_1.WERR_INVALID_PARAMETER('env.testIdentityKey and env.testFilePath', 'valid');
|
|
304
|
-
}
|
|
305
|
-
rootKeyHex = env.devKeys[env.testIdentityKey];
|
|
306
|
-
filePath = env.testFilePath;
|
|
307
|
-
}
|
|
308
|
-
else {
|
|
309
|
-
chain = args.chain;
|
|
310
|
-
rootKeyHex = args.rootKeyHex;
|
|
311
|
-
filePath = args.filePath;
|
|
312
|
-
addLocalBackup = args.addLocalBackup === true;
|
|
313
|
-
setActiveClient = args.setActiveClient === true;
|
|
314
|
-
useMySQLConnectionForClient = args.useMySQLConnectionForClient === true;
|
|
315
|
-
}
|
|
316
|
-
const databaseName = path_1.default.parse(filePath).name;
|
|
317
|
-
const setup = await _tu.createSQLiteTestWallet({
|
|
318
|
-
filePath,
|
|
319
|
-
rootKeyHex,
|
|
320
|
-
databaseName,
|
|
321
|
-
chain
|
|
322
|
-
});
|
|
323
|
-
setup.localStorageIdentityKey = setup.storage.getActiveStore();
|
|
324
|
-
let client;
|
|
325
|
-
if (useMySQLConnectionForClient) {
|
|
326
|
-
const env = _tu.getEnv(chain);
|
|
327
|
-
if (!env.cloudMySQLConnection)
|
|
328
|
-
throw new WERR_errors_1.WERR_INVALID_PARAMETER('env.cloundMySQLConnection', 'valid');
|
|
329
|
-
const connection = JSON.parse(env.cloudMySQLConnection);
|
|
330
|
-
client = new StorageKnex_1.StorageKnex({
|
|
331
|
-
...StorageKnex_1.StorageKnex.defaultOptions(),
|
|
332
|
-
knex: _tu.createMySQLFromConnection(connection),
|
|
333
|
-
chain: env.chain
|
|
334
|
-
});
|
|
335
|
-
}
|
|
336
|
-
else {
|
|
337
|
-
const endpointUrl = chain === 'main' ? 'https://storage.babbage.systems' : 'https://staging-storage.babbage.systems';
|
|
338
|
-
client = new StorageClient_1.StorageClient(setup.wallet, endpointUrl);
|
|
339
|
-
}
|
|
340
|
-
setup.clientStorageIdentityKey = (await client.makeAvailable()).storageIdentityKey;
|
|
341
|
-
await setup.wallet.storage.addWalletStorageProvider(client);
|
|
342
|
-
if (addLocalBackup) {
|
|
343
|
-
const backupName = `${databaseName}_backup`;
|
|
344
|
-
const backupPath = filePath.replace(databaseName, backupName);
|
|
345
|
-
const localBackup = new StorageKnex_1.StorageKnex({
|
|
346
|
-
...StorageKnex_1.StorageKnex.defaultOptions(),
|
|
347
|
-
knex: _tu.createLocalSQLite(backupPath),
|
|
348
|
-
chain
|
|
349
|
-
});
|
|
350
|
-
await localBackup.migrate(backupName, (0, utilityHelpers_1.randomBytesHex)(33));
|
|
351
|
-
setup.localBackupStorageIdentityKey = (await localBackup.makeAvailable()).storageIdentityKey;
|
|
352
|
-
await setup.wallet.storage.addWalletStorageProvider(localBackup);
|
|
353
|
-
}
|
|
354
|
-
// SETTING ACTIVE
|
|
355
|
-
// SETTING ACTIVE
|
|
356
|
-
// SETTING ACTIVE
|
|
357
|
-
const log = await setup.storage.setActive(setActiveClient ? setup.clientStorageIdentityKey : setup.localStorageIdentityKey);
|
|
358
|
-
(0, exports.logger)(log);
|
|
359
|
-
let needsBackup = false;
|
|
360
|
-
if (setup.storage.getActiveStore() === setup.localStorageIdentityKey) {
|
|
361
|
-
const basket = (0, utilityHelpers_1.verifyOne)(await setup.activeStorage.findOutputBaskets({
|
|
362
|
-
partial: {
|
|
363
|
-
userId: setup.storage.getActiveUser().userId,
|
|
364
|
-
name: 'default'
|
|
365
|
-
}
|
|
366
|
-
}));
|
|
367
|
-
if (basket.minimumDesiredUTXOValue !== 5 || basket.numberOfDesiredUTXOs < 32) {
|
|
368
|
-
needsBackup = true;
|
|
369
|
-
await setup.activeStorage.updateOutputBasket(basket.basketId, {
|
|
370
|
-
minimumDesiredUTXOValue: 5,
|
|
371
|
-
numberOfDesiredUTXOs: 32
|
|
372
|
-
});
|
|
373
|
-
}
|
|
374
|
-
}
|
|
375
|
-
const balance = await setup.wallet.balanceAndUtxos();
|
|
376
|
-
if (balance.total < 1000) {
|
|
377
|
-
throw new WERR_errors_1.WERR_INSUFFICIENT_FUNDS(1000, 1000 - balance.total);
|
|
378
|
-
}
|
|
379
|
-
if (balance.utxos.length <= 10) {
|
|
380
|
-
const args = {
|
|
381
|
-
description: 'spread change'
|
|
382
|
-
};
|
|
383
|
-
await setup.wallet.createAction(args);
|
|
384
|
-
needsBackup = true;
|
|
385
|
-
}
|
|
386
|
-
if (needsBackup) {
|
|
387
|
-
const log2 = await setup.storage.updateBackups();
|
|
388
|
-
console.log(log2);
|
|
389
|
-
}
|
|
390
|
-
return setup;
|
|
391
|
-
}
|
|
392
|
-
static async createTestWalletWithStorageClient(args) {
|
|
393
|
-
args.chain || (args.chain = 'test');
|
|
394
|
-
const wo = await _tu.createWalletOnly({
|
|
395
|
-
chain: args.chain,
|
|
396
|
-
rootKeyHex: args.rootKeyHex
|
|
397
|
-
});
|
|
398
|
-
args.endpointUrl || (args.endpointUrl = args.chain === 'main' ? 'https://storage.babbage.systems' : 'https://staging-storage.babbage.systems');
|
|
399
|
-
const client = new StorageClient_1.StorageClient(wo.wallet, args.endpointUrl);
|
|
400
|
-
await wo.storage.addWalletStorageProvider(client);
|
|
401
|
-
return wo;
|
|
402
|
-
}
|
|
403
|
-
static async createKnexTestWalletWithSetup(args) {
|
|
404
|
-
const wo = await _tu.createWalletOnly({
|
|
405
|
-
chain: args.chain,
|
|
406
|
-
rootKeyHex: args.rootKeyHex,
|
|
407
|
-
privKeyHex: args.privKeyHex
|
|
408
|
-
});
|
|
409
|
-
const activeStorage = new StorageKnex_1.StorageKnex({
|
|
410
|
-
chain: wo.chain,
|
|
411
|
-
knex: args.knex,
|
|
412
|
-
commissionSatoshis: 0,
|
|
413
|
-
commissionPubKeyHex: undefined,
|
|
414
|
-
feeModel: { model: 'sat/kb', value: 1 }
|
|
415
|
-
});
|
|
416
|
-
if (args.dropAll)
|
|
417
|
-
await activeStorage.dropAllData();
|
|
418
|
-
await activeStorage.migrate(args.databaseName, (0, utilityHelpers_1.randomBytesHex)(33));
|
|
419
|
-
await activeStorage.makeAvailable();
|
|
420
|
-
const setup = await args.insertSetup(activeStorage, wo.identityKey);
|
|
421
|
-
await wo.storage.addWalletStorageProvider(activeStorage);
|
|
422
|
-
const { user, isNew } = await activeStorage.findOrInsertUser(wo.identityKey);
|
|
423
|
-
const userId = user.userId;
|
|
424
|
-
const r = {
|
|
425
|
-
...wo,
|
|
426
|
-
activeStorage,
|
|
427
|
-
setup,
|
|
428
|
-
userId
|
|
429
|
-
};
|
|
430
|
-
return r;
|
|
431
|
-
}
|
|
432
|
-
/**
|
|
433
|
-
* Returns path to temporary file in project's './test/data/tmp/' folder.
|
|
434
|
-
*
|
|
435
|
-
* Creates any missing folders.
|
|
436
|
-
*
|
|
437
|
-
* Optionally tries to delete any existing file. This may fail if the file file is locked
|
|
438
|
-
* by another process.
|
|
439
|
-
*
|
|
440
|
-
* Optionally copies filename (or if filename has no dir component, a file of the same filename from the project's './test/data' folder) to initialize file's contents.
|
|
441
|
-
*
|
|
442
|
-
* CAUTION: returned file path will include four random hex digits unless tryToDelete is true. Files must be purged periodically.
|
|
443
|
-
*
|
|
444
|
-
* @param filename target filename without path, optionally just extension in which case random name is used
|
|
445
|
-
* @param tryToDelete true to attempt to delete an existing file at the returned file path.
|
|
446
|
-
* @param copyToTmp true to copy file of same filename from './test/data' (or elsewhere if filename has path) to tmp folder
|
|
447
|
-
* @param reuseExisting true to use existing file if found, otherwise a random string is added to filename.
|
|
448
|
-
* @returns path in './test/data/tmp' folder.
|
|
449
|
-
*/
|
|
450
|
-
static async newTmpFile(filename = '', tryToDelete = false, copyToTmp = false, reuseExisting = false) {
|
|
451
|
-
const tmpFolder = './test/data/tmp/';
|
|
452
|
-
const p = path_1.default.parse(filename);
|
|
453
|
-
const dstDir = tmpFolder;
|
|
454
|
-
const dstName = `${p.name}${tryToDelete || reuseExisting ? '' : (0, utilityHelpers_1.randomBytesHex)(6)}`;
|
|
455
|
-
const dstExt = p.ext || 'tmp';
|
|
456
|
-
const dstPath = path_1.default.resolve(`${dstDir}${dstName}${dstExt}`);
|
|
457
|
-
await fs_1.promises.mkdir(tmpFolder, { recursive: true });
|
|
458
|
-
if (!reuseExisting && (tryToDelete || copyToTmp))
|
|
459
|
-
try {
|
|
460
|
-
await fs_1.promises.unlink(dstPath);
|
|
461
|
-
}
|
|
462
|
-
catch (eu) {
|
|
463
|
-
const e = WalletError_1.WalletError.fromUnknown(eu);
|
|
464
|
-
if (e.name !== 'ENOENT') {
|
|
465
|
-
throw e;
|
|
466
|
-
}
|
|
467
|
-
}
|
|
468
|
-
if (copyToTmp) {
|
|
469
|
-
const srcPath = p.dir ? path_1.default.resolve(filename) : path_1.default.resolve(`./test/data/${filename}`);
|
|
470
|
-
await fs_1.promises.copyFile(srcPath, dstPath);
|
|
471
|
-
}
|
|
472
|
-
return dstPath;
|
|
473
|
-
}
|
|
474
|
-
static async copyFile(srcPath, dstPath) {
|
|
475
|
-
await fs_1.promises.copyFile(srcPath, dstPath);
|
|
476
|
-
}
|
|
477
|
-
static async existingDataFile(filename) {
|
|
478
|
-
const folder = './test/data/';
|
|
479
|
-
return folder + filename;
|
|
480
|
-
}
|
|
481
|
-
static createLocalSQLite(filename) {
|
|
482
|
-
const config = {
|
|
483
|
-
client: 'sqlite3',
|
|
484
|
-
connection: { filename },
|
|
485
|
-
useNullAsDefault: true
|
|
486
|
-
};
|
|
487
|
-
const knex = (0, knex_1.knex)(config);
|
|
488
|
-
return knex;
|
|
489
|
-
}
|
|
490
|
-
static createMySQLFromConnection(connection) {
|
|
491
|
-
const config = {
|
|
492
|
-
client: 'mysql2',
|
|
493
|
-
connection,
|
|
494
|
-
useNullAsDefault: true,
|
|
495
|
-
pool: { min: 0, max: 7, idleTimeoutMillis: 15000 }
|
|
496
|
-
};
|
|
497
|
-
const knex = (0, knex_1.knex)(config);
|
|
498
|
-
return knex;
|
|
499
|
-
}
|
|
500
|
-
static createLocalMySQL(database) {
|
|
501
|
-
const connection = JSON.parse(localMySqlConnection || '{}');
|
|
502
|
-
connection['database'] = database;
|
|
503
|
-
const config = {
|
|
504
|
-
client: 'mysql2',
|
|
505
|
-
connection,
|
|
506
|
-
useNullAsDefault: true,
|
|
507
|
-
pool: { min: 0, max: 7, idleTimeoutMillis: 15000 }
|
|
508
|
-
};
|
|
509
|
-
const knex = (0, knex_1.knex)(config);
|
|
510
|
-
return knex;
|
|
511
|
-
}
|
|
512
|
-
static async createMySQLTestWallet(args) {
|
|
513
|
-
return await this.createKnexTestWallet({
|
|
514
|
-
...args,
|
|
515
|
-
knex: _tu.createLocalMySQL(args.databaseName)
|
|
516
|
-
});
|
|
517
|
-
}
|
|
518
|
-
static async createMySQLTestSetup1Wallet(args) {
|
|
519
|
-
return await this.createKnexTestSetup1Wallet({
|
|
520
|
-
...args,
|
|
521
|
-
dropAll: true,
|
|
522
|
-
knex: _tu.createLocalMySQL(args.databaseName)
|
|
523
|
-
});
|
|
524
|
-
}
|
|
525
|
-
static async createMySQLTestSetup2Wallet(args) {
|
|
526
|
-
return await this.createKnexTestSetup2Wallet({
|
|
527
|
-
...args,
|
|
528
|
-
dropAll: true,
|
|
529
|
-
knex: _tu.createLocalMySQL(args.databaseName)
|
|
530
|
-
});
|
|
531
|
-
}
|
|
532
|
-
static async createSQLiteTestWallet(args) {
|
|
533
|
-
const localSQLiteFile = args.filePath || (await _tu.newTmpFile(`${args.databaseName}.sqlite`, false, false, true));
|
|
534
|
-
return await this.createKnexTestWallet({
|
|
535
|
-
...args,
|
|
536
|
-
knex: _tu.createLocalSQLite(localSQLiteFile)
|
|
537
|
-
});
|
|
538
|
-
}
|
|
539
|
-
static async createSQLiteTestSetup1Wallet(args) {
|
|
540
|
-
const localSQLiteFile = await _tu.newTmpFile(`${args.databaseName}.sqlite`, false, false, true);
|
|
541
|
-
return await this.createKnexTestSetup1Wallet({
|
|
542
|
-
...args,
|
|
543
|
-
dropAll: true,
|
|
544
|
-
knex: _tu.createLocalSQLite(localSQLiteFile)
|
|
545
|
-
});
|
|
546
|
-
}
|
|
547
|
-
static async createSQLiteTestSetup2Wallet(args) {
|
|
548
|
-
const localSQLiteFile = await _tu.newTmpFile(`${args.databaseName}.sqlite`, false, false, true);
|
|
549
|
-
return await this.createKnexTestSetup2Wallet({
|
|
550
|
-
...args,
|
|
551
|
-
dropAll: true,
|
|
552
|
-
knex: _tu.createLocalSQLite(localSQLiteFile)
|
|
553
|
-
});
|
|
554
|
-
}
|
|
555
|
-
static async createKnexTestWallet(args) {
|
|
556
|
-
return await _tu.createKnexTestWalletWithSetup({
|
|
557
|
-
...args,
|
|
558
|
-
insertSetup: insertEmptySetup
|
|
559
|
-
});
|
|
560
|
-
}
|
|
561
|
-
static async createKnexTestSetup1Wallet(args) {
|
|
562
|
-
return await _tu.createKnexTestWalletWithSetup({
|
|
563
|
-
...args,
|
|
564
|
-
insertSetup: _tu.createTestSetup1
|
|
565
|
-
});
|
|
566
|
-
}
|
|
567
|
-
static async createKnexTestSetup2Wallet(args) {
|
|
568
|
-
return await _tu.createKnexTestWalletWithSetup({
|
|
569
|
-
...args,
|
|
570
|
-
insertSetup: async (storage, identityKey) => {
|
|
571
|
-
return _tu.createTestSetup2(storage, identityKey, args.mockData);
|
|
572
|
-
}
|
|
573
|
-
});
|
|
574
|
-
}
|
|
575
|
-
static async fileExists(file) {
|
|
576
|
-
try {
|
|
577
|
-
const f = await fs_1.promises.open(file, 'r');
|
|
578
|
-
await f.close();
|
|
579
|
-
return true;
|
|
580
|
-
}
|
|
581
|
-
catch (eu) {
|
|
582
|
-
return false;
|
|
583
|
-
}
|
|
584
|
-
}
|
|
585
|
-
//if (await _tu.fileExists(walletFile))
|
|
586
|
-
static async createLegacyWalletSQLiteCopy(databaseName) {
|
|
587
|
-
const walletFile = await _tu.newTmpFile(`${databaseName}.sqlite`, false, false, true);
|
|
588
|
-
const walletKnex = _tu.createLocalSQLite(walletFile);
|
|
589
|
-
return await _tu.createLegacyWalletCopy(databaseName, walletKnex, walletFile);
|
|
590
|
-
}
|
|
591
|
-
static async createLegacyWalletMySQLCopy(databaseName) {
|
|
592
|
-
const walletKnex = _tu.createLocalMySQL(databaseName);
|
|
593
|
-
return await _tu.createLegacyWalletCopy(databaseName, walletKnex);
|
|
594
|
-
}
|
|
595
|
-
static async createLiveWalletSQLiteWARNING(databaseFullPath = './test/data/walletLiveTestData.sqlite') {
|
|
596
|
-
return await this.createKnexTestWallet({
|
|
597
|
-
chain: 'test',
|
|
598
|
-
rootKeyHex: _tu.legacyRootKeyHex,
|
|
599
|
-
databaseName: 'walletLiveTestData',
|
|
600
|
-
knex: _tu.createLocalSQLite(databaseFullPath)
|
|
601
|
-
});
|
|
602
|
-
}
|
|
603
|
-
static async createWalletSQLite(databaseFullPath = './test/data/tmp/walletNewTestData.sqlite', databaseName = 'walletNewTestData') {
|
|
604
|
-
return await this.createSQLiteTestWallet({
|
|
605
|
-
filePath: databaseFullPath,
|
|
606
|
-
databaseName,
|
|
607
|
-
chain: 'test',
|
|
608
|
-
rootKeyHex: '1'.repeat(64),
|
|
609
|
-
dropAll: true
|
|
610
|
-
});
|
|
611
|
-
}
|
|
612
|
-
static async createLegacyWalletCopy(databaseName, walletKnex, tryCopyToPath) {
|
|
613
|
-
const readerFile = await _tu.existingDataFile(`walletLegacyTestData.sqlite`);
|
|
614
|
-
let useReader = true;
|
|
615
|
-
if (tryCopyToPath) {
|
|
616
|
-
await _tu.copyFile(readerFile, tryCopyToPath);
|
|
617
|
-
//console.log('USING FILE COPY INSTEAD OF SOURCE DB SYNC')
|
|
618
|
-
useReader = false;
|
|
619
|
-
}
|
|
620
|
-
const chain = 'test';
|
|
621
|
-
const rootKeyHex = _tu.legacyRootKeyHex;
|
|
622
|
-
const identityKey = '03ac2d10bdb0023f4145cc2eba2fcd2ad3070cb2107b0b48170c46a9440e4cc3fe';
|
|
623
|
-
const rootKey = sdk_1.PrivateKey.fromHex(rootKeyHex);
|
|
624
|
-
const keyDeriver = new sdk_1.CachedKeyDeriver(rootKey);
|
|
625
|
-
const activeStorage = new StorageKnex_1.StorageKnex({
|
|
626
|
-
chain,
|
|
627
|
-
knex: walletKnex,
|
|
628
|
-
commissionSatoshis: 0,
|
|
629
|
-
commissionPubKeyHex: undefined,
|
|
630
|
-
feeModel: { model: 'sat/kb', value: 1 }
|
|
631
|
-
});
|
|
632
|
-
if (useReader)
|
|
633
|
-
await activeStorage.dropAllData();
|
|
634
|
-
await activeStorage.migrate(databaseName, (0, utilityHelpers_1.randomBytesHex)(33));
|
|
635
|
-
await activeStorage.makeAvailable();
|
|
636
|
-
const storage = new WalletStorageManager_1.WalletStorageManager(identityKey, activeStorage);
|
|
637
|
-
await storage.makeAvailable();
|
|
638
|
-
if (useReader) {
|
|
639
|
-
const readerKnex = _tu.createLocalSQLite(readerFile);
|
|
640
|
-
const reader = new StorageKnex_1.StorageKnex({
|
|
641
|
-
chain,
|
|
642
|
-
knex: readerKnex,
|
|
643
|
-
commissionSatoshis: 0,
|
|
644
|
-
commissionPubKeyHex: undefined,
|
|
645
|
-
feeModel: { model: 'sat/kb', value: 1 }
|
|
646
|
-
});
|
|
647
|
-
await reader.makeAvailable();
|
|
648
|
-
await storage.syncFromReader(identityKey, new StorageSyncReader_1.StorageSyncReader({ identityKey }, reader));
|
|
649
|
-
await reader.destroy();
|
|
650
|
-
}
|
|
651
|
-
const services = new Services_1.Services(chain);
|
|
652
|
-
const monopts = Monitor_1.Monitor.createDefaultWalletMonitorOptions(chain, storage, services);
|
|
653
|
-
const monitor = new Monitor_1.Monitor(monopts);
|
|
654
|
-
const wallet = new Wallet_1.Wallet({ chain, keyDeriver, storage, services, monitor });
|
|
655
|
-
const userId = (0, utilityHelpers_1.verifyTruthy)(await activeStorage.findUserByIdentityKey(identityKey)).userId;
|
|
656
|
-
const r = {
|
|
657
|
-
rootKey,
|
|
658
|
-
identityKey,
|
|
659
|
-
keyDeriver,
|
|
660
|
-
chain,
|
|
661
|
-
activeStorage,
|
|
662
|
-
storage,
|
|
663
|
-
setup: {},
|
|
664
|
-
services,
|
|
665
|
-
monitor,
|
|
666
|
-
wallet,
|
|
667
|
-
userId
|
|
668
|
-
};
|
|
669
|
-
return r;
|
|
670
|
-
}
|
|
671
|
-
static wrapProfiling(o, name) {
|
|
672
|
-
const getFunctionsNames = (obj) => {
|
|
673
|
-
let fNames = [];
|
|
674
|
-
do {
|
|
675
|
-
fNames = fNames.concat(Object.getOwnPropertyNames(obj).filter(p => p !== 'constructor' && typeof obj[p] === 'function'));
|
|
676
|
-
} while ((obj = Object.getPrototypeOf(obj)) && obj !== Object.prototype);
|
|
677
|
-
return fNames;
|
|
678
|
-
};
|
|
679
|
-
const notifyPerformance = (fn, performanceDetails) => {
|
|
680
|
-
setTimeout(() => {
|
|
681
|
-
let { functionName, args, startTime, endTime } = performanceDetails;
|
|
682
|
-
let _args = args;
|
|
683
|
-
if (Array.isArray(args)) {
|
|
684
|
-
_args = args.map(arg => {
|
|
685
|
-
if (typeof arg === 'function') {
|
|
686
|
-
let fName = arg.name;
|
|
687
|
-
if (!fName) {
|
|
688
|
-
fName = 'function';
|
|
689
|
-
}
|
|
690
|
-
else if (fName === 'callbackWrapper') {
|
|
691
|
-
fName = 'callback';
|
|
692
|
-
}
|
|
693
|
-
arg = `[${fName} Function]`;
|
|
694
|
-
}
|
|
695
|
-
return arg;
|
|
696
|
-
});
|
|
697
|
-
}
|
|
698
|
-
fn({ functionName, args: _args, startTime, endTime });
|
|
699
|
-
}, 0);
|
|
700
|
-
};
|
|
701
|
-
const stats = {};
|
|
702
|
-
function logger(args) {
|
|
703
|
-
let s = stats[args.functionName];
|
|
704
|
-
if (!s) {
|
|
705
|
-
s = { count: 0, totalMsecs: 0 };
|
|
706
|
-
stats[args.functionName] = s;
|
|
707
|
-
}
|
|
708
|
-
s.count++;
|
|
709
|
-
s.totalMsecs += args.endTime - args.startTime;
|
|
710
|
-
}
|
|
711
|
-
const performanceWrapper = (obj, objectName, performanceNotificationCallback) => {
|
|
712
|
-
let _notifyPerformance = notifyPerformance.bind(null, performanceNotificationCallback);
|
|
713
|
-
let fNames = getFunctionsNames(obj);
|
|
714
|
-
for (let fName of fNames) {
|
|
715
|
-
let originalFunction = obj[fName];
|
|
716
|
-
let wrapperFunction = (...args) => {
|
|
717
|
-
let callbackFnIndex = -1;
|
|
718
|
-
let startTime = Date.now();
|
|
719
|
-
let _callBack = args.filter((arg, i) => {
|
|
720
|
-
let _isFunction = typeof arg === 'function';
|
|
721
|
-
if (_isFunction) {
|
|
722
|
-
callbackFnIndex = i;
|
|
723
|
-
}
|
|
724
|
-
return _isFunction;
|
|
725
|
-
})[0];
|
|
726
|
-
if (_callBack) {
|
|
727
|
-
let callbackWrapper = (...callbackArgs) => {
|
|
728
|
-
let endTime = Date.now();
|
|
729
|
-
_notifyPerformance({ functionName: `${objectName}.${fName}`, args, startTime, endTime });
|
|
730
|
-
_callBack.apply(null, callbackArgs);
|
|
731
|
-
};
|
|
732
|
-
args[callbackFnIndex] = callbackWrapper;
|
|
733
|
-
}
|
|
734
|
-
let originalReturnObject = originalFunction.apply(obj, args);
|
|
735
|
-
let isPromiseType = originalReturnObject &&
|
|
736
|
-
typeof originalReturnObject.then === 'function' &&
|
|
737
|
-
typeof originalReturnObject.catch === 'function';
|
|
738
|
-
if (isPromiseType) {
|
|
739
|
-
return originalReturnObject
|
|
740
|
-
.then(resolveArgs => {
|
|
741
|
-
let endTime = Date.now();
|
|
742
|
-
_notifyPerformance({ functionName: `${objectName}.${fName}`, args, startTime, endTime });
|
|
743
|
-
return Promise.resolve(resolveArgs);
|
|
744
|
-
})
|
|
745
|
-
.catch((...rejectArgs) => {
|
|
746
|
-
let endTime = Date.now();
|
|
747
|
-
_notifyPerformance({ functionName: `${objectName}.${fName}`, args, startTime, endTime });
|
|
748
|
-
return Promise.reject(...rejectArgs);
|
|
749
|
-
});
|
|
750
|
-
}
|
|
751
|
-
if (!_callBack && !isPromiseType) {
|
|
752
|
-
let endTime = Date.now();
|
|
753
|
-
_notifyPerformance({ functionName: `${objectName}.${fName}`, args, startTime, endTime });
|
|
754
|
-
}
|
|
755
|
-
return originalReturnObject;
|
|
756
|
-
};
|
|
757
|
-
obj[fName] = wrapperFunction;
|
|
758
|
-
}
|
|
759
|
-
return obj;
|
|
760
|
-
};
|
|
761
|
-
const functionNames = getFunctionsNames(o);
|
|
762
|
-
performanceWrapper(o, name, logger);
|
|
763
|
-
return stats;
|
|
764
|
-
}
|
|
765
|
-
static async createIdbLegacyWalletCopy(databaseName) {
|
|
766
|
-
const chain = 'test';
|
|
767
|
-
const readerFile = await _tu.existingDataFile(`walletLegacyTestData.sqlite`);
|
|
768
|
-
const readerKnex = _tu.createLocalSQLite(readerFile);
|
|
769
|
-
const reader = new StorageKnex_1.StorageKnex({
|
|
770
|
-
chain,
|
|
771
|
-
knex: readerKnex,
|
|
772
|
-
commissionSatoshis: 0,
|
|
773
|
-
commissionPubKeyHex: undefined,
|
|
774
|
-
feeModel: { model: 'sat/kb', value: 1 }
|
|
775
|
-
});
|
|
776
|
-
await reader.makeAvailable();
|
|
777
|
-
const rootKeyHex = _tu.legacyRootKeyHex;
|
|
778
|
-
const identityKey = '03ac2d10bdb0023f4145cc2eba2fcd2ad3070cb2107b0b48170c46a9440e4cc3fe';
|
|
779
|
-
const rootKey = sdk_1.PrivateKey.fromHex(rootKeyHex);
|
|
780
|
-
const keyDeriver = new sdk_1.CachedKeyDeriver(rootKey);
|
|
781
|
-
const activeStorage = new StorageIdb_1.StorageIdb({
|
|
782
|
-
chain,
|
|
783
|
-
commissionSatoshis: 0,
|
|
784
|
-
commissionPubKeyHex: undefined,
|
|
785
|
-
feeModel: { model: 'sat/kb', value: 1 }
|
|
786
|
-
});
|
|
787
|
-
await activeStorage.dropAllData();
|
|
788
|
-
await activeStorage.migrate(databaseName, (0, utilityHelpers_1.randomBytesHex)(33));
|
|
789
|
-
await activeStorage.makeAvailable();
|
|
790
|
-
const storage = new WalletStorageManager_1.WalletStorageManager(identityKey, activeStorage);
|
|
791
|
-
await storage.makeAvailable();
|
|
792
|
-
await storage.syncFromReader(identityKey, new StorageSyncReader_1.StorageSyncReader({ identityKey }, reader));
|
|
793
|
-
await reader.destroy();
|
|
794
|
-
const services = new Services_1.Services(chain);
|
|
795
|
-
const monopts = Monitor_1.Monitor.createDefaultWalletMonitorOptions(chain, storage, services);
|
|
796
|
-
const monitor = new Monitor_1.Monitor(monopts);
|
|
797
|
-
const wallet = new Wallet_1.Wallet({ chain, keyDeriver, storage, services, monitor });
|
|
798
|
-
const userId = (0, utilityHelpers_1.verifyTruthy)(await activeStorage.findUserByIdentityKey(identityKey)).userId;
|
|
799
|
-
const r = {
|
|
800
|
-
rootKey,
|
|
801
|
-
identityKey,
|
|
802
|
-
keyDeriver,
|
|
803
|
-
chain,
|
|
804
|
-
activeStorage,
|
|
805
|
-
storage,
|
|
806
|
-
setup: {},
|
|
807
|
-
services,
|
|
808
|
-
monitor,
|
|
809
|
-
wallet,
|
|
810
|
-
userId
|
|
811
|
-
};
|
|
812
|
-
return r;
|
|
813
|
-
}
|
|
814
|
-
static makeSampleCert(subject) {
|
|
815
|
-
subject || (subject = sdk_1.PrivateKey.fromRandom().toPublicKey().toString());
|
|
816
|
-
const certifier = sdk_1.PrivateKey.fromRandom();
|
|
817
|
-
const verifier = sdk_1.PrivateKey.fromRandom();
|
|
818
|
-
const cert = {
|
|
819
|
-
type: sdk_1.Utils.toBase64(new Array(32).fill(1)),
|
|
820
|
-
serialNumber: sdk_1.Utils.toBase64(new Array(32).fill(2)),
|
|
821
|
-
revocationOutpoint: 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef.1',
|
|
822
|
-
subject,
|
|
823
|
-
certifier: certifier.toPublicKey().toString(),
|
|
824
|
-
fields: {
|
|
825
|
-
name: 'Alice',
|
|
826
|
-
email: 'alice@example.com',
|
|
827
|
-
organization: 'Example Corp'
|
|
828
|
-
},
|
|
829
|
-
signature: ''
|
|
830
|
-
};
|
|
831
|
-
return { cert, subject, certifier };
|
|
832
|
-
}
|
|
833
|
-
static async insertTestProvenTx(storage, txid, trx) {
|
|
834
|
-
const now = new Date();
|
|
835
|
-
const ptx = {
|
|
836
|
-
created_at: now,
|
|
837
|
-
updated_at: now,
|
|
838
|
-
provenTxId: 0,
|
|
839
|
-
txid: txid || (0, utilityHelpers_1.randomBytesHex)(32),
|
|
840
|
-
height: 1,
|
|
841
|
-
index: 0,
|
|
842
|
-
merklePath: [1, 2, 3, 4, 5, 6, 7, 8],
|
|
843
|
-
rawTx: [4, 5, 6],
|
|
844
|
-
blockHash: (0, utilityHelpers_1.randomBytesHex)(32),
|
|
845
|
-
merkleRoot: (0, utilityHelpers_1.randomBytesHex)(32)
|
|
846
|
-
};
|
|
847
|
-
await storage.insertProvenTx(ptx, trx);
|
|
848
|
-
return ptx;
|
|
849
|
-
}
|
|
850
|
-
static async insertTestProvenTxReq(storage, txid, provenTxId, onlyRequired) {
|
|
851
|
-
const now = new Date();
|
|
852
|
-
const ptxreq = {
|
|
853
|
-
// Required:
|
|
854
|
-
created_at: now,
|
|
855
|
-
updated_at: now,
|
|
856
|
-
provenTxReqId: 0,
|
|
857
|
-
txid: txid || (0, utilityHelpers_1.randomBytesHex)(32),
|
|
858
|
-
status: 'nosend',
|
|
859
|
-
attempts: 0,
|
|
860
|
-
notified: false,
|
|
861
|
-
history: '{}',
|
|
862
|
-
notify: '{}',
|
|
863
|
-
rawTx: [4, 5, 6],
|
|
864
|
-
// Optional:
|
|
865
|
-
provenTxId: provenTxId || undefined,
|
|
866
|
-
batch: onlyRequired ? undefined : (0, utilityHelpers_1.randomBytesBase64)(10),
|
|
867
|
-
inputBEEF: onlyRequired ? undefined : [1, 2, 3]
|
|
868
|
-
};
|
|
869
|
-
await storage.insertProvenTxReq(ptxreq);
|
|
870
|
-
return ptxreq;
|
|
871
|
-
}
|
|
872
|
-
static async insertTestUser(storage, identityKey) {
|
|
873
|
-
const now = new Date();
|
|
874
|
-
const e = {
|
|
875
|
-
created_at: now,
|
|
876
|
-
updated_at: now,
|
|
877
|
-
userId: 0,
|
|
878
|
-
identityKey: identityKey || (0, utilityHelpers_1.randomBytesHex)(33),
|
|
879
|
-
activeStorage: storage.getSettings().storageIdentityKey
|
|
880
|
-
};
|
|
881
|
-
await storage.insertUser(e);
|
|
882
|
-
return e;
|
|
883
|
-
}
|
|
884
|
-
static async insertTestCertificate(storage, u) {
|
|
885
|
-
const now = new Date();
|
|
886
|
-
u || (u = await _tu.insertTestUser(storage));
|
|
887
|
-
const e = {
|
|
888
|
-
created_at: now,
|
|
889
|
-
updated_at: now,
|
|
890
|
-
certificateId: 0,
|
|
891
|
-
userId: u.userId,
|
|
892
|
-
type: (0, utilityHelpers_1.randomBytesBase64)(33),
|
|
893
|
-
serialNumber: (0, utilityHelpers_1.randomBytesBase64)(33),
|
|
894
|
-
certifier: (0, utilityHelpers_1.randomBytesHex)(33),
|
|
895
|
-
subject: (0, utilityHelpers_1.randomBytesHex)(33),
|
|
896
|
-
verifier: undefined,
|
|
897
|
-
revocationOutpoint: `${(0, utilityHelpers_1.randomBytesHex)(32)}.999`,
|
|
898
|
-
signature: (0, utilityHelpers_1.randomBytesHex)(50),
|
|
899
|
-
isDeleted: false
|
|
900
|
-
};
|
|
901
|
-
await storage.insertCertificate(e);
|
|
902
|
-
return e;
|
|
903
|
-
}
|
|
904
|
-
static async insertTestCertificateField(storage, c, name, value) {
|
|
905
|
-
const now = new Date();
|
|
906
|
-
const e = {
|
|
907
|
-
created_at: now,
|
|
908
|
-
updated_at: now,
|
|
909
|
-
certificateId: c.certificateId,
|
|
910
|
-
userId: c.userId,
|
|
911
|
-
fieldName: name,
|
|
912
|
-
fieldValue: value,
|
|
913
|
-
masterKey: (0, utilityHelpers_1.randomBytesBase64)(40)
|
|
914
|
-
};
|
|
915
|
-
await storage.insertCertificateField(e);
|
|
916
|
-
return e;
|
|
917
|
-
}
|
|
918
|
-
static async insertTestOutputBasket(storage, u, partial) {
|
|
919
|
-
const now = new Date();
|
|
920
|
-
let user;
|
|
921
|
-
if (u === undefined) {
|
|
922
|
-
user = await _tu.insertTestUser(storage);
|
|
923
|
-
}
|
|
924
|
-
else if (typeof u === 'number') {
|
|
925
|
-
user = (0, utilityHelpers_1.verifyOne)(await storage.findUsers({ partial: { userId: u } }));
|
|
926
|
-
}
|
|
927
|
-
else {
|
|
928
|
-
user = u;
|
|
929
|
-
}
|
|
930
|
-
const e = {
|
|
931
|
-
created_at: now,
|
|
932
|
-
updated_at: now,
|
|
933
|
-
basketId: 0,
|
|
934
|
-
userId: user.userId,
|
|
935
|
-
name: (0, utilityHelpers_1.randomBytesHex)(6),
|
|
936
|
-
numberOfDesiredUTXOs: 42,
|
|
937
|
-
minimumDesiredUTXOValue: 1642,
|
|
938
|
-
isDeleted: false,
|
|
939
|
-
...(partial || {})
|
|
940
|
-
};
|
|
941
|
-
await storage.insertOutputBasket(e);
|
|
942
|
-
return e;
|
|
943
|
-
}
|
|
944
|
-
static async insertTestTransaction(storage, u, onlyRequired, partial) {
|
|
945
|
-
const now = new Date();
|
|
946
|
-
u || (u = await _tu.insertTestUser(storage));
|
|
947
|
-
const e = {
|
|
948
|
-
// Required:
|
|
949
|
-
created_at: now,
|
|
950
|
-
updated_at: now,
|
|
951
|
-
transactionId: 0,
|
|
952
|
-
userId: u.userId,
|
|
953
|
-
status: 'nosend',
|
|
954
|
-
reference: (0, utilityHelpers_1.randomBytesBase64)(10),
|
|
955
|
-
isOutgoing: true,
|
|
956
|
-
satoshis: 9999,
|
|
957
|
-
description: 'buy me a river',
|
|
958
|
-
// Optional:
|
|
959
|
-
version: onlyRequired ? undefined : 0,
|
|
960
|
-
lockTime: onlyRequired ? undefined : 500000000,
|
|
961
|
-
txid: onlyRequired ? undefined : (0, utilityHelpers_1.randomBytesHex)(32),
|
|
962
|
-
inputBEEF: onlyRequired ? undefined : new sdk_1.Beef().toBinary(),
|
|
963
|
-
rawTx: onlyRequired ? undefined : [1, 2, 3],
|
|
964
|
-
...(partial || {})
|
|
965
|
-
};
|
|
966
|
-
await storage.insertTransaction(e);
|
|
967
|
-
return { tx: e, user: u };
|
|
968
|
-
}
|
|
969
|
-
static async insertTestOutput(storage, t, vout, satoshis, basket, requiredOnly, partial) {
|
|
970
|
-
const now = new Date();
|
|
971
|
-
const e = {
|
|
972
|
-
created_at: now,
|
|
973
|
-
updated_at: now,
|
|
974
|
-
outputId: 0,
|
|
975
|
-
userId: t.userId,
|
|
976
|
-
transactionId: t.transactionId,
|
|
977
|
-
basketId: basket ? basket.basketId : undefined,
|
|
978
|
-
spendable: true,
|
|
979
|
-
change: true,
|
|
980
|
-
outputDescription: 'not mutch to say',
|
|
981
|
-
vout,
|
|
982
|
-
satoshis,
|
|
983
|
-
providedBy: 'you',
|
|
984
|
-
purpose: 'secret',
|
|
985
|
-
type: 'custom',
|
|
986
|
-
txid: requiredOnly ? undefined : (0, utilityHelpers_1.randomBytesHex)(32),
|
|
987
|
-
senderIdentityKey: requiredOnly ? undefined : (0, utilityHelpers_1.randomBytesHex)(32),
|
|
988
|
-
derivationPrefix: requiredOnly ? undefined : (0, utilityHelpers_1.randomBytesHex)(16),
|
|
989
|
-
derivationSuffix: requiredOnly ? undefined : (0, utilityHelpers_1.randomBytesHex)(16),
|
|
990
|
-
spentBy: undefined, // must be a valid transactionId
|
|
991
|
-
sequenceNumber: requiredOnly ? undefined : 42,
|
|
992
|
-
spendingDescription: requiredOnly ? undefined : (0, utilityHelpers_1.randomBytesHex)(16),
|
|
993
|
-
scriptLength: requiredOnly ? undefined : 36,
|
|
994
|
-
scriptOffset: requiredOnly ? undefined : 12,
|
|
995
|
-
lockingScript: requiredOnly ? undefined : (0, utilityHelpers_noBuffer_1.asArray)((0, utilityHelpers_1.randomBytesHex)(36)),
|
|
996
|
-
...(partial || {})
|
|
997
|
-
};
|
|
998
|
-
await storage.insertOutput(e);
|
|
999
|
-
return e;
|
|
1000
|
-
}
|
|
1001
|
-
static async insertTestOutputTag(storage, u, partial) {
|
|
1002
|
-
const now = new Date();
|
|
1003
|
-
const e = {
|
|
1004
|
-
created_at: now,
|
|
1005
|
-
updated_at: now,
|
|
1006
|
-
outputTagId: 0,
|
|
1007
|
-
userId: u.userId,
|
|
1008
|
-
tag: (0, utilityHelpers_1.randomBytesHex)(6),
|
|
1009
|
-
isDeleted: false,
|
|
1010
|
-
...(partial || {})
|
|
1011
|
-
};
|
|
1012
|
-
await storage.insertOutputTag(e);
|
|
1013
|
-
return e;
|
|
1014
|
-
}
|
|
1015
|
-
static async insertTestOutputTagMap(storage, o, tag) {
|
|
1016
|
-
const now = new Date();
|
|
1017
|
-
const e = {
|
|
1018
|
-
created_at: now,
|
|
1019
|
-
updated_at: now,
|
|
1020
|
-
outputTagId: tag.outputTagId,
|
|
1021
|
-
outputId: o.outputId,
|
|
1022
|
-
isDeleted: false
|
|
1023
|
-
};
|
|
1024
|
-
await storage.insertOutputTagMap(e);
|
|
1025
|
-
return e;
|
|
1026
|
-
}
|
|
1027
|
-
static async insertTestTxLabel(storage, u, partial) {
|
|
1028
|
-
const now = new Date();
|
|
1029
|
-
const e = {
|
|
1030
|
-
created_at: now,
|
|
1031
|
-
updated_at: now,
|
|
1032
|
-
txLabelId: 0,
|
|
1033
|
-
userId: u.userId,
|
|
1034
|
-
label: (0, utilityHelpers_1.randomBytesHex)(6),
|
|
1035
|
-
isDeleted: false,
|
|
1036
|
-
...(partial || {})
|
|
1037
|
-
};
|
|
1038
|
-
await storage.insertTxLabel(e);
|
|
1039
|
-
return e;
|
|
1040
|
-
}
|
|
1041
|
-
static async insertTestTxLabelMap(storage, tx, label, partial) {
|
|
1042
|
-
const now = new Date();
|
|
1043
|
-
const e = {
|
|
1044
|
-
created_at: now,
|
|
1045
|
-
updated_at: now,
|
|
1046
|
-
txLabelId: label.txLabelId,
|
|
1047
|
-
transactionId: tx.transactionId,
|
|
1048
|
-
isDeleted: false,
|
|
1049
|
-
...(partial || {})
|
|
1050
|
-
};
|
|
1051
|
-
await storage.insertTxLabelMap(e);
|
|
1052
|
-
return e;
|
|
1053
|
-
}
|
|
1054
|
-
static async insertTestSyncState(storage, u) {
|
|
1055
|
-
const now = new Date();
|
|
1056
|
-
const settings = await storage.getSettings();
|
|
1057
|
-
const e = {
|
|
1058
|
-
created_at: now,
|
|
1059
|
-
updated_at: now,
|
|
1060
|
-
syncStateId: 0,
|
|
1061
|
-
userId: u.userId,
|
|
1062
|
-
storageIdentityKey: settings.storageIdentityKey,
|
|
1063
|
-
storageName: settings.storageName,
|
|
1064
|
-
status: 'unknown',
|
|
1065
|
-
init: false,
|
|
1066
|
-
refNum: (0, utilityHelpers_1.randomBytesBase64)(10),
|
|
1067
|
-
syncMap: '{}'
|
|
1068
|
-
};
|
|
1069
|
-
await storage.insertSyncState(e);
|
|
1070
|
-
return e;
|
|
1071
|
-
}
|
|
1072
|
-
static async insertTestMonitorEvent(storage) {
|
|
1073
|
-
const now = new Date();
|
|
1074
|
-
const e = {
|
|
1075
|
-
created_at: now,
|
|
1076
|
-
updated_at: now,
|
|
1077
|
-
id: 0,
|
|
1078
|
-
event: 'nothing much happened'
|
|
1079
|
-
};
|
|
1080
|
-
await storage.insertMonitorEvent(e);
|
|
1081
|
-
return e;
|
|
1082
|
-
}
|
|
1083
|
-
static async insertTestCommission(storage, t) {
|
|
1084
|
-
const now = new Date();
|
|
1085
|
-
const e = {
|
|
1086
|
-
created_at: now,
|
|
1087
|
-
updated_at: now,
|
|
1088
|
-
commissionId: 0,
|
|
1089
|
-
userId: t.userId,
|
|
1090
|
-
transactionId: t.transactionId,
|
|
1091
|
-
satoshis: 200,
|
|
1092
|
-
keyOffset: (0, utilityHelpers_1.randomBytesBase64)(32),
|
|
1093
|
-
isRedeemed: false,
|
|
1094
|
-
lockingScript: [1, 2, 3]
|
|
1095
|
-
};
|
|
1096
|
-
await storage.insertCommission(e);
|
|
1097
|
-
return e;
|
|
1098
|
-
}
|
|
1099
|
-
static async createTestSetup1(storage, u1IdentityKey) {
|
|
1100
|
-
const u1 = await _tu.insertTestUser(storage, u1IdentityKey);
|
|
1101
|
-
const u1basket1 = await _tu.insertTestOutputBasket(storage, u1);
|
|
1102
|
-
const u1basket2 = await _tu.insertTestOutputBasket(storage, u1);
|
|
1103
|
-
const u1label1 = await _tu.insertTestTxLabel(storage, u1);
|
|
1104
|
-
const u1label2 = await _tu.insertTestTxLabel(storage, u1);
|
|
1105
|
-
const u1tag1 = await _tu.insertTestOutputTag(storage, u1);
|
|
1106
|
-
const u1tag2 = await _tu.insertTestOutputTag(storage, u1);
|
|
1107
|
-
const { tx: u1tx1 } = await _tu.insertTestTransaction(storage, u1);
|
|
1108
|
-
const u1comm1 = await _tu.insertTestCommission(storage, u1tx1);
|
|
1109
|
-
const u1tx1label1 = await _tu.insertTestTxLabelMap(storage, u1tx1, u1label1);
|
|
1110
|
-
const u1tx1label2 = await _tu.insertTestTxLabelMap(storage, u1tx1, u1label2);
|
|
1111
|
-
const u1tx1o0 = await _tu.insertTestOutput(storage, u1tx1, 0, 101, u1basket1);
|
|
1112
|
-
const u1o0tag1 = await _tu.insertTestOutputTagMap(storage, u1tx1o0, u1tag1);
|
|
1113
|
-
const u1o0tag2 = await _tu.insertTestOutputTagMap(storage, u1tx1o0, u1tag2);
|
|
1114
|
-
const u1tx1o1 = await _tu.insertTestOutput(storage, u1tx1, 1, 111, u1basket2);
|
|
1115
|
-
const u1o1tag1 = await _tu.insertTestOutputTagMap(storage, u1tx1o1, u1tag1);
|
|
1116
|
-
const u1cert1 = await _tu.insertTestCertificate(storage, u1);
|
|
1117
|
-
const u1cert1field1 = await _tu.insertTestCertificateField(storage, u1cert1, 'bob', 'your uncle');
|
|
1118
|
-
const u1cert1field2 = await _tu.insertTestCertificateField(storage, u1cert1, 'name', 'alice');
|
|
1119
|
-
const u1cert2 = await _tu.insertTestCertificate(storage, u1);
|
|
1120
|
-
const u1cert2field1 = await _tu.insertTestCertificateField(storage, u1cert2, 'name', 'alice');
|
|
1121
|
-
const u1cert3 = await _tu.insertTestCertificate(storage, u1);
|
|
1122
|
-
const u1sync1 = await _tu.insertTestSyncState(storage, u1);
|
|
1123
|
-
const u2 = await _tu.insertTestUser(storage);
|
|
1124
|
-
const u2basket1 = await _tu.insertTestOutputBasket(storage, u2);
|
|
1125
|
-
const u2label1 = await _tu.insertTestTxLabel(storage, u2);
|
|
1126
|
-
const { tx: u2tx1 } = await _tu.insertTestTransaction(storage, u2, true);
|
|
1127
|
-
const u2comm1 = await _tu.insertTestCommission(storage, u2tx1);
|
|
1128
|
-
const u2tx1label1 = await _tu.insertTestTxLabelMap(storage, u2tx1, u2label1);
|
|
1129
|
-
const u2tx1o0 = await _tu.insertTestOutput(storage, u2tx1, 0, 101, u2basket1);
|
|
1130
|
-
const { tx: u2tx2 } = await _tu.insertTestTransaction(storage, u2, true);
|
|
1131
|
-
const u2comm2 = await _tu.insertTestCommission(storage, u2tx2);
|
|
1132
|
-
const proven1 = await _tu.insertTestProvenTx(storage);
|
|
1133
|
-
const req1 = await _tu.insertTestProvenTxReq(storage, undefined, undefined, true);
|
|
1134
|
-
const req2 = await _tu.insertTestProvenTxReq(storage, proven1.txid, proven1.provenTxId);
|
|
1135
|
-
const we1 = await _tu.insertTestMonitorEvent(storage);
|
|
1136
|
-
return {
|
|
1137
|
-
u1,
|
|
1138
|
-
u1basket1,
|
|
1139
|
-
u1basket2,
|
|
1140
|
-
u1label1,
|
|
1141
|
-
u1label2,
|
|
1142
|
-
u1tag1,
|
|
1143
|
-
u1tag2,
|
|
1144
|
-
u1tx1,
|
|
1145
|
-
u1comm1,
|
|
1146
|
-
u1tx1label1,
|
|
1147
|
-
u1tx1label2,
|
|
1148
|
-
u1tx1o0,
|
|
1149
|
-
u1o0tag1,
|
|
1150
|
-
u1o0tag2,
|
|
1151
|
-
u1tx1o1,
|
|
1152
|
-
u1o1tag1,
|
|
1153
|
-
u1cert1,
|
|
1154
|
-
u1cert1field1,
|
|
1155
|
-
u1cert1field2,
|
|
1156
|
-
u1cert2,
|
|
1157
|
-
u1cert2field1,
|
|
1158
|
-
u1cert3,
|
|
1159
|
-
u1sync1,
|
|
1160
|
-
u2,
|
|
1161
|
-
u2basket1,
|
|
1162
|
-
u2label1,
|
|
1163
|
-
u2tx1,
|
|
1164
|
-
u2comm1,
|
|
1165
|
-
u2tx1label1,
|
|
1166
|
-
u2tx1o0,
|
|
1167
|
-
u2tx2,
|
|
1168
|
-
u2comm2,
|
|
1169
|
-
proven1,
|
|
1170
|
-
req1,
|
|
1171
|
-
req2,
|
|
1172
|
-
we1
|
|
1173
|
-
};
|
|
1174
|
-
}
- static async createTestSetup2(storage, identityKey, mockData = { actions: [] }) {
- if (!mockData || !mockData.actions) {
- throw new Error('mockData.actions is required');
- }
- const now = new Date();
- const inputTxMap = {};
- const outputMap = {};
- // Only one user is created for this setup
- const user = await _tu.insertTestUser(storage, identityKey);
- // First create the outputs that the action inputs will spend
- for (const action of mockData.actions) {
- for (const input of action.inputs || []) {
- let prevOutput = outputMap[input.sourceOutpoint];
- if (!prevOutput) {
- const { tx: transaction } = await _tu.insertTestTransaction(storage, user, false, {
- txid: input.sourceOutpoint.split('.')[0],
- satoshis: input.sourceSatoshis,
- status: 'confirmed',
- description: 'Generated transaction for input',
- lockTime: 0,
- version: 1,
- inputBEEF: [1, 2, 3, 4],
- rawTx: [4, 3, 2, 1]
- });
- const basket = await _tu.insertTestOutputBasket(storage, user, {
- name: (0, utilityHelpers_1.randomBytesHex)(6)
- });
- // Convert the hex locking script to a byte array if provided
- const lockingScriptValue = input.sourceLockingScript
- ? sdk_1.Utils.toArray(input.sourceLockingScript, 'hex')
- : undefined;
- prevOutput = await _tu.insertTestOutput(storage, transaction, 0, input.sourceSatoshis, basket, true, // Needs to be spendable
- {
- outputDescription: input.inputDescription,
- spendable: true,
- vout: Number(input.sourceOutpoint.split('.')[1]),
- lockingScript: lockingScriptValue,
- txid: transaction.txid
- });
- // Store in maps for later use
- inputTxMap[input.sourceOutpoint] = transaction;
- outputMap[input.sourceOutpoint] = prevOutput;
- }
- }
- }
- // Process transactions that spend those previous outputs
- for (const action of mockData.actions) {
- const { tx: transaction } = await _tu.insertTestTransaction(storage, user, false, {
- txid: `${action.txid}` || `tx_${action.satoshis}_${Date.now()}`,
- satoshis: action.satoshis,
- status: action.status,
- description: action.description,
- lockTime: action.lockTime,
- version: action.version,
- inputBEEF: [1, 2, 3, 4],
- rawTx: [4, 3, 2, 1]
- });
- // Loop through action inputs and update chosen outputs
- for (const input of action.inputs || []) {
- // Output must exist before updating
- const prevOutput = outputMap[input.sourceOutpoint];
- if (!prevOutput) {
- throw new Error(`UTXO not found in outputMap for sourceOutpoint: ${input.sourceOutpoint}`);
- }
- // Set correct output fields as per input fields
- await storage.updateOutput(prevOutput.outputId, {
- spendable: false, // Mark output as spent
- spentBy: transaction.transactionId, // Reference the new transaction
- spendingDescription: input.inputDescription, // Store description
- sequenceNumber: input.sequenceNumber // Store sequence number
- });
- }
- // Insert any new outputs for the transaction
- if (action.outputs) {
- for (const output of action.outputs) {
- const basket = await _tu.insertTestOutputBasket(storage, user, {
- name: output.basket
- });
- const insertedOutput = await _tu.insertTestOutput(storage, transaction, output.outputIndex, output.satoshis, basket, false, {
- outputDescription: output.outputDescription,
- spendable: output.spendable,
- txid: transaction.txid
- });
- // Store this output in the map for future transactions to reference
- outputMap[`${action.txid}.${output.outputIndex}`] = insertedOutput;
- }
- }
- // Labels inserted
- if (action.labels) {
- for (const label of action.labels) {
- const l = await _tu.insertTestTxLabel(storage, user, {
- label,
- isDeleted: false,
- created_at: now,
- updated_at: now,
- txLabelId: 0,
- userId: user.userId
- });
- await _tu.insertTestTxLabelMap(storage, transaction, l);
- }
- }
- // Tags inserted for outputs
- if (action.outputs) {
- for (const output of action.outputs) {
- if (output.tags) {
- // Ensure we fetch the correct inserted output for the current transaction
- const insertedOutput = outputMap[`${action.txid}.${output.outputIndex}`];
- if (!insertedOutput) {
- throw new Error(`Output not found for txid: ${action.txid}, vout: ${output.outputIndex}`);
- }
- for (const tag of output.tags) {
- // Insert the output tag into the database
- const insertedTag = await _tu.insertTestOutputTag(storage, user, {
- tag: tag,
- isDeleted: false,
- created_at: now,
- updated_at: now,
- outputTagId: 0, // Will be auto-incremented by the DB
- userId: user.userId
- });
- // Map the inserted tag to the correct output
- await _tu.insertTestOutputTagMap(storage, insertedOutput, insertedTag);
- }
- }
- }
- }
- }
- return mockData;
- }
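
createTestSetup2 above drives every insert from the caller-supplied mockData.actions array. The sketch below shows one plausible shape for that argument, inferred purely from the fields the loops above read; all concrete values are illustrative:

  // Illustrative mockData for createTestSetup2; field names come from the code
  // above, the values are made up.
  const fundingTxid = '11'.repeat(32)   // placeholder 64-hex-char txid
  const actionTxid = '22'.repeat(32)
  const mockData = {
    actions: [
      {
        txid: actionTxid,
        satoshis: 900,
        status: 'completed',               // illustrative transaction status
        description: 'example action',
        lockTime: 0,
        version: 1,
        labels: ['example-label'],
        inputs: [
          {
            sourceOutpoint: `${fundingTxid}.0`,   // `${txid}.${vout}` of the funding output
            sourceSatoshis: 1000,
            sourceLockingScript: '006a',          // hex, optional
            inputDescription: 'example input',
            sequenceNumber: 0xffffffff
          }
        ],
        outputs: [
          {
            outputIndex: 0,
            satoshis: 900,
            basket: 'example-basket',
            spendable: true,
            outputDescription: 'example output',
            tags: ['example-tag']
          }
        ]
      }
    ]
  }
  await _tu.createTestSetup2(storage, identityKey, mockData)
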
- static mockPostServicesAsSuccess(ctxs) {
- mockPostServices(ctxs, 'success');
- }
- static mockPostServicesAsError(ctxs) {
- mockPostServices(ctxs, 'error');
- }
- static mockPostServicesAsCallback(ctxs, callback) {
- mockPostServices(ctxs, 'error', callback);
- }
- static mockMerklePathServicesAsCallback(ctxs, callback) {
- for (const { services } of ctxs) {
- services.getMerklePath = jest.fn().mockImplementation(async (txid) => {
- const r = await callback(txid);
- return r;
- });
- }
- }
- static async createWalletSetupEnv(chain) {
- const env = Setup_1.Setup.getEnv(chain);
- const rootKeyHex = env.devKeys[env.identityKey];
- if (env.filePath) {
- return await _tu.createSQLiteTestWallet({
- filePath: env.filePath,
- databaseName: 'setupEnvWallet',
- chain,
- rootKeyHex
- });
- }
- return await _tu.createTestWalletWithStorageClient({
- chain,
- rootKeyHex
- });
- }
- /**
- * Create a pair of transactions that cancel out, other than the transaction fees.
- * Both created transactions are left with status 'noSend'.
- * This allows the transactions to either be broadcast by an external party,
- * or they may be aborted.
- *
- * `doubleSpendTx` should only be used for double spend testing.
- * It attempts to forward the txidDo input, which should already have been reclaimed by txidUndo, to a random private key output.
- *
- * @param wallet the wallet that will create both transactions, or a Chain ('main' or 'test'), in which case createWalletSetupEnv is used to create a wallet.
- * @param satoshis amount of new output created and consumed. Defaults to 41.
- * @returns { txidDo: string, txidUndo: string, beef: Beef, doubleSpendTx: Transaction }
- */
- static async createNoSendTxPair(wallet, satoshis = 41) {
- let destroyWallet = false;
- if (wallet === 'main' || wallet === 'test') {
- const setup = await _tu.createWalletSetupEnv(wallet);
- wallet = setup.wallet;
- if (!setup.storage.isActiveEnabled)
- await setup.storage.setActive(setup.storage.getActiveStore());
- destroyWallet = true;
- }
- const derivationPrefix = (0, utilityHelpers_1.randomBytesBase64)(8);
- const derivationSuffix = (0, utilityHelpers_1.randomBytesBase64)(8);
- const keyDeriver = wallet.keyDeriver;
- const t = new ScriptTemplateBRC29_1.ScriptTemplateBRC29({
- derivationPrefix,
- derivationSuffix,
- keyDeriver
- });
- let label = 'doTxPair';
- const car = await wallet.createAction({
- outputs: [
- {
- lockingScript: t.lock(keyDeriver.rootKey.toString(), wallet.identityKey).toHex(),
- satoshis,
- outputDescription: label,
- customInstructions: JSON.stringify({
- derivationPrefix,
- derivationSuffix,
- type: 'BRC29'
- })
- }
- ],
- options: {
- randomizeOutputs: false,
- noSend: true
- },
- description: label
- });
- const beef = sdk_1.Beef.fromBinary(car.tx);
- const txidDo = car.txid;
- const outpoint = `${car.txid}.0`;
- const unlock = t.unlock(keyDeriver.rootKey.toString(), wallet.identityKey, satoshis);
- label = 'undoTxPair';
- const car2 = await wallet.createAction({
- inputBEEF: beef.toBinary(),
- inputs: [
- {
- outpoint,
- unlockingScriptLength: t.unlockLength,
- inputDescription: label
- }
- ],
- description: label,
- options: {
- noSend: true,
- noSendChange: car.noSendChange
- }
- });
- const st = car2.signableTransaction;
- const stBeef = sdk_1.Beef.fromBinary(st.tx);
- const tx = stBeef.findAtomicTransaction(stBeef.txs.slice(-1)[0].txid);
- tx.inputs[0].unlockingScriptTemplate = unlock;
- await tx.sign();
- const unlockingScript = tx.inputs[0].unlockingScript.toHex();
- const signArgs = {
- reference: st.reference,
- spends: { 0: { unlockingScript } },
- options: {
- noSend: true
- }
- };
- const sar = await wallet.signAction(signArgs);
- beef.mergeBeef(sar.tx);
- const txidUndo = sar.txid;
- if (destroyWallet)
- await wallet.destroy();
- const doubleSpendTx = new sdk_1.Transaction();
- const sourceTXID = txidDo;
- const sourceOutputIndex = 0;
- const sourceSatoshis = satoshis;
- doubleSpendTx.addInput({
- sourceOutputIndex,
- sourceTXID,
- sourceTransaction: beef.findAtomicTransaction(sourceTXID),
- unlockingScriptTemplate: unlock
- });
- doubleSpendTx.addOutput({
- satoshis: sourceSatoshis - 10,
- lockingScript: new sdk_1.P2PKH().lock(sdk_1.PrivateKey.fromRandom().toAddress())
- });
- await doubleSpendTx.sign();
- return {
- txidDo,
- txidUndo,
- beef,
- doubleSpendTx
- };
- }
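
A minimal sketch of how the returned pair might be consumed, based only on the doc comment and return value above; broadcasting and abort handling are intentionally left out:

  // Sketch only: consumes the documented return value of createNoSendTxPair.
  const { txidDo, txidUndo, beef, doubleSpendTx } = await _tu.createNoSendTxPair(wallet)
  // Both actions are still 'noSend': an external party could broadcast them from
  // `beef`, or the wallet could abort them instead.
  const doTx = beef.findAtomicTransaction(txidDo)
  const undoTx = beef.findAtomicTransaction(txidUndo)
  // doubleSpendTx re-spends txidDo's output 0 and should be rejected once
  // txidUndo has been accepted; it exists purely for double-spend testing.
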
- }
- exports.TestUtilsWalletStorage = TestUtilsWalletStorage;
- TestUtilsWalletStorage.legacyRootKeyHex = '153a3df216' + '686f55b253991c' + '7039da1f648' + 'ffc5bfe93d6ac2c25ac' + '2d4070918d';
- class _tu extends TestUtilsWalletStorage {
- }
- exports._tu = _tu;
- async function insertEmptySetup(storage, identityKey) {
- return {};
- }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- async function expectToThrowWERR(expectedClass, fn) {
- try {
- await fn();
- }
- catch (eu) {
- const e = WalletError_1.WalletError.fromUnknown(eu);
- if (e.name !== expectedClass.name || !e.isError)
- console.log(`Error name ${e.name} vs class name ${expectedClass.name}\n${e.stack}\n`);
- // The output above may help with debugging; alternatively, put a breakpoint
- // on the line below and inspect e.stack
- expect(e.name).toBe(expectedClass.name);
- expect(e.isError).toBe(true);
- return;
- }
- throw new Error(`${expectedClass.name} was not thrown`);
- }
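
expectToThrowWERR asserts that an async call rejects with a specific WalletError subclass. A minimal usage sketch, assuming both the error class (here WERR_INVALID_PARAMETER) and the call under test (callExpectedToFail) are defined by the surrounding test:

  // Hypothetical: WERR_INVALID_PARAMETER and callExpectedToFail are placeholders.
  await expectToThrowWERR(WERR_INVALID_PARAMETER, async () => {
    await callExpectedToFail()   // must reject with the expected WalletError class
  })
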
- function mockPostServices(ctxs, status = 'success', callback) {
- for (const { services } of ctxs) {
- // Mock the services postBeef to avoid actually broadcasting new transactions.
- services.postBeef = jest.fn().mockImplementation((beef, txids) => {
- status = !callback ? status : callback(beef, txids);
- const r = {
- name: 'mock',
- status: 'success',
- txidResults: txids.map(txid => ({ txid, status }))
- };
- return Promise.resolve([r]);
- });
- }
- }
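
The class statics earlier (mockPostServicesAsSuccess / AsError / AsCallback) all delegate to mockPostServices above. A minimal wiring sketch, assuming `ctxs` is an array of test contexts that each expose a `services` object as iterated above:

  // After this, services.postBeef resolves with a mocked result instead of broadcasting.
  _tu.mockPostServicesAsSuccess(ctxs)
  // Or decide the per-txid status ('success' | 'error') from a callback:
  _tu.mockPostServicesAsCallback(ctxs, (beef, txids) => 'error')
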
- // Declare logEnabled globally so it can be accessed anywhere in this file
- let logEnabled = false;
- /**
- * Centralized logging function that logs when running in jest "single test" mode,
- * or when `logEnabled` is true.
- *
- * @param {string} message - The main message to log.
- * @param {...any} optionalParams - Additional parameters to log (optional).
- * @returns {void} This function does not return any value.
- *
- * @example
- * logger('Test message', someVariable);
- * logger('Another message with multiple params', param1, param2);
- */
- const logger = (message, ...optionalParams) => {
- const isSingleTest = process.argv.some(arg => arg === '--testNamePattern' || arg === '-t');
- if (logEnabled || isSingleTest) {
- console.log(message, ...optionalParams);
- }
- };
- exports.logger = logger;
- /**
- * Updates a table dynamically based on key-value pairs in testValues.
- * @param {Function} updateFunction - The specific update function from storage.
- * @param {string | number} id - The ID or unique identifier of the record to update.
- * @param {Object} testValues - An object containing key-value pairs to update.
- */
- const updateTable = async (updateFunction, id, testValues) => {
- for (const [key, value] of Object.entries(testValues)) {
- (0, exports.logger)('id=', id, '[key]=', [key], 'value=', value);
- await updateFunction(id, { [key]: value });
- }
- };
- exports.updateTable = updateTable;
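
updateTable applies each key of testValues as its own single-field update, logging every step through logger. A minimal sketch, assuming a storage instance whose updateOutput method matches the one used earlier in this file, with illustrative id and field values:

  // `storage`, `outputId` and `spendingTransactionId` are assumptions for illustration.
  await updateTable((id, patch) => storage.updateOutput(id, patch), outputId, {
    spendable: false,
    spentBy: spendingTransactionId
  })
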
- /**
- * Verifies that all key-value pairs in `testValues` match the corresponding keys in `targetObject`.
- * If a value is a Date, it validates the time using the `validateUpdateTime` function to ensure
- * it matches the expected time or is greater than a reference time.
- *
- * @param {Record<string, any>} targetObject - The object to verify values against.
- * @param {Record<string, any>} testValues - An object containing the expected key-value pairs.
- * @param {Date} referenceTime - A timestamp captured just before the updates, used for validating dates.
- *
- * @example
- * const targetObject = { key1: 'value1', created_at: new Date('2024-12-30T23:00:00Z') }
- * const testValues = { key1: 'value1', created_at: new Date('2024-12-30T23:00:00Z') }
- * const referenceTime = new Date()
- * verifyValues(targetObject, testValues, referenceTime)
- */
- const verifyValues = (targetObject, testValues, referenceTime) => {
- Object.entries(testValues).forEach(([key, expectedValue]) => {
- const actualValue = targetObject[key];
- if (expectedValue instanceof Date) {
- // Use `validateUpdateTime` for Date comparisons
- expect((0, exports.validateUpdateTime)(actualValue, expectedValue, referenceTime)).toBe(true);
- }
- else {
- // Default to strict equality for other fields
- expect(actualValue).toStrictEqual(expectedValue);
- }
- });
- };
- exports.verifyValues = verifyValues;
- /**
- * Comparison function to validate update time.
- * Allows the time to match the expected update time or be greater than a reference time.
- * Validates across multiple formats with a tolerance for minor discrepancies.
- * @param {Date} actualTime - The `updated_at` time returned from the storage.
- * @param {Date} expectedTime - The time you tried to set.
- * @param {Date} referenceTime - A timestamp captured just before the update attempt.
- * @param {number} toleranceMs - Optional tolerance in milliseconds for discrepancies (default: 10ms).
- * @param {boolean} [ logEnabled=false ] - A flag to enable or disable logging for this error.
- *
- * @returns {boolean} - Returns `true` if the validation passes; `false` otherwise.
- * Logs human-readable details if the validation fails.
- */
- const validateUpdateTime = (actualTime, expectedTime, referenceTime, toleranceMs = 10, logEnabled = false) => {
- const actualTimestamp = actualTime.getTime();
- const expectedTimestamp = expectedTime.getTime();
- const referenceTimestamp = referenceTime.getTime();
- if (logEnabled) {
- (0, exports.logger)(`Validation inputs:\n`, `Actual Time: ${actualTime.toISOString()} (Timestamp: ${actualTimestamp})\n`, `Expected Time: ${expectedTime.toISOString()} (Timestamp: ${expectedTimestamp})\n`, `Reference Time: ${referenceTime.toISOString()} (Timestamp: ${referenceTimestamp})`);
- }
- const isWithinTolerance = Math.abs(actualTimestamp - expectedTimestamp) <= toleranceMs;
- const isGreaterThanReference = actualTimestamp > referenceTimestamp;
- const isoMatch = actualTime.toISOString() === expectedTime.toISOString();
- const utcMatch = actualTime.toUTCString() === expectedTime.toUTCString();
- const humanReadableMatch = actualTime.toDateString() === expectedTime.toDateString();
- // Treat unusually large differences as extreme outliers and let them pass
- if (!isWithinTolerance && Math.abs(actualTimestamp - expectedTimestamp) > 100000000) {
- if (logEnabled) {
- (0, exports.logger)(`Skipping validation failure: The difference is unusually large (${Math.abs(actualTimestamp - expectedTimestamp)}ms). Validation passed for extreme outliers.`);
- }
- return true;
- }
- const isValid = isWithinTolerance || isGreaterThanReference || isoMatch || utcMatch || humanReadableMatch;
- if (!isValid) {
- console.error(`Validation failed:\n`, `Actual Time: ${actualTime.toISOString()} (Timestamp: ${actualTimestamp})\n`, `Expected Time: ${expectedTime.toISOString()} (Timestamp: ${expectedTimestamp})\n`, `Reference Time: ${referenceTime.toISOString()} (Timestamp: ${referenceTimestamp})\n`, `Tolerance: ±${toleranceMs}ms\n`, `Within Tolerance: ${isWithinTolerance}\n`, `Greater Than Reference: ${isGreaterThanReference}\n`, `ISO Match: ${isoMatch}\n`, `UTC Match: ${utcMatch}\n`, `Human-Readable Match: ${humanReadableMatch}`);
- }
- else {
- if (logEnabled) {
- (0, exports.logger)(`Validation succeeded:\n`, `Actual Time: ${actualTime.toISOString()} (Timestamp: ${actualTimestamp})`);
- }
- }
- return isValid;
- };
- exports.validateUpdateTime = validateUpdateTime;
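
A short worked example of the acceptance rules implemented above (all values illustrative): a difference within the 10 ms default tolerance passes, and so does any actual time later than the reference time.

  const referenceTime = new Date('2024-12-30T23:00:00.000Z')
  const expectedTime = new Date('2024-12-30T23:00:05.000Z')
  // Passes: |actual - expected| = 4 ms, inside the default 10 ms tolerance.
  validateUpdateTime(new Date('2024-12-30T23:00:05.004Z'), expectedTime, referenceTime) // true
  // Also passes: outside tolerance, but later than referenceTime.
  validateUpdateTime(new Date('2024-12-30T23:10:00.000Z'), expectedTime, referenceTime) // true
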
- /**
- * Set whether logging should be enabled or disabled globally.
- *
- * @param {boolean} enabled - A flag to enable or disable logging.
- * `true` enables logging, `false` disables logging.
- *
- * @returns {void} This function does not return any value.
- *
- * @example
- * setLogging(true); // Enable logging
- * setLogging(false); // Disable logging
- */
- const setLogging = (enabled) => {
- logEnabled = enabled;
- };
- exports.setLogging = setLogging;
- /**
- * Logs the unique constraint error for multiple fields.
- *
- * @param {any} error - The error object that contains the error message.
- * @param {string} tableName - The name of the table where the constraint was violated.
- * @param {string[]} columnNames - An array of column names for which to check the unique constraint.
- * @param {boolean} logEnabled - A flag to enable or disable logging.
- */
- const logUniqueConstraintError = (error, tableName, columnNames, logEnabled = false) => {
- if (logEnabled) {
- // Construct the expected error message string with the table name prefixed to each column
- const expectedErrorString = `SQLITE_CONSTRAINT: UNIQUE constraint failed: ${columnNames.map(col => `${tableName}.${col}`).join(', ')}`;
- (0, exports.logger)('expectedErrorString=', expectedErrorString);
- // Check if the error message contains the expected string
- if (error.message.includes(expectedErrorString)) {
- console.log(`Unique constraint error for columns ${columnNames.join(', ')} caught as expected:`, error.message);
- }
- else {
- console.log('Unexpected error message:', error.message);
- }
- }
- // If the error doesn't match the expected unique constraint error message, throw it
- if (!error.message.includes(`SQLITE_CONSTRAINT: UNIQUE constraint failed: ${columnNames.map(col => `${tableName}.${col}`).join(', ')}`)) {
- console.log('Unexpected error:', error.message);
- throw new Error(`Unexpected error: ${error.message}`);
- }
- };
- exports.logUniqueConstraintError = logUniqueConstraintError;
- /**
- * Logs an error based on the specific foreign constraint failure or unexpected error.
- *
- * @param {any} error - The error object that contains the error message.
- * @param {string} tableName - The name of the table where the constraint is applied.
- * @param {string} columnName - The name of the column for which the foreign key constraint is being violated.
- * @param {boolean} [ logEnabled=false ] - A flag to enable or disable logging for this error.
- *
- * @returns {void} This function does not return any value. It logs the error to the console.
- *
- * @example logForeignConstraintError(error, 'proven_tx_reqs', 'provenTxReqId', logEnabled)
- */
- const logForeignConstraintError = (error, tableName, columnName, logEnabled = false) => {
- if (logEnabled) {
- if (error.message.includes(`SQLITE_CONSTRAINT: FOREIGN KEY constraint failed`)) {
- (0, exports.logger)(`${columnName} constraint error caught as expected:`, error.message);
- }
- else {
- (0, exports.logger)('Unexpected error:', error.message);
- throw new Error(`Unexpected error: ${error.message}`);
- }
- }
- };
- /**
- * Triggers a unique constraint error by attempting to update a row with a value that violates a unique constraint.
- *
- * @param {any} storage - The storage object, typically containing the database methods for performing CRUD operations.
- * @param {string} findMethod - The method name for finding rows in the table (e.g., `findProvenTxReqs`).
- * @param {string} updateMethod - The method name for updating rows in the table (e.g., `updateProvenTxReq`).
- * @param {string} tableName - The name of the table being updated.
- * @param {string} columnName - The column name for which the unique constraint is being tested.
- * @param {any} invalidValue - The value to assign to the column that should trigger the unique constraint error. This should be an object with the column name(s) as the key(s).
- * @param {number} [id=1] - The id used to set the column value during the test (default is 1).
- * @param {boolean} [ logEnabled=false ] - A flag to enable or disable logging during the test. Default is `false` (logging disabled).
- *
- * @returns {Promise<boolean>} Resolves to `true` if the unique constraint error was thrown, otherwise `false`.
- *
- * @throws {Error} Throws an error if the unique constraint error is not triggered or if the table has insufficient rows.
- *
- * @example await triggerUniqueConstraintError(storage, 'findProvenTxReqs', 'updateProvenTxReq', 'proven_tx_reqs', 'provenTxReqId', { provenTxReqId: 42 }, 1, true)
- */
- const triggerUniqueConstraintError = async (storage, findMethod, updateMethod, tableName, columnName, invalidValue, // This remains an object passed in by the caller
- id = 1, logEnabled = false) => {
- (0, exports.setLogging)(logEnabled);
- const rows = await storage[findMethod]({});
- if (logEnabled) {
- (0, exports.logger)('rows=', rows);
- }
- if (!rows || rows.length < 2) {
- throw new Error(`Expected at least two rows in the table "${tableName}", but found only ${rows.length}. Please add more rows for the test.`);
- }
- if (!(columnName in rows[0])) {
- throw new Error(`Column "${columnName}" does not exist in the table "${tableName}".`);
- }
- if (id === invalidValue[columnName]) {
- throw new Error(`Failed to update "${columnName}" in the table "${tableName}" as id ${id} is same as update value ${invalidValue[columnName]}".`);
- }
- if (logEnabled) {
- (0, exports.logger)('invalidValue=', invalidValue);
- }
- // Create columnNames from invalidValue keys before the update
- const columnNames = Object.keys(invalidValue);
- try {
- if (logEnabled) {
- (0, exports.logger)('update id=', id);
- }
- // Attempt the update with the new value that should trigger the constraint error
- await storage[updateMethod](id, invalidValue);
- return false;
- }
- catch (error) {
- // Handle the error by passing columnNames for validation in logUniqueConstraintError
- (0, exports.logUniqueConstraintError)(error, tableName, columnNames, logEnabled);
- return true;
- }
- };
- exports.triggerUniqueConstraintError = triggerUniqueConstraintError;
- /**
- * Tests that the foreign key constraint error is triggered for any table and column.
- *
- * @param {any} storage - The storage object with the database methods for performing CRUD operations.
- * @param {string} findMethod - The method name for finding rows in the table (e.g., `findProvenTxReqs`).
- * @param {string} updateMethod - The method name for updating rows in the table (e.g., `updateProvenTxReq`).
- * @param {string} tableName - The name of the table being updated.
- * @param {string} columnName - The column name being tested for the foreign key constraint.
- * @param {any} invalidValue - The value to assign to the column that should trigger the foreign key constraint error. This should be an object with the column name as the key.
- * @param {number} [id=1] - The id used to set the column value during the test (default is 1).
- * @param {boolean} [ logEnabled=false ] - A flag to enable or disable logging during the test. Default is `false` (logging disabled).
- *
- * @returns {Promise<boolean>} Resolves to `true` if the foreign key constraint error was thrown, otherwise `false`.
- *
- * @throws {Error} Throws an error if the foreign key constraint error is not triggered.
- *
- * @example await triggerForeignKeyConstraintError(storage, 'findProvenTxReqs', 'updateProvenTxReq', 'proven_tx_reqs', 'provenTxId', { provenTxId: 42 })
- */
- const triggerForeignKeyConstraintError = async (storage, findMethod, updateMethod, tableName, columnName, invalidValue, id = 1, logEnabled = false) => {
- // Set logging state based on the argument
- (0, exports.setLogging)(logEnabled);
- // Dynamically fetch rows using the correct method (findMethod)
- const rows = await storage[findMethod]({});
- if (!rows || rows.length < 2) {
- throw new Error(`Expected at least two rows in the table "${tableName}", but found only ${rows.length}. Please add more rows for the test.`);
- }
- if (!(columnName in rows[0])) {
- throw new Error(`Column "${columnName}" does not exist in the table "${tableName}".`);
- }
- if (id === invalidValue[columnName]) {
- throw new Error(`Failed to update "${columnName}" in the table "${tableName}" as id ${id} is same as update value ${invalidValue[columnName]}".`);
- }
- // TBD See what types need to be passed in before raising errors
- try {
- // Attempt the update with the invalid value that should trigger the foreign key constraint error
- const r = await storage[updateMethod](id, invalidValue); // Pass the object with the column name and value
- (0, exports.logger)('r=', r);
- return false;
- }
- catch (error) {
- logForeignConstraintError(error, tableName, columnName, logEnabled);
- return true;
- }
- };
- exports.triggerForeignKeyConstraintError = triggerForeignKeyConstraintError;
- /**
- * Aborts all transactions with a specific status in the storage and asserts they are aborted.
- *
- * @param {Wallet} wallet - The wallet instance used to abort actions.
- * @param {StorageKnex} storage - The storage instance to query transactions from.
- * @param {TransactionStatus} status - The transaction status used to filter transactions.
- * @returns {Promise<boolean>} - Resolves to `true` if all matching transactions were successfully aborted.
- */
- async function cleanTransactionsUsingAbort(wallet, storage, status) {
- const transactions = await storage.findTransactions({ partial: { status } });
- await Promise.all(transactions.map(async (transaction) => {
- const result = await wallet.abortAction({
- reference: transaction.reference
- });
- expect(result.aborted).toBe(true);
- }));
- return true;
- }
- /**
- * Aborts all transactions with the status `'nosend'` in the storage and verifies success.
- *
- * @param {Wallet} wallet - The wallet instance used to abort actions.
- * @param {StorageKnex} storage - The storage instance to query transactions from.
- * @returns {Promise<boolean>} - Resolves to `true` if all `'nosend'` transactions were successfully aborted.
- */
- async function cleanUnsentTransactionsUsingAbort(wallet, storage) {
- const result = await cleanTransactionsUsingAbort(wallet, storage, 'nosend');
- expect(result).toBe(true);
- return result;
- }
- /**
- * Aborts all transactions with the status `'unsigned'` in the storage and verifies success.
- *
- * @param {Wallet} wallet - The wallet instance used to abort actions.
- * @param {StorageKnex} storage - The storage instance to query transactions from.
- * @returns {Promise<boolean>} - Resolves to `true` if all `'unsigned'` transactions were successfully aborted.
- */
- async function cleanUnsignedTransactionsUsingAbort(wallet, storage) {
- const result = await cleanTransactionsUsingAbort(wallet, storage, 'unsigned');
- expect(result).toBe(true);
- return result;
- }
- /**
- * Aborts all transactions with the status `'unprocessed'` in the storage and verifies success.
- *
- * @param {Wallet} wallet - The wallet instance used to abort actions.
- * @param {StorageKnex} storage - The storage instance to query transactions from.
- * @returns {Promise<boolean>} - Resolves to `true` if all `'unprocessed'` transactions were successfully aborted.
- */
- async function cleanUnprocessedTransactionsUsingAbort(wallet, storage) {
- const result = await cleanTransactionsUsingAbort(wallet, storage, 'unprocessed');
- expect(result).toBe(true);
- return result;
- }
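
These three wrappers are typically used as cleanup between tests. A minimal sketch, assuming `wallet` and `storage` come from one of the setup helpers in this file and that the test intentionally left actions in the 'nosend' state:

  afterEach(async () => {
    // Abort anything the test left unsent so later tests start from a clean slate.
    await cleanUnsentTransactionsUsingAbort(wallet, storage)
  })
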
- /**
- * Normalize a date or ISO string to a consistent ISO string format.
- * @param value - The value to normalize (Date object or ISO string).
- * @returns ISO string or null if not a date-like value.
- */
- const normalizeDate = (value) => {
- if (value instanceof Date) {
- return value.toISOString();
- }
- else if (typeof value === 'string' && !isNaN(Date.parse(value))) {
- return new Date(value).toISOString();
- }
- return null;
- };
- exports.normalizeDate = normalizeDate;
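
Illustrative results of normalizeDate:

  normalizeDate(new Date('2025-01-01T00:00:00Z'))  // '2025-01-01T00:00:00.000Z'
  normalizeDate('2025-01-01')                      // '2025-01-01T00:00:00.000Z'
  normalizeDate('not a date')                      // null
  normalizeDate(42)                                // null
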
- async function logTransaction(storage, txid) {
- let amount = 0;
- let log = `\n==== Transaction Log ====\ntxid: ${txid}\n`;
- const transactions = await storage.findTransactions({ partial: { txid } });
- for (const tx of transactions) {
- log += `Status: ${tx.status}\n`;
- log += `Description: ${tx.description}\n`;
- const txLabelMaps = await storage.findTxLabelMaps({
- partial: { transactionId: tx.transactionId }
- });
- if (txLabelMaps.length > 0) {
- log += `Labels:\n`;
- for (const txLabelMap of txLabelMaps) {
- const labels = await storage.findTxLabels({
- partial: { txLabelId: txLabelMap.txLabelId }
- });
- if (labels.length > 0) {
- log += ` - ${labels[0].label}\n`;
- }
- }
- }
- else {
- log += `Labels: N/A\n`;
- }
- const inputs = await storage.findOutputs({
- partial: { transactionId: tx.transactionId }
- });
- for (const input of inputs) {
- log += await logInput(storage, input.txid, input.vout);
- }
- const outputs = await storage.findOutputs({
- partial: { transactionId: tx.transactionId }
- });
- for (const output of outputs) {
- log += await logOutput(storage, output);
- amount += output.spendable ? output.satoshis : 0;
- }
- const beef = await storage.getBeefForTransaction(txid, {});
- if (beef) {
- log += `Beef Data:\n${beef.toLogString()}\n${beef.toHex()}\n`;
- }
- else {
- log += `Beef Data: N/A\n`;
- }
- }
- log += `-------------\nTotal Amount: ${amount} satoshis\n=============\n`;
- return log;
- }
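
logTransaction builds a human-readable trace of a transaction (status, labels, inputs via logInput, outputs via logOutput, and BEEF data). A one-line usage sketch, assuming `storage` and `txid` come from the surrounding test:

  console.log(await logTransaction(storage, txid))
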
- async function logOutput(storage, output) {
- var _a;
- let log = `\n-- Output --\n`;
- log += `Outpoint: ${output.txid}:${output.vout}\n`;
- log += `Satoshis: ${output.satoshis}\n`;
- log += `Spendable: ${output.spendable}\n`;
- log += `Change: ${output.change}\n`;
- log += `Provided By: ${output.providedBy}\n`;
- log += `Spent By: ${(_a = output.spentBy) !== null && _a !== void 0 ? _a : 'Unspent'}\n`;
- if (output.basketId) {
- const baskets = await storage.findOutputBaskets({
- partial: { basketId: output.basketId }
- });
- if (baskets.length === 1) {
- log += `Basket: ${logBasket(baskets[0])}\n`;
- }
- else {
- log += '*** PROBLEM WITH BASKET ***';
- }
- }
- const outputTags = await storage.findOutputTagMaps({
- partial: { outputId: output.outputId }
- });
- if (outputTags.length > 0) {
- log += `Tags:\n`;
- for (const outputTag of outputTags) {
- const tags = await storage.findOutputTags({
- partial: { outputTagId: outputTag.outputTagId }
- });
- if (tags.length > 0) {
- log += ` - ${tags[0].tag}\n`;
- }
- }
- }
- else {
- log += `Tags: N/A\n`;
- }
- return log;
- }
- async function logInput(storage, prevOutputTxid, prevOutputVout, indentLevel = 1) {
- var _a;
- const indent = ' '.repeat(indentLevel);
- let log = `\n${indent}-- Input (Previous Output) --\n`;
- const prevOutputs = await storage.findOutputs({
- partial: { txid: prevOutputTxid, vout: prevOutputVout }
- });
- if (prevOutputs.length === 0) {
- log += `${indent}Previous Output Not Found (Outpoint: ${prevOutputTxid}:${prevOutputVout})\n`;
- return log;
- }
- for (const prevOutput of prevOutputs) {
- const outpoint = `${prevOutputTxid}:${prevOutput.vout}`;
- log += `${indent}Source Outpoint: ${outpoint}\n`;
- log += `${indent}Satoshis: ${prevOutput.satoshis}\n`;
- log += `${indent}Spendable: ${prevOutput.spendable}\n`;
- log += `${indent}Change: ${prevOutput.change}\n`;
- log += `${indent}Provided By: ${prevOutput.providedBy}\n`;
- log += `${indent}Spent By: ${(_a = prevOutput.spentBy) !== null && _a !== void 0 ? _a : 'Unspent'}\n`;
- log += `${indent}Locking Script: ${prevOutput.lockingScript}\n`;
- // If this output was spent, recursively log its inputs
- if (prevOutput.spentBy) {
- const spendingTx = await storage.findTransactions({
- partial: { transactionId: prevOutput.spentBy }
- });
- if (spendingTx.length > 0) {
- const spentByTxid = spendingTx[0].txid;
- log += `${indent} ↳ Spent By TXID: ${spentByTxid}\n`;
- log += await logInput(storage, spentByTxid, prevOutput.vout, indentLevel + 2);
- }
- else {
- log += `${indent} ↳ Spent By TXID Unknown (transactionId: ${prevOutput.spentBy})\n`;
- }
- }
- }
- return log;
- }
- function logBasket(basket) {
- return `\n-- Basket --\nName: ${basket.name}\n`;
- }
- //# sourceMappingURL=TestUtilsWalletStorage.js.map