@spirobel/monero-wallet-api 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68) hide show
  1. package/README.md +3 -3
  2. package/dist/api.d.ts +25 -96
  3. package/dist/api.js +23 -175
  4. package/dist/io/BunFileInterface.d.ts +32 -0
  5. package/dist/io/atomicWrite.d.ts +2 -0
  6. package/dist/io/atomicWrite.js +10 -0
  7. package/dist/io/extension.d.ts +18 -0
  8. package/dist/io/extension.js +11 -0
  9. package/dist/io/indexedDB.d.ts +45 -0
  10. package/dist/io/indexedDB.js +221 -0
  11. package/dist/io/readDir.d.ts +1 -0
  12. package/dist/io/readDir.js +7 -0
  13. package/dist/io/sleep.d.ts +1 -0
  14. package/dist/io/sleep.js +1 -0
  15. package/dist/keypairs-seeds/keypairs.d.ts +29 -0
  16. package/dist/keypairs-seeds/keypairs.js +207 -0
  17. package/dist/keypairs-seeds/writeKeypairs.d.ts +11 -0
  18. package/dist/keypairs-seeds/writeKeypairs.js +75 -0
  19. package/dist/node-interaction/binaryEndpoints.d.ts +59 -14
  20. package/dist/node-interaction/binaryEndpoints.js +110 -54
  21. package/dist/node-interaction/jsonEndpoints.d.ts +249 -187
  22. package/dist/node-interaction/jsonEndpoints.js +287 -0
  23. package/dist/node-interaction/nodeUrl.d.ts +129 -0
  24. package/dist/node-interaction/nodeUrl.js +113 -0
  25. package/dist/scanning-syncing/backgroundWorker.d.ts +6 -0
  26. package/dist/scanning-syncing/backgroundWorker.js +56 -0
  27. package/dist/scanning-syncing/connectionStatus.d.ts +15 -0
  28. package/dist/scanning-syncing/connectionStatus.js +35 -0
  29. package/dist/scanning-syncing/openWallet.d.ts +28 -0
  30. package/dist/scanning-syncing/openWallet.js +57 -0
  31. package/dist/scanning-syncing/scanSettings.d.ts +96 -0
  32. package/dist/scanning-syncing/scanSettings.js +243 -0
  33. package/dist/scanning-syncing/scanresult/computeKeyImage.d.ts +3 -0
  34. package/dist/scanning-syncing/scanresult/computeKeyImage.js +21 -0
  35. package/dist/scanning-syncing/scanresult/getBlocksbinBuffer.d.ts +28 -0
  36. package/dist/scanning-syncing/scanresult/getBlocksbinBuffer.js +52 -0
  37. package/dist/scanning-syncing/scanresult/reorg.d.ts +14 -0
  38. package/dist/scanning-syncing/scanresult/reorg.js +78 -0
  39. package/dist/scanning-syncing/scanresult/scanCache.d.ts +84 -0
  40. package/dist/scanning-syncing/scanresult/scanCache.js +134 -0
  41. package/dist/scanning-syncing/scanresult/scanCacheOpened.d.ts +149 -0
  42. package/dist/scanning-syncing/scanresult/scanCacheOpened.js +648 -0
  43. package/dist/scanning-syncing/scanresult/scanResult.d.ts +64 -0
  44. package/dist/scanning-syncing/scanresult/scanResult.js +213 -0
  45. package/dist/scanning-syncing/scanresult/scanStats.d.ts +60 -0
  46. package/dist/scanning-syncing/scanresult/scanStats.js +273 -0
  47. package/dist/scanning-syncing/worker-entrypoints/worker.d.ts +1 -0
  48. package/dist/scanning-syncing/worker-entrypoints/worker.js +8 -0
  49. package/dist/scanning-syncing/worker-mains/worker.d.ts +1 -0
  50. package/dist/scanning-syncing/worker-mains/worker.js +7 -0
  51. package/dist/send-functionality/conversion.d.ts +4 -0
  52. package/dist/send-functionality/conversion.js +75 -0
  53. package/dist/send-functionality/inputSelection.d.ts +13 -0
  54. package/dist/send-functionality/inputSelection.js +8 -0
  55. package/dist/send-functionality/transactionBuilding.d.ts +51 -0
  56. package/dist/send-functionality/transactionBuilding.js +111 -0
  57. package/dist/tools/monero-tools.d.ts +46 -0
  58. package/dist/tools/monero-tools.js +165 -0
  59. package/dist/viewpair/ViewPair.d.ts +157 -0
  60. package/dist/viewpair/ViewPair.js +346 -0
  61. package/dist/wasm-processing/wasi.js +1 -2
  62. package/dist/wasm-processing/wasmFile.d.ts +1 -1
  63. package/dist/wasm-processing/wasmFile.js +2 -2
  64. package/dist/wasm-processing/wasmProcessor.d.ts +16 -4
  65. package/dist/wasm-processing/wasmProcessor.js +23 -7
  66. package/package.json +29 -6
  67. package/dist/testscrap.js +0 -36
  68. /package/dist/{testscrap.d.ts → io/BunFileInterface.js} +0 -0
@@ -0,0 +1,64 @@
1
import type { BlockInfo, Output } from "../../api";
import { type KeyImage } from "./computeKeyImage";
import { type ErrorResponse } from "../../node-interaction/binaryEndpoints";
import type { CacheChangedCallback, CacheRange, ChangedOutput, ScanCache } from "./scanCache";
/** Input bundle for {@link processScanResult}. */
export type ProcessScanResultParams = {
    /** Range currently being scanned; an advanced copy is returned by processScanResult. */
    current_range: CacheRange;
    /** Latest scan result; error responses and `undefined` leave the cache untouched. */
    result: ScanResult | ErrorResponse | undefined;
    /** Invoked after the cache file has been rewritten, with the list of changed outputs. */
    cacheChanged: CacheChangedCallback;
    /** Invoked when applying the result throws (treated as a catastrophic reorg). */
    catastrophic_reorg_cb: () => void;
    /** Optional secret spend key; enables key-image computation and own-spend detection. */
    secret_spend_key?: string;
    /** Directory/filename prefix used to locate the cache file. */
    pathPrefix?: string;
    /** When set, the last cached range is overwritten with `current_range` before processing. */
    use_master_current_range?: boolean;
};
/** Applies one scan result to the on-disk cache and returns the (possibly advanced) range. */
export declare function processScanResult(params: ProcessScanResultParams): Promise<CacheRange>;
/** A key image observed on chain, together with where the spend happened. */
export type OnchainKeyImage = {
    key_image_hex: KeyImage;
    /** Index of the input within its transaction. */
    relative_index: number;
    tx_hash: string;
    block_hash: string;
    block_height: number;
    block_timestamp: number;
};
/** Decoded result of one getblocks.bin scan step. */
export type ScanResult = {
    /** Outputs detected as belonging to this wallet. */
    outputs: Output[];
    /** Every key image seen in the scanned blocks (ours and everyone else's). */
    all_key_images: OnchainKeyImage[];
    new_height: number;
    primary_address: string;
    /** Hash/height/timestamp info of the scanned blocks, in scan order. */
    block_infos: BlockInfo[];
    /** Chain height reported by the daemon. */
    daemon_height: number;
};
export type EmptyScanResult = {};
/** Height to fast-forward to when a range was already scanned. */
export type FastForward = number;
/**
 * we will await async callbacks. convenient way to halt a sync + feed back the key image list,
 * to look out for our own spends before proceeding the scan. This happens in the processScanResult function.
 */
export type ScanResultCallback = ((result: ScanResult | ErrorResponse | EmptyScanResult) => FastForward | void) | ((result: ScanResult | ErrorResponse | EmptyScanResult) => Promise<FastForward | void>);
/** Reconciles a scan result with the cached ranges; hands off to reorg handling on mismatch. */
export declare function updateScanHeight(current_range: CacheRange, result: ScanResult, cache: ScanCache): [CacheRange, ChangedOutput[]];
/** Inserts `newRange` into the cache, merging and fast-forwarding over known ranges. */
export declare function makeNewRange(newRange: CacheRange, cache: ScanCache): CacheRange;
/** Adds newly found outputs to the cache, flagging burnt duplicates (burning-bug protection). */
export declare function detectOutputs(result: ScanResult, cache: ScanCache, spend_private_key?: string): Promise<ChangedOutput[]>;
/** Marks cached outputs as spent when one of our key images appears on chain. */
export declare function detectOwnspends(result: ScanResult, cache: ScanCache): ChangedOutput[];
/** Height at which an output unlocks (miner outputs wait 60 blocks, others 10). */
export declare function unlockedAtHeight(output: Output): number;
/** Output is consumed by a locally broadcast, not yet mined spend. */
export type PrePending = {
    status: "prepending";
};
/** Output is still locked until `unlock_height`. */
export type Pending = {
    status: "pending";
    unlock_height: number;
};
export type Spent = {
    status: "spent";
};
/** Duplicate stealth address; this copy must never be spent. */
export type Burnt = {
    status: "burnt";
};
export type Reorged = {
    status: "reorged";
};
export type Spendable = {
    status: "spendable";
};
/** Discriminated union over the lifecycle states of an output. */
export type OutputStatus = PrePending | Pending | Spent | Burnt | Reorged | Spendable;
/** Derives the lifecycle status of an output at `current_height`. */
export declare function outputStatus(output: Output, cache: ScanCache, current_height: number): OutputStatus;
/** True when {@link outputStatus} reports "spendable". */
export declare function spendable(output: Output, cache: ScanCache, current_height: number): boolean;
@@ -0,0 +1,213 @@
1
+ import { computeKeyImage } from "./computeKeyImage";
2
+ import { mergeRanges, findRange, readCacheFileDefaultLocation, lastRange, writeCacheToFile, } from "./scanCache";
3
+ import {} from "../../node-interaction/binaryEndpoints";
4
+ import { handleReorg } from "./reorg";
5
/**
 * Applies one scan result to the on-disk wallet cache.
 *
 * Loads the cache for the result's primary address, reconciles the scanned
 * range, detects new outputs (and own spends, when a spend key is given),
 * persists the cache, and finally notifies `cacheChanged`.
 * Returns the (possibly advanced) current range.
 */
export async function processScanResult(params) {
    const { result, cacheChanged, secret_spend_key, pathPrefix, use_master_current_range, } = params;
    let range = params.current_range;
    // Error responses / empty results carry no primary_address: nothing to do.
    if (!result || !("primary_address" in result)) {
        return range;
    }
    const cache = await readCacheFileDefaultLocation(result.primary_address, pathPrefix);
    if (!cache) {
        throw new Error(`cache not found for primary address: ${result.primary_address} and path prefix: ${pathPrefix}`);
    }
    if ("daemon_height" in result) {
        cache.daemon_height = result.daemon_height;
    }
    if (use_master_current_range) {
        // Mirror the caller-supplied range into the last cached range.
        const last = lastRange(cache.scanned_ranges);
        if (last) {
            last.block_hashes = range.block_hashes;
            last.end = range.end;
        }
        else {
            cache.scanned_ranges.push(range);
        }
    }
    try {
        if ("new_height" in result) {
            const [nextRange, changed_outputs] = updateScanHeight(range, result, cache);
            range = nextRange;
            const detected = await detectOutputs(result, cache, secret_spend_key);
            changed_outputs.push(...detected);
            if (secret_spend_key) {
                changed_outputs.push(...detectOwnspends(result, cache));
            }
            // Persist first, then notify listeners with the accumulated changes.
            await writeCacheToFile(cache, pathPrefix);
            await cacheChanged({
                newCache: cache,
                changed_outputs,
            });
        }
        return range;
    }
    catch (e) {
        // A throw while applying the result is treated as a catastrophic reorg.
        params.catastrophic_reorg_cb();
        throw e;
    }
}
46
/**
 * Reconciles the freshly scanned blocks with the cached scan ranges.
 *
 * Returns the next current range plus any outputs changed by reorg handling.
 * Throws when the current range cannot be matched against the cache, or when
 * the scan appears to have moved backwards (faulty node / catastrophic reorg).
 */
export function updateScanHeight(current_range, result, cache) {
    let last_block_hash_of_result = result.block_infos.at(-1);
    let current_blockhash = current_range?.block_hashes.at(0);
    if (!current_blockhash)
        throw new Error("current_range passed to updateScanHeight was malformed. block_hashes is empty");
    if (!last_block_hash_of_result)
        return [current_range, []]; // block_infos empty, no change (we are at tip and there was no new block)
    // if last blockhash is undefined it means there was not reorg, we are at tip, block_infos is empty ( no new blocks )
    const oldRange = findRange(cache.scanned_ranges, current_blockhash.block_height);
    if (!oldRange)
        throw new Error(`could not find scan range for height ${current_blockhash.block_height},
    that means the blocks in the response from getBlocks.bin do not overlap
    with the scanned ranges in the cache. This should not happen, as even if
    we are starting from a new start_height that has been supplied to scanWithCache,
    it has been found as an existing range in the cache, or it has been
    added as a new range before we started scannning.`);
    // now we need to find the block_infos of old range in the new geblocksbin response result block_infos
    // if we cant find the new range, there was a reorg and we need to clean all outputs after that and log what happened
    let first_block_hash = result.block_infos.at(0);
    if (!first_block_hash)
        return [current_range, []]; // should never happen, if there is last_block_hash there should be first_block_hash
    // if the first block hash in the response is not the same as the last block hash in the old range, there was a reorg
    if (!(first_block_hash.block_hash === current_blockhash.block_hash)) {
        return handleReorg(current_range, result, cache, oldRange);
    }
    // scan only happens in one direction,
    // to scan earlier ranges: abort and recall with smaller start_height
    // getblocksbin will return up to 1000 blocks at once
    // so this should never happen, except if we just popped a block (but that case is handled above in the reorg case)
    if (current_blockhash.block_height > last_block_hash_of_result.block_height)
        throw new Error(`current scan height was larger than block height of last block from latest scan result.
    Most likely connected to faulty node / catastrophic reorg.
    current height: ${current_blockhash.block_height}, new height: ${last_block_hash_of_result.block_height}`);
    // 1. add new scanned range
    // The new range keeps three reference hashes: [scan tip, anchor_candidate, anchor].
    // The anchor is an older block kept around so a later scan can detect deep reorgs.
    let anchor = undefined;
    let anchor_candidate = undefined;
    if (oldRange.block_hashes.length >= 3) {
        const old_anchor = oldRange.block_hashes.at(-1);
        const old_anchor_candidate = oldRange.block_hashes.at(-2);
        anchor = old_anchor;
        anchor_candidate = old_anchor_candidate;
        if (
        // if the old range has an anchor, and the anchor is more than 200 blocks old
        typeof old_anchor?.block_height === "number" &&
            current_blockhash.block_height - old_anchor.block_height > 200) {
            anchor = old_anchor_candidate; // use the anchor_candidate as anchor
            // new anchor_candidate: is the one 100 blocks in, or the old scan tip
            anchor_candidate =
                result.block_infos.slice(-100)[0] || oldRange?.block_hashes.at(0); // use the old scan tip as anchor candidate
        }
    }
    // if there is no old anchor, use the one 100 blocks in, or the last block hash
    anchor =
        anchor || result.block_infos.slice(-100)[0] || last_block_hash_of_result;
    // carry over the old anchor candidate or use the last block
    anchor_candidate = anchor_candidate || last_block_hash_of_result;
    const newRange = {
        start: current_blockhash.block_height,
        end: last_block_hash_of_result.block_height,
        block_hashes: [last_block_hash_of_result, anchor_candidate, anchor],
    };
    return [makeNewRange(newRange, cache), []];
}
109
/**
 * Inserts the freshly scanned range into the cache, merges overlapping
 * ranges, and fast-forwards to the merged range that now contains its end.
 */
export function makeNewRange(newRange, cache) {
    cache.scanned_ranges.push(newRange);
    // merge existing ranges & find end of current range
    cache.scanned_ranges = mergeRanges(cache.scanned_ranges);
    // Landing inside an already-scanned range lets us skip ahead to its end.
    const merged = findRange(cache.scanned_ranges, newRange.end);
    return merged ?? newRange;
}
119
// Assumption: result is new, cache is still old. (this + detectOwnspends() turns the cache new, based on the scan result)
/**
 * Adds the outputs found in a scan result to the cache and returns the
 * list of changed outputs for the cacheChanged callback.
 *
 * Burning-bug protection: two outputs sharing a stealth address are
 * duplicates; only the one with the LOWER on-chain index stays live, the
 * other is flagged `burned` and must never be spent.
 * When a spend key is provided, each added output's key image is computed
 * and cached so detectOwnspends() can later recognize our own spends.
 */
export async function detectOutputs(result, cache, spend_private_key) {
    let changed_outputs = [];
    for (const output of result.outputs) {
        // 0. prevent burning bug and avoid overwriting earlier found outputs
        const duplicate = Object.values(cache.outputs).find((ex) => ex.stealth_address === output.stealth_address && !ex.burned);
        let burned = false;
        // Same stealth address but different chain index => one of the two is burnt.
        if (duplicate?.index_on_blockchain !== output.index_on_blockchain) {
            burned = true;
        }
        if (duplicate && burned) {
            //mark burned output: the copy with the higher index is the burnt one
            const existingIndex = duplicate.index_on_blockchain;
            const liveIndex = Math.min(existingIndex, output.index_on_blockchain);
            if (liveIndex === existingIndex) {
                output.burned = existingIndex;
            }
            else {
                duplicate.burned = output.index_on_blockchain;
            }
            // here we add to changed_outputs with reason burned
            changed_outputs.push({ output, change_reason: "burned" });
        }
        else if (duplicate && !burned) {
            continue; // if it is just a duplicate we continue the loop to avoid overwriting (erasing spent status)
        }
        // 1. add to outputs cache 2. add to added array for cacheChanged callback
        const globalId = output.index_on_blockchain.toString();
        cache.outputs[globalId] = output;
        //here we add to changed_outputs with reason added
        changed_outputs.push({ output, change_reason: "added" });
        // 3. if this is not view only, add the key image to the cache, to find transactions spent by this wallet
        if (spend_private_key) {
            let keyImage = await computeKeyImage(output, spend_private_key);
            if (keyImage) {
                cache.own_key_images[keyImage] = globalId;
            }
        }
    }
    return changed_outputs;
}
160
+ // Assumption: result is new, cache is still old. (this + detectOutputs() turns the catch new, based on the scan result)
161
+ export function detectOwnspends(result, cache) {
162
+ let changed_outputs = [];
163
+ for (const onchainKeyImage of result.all_key_images) {
164
+ if (onchainKeyImage.key_image_hex in cache.own_key_images) {
165
+ // this is one of ours
166
+ const globalId = cache.own_key_images[onchainKeyImage.key_image_hex];
167
+ // add the information where we spent it to the output
168
+ cache.outputs[globalId].spent_relative_index =
169
+ onchainKeyImage.relative_index;
170
+ cache.outputs[globalId].spent_in_tx_hash = onchainKeyImage.tx_hash;
171
+ cache.outputs[globalId].spent_block_height = onchainKeyImage.block_height;
172
+ cache.outputs[globalId].spent_block_timestamp =
173
+ onchainKeyImage.block_timestamp;
174
+ //here we add to changed_outputs with reason ownspend
175
+ changed_outputs.push({
176
+ output: cache.outputs[globalId],
177
+ change_reason: "ownspend",
178
+ });
179
+ }
180
+ }
181
+ return changed_outputs;
182
+ }
183
+ export function unlockedAtHeight(output) {
184
+ if (output.is_miner_tx) {
185
+ return output.block_height + 60;
186
+ }
187
+ else {
188
+ return output.block_height + 10;
189
+ }
190
+ }
191
+ export function outputStatus(output, cache, current_height) {
192
+ if (
193
+ // order matters, check this before "spent" check + "pending" check
194
+ cache.pending_spent_utxos &&
195
+ cache.pending_spent_utxos[output.index_on_blockchain.toString()]) {
196
+ return { status: "prepending" };
197
+ }
198
+ if (typeof output.burned === "number") {
199
+ return { status: "burnt" };
200
+ }
201
+ if (typeof output.spent_in_tx_hash === "string") {
202
+ return { status: "spent" };
203
+ }
204
+ const unlock_height = unlockedAtHeight(output);
205
+ if (unlock_height > current_height) {
206
+ return { status: "pending", unlock_height };
207
+ }
208
+ return { status: "spendable" };
209
+ }
210
+ export function spendable(output, cache, current_height) {
211
+ const status = outputStatus(output, cache, current_height);
212
+ return status.status === "spendable";
213
+ }
@@ -0,0 +1,60 @@
1
import { ViewPair, type Output } from "../../api";
import type { ScanCache, Subaddress, TxLog } from "./scanCache";
import { type OutputStatus } from "./scanResult";
/** Input bundle for {@link writeStatsFileDefaultLocation}. */
export type WriteStatsFileParams = {
    primary_address: string;
    pathPrefix?: string | undefined;
    /** Mutates the loaded (or freshly initialized) stats before they are written back. */
    writeCallback: (stats: ScanStats) => void | Promise<void>;
};
/** Loads/initializes the stats file, applies `writeCallback`, writes it back atomically. */
export declare function writeStatsFileDefaultLocation(params: WriteStatsFileParams): Promise<ScanStats>;
/** Default stats-file path: `<pathPrefix><primary_address>_stats.json`. */
export declare function statsFileDefaultLocation(primary_address: string, pathPrefix?: string): string;
/** Parses a stats file, reviving serialized bigint amounts; undefined when unreadable. */
export declare function readStatsFile(cacheFilePath: string): Promise<ScanStats | undefined>;
/** {@link readStatsFile} at the default location for this wallet. */
export declare function readStatsFileDefaultLocation(primary_address: string, pathPrefix?: string): Promise<ScanStats | undefined>;
/** Adds a spent output's amount to the received totals (primary or subaddress). */
export declare function addSpentAmount(scan_stats: ScanStats, output: Output): void;
/** Adds a spendable output's amount to the spendable total and the owner's received total. */
export declare function addSpendableAmount(scan_stats: ScanStats, output: Output): void;
/** Adds a still-locked output's amount to the pending totals. */
export declare function addPendingAmount(scan_stats: ScanStats, output: Output): void;
/** Stringified subaddress minor index, used as a record key. */
export type SubaddressMinorIndex = string;
/** Amount in atomic units. */
export type Amount = bigint;
/** A transaction reconstructed from the wallet's own inputs/outputs. */
export type FoundTransaction = {
    /** Net effect on the wallet: received outputs minus spent inputs. */
    amount: bigint;
    inputs: Output[];
    outputs: Output[];
    tx_hash: string;
    status: OutputStatus;
    /** Present when the transaction originated from this wallet's own tx log. */
    txlog?: TxLog;
};
export type TxHash = string;
/** Aggregated wallet statistics derived from the scan cache. */
export type ScanStats = {
    height: number;
    total_spendable_amount: bigint;
    total_pending_amount: bigint;
    primary_address: string;
    primary_address_received_amount?: bigint;
    primary_address_pending_amount?: bigint;
    subaddresses: Record<SubaddressMinorIndex, Subaddress>;
    found_transactions: Record<TxHash, FoundTransaction>;
    /** Tx hashes in the order they were encountered while walking the cache. */
    ordered_transactions: TxHash[];
};
/** Rebuilds found_transactions/ordered_transactions and the amount totals from the cache. */
export declare function processFoundTransactions(cache: ScanCache, stats: ScanStats, current_height: number): void;
/** Currently a no-op placeholder (see implementation). */
export declare function removeChangeFromPrimaddressAmounts(stats: ScanStats): void;
/** Copies the cache's subaddresses into the stats, resetting their running amounts. */
export declare function addSubAddressesFromCacheToScanStats(cache: ScanCache, stats: ScanStats): void;
/** Derives and adds subaddresses 1..highestSubaddressMinor that are missing from the stats. */
export declare function addMissingSubAddressesToScanStats(stats: ScanStats, view_pair: ViewPair, highestSubaddressMinor?: number, created_at_height?: number): void;
/** True when `address` is the wallet's primary address or one of its cached subaddresses. */
export declare function isSelfSpent(address: string, cache: ScanCache): boolean;
/** Subtracts each transaction's net amount from the primary-address running totals. */
export declare function removeChangeFromPrimAddressReceivedAmounts(stats: ScanStats): void;
/** A broadcast-but-unconfirmed outgoing transaction reconstructed from a tx log. */
export type PrePendingTx = {
    amount: bigint;
    inputs: Output[];
    txlog: TxLog;
    inputSum: bigint;
    outWardPaymentSum: bigint;
    self_spent: boolean;
    destination_address: string;
};
/** Sums tx-log payments that leave the wallet (non-self addresses). */
export declare function processTxlogPayments(txlog: TxLog, cache: ScanCache): bigint;
/** Sums tx-log input amounts; flags inputs already marked spent in the cache. */
export declare function processTxlogInputs(txlog: TxLog, cache: ScanCache): {
    inputSum: bigint;
    alreadyRecognizedAsSpend: boolean;
};
/** Applies pending outgoing transactions from the tx logs to the stats totals. */
export declare function processTxlogs(cache: ScanCache, stats: ScanStats): void;
/** Rebuilds the stats file from the scan cache and writes it back. */
export declare function alignScanStatsWithCache(cache: ScanCache, view_pair: ViewPair, primary_address: string, pathPrefix?: string, highestSubaddressMinor?: number, current_scan_tip_height?: number): Promise<ScanStats>;
/** Zeroes all accumulated stats fields so they can be rebuilt from the cache. */
export declare function resetStats(stats: ScanStats): void;
@@ -0,0 +1,273 @@
1
+ import { stat } from "fs";
2
+ import { atomicWrite, ViewPair } from "../../api";
3
+ import { outputStatus } from "./scanResult";
4
/**
 * Loads the stats file (or initializes an empty one), lets `writeCallback`
 * mutate it, then atomically writes it back. BigInt values are serialized
 * as decimal strings. Returns the (mutated) stats object.
 */
export async function writeStatsFileDefaultLocation(params) {
    const existing = await readStatsFileDefaultLocation(params.primary_address, params.pathPrefix);
    const stats = existing ?? {
        height: 0,
        total_spendable_amount: 0n,
        total_pending_amount: 0n,
        primary_address: params.primary_address,
        found_transactions: {},
        ordered_transactions: [],
        subaddresses: {},
    };
    await params.writeCallback(stats);
    // JSON cannot represent bigint, so serialize those values as strings.
    const serialized = JSON.stringify(stats, (key, value) => (typeof value === "bigint" ? value.toString() : value), 2);
    await atomicWrite(statsFileDefaultLocation(stats.primary_address, params.pathPrefix), serialized);
    return stats;
}
20
/** Default stats-file path: `<pathPrefix><primary_address>_stats.json`. */
export function statsFileDefaultLocation(primary_address, pathPrefix) {
    const prefix = pathPrefix ?? "";
    return prefix + primary_address + "_stats.json";
}
23
// Keys whose values are serialized BigInts in the stats JSON file.
// writeStatsFileDefaultLocation stringifies every bigint, so every
// amount-like key of ScanStats and its nested Output / FoundTransaction /
// Subaddress records must be revived here. The previous list missed
// total_spendable_amount, received_amount and the primary_address_* keys
// (while listing a non-existent "total_amount"), leaving those fields as
// strings — a later `+= bigint` on them would throw a TypeError.
// "total_amount" is kept for compatibility with files written before the fix.
const BIGINT_STATS_KEYS = new Set([
    "amount",
    "pending_amount",
    "received_amount",
    "total_amount",
    "total_pending_amount",
    "total_spendable_amount",
    "primary_address_received_amount",
    "primary_address_pending_amount",
]);
/**
 * Reads and parses a stats file, reviving serialized bigint amounts.
 * Returns undefined when the file is missing or unreadable.
 */
export async function readStatsFile(cacheFilePath) {
    const jsonString = await Bun.file(cacheFilePath)
        .text()
        .catch(() => undefined);
    if (!jsonString)
        return undefined;
    return JSON.parse(jsonString, (key, value) => BIGINT_STATS_KEYS.has(key) ? BigInt(value) : value);
}
39
/** Reads the stats file from its default location for the given wallet. */
export async function readStatsFileDefaultLocation(primary_address, pathPrefix) {
    const path = statsFileDefaultLocation(primary_address, pathPrefix);
    return readStatsFile(path);
}
42
+ export function addSpentAmount(scan_stats, output) {
43
+ if (!output.subaddress_index) {
44
+ if (!scan_stats.primary_address_received_amount)
45
+ scan_stats.primary_address_received_amount = 0n;
46
+ scan_stats.primary_address_received_amount += output.amount;
47
+ return;
48
+ }
49
+ const statsSubaddress = scan_stats.subaddresses[output.subaddress_index.toString()];
50
+ if (!statsSubaddress)
51
+ return;
52
+ if (typeof statsSubaddress.received_amount === "undefined")
53
+ statsSubaddress.received_amount = 0n;
54
+ statsSubaddress.received_amount += output.amount;
55
+ }
56
+ export function addSpendableAmount(scan_stats, output) {
57
+ scan_stats.total_spendable_amount += output.amount;
58
+ if (!output.subaddress_index) {
59
+ if (!scan_stats.primary_address_received_amount)
60
+ scan_stats.primary_address_received_amount = 0n;
61
+ scan_stats.primary_address_received_amount += output.amount;
62
+ return;
63
+ }
64
+ const statsSubaddress = scan_stats.subaddresses[output.subaddress_index.toString()];
65
+ if (!statsSubaddress)
66
+ return;
67
+ if (typeof statsSubaddress.received_amount === "undefined")
68
+ statsSubaddress.received_amount = 0n;
69
+ statsSubaddress.received_amount += output.amount;
70
+ }
71
+ export function addPendingAmount(scan_stats, output) {
72
+ scan_stats.total_pending_amount += output.amount;
73
+ if (!output.subaddress_index) {
74
+ if (!scan_stats.primary_address_pending_amount)
75
+ scan_stats.primary_address_pending_amount = 0n;
76
+ scan_stats.primary_address_pending_amount += output.amount;
77
+ return;
78
+ }
79
+ const statsSubaddress = scan_stats.subaddresses[output.subaddress_index.toString()];
80
+ if (!statsSubaddress)
81
+ return;
82
+ if (typeof statsSubaddress.pending_amount === "undefined")
83
+ statsSubaddress.pending_amount = 0n;
84
+ statsSubaddress.pending_amount += output.amount;
85
+ }
86
/**
 * Rebuilds stats.found_transactions / stats.ordered_transactions from the
 * cached outputs and accumulates the spendable/pending/received totals.
 *
 * Walks every cached output once: groups outputs by receiving tx, attaches
 * spent outputs as inputs of the spending tx (creating it with a
 * placeholder status when the wallet received nothing back from it), and
 * books the output's amount into the totals according to its status.
 */
export function processFoundTransactions(cache, stats, current_height) {
    // Rebuild from scratch on every call.
    stats.found_transactions = {};
    stats.ordered_transactions = [];
    Object.entries(cache.outputs).forEach(([_, output]) => {
        const status = outputStatus(output, cache, current_height || 0);
        const in_ordered_transactions = stats.ordered_transactions.includes(output.tx_hash);
        if (!in_ordered_transactions)
            stats.ordered_transactions.push(output.tx_hash);
        // Group received outputs by the transaction that created them.
        const receivedTx = stats.found_transactions[output.tx_hash];
        if (receivedTx) {
            receivedTx.outputs.push(output);
            receivedTx.amount += output.amount;
            // we possibly first added the tx_hash when we found a spent output
            // placeholder pending status that we added in "handle spent case"
            // needs to be updated
            receivedTx.status = status;
        }
        else {
            stats.found_transactions[output.tx_hash] = {
                status,
                inputs: [],
                amount: output.amount,
                outputs: [output],
                tx_hash: output.tx_hash,
            };
        }
        // handle spent case: attach this output as an input of the tx that spent it
        if (output.spent_in_tx_hash) {
            // Look up the tx log entry (if any) of the spend via pending_spent_utxos.
            const spent_utxo_value = cache.pending_spent_utxos
                ? cache.pending_spent_utxos[output.index_on_blockchain]
                : null;
            const txlog = cache.tx_logs && spent_utxo_value
                ? cache.tx_logs[spent_utxo_value]
                : undefined;
            const spentTx = stats.found_transactions[output.spent_in_tx_hash];
            if (spentTx) {
                spentTx.amount -= output.amount;
                spentTx.inputs.push(output);
            }
            else {
                // Spending tx not seen yet: create it with a placeholder status
                // (corrected above if one of its outputs is ours).
                stats.found_transactions[output.spent_in_tx_hash] = {
                    status: { status: "pending", unlock_height: 0 },
                    inputs: [output],
                    amount: -output.amount,
                    outputs: [],
                    tx_hash: output.spent_in_tx_hash,
                    txlog,
                };
            }
        }
        // Accumulate the totals according to the output's lifecycle status.
        if (status.status === "spendable")
            addSpendableAmount(stats, output);
        else if (status.status === "pending")
            addPendingAmount(stats, output);
        else if (status.status === "spent")
            addSpentAmount(stats, output);
    });
}
144
// Intentionally a no-op. Kept so the exported API stays stable; the actual
// change-removal logic lives in removeChangeFromPrimAddressReceivedAmounts
// below — presumably this older entry point was superseded (TODO confirm).
export function removeChangeFromPrimaddressAmounts(stats) { }
145
+ export function addSubAddressesFromCacheToScanStats(cache, stats) {
146
+ // add cache subaddresses to statsfile
147
+ for (const cacheSub of cache.subaddresses || []) {
148
+ //if (!stats.subaddresses[cacheSub.minor.toString()]) <-- uncommented to overwrite existing
149
+ stats.subaddresses[cacheSub.minor.toString()] = {
150
+ minor: cacheSub.minor,
151
+ address: cacheSub.address,
152
+ created_at_height: cacheSub.created_at_height,
153
+ created_at_timestamp: cacheSub.created_at_timestamp,
154
+ received_amount: 0n,
155
+ pending_amount: 0n,
156
+ };
157
+ }
158
+ }
159
+ export function addMissingSubAddressesToScanStats(stats, view_pair, highestSubaddressMinor = 1, created_at_height = 0) {
160
+ // add subaddresses to statsfile that are not in the cache
161
+ let minor = 1;
162
+ //const highestSubaddressMinor = walletSettings.subaddress_index || 1;
163
+ while (minor <= highestSubaddressMinor) {
164
+ if (stats.subaddresses[minor.toString()]) {
165
+ minor++;
166
+ continue;
167
+ }
168
+ const subaddress = view_pair.makeSubaddress(minor);
169
+ //const created_at_height =
170
+ // lastRange(scanCacheOpen._cache.scanned_ranges)?.end || 0;
171
+ const created_at_timestamp = new Date().getTime();
172
+ const new_subaddress = {
173
+ minor,
174
+ address: subaddress,
175
+ created_at_height,
176
+ created_at_timestamp,
177
+ not_yet_included: true,
178
+ received_amount: 0n,
179
+ pending_amount: 0n,
180
+ };
181
+ stats.subaddresses[minor.toString()] = new_subaddress;
182
+ minor++;
183
+ }
184
+ }
185
+ export function isSelfSpent(address, cache) {
186
+ if (address === cache.primary_address)
187
+ return true;
188
+ for (const subaddress of cache.subaddresses || []) {
189
+ if (subaddress.address === address)
190
+ return true;
191
+ }
192
+ return false;
193
+ }
194
+ export function removeChangeFromPrimAddressReceivedAmounts(stats) {
195
+ if (!stats.primary_address_pending_amount)
196
+ stats.primary_address_pending_amount = 0n;
197
+ if (!stats.primary_address_received_amount)
198
+ stats.primary_address_received_amount = 0n;
199
+ for (const tx of stats.ordered_transactions) {
200
+ const transaction = stats.found_transactions[tx];
201
+ if (transaction.status.status === "pending" ||
202
+ transaction.status.status === "prepending")
203
+ stats.primary_address_pending_amount -= transaction.amount;
204
+ else if (transaction.status.status === "spendable" ||
205
+ transaction.status.status === "spent")
206
+ stats.primary_address_received_amount -= transaction.amount;
207
+ }
208
+ }
209
/**
 * Sums the payments of a tx log that actually leave the wallet, i.e.
 * skipping payments to the primary address or one of our subaddresses.
 */
export function processTxlogPayments(txlog, cache) {
    let outgoing = 0n;
    for (const payment of txlog.payments) {
        if (isSelfSpent(payment.address, cache)) {
            continue; // self-payment: no funds leave the wallet
        }
        outgoing += BigInt(payment.amount);
    }
    return outgoing;
}
218
+ export function processTxlogInputs(txlog, cache) {
219
+ let alreadyRecognizedAsSpend = false;
220
+ let inputSum = 0n;
221
+ for (const inputId of txlog.inputs_index) {
222
+ const input = cache.outputs[inputId];
223
+ if (typeof input.spent_in_tx_hash === "string") {
224
+ alreadyRecognizedAsSpend = true;
225
+ continue;
226
+ }
227
+ inputSum += input.amount;
228
+ }
229
+ return { inputSum, alreadyRecognizedAsSpend };
230
+ }
231
/**
 * Applies broadcast-but-unconfirmed outgoing transactions from the tx logs
 * to the stats: their inputs leave the spendable total, and the remainder
 * after outward payments (the expected change) is booked as pending.
 */
export function processTxlogs(cache, stats) {
    for (const txlog of cache.tx_logs || []) {
        // Only transactions that were successfully broadcast count.
        if (!txlog?.sendResult || txlog.sendResult.status !== "OK") {
            continue;
        }
        const { inputSum, alreadyRecognizedAsSpend } = processTxlogInputs(txlog, cache);
        // Once the spend is visible on chain the scan already accounted for it.
        if (alreadyRecognizedAsSpend) {
            continue;
        }
        const outWardPaymentSum = processTxlogPayments(txlog, cache);
        stats.total_spendable_amount -= inputSum;
        stats.total_pending_amount += inputSum - outWardPaymentSum;
    }
}
246
/**
 * Rebuilds the stats file from the scan cache and writes it back.
 * All totals are reset and recomputed from scratch on every call; this is
 * cheap relative to the disk I/O involved. Returns the rebuilt stats.
 */
export async function alignScanStatsWithCache(cache, view_pair, primary_address, pathPrefix, highestSubaddressMinor = 1, current_scan_tip_height = 0) {
    return writeStatsFileDefaultLocation({
        primary_address,
        pathPrefix,
        writeCallback: async (stats) => {
            resetStats(stats);
            addSubAddressesFromCacheToScanStats(cache, stats);
            addMissingSubAddressesToScanStats(stats, view_pair, highestSubaddressMinor, current_scan_tip_height);
            processFoundTransactions(cache, stats, current_scan_tip_height);
            processTxlogs(cache, stats);
            stats.height = current_scan_tip_height;
        },
    });
}
266
+ export function resetStats(stats) {
267
+ stats.height = 0;
268
+ stats.total_spendable_amount = 0n;
269
+ stats.total_pending_amount = 0n;
270
+ stats.found_transactions = {};
271
+ stats.ordered_transactions = [];
272
+ stats.subaddresses = {};
273
+ }
@@ -0,0 +1 @@
1
/** Worker entrypoint source as a string constant; empty here — presumably inlined at bundle time (TODO confirm against the build setup). */
export declare const workerMainCode = "";