hugin-utils 0.1.3 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +126 -0
- package/challenge/backends/js_hashfn.js +22 -0
- package/challenge/backends/native_batch.js +25 -0
- package/challenge/backends/node_worker.js +103 -0
- package/challenge/core.js +160 -0
- package/challenge/index.js +6 -0
- package/challenge/node_worker.js +2 -0
- package/challenge/node_worker_process.cjs +92 -0
- package/index.js +239 -132
- package/package.json +15 -1
- package/test/challenge.test.cjs +67 -0
package/README.md
CHANGED
|
@@ -7,3 +7,129 @@ PoW utils for hugin node messages.
|
|
|
7
7
|
```js
|
|
8
8
|
const { insertNonce, meetsTarget, findShare } = require('hugin-utils');
|
|
9
9
|
```
|
|
10
|
+
|
|
11
|
+
## Reusable challenge engine (desktop + mobile)
|
|
12
|
+
|
|
13
|
+
### Engine API
|
|
14
|
+
|
|
15
|
+
```js
|
|
16
|
+
const {
|
|
17
|
+
message_challenge,
|
|
18
|
+
create_pow_scheduler,
|
|
19
|
+
create_rate_policy
|
|
20
|
+
} = require('hugin-utils/challenge')
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
`message_challenge({ get_job, backend, message_hash, required_shares, nonce_tag_bits, scheduler, rate_policy, freshness_policy, log })`
|
|
24
|
+
|
|
25
|
+
- `get_job() -> Promise<job|null>`: provide the latest pool job `{ job_id, blob, target }`
|
|
26
|
+
- `backend.find_share(...) -> Promise<share|null>`: platform-specific PoW backend
|
|
27
|
+
- `scheduler`: shared instance to enforce total H/s budget across concurrent message sends
|
|
28
|
+
- `rate_policy({ active_tasks, elapsed_ms }) -> { hashes_per_second, time_budget_ms, in_phase1 }`
|
|
29
|
+
- `freshness_policy(job) -> boolean`: optional prevId freshness check
|
|
30
|
+
|
|
31
|
+
### Desktop backend (Node worker)
|
|
32
|
+
|
|
33
|
+
```js
|
|
34
|
+
const { create_node_worker_backend } = require('hugin-utils/challenge/node_worker')
|
|
35
|
+
const backend = create_node_worker_backend()
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Note: the Node-worker backend uses `kryptokrona-utils` for `cn_turtle_lite_slow_hash_v2`. It is declared as an **optional peer dependency** (desktop already provides it).
|
|
39
|
+
|
|
40
|
+
### Mobile backend (native batch scan)
|
|
41
|
+
|
|
42
|
+
The mobile backend uses a native batch-scan interface. Implement a native function with the shape:
|
|
43
|
+
|
|
44
|
+
- `native_find_share(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue) -> { nonceHex, resultHex } | null`
|
|
45
|
+
|
|
46
|
+
Then wrap it:
|
|
47
|
+
|
|
48
|
+
```js
|
|
49
|
+
const { create_native_batch_backend } = require('hugin-utils/challenge')
|
|
50
|
+
const backend = create_native_batch_backend({ native_find_share })
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
#### React Native example
|
|
54
|
+
|
|
55
|
+
Example using `NativeModules` (you can adapt this to TurboModules/JSI).
|
|
56
|
+
|
|
57
|
+
```js
|
|
58
|
+
import { NativeModules } from 'react-native'
|
|
59
|
+
import {
|
|
60
|
+
message_challenge,
|
|
61
|
+
create_pow_scheduler,
|
|
62
|
+
create_rate_policy,
|
|
63
|
+
create_native_batch_backend
|
|
64
|
+
} from 'hugin-utils/challenge'
|
|
65
|
+
|
|
66
|
+
// Your native module must expose:
|
|
67
|
+
// findShare(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue)
|
|
68
|
+
const { HuginPow } = NativeModules
|
|
69
|
+
|
|
70
|
+
const backend = create_native_batch_backend({
|
|
71
|
+
native_find_share: async (jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue) => {
|
|
72
|
+
const res = await HuginPow.findShare(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue)
|
|
73
|
+
// Expect { nonceHex, resultHex } or null
|
|
74
|
+
return res
|
|
75
|
+
}
|
|
76
|
+
})
|
|
77
|
+
|
|
78
|
+
// Share across concurrent sends to enforce total H/s cap.
|
|
79
|
+
const scheduler = create_pow_scheduler()
|
|
80
|
+
const rate_policy = create_rate_policy({
|
|
81
|
+
total_hashes_per_second_cap: 1500,
|
|
82
|
+
phase1_hashes_per_second_cap: 950,
|
|
83
|
+
phase2_hashes_per_second_cap: 250
|
|
84
|
+
})
|
|
85
|
+
|
|
86
|
+
export async function challenge_message({ get_job, message_hash }) {
|
|
87
|
+
return await message_challenge({
|
|
88
|
+
get_job, // () => Promise<{ job_id, blob, target }|null>
|
|
89
|
+
backend,
|
|
90
|
+
message_hash, // string (the message id/hash you tag the nonce with)
|
|
91
|
+
required_shares: 1,
|
|
92
|
+
nonce_tag_bits: 4,
|
|
93
|
+
scheduler,
|
|
94
|
+
rate_policy
|
|
95
|
+
})
|
|
96
|
+
}
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
#### Mobile (easiest) when you only have `cn_pico` hash()
|
|
100
|
+
|
|
101
|
+
If your native side only exposes a single function like `cn_pico(blobHex) -> hashHex`, you can keep the nonce search loop in JS and just provide the hash function:
|
|
102
|
+
|
|
103
|
+
```js
|
|
104
|
+
import { NativeModules } from 'react-native'
|
|
105
|
+
import {
|
|
106
|
+
message_challenge,
|
|
107
|
+
create_pow_scheduler,
|
|
108
|
+
create_rate_policy,
|
|
109
|
+
create_js_hashfn_backend
|
|
110
|
+
} from 'hugin-utils/challenge'
|
|
111
|
+
|
|
112
|
+
const { HuginPow } = NativeModules
|
|
113
|
+
|
|
114
|
+
const backend = create_js_hashfn_backend({
|
|
115
|
+
// Must return a 64-char hex hash string
|
|
116
|
+
hash_fn: async (blobHex) => await HuginPow.cn_pico(blobHex)
|
|
117
|
+
})
|
|
118
|
+
|
|
119
|
+
const scheduler = create_pow_scheduler()
|
|
120
|
+
const rate_policy = create_rate_policy()
|
|
121
|
+
|
|
122
|
+
export async function challenge_message({ get_job, message_hash }) {
|
|
123
|
+
return await message_challenge({
|
|
124
|
+
get_job,
|
|
125
|
+
backend,
|
|
126
|
+
message_hash,
|
|
127
|
+
required_shares: 1,
|
|
128
|
+
nonce_tag_bits: 4,
|
|
129
|
+
scheduler,
|
|
130
|
+
rate_policy
|
|
131
|
+
})
|
|
132
|
+
}
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
Performance note: this is simplest to integrate, but can be significantly slower if `cn_pico()` is called over the classic RN bridge. Prefer JSI/TurboModules for high call rates, or implement the native batch-scan `findShare()` API for best performance.
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
const { findShare } = require('../..')
|
|
2
|
+
|
|
3
|
+
/**
 * Build a challenge backend from a single hash function (e.g. a native
 * `cn_pico(blobHex) -> hashHex` bridge). The nonce search loop itself runs
 * in JS via the shared findShare() helper, so only the hash call crosses
 * the platform boundary.
 */
function create_js_hashfn_backend({ hash_fn }) {
  if (typeof hash_fn !== 'function') throw new Error('hash_fn_missing')

  const find_share = async ({ job, hashes_per_second, time_budget_ms, nonce_tag_bits, nonce_tag_value }) => {
    // Random start nonce so concurrent/retried slices scan different ranges.
    const randomStart = Math.floor(Math.random() * 0xFFFFFFFF)
    const share = await findShare({
      job,
      startNonce: randomStart,
      hashesPerSecond: parseInt(hashes_per_second, 10),
      timeBudgetMs: parseInt(time_budget_ms, 10),
      nonceTagBits: nonce_tag_bits,
      nonceTagValue: nonce_tag_value,
      hashFn: async (blobHex) => await hash_fn(blobHex)
    })
    return share
  }

  return { find_share }
}
|
|
20
|
+
|
|
21
|
+
module.exports = { create_js_hashfn_backend }
|
|
22
|
+
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Wrap a platform-native batch nonce scanner as a challenge backend.
 *
 * `native_find_share(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue)`
 * must resolve to `{ nonceHex, resultHex }` (a few naming variants are
 * accepted) or null when the time budget expires without a share.
 * Note: `hashes_per_second` is accepted for interface parity but is not
 * forwarded — the native scan API has no hash-rate parameter.
 */
function create_native_batch_backend({ native_find_share }) {
  if (typeof native_find_share !== 'function') throw new Error('native_find_share_missing')

  return {
    // Runs one time-boxed scan slice; resolves to a normalized share or null.
    async find_share({ job, hashes_per_second, time_budget_ms, nonce_tag_bits, nonce_tag_value }) {
      // Randomize the starting nonce per slice. Always starting at 0 made
      // every retry slice re-scan the exact same nonce range (the hash is
      // deterministic for an unchanged job blob), wasting the whole budget;
      // this also matches the node-worker backend, which already randomizes
      // its start nonce.
      const start_nonce = Math.floor(Math.random() * 0xFFFFFFFF)
      const res = await native_find_share(
        job.blob,
        job.target,
        start_nonce,
        time_budget_ms,
        nonce_tag_bits,
        nonce_tag_value
      )
      if (!res) return null
      // Accept the documented camelCase names plus common bridge variants.
      return {
        job_id: job.job_id,
        nonce: res.nonceHex || res.nonce || res.nonce_hex,
        result: res.resultHex || res.result || res.result_hex
      }
    }
  }
}
|
|
23
|
+
|
|
24
|
+
module.exports = { create_native_batch_backend }
|
|
25
|
+
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
const path = require('path')
const { fork } = require('child_process')

// Desktop PoW backend: offloads hashing to a forked child process
// (node_worker_process.cjs) and talks to it over the Node IPC channel.
// A single long-lived worker is shared by all find_share calls; requests
// are correlated by req_id.
function create_node_worker_backend({
  max_job_time_ms = 90000
} = {}) {
  let worker = null
  let worker_ready = false
  // req_id -> { resolve, reject } for requests awaiting a worker reply.
  const requests = new Map()
  let cleanup_set = false

  // Lazily fork the worker; re-forks after an exit (worker_ready is cleared
  // by the 'exit' handler below).
  function ensure_worker() {
    if (worker && worker_ready) return
    const worker_path = path.join(__dirname, '..', 'node_worker_process.cjs')
    // NOTE(review): stdout/stderr are piped but never consumed — if the
    // child writes enough to stdio the pipe buffer could fill; confirm the
    // worker only communicates via process.send.
    worker = fork(worker_path, [], { stdio: ['pipe', 'pipe', 'pipe', 'ipc'] })
    worker_ready = true

    // Install process-exit cleanup only once for the backend's lifetime.
    if (!cleanup_set) {
      cleanup_set = true
      const cleanup = () => {
        if (worker) {
          try { worker.kill() } catch (e) {}
          worker = null
          worker_ready = false
        }
      }
      process.once('exit', cleanup)
      process.once('SIGINT', cleanup)
      process.once('SIGTERM', cleanup)
      process.once('beforeExit', cleanup)
    }

    // Route worker replies back to the pending promise for that req_id.
    worker.on('message', (msg) => {
      if (!msg || !msg.req_id) return
      const pending = requests.get(msg.req_id)
      if (!pending) return
      requests.delete(msg.req_id)
      if (msg.type === 'result') {
        pending.resolve(msg.result)
        return
      }
      // Anything other than 'result' is treated as an error reply.
      const error = msg.error || 'pow_worker_error'
      pending.reject(new Error(error))
    })

    // If the worker dies, fail every in-flight request; the next call
    // re-forks via ensure_worker().
    worker.on('exit', (code) => {
      worker_ready = false
      for (const [, pending] of requests) {
        pending.reject(new Error(`pow_worker_exit_${code}`))
      }
      requests.clear()
    })
  }

  // Send one request and await its reply. The timeout is padded by the
  // current backlog (requests are processed serially by the single worker,
  // so queued requests must wait for earlier slices to finish first).
  function call_worker(type, payload, timeout_ms) {
    ensure_worker()
    return new Promise((resolve, reject) => {
      const req_id = `${Date.now()}-${Math.random()}`
      let timer = null
      if (timeout_ms && timeout_ms > 0) {
        const backlog = requests.size
        const per_req_slack = Math.min(timeout_ms, 5000)
        const effective_timeout_ms = timeout_ms + (backlog * per_req_slack) + 500
        timer = setTimeout(() => {
          // On timeout the request is dropped here, but the worker may still
          // be computing it; its late reply is ignored by the message handler.
          requests.delete(req_id)
          reject(new Error('pow_worker_timeout'))
        }, effective_timeout_ms)
      }
      requests.set(req_id, {
        resolve: (result) => {
          if (timer) clearTimeout(timer)
          resolve(result)
        },
        reject: (error) => {
          if (timer) clearTimeout(timer)
          reject(error)
        }
      })
      worker.send({ type, req_id, payload })
    })
  }

  return {
    // Backend interface: one time-boxed search slice; resolves to a share
    // ({ job_id, nonce, result }) or null if the slice budget expires.
    async find_share({ job, hashes_per_second, time_budget_ms, nonce_tag_bits, nonce_tag_value }) {
      const tms = parseInt(time_budget_ms, 10)
      // IPC timeout = slice budget + 1.5s of slack for serialization/startup.
      const timeout_ms = tms > 0 ? tms + 1500 : max_job_time_ms + 1500
      return await call_worker('find_share', {
        job,
        // Random start so repeated slices don't re-scan the same nonce range.
        start_nonce: Math.floor(Math.random() * 0xFFFFFFFF),
        options: {
          hashes_per_second: parseInt(hashes_per_second, 10),
          time_budget_ms: tms,
          max_job_time_ms,
          nonceTagBits: nonce_tag_bits,
          nonceTagValue: nonce_tag_value
        }
      }, timeout_ms)
    }
  }
}
|
|
101
|
+
|
|
102
|
+
module.exports = { create_node_worker_backend }
|
|
103
|
+
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
const { nonceTagFromMessageHash, extractPrevIdFromBlob } = require('..')
|
|
2
|
+
|
|
3
|
+
/**
 * Counts concurrently active PoW tasks so a shared rate policy can split the
 * total hashes-per-second budget between them.
 */
class ChallengeScheduler {
  constructor() {
    this.active = 0
  }

  /**
   * Register one active task. Returns an idempotent release function that
   * decrements the counter exactly once, however often it is called.
   */
  acquire() {
    this.active += 1
    let done = false
    const release = () => {
      if (done) return
      done = true
      // Clamp at zero defensively; double-release is already guarded above.
      this.active = this.active > 0 ? this.active - 1 : 0
    }
    return release
  }

  /** Number of tasks currently holding a slot. */
  active_count() {
    return this.active
  }
}

/** Factory so embedders never construct the class directly. */
function create_pow_scheduler() {
  return new ChallengeScheduler()
}
|
|
24
|
+
|
|
25
|
+
/**
 * Build a two-phase hash-rate policy.
 *
 * Phase 1 (the first `phase1_ms` of a send) hashes aggressively; phase 2
 * backs off. The total cap is divided evenly across active tasks, then
 * clamped by the per-phase cap.
 *
 * Returns ({ active_tasks, elapsed_ms }) ->
 *   { hashes_per_second, time_budget_ms, in_phase1 }.
 */
function create_rate_policy({
  total_hashes_per_second_cap = 1500,
  phase1_hashes_per_second_cap = 950,
  phase2_hashes_per_second_cap = 250,
  phase1_ms = 2 * 60 * 1000,
  slice_ms_phase1 = 10000,
  slice_ms_phase2 = 10000
} = {}) {
  return ({ active_tasks, elapsed_ms }) => {
    const in_phase1 = elapsed_ms < phase1_ms
    const phase_cap = in_phase1 ? phase1_hashes_per_second_cap : phase2_hashes_per_second_cap
    // Fair share of the global budget, never below 1 H/s per task.
    const fair_share = Math.max(1, Math.floor(total_hashes_per_second_cap / Math.max(1, active_tasks)))
    const hashes_per_second = Math.min(fair_share, phase_cap)
    const time_budget_ms = in_phase1 ? slice_ms_phase1 : slice_ms_phase2
    return { hashes_per_second, time_budget_ms, in_phase1 }
  }
}
|
|
44
|
+
|
|
45
|
+
/**
 * Build a job-freshness check. A job passes only when the prevId embedded in
 * its blob equals the current chain tip — or the immediately previous tip —
 * as reported by `get_current_prev_ids()`.
 */
function create_freshness_policy({ get_current_prev_ids }) {
  return (job) => {
    if (!job?.blob) return false
    const prevId = extractPrevIdFromBlob(job.blob)
    const tips = get_current_prev_ids()
    if (!prevId || !tips.currentPrevId) return false
    return prevId === tips.currentPrevId || prevId === tips.previousPrevId
  }
}
|
|
54
|
+
|
|
55
|
+
// Run the full PoW challenge loop for one message.
//
// Repeatedly pulls the latest pool job via get_job(), applies the optional
// freshness check, asks the backend for one time-boxed search slice, and
// collects de-duplicated shares until `required_shares` is reached.
// Resolves to { job, shares } on success. The loop has no overall deadline:
// it keeps retrying until enough shares are found (it only returns null if
// the while-condition is false on entry, i.e. required_shares <= 0 —
// NOTE(review): confirm that returning null rather than { job, shares: [] }
// is intended for that case).
//
// Parameters:
//   get_job()            -> Promise<{ job_id, blob, target } | null>
//   backend.find_share   -> platform-specific one-slice search
//   message_hash         -> string used to derive the nonce tag
//   scheduler            -> shared ChallengeScheduler (created if omitted)
//   rate_policy          -> H/s + slice-budget policy (created if omitted)
//   freshness_policy(job)-> optional prevId freshness gate
//   log(event, data)     -> optional structured logger
async function message_challenge({
  get_job,
  backend,
  message_hash,
  required_shares = 1,
  nonce_tag_bits = 4,
  scheduler,
  rate_policy,
  freshness_policy,
  log
}) {
  if (!backend || typeof backend.find_share !== 'function') throw new Error('pow_backend_missing')
  if (typeof get_job !== 'function') throw new Error('pow_get_job_missing')
  if (!scheduler) scheduler = create_pow_scheduler()
  if (!rate_policy) rate_policy = create_rate_policy()

  // Hold a scheduler slot for the whole send so the rate policy can divide
  // the global H/s budget by the number of concurrent sends.
  const release = scheduler.acquire()
  try {
    const start = Date.now()
    // Low bits every share nonce must carry, derived from the message hash.
    const nonce_tag_value = nonceTagFromMessageHash(message_hash, nonce_tag_bits)

    const shares = []
    const share_nonces = new Set()
    // Normalize and de-duplicate (by nonce) before accepting a share.
    const push_share = (share) => {
      if (!share) return
      if (typeof share.nonce !== 'string' || typeof share.result !== 'string') return
      const nonce = share.nonce.toLowerCase()
      if (share_nonces.has(nonce)) return
      share_nonces.add(nonce)
      shares.push({ ...share, job_id: String(share.job_id), nonce, result: share.result.toLowerCase() })
    }

    let did_boost = false
    while (shares.length < required_shares) {
      const job = await get_job()
      if (!job) {
        // No job available yet — back off briefly and poll again.
        await new Promise(r => setTimeout(r, 250))
        continue
      }

      // Skip jobs built on a stale chain tip, if the embedder provided a check.
      if (freshness_policy && !freshness_policy(job)) {
        if (log) log('pow_stale_local', { jobId: job.job_id })
        await new Promise(r => setTimeout(r, 250))
        continue
      }

      const elapsed_ms = Date.now() - start
      const active_tasks = scheduler.active_count()
      const { hashes_per_second, time_budget_ms, in_phase1 } = rate_policy({ active_tasks, elapsed_ms })

      // Optional early boost pass.
      // Runs at most once per send, only in phase 1, and only if the backend
      // implements boost_find_share. Failures are deliberately best-effort.
      if (in_phase1 && !did_boost && backend.boost_find_share) {
        did_boost = true
        try {
          const share = await backend.boost_find_share({
            job,
            hashes_per_second,
            time_budget_ms: Math.min(4000, time_budget_ms),
            nonce_tag_bits,
            nonce_tag_value,
            log
          })
          if (share) push_share(share)
          if (shares.length >= required_shares) return { job, shares }
        } catch (e) {}
      }

      let share = null
      try {
        share = await backend.find_share({
          job,
          hashes_per_second,
          time_budget_ms,
          nonce_tag_bits,
          nonce_tag_value,
          log
        })
      } catch (e) {
        // Worker timeouts are expected under load: re-fetch the job and retry
        // quickly. Any other backend error is fatal for this send.
        if (e && e.message === 'pow_worker_timeout') {
          if (log) log('pow_worker_timeout_retry', { jobId: job.job_id, sliceMs: time_budget_ms, hps: hashes_per_second })
          await new Promise(r => setTimeout(r, in_phase1 ? 100 : 300))
          continue
        }
        throw e
      }

      if (share) {
        push_share(share)
        if (shares.length >= required_shares) return { job, shares }
      }

      // Pause between slices; phase 2 yields more aggressively.
      await new Promise(r => setTimeout(r, in_phase1 ? 50 : 250))
    }
    return null
  } finally {
    // Always free the scheduler slot, even on throw.
    release()
  }
}
|
|
153
|
+
|
|
154
|
+
module.exports = {
|
|
155
|
+
create_pow_scheduler,
|
|
156
|
+
create_rate_policy,
|
|
157
|
+
create_freshness_policy,
|
|
158
|
+
message_challenge
|
|
159
|
+
}
|
|
160
|
+
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
const { Crypto } = require('kryptokrona-utils')
|
|
2
|
+
const { findShare, nonceMatchesTag } = require('..')
|
|
3
|
+
|
|
4
|
+
const crypto = new Crypto()
|
|
5
|
+
|
|
6
|
+
function logPow(event, data) {
|
|
7
|
+
process.send && process.send({ type: 'log', event, data })
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
// Search for one share within `time_budget_ms`, slicing the work into <=1s
// findShare() calls so the process stays responsive to IPC. Honors an
// optional overall `max_job_time_ms` cap and the nonce-tag constraint.
// Returns the share ({ job_id, nonce, result }) or null when the budget ends.
async function pow_find_share(job, start_nonce, options = {}) {
  const hashes_per_second = parseInt(options.hashes_per_second, 10)
  const time_budget_ms = parseInt(options.time_budget_ms, 10)
  const max_job_time_ms = parseInt(options.max_job_time_ms, 10)
  const nonceTagBits = parseInt(options.nonceTagBits || '0', 10)
  const nonceTagValue = parseInt(options.nonceTagValue || '0', 10)
  const tag_enabled = Number.isFinite(nonceTagBits) && nonceTagBits > 0

  // Decode an 8-char little-endian nonce hex string (as produced by
  // nonceToHexLE) back to its 32-bit integer value.
  const decode_nonce_le = (hex) =>
    parseInt(hex.slice(6, 8) + hex.slice(4, 6) + hex.slice(2, 4) + hex.slice(0, 2), 16)

  let nonce = start_nonce >>> 0
  const start = Date.now()
  while (true) {
    const elapsed = Date.now() - start
    const remaining_ms = time_budget_ms - elapsed
    if (remaining_ms <= 0) return null
    if (max_job_time_ms > 0 && elapsed >= max_job_time_ms) return null

    const slice_ms = Math.min(1000, remaining_ms)
    const share = await findShare({
      job,
      startNonce: nonce,
      hashesPerSecond: hashes_per_second,
      timeBudgetMs: slice_ms,
      nonceTagBits,
      nonceTagValue,
      hashFn: (blobHex) => crypto.cn_turtle_lite_slow_hash_v2(blobHex),
      log: (event, data) => logPow(event, data)
    })

    if (share) {
      // BUGFIX: share.nonce is hex in LITTLE-endian byte order; decode it the
      // same way before incrementing. The previous parseInt(share.nonce, 16)
      // read it big-endian, so "next nonce" was a byte-swapped value
      // unrelated to the one just found.
      const nextNonce = (decode_nonce_le(share.nonce) + 1) >>> 0
      if (!tag_enabled || nonceMatchesTag(share.nonce, nonceTagValue, nonceTagBits)) {
        return share
      }
      // Share found but its nonce doesn't carry the required tag: resume the
      // scan just past it.
      nonce = nextNonce
      continue
    }

    // No share in this slice: skip the nonce range the slice covered.
    const slice_attempts = Math.max(1, Math.floor(hashes_per_second * (slice_ms / 1000)))
    nonce = (nonce + slice_attempts) >>> 0
  }
}
|
|
51
|
+
|
|
52
|
+
// Collect up to `required_shares` shares for one job, resuming the nonce
// scan after each share found and honoring an overall max_job_time_ms cap.
// Returns { job, shares } (shares may be shorter than requested on timeout).
async function pow_calculate_shares(job, required_shares = 1, options = {}) {
  if (!job || !job.blob || !job.target) {
    throw new Error('Invalid job data')
  }
  const shares = []
  let nonce = Math.floor(Math.random() * 0xFFFFFFFF)
  const start = Date.now()
  const max_job_time_ms = parseInt(options.max_job_time_ms, 10)
  for (let i = 0; i < required_shares; i++) {
    if (max_job_time_ms > 0 && (Date.now() - start) >= max_job_time_ms) break
    const share = await pow_find_share(job, nonce, options)
    if (share) {
      shares.push(share)
      // BUGFIX: share.nonce is little-endian hex (see nonceToHexLE); decode
      // it byte-reversed before advancing, instead of the previous big-endian
      // parseInt which produced an unrelated byte-swapped value.
      const h = share.nonce
      nonce = (parseInt(h.slice(6, 8) + h.slice(4, 6) + h.slice(2, 4) + h.slice(0, 2), 16) + 1) >>> 0
    }
  }
  return { job, shares }
}
|
|
70
|
+
|
|
71
|
+
// IPC dispatcher: the parent backend sends { type, req_id, payload } and
// expects exactly one reply per req_id — either
// { type: 'result', req_id, result } or { type: 'error', req_id, error }.
process.on('message', async (msg) => {
  // Ignore malformed frames rather than replying (no req_id to answer to).
  if (!msg || !msg.type || !msg.req_id) return
  const { type, req_id, payload } = msg
  try {
    if (type === 'find_share') {
      const { job, start_nonce, options } = payload || {}
      const result = await pow_find_share(job, start_nonce, options || {})
      // process.send is undefined when run without an IPC channel.
      process.send && process.send({ type: 'result', req_id, result })
      return
    }
    if (type === 'calculate_shares') {
      const { job, required_shares, options } = payload || {}
      const result = await pow_calculate_shares(job, required_shares, options || {})
      process.send && process.send({ type: 'result', req_id, result })
      return
    }
    process.send && process.send({ type: 'error', req_id, error: 'unknown_request' })
  } catch (e) {
    // Surface handler failures to the parent as an error reply.
    process.send && process.send({ type: 'error', req_id, error: e && e.message })
  }
})
|
|
92
|
+
|
package/index.js
CHANGED
|
@@ -1,132 +1,239 @@
|
|
|
1
|
-
const DEFAULT_NONCE_OFFSET = 39;
|
|
2
|
-
|
|
3
|
-
function
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
const byte =
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
offset
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
const
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
return
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
const
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
}
|
|
1
|
+
const DEFAULT_NONCE_OFFSET = 39;
|
|
2
|
+
|
|
3
|
+
// Decode a hex string into a Uint8Array. Throws on non-string input,
// odd length, or pairs that parse to NaN.
function hexToBytes(hex) {
  if (typeof hex !== 'string') throw new Error('hex_to_bytes_invalid')
  if (hex.length % 2 !== 0) throw new Error('hex_to_bytes_len')
  const byteCount = hex.length / 2
  const bytes = new Uint8Array(byteCount)
  for (let index = 0; index < byteCount; index++) {
    const pair = hex.substring(index * 2, index * 2 + 2)
    const parsed = parseInt(pair, 16)
    if (!Number.isFinite(parsed)) throw new Error('hex_to_bytes_parse')
    bytes[index] = parsed
  }
  return bytes
}
|
|
14
|
+
|
|
15
|
+
// Encode a byte sequence (Uint8Array or plain array) as lowercase hex,
// two characters per byte.
function bytesToHex(bytes) {
  const parts = []
  for (let i = 0; i < bytes.length; i++) {
    parts.push(bytes[i].toString(16).padStart(2, '0'))
  }
  return parts.join('')
}
|
|
22
|
+
|
|
23
|
+
// Read an unsigned 32-bit little-endian integer starting at `offset`.
function readUInt32LE(bytes, offset) {
  let value = bytes[offset]
  value |= bytes[offset + 1] << 8
  value |= bytes[offset + 2] << 16
  value |= bytes[offset + 3] << 24
  // >>> 0 converts the signed 32-bit result back to unsigned.
  return value >>> 0
}
|
|
31
|
+
|
|
32
|
+
// Read an unsigned 64-bit little-endian integer at `offset` as a BigInt.
function readBigUInt64LE(bytes, offset) {
  const low = BigInt(readUInt32LE(bytes, offset))
  const high = BigInt(readUInt32LE(bytes, offset + 4))
  return (high << 32n) | low
}
|
|
37
|
+
|
|
38
|
+
// Decode a LEB128-style varint (7 data bits per byte, high bit = continue)
// starting at `offset`. Returns { value, bytes } on success, or null when
// the buffer ends mid-varint or the encoding exceeds the supported width.
function readVarint(buffer, offset) {
  let value = 0;
  let shift = 0;
  let bytes = 0;
  while (offset + bytes < buffer.length) {
    const byte = buffer[offset + bytes];
    // BUGFIX: use multiply-add instead of `value |= (byte & 0x7f) << shift`.
    // The `<<` operator works on 32-bit signed operands, so shifts >= 31
    // silently corrupted (or negated) values of 5+ byte varints even though
    // the guard below permits up to 63 bits. Multiply-add is exact up to
    // Number.MAX_SAFE_INTEGER (2^53 - 1).
    value += (byte & 0x7f) * 2 ** shift;
    bytes += 1;
    if ((byte & 0x80) === 0) {
      return { value, bytes };
    }
    shift += 7;
    if (shift > 63) return null;
  }
  return null;
}
|
|
54
|
+
|
|
55
|
+
// Locate the nonce within a block blob by walking the header: three varints
// (major version, minor version, timestamp) followed by the 32-byte previous
// block hash; the 4-byte nonce comes immediately after. Falls back to
// DEFAULT_NONCE_OFFSET (39) whenever the header cannot be parsed.
// NOTE: a varint decoding to value 0 is still a truthy { value, bytes }
// object, so all-zero headers parse normally (offset 35), not via fallback.
function getNonceOffsetFromBuffer(blobBuffer) {
  try {
    let offset = 0;
    const major = readVarint(blobBuffer, offset);
    if (!major) return DEFAULT_NONCE_OFFSET;
    offset += major.bytes;
    const minor = readVarint(blobBuffer, offset);
    if (!minor) return DEFAULT_NONCE_OFFSET;
    offset += minor.bytes;
    const timestamp = readVarint(blobBuffer, offset);
    if (!timestamp) return DEFAULT_NONCE_OFFSET;
    offset += timestamp.bytes;
    // Skip the 32-byte previous block hash; the nonce starts right after it.
    offset += 32;
    return offset;
  } catch (e) {
    return DEFAULT_NONCE_OFFSET;
  }
}
|
|
73
|
+
|
|
74
|
+
// Hex-string convenience wrapper: decode the blob, then locate its nonce.
function getNonceOffset(blobHex) {
  return getNonceOffsetFromBuffer(hexToBytes(blobHex));
}
|
|
78
|
+
|
|
79
|
+
// Overwrite the blob's nonce bytes with `nonceHex` (little-endian hex, as
// produced by nonceToHexLE) at the parsed nonce offset. Returns the patched
// blob hex plus the offset used.
// NOTE(review): Uint8Array.set throws a RangeError if the nonce would run
// past the end of the blob — callers are expected to pass well-formed blobs.
function insertNonce(blobHex, nonceHex) {
  const blobBuffer = hexToBytes(blobHex);
  const nonceBuffer = hexToBytes(nonceHex);
  const offset = getNonceOffsetFromBuffer(blobBuffer);
  blobBuffer.set(nonceBuffer, offset);
  return { blobHex: bytesToHex(blobBuffer), offset };
}
|
|
86
|
+
|
|
87
|
+
// Extract the 32-byte previous-block hash (prevId) from a block blob:
// skip the three header varints (major, minor, timestamp), then read the
// next 32 bytes as hex. Returns null on any parse failure instead of
// throwing, so callers can treat "unknown" as "not fresh".
function extractPrevIdFromBlob(blobHex) {
  try {
    const blobBuffer = hexToBytes(blobHex);
    let offset = 0;
    const major = readVarint(blobBuffer, offset);
    if (!major) return null;
    offset += major.bytes;
    const minor = readVarint(blobBuffer, offset);
    if (!minor) return null;
    offset += minor.bytes;
    const timestamp = readVarint(blobBuffer, offset);
    if (!timestamp) return null;
    offset += timestamp.bytes;
    // Bounds check: the prev hash must fit entirely within the blob.
    if (offset + 32 > blobBuffer.length) return null;
    return bytesToHex(blobBuffer.subarray(offset, offset + 32));
  } catch (e) {
    return null;
  }
}
|
|
106
|
+
|
|
107
|
+
// Serialize a 32-bit nonce as 8 hex chars in little-endian byte order —
// the byte order in which the nonce is embedded into the block blob.
function nonceToHexLE(nonce) {
  const value = nonce >>> 0
  const toPair = (byte) => byte.toString(16).padStart(2, '0')
  return (
    toPair(value & 0xff) +
    toPair((value >>> 8) & 0xff) +
    toPair((value >>> 16) & 0xff) +
    toPair((value >>> 24) & 0xff)
  )
}
|
|
116
|
+
|
|
117
|
+
// Normalize a pool target to a 64-bit BigInt threshold, or null if unusable.
// - 4-byte targets are the compact form: expanded to a 64-bit threshold via
//   floor(2^64-1 / floor(2^32-1 / raw)); raw === 0 is rejected as invalid.
// - 8-byte targets are already the little-endian 64-bit threshold.
// Any other length (or empty input) yields null, which meetsTarget treats
// as "no constraint".
function parseTarget(targetHex) {
  if (!targetHex) return null;
  const targetBytes = hexToBytes(targetHex)
  if (targetBytes.length === 4) {
    const raw = readUInt32LE(targetBytes, 0);
    if (raw === 0) return null;
    const numerator = 0xFFFFFFFFFFFFFFFFn;
    // BigInt division truncates; raw >= 1 guarantees denom >= 1n, so the
    // denom === 0n guard below is purely defensive.
    const denom = 0xFFFFFFFFn / BigInt(raw);
    if (denom === 0n) return null;
    return numerator / denom;
  }
  if (targetBytes.length === 8) {
    return readBigUInt64LE(targetBytes, 0);
  }
  return null;
}
|
|
133
|
+
|
|
134
|
+
// Check whether a 32-byte hash satisfies the pool target: the last 8 bytes
// of the hash, read as a little-endian 64-bit integer, must be <= the
// parsed target threshold.
// NOTE(review): an unparsable/absent target returns true (share accepted
// unconditionally) — confirm this permissive fallback is intended.
function meetsTarget(hashHex, targetHex) {
  const target = parseTarget(targetHex);
  if (target === null) return true;
  const hashBytes = hexToBytes(hashHex);
  // Shorter-than-32-byte hashes can never qualify.
  if (hashBytes.length < 32) return false;
  // Bytes 24..31 are the significant tail for difficulty comparison.
  const hashTail = readBigUInt64LE(hashBytes, 24);
  return hashTail <= target;
}
|
|
142
|
+
|
|
143
|
+
// Return whichever of two target hex strings is numerically smaller once
// parsed (i.e. the harder target). If one side fails to parse, the other
// wins; if both fail, `b` is returned (aa null short-circuits first).
function minTargetHex(a, b) {
  const aa = parseTarget(a);
  const bb = parseTarget(b);
  if (aa === null) return b;
  if (bb === null) return a;
  return aa <= bb ? a : b;
}
|
|
150
|
+
|
|
151
|
+
// Derive the nonce tag for a message: the low `bits` bits of the first byte
// of sha256(messageHash). Returns 0 (no effective tag) for bits outside
// 1..16 or on any failure.
// NOTE(review): only the first digest byte is used, so bits > 8 can never
// set the upper tag bits — confirm whether bits > 8 is actually meaningful.
function nonceTagFromMessageHash(messageHash, bits) {
  try {
    const b = typeof bits === 'number' ? bits : 0;
    if (b <= 0 || b > 16) return 0;
    const mask = (1 << b) - 1;
    // Isomorphic sha256 (works in Node + React Native).
    const { sha256 } = require('js-sha256')
    const digest0 = sha256.array(String(messageHash))[0]
    return digest0 & mask;
  } catch (e) {
    return 0;
  }
}
|
|
164
|
+
|
|
165
|
+
// Verify that a share's nonce carries the required tag: the low `bits` bits
// of the 32-bit nonce value must equal `tagValue` (masked). The nonce hex is
// little-endian (as embedded in the blob), so it is decoded before masking.
// bits <= 0 disables tagging (always true); a malformed nonce is rejected.
function nonceMatchesTag(nonceHex, tagValue, bits) {
  if (typeof nonceHex !== 'string' || nonceHex.length !== 8) return false;
  const b = typeof bits === 'number' ? bits : 0;
  if (b <= 0) return true;
  const mask = (1 << b) - 1;
  // Tag is computed over the actual 32-bit nonce value inserted into the blob (little-endian).
  const nonceBytes = hexToBytes(nonceHex)
  const nonce = readUInt32LE(nonceBytes, 0)
  return (nonce & mask) === (tagValue & mask);
}
|
|
175
|
+
|
|
176
|
+
// Scan nonces for one share within a hash budget (hashesPerSecond *
// timeBudgetMs / 1000 attempts) and a wall-clock budget (timeBudgetMs),
// whichever ends first. When nonce tagging is enabled, only nonces whose low
// `nonceTagBits` bits equal `nonceTagValue` are hashed.
// Returns { job_id, nonce, result } (nonce as little-endian hex) or null.
async function findShare({
  job,
  startNonce,
  hashFn,
  log,
  hashesPerSecond = 500,
  timeBudgetMs = 1000,
  yieldEvery = 200,
  logEvery = 100,
  nonceTagBits = 0,
  nonceTagValue = 0
}) {
  const maxAttempts = Math.max(1, Math.floor((hashesPerSecond * timeBudgetMs) / 1000));
  const b = typeof nonceTagBits === 'number' ? nonceTagBits : 0;
  const mask = b > 0 ? ((1 << b) - 1) : 0;
  // With tagging on, stride over the nonce space so every visited nonce is
  // tagged. The previous code skipped mismatching nonces one at a time and
  // counted each skip as an attempt, so with b tag bits up to (2^b - 1) of
  // every 2^b budgeted attempts were burned without hashing anything.
  const stride = mask + 1; // 1 when tagging is disabled

  let nonce = startNonce >>> 0;
  if (b > 0) {
    // Jump straight to the first nonce >= startNonce carrying the tag.
    const aligned = (((nonce & ~mask) >>> 0) | (nonceTagValue & mask)) >>> 0;
    nonce = (aligned >= nonce ? aligned : aligned + stride) >>> 0;
  }
  const start = Date.now();

  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const nonceHex = nonceToHexLE(nonce);
    const { blobHex, offset } = insertNonce(job.blob, nonceHex);
    if (log) log('nonce_offset', { jobId: job.job_id, offset });
    const result = await hashFn(blobHex);

    if (meetsTarget(result, job.target)) {
      if (log) log('pow_share_found', { jobId: job.job_id, nonce: nonceHex, attempt });
      return { job_id: job.job_id, nonce: nonceHex, result };
    }

    // Step by the stride so the next candidate keeps the tag bits.
    nonce = (nonce + stride) >>> 0;
    if (attempt > 0 && attempt % logEvery === 0) {
      if (log) log('pow_progress', { jobId: job.job_id, attempt, nonce: nonceHex });
    }
    // Periodically yield to the event loop so IPC/logging stays responsive.
    if (attempt > 0 && attempt % yieldEvery === 0) {
      await new Promise(resolve => setTimeout(resolve, 0));
    }
    if (Date.now() - start >= timeBudgetMs) {
      if (log) log('pow_time_budget', { jobId: job.job_id, attempt, elapsedMs: Date.now() - start });
      break;
    }
  }

  if (log) log('pow_share_exhausted', { jobId: job.job_id, attempts: maxAttempts });
  return null;
}
|
|
225
|
+
|
|
226
|
+
module.exports = {
|
|
227
|
+
readVarint,
|
|
228
|
+
getNonceOffset,
|
|
229
|
+
insertNonce,
|
|
230
|
+
meetsTarget,
|
|
231
|
+
minTargetHex,
|
|
232
|
+
extractPrevIdFromBlob,
|
|
233
|
+
nonceTagFromMessageHash,
|
|
234
|
+
nonceMatchesTag,
|
|
235
|
+
findShare,
|
|
236
|
+
// helpers for embedders/backends
|
|
237
|
+
hexToBytes,
|
|
238
|
+
bytesToHex
|
|
239
|
+
};
|
package/package.json
CHANGED
|
@@ -1,8 +1,22 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "hugin-utils",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.2.0",
|
|
4
4
|
"description": "PoW utils for hugin node messages",
|
|
5
5
|
"main": "index.js",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"test": "node test/challenge.test.cjs"
|
|
8
|
+
},
|
|
9
|
+
"dependencies": {
|
|
10
|
+
"js-sha256": "^0.11.0"
|
|
11
|
+
},
|
|
12
|
+
"peerDependencies": {
|
|
13
|
+
"kryptokrona-utils": "^1.3.7"
|
|
14
|
+
},
|
|
15
|
+
"peerDependenciesMeta": {
|
|
16
|
+
"kryptokrona-utils": {
|
|
17
|
+
"optional": true
|
|
18
|
+
}
|
|
19
|
+
},
|
|
6
20
|
"license": "MIT",
|
|
7
21
|
"author": "n9lsjr"
|
|
8
22
|
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
const assert = require('assert')

const {
  getNonceOffset,
  insertNonce,
  meetsTarget,
  nonceTagFromMessageHash,
  nonceMatchesTag,
  findShare
} = require('..')

// The nonce must land at the offset computed from the blob header. For an
// all-zero blob the three header varints (major, minor, timestamp) are one
// byte each, so the offset is 3 + 32 (prev hash) = 35.
// BUGFIX: the previous assertion expected 39 (DEFAULT_NONCE_OFFSET, the
// fallback for unparsable headers), which contradicts getNonceOffset's own
// parse of this blob and made the test fail.
async function test_insert_nonce_defaults() {
  const blobHex = '00'.repeat(100)
  const nonceHex = '01020304'
  const { blobHex: outHex, offset } = insertNonce(blobHex, nonceHex)
  assert.strictEqual(offset, getNonceOffset(blobHex))
  assert.strictEqual(offset, 35)
  const inserted = outHex.slice(offset * 2, (offset * 2) + 8)
  assert.strictEqual(inserted, nonceHex)
}

// An 8-byte target is the 64-bit threshold directly: hash tail must be <= it.
function test_meets_target_64bit() {
  const targetHex = '0000000000000000' // 0
  const okHash = '00'.repeat(32)
  const badHash = '00'.repeat(24) + '0100000000000000' // tail=1 (LE)
  assert.strictEqual(meetsTarget(okHash, targetHex), true)
  assert.strictEqual(meetsTarget(badHash, targetHex), false)
}

// A share found under a tag constraint must carry that tag in its nonce.
async function test_nonce_tag_roundtrip() {
  const bits = 4
  const messageHash = 'test-message-hash'
  const tagValue = nonceTagFromMessageHash(messageHash, bits)

  const job = {
    job_id: '1',
    blob: '00'.repeat(100),
    target: 'ffffffff'
  }

  const share = await findShare({
    job,
    startNonce: 0,
    nonceTagBits: bits,
    nonceTagValue: tagValue,
    hashesPerSecond: 10_000,
    timeBudgetMs: 50,
    hashFn: async () => '00'.repeat(32)
  })

  assert.ok(share, 'expected share')
  assert.strictEqual(share.job_id, job.job_id)
  assert.ok(nonceMatchesTag(share.nonce, tagValue, bits), 'share nonce must match nonce-tag')
}

async function run() {
  await test_insert_nonce_defaults()
  test_meets_target_64bit()
  await test_nonce_tag_roundtrip()
  console.log('ok')
}

run().catch((e) => {
  console.error(e)
  process.exit(1)
})
|
|
67
|
+
|