hugin-utils 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -7,3 +7,129 @@ PoW utils for hugin node messages.
7
7
  ```js
8
8
  const { insertNonce, meetsTarget, findShare } = require('hugin-utils');
9
9
  ```
10
+
11
+ ## Reusable challenge engine (desktop + mobile)
12
+
13
+ ### Engine API
14
+
15
+ ```js
16
+ const {
17
+ message_challenge,
18
+ create_pow_scheduler,
19
+ create_rate_policy
20
+ } = require('hugin-utils/challenge')
21
+ ```
22
+
23
+ `message_challenge({ get_job, backend, message_hash, required_shares, nonce_tag_bits, scheduler, rate_policy, freshness_policy, log })`
24
+
25
+ - `get_job() -> Promise<job|null>`: provide the latest pool job `{ job_id, blob, target }`
26
+ - `backend.find_share(...) -> Promise<share|null>`: platform-specific PoW backend
27
+ - `scheduler`: shared instance to enforce total H/s budget across concurrent message sends
28
+ - `rate_policy({ active_tasks, elapsed_ms }) -> { hashes_per_second, time_budget_ms, in_phase1 }`
29
+ - `freshness_policy(job) -> boolean`: optional prevId freshness check
30
+
31
+ ### Desktop backend (Node worker)
32
+
33
+ ```js
34
+ const { create_node_worker_backend } = require('hugin-utils/challenge/node_worker')
35
+ const backend = create_node_worker_backend()
36
+ ```
37
+
38
+ Note: the Node-worker backend uses `kryptokrona-utils` for `cn_turtle_lite_slow_hash_v2`. It is declared as an **optional peer dependency** (desktop already provides it).
39
+
40
+ ### Mobile backend (native batch scan)
41
+
42
+ The recommended mobile integration is a native batch-scan interface. Implement a native function with the shape:
43
+
44
+ - `native_find_share(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue) -> { nonceHex, resultHex } | null`
45
+
46
+ Then wrap it:
47
+
48
+ ```js
49
+ const { create_native_batch_backend } = require('hugin-utils/challenge')
50
+ const backend = create_native_batch_backend({ native_find_share })
51
+ ```
52
+
53
+ #### React Native example
54
+
55
+ Example using `NativeModules` (you can adapt this to TurboModules/JSI).
56
+
57
+ ```js
58
+ import { NativeModules } from 'react-native'
59
+ import {
60
+ message_challenge,
61
+ create_pow_scheduler,
62
+ create_rate_policy,
63
+ create_native_batch_backend
64
+ } from 'hugin-utils/challenge'
65
+
66
+ // Your native module must expose:
67
+ // findShare(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue)
68
+ const { HuginPow } = NativeModules
69
+
70
+ const backend = create_native_batch_backend({
71
+ native_find_share: async (jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue) => {
72
+ const res = await HuginPow.findShare(jobBlobHex, targetHex, startNonce, timeBudgetMs, tagBits, tagValue)
73
+ // Expect { nonceHex, resultHex } or null
74
+ return res
75
+ }
76
+ })
77
+
78
+ // Share across concurrent sends to enforce total H/s cap.
79
+ const scheduler = create_pow_scheduler()
80
+ const rate_policy = create_rate_policy({
81
+ total_hashes_per_second_cap: 1500,
82
+ phase1_hashes_per_second_cap: 950,
83
+ phase2_hashes_per_second_cap: 250
84
+ })
85
+
86
+ export async function challenge_message({ get_job, message_hash }) {
87
+ return await message_challenge({
88
+ get_job, // () => Promise<{ job_id, blob, target }|null>
89
+ backend,
90
+ message_hash, // string (the message id/hash you tag the nonce with)
91
+ required_shares: 1,
92
+ nonce_tag_bits: 4,
93
+ scheduler,
94
+ rate_policy
95
+ })
96
+ }
97
+ ```
98
+
99
+ #### Mobile (easiest) when you only have `cn_pico` hash()
100
+
101
+ If your native side only exposes a single function like `cn_pico(blobHex) -> hashHex`, you can keep the nonce search loop in JS and just provide the hash function:
102
+
103
+ ```js
104
+ import { NativeModules } from 'react-native'
105
+ import {
106
+ message_challenge,
107
+ create_pow_scheduler,
108
+ create_rate_policy,
109
+ create_js_hashfn_backend
110
+ } from 'hugin-utils/challenge'
111
+
112
+ const { HuginPow } = NativeModules
113
+
114
+ const backend = create_js_hashfn_backend({
115
+ // Must return a 64-char hex hash string
116
+ hash_fn: async (blobHex) => await HuginPow.cn_pico(blobHex)
117
+ })
118
+
119
+ const scheduler = create_pow_scheduler()
120
+ const rate_policy = create_rate_policy()
121
+
122
+ export async function challenge_message({ get_job, message_hash }) {
123
+ return await message_challenge({
124
+ get_job,
125
+ backend,
126
+ message_hash,
127
+ required_shares: 1,
128
+ nonce_tag_bits: 4,
129
+ scheduler,
130
+ rate_policy
131
+ })
132
+ }
133
+ ```
134
+
135
+ Performance note: this is simplest to integrate, but can be significantly slower if `cn_pico()` is called over the classic RN bridge. Prefer JSI/TurboModules for high call rates, or implement the native batch-scan `findShare()` API for best performance.
@@ -0,0 +1,22 @@
1
+ const { findShare } = require('../..')
2
+
3
+ function create_js_hashfn_backend({ hash_fn }) {
4
+ if (typeof hash_fn !== 'function') throw new Error('hash_fn_missing')
5
+
6
+ return {
7
+ async find_share({ job, hashes_per_second, time_budget_ms, nonce_tag_bits, nonce_tag_value }) {
8
+ return await findShare({
9
+ job,
10
+ startNonce: Math.floor(Math.random() * 0xFFFFFFFF),
11
+ hashesPerSecond: parseInt(hashes_per_second, 10),
12
+ timeBudgetMs: parseInt(time_budget_ms, 10),
13
+ nonceTagBits: nonce_tag_bits,
14
+ nonceTagValue: nonce_tag_value,
15
+ hashFn: async (blobHex) => await hash_fn(blobHex)
16
+ })
17
+ }
18
+ }
19
+ }
20
+
21
+ module.exports = { create_js_hashfn_backend }
22
+
@@ -0,0 +1,25 @@
1
+ function create_native_batch_backend({ native_find_share }) {
2
+ if (typeof native_find_share !== 'function') throw new Error('native_find_share_missing')
3
+
4
+ return {
5
+ async find_share({ job, hashes_per_second, time_budget_ms, nonce_tag_bits, nonce_tag_value }) {
6
+ const res = await native_find_share(
7
+ job.blob,
8
+ job.target,
9
+ 0,
10
+ time_budget_ms,
11
+ nonce_tag_bits,
12
+ nonce_tag_value
13
+ )
14
+ if (!res) return null
15
+ return {
16
+ job_id: job.job_id,
17
+ nonce: res.nonceHex || res.nonce || res.nonce_hex,
18
+ result: res.resultHex || res.result || res.result_hex
19
+ }
20
+ }
21
+ }
22
+ }
23
+
24
+ module.exports = { create_native_batch_backend }
25
+
@@ -0,0 +1,103 @@
1
+ const path = require('path')
2
+ const { fork } = require('child_process')
3
+
4
+ function create_node_worker_backend({
5
+ max_job_time_ms = 90000
6
+ } = {}) {
7
+ let worker = null
8
+ let worker_ready = false
9
+ const requests = new Map()
10
+ let cleanup_set = false
11
+
12
+ function ensure_worker() {
13
+ if (worker && worker_ready) return
14
+ const worker_path = path.join(__dirname, '..', 'node_worker_process.cjs')
15
+ worker = fork(worker_path, [], { stdio: ['pipe', 'pipe', 'pipe', 'ipc'] })
16
+ worker_ready = true
17
+
18
+ if (!cleanup_set) {
19
+ cleanup_set = true
20
+ const cleanup = () => {
21
+ if (worker) {
22
+ try { worker.kill() } catch (e) {}
23
+ worker = null
24
+ worker_ready = false
25
+ }
26
+ }
27
+ process.once('exit', cleanup)
28
+ process.once('SIGINT', cleanup)
29
+ process.once('SIGTERM', cleanup)
30
+ process.once('beforeExit', cleanup)
31
+ }
32
+
33
+ worker.on('message', (msg) => {
34
+ if (!msg || !msg.req_id) return
35
+ const pending = requests.get(msg.req_id)
36
+ if (!pending) return
37
+ requests.delete(msg.req_id)
38
+ if (msg.type === 'result') {
39
+ pending.resolve(msg.result)
40
+ return
41
+ }
42
+ const error = msg.error || 'pow_worker_error'
43
+ pending.reject(new Error(error))
44
+ })
45
+
46
+ worker.on('exit', (code) => {
47
+ worker_ready = false
48
+ for (const [, pending] of requests) {
49
+ pending.reject(new Error(`pow_worker_exit_${code}`))
50
+ }
51
+ requests.clear()
52
+ })
53
+ }
54
+
55
+ function call_worker(type, payload, timeout_ms) {
56
+ ensure_worker()
57
+ return new Promise((resolve, reject) => {
58
+ const req_id = `${Date.now()}-${Math.random()}`
59
+ let timer = null
60
+ if (timeout_ms && timeout_ms > 0) {
61
+ const backlog = requests.size
62
+ const per_req_slack = Math.min(timeout_ms, 5000)
63
+ const effective_timeout_ms = timeout_ms + (backlog * per_req_slack) + 500
64
+ timer = setTimeout(() => {
65
+ requests.delete(req_id)
66
+ reject(new Error('pow_worker_timeout'))
67
+ }, effective_timeout_ms)
68
+ }
69
+ requests.set(req_id, {
70
+ resolve: (result) => {
71
+ if (timer) clearTimeout(timer)
72
+ resolve(result)
73
+ },
74
+ reject: (error) => {
75
+ if (timer) clearTimeout(timer)
76
+ reject(error)
77
+ }
78
+ })
79
+ worker.send({ type, req_id, payload })
80
+ })
81
+ }
82
+
83
+ return {
84
+ async find_share({ job, hashes_per_second, time_budget_ms, nonce_tag_bits, nonce_tag_value }) {
85
+ const tms = parseInt(time_budget_ms, 10)
86
+ const timeout_ms = tms > 0 ? tms + 1500 : max_job_time_ms + 1500
87
+ return await call_worker('find_share', {
88
+ job,
89
+ start_nonce: Math.floor(Math.random() * 0xFFFFFFFF),
90
+ options: {
91
+ hashes_per_second: parseInt(hashes_per_second, 10),
92
+ time_budget_ms: tms,
93
+ max_job_time_ms,
94
+ nonceTagBits: nonce_tag_bits,
95
+ nonceTagValue: nonce_tag_value
96
+ }
97
+ }, timeout_ms)
98
+ }
99
+ }
100
+ }
101
+
102
+ module.exports = { create_node_worker_backend }
103
+
@@ -0,0 +1,160 @@
1
+ const { nonceTagFromMessageHash, extractPrevIdFromBlob } = require('..')
2
+
3
+ class ChallengeScheduler {
4
+ constructor() {
5
+ this.active = 0
6
+ }
7
+ acquire() {
8
+ this.active++
9
+ let released = false
10
+ return () => {
11
+ if (released) return
12
+ released = true
13
+ this.active = Math.max(0, this.active - 1)
14
+ }
15
+ }
16
+ active_count() {
17
+ return this.active
18
+ }
19
+ }
20
+
21
+ function create_pow_scheduler() {
22
+ return new ChallengeScheduler()
23
+ }
24
+
25
+ function create_rate_policy({
26
+ total_hashes_per_second_cap = 1500,
27
+ phase1_hashes_per_second_cap = 950,
28
+ phase2_hashes_per_second_cap = 250,
29
+ phase1_ms = 2 * 60 * 1000,
30
+ slice_ms_phase1 = 10000,
31
+ slice_ms_phase2 = 10000
32
+ } = {}) {
33
+ return ({ active_tasks, elapsed_ms }) => {
34
+ const in_phase1 = elapsed_ms < phase1_ms
35
+ const per_task_budget = Math.max(1, Math.floor(total_hashes_per_second_cap / Math.max(1, active_tasks)))
36
+ const hashes_per_second = Math.min(
37
+ per_task_budget,
38
+ in_phase1 ? phase1_hashes_per_second_cap : phase2_hashes_per_second_cap
39
+ )
40
+ const time_budget_ms = in_phase1 ? slice_ms_phase1 : slice_ms_phase2
41
+ return { hashes_per_second, time_budget_ms, in_phase1 }
42
+ }
43
+ }
44
+
45
+ function create_freshness_policy({ get_current_prev_ids }) {
46
+ return (job) => {
47
+ if (!job || !job.blob) return false
48
+ const prevId = extractPrevIdFromBlob(job.blob)
49
+ const { currentPrevId, previousPrevId } = get_current_prev_ids()
50
+ if (!prevId || !currentPrevId) return false
51
+ return prevId === currentPrevId || prevId === previousPrevId
52
+ }
53
+ }
54
+
55
+ async function message_challenge({
56
+ get_job,
57
+ backend,
58
+ message_hash,
59
+ required_shares = 1,
60
+ nonce_tag_bits = 4,
61
+ scheduler,
62
+ rate_policy,
63
+ freshness_policy,
64
+ log
65
+ }) {
66
+ if (!backend || typeof backend.find_share !== 'function') throw new Error('pow_backend_missing')
67
+ if (typeof get_job !== 'function') throw new Error('pow_get_job_missing')
68
+ if (!scheduler) scheduler = create_pow_scheduler()
69
+ if (!rate_policy) rate_policy = create_rate_policy()
70
+
71
+ const release = scheduler.acquire()
72
+ try {
73
+ const start = Date.now()
74
+ const nonce_tag_value = nonceTagFromMessageHash(message_hash, nonce_tag_bits)
75
+
76
+ const shares = []
77
+ const share_nonces = new Set()
78
+ const push_share = (share) => {
79
+ if (!share) return
80
+ if (typeof share.nonce !== 'string' || typeof share.result !== 'string') return
81
+ const nonce = share.nonce.toLowerCase()
82
+ if (share_nonces.has(nonce)) return
83
+ share_nonces.add(nonce)
84
+ shares.push({ ...share, job_id: String(share.job_id), nonce, result: share.result.toLowerCase() })
85
+ }
86
+
87
+ let did_boost = false
88
+ while (shares.length < required_shares) {
89
+ const job = await get_job()
90
+ if (!job) {
91
+ await new Promise(r => setTimeout(r, 250))
92
+ continue
93
+ }
94
+
95
+ if (freshness_policy && !freshness_policy(job)) {
96
+ if (log) log('pow_stale_local', { jobId: job.job_id })
97
+ await new Promise(r => setTimeout(r, 250))
98
+ continue
99
+ }
100
+
101
+ const elapsed_ms = Date.now() - start
102
+ const active_tasks = scheduler.active_count()
103
+ const { hashes_per_second, time_budget_ms, in_phase1 } = rate_policy({ active_tasks, elapsed_ms })
104
+
105
+ // Optional early boost pass.
106
+ if (in_phase1 && !did_boost && backend.boost_find_share) {
107
+ did_boost = true
108
+ try {
109
+ const share = await backend.boost_find_share({
110
+ job,
111
+ hashes_per_second,
112
+ time_budget_ms: Math.min(4000, time_budget_ms),
113
+ nonce_tag_bits,
114
+ nonce_tag_value,
115
+ log
116
+ })
117
+ if (share) push_share(share)
118
+ if (shares.length >= required_shares) return { job, shares }
119
+ } catch (e) {}
120
+ }
121
+
122
+ let share = null
123
+ try {
124
+ share = await backend.find_share({
125
+ job,
126
+ hashes_per_second,
127
+ time_budget_ms,
128
+ nonce_tag_bits,
129
+ nonce_tag_value,
130
+ log
131
+ })
132
+ } catch (e) {
133
+ if (e && e.message === 'pow_worker_timeout') {
134
+ if (log) log('pow_worker_timeout_retry', { jobId: job.job_id, sliceMs: time_budget_ms, hps: hashes_per_second })
135
+ await new Promise(r => setTimeout(r, in_phase1 ? 100 : 300))
136
+ continue
137
+ }
138
+ throw e
139
+ }
140
+
141
+ if (share) {
142
+ push_share(share)
143
+ if (shares.length >= required_shares) return { job, shares }
144
+ }
145
+
146
+ await new Promise(r => setTimeout(r, in_phase1 ? 50 : 250))
147
+ }
148
+ return null
149
+ } finally {
150
+ release()
151
+ }
152
+ }
153
+
154
+ module.exports = {
155
+ create_pow_scheduler,
156
+ create_rate_policy,
157
+ create_freshness_policy,
158
+ message_challenge
159
+ }
160
+
@@ -0,0 +1,6 @@
1
+ module.exports = {
2
+ ...require('./core'),
3
+ ...require('./backends/native_batch'),
4
+ ...require('./backends/js_hashfn')
5
+ }
6
+
@@ -0,0 +1,2 @@
1
+ module.exports = require('./backends/node_worker')
2
+
@@ -0,0 +1,92 @@
1
+ const { Crypto } = require('kryptokrona-utils')
2
+ const { findShare, nonceMatchesTag } = require('..')
3
+
4
+ const crypto = new Crypto()
5
+
6
+ function logPow(event, data) {
7
+ process.send && process.send({ type: 'log', event, data })
8
+ }
9
+
10
+ async function pow_find_share(job, start_nonce, options = {}) {
11
+ const hashes_per_second = parseInt(options.hashes_per_second, 10)
12
+ const time_budget_ms = parseInt(options.time_budget_ms, 10)
13
+ const max_job_time_ms = parseInt(options.max_job_time_ms, 10)
14
+ const nonceTagBits = parseInt(options.nonceTagBits || '0', 10)
15
+ const nonceTagValue = parseInt(options.nonceTagValue || '0', 10)
16
+ const tag_enabled = Number.isFinite(nonceTagBits) && nonceTagBits > 0
17
+
18
+ let nonce = start_nonce >>> 0
19
+ const start = Date.now()
20
+ while (true) {
21
+ const elapsed = Date.now() - start
22
+ const remaining_ms = time_budget_ms - elapsed
23
+ if (remaining_ms <= 0) return null
24
+ if (max_job_time_ms > 0 && elapsed >= max_job_time_ms) return null
25
+
26
+ const slice_ms = Math.min(1000, remaining_ms)
27
+ const share = await findShare({
28
+ job,
29
+ startNonce: nonce,
30
+ hashesPerSecond: hashes_per_second,
31
+ timeBudgetMs: slice_ms,
32
+ nonceTagBits,
33
+ nonceTagValue,
34
+ hashFn: (blobHex) => crypto.cn_turtle_lite_slow_hash_v2(blobHex),
35
+ log: (event, data) => logPow(event, data)
36
+ })
37
+
38
+ if (share) {
39
+ const nextNonce = (parseInt(share.nonce, 16) + 1) >>> 0
40
+ if (!tag_enabled || nonceMatchesTag(share.nonce, nonceTagValue, nonceTagBits)) {
41
+ return share
42
+ }
43
+ nonce = nextNonce
44
+ continue
45
+ }
46
+
47
+ const slice_attempts = Math.max(1, Math.floor(hashes_per_second * (slice_ms / 1000)))
48
+ nonce = (nonce + slice_attempts) >>> 0
49
+ }
50
+ }
51
+
52
+ async function pow_calculate_shares(job, required_shares = 1, options = {}) {
53
+ if (!job || !job.blob || !job.target) {
54
+ throw new Error('Invalid job data')
55
+ }
56
+ const shares = []
57
+ let nonce = Math.floor(Math.random() * 0xFFFFFFFF)
58
+ const start = Date.now()
59
+ const max_job_time_ms = parseInt(options.max_job_time_ms, 10)
60
+ for (let i = 0; i < required_shares; i++) {
61
+ if (max_job_time_ms > 0 && (Date.now() - start) >= max_job_time_ms) break
62
+ const share = await pow_find_share(job, nonce, options)
63
+ if (share) {
64
+ shares.push(share)
65
+ nonce = parseInt(share.nonce, 16) + 1
66
+ }
67
+ }
68
+ return { job, shares }
69
+ }
70
+
71
+ process.on('message', async (msg) => {
72
+ if (!msg || !msg.type || !msg.req_id) return
73
+ const { type, req_id, payload } = msg
74
+ try {
75
+ if (type === 'find_share') {
76
+ const { job, start_nonce, options } = payload || {}
77
+ const result = await pow_find_share(job, start_nonce, options || {})
78
+ process.send && process.send({ type: 'result', req_id, result })
79
+ return
80
+ }
81
+ if (type === 'calculate_shares') {
82
+ const { job, required_shares, options } = payload || {}
83
+ const result = await pow_calculate_shares(job, required_shares, options || {})
84
+ process.send && process.send({ type: 'result', req_id, result })
85
+ return
86
+ }
87
+ process.send && process.send({ type: 'error', req_id, error: 'unknown_request' })
88
+ } catch (e) {
89
+ process.send && process.send({ type: 'error', req_id, error: e && e.message })
90
+ }
91
+ })
92
+
package/index.js CHANGED
@@ -1,5 +1,40 @@
1
1
  const DEFAULT_NONCE_OFFSET = 39;
2
2
 
3
+ function hexToBytes(hex) {
4
+ if (typeof hex !== 'string') throw new Error('hex_to_bytes_invalid')
5
+ if (hex.length % 2 !== 0) throw new Error('hex_to_bytes_len')
6
+ const out = new Uint8Array(hex.length / 2)
7
+ for (let i = 0; i < out.length; i++) {
8
+ const byte = parseInt(hex.slice(i * 2, (i * 2) + 2), 16)
9
+ if (!Number.isFinite(byte)) throw new Error('hex_to_bytes_parse')
10
+ out[i] = byte
11
+ }
12
+ return out
13
+ }
14
+
15
+ function bytesToHex(bytes) {
16
+ let hex = ''
17
+ for (let i = 0; i < bytes.length; i++) {
18
+ hex += bytes[i].toString(16).padStart(2, '0')
19
+ }
20
+ return hex
21
+ }
22
+
23
+ function readUInt32LE(bytes, offset) {
24
+ return (
25
+ (bytes[offset] |
26
+ (bytes[offset + 1] << 8) |
27
+ (bytes[offset + 2] << 16) |
28
+ (bytes[offset + 3] << 24)) >>> 0
29
+ )
30
+ }
31
+
32
+ function readBigUInt64LE(bytes, offset) {
33
+ const lo = BigInt(readUInt32LE(bytes, offset))
34
+ const hi = BigInt(readUInt32LE(bytes, offset + 4))
35
+ return (hi << 32n) | lo
36
+ }
37
+
3
38
  function readVarint(buffer, offset) {
4
39
  let value = 0;
5
40
  let shift = 0;
@@ -37,21 +72,21 @@ function getNonceOffsetFromBuffer(blobBuffer) {
37
72
  }
38
73
 
39
74
  function getNonceOffset(blobHex) {
40
- const blobBuffer = Buffer.from(blobHex, 'hex');
75
+ const blobBuffer = hexToBytes(blobHex);
41
76
  return getNonceOffsetFromBuffer(blobBuffer);
42
77
  }
43
78
 
44
79
  function insertNonce(blobHex, nonceHex) {
45
- const blobBuffer = Buffer.from(blobHex, 'hex');
46
- const nonceBuffer = Buffer.from(nonceHex, 'hex');
80
+ const blobBuffer = hexToBytes(blobHex);
81
+ const nonceBuffer = hexToBytes(nonceHex);
47
82
  const offset = getNonceOffsetFromBuffer(blobBuffer);
48
- nonceBuffer.copy(blobBuffer, offset);
49
- return { blobHex: blobBuffer.toString('hex'), offset };
83
+ blobBuffer.set(nonceBuffer, offset);
84
+ return { blobHex: bytesToHex(blobBuffer), offset };
50
85
  }
51
86
 
52
87
  function extractPrevIdFromBlob(blobHex) {
53
88
  try {
54
- const blobBuffer = Buffer.from(blobHex, 'hex');
89
+ const blobBuffer = hexToBytes(blobHex);
55
90
  let offset = 0;
56
91
  const major = readVarint(blobBuffer, offset);
57
92
  if (!major) return null;
@@ -63,31 +98,35 @@ function extractPrevIdFromBlob(blobHex) {
63
98
  if (!timestamp) return null;
64
99
  offset += timestamp.bytes;
65
100
  if (offset + 32 > blobBuffer.length) return null;
66
- return blobBuffer.subarray(offset, offset + 32).toString('hex');
101
+ return bytesToHex(blobBuffer.subarray(offset, offset + 32));
67
102
  } catch (e) {
68
103
  return null;
69
104
  }
70
105
  }
71
106
 
72
107
  function nonceToHexLE(nonce) {
73
- const buf = Buffer.alloc(4);
74
- buf.writeUInt32LE(nonce >>> 0, 0);
75
- return buf.toString('hex');
108
+ const n = nonce >>> 0
109
+ const out = new Uint8Array(4)
110
+ out[0] = n & 0xff
111
+ out[1] = (n >>> 8) & 0xff
112
+ out[2] = (n >>> 16) & 0xff
113
+ out[3] = (n >>> 24) & 0xff
114
+ return bytesToHex(out)
76
115
  }
77
116
 
78
117
  function parseTarget(targetHex) {
79
118
  if (!targetHex) return null;
80
- const targetBuffer = Buffer.from(targetHex, 'hex');
81
- if (targetBuffer.length === 4) {
82
- const raw = targetBuffer.readUInt32LE(0);
119
+ const targetBytes = hexToBytes(targetHex)
120
+ if (targetBytes.length === 4) {
121
+ const raw = readUInt32LE(targetBytes, 0);
83
122
  if (raw === 0) return null;
84
123
  const numerator = 0xFFFFFFFFFFFFFFFFn;
85
124
  const denom = 0xFFFFFFFFn / BigInt(raw);
86
125
  if (denom === 0n) return null;
87
126
  return numerator / denom;
88
127
  }
89
- if (targetBuffer.length === 8) {
90
- return targetBuffer.readBigUInt64LE(0);
128
+ if (targetBytes.length === 8) {
129
+ return readBigUInt64LE(targetBytes, 0);
91
130
  }
92
131
  return null;
93
132
  }
@@ -95,9 +134,9 @@ function parseTarget(targetHex) {
95
134
  function meetsTarget(hashHex, targetHex) {
96
135
  const target = parseTarget(targetHex);
97
136
  if (target === null) return true;
98
- const hash = Buffer.from(hashHex, 'hex');
99
- if (hash.length < 32) return false;
100
- const hashTail = hash.readBigUInt64LE(24);
137
+ const hashBytes = hexToBytes(hashHex);
138
+ if (hashBytes.length < 32) return false;
139
+ const hashTail = readBigUInt64LE(hashBytes, 24);
101
140
  return hashTail <= target;
102
141
  }
103
142
 
@@ -114,10 +153,9 @@ function nonceTagFromMessageHash(messageHash, bits) {
114
153
  const b = typeof bits === 'number' ? bits : 0;
115
154
  if (b <= 0 || b > 16) return 0;
116
155
  const mask = (1 << b) - 1;
117
- const digest0 = require('crypto')
118
- .createHash('sha256')
119
- .update(String(messageHash))
120
- .digest()[0];
156
+ // Isomorphic sha256 (works in Node + React Native).
157
+ const { sha256 } = require('js-sha256')
158
+ const digest0 = sha256.array(String(messageHash))[0]
121
159
  return digest0 & mask;
122
160
  } catch (e) {
123
161
  return 0;
@@ -129,7 +167,9 @@ function nonceMatchesTag(nonceHex, tagValue, bits) {
129
167
  const b = typeof bits === 'number' ? bits : 0;
130
168
  if (b <= 0) return true;
131
169
  const mask = (1 << b) - 1;
132
- const nonce = parseInt(nonceHex, 16) >>> 0;
170
+ // Tag is computed over the actual 32-bit nonce value inserted into the blob (little-endian).
171
+ const nonceBytes = hexToBytes(nonceHex)
172
+ const nonce = readUInt32LE(nonceBytes, 0)
133
173
  return (nonce & mask) === (tagValue & mask);
134
174
  }
135
175
 
@@ -192,5 +232,8 @@ module.exports = {
192
232
  extractPrevIdFromBlob,
193
233
  nonceTagFromMessageHash,
194
234
  nonceMatchesTag,
195
- findShare
235
+ findShare,
236
+ // helpers for embedders/backends
237
+ hexToBytes,
238
+ bytesToHex
196
239
  };
package/package.json CHANGED
@@ -1,8 +1,22 @@
1
1
  {
2
2
  "name": "hugin-utils",
3
- "version": "0.1.4",
3
+ "version": "0.2.0",
4
4
  "description": "PoW utils for hugin node messages",
5
5
  "main": "index.js",
6
+ "scripts": {
7
+ "test": "node test/challenge.test.cjs"
8
+ },
9
+ "dependencies": {
10
+ "js-sha256": "^0.11.0"
11
+ },
12
+ "peerDependencies": {
13
+ "kryptokrona-utils": "^1.3.7"
14
+ },
15
+ "peerDependenciesMeta": {
16
+ "kryptokrona-utils": {
17
+ "optional": true
18
+ }
19
+ },
6
20
  "license": "MIT",
7
21
  "author": "n9lsjr"
8
22
  }
@@ -0,0 +1,67 @@
1
+ const assert = require('assert')
2
+
3
+ const {
4
+ getNonceOffset,
5
+ insertNonce,
6
+ meetsTarget,
7
+ nonceTagFromMessageHash,
8
+ nonceMatchesTag,
9
+ findShare
10
+ } = require('..')
11
+
12
+ async function test_insert_nonce_defaults() {
13
+ const blobHex = '00'.repeat(100)
14
+ const nonceHex = '01020304'
15
+ const { blobHex: outHex, offset } = insertNonce(blobHex, nonceHex)
16
+ assert.strictEqual(offset, getNonceOffset(blobHex))
17
+ assert.strictEqual(offset, 39)
18
+ const inserted = outHex.slice(offset * 2, (offset * 2) + 8)
19
+ assert.strictEqual(inserted, nonceHex)
20
+ }
21
+
22
+ function test_meets_target_64bit() {
23
+ const targetHex = '0000000000000000' // 0
24
+ const okHash = '00'.repeat(32)
25
+ const badHash = '00'.repeat(24) + '0100000000000000' // tail=1 (LE)
26
+ assert.strictEqual(meetsTarget(okHash, targetHex), true)
27
+ assert.strictEqual(meetsTarget(badHash, targetHex), false)
28
+ }
29
+
30
+ async function test_nonce_tag_roundtrip() {
31
+ const bits = 4
32
+ const messageHash = 'test-message-hash'
33
+ const tagValue = nonceTagFromMessageHash(messageHash, bits)
34
+
35
+ const job = {
36
+ job_id: '1',
37
+ blob: '00'.repeat(100),
38
+ target: 'ffffffff'
39
+ }
40
+
41
+ const share = await findShare({
42
+ job,
43
+ startNonce: 0,
44
+ nonceTagBits: bits,
45
+ nonceTagValue: tagValue,
46
+ hashesPerSecond: 10_000,
47
+ timeBudgetMs: 50,
48
+ hashFn: async () => '00'.repeat(32)
49
+ })
50
+
51
+ assert.ok(share, 'expected share')
52
+ assert.strictEqual(share.job_id, job.job_id)
53
+ assert.ok(nonceMatchesTag(share.nonce, tagValue, bits), 'share nonce must match nonce-tag')
54
+ }
55
+
56
+ async function run() {
57
+ await test_insert_nonce_defaults()
58
+ test_meets_target_64bit()
59
+ await test_nonce_tag_roundtrip()
60
+ console.log('ok')
61
+ }
62
+
63
+ run().catch((e) => {
64
+ console.error(e)
65
+ process.exit(1)
66
+ })
67
+