nappup 1.0.8 → 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,7 @@ export function parseArgs (args) {
  let sk = null
  let dTag = null
  let channel = null
+ let shouldReupload = false

  for (let i = 0; i < args.length; i++) {
  if (args[i] === '-s' && args[i + 1]) {
@@ -24,6 +25,8 @@ export function parseArgs (args) {
  channel = 'next'
  } else if (args[i] === '--draft' && channel === null) {
  channel = 'draft'
+ } else if (args[i] === '-r') {
+ shouldReupload = true
  } else if (!args[i].startsWith('-') && dir === null) {
  dir = args[i]
  }
@@ -33,7 +36,8 @@ export function parseArgs (args) {
  dir: path.resolve(dir ?? '.'),
  sk,
  dTag,
- channel: channel || 'main'
+ channel: channel || 'main',
+ shouldReupload
  }
  }

@@ -11,7 +11,7 @@ import toApp from '#index.js'
  const args = parseArgs(process.argv.slice(2))
  await confirmArgs(args)

- const { dir, sk, dTag, channel } = args
+ const { dir, sk, dTag, channel, shouldReupload } = args
  const fileList = await toFileList(getFiles(dir), dir)

- await toApp(fileList, await NostrSigner.create(sk), { log: console.log.bind(console), dTag, channel })
+ await toApp(fileList, await NostrSigner.create(sk), { log: console.log.bind(console), dTag, channel, shouldReupload })
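The new `-r` flag wires a `shouldReupload` option from the CLI through to `toApp`. A minimal sketch of the parsed result, assuming `parseArgs` is imported from the bin module shown above (the exact file path is not named in this diff):

```js
// Hypothetical import path; the diff does not show the module's filename.
import { parseArgs } from './parse-args.js'

// nappup <dir> [-s <secret>] [--next | --draft] [-r]
const args = parseArgs(['./dist', '--next', '-r'])
// args.channel === 'next', args.shouldReupload === true,
// args.dir resolves to the absolute path of ./dist
```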
package/package.json CHANGED
@@ -6,7 +6,7 @@
  "url": "git+https://github.com/44billion/nappup.git"
  },
  "license": "GPL-3.0-or-later",
- "version": "1.0.8",
+ "version": "1.0.11",
  "description": "Nostr App Uploader",
  "type": "module",
  "scripts": {
@@ -17,10 +17,12 @@
  "nappup": "bin/nappup/index.js"
  },
  "dependencies": {
+ "@noble/curves": "^2.0.0",
+ "@noble/hashes": "^2.0.0",
  "dotenv": "^17.2.0",
  "file-type": "^21.0.0",
  "mime-types": "^3.0.1",
- "nmmr": "^1.0.4",
+ "nmmr": "^1.0.9",
  "nostr-tools": "^2.15.0"
  },
  "devDependencies": {
@@ -1,4 +1,4 @@
- import { bytesToBase36 } from '#helpers/base36.js'
+ import { bytesToBase36, isBase36 } from '#helpers/base36.js'

  // 63 - (1<channel> + 5<b36loggeduserpkslug> 50<b36pk>)
  // <b36loggeduserpkslug> pk chars at positions [7][17][27][37][47]
@@ -6,10 +6,10 @@ import { bytesToBase36 } from '#helpers/base36.js'
  export const NOSTR_APP_D_TAG_MAX_LENGTH = 7

  export function isNostrAppDTagSafe (string) {
- return isSubdomainSafe(string) && string.length <= NOSTR_APP_D_TAG_MAX_LENGTH
+ return string.length > 0 && string.length <= NOSTR_APP_D_TAG_MAX_LENGTH && isBase36(string)
  }

- function isSubdomainSafe (string) {
+ export function isSubdomainSafe (string) {
  return /(?:^[a-z0-9]$)|(?:^(?!.*--)[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$)/.test(string)
  }

@@ -5,6 +5,12 @@ export const BASE36_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'
  const BASE = BigInt(BASE36_ALPHABET.length)
  const LEADER = BASE36_ALPHABET[0]
  const CHAR_MAP = new Map([...BASE36_ALPHABET].map((char, index) => [char, BigInt(index)]))
+ const BASE36_REGEX = /^[0-9a-z]+$/
+
+ export function isBase36 (str) {
+ if (typeof str !== 'string') return false
+ return BASE36_REGEX.test(str)
+ }

  export function bytesToBase36 (bytes, padLength = 0) {
  return base16ToBase36(bytesToBase16(bytes), padLength)
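A short sketch of how the tightened d-tag validation behaves, using the exports shown above and the package's `#helpers/*` import aliases:

```js
import { isNostrAppDTagSafe, isSubdomainSafe } from '#helpers/app.js'
import { isBase36 } from '#helpers/base36.js'

// d tags must now be 1-7 base36 characters, not merely subdomain-safe.
isNostrAppDTagSafe('myapp')    // true  (5 chars, all [0-9a-z])
isNostrAppDTagSafe('My-App')   // false (uppercase and '-' are not base36)
isNostrAppDTagSafe('12345678') // false (exceeds NOSTR_APP_D_TAG_MAX_LENGTH = 7)

isBase36('abc123')        // true
isSubdomainSafe('my-app') // true; now exported for reuse elsewhere
```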
@@ -1,7 +1,7 @@
- export const ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
- const BASE = BigInt(ALPHABET.length)
- const LEADER = ALPHABET[0]
- const CHAR_MAP = new Map([...ALPHABET].map((char, index) => [char, BigInt(index)]))
+ export const BASE62_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ const BASE = BigInt(BASE62_ALPHABET.length)
+ const LEADER = BASE62_ALPHABET[0]
+ const CHAR_MAP = new Map([...BASE62_ALPHABET].map((char, index) => [char, BigInt(index)]))

  export function bytesToBase62 (bytes, padLength = 0) {
  if (bytes.length === 0) return ''.padStart(padLength, LEADER)
@@ -16,7 +16,7 @@ export function bytesToBase62 (bytes, padLength = 0) {

  while (num > 0n) {
  const remainder = num % BASE
- result = ALPHABET[Number(remainder)] + result
+ result = BASE62_ALPHABET[Number(remainder)] + result
  num = num / BASE
  }

@@ -0,0 +1,32 @@
+ import { schnorr } from '@noble/curves/secp256k1.js'
+ import { sha256 } from '@noble/hashes/sha2.js'
+ import { bytesToBase16, base16ToBytes } from '#helpers/base16.js'
+ import { getPublicKey } from 'nostr-tools/pure'
+
+ function serializeEvent (event) {
+ return JSON.stringify([
+ 0,
+ event.pubkey,
+ event.created_at,
+ event.kind,
+ event.tags,
+ event.content
+ ])
+ }
+
+ function getEventHash (event) {
+ return sha256(new TextEncoder().encode(serializeEvent(event)))
+ }
+
+ function getSignature (eventHash, privkey) {
+ return bytesToBase16(schnorr.sign(eventHash, privkey))
+ }
+
+ export function finalizeEvent (event, privkey, withSig = true) {
+ event.pubkey ??= getPublicKey(privkey)
+ const eventHash = event.id ? base16ToBytes(event.id) : getEventHash(event)
+ event.id ??= bytesToBase16(eventHash)
+ if (withSig) event.sig ??= getSignature(eventHash, privkey)
+ else delete event.sig
+ return event
+ }
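A minimal usage sketch for the new local `finalizeEvent` helper. The secret-key generation below is illustrative only (this diff itself only imports `getPublicKey` from `nostr-tools/pure`); any 32-byte secret key works:

```js
import { generateSecretKey } from 'nostr-tools/pure' // illustrative; not part of this diff
import { finalizeEvent } from '#helpers/nip01.js'

const sk = generateSecretKey() // Uint8Array of 32 bytes

const event = finalizeEvent({
  kind: 1,
  created_at: Math.floor(Date.now() / 1000),
  tags: [],
  content: 'hello'
}, sk)
// event.pubkey, event.id (sha256 of the NIP-01 serialization) and event.sig are now set.

// withSig = false keeps pubkey/id but strips any existing signature.
const unsigned = finalizeEvent({ ...event }, sk, false)
```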
@@ -1,45 +1,56 @@
  import { bytesToBase16, base16ToBytes } from '#helpers/base16.js'
- import { bytesToBase62, base62ToBytes, ALPHABET as base62Alphabet } from '#helpers/base62.js'
+ import { bytesToBase62, base62ToBytes, BASE62_ALPHABET } from '#helpers/base62.js'
  import { isNostrAppDTagSafe } from '#helpers/app.js'

  const MAX_SIZE = 5000
- export const BASE62_ENTITY_REGEX = new RegExp(`^app-[${base62Alphabet}]{,${MAX_SIZE}}$`)
- export const kindByChannel = {
+ export const NAPP_ENTITY_REGEX = new RegExp(`^\\+{1,3}[${BASE62_ALPHABET}]{48,${MAX_SIZE}}$`)
+ const textEncoder = new TextEncoder()
+ const textDecoder = new TextDecoder()
+
+ const kindByChannel = {
  main: 37448,
  next: 37449,
  draft: 37450
  }
- const channelEnum = Object.keys(kindByChannel)
- const textEncoder = new TextEncoder()
- const textDecoder = new TextDecoder()
-
+ const channelByKind = Object.fromEntries(
+ Object.entries(kindByChannel).map(([k, v]) => [v, k])
+ )
+ const prefixByChannel = {
+ main: '+',
+ next: '++',
+ draft: '+++'
+ }
+ const channelByPrefix = Object.fromEntries(
+ Object.entries(prefixByChannel).map(([k, v]) => [v, k])
+ )
  export function appEncode (ref) {
  if (!isNostrAppDTagSafe(ref.dTag)) { throw new Error('Invalid deduplication tag') }
- const channelIndex = Object.entries(kindByChannel)
- .findIndex(([k, v]) => ref.channel ? k === ref.channel : v === ref.kind)
- if (channelIndex === -1) throw new Error('Wrong channel')
+ const channel = ref.channel ? (prefixByChannel[ref.channel] && ref.channel) : channelByKind[ref.kind]
+ if (!channel) throw new Error('Wrong channel')
  const tlv = toTlv([
  [textEncoder.encode(ref.dTag)], // type 0 (the array index)
  (ref.relays || []).map(url => textEncoder.encode(url)), // type 1
- [base16ToBytes(ref.pubkey)], // type 2
- [uintToBytes(channelIndex)] // type 3
+ [base16ToBytes(ref.pubkey)] // type 2
  ])
  const base62 = bytesToBase62(tlv)
- return `app-${base62}`
+ const prefix = prefixByChannel[channel]
+ return `${prefix}${base62}`
  }

  export function appDecode (entity) {
- const [, base62] = entity.split('-')
+ const prefix = entity.match(/^\+*/)[0]
+ const channel = channelByPrefix[prefix]
+ if (!channel) throw new Error('Invalid channel')
+ const base62 = entity.slice(prefix.length)
  const tlv = tlvToObj(base62ToBytes(base62))
  if (!tlv[0]?.[0]) throw new Error('Missing deduplication tag')
  if (!tlv[2]?.[0]) throw new Error('Missing author pubkey')
  if (tlv[2][0].length !== 32) throw new Error('Author pubkey should be 32 bytes')
- if (!tlv[3]?.[0]) throw new Error('Missing channel enum')
- if (tlv[3][0].length !== 1) throw new Error('Channel enum should be 1 byte')
+ const dTag = textDecoder.decode(tlv[0][0])
+ if (!isNostrAppDTagSafe(dTag)) { throw new Error('Invalid deduplication tag') }

- const channel = channelEnum[parseInt(tlv[3][0])]
  return {
- dTag: textDecoder.decode(tlv[0][0]),
+ dTag,
  pubkey: bytesToBase16(tlv[2][0]),
  kind: kindByChannel[channel],
  channel,
@@ -47,12 +58,6 @@ export function appDecode (entity) {
  }
  }

- // Return shortest uint8Array size (not fixed size)
- function uintToBytes (n, bytes = []) {
- do { bytes.unshift(n & 255) } while ((n >>= 8) > 0)
- return new Uint8Array(bytes)
- }
-
  function toTlv (tlvConfig) {
  const arrays = []
  tlvConfig
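A round-trip sketch for the new prefix-based entity format, using only the exports visible above; the hex pubkey is a placeholder:

```js
import { appEncode, appDecode } from '#helpers/nip19.js'

const ref = {
  dTag: 'myapp',
  pubkey: 'a'.repeat(64),              // placeholder 32-byte pubkey in hex
  channel: 'next',                     // 'main' -> '+', 'next' -> '++', 'draft' -> '+++'
  relays: ['wss://relay.example.com']
}

const entity = appEncode(ref)          // '++' followed by the base62-encoded TLV
const decoded = appDecode(entity)
// decoded.dTag === 'myapp', decoded.channel === 'next', decoded.kind === 37449
```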
package/src/index.js CHANGED
@@ -1,6 +1,6 @@
  import NMMR from 'nmmr'
  import { appEncode } from '#helpers/nip19.js'
- import Base122Encoder from '#services/base122-encoder.js'
+ import Base93Encoder from '#services/base93-encoder.js'
  import nostrRelays from '#services/nostr-relays.js'
  import NostrSigner from '#services/nostr-signer.js'
  import { streamToChunks } from '#helpers/stream.js'
@@ -14,37 +14,41 @@ export default async function (...args) {
  }
  }

- export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, channel = 'main' } = {}) {
+ export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, channel = 'main', shouldReupload = false } = {}) {
  if (!nostrSigner && typeof window !== 'undefined') nostrSigner = window.nostr
  if (!nostrSigner) throw new Error('No Nostr signer found')
  if (typeof window !== 'undefined' && nostrSigner === window.nostr) {
  nostrSigner.getRelays = NostrSigner.prototype.getRelays
  }
+ const writeRelays = (await nostrSigner.getRelays()).write
+ log(`Found ${writeRelays.length} outbox relays for pubkey ${nostrSigner.getPublicKey()}:\n${writeRelays.join(', ')}`)
+ if (writeRelays.length === 0) throw new Error('No outbox relays found')

  if (typeof dTag === 'string') {
  if (!isNostrAppDTagSafe(dTag)) throw new Error('dTag should be [A-Za-z0-9] with length ranging from 1 to 19')
  } else {
  dTag = fileList[0].webkitRelativePath.split('/')[0].trim()
- if (!isNostrAppDTagSafe(dTag)) dTag = deriveNostrAppDTag(dTag || Math.random().toString(36))
+ if (!isNostrAppDTagSafe(dTag)) dTag = await deriveNostrAppDTag(dTag || Math.random().toString(36))
  }
  let nmmr
  const fileMetadata = []

  log(`Processing ${fileList.length} files`)
+ let pause = 1000
  for (const file of fileList) {
  nmmr = new NMMR()
  const stream = file.stream()

  let chunkLength = 0
- for await (const chunk of streamToChunks(stream, 54600)) {
+ for await (const chunk of streamToChunks(stream, 51000)) {
  chunkLength++
- nmmr.append(chunk)
+ await nmmr.append(chunk)
  }
  if (chunkLength) {
  // remove root dir
  const filename = file.webkitRelativePath.split('/').slice(1).join('/')
  log(`Uploading ${chunkLength} file parts of ${filename}`)
- await uploadBinaryDataChunks(nmmr, nostrSigner, { mimeType: file.type || 'application/octet-stream' })
+ ;({ pause } = (await uploadBinaryDataChunks({ nmmr, signer: nostrSigner, filename, chunkLength, log, pause, mimeType: file.type || 'application/octet-stream', shouldReupload })))
  fileMetadata.push({
  rootHash: nmmr.getRoot(),
  filename,
@@ -54,7 +58,7 @@ export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, chan
  }

  log(`Uploading bundle #${dTag}`)
- const bundle = await uploadBundle(dTag, channel, fileMetadata, nostrSigner)
+ const bundle = await uploadBundle({ dTag, channel, fileMetadata, signer: nostrSigner, pause })

  const appEntity = appEncode({
  dTag: bundle.tags.find(v => v[0] === 'd')[1],
@@ -65,43 +69,105 @@ export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, chan
  log(`Visit at https://44billion.net/${appEntity}`)
  }

- async function uploadBinaryDataChunks (nmmr, signer, { mimeType } = {}) {
+ async function uploadBinaryDataChunks ({ nmmr, signer, filename, chunkLength, log, pause = 0, mimeType, shouldReupload = false }) {
  const writeRelays = (await signer.getRelays()).write
+ let chunkIndex = 0
  for await (const chunk of nmmr.getChunks()) {
  const dTag = chunk.x
  const currentCtag = `${chunk.rootX}:${chunk.index}`
- const prevCTags = await getPreviousCtags(dTag, currentCtag, writeRelays, signer)
+ const { otherCtags, hasCurrentCtag } = await getPreviousCtags(dTag, currentCtag, writeRelays, signer)
+ if (!shouldReupload && hasCurrentCtag) {
+ log(`${filename}: Skipping chunk ${++chunkIndex} of ${chunkLength} (already uploaded)`)
+ continue
+ }
  const binaryDataChunk = {
  kind: 34600,
  tags: [
  ['d', dTag],
- ...prevCTags,
+ ...otherCtags,
  ['c', currentCtag, chunk.length, ...chunk.proof],
  ...(mimeType ? [['m', mimeType]] : [])
  ],
- // These chunks already have the expected size of 54600 bytes
- content: new Base122Encoder().update(chunk.contentBytes).getEncoded(),
+ // These chunks already have the expected size of 51000 bytes
+ content: new Base93Encoder().update(chunk.contentBytes).getEncoded(),
  created_at: Math.floor(Date.now() / 1000)
  }

  const event = await signer.signEvent(binaryDataChunk)
- await nostrRelays.sendEvent(event, writeRelays)
+ log(`${filename}: Uploading file part ${++chunkIndex} of ${chunkLength} to ${writeRelays.length} relays`)
+ ;({ pause } = (await throttledSendEvent(event, writeRelays, { pause, log, trailingPause: true })))
  }
+ return { pause }
+ }
+
+ async function throttledSendEvent (event, relays, {
+ pause, log,
+ retries = 0, maxRetries = 10,
+ minSuccessfulRelays = 1,
+ leadingPause = false, trailingPause = false
+ }) {
+ if (pause && leadingPause) await new Promise(resolve => setTimeout(resolve, pause))
+ if (retries > 0) log(`Retrying upload to ${relays.length} relays: ${relays.join(', ')}`)
+
+ const { errors } = (await nostrRelays.sendEvent(event, relays, 15000))
+ if (errors.length === 0) {
+ if (pause && trailingPause) await new Promise(resolve => setTimeout(resolve, pause))
+ return { pause }
+ }
+
+ const [rateLimitErrors, unretryableErrors] =
+ errors.reduce((r, v) => {
+ if ((v.reason?.message ?? '').startsWith('rate-limited:')) r[0].push(v)
+ else r[1].push(v)
+ return r
+ }, [[], []])
+ log(`${unretryableErrors.length} Unretryable errors\n: ${unretryableErrors.map(v => `${v.relay}: ${v.reason.message}`).join('; ')}`)
+ const unretryableErrorsLength = errors.length - rateLimitErrors.length
+ const maybeSuccessfulRelays = relays.length - unretryableErrorsLength
+ const hasReachedMaxRetries = retries > maxRetries
+ if (
+ hasReachedMaxRetries ||
+ maybeSuccessfulRelays < minSuccessfulRelays
+ ) throw new Error(errors.map(v => `\n${v.relay}: ${v.reason}`).join('\n'))
+
+ if (rateLimitErrors.length === 0) {
+ if (pause && trailingPause) await new Promise(resolve => setTimeout(resolve, pause))
+ return { pause }
+ }
+
+ const erroedRelays = rateLimitErrors.map(v => v.relay)
+ log(`Rate limited by ${erroedRelays.length} relays, pausing for ${pause + 2000} ms`)
+ await new Promise(resolve => setTimeout(resolve, (pause += 2000)))
+
+ minSuccessfulRelays = Math.max(0, minSuccessfulRelays - (relays.length - erroedRelays.length))
+ return await throttledSendEvent(event, erroedRelays, {
+ pause, log, retries: ++retries, maxRetries, minSuccessfulRelays, leadingPause: false, trailingPause
+ })
  }

  async function getPreviousCtags (dTagValue, currentCtagValue, writeRelays, signer) {
- const storedEvents = await nostrRelays.getEvents({
+ const storedEvents = (await nostrRelays.getEvents({
  kinds: [34600],
  authors: [await signer.getPublicKey()],
  '#d': [dTagValue],
  limit: 1
- }, writeRelays)
- if (storedEvents.length === 0) return []
+ }, writeRelays)).result
+
+ let hasCurrentCtag = false
+ const hasEvent = storedEvents.length > 0
+ if (!hasEvent) return { otherCtags: [], hasEvent, hasCurrentCtag }

  const cTagValues = { [currentCtagValue]: true }
  const prevTags = storedEvents.sort((a, b) => b.created_at - a.created_at)[0].tags
- if (!Array.isArray(prevTags)) return []
- return prevTags
+ if (!Array.isArray(prevTags)) return { otherCtags: [], hasEvent, hasCurrentCtag }
+
+ hasCurrentCtag = prevTags.some(tag =>
+ Array.isArray(tag) &&
+ tag[0] === 'c' &&
+ tag[1] === currentCtagValue
+ )
+
+ const otherCtags = prevTags
  .filter(v => {
  const isCTag =
  Array.isArray(v) &&
@@ -114,9 +180,11 @@ async function getPreviousCtags (dTagValue, currentCtagValue, writeRelays, signe
  cTagValues[v[1]] = true
  return isCTag && isntDuplicate
  })
+
+ return { otherCtags, hasEvent, hasCurrentCtag }
  }

- async function uploadBundle (dTag, channel, fileMetadata, signer) {
+ async function uploadBundle ({ dTag, channel, fileMetadata, signer, pause = 0 }) {
  const kind = {
  main: 37448, // stable
  next: 37449, // insider
@@ -132,6 +200,6 @@ async function uploadBundle (dTag, channel, fileMetadata, signer) {
  created_at: Math.floor(Date.now() / 1000)
  }
  const event = await signer.signEvent(appBundle)
- await nostrRelays.sendEvent(event, (await signer.getRelays()).write)
+ await throttledSendEvent(event, (await signer.getRelays()).write, { pause, trailingPause: true })
  return event
  }
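To summarize the reworked upload flow, a hedged sketch of calling the exported `toApp` with the new option; the `fileList` construction and secret-key handling are outside this diff and appear here only as parameters:

```js
import { toApp } from '#index.js'               // named export declared above
import NostrSigner from '#services/nostr-signer.js'

// fileList: File-like objects exposing webkitRelativePath and stream()
async function publish (fileList, sk) {
  const signer = await NostrSigner.create(sk)
  return toApp(fileList, signer, {
    log: console.log.bind(console),
    channel: 'draft',       // kind 37450
    shouldReupload: false   // leave false to skip chunks whose c-tag already exists on the relays
  })
}
```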
@@ -0,0 +1,107 @@
+ // https://github.com/ticlo/arrow-code/blob/master/src/base93.ts
+ // https://github.com/ticlo/arrow-code/blob/master/LICENSE - Apache 2.0
+
+ // JSON-safe (space included; " and \ excluded)
+ const BASE93_ALPHABET =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'()*+,-./:;<=>?@[]^_`{|}~ "
+
+ const DECODING_TABLE = (() => {
+ const out = new Int16Array(128)
+ out.fill(93) // sentinel = invalid
+ for (let i = 0; i < 93; i++) out[BASE93_ALPHABET.charCodeAt(i)] = i
+ return out
+ })()
+
+ /**
+ * Decode Base93 string to Uint8Array
+ * @param {string} str - The Base93 encoded string to decode
+ * @param {number} [offset=0] - The starting position in the string
+ * @param {number} [length=-1] - The number of characters to decode, or -1 for all remaining
+ * @returns {Uint8Array} The decoded bytes
+ */
+ export function decode (str, offset = 0, length = -1) {
+ let end = offset + length
+ if (length < 0 || end > str.length) end = str.length
+
+ // Over-allocate; we’ll trim at the end
+ const out = new Uint8Array(Math.ceil((end - offset) * 7 / 8))
+
+ let dbq = 0
+ let dn = 0
+ let dv = -1
+ let pos = 0
+
+ for (let i = offset; i < end; i++) {
+ const code = str.charCodeAt(i)
+ if (code > 126) continue // ignore non-ASCII
+ const v = DECODING_TABLE[code]
+ if (v === 93) continue // ignore invalids
+ if (dv === -1) {
+ dv = v
+ } else {
+ const t = dv + v * 93
+ dv = -1
+ dbq |= t << dn
+ dn += ((t & 0x1fff) > 456 ? 13 : 14)
+ while (dn > 7) {
+ out[pos++] = dbq & 0xff
+ dbq >>>= 8
+ dn -= 8
+ }
+ }
+ }
+
+ if (dv !== -1) {
+ out[pos++] = (dbq | (dv << dn)) & 0xff
+ }
+ return out.subarray(0, pos)
+ }
+
+ export default class Base93Decoder {
+ constructor (source, { mimeType = '', preferTextStreamDecoding = false } = {}) {
+ this.sourceIterator = source?.[Symbol.iterator]?.() || source?.[Symbol.asyncIterator]?.() || source()
+ this.asTextStream = preferTextStreamDecoding && mimeType.startsWith('text/')
+ if (this.asTextStream) this.textDecoder = new TextDecoder()
+ }
+
+ // decoder generator
+ * [Symbol.iterator] (base93String) {
+ if (this.asTextStream) {
+ while (base93String) {
+ // stream=true avoids cutting a multi-byte character
+ base93String = yield this.textDecoder.decode(decode(base93String), { stream: true })
+ }
+ } else {
+ while (base93String) {
+ base93String = yield decode(base93String)
+ }
+ }
+ }
+
+ // Gets the decoded data.
+ getDecoded () { return iteratorToStream(this, this.sourceIterator) }
+ }
+
+ function iteratorToStream (decoder, sourceIterator) {
+ return new ReadableStream({
+ decoderIterator: null,
+ async start (controller) {
+ const { value: chunk, done } = await sourceIterator.next()
+ if (done) return controller.close()
+
+ // Pass first chunk when instantiating the decoder generator
+ this.decoderIterator = decoder[Symbol.iterator](chunk)
+ const { value } = this.decoderIterator.next()
+ if (value) controller.enqueue(value)
+ },
+ async pull (controller) {
+ if (!this.decoderIterator) return
+
+ const { value: chunk, done: sourceDone } = await sourceIterator.next()
+ const { value, done } = this.decoderIterator.next(chunk)
+
+ if (value) controller.enqueue(value)
+ if (done || sourceDone) controller.close()
+ }
+ })
+ }
@@ -0,0 +1,96 @@
+ // https://github.com/ticlo/arrow-code/blob/master/src/base93.ts
+ // https://github.com/ticlo/arrow-code/blob/master/LICENSE - Apache 2.0
+
+ // JSON-safe (space included; " and \ excluded)
+ const BASE93_ALPHABET =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'()*+,-./:;<=>?@[]^_`{|}~ "
+ const ENCODING_TABLE = (() => {
+ const out = new Uint16Array(93)
+ for (let i = 0; i < 93; i++) out[i] = BASE93_ALPHABET.charCodeAt(i)
+ return out
+ })()
+
+ function codesToString (codes, len) {
+ const CHUNK = 16384 // 16k chars per slice
+ let s = ''
+ for (let i = 0; i < len; i += CHUNK) {
+ const end = i + CHUNK < len ? i + CHUNK : len
+ s += String.fromCharCode.apply(
+ null,
+ Array.prototype.slice.call(codes, i, end)
+ )
+ }
+ return s
+ }
+
+ export default class Base93Encoder {
+ constructor (prefix = '') {
+ // bit reservoir
+ this._ebq = 0 // queued bits
+ this._en = 0 // number of bits in ebq
+
+ // output parts
+ this._parts = []
+ this._finished = false
+
+ if (prefix) this._parts.push(prefix)
+ }
+
+ // Stream bytes; keeps reservoir across calls.
+ update (bytes) {
+ if (this._finished) throw new Error('Encoder already finalized.')
+ const src = bytes instanceof Uint8Array ? bytes : Uint8Array.from(bytes)
+
+ // Over-allocate for this update; we’ll trim to 'pos'
+ const outCodes = new Uint16Array(Math.ceil(src.length * 8 / 6.5) + 4)
+ let pos = 0
+
+ let ebq = this._ebq
+ let en = this._en
+ let ev = 0
+
+ for (let i = 0; i < src.length; i++) {
+ ebq |= (src[i] & 0xff) << en
+ en += 8
+ if (en > 13) {
+ ev = ebq & 0x1fff
+ if (ev > 456) {
+ ebq >>>= 13
+ en -= 13
+ } else {
+ ev = ebq & 0x3fff
+ ebq >>>= 14
+ en -= 14
+ }
+ outCodes[pos++] = ENCODING_TABLE[ev % 93]
+ outCodes[pos++] = ENCODING_TABLE[(ev / 93) | 0]
+ }
+ }
+
+ // persist reservoir
+ this._ebq = ebq
+ this._en = en
+
+ if (pos) this._parts.push(codesToString(outCodes, pos))
+ return this
+ }
+
+ // Finalize on first call: flush trailing partial block, join, lock.
+ getEncoded () {
+ if (!this._finished) {
+ if (this._en > 0) {
+ const outCodes = new Uint16Array(2)
+ let pos = 0
+ outCodes[pos++] = ENCODING_TABLE[this._ebq % 93]
+ if (this._en > 7 || this._ebq > 92) {
+ outCodes[pos++] = ENCODING_TABLE[(this._ebq / 93) | 0]
+ }
+ this._parts.push(codesToString(outCodes, pos))
+ }
+ this._finished = true
+ this._ebq = 0
+ this._en = 0
+ }
+ return this._parts.join('')
+ }
+ }
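A quick round-trip check for the vendored Base93 codec, assuming the `#services/*` aliases used elsewhere in the package:

```js
import Base93Encoder from '#services/base93-encoder.js'
import { decode } from '#services/base93-decoder.js'

const bytes = new TextEncoder().encode('hello base93')
const encoded = new Base93Encoder().update(bytes).getEncoded()
// `encoded` is JSON-safe ASCII (no double quote or backslash), so it can sit directly in an event's content field.

const roundTripped = decode(encoded)
console.log(new TextDecoder().decode(roundTripped)) // 'hello base93'
```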
@@ -10,10 +10,10 @@ export const seedRelays = [
  'wss://indexer.coracle.social'
  ]
  export const freeRelays = [
- 'wss://relay.damus.io',
- 'wss://relay.nostr.band',
+ 'wss://relay.primal.net',
  'wss://nos.lol',
- 'wss://relay.primal.net'
+ 'wss://relay.damus.io',
+ 'wss://relay.nostr.band'
  ]

  // Interacts with Nostr relays.
@@ -27,7 +27,10 @@ export class NostrRelays {
  if (this.#relays.has(url)) {
  clearTimeout(this.#relayTimeouts.get(url))
  this.#relayTimeouts.set(url, maybeUnref(setTimeout(() => this.disconnect(url), this.#timeout)))
- return this.#relays.get(url)
+ const relay = this.#relays.get(url)
+ // reconnect if needed to avoid SendingOnClosedConnection errors
+ await relay.connect()
+ return relay
  }

  const relay = new Relay(url)
@@ -44,7 +47,7 @@ export class NostrRelays {
  async disconnect (url) {
  if (this.#relays.has(url)) {
  const relay = this.#relays.get(url)
- if (relay.ws.readyState < 2) await relay.close().catch(console.log)
+ if (relay.ws.readyState < 2) await relay.close()?.catch(console.log)
  this.#relays.delete(url)
  clearTimeout(this.#relayTimeouts.get(url))
  this.#relayTimeouts.delete(url)
@@ -61,55 +64,84 @@ export class NostrRelays {
  // Get events from a list of relays
  async getEvents (filter, relays, timeout = 5000) {
  const events = []
+ const resolveOrReject = (resolve, reject, err) => {
+ err ? reject(err) : resolve()
+ }
  const promises = relays.map(async (url) => {
+ let sub
+ let isClosed = false
+ const p = Promise.withResolvers()
+ const timer = maybeUnref(setTimeout(() => {
+ sub?.close()
+ isClosed = true
+ resolveOrReject(p.resolve, p.reject, new Error(`timeout: ${url}`))
+ }, timeout))
  try {
  const relay = await this.#getRelay(url)
- return new Promise((resolve) => {
- const sub = relay.subscribe([filter], {
- onevent: (event) => {
- events.push(event)
- },
- onclose: () => {
- clearTimeout(timer)
- resolve()
- },
- oneose: () => {
- clearTimeout(timer)
- resolve()
- }
- })
- const timer = maybeUnref(setTimeout(() => {
+ sub = relay.subscribe([filter], {
+ onevent: (event) => {
+ events.push(event)
+ },
+ onclose: err => {
+ clearTimeout(timer)
+ if (isClosed) return
+ resolveOrReject(p.resolve, p.reject, err /* may be empty (closed normally) */)
+ },
+ oneose: () => {
+ clearTimeout(timer)
+ isClosed = true
  sub.close()
- resolve()
- }, timeout))
+ p.resolve()
+ }
  })
- } catch (error) {
- console.error(`Failed to get events from ${url}`, error)
+
+ await p.promise
+ } catch (err) {
+ clearTimeout(timer)
+ p.reject(err)
  }
  })

  const results = await Promise.allSettled(promises)
- if (results.some(v => v.status === 'rejected')) throw new Error(results[0].reason)
- return events
+ const rejectedResults = results.filter(v => v.status === 'rejected')
+
+ return {
+ result: events,
+ errors: rejectedResults.map(v => ({ reason: v.reason, relay: relays[results.indexOf(v)] })),
+ success: events.length > 0 || results.length !== rejectedResults.length
+ }
  }

  // Send an event to a list of relays.
  async sendEvent (event, relays, timeout = 3000) {
  const promises = relays.map(async (url) => {
+ let timer
  try {
- const relay = await this.#getRelay(url)
- const timer = maybeUnref(setTimeout(() => {
- throw new Error(`Timeout sending event to ${url}`)
+ timer = maybeUnref(setTimeout(() => {
+ throw new Error(`timeout: ${url}`)
  }, timeout))
+ const relay = await this.#getRelay(url)
  await relay.publish(event)
+ } catch (err) {
+ if (err.message?.startsWith('duplicate:')) return
+ if (err.message?.startsWith('mute:')) {
+ console.info(`${url} - ${err.message}`)
+ return
+ }
+ throw err
+ } finally {
  clearTimeout(timer)
- } catch (error) {
- console.error(`Failed to send event to ${url}`, error)
  }
  })

  const results = await Promise.allSettled(promises)
- if (results.some(v => v.status === 'rejected')) throw new Error(results[0].reason)
+ const rejectedResults = results.filter(v => v.status === 'rejected')
+
+ return {
+ result: null,
+ errors: rejectedResults.map(v => ({ reason: v.reason, relay: relays[results.indexOf(v)] })),
+ success: results.length !== rejectedResults.length
+ }
  }
  }
  // Share same connection.
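Because `getEvents` and `sendEvent` no longer throw on the first rejection, callers now receive a per-relay result object. A hedged sketch of consuming the new shape:

```js
import nostrRelays from '#services/nostr-relays.js'

const pubkey = 'a'.repeat(64) // placeholder hex pubkey
const { result, errors, success } = await nostrRelays.getEvents(
  { kinds: [10002], authors: [pubkey], limit: 1 },
  ['wss://relay.damus.io', 'wss://nos.lol']
)
if (!success) throw new Error('every relay failed or timed out')
for (const { relay, reason } of errors) console.warn(`${relay}: ${reason}`)
// `result` holds the merged events; for sendEvent, result is null and only errors/success matter.
```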
@@ -2,10 +2,11 @@ import fs from 'node:fs'
  import path from 'node:path'
  import { fileURLToPath } from 'node:url'
  import * as dotenv from 'dotenv'
- import { getPublicKey, finalizeEvent } from 'nostr-tools/pure'
+ import { getPublicKey } from 'nostr-tools/pure'
  import { getConversationKey, encrypt, decrypt } from 'nostr-tools/nip44'
  import nostrRelays, { seedRelays, freeRelays } from '#services/nostr-relays.js'
  import { bytesToBase16, base16ToBytes } from '#helpers/base16.js'
+ import { finalizeEvent } from '#helpers/nip01.js'
  const __dirname = fileURLToPath(new URL('.', import.meta.url))

  const dotenvPath = process.env.DOTENV_CONFIG_PATH ?? `${__dirname}/../../.env`
@@ -54,7 +55,7 @@ export default class NostrSigner {
  async getRelays () {
  if (this.relays) return this.relays

- const relayLists = await nostrRelays.getEvents({ authors: [await this.getPublicKey()], kinds: [10002], limit: 1 }, seedRelays)
+ const relayLists = (await nostrRelays.getEvents({ authors: [await this.getPublicKey()], kinds: [10002], limit: 1 }, seedRelays)).result
  const relayList = relayLists.sort((a, b) => b.created_at - a.created_at)[0]
  const rTags = (relayList?.tags ?? []).filter(v => v[0] === 'r' && /^wss?:\/\//.test(v[1]))
  if (rTags.length === 0) return (this.relays = await this.#initRelays())
package/lib/GEMINI.md DELETED
@@ -1,4 +0,0 @@
- # lib folder
-
- This is the place to add third-party libraries that don't have a
- published npm package.
package/lib/base122.js DELETED
@@ -1,171 +0,0 @@
- // https://github.com/kevinAlbs/Base122/commit/b62945c2733fa4da8792a1071e40a8b326e8dd1b
- // Provides functions for encoding/decoding data to and from base-122.
-
- const kString = 0
- const kUint8Array = 1
- const kDebug = false
- const kIllegals = [
- 0, // null
- 10, // newline
- 13, // carriage return
- 34, // double quote
- 38, // ampersand
- 92 // backslash
- ]
- const kShortened = 0b111 // Uses the illegal index to signify the last two-byte char encodes <= 7 bits.
-
- /**
- * Encodes raw data into base-122.
- * @param {Uint8Array|Buffer|Array|String} rawData - The data to be encoded. This can be an array
- * or Buffer with raw data bytes or a string of bytes (i.e. the type of argument to btoa())
- * @returns {Array} The base-122 encoded data as a regular array of UTF-8 character byte values.
- */
- function encode (rawData) {
- const dataType = typeof (rawData) === 'string' ? kString : kUint8Array
- let curIndex = 0
- let curBit = 0 // Points to current bit needed
- // const curMask = 0b10000000
- const outData = []
- let getByte = i => rawData[i]
-
- if (dataType === kString) {
- getByte = (i) => {
- const val = rawData.codePointAt(i)
- if (val > 255) {
- throw new Error('Unexpected code point at position: ' + i + '. Expected value [0,255]. Got: ' + val)
- }
- return val
- }
- }
-
- // Get seven bits of input data. Returns false if there is no input left.
- function get7 () {
- if (curIndex >= rawData.length) return false
- // Shift, mask, unshift to get first part.
- const firstByte = getByte(curIndex)
- let firstPart = ((0b11111110 >>> curBit) & firstByte) << curBit
- // Align it to a seven bit chunk.
- firstPart >>= 1
- // Check if we need to go to the next byte for more bits.
- curBit += 7
- if (curBit < 8) return firstPart // Do not need next byte.
- curBit -= 8
- curIndex++
- // Now we want bits [0..curBit] of the next byte if it exists.
- if (curIndex >= rawData.length) return firstPart
- const secondByte = getByte(curIndex)
- let secondPart = ((0xFF00 >>> curBit) & secondByte) & 0xFF
- // Align it.
- secondPart >>= 8 - curBit
- return firstPart | secondPart
- }
-
- while (true) {
- // Grab 7 bits.
- const bits = get7()
- if (bits === false) break
- debugLog('Seven input bits', print7Bits(bits), bits)
-
- const illegalIndex = kIllegals.indexOf(bits)
- if (illegalIndex !== -1) {
- // Since this will be a two-byte character, get the next chunk of seven bits.
- let nextBits = get7()
- debugLog('Handle illegal sequence', print7Bits(bits), print7Bits(nextBits))
-
- let b1 = 0b11000010; let b2 = 0b10000000
- if (nextBits === false) {
- debugLog('Last seven bits are an illegal sequence.')
- b1 |= (0b111 & kShortened) << 2
- nextBits = bits // Encode these bits after the shortened signifier.
- } else {
- b1 |= (0b111 & illegalIndex) << 2
- }
-
- // Push first bit onto first byte, remaining 6 onto second.
- const firstBit = (nextBits & 0b01000000) > 0 ? 1 : 0
- b1 |= firstBit
- b2 |= nextBits & 0b00111111
- outData.push(b1)
- outData.push(b2)
- } else {
- outData.push(bits)
- }
- }
- return outData
- }
-
- /**
- * Decodes base-122 encoded data back to the original data.
- * @param {Uint8Array|Buffer|String} rawData - The data to be decoded. This can be a Uint8Array
- * or Buffer with raw data bytes or a string of bytes (i.e. the type of argument to btoa())
- * @returns {Array} The data in a regular array representing byte values.
- */
- function decode (base122Data) {
- const strData = typeof (base122Data) === 'string' ? base122Data : utf8DataToString(base122Data)
- const decoded = []
- // const decodedIndex = 0
- let curByte = 0
- let bitOfByte = 0
-
- function push7 (byte) {
- byte <<= 1
- // Align this byte to offset for current byte.
- curByte |= (byte >>> bitOfByte)
- bitOfByte += 7
- if (bitOfByte >= 8) {
- decoded.push(curByte)
- bitOfByte -= 8
- // Now, take the remainder, left shift by what has been taken.
- curByte = (byte << (7 - bitOfByte)) & 255
- }
- }
-
- for (let i = 0; i < strData.length; i++) {
- const c = strData.charCodeAt(i)
- // Check if this is a two-byte character.
- if (c > 127) {
- // Note, the charCodeAt will give the codePoint, thus
- // 0b110xxxxx 0b10yyyyyy will give => xxxxxyyyyyy
- const illegalIndex = (c >>> 8) & 7 // 7 = 0b111.
- // We have to first check if this is a shortened two-byte character, i.e. if it only
- // encodes <= 7 bits.
- if (illegalIndex !== kShortened) push7(kIllegals[illegalIndex])
- // Always push the rest.
- push7(c & 127)
- } else {
- // One byte characters can be pushed directly.
- push7(c)
- }
- }
- return decoded
- }
-
- /**
- * Converts a sequence of UTF-8 bytes to a string.
- * @param {Uint8Array|Buffer} data - The UTF-8 data.
- * @returns {String} A string with each character representing a code point.
- */
- function utf8DataToString (data) {
- return Buffer.from(data).toString('utf-8')
- }
-
- // For debugging.
- function debugLog () {
- if (kDebug) console.log(...arguments)
- }
-
- // For debugging.
- function print7Bits (num) {
- return '0000000'.substring(num.toString(2).length) + num.toString(2)
- }
-
- // For debugging.
- // eslint-disable-next-line no-unused-vars
- function print8Bits (num) {
- return '00000000'.substring(num.toString(2).length) + num.toString(2)
- }
-
- export {
- encode,
- decode
- }
@@ -1,56 +0,0 @@
- import { decode } from '#lib/base122.js'
-
- // Decodes data from base122.
- export default class Base122Decoder {
- textEncoder = new TextEncoder()
-
- constructor (source, { mimeType = '' } = {}) {
- this.sourceIterator = source?.[Symbol.iterator]?.() || source?.[Symbol.asyncIterator]?.() || source()
- this.isText = mimeType.startsWith('text/')
- if (this.isText) this.textDecoder = new TextDecoder()
- }
-
- // decoder generator
- * [Symbol.iterator] (base122String) {
- let bytes
- if (this.isText) {
- while (base122String) {
- bytes = this.textEncoder.encode(base122String) // from string to UInt8Array
- // stream=true avoids cutting a multi-byte character
- base122String = yield this.textDecoder.decode(new Uint8Array(decode(bytes)), { stream: true })
- }
- } else {
- while (base122String) {
- bytes = this.textEncoder.encode(base122String)
- base122String = yield new Uint8Array(decode(bytes))
- }
- }
- }
-
- // Gets the decoded data.
- getDecoded () { return iteratorToStream(this, this.sourceIterator) }
- }
-
- function iteratorToStream (decoder, sourceIterator) {
- return new ReadableStream({
- decoderIterator: null,
- async start (controller) {
- const { value: chunk, done } = await sourceIterator.next()
- if (done) return controller.close()
-
- // Pass first chunk when instantiating the decoder generator
- this.decoderIterator = decoder[Symbol.iterator](chunk)
- const { value } = this.decoderIterator.next()
- if (value) controller.enqueue(value)
- },
- async pull (controller) {
- if (!this.decoderIterator) return
-
- const { value: chunk, done: sourceDone } = await sourceIterator.next()
- const { value, done } = this.decoderIterator.next(chunk)
-
- if (value) controller.enqueue(value)
- if (done || sourceDone) controller.close()
- }
- })
- }
@@ -1,19 +0,0 @@
- import { encode } from '#lib/base122.js'
-
- // Encodes data using base122.
- export default class Base122Encoder {
- textDecoder = new TextDecoder()
- // The encoded data.
- encoded = ''
-
- // Updates the encoded data with the given bytes.
- update (bytes) {
- this.encoded += this.textDecoder.decode(new Uint8Array(encode(bytes)))
- return this
- }
-
- // Gets the encoded data.
- getEncoded () {
- return this.encoded
- }
- }