nappup 1.5.5 → 1.5.7

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/package.json CHANGED
@@ -6,7 +6,7 @@
6
6
  "url": "git+https://github.com/44billion/nappup.git"
7
7
  },
8
8
  "license": "MIT",
9
- "version": "1.5.5",
9
+ "version": "1.5.7",
10
10
  "description": "Nostr App Uploader",
11
11
  "type": "module",
12
12
  "scripts": {
@@ -9,6 +9,20 @@ export function extractHtmlMetadata (htmlContent) {
9
9
  name = titleMatch[1].trim()
10
10
  }
11
11
 
12
+ if (!name) {
13
+ const ogTitleRegex = /<meta\s+[^>]*(?:property|name)\s*=\s*["']og:title["'][^>]*content\s*=\s*["']([^"']+)["'][^>]*>/i
14
+ const ogTitleMatch = htmlContent.match(ogTitleRegex)
15
+ if (ogTitleMatch && ogTitleMatch[1]) {
16
+ name = ogTitleMatch[1].trim()
17
+ } else {
18
+ const altOgTitleRegex = /<meta\s+[^>]*content\s*=\s*["']([^"']+)["'][^>]*(?:property|name)\s*=\s*["']og:title["'][^>]*>/i
19
+ const altOgTitleMatch = htmlContent.match(altOgTitleRegex)
20
+ if (altOgTitleMatch && altOgTitleMatch[1]) {
21
+ name = altOgTitleMatch[1].trim()
22
+ }
23
+ }
24
+ }
25
+
12
26
  const metaDescRegex = /<meta\s+[^>]*name\s*=\s*["']description["'][^>]*content\s*=\s*["']([^"']+)["'][^>]*>/i
13
27
  const metaDescMatch = htmlContent.match(metaDescRegex)
14
28
  if (metaDescMatch && metaDescMatch[1]) {
@@ -22,6 +36,20 @@ export function extractHtmlMetadata (htmlContent) {
22
36
  description = altMetaDescMatch[1].trim()
23
37
  }
24
38
  }
39
+
40
+ if (!description) {
41
+ const ogDescRegex = /<meta\s+[^>]*(?:property|name)\s*=\s*["']og:description["'][^>]*content\s*=\s*["']([^"']+)["'][^>]*>/i
42
+ const ogDescMatch = htmlContent.match(ogDescRegex)
43
+ if (ogDescMatch && ogDescMatch[1]) {
44
+ description = ogDescMatch[1].trim()
45
+ } else {
46
+ const altOgDescRegex = /<meta\s+[^>]*content\s*=\s*["']([^"']+)["'][^>]*(?:property|name)\s*=\s*["']og:description["'][^>]*>/i
47
+ const altOgDescMatch = htmlContent.match(altOgDescRegex)
48
+ if (altOgDescMatch && altOgDescMatch[1]) {
49
+ description = altOgDescMatch[1].trim()
50
+ }
51
+ }
52
+ }
25
53
  } catch (_) {
26
54
  // ignore
27
55
  }
package/src/index.js CHANGED
@@ -182,6 +182,12 @@ async function uploadBinaryDataChunks ({ nmmr, signer, filename, chunkLength, lo
182
182
  ;({ pause } = (await throttledSendEvent(foundEvent, missingRelays, { pause, log, trailingPause: true, minSuccessfulRelays: 0 })))
183
183
  continue
184
184
  }
185
+
186
+ const createdAt = Math.floor(Date.now() / 1000)
187
+ let effectiveCreatedAt = (foundEvent && foundEvent.created_at >= createdAt) ? foundEvent.created_at + 1 : createdAt
188
+ const maxCreatedAt = createdAt + 172800 // 2 days ahead
189
+ if (effectiveCreatedAt > maxCreatedAt) effectiveCreatedAt = maxCreatedAt
190
+
185
191
  const binaryDataChunk = {
186
192
  kind: 34600,
187
193
  tags: [
@@ -192,7 +198,7 @@ async function uploadBinaryDataChunks ({ nmmr, signer, filename, chunkLength, lo
192
198
  ],
193
199
  // These chunks already have the expected size of 51000 bytes
194
200
  content: new Base93Encoder().update(chunk.contentBytes).getEncoded(),
195
- created_at: Math.floor(Date.now() / 1000)
201
+ created_at: effectiveCreatedAt
196
202
  }
197
203
 
198
204
  const event = await signer.signEvent(binaryDataChunk)
@@ -218,23 +224,38 @@ async function throttledSendEvent (event, relays, {
218
224
  return { pause }
219
225
  }
220
226
 
221
- const [rateLimitErrors, unretryableErrors] =
227
+ const [rateLimitErrors, maybeUnretryableErrors, unretryableErrors] =
222
228
  errors.reduce((r, v) => {
223
- if ((v.reason?.message ?? '').startsWith('rate-limited:')) r[0].push(v)
224
- else r[1].push(v)
229
+ const message = v.reason?.message ?? ''
230
+ if (message.startsWith('rate-limited:')) r[0].push(v)
231
+ // https://github.com/nbd-wtf/nostr-tools/blob/28f7553187d201088c8a1009365db4ecbe03e568/abstract-relay.ts#L311
232
+ else if (message === 'publish timed out') r[1].push(v)
233
+ else r[2].push(v)
225
234
  return r
226
- }, [[], []])
235
+ }, [[], [], []])
236
+
237
+ // One-time special retry
238
+ if (maybeUnretryableErrors.length > 0) {
239
+ const timedOutRelays = maybeUnretryableErrors.map(v => v.relay)
240
+ log(`${maybeUnretryableErrors.length} timeout errors, retrying once after ${pause}ms:\n${maybeUnretryableErrors.map(v => `${v.relay}: ${v.reason.message}`).join('; ')}`)
241
+ if (pause) await new Promise(resolve => setTimeout(resolve, pause))
242
+ const { errors: timeoutRetryErrors } = await nostrRelays.sendEvent(event, timedOutRelays, 15000)
243
+ unretryableErrors.push(...timeoutRetryErrors)
244
+ }
245
+
227
246
  if (unretryableErrors.length > 0) {
228
247
  log(`${unretryableErrors.length} unretryable errors:\n${unretryableErrors.map(v => `${v.relay}: ${v.reason.message}`).join('; ')}`)
229
248
  console.log('Erroed event:', stringifyEvent(event))
230
249
  }
231
- const unretryableErrorsLength = errors.length - rateLimitErrors.length
232
- const maybeSuccessfulRelays = relays.length - unretryableErrorsLength
250
+ const maybeSuccessfulRelays = relays.length - unretryableErrors.length
233
251
  const hasReachedMaxRetries = retries > maxRetries
234
252
  if (
235
253
  hasReachedMaxRetries ||
236
254
  maybeSuccessfulRelays < minSuccessfulRelays
237
- ) throw new Error(errors.map(v => `\n${v.relay}: ${v.reason}`).join('\n'))
255
+ ) {
256
+ const finalErrors = [...rateLimitErrors, ...unretryableErrors]
257
+ throw new Error(finalErrors.map(v => `\n${v.relay}: ${v.reason}`).join('\n'))
258
+ }
238
259
 
239
260
  if (rateLimitErrors.length === 0) {
240
261
  if (pause && trailingPause) await new Promise(resolve => setTimeout(resolve, pause))
@@ -245,7 +266,8 @@ async function throttledSendEvent (event, relays, {
245
266
  log(`Rate limited by ${erroedRelays.length} relays, pausing for ${pause + 2000} ms`)
246
267
  await new Promise(resolve => setTimeout(resolve, (pause += 2000)))
247
268
 
248
- minSuccessfulRelays = Math.max(0, minSuccessfulRelays - (relays.length - erroedRelays.length))
269
+ // Subtracts the successful publishes from the original minSuccessfulRelays goal
270
+ minSuccessfulRelays = Math.max(0, minSuccessfulRelays - (relays.length - erroedRelays.length - unretryableErrors.length))
249
271
  return await throttledSendEvent(event, erroedRelays, {
250
272
  pause, log, retries: ++retries, maxRetries, minSuccessfulRelays, leadingPause: false, trailingPause
251
273
  })
@@ -313,59 +335,65 @@ async function uploadBundle ({ dTag, channel, fileMetadata, signer, pause = 0, s
313
335
 
314
336
  const writeRelays = [...new Set([...(await signer.getRelays()).write, ...nappRelays])]
315
337
 
316
- if (!shouldReupload) {
317
- const events = (await nostrRelays.getEvents({
318
- kinds: [kind],
319
- authors: [await signer.getPublicKey()],
320
- '#d': [dTag],
321
- limit: 1
322
- }, writeRelays)).result
323
-
324
- if (events.length > 0) {
325
- events.sort((a, b) => {
326
- if (b.created_at !== a.created_at) return b.created_at - a.created_at
327
- if (a.id < b.id) return -1
328
- if (a.id > b.id) return 1
329
- return 0
330
- })
338
+ let mostRecentEvent
339
+ const events = (await nostrRelays.getEvents({
340
+ kinds: [kind],
341
+ authors: [await signer.getPublicKey()],
342
+ '#d': [dTag],
343
+ limit: 1
344
+ }, writeRelays)).result
345
+
346
+ if (events.length > 0) {
347
+ events.sort((a, b) => {
348
+ if (b.created_at !== a.created_at) return b.created_at - a.created_at
349
+ if (a.id < b.id) return -1
350
+ if (a.id > b.id) return 1
351
+ return 0
352
+ })
353
+ mostRecentEvent = events[0]
354
+ }
331
355
 
332
- const mostRecentEvent = events[0]
333
- const recentFileTags = mostRecentEvent.tags
334
- .filter(t => t[0] === 'file' && t[2] !== '.well-known/napp.json')
335
- .sort((a, b) => (a[1] < b[1] ? -1 : a[1] > b[1] ? 1 : 0))
356
+ if (!shouldReupload && mostRecentEvent) {
357
+ const recentFileTags = mostRecentEvent.tags
358
+ .filter(t => t[0] === 'file' && t[2] !== '.well-known/napp.json')
359
+ .sort((a, b) => (a[1] < b[1] ? -1 : a[1] > b[1] ? 1 : 0))
336
360
 
337
- const currentFileTags = fileTags
338
- .filter(t => t[2] !== '.well-known/napp.json')
339
- .sort((a, b) => (a[1] < b[1] ? -1 : a[1] > b[1] ? 1 : 0))
361
+ const currentFileTags = fileTags
362
+ .filter(t => t[2] !== '.well-known/napp.json')
363
+ .sort((a, b) => (a[1] < b[1] ? -1 : a[1] > b[1] ? 1 : 0))
340
364
 
341
- const isSame = currentFileTags.length === recentFileTags.length && currentFileTags.every((t, i) => {
342
- const rt = recentFileTags[i]
343
- return rt.length >= 4 && rt[1] === t[1] && rt[2] === t[2] && rt[3] === t[3]
344
- })
365
+ const isSame = currentFileTags.length === recentFileTags.length && currentFileTags.every((t, i) => {
366
+ const rt = recentFileTags[i]
367
+ return rt.length >= 4 && rt[1] === t[1] && rt[2] === t[2] && rt[3] === t[3]
368
+ })
345
369
 
346
- if (isSame) {
347
- log(`Bundle based on ${fileTags.length} files is up-to-date (id: ${mostRecentEvent.id} - created_at: ${new Date(mostRecentEvent.created_at * 1000).toISOString()})`)
370
+ if (isSame) {
371
+ log(`Bundle based on ${fileTags.length} files is up-to-date (id: ${mostRecentEvent.id} - created_at: ${new Date(mostRecentEvent.created_at * 1000).toISOString()})`)
348
372
 
349
- const matchingEvents = events.filter(e => e.id === mostRecentEvent.id)
350
- const coveredRelays = new Set(matchingEvents.map(e => e.meta?.relay).filter(Boolean))
351
- const missingRelays = writeRelays.filter(r => !coveredRelays.has(r))
373
+ const matchingEvents = events.filter(e => e.id === mostRecentEvent.id)
374
+ const coveredRelays = new Set(matchingEvents.map(e => e.meta?.relay).filter(Boolean))
375
+ const missingRelays = writeRelays.filter(r => !coveredRelays.has(r))
352
376
 
353
- if (missingRelays.length === 0) return mostRecentEvent
377
+ if (missingRelays.length === 0) return mostRecentEvent
354
378
 
355
- // nostrRelays.getEvents currently doesn't tell us which event came from which relay,
356
- // so we re-upload to all relays to ensure consistency
357
- log(`Re-uploading existing bundle event to ${missingRelays.length} missing relays (out of ${writeRelays.length})`)
358
- await throttledSendEvent(mostRecentEvent, missingRelays, { pause, trailingPause: true, log, minSuccessfulRelays: 0 })
359
- return mostRecentEvent
360
- }
379
+ // nostrRelays.getEvents currently doesn't tell us which event came from which relay,
380
+ // so we re-upload to all relays to ensure consistency
381
+ log(`Re-uploading existing bundle event to ${missingRelays.length} missing relays (out of ${writeRelays.length})`)
382
+ await throttledSendEvent(mostRecentEvent, missingRelays, { pause, trailingPause: true, log, minSuccessfulRelays: 0 })
383
+ return mostRecentEvent
361
384
  }
362
385
  }
363
386
 
387
+ const createdAt = Math.floor(Date.now() / 1000)
388
+ let effectiveCreatedAt = (mostRecentEvent && mostRecentEvent.created_at >= createdAt) ? mostRecentEvent.created_at + 1 : createdAt
389
+ const maxCreatedAt = createdAt + 172800 // 2 days ahead
390
+ if (effectiveCreatedAt > maxCreatedAt) effectiveCreatedAt = maxCreatedAt
391
+
364
392
  const appBundle = {
365
393
  kind,
366
394
  tags,
367
395
  content: '',
368
- created_at: Math.floor(Date.now() / 1000)
396
+ created_at: effectiveCreatedAt
369
397
  }
370
398
  const event = await signer.signEvent(appBundle)
371
399
  await throttledSendEvent(event, writeRelays, { pause, trailingPause: true, log })
@@ -404,7 +432,10 @@ async function maybeUploadStall ({
404
432
 
405
433
  const previousResult = await getPreviousStall(dTag, relays, signer, channel)
406
434
  const previous = previousResult?.previous
407
- if (!previous && !hasMetadata) return { pause }
435
+ if (!previous && !hasMetadata) {
436
+ if (shouldReupload) log('Skipping stall event upload: No previous event found and no metadata provided.')
437
+ return { pause }
438
+ }
408
439
 
409
440
  const publishStall = async (event) => {
410
441
  const signedEvent = await signer.signEvent(event)
@@ -479,7 +510,10 @@ async function maybeUploadStall ({
479
510
  if (isSummaryAuto) tags.push(['auto', 'summary'])
480
511
  }
481
512
 
482
- if (!hasIcon || !hasName) return { pause }
513
+ if (!hasIcon || !hasName) {
514
+ log(`Skipping stall event creation: Missing required metadata.${!hasName ? ' Name is missing.' : ''}${!hasIcon ? ' Icon is missing.' : ''}`)
515
+ return { pause }
516
+ }
483
517
 
484
518
  return await publishStall({
485
519
  kind,
@@ -646,11 +680,15 @@ async function maybeUploadStall ({
646
680
  return await throttledSendEvent(previous, missingRelays, { pause, log, trailingPause: true, minSuccessfulRelays: 0 })
647
681
  }
648
682
 
683
+ let effectiveCreatedAt = (previous && previous.created_at >= createdAt) ? previous.created_at + 1 : createdAt
684
+ const maxCreatedAt = createdAt + 172800 // 2 days ahead
685
+ if (effectiveCreatedAt > maxCreatedAt) effectiveCreatedAt = maxCreatedAt
686
+
649
687
  return await publishStall({
650
688
  kind,
651
689
  tags,
652
690
  content: typeof previous.content === 'string' ? previous.content : '',
653
- created_at: createdAt
691
+ created_at: effectiveCreatedAt
654
692
  })
655
693
  }
656
694