nappup 1.0.9 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/index.js CHANGED
@@ -1,10 +1,11 @@
1
1
  import NMMR from 'nmmr'
2
2
  import { appEncode } from '#helpers/nip19.js'
3
- import Base122Encoder from '#services/base122-encoder.js'
3
+ import Base93Encoder from '#services/base93-encoder.js'
4
4
  import nostrRelays from '#services/nostr-relays.js'
5
5
  import NostrSigner from '#services/nostr-signer.js'
6
- import { streamToChunks } from '#helpers/stream.js'
6
+ import { streamToChunks, streamToText } from '#helpers/stream.js'
7
7
  import { isNostrAppDTagSafe, deriveNostrAppDTag } from '#helpers/app.js'
8
+ import { extractHtmlMetadata, findFavicon, findIndexFile } from '#helpers/app-metadata.js'
8
9
 
9
10
  export default async function (...args) {
10
11
  try {
@@ -14,47 +15,86 @@ export default async function (...args) {
14
15
  }
15
16
  }
16
17
 
17
- export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, channel = 'main' } = {}) {
18
+ export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, channel = 'main', shouldReupload = false } = {}) {
18
19
  if (!nostrSigner && typeof window !== 'undefined') nostrSigner = window.nostr
19
20
  if (!nostrSigner) throw new Error('No Nostr signer found')
20
21
  if (typeof window !== 'undefined' && nostrSigner === window.nostr) {
21
22
  nostrSigner.getRelays = NostrSigner.prototype.getRelays
22
23
  }
24
+ const writeRelays = (await nostrSigner.getRelays()).write
25
+ log(`Found ${writeRelays.length} outbox relays for pubkey ${nostrSigner.getPublicKey()}:\n${writeRelays.join(', ')}`)
26
+ if (writeRelays.length === 0) throw new Error('No outbox relays found')
23
27
 
24
28
  if (typeof dTag === 'string') {
25
29
  if (!isNostrAppDTagSafe(dTag)) throw new Error('dTag should be [A-Za-z0-9] with length ranging from 1 to 19')
26
30
  } else {
27
31
  dTag = fileList[0].webkitRelativePath.split('/')[0].trim()
28
- if (!isNostrAppDTagSafe(dTag)) dTag = deriveNostrAppDTag(dTag || Math.random().toString(36))
32
+ if (!isNostrAppDTagSafe(dTag)) dTag = await deriveNostrAppDTag(dTag || Math.random().toString(36))
29
33
  }
30
34
  let nmmr
31
35
  const fileMetadata = []
32
36
 
37
+ const indexFile = findIndexFile(fileList)
38
+ let stallName, stallSummary
39
+ if (indexFile) {
40
+ try {
41
+ const htmlContent = await streamToText(indexFile.stream())
42
+ const { name, description } = extractHtmlMetadata(htmlContent)
43
+ stallName = name
44
+ stallSummary = description
45
+ } catch (err) {
46
+ log('Error extracting HTML metadata:', err)
47
+ }
48
+ }
49
+ const faviconFile = findFavicon(fileList)
50
+ let iconMetadata
51
+
33
52
  log(`Processing ${fileList.length} files`)
53
+ let pause = 1000
34
54
  for (const file of fileList) {
35
55
  nmmr = new NMMR()
36
56
  const stream = file.stream()
37
57
 
38
58
  let chunkLength = 0
39
- for await (const chunk of streamToChunks(stream, 54600)) {
59
+ for await (const chunk of streamToChunks(stream, 51000)) {
40
60
  chunkLength++
41
- nmmr.append(chunk)
61
+ await nmmr.append(chunk)
42
62
  }
43
63
  if (chunkLength) {
44
64
  // remove root dir
45
65
  const filename = file.webkitRelativePath.split('/').slice(1).join('/')
46
66
  log(`Uploading ${chunkLength} file parts of ${filename}`)
47
- await uploadBinaryDataChunks(nmmr, nostrSigner, { mimeType: file.type || 'application/octet-stream' })
67
+ ;({ pause } = (await uploadBinaryDataChunks({ nmmr, signer: nostrSigner, filename, chunkLength, log, pause, mimeType: file.type || 'application/octet-stream', shouldReupload })))
48
68
  fileMetadata.push({
49
69
  rootHash: nmmr.getRoot(),
50
70
  filename,
51
71
  mimeType: file.type || 'application/octet-stream'
52
72
  })
73
+
74
+ if (faviconFile && file === faviconFile) {
75
+ iconMetadata = {
76
+ rootHash: nmmr.getRoot(),
77
+ mimeType: file.type || 'application/octet-stream'
78
+ }
79
+ }
53
80
  }
54
81
  }
55
82
 
83
+ log(`Uploading stall event for #${dTag}`)
84
+ ;({ pause } = (await maybeUploadStall({
85
+ dTag,
86
+ channel,
87
+ name: stallName,
88
+ summary: stallSummary,
89
+ icon: iconMetadata,
90
+ signer: nostrSigner,
91
+ writeRelays,
92
+ log,
93
+ pause
94
+ })))
95
+
56
96
  log(`Uploading bundle #${dTag}`)
57
- const bundle = await uploadBundle(dTag, channel, fileMetadata, nostrSigner)
97
+ const bundle = await uploadBundle({ dTag, channel, fileMetadata, signer: nostrSigner, pause })
58
98
 
59
99
  const appEntity = appEncode({
60
100
  dTag: bundle.tags.find(v => v[0] === 'd')[1],
@@ -65,43 +105,105 @@ export async function toApp (fileList, nostrSigner, { log = () => {}, dTag, chan
65
105
  log(`Visit at https://44billion.net/${appEntity}`)
66
106
  }
67
107
 
68
- async function uploadBinaryDataChunks (nmmr, signer, { mimeType } = {}) {
108
+ async function uploadBinaryDataChunks ({ nmmr, signer, filename, chunkLength, log, pause = 0, mimeType, shouldReupload = false }) {
69
109
  const writeRelays = (await signer.getRelays()).write
110
+ let chunkIndex = 0
70
111
  for await (const chunk of nmmr.getChunks()) {
71
112
  const dTag = chunk.x
72
113
  const currentCtag = `${chunk.rootX}:${chunk.index}`
73
- const prevCTags = await getPreviousCtags(dTag, currentCtag, writeRelays, signer)
114
+ const { otherCtags, hasCurrentCtag } = await getPreviousCtags(dTag, currentCtag, writeRelays, signer)
115
+ if (!shouldReupload && hasCurrentCtag) {
116
+ log(`${filename}: Skipping chunk ${++chunkIndex} of ${chunkLength} (already uploaded)`)
117
+ continue
118
+ }
74
119
  const binaryDataChunk = {
75
120
  kind: 34600,
76
121
  tags: [
77
122
  ['d', dTag],
78
- ...prevCTags,
123
+ ...otherCtags,
79
124
  ['c', currentCtag, chunk.length, ...chunk.proof],
80
125
  ...(mimeType ? [['m', mimeType]] : [])
81
126
  ],
82
- // These chunks already have the expected size of 54600 bytes
83
- content: new Base122Encoder().update(chunk.contentBytes).getEncoded(),
127
+ // These chunks already have the expected size of 51000 bytes
128
+ content: new Base93Encoder().update(chunk.contentBytes).getEncoded(),
84
129
  created_at: Math.floor(Date.now() / 1000)
85
130
  }
86
131
 
87
132
  const event = await signer.signEvent(binaryDataChunk)
88
- await nostrRelays.sendEvent(event, writeRelays)
133
+ log(`${filename}: Uploading file part ${++chunkIndex} of ${chunkLength} to ${writeRelays.length} relays`)
134
+ ;({ pause } = (await throttledSendEvent(event, writeRelays, { pause, log, trailingPause: true })))
135
+ }
136
+ return { pause }
137
+ }
138
+
139
+ async function throttledSendEvent (event, relays, {
140
+ pause, log,
141
+ retries = 0, maxRetries = 10,
142
+ minSuccessfulRelays = 1,
143
+ leadingPause = false, trailingPause = false
144
+ }) {
145
+ if (pause && leadingPause) await new Promise(resolve => setTimeout(resolve, pause))
146
+ if (retries > 0) log(`Retrying upload to ${relays.length} relays: ${relays.join(', ')}`)
147
+
148
+ const { errors } = (await nostrRelays.sendEvent(event, relays, 15000))
149
+ if (errors.length === 0) {
150
+ if (pause && trailingPause) await new Promise(resolve => setTimeout(resolve, pause))
151
+ return { pause }
89
152
  }
153
+
154
+ const [rateLimitErrors, unretryableErrors] =
155
+ errors.reduce((r, v) => {
156
+ if ((v.reason?.message ?? '').startsWith('rate-limited:')) r[0].push(v)
157
+ else r[1].push(v)
158
+ return r
159
+ }, [[], []])
160
+ log(`${unretryableErrors.length} Unretryable errors\n: ${unretryableErrors.map(v => `${v.relay}: ${v.reason.message}`).join('; ')}`)
161
+ const unretryableErrorsLength = errors.length - rateLimitErrors.length
162
+ const maybeSuccessfulRelays = relays.length - unretryableErrorsLength
163
+ const hasReachedMaxRetries = retries > maxRetries
164
+ if (
165
+ hasReachedMaxRetries ||
166
+ maybeSuccessfulRelays < minSuccessfulRelays
167
+ ) throw new Error(errors.map(v => `\n${v.relay}: ${v.reason}`).join('\n'))
168
+
169
+ if (rateLimitErrors.length === 0) {
170
+ if (pause && trailingPause) await new Promise(resolve => setTimeout(resolve, pause))
171
+ return { pause }
172
+ }
173
+
174
+ const erroedRelays = rateLimitErrors.map(v => v.relay)
175
+ log(`Rate limited by ${erroedRelays.length} relays, pausing for ${pause + 2000} ms`)
176
+ await new Promise(resolve => setTimeout(resolve, (pause += 2000)))
177
+
178
+ minSuccessfulRelays = Math.max(0, minSuccessfulRelays - (relays.length - erroedRelays.length))
179
+ return await throttledSendEvent(event, erroedRelays, {
180
+ pause, log, retries: ++retries, maxRetries, minSuccessfulRelays, leadingPause: false, trailingPause
181
+ })
90
182
  }
91
183
 
92
184
  async function getPreviousCtags (dTagValue, currentCtagValue, writeRelays, signer) {
93
- const storedEvents = await nostrRelays.getEvents({
185
+ const storedEvents = (await nostrRelays.getEvents({
94
186
  kinds: [34600],
95
187
  authors: [await signer.getPublicKey()],
96
188
  '#d': [dTagValue],
97
189
  limit: 1
98
- }, writeRelays)
99
- if (storedEvents.length === 0) return []
190
+ }, writeRelays)).result
191
+
192
+ let hasCurrentCtag = false
193
+ const hasEvent = storedEvents.length > 0
194
+ if (!hasEvent) return { otherCtags: [], hasEvent, hasCurrentCtag }
100
195
 
101
196
  const cTagValues = { [currentCtagValue]: true }
102
197
  const prevTags = storedEvents.sort((a, b) => b.created_at - a.created_at)[0].tags
103
- if (!Array.isArray(prevTags)) return []
104
- return prevTags
198
+ if (!Array.isArray(prevTags)) return { otherCtags: [], hasEvent, hasCurrentCtag }
199
+
200
+ hasCurrentCtag = prevTags.some(tag =>
201
+ Array.isArray(tag) &&
202
+ tag[0] === 'c' &&
203
+ tag[1] === currentCtagValue
204
+ )
205
+
206
+ const otherCtags = prevTags
105
207
  .filter(v => {
106
208
  const isCTag =
107
209
  Array.isArray(v) &&
@@ -114,9 +216,11 @@ async function getPreviousCtags (dTagValue, currentCtagValue, writeRelays, signe
114
216
  cTagValues[v[1]] = true
115
217
  return isCTag && isntDuplicate
116
218
  })
219
+
220
+ return { otherCtags, hasEvent, hasCurrentCtag }
117
221
  }
118
222
 
119
- async function uploadBundle (dTag, channel, fileMetadata, signer) {
223
+ async function uploadBundle ({ dTag, channel, fileMetadata, signer, pause = 0 }) {
120
224
  const kind = {
121
225
  main: 37448, // stable
122
226
  next: 37449, // insider
@@ -132,6 +236,159 @@ async function uploadBundle (dTag, channel, fileMetadata, signer) {
132
236
  created_at: Math.floor(Date.now() / 1000)
133
237
  }
134
238
  const event = await signer.signEvent(appBundle)
135
- await nostrRelays.sendEvent(event, (await signer.getRelays()).write)
239
+ await throttledSendEvent(event, (await signer.getRelays()).write, { pause, trailingPause: true })
136
240
  return event
137
241
  }
242
+
243
+ async function maybeUploadStall ({
244
+ dTag,
245
+ channel,
246
+ name,
247
+ summary,
248
+ icon,
249
+ signer,
250
+ writeRelays,
251
+ log,
252
+ pause
253
+ }) {
254
+ const trimmedName = typeof name === 'string' ? name.trim() : ''
255
+ const trimmedSummary = typeof summary === 'string' ? summary.trim() : ''
256
+ const iconRootHash = icon?.rootHash
257
+ const iconMimeType = icon?.mimeType
258
+ const hasMetadata = Boolean(trimmedName) || Boolean(trimmedSummary) || Boolean(iconRootHash)
259
+
260
+ const previous = await getPreviousStall(dTag, writeRelays, signer, channel)
261
+ if (!previous && !hasMetadata) return { pause }
262
+
263
+ const publishStall = async (event) => {
264
+ const signedEvent = await signer.signEvent(event)
265
+ return await throttledSendEvent(signedEvent, writeRelays, { pause, log, trailingPause: true })
266
+ }
267
+
268
+ const createdAt = Math.floor(Date.now() / 1000)
269
+ const kind = {
270
+ main: 37348,
271
+ next: 37349,
272
+ draft: 37350
273
+ }[channel] ?? 37348
274
+
275
+ if (!previous) {
276
+ const tags = [
277
+ ['d', dTag],
278
+ ['c', '*']
279
+ ]
280
+
281
+ let hasIcon = false
282
+ let hasName = false
283
+ if (iconRootHash && iconMimeType) {
284
+ hasIcon = true
285
+ tags.push(['icon', iconRootHash, iconMimeType])
286
+ tags.push(['auto', 'icon'])
287
+ }
288
+
289
+ if (trimmedName) {
290
+ hasName = true
291
+ tags.push(['name', trimmedName])
292
+ tags.push(['auto', 'name'])
293
+ }
294
+
295
+ if (trimmedSummary) {
296
+ tags.push(['summary', trimmedSummary])
297
+ tags.push(['auto', 'summary'])
298
+ }
299
+
300
+ if (!hasIcon || !hasName) return { pause }
301
+
302
+ return await publishStall({
303
+ kind,
304
+ tags,
305
+ content: '',
306
+ created_at: createdAt
307
+ })
308
+ }
309
+
310
+ const tags = Array.isArray(previous.tags)
311
+ ? previous.tags.map(tag => (Array.isArray(tag) ? [...tag] : tag))
312
+ : []
313
+ let changed = false
314
+
315
+ const ensureTagValue = (key, updater) => {
316
+ const index = tags.findIndex(tag => Array.isArray(tag) && tag[0] === key)
317
+ if (index === -1) {
318
+ const next = updater(null)
319
+ if (!next) return
320
+ tags.push(next)
321
+ changed = true
322
+ return
323
+ }
324
+
325
+ const next = updater(tags[index])
326
+ if (!next) return
327
+ if (!tags[index] || tags[index].some((value, idx) => value !== next[idx])) {
328
+ tags[index] = next
329
+ changed = true
330
+ }
331
+ }
332
+
333
+ ensureTagValue('d', (existing) => {
334
+ if (existing && existing[1] === dTag) return existing
335
+ return ['d', dTag]
336
+ })
337
+
338
+ ensureTagValue('c', (existing) => {
339
+ if (!existing) return ['c', '*']
340
+ const currentValue = typeof existing[1] === 'string' ? existing[1].trim() : ''
341
+ if (currentValue === '') return ['c', '*']
342
+ return existing
343
+ })
344
+
345
+ const hasAuto = (field) => tags.some(tag => Array.isArray(tag) && tag[0] === 'auto' && tag[1] === field)
346
+
347
+ if (trimmedName && hasAuto('name')) {
348
+ ensureTagValue('name', (existing) => {
349
+ if (existing && existing[1] === trimmedName) return existing
350
+ return ['name', trimmedName]
351
+ })
352
+ }
353
+
354
+ if (trimmedSummary && hasAuto('summary')) {
355
+ ensureTagValue('summary', (existing) => {
356
+ if (existing && existing[1] === trimmedSummary) return existing
357
+ return ['summary', trimmedSummary]
358
+ })
359
+ }
360
+
361
+ if (iconRootHash && iconMimeType && hasAuto('icon')) {
362
+ ensureTagValue('icon', (existing) => {
363
+ if (existing && existing[1] === iconRootHash && existing[2] === iconMimeType) return existing
364
+ return ['icon', iconRootHash, iconMimeType]
365
+ })
366
+ }
367
+
368
+ if (!changed) return { pause }
369
+
370
+ return await publishStall({
371
+ kind,
372
+ tags,
373
+ content: typeof previous.content === 'string' ? previous.content : '',
374
+ created_at: createdAt
375
+ })
376
+ }
377
+
378
+ async function getPreviousStall (dTagValue, writeRelays, signer, channel) {
379
+ const kind = {
380
+ main: 37348,
381
+ next: 37349,
382
+ draft: 37350
383
+ }[channel] ?? 37348
384
+
385
+ const storedEvents = (await nostrRelays.getEvents({
386
+ kinds: [kind],
387
+ authors: [await signer.getPublicKey()],
388
+ '#d': [dTagValue],
389
+ limit: 1
390
+ }, writeRelays)).result
391
+
392
+ if (storedEvents.length === 0) return null
393
+ return storedEvents.sort((a, b) => b.created_at - a.created_at)[0]
394
+ }
@@ -0,0 +1,107 @@
1
+ // https://github.com/ticlo/arrow-code/blob/master/src/base93.ts
2
+ // https://github.com/ticlo/arrow-code/blob/master/LICENSE - Apache 2.0
3
+
4
+ // JSON-safe (space included; " and \ excluded)
5
+ const BASE93_ALPHABET =
6
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'()*+,-./:;<=>?@[]^_`{|}~ "
7
+
8
+ const DECODING_TABLE = (() => {
9
+ const out = new Int16Array(128)
10
+ out.fill(93) // sentinel = invalid
11
+ for (let i = 0; i < 93; i++) out[BASE93_ALPHABET.charCodeAt(i)] = i
12
+ return out
13
+ })()
14
+
15
+ /**
16
+ * Decode Base93 string to Uint8Array
17
+ * @param {string} str - The Base93 encoded string to decode
18
+ * @param {number} [offset=0] - The starting position in the string
19
+ * @param {number} [length=-1] - The number of characters to decode, or -1 for all remaining
20
+ * @returns {Uint8Array} The decoded bytes
21
+ */
22
+ export function decode (str, offset = 0, length = -1) {
23
+ let end = offset + length
24
+ if (length < 0 || end > str.length) end = str.length
25
+
26
+ // Over-allocate; we’ll trim at the end
27
+ const out = new Uint8Array(Math.ceil((end - offset) * 7 / 8))
28
+
29
+ let dbq = 0
30
+ let dn = 0
31
+ let dv = -1
32
+ let pos = 0
33
+
34
+ for (let i = offset; i < end; i++) {
35
+ const code = str.charCodeAt(i)
36
+ if (code > 126) continue // ignore non-ASCII
37
+ const v = DECODING_TABLE[code]
38
+ if (v === 93) continue // ignore invalids
39
+ if (dv === -1) {
40
+ dv = v
41
+ } else {
42
+ const t = dv + v * 93
43
+ dv = -1
44
+ dbq |= t << dn
45
+ dn += ((t & 0x1fff) > 456 ? 13 : 14)
46
+ while (dn > 7) {
47
+ out[pos++] = dbq & 0xff
48
+ dbq >>>= 8
49
+ dn -= 8
50
+ }
51
+ }
52
+ }
53
+
54
+ if (dv !== -1) {
55
+ out[pos++] = (dbq | (dv << dn)) & 0xff
56
+ }
57
+ return out.subarray(0, pos)
58
+ }
59
+
60
+ export default class Base93Decoder {
61
+ constructor (source, { mimeType = '', preferTextStreamDecoding = false } = {}) {
62
+ this.sourceIterator = source?.[Symbol.iterator]?.() || source?.[Symbol.asyncIterator]?.() || source()
63
+ this.asTextStream = preferTextStreamDecoding && mimeType.startsWith('text/')
64
+ if (this.asTextStream) this.textDecoder = new TextDecoder()
65
+ }
66
+
67
+ // decoder generator
68
+ * [Symbol.iterator] (base93String) {
69
+ if (this.asTextStream) {
70
+ while (base93String) {
71
+ // stream=true avoids cutting a multi-byte character
72
+ base93String = yield this.textDecoder.decode(decode(base93String), { stream: true })
73
+ }
74
+ } else {
75
+ while (base93String) {
76
+ base93String = yield decode(base93String)
77
+ }
78
+ }
79
+ }
80
+
81
+ // Gets the decoded data.
82
+ getDecoded () { return iteratorToStream(this, this.sourceIterator) }
83
+ }
84
+
85
+ function iteratorToStream (decoder, sourceIterator) {
86
+ return new ReadableStream({
87
+ decoderIterator: null,
88
+ async start (controller) {
89
+ const { value: chunk, done } = await sourceIterator.next()
90
+ if (done) return controller.close()
91
+
92
+ // Pass first chunk when instantiating the decoder generator
93
+ this.decoderIterator = decoder[Symbol.iterator](chunk)
94
+ const { value } = this.decoderIterator.next()
95
+ if (value) controller.enqueue(value)
96
+ },
97
+ async pull (controller) {
98
+ if (!this.decoderIterator) return
99
+
100
+ const { value: chunk, done: sourceDone } = await sourceIterator.next()
101
+ const { value, done } = this.decoderIterator.next(chunk)
102
+
103
+ if (value) controller.enqueue(value)
104
+ if (done || sourceDone) controller.close()
105
+ }
106
+ })
107
+ }
@@ -0,0 +1,96 @@
1
+ // https://github.com/ticlo/arrow-code/blob/master/src/base93.ts
2
+ // https://github.com/ticlo/arrow-code/blob/master/LICENSE - Apache 2.0
3
+
4
+ // JSON-safe (space included; " and \ excluded)
5
+ const BASE93_ALPHABET =
6
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!#$%&'()*+,-./:;<=>?@[]^_`{|}~ "
7
+ const ENCODING_TABLE = (() => {
8
+ const out = new Uint16Array(93)
9
+ for (let i = 0; i < 93; i++) out[i] = BASE93_ALPHABET.charCodeAt(i)
10
+ return out
11
+ })()
12
+
13
+ function codesToString (codes, len) {
14
+ const CHUNK = 16384 // 16k chars per slice
15
+ let s = ''
16
+ for (let i = 0; i < len; i += CHUNK) {
17
+ const end = i + CHUNK < len ? i + CHUNK : len
18
+ s += String.fromCharCode.apply(
19
+ null,
20
+ Array.prototype.slice.call(codes, i, end)
21
+ )
22
+ }
23
+ return s
24
+ }
25
+
26
+ export default class Base93Encoder {
27
+ constructor (prefix = '') {
28
+ // bit reservoir
29
+ this._ebq = 0 // queued bits
30
+ this._en = 0 // number of bits in ebq
31
+
32
+ // output parts
33
+ this._parts = []
34
+ this._finished = false
35
+
36
+ if (prefix) this._parts.push(prefix)
37
+ }
38
+
39
+ // Stream bytes; keeps reservoir across calls.
40
+ update (bytes) {
41
+ if (this._finished) throw new Error('Encoder already finalized.')
42
+ const src = bytes instanceof Uint8Array ? bytes : Uint8Array.from(bytes)
43
+
44
+ // Over-allocate for this update; we’ll trim to 'pos'
45
+ const outCodes = new Uint16Array(Math.ceil(src.length * 8 / 6.5) + 4)
46
+ let pos = 0
47
+
48
+ let ebq = this._ebq
49
+ let en = this._en
50
+ let ev = 0
51
+
52
+ for (let i = 0; i < src.length; i++) {
53
+ ebq |= (src[i] & 0xff) << en
54
+ en += 8
55
+ if (en > 13) {
56
+ ev = ebq & 0x1fff
57
+ if (ev > 456) {
58
+ ebq >>>= 13
59
+ en -= 13
60
+ } else {
61
+ ev = ebq & 0x3fff
62
+ ebq >>>= 14
63
+ en -= 14
64
+ }
65
+ outCodes[pos++] = ENCODING_TABLE[ev % 93]
66
+ outCodes[pos++] = ENCODING_TABLE[(ev / 93) | 0]
67
+ }
68
+ }
69
+
70
+ // persist reservoir
71
+ this._ebq = ebq
72
+ this._en = en
73
+
74
+ if (pos) this._parts.push(codesToString(outCodes, pos))
75
+ return this
76
+ }
77
+
78
+ // Finalize on first call: flush trailing partial block, join, lock.
79
+ getEncoded () {
80
+ if (!this._finished) {
81
+ if (this._en > 0) {
82
+ const outCodes = new Uint16Array(2)
83
+ let pos = 0
84
+ outCodes[pos++] = ENCODING_TABLE[this._ebq % 93]
85
+ if (this._en > 7 || this._ebq > 92) {
86
+ outCodes[pos++] = ENCODING_TABLE[(this._ebq / 93) | 0]
87
+ }
88
+ this._parts.push(codesToString(outCodes, pos))
89
+ }
90
+ this._finished = true
91
+ this._ebq = 0
92
+ this._en = 0
93
+ }
94
+ return this._parts.join('')
95
+ }
96
+ }