cborg 4.4.1 → 4.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/CHANGELOG.md +9 -0
  2. package/README.md +21 -0
  3. package/bench/README.md +115 -0
  4. package/bench/bench-comparative.js +133 -0
  5. package/bench/bench.js +414 -101
  6. package/bench/fixtures.js +558 -0
  7. package/bench/index.html +405 -0
  8. package/cborg.js +2 -1
  9. package/interface.ts +8 -2
  10. package/lib/0uint.js +11 -11
  11. package/lib/1negint.js +4 -4
  12. package/lib/2bytes.js +5 -5
  13. package/lib/3string.js +1 -1
  14. package/lib/4array.js +4 -4
  15. package/lib/5map.js +4 -4
  16. package/lib/6tag.js +4 -4
  17. package/lib/7float.js +10 -10
  18. package/lib/bl.js +46 -0
  19. package/lib/encode.js +40 -15
  20. package/lib/is.js +12 -31
  21. package/lib/json/encode.js +10 -10
  22. package/package.json +1 -1
  23. package/test/test-0uint.js +12 -1
  24. package/test/test-1negint.js +12 -1
  25. package/test/test-2bytes.js +11 -1
  26. package/test/test-3string.js +11 -1
  27. package/test/test-4array.js +11 -1
  28. package/test/test-5map.js +11 -3
  29. package/test/test-6tag.js +19 -1
  30. package/test/test-7float.js +11 -1
  31. package/test/test-cbor-vectors.js +13 -2
  32. package/test/test-encodeInto.js +246 -0
  33. package/types/cborg.d.ts +2 -1
  34. package/types/cborg.d.ts.map +1 -1
  35. package/types/interface.d.ts +7 -2
  36. package/types/interface.d.ts.map +1 -1
  37. package/types/lib/0uint.d.ts +6 -6
  38. package/types/lib/0uint.d.ts.map +1 -1
  39. package/types/lib/1negint.d.ts +4 -4
  40. package/types/lib/1negint.d.ts.map +1 -1
  41. package/types/lib/2bytes.d.ts +3 -3
  42. package/types/lib/2bytes.d.ts.map +1 -1
  43. package/types/lib/3string.d.ts +1 -1
  44. package/types/lib/3string.d.ts.map +1 -1
  45. package/types/lib/4array.d.ts +3 -3
  46. package/types/lib/4array.d.ts.map +1 -1
  47. package/types/lib/5map.d.ts +3 -3
  48. package/types/lib/5map.d.ts.map +1 -1
  49. package/types/lib/6tag.d.ts +4 -4
  50. package/types/lib/6tag.d.ts.map +1 -1
  51. package/types/lib/7float.d.ts +3 -3
  52. package/types/lib/7float.d.ts.map +1 -1
  53. package/types/lib/bl.d.ts +25 -0
  54. package/types/lib/bl.d.ts.map +1 -1
  55. package/types/lib/encode.d.ts +12 -1
  56. package/types/lib/encode.d.ts.map +1 -1
  57. package/types/lib/is.d.ts.map +1 -1
  58. package/types/lib/json/encode.d.ts +1 -1
  59. package/types/lib/json/encode.d.ts.map +1 -1
package/bench/bench.js CHANGED
@@ -1,117 +1,430 @@
1
- // can be run in a browser with `polendina --runner=bare-sync --timeout 6000 --cleanup bench.js`
2
- // with additional dependencies for cborg installed here
3
-
4
- import assert from 'assert'
5
- import { garbage } from 'ipld-garbage'
6
- import { decode, encode } from '../cborg.js'
7
- import borc from 'borc'
8
-
9
- let writebuf = ''
10
- const write = process.stdout
11
- ? process.stdout.write.bind(process.stdout)
12
- : (str) => {
13
- writebuf += str
14
- if (str.endsWith('\n')) {
15
- console.log(writebuf.replace(/\n$/, ''))
16
- writebuf = ''
17
- }
18
- }
1
+ /**
2
+ * cborg benchmark runner
3
+ *
4
+ * Measures encode/decode performance with realistic IPLD/CBOR workloads.
5
+ * Works in both Node.js and browser environments.
6
+ *
7
+ * Usage:
8
+ * node bench/bench-new.js # run all benchmarks (dag-cbor mode)
9
+ * node bench/bench-new.js --mode=raw # run with raw cborg (no tags)
10
+ * node bench/bench-new.js --suite=bsky # run only bluesky suite
11
+ * node bench/bench-new.js --json # output JSON for comparison
12
+ * node bench/bench-new.js --compare=baseline.json # compare to baseline
13
+ * node bench/bench-new.js --encode-into # use encodeInto instead of encode
14
+ */
19
15
 
20
- function runWith (description, count, targetTime, size, options) {
21
- let borcDecoder = null
22
- const borcDecode = (bytes) => {
23
- if (!borcDecoder) {
24
- // account for initial allocation & setup time in benchmark
25
- borcDecoder = new borc.Decoder({ size: 10 * 1024 * 1024 })
26
- }
27
- return borcDecoder.decodeAll(bytes)[0]
16
+ import { encode, decode, encodeInto, Token, Type } from '../cborg.js'
17
+ import { generateFixtures, BenchCID } from './fixtures.js'
18
+
19
+ // =============================================================================
20
+ // CID Tag Encoder/Decoder (matches @ipld/dag-cbor implementation)
21
+ // =============================================================================
22
+
23
+ const CID_CBOR_TAG = 42
24
+
25
+ /**
26
+ * CID encoder for CBOR tag 42.
27
+ * Matches the detection logic from @ipld/dag-cbor.
28
+ *
29
+ * @param {any} obj
30
+ * @returns {Token[]|null}
31
+ */
32
+ function cidEncoder (obj) {
33
+ // Fast-path rejection for non-CID objects (matches dag-cbor)
34
+ if (obj.asCID !== obj && obj['/'] !== obj.bytes) {
35
+ return null
36
+ }
37
+ // At this point we have something CID-like
38
+ if (!(obj instanceof BenchCID)) {
39
+ return null
40
+ }
41
+ // Encode with 0x00 prefix (historical reasons)
42
+ const bytes = new Uint8Array(obj.bytes.byteLength + 1)
43
+ bytes.set(obj.bytes, 1)
44
+ return [
45
+ new Token(Type.tag, CID_CBOR_TAG),
46
+ new Token(Type.bytes, bytes)
47
+ ]
48
+ }
49
+
50
+ /**
51
+ * CID decoder for CBOR tag 42.
52
+ *
53
+ * @param {Uint8Array} bytes
54
+ * @returns {BenchCID}
55
+ */
56
+ function cidDecoder (bytes) {
57
+ if (bytes[0] !== 0) {
58
+ throw new Error('Invalid CID for CBOR tag 42; expected leading 0x00')
28
59
  }
60
+ return new BenchCID(bytes.subarray(1))
61
+ }
62
+
63
+ /**
64
+ * Number encoder that rejects NaN and Infinity (matches dag-cbor IPLD constraints)
65
+ *
66
+ * @param {number} num
67
+ * @returns {null}
68
+ */
69
+ function numberEncoder (num) {
70
+ if (Number.isNaN(num)) {
71
+ throw new Error('`NaN` is not supported by the IPLD Data Model')
72
+ }
73
+ if (num === Infinity || num === -Infinity) {
74
+ throw new Error('`Infinity` is not supported by the IPLD Data Model')
75
+ }
76
+ return null
77
+ }
29
78
 
30
- const fixtures = []
79
+ /**
80
+ * Undefined encoder that throws (matches strict dag-cbor IPLD constraints)
81
+ *
82
+ * @returns {null}
83
+ */
84
+ function undefinedEncoder () {
85
+ throw new Error('`undefined` is not supported by the IPLD Data Model')
86
+ }
31
87
 
32
- console.log(`${description} @ ${count.toLocaleString()}`)
33
- for (let i = 0; i < count; i++) {
34
- const obj = garbage(size, options)
35
- const cbyts = encode(obj)
36
- /*
37
- const bbyts = borc.encode(obj)
38
- if (Buffer.compare(Buffer.from(cbyts), bbyts) !== 0) {
39
- console.log(`mismatch for obj: ${JSON.stringify(obj)}`)
40
- console.log('\t', Buffer.from(cbyts).toString('hex'))
41
- console.log('\t', Buffer.from(bbyts).toString('hex'))
88
+ // Strict dag-cbor encode options (Filecoin, micro-benchmarks)
89
+ // Throws on undefined values
90
+ const dagCborEncodeOptions = {
91
+ float64: true,
92
+ typeEncoders: {
93
+ Object: cidEncoder,
94
+ number: numberEncoder,
95
+ undefined: undefinedEncoder
96
+ }
97
+ }
98
+
99
+ // Bluesky encode options - uses ignoreUndefinedProperties instead of throwing
100
+ // This matches Bluesky's actual usage pattern
101
+ const bskyEncodeOptions = {
102
+ float64: true,
103
+ ignoreUndefinedProperties: true,
104
+ typeEncoders: {
105
+ Object: cidEncoder,
106
+ number: numberEncoder
107
+ }
108
+ }
109
+
110
+ // dag-cbor-like decode options
111
+ const dagCborDecodeOptions = {
112
+ allowIndefinite: false,
113
+ coerceUndefinedToNull: true,
114
+ allowNaN: false,
115
+ allowInfinity: false,
116
+ allowBigInt: true,
117
+ strict: true,
118
+ useMaps: false,
119
+ rejectDuplicateMapKeys: true,
120
+ tags: []
121
+ }
122
+ dagCborDecodeOptions.tags[CID_CBOR_TAG] = cidDecoder
123
+
124
+ // Configuration
125
+ const WARMUP_ITERATIONS = 50
126
+ const DEFAULT_DURATION_MS = 1000
127
+ const FIXTURE_SEED = 12345
128
+
129
+ // Parse CLI args (Node.js only, ignored in browser)
130
+ const args = typeof process !== 'undefined' ? process.argv.slice(2) : []
131
+ const opts = {
132
+ json: args.includes('--json'),
133
+ suite: args.find(a => a.startsWith('--suite='))?.split('=')[1] || null,
134
+ compare: args.find(a => a.startsWith('--compare='))?.split('=')[1] || null,
135
+ duration: parseInt(args.find(a => a.startsWith('--duration='))?.split('=')[1] || DEFAULT_DURATION_MS),
136
+ encodeInto: args.includes('--encode-into'),
137
+ // Mode: 'dag-cbor' (default) uses tag 42 + strict options, 'raw' uses plain cborg
138
+ mode: args.find(a => a.startsWith('--mode='))?.split('=')[1] || 'dag-cbor'
139
+ }
140
+
141
+ /**
142
+ * Raw mode CID encoder - just encodes as bytes without tag.
143
+ * Needed because BenchCID has self-reference that triggers circular ref detection.
144
+ */
145
+ function rawCidEncoder (obj) {
146
+ if (obj.asCID !== obj && obj['/'] !== obj.bytes) {
147
+ return null
148
+ }
149
+ if (!(obj instanceof BenchCID)) {
150
+ return null
151
+ }
152
+ return [new Token(Type.bytes, obj.bytes)]
153
+ }
154
+
155
+ // Raw mode encode options - minimal, just handles BenchCID
156
+ const rawEncodeOptions = {
157
+ typeEncoders: {
158
+ Object: rawCidEncoder
159
+ }
160
+ }
161
+
162
+ // Raw mode with ignoreUndefinedProperties for Bluesky data
163
+ const rawBskyEncodeOptions = {
164
+ ignoreUndefinedProperties: true,
165
+ typeEncoders: {
166
+ Object: rawCidEncoder
167
+ }
168
+ }
169
+
170
+ /**
171
+ * Get encode/decode options based on mode and suite type
172
+ * @param {'bsky'|'default'} suiteType
173
+ */
174
+ function getOptions (suiteType = 'default') {
175
+ if (opts.mode === 'raw') {
176
+ return {
177
+ encode: suiteType === 'bsky' ? rawBskyEncodeOptions : rawEncodeOptions,
178
+ decode: {}
42
179
  }
43
- */
44
- if (cbyts.length <= size * 2) {
45
- fixtures.push([obj, cbyts])
180
+ }
181
+ return {
182
+ encode: suiteType === 'bsky' ? bskyEncodeOptions : dagCborEncodeOptions,
183
+ decode: dagCborDecodeOptions
184
+ }
185
+ }
186
+
187
+ // Output helpers
188
+ const log = opts.json ? () => {} : console.log.bind(console)
189
+ const write = typeof process !== 'undefined' && process.stdout
190
+ ? (s) => process.stdout.write(s)
191
+ : (s) => log(s)
192
+
193
+ /**
194
+ * Run a benchmark function for a duration, return ops/sec
195
+ */
196
+ function bench (fn, durationMs = opts.duration) {
197
+ // Warmup
198
+ for (let i = 0; i < WARMUP_ITERATIONS; i++) fn()
199
+
200
+ // Measure
201
+ const start = performance.now()
202
+ let ops = 0
203
+ while (performance.now() - start < durationMs) {
204
+ fn()
205
+ ops++
206
+ }
207
+ const elapsed = performance.now() - start
208
+ return {
209
+ opsPerSec: Math.round(ops / (elapsed / 1000)),
210
+ totalOps: ops,
211
+ elapsedMs: Math.round(elapsed)
212
+ }
213
+ }
214
+
215
+ /**
216
+ * Benchmark a fixture set for encode and decode
217
+ * @param {string} name
218
+ * @param {any[]} fixtures
219
+ * @param {'bsky'|'default'} suiteType - determines which encode options to use
220
+ */
221
+ function benchFixtures (name, fixtures, suiteType = 'default') {
222
+ const { encode: encodeOptions, decode: decodeOptions } = getOptions(suiteType)
223
+
224
+ // Pre-encode all fixtures for decode benchmark
225
+ const encoded = fixtures.map(f => encode(f, encodeOptions))
226
+ const totalBytes = encoded.reduce((sum, b) => sum + b.length, 0)
227
+ const avgBytes = Math.round(totalBytes / encoded.length)
228
+
229
+ // Setup encodeInto if requested
230
+ let encodeFn = (f) => encode(f, encodeOptions)
231
+ if (opts.encodeInto) {
232
+ const dest = new Uint8Array(1024 * 1024) // 1MB buffer
233
+ encodeFn = (f) => encodeInto(f, dest, encodeOptions)
234
+ }
235
+
236
+ log(` ${name} (${fixtures.length} items, avg ${avgBytes} bytes)`)
237
+
238
+ // Encode benchmark
239
+ write(' encode: ')
240
+ const encResult = bench(() => {
241
+ for (const f of fixtures) encodeFn(f)
242
+ })
243
+ const encOpsPerItem = encResult.opsPerSec * fixtures.length
244
+ const encMBps = Math.round((encResult.opsPerSec * totalBytes) / (1024 * 1024) * 10) / 10
245
+ log(`${encOpsPerItem.toLocaleString()} items/s (${encMBps} MB/s)`)
246
+
247
+ // Decode benchmark
248
+ write(' decode: ')
249
+ const decResult = bench(() => {
250
+ for (const e of encoded) decode(e, decodeOptions)
251
+ })
252
+ const decOpsPerItem = decResult.opsPerSec * encoded.length
253
+ const decMBps = Math.round((decResult.opsPerSec * totalBytes) / (1024 * 1024) * 10) / 10
254
+ log(`${decOpsPerItem.toLocaleString()} items/s (${decMBps} MB/s)`)
255
+
256
+ return {
257
+ name,
258
+ count: fixtures.length,
259
+ avgBytes,
260
+ totalBytes,
261
+ encode: {
262
+ opsPerSec: encResult.opsPerSec,
263
+ itemsPerSec: encOpsPerItem,
264
+ mbPerSec: encMBps
265
+ },
266
+ decode: {
267
+ opsPerSec: decResult.opsPerSec,
268
+ itemsPerSec: decOpsPerItem,
269
+ mbPerSec: decMBps
46
270
  }
47
271
  }
48
- const avgSize = Math.round(fixtures.reduce((p, c) => p + c[1].length, 0) / fixtures.length)
272
+ }
273
+
274
+ /**
275
+ * Run a suite of benchmarks
276
+ * @param {string} name
277
+ * @param {Object} fixtureGroups
278
+ * @param {'bsky'|'default'} suiteType
279
+ */
280
+ function runSuite (name, fixtureGroups, suiteType = 'default') {
281
+ log(`\n${name}`)
282
+ log('='.repeat(name.length))
49
283
 
50
- const enc = (encoder) => {
51
- for (const [obj, byts] of fixtures) {
52
- const ebyts = encoder(obj)
53
- if (byts.length !== ebyts.length) {
54
- throw new Error('bork')
55
- }
284
+ const results = []
285
+ for (const [groupName, fixtures] of Object.entries(fixtureGroups)) {
286
+ if (Array.isArray(fixtures) && fixtures.length > 0) {
287
+ results.push(benchFixtures(groupName, fixtures, suiteType))
56
288
  }
57
- return fixtures.length
58
- }
59
-
60
- const bench = (bfn) => {
61
- const start = Date.now()
62
- let opcount = 0
63
- do {
64
- opcount += bfn()
65
- } while (Date.now() - start < targetTime)
66
- const ops = Math.round(opcount / ((Date.now() - start) / 1000))
67
- return ops
68
- }
69
-
70
- const dec = (decoder) => {
71
- for (const [obj, byts] of fixtures) {
72
- const cobj = decoder(byts)
73
- if (obj != null && typeof obj === 'object') {
74
- assert.deepStrictEqual(Object.keys(cobj).length, Object.keys(obj).length)
75
- } else {
76
- assert.deepStrictEqual(obj, cobj)
77
- }
289
+ }
290
+ return { suite: name, results }
291
+ }
292
+
293
+ /**
294
+ * Main benchmark runner
295
+ */
296
+ async function main () {
297
+ log('Generating fixtures...')
298
+ const fixtures = generateFixtures(FIXTURE_SEED)
299
+ const modeDesc = opts.mode === 'raw' ? 'raw cborg (no tags)' : 'dag-cbor mode (tag 42 + strict)'
300
+ log(`Done. Running benchmarks in ${modeDesc} (${opts.duration}ms per test)...\n`)
301
+
302
+ const allResults = []
303
+
304
+ // Bluesky suite (string-heavy) - uses ignoreUndefinedProperties
305
+ if (!opts.suite || opts.suite === 'bsky') {
306
+ allResults.push(runSuite('Bluesky (string-heavy)', {
307
+ posts: fixtures.bsky.posts,
308
+ follows: fixtures.bsky.follows,
309
+ likes: fixtures.bsky.likes,
310
+ reposts: fixtures.bsky.reposts,
311
+ profiles: fixtures.bsky.profiles,
312
+ mstNodes: fixtures.bsky.mstNodes
313
+ }, 'bsky'))
314
+ }
315
+
316
+ // Filecoin suite (bytes-heavy)
317
+ if (!opts.suite || opts.suite === 'filecoin') {
318
+ allResults.push(runSuite('Filecoin (bytes-heavy)', {
319
+ messages: fixtures.filecoin.messages,
320
+ blockHeaders: fixtures.filecoin.blockHeaders,
321
+ hamtNodes: fixtures.filecoin.hamtNodes,
322
+ amtNodes: fixtures.filecoin.amtNodes,
323
+ cidArrays: fixtures.filecoin.cidArrays
324
+ }))
325
+ }
326
+
327
+ // Micro-benchmarks
328
+ if (!opts.suite || opts.suite === 'micro') {
329
+ allResults.push(runSuite('Maps (key sorting)', {
330
+ 'small (10 keys)': fixtures.micro.mapsSmall,
331
+ 'medium (50 keys)': fixtures.micro.mapsMedium,
332
+ 'large (200 keys)': fixtures.micro.mapsLarge
333
+ }))
334
+
335
+ allResults.push(runSuite('Nesting depth', {
336
+ 'shallow (depth 3)': fixtures.micro.nestedShallow,
337
+ 'deep (depth 10)': fixtures.micro.nestedDeep
338
+ }))
339
+
340
+ allResults.push(runSuite('Strings', {
341
+ 'short (5-20 chars)': fixtures.micro.stringsShort,
342
+ 'medium (20-100 chars)': fixtures.micro.stringsMedium,
343
+ 'long (100-500 chars)': fixtures.micro.stringsLong
344
+ }))
345
+
346
+ allResults.push(runSuite('Integers', {
347
+ 'small (0-23)': fixtures.micro.integersSmall,
348
+ 'medium (0-65535)': fixtures.micro.integersMedium,
349
+ 'large (64-bit)': fixtures.micro.integersLarge
350
+ }))
351
+
352
+ allResults.push(runSuite('Bytes', {
353
+ 'small (<64)': fixtures.micro.bytesSmall,
354
+ 'medium (64-512)': fixtures.micro.bytesMedium,
355
+ 'large (1KB+)': fixtures.micro.bytesLarge
356
+ }))
357
+ }
358
+
359
+ // Summary
360
+ log('\n' + '='.repeat(50))
361
+ const allEncodeRates = allResults.flatMap(s => s.results.map(r => r.encode.mbPerSec))
362
+ const allDecodeRates = allResults.flatMap(s => s.results.map(r => r.decode.mbPerSec))
363
+ const avgEncode = Math.round(allEncodeRates.reduce((a, b) => a + b, 0) / allEncodeRates.length * 10) / 10
364
+ const avgDecode = Math.round(allDecodeRates.reduce((a, b) => a + b, 0) / allDecodeRates.length * 10) / 10
365
+ log(`Average throughput: encode ${avgEncode} MB/s, decode ${avgDecode} MB/s`)
366
+
367
+ // JSON output
368
+ if (opts.json) {
369
+ const output = {
370
+ timestamp: new Date().toISOString(),
371
+ seed: FIXTURE_SEED,
372
+ duration: opts.duration,
373
+ mode: opts.mode,
374
+ encodeInto: opts.encodeInto,
375
+ suites: allResults,
376
+ summary: { avgEncodeMBps: avgEncode, avgDecodeMBps: avgDecode }
78
377
  }
79
- return fixtures.length
378
+ console.log(JSON.stringify(output, null, 2))
80
379
  }
81
380
 
82
- const cmp = (desc, cbfn, bofn) => {
83
- write(`\t${desc} (avg ${avgSize.toLocaleString()} b):`)
84
- const cborgOps = bench(cbfn)
85
- write(` cborg @ ${cborgOps.toLocaleString()} op/s`)
86
- const borcOps = bench(bofn)
87
- write(` / borc @ ${borcOps.toLocaleString()} op/s`)
88
- const percent = Math.round((cborgOps / borcOps) * 1000) / 10
89
- write(` = ${(percent).toLocaleString()} %\n`)
90
- return percent
381
+ // Compare to baseline
382
+ if (opts.compare) {
383
+ await compareToBaseline(opts.compare, allResults)
91
384
  }
92
385
 
93
- return [
94
- cmp('encode', () => enc(encode), () => enc(borc.encode)),
95
- cmp('decode', () => dec(decode), () => dec(borcDecode))
96
- ]
386
+ return allResults
387
+ }
388
+
389
+ /**
390
+ * Compare results to a baseline file
391
+ */
392
+ async function compareToBaseline (baselinePath, currentResults) {
393
+ let baseline
394
+ try {
395
+ if (typeof process !== 'undefined') {
396
+ const fs = await import('fs')
397
+ baseline = JSON.parse(fs.readFileSync(baselinePath, 'utf8'))
398
+ } else {
399
+ const response = await fetch(baselinePath)
400
+ baseline = await response.json()
401
+ }
402
+ } catch (e) {
403
+ log(`\nCould not load baseline: ${e.message}`)
404
+ return
405
+ }
406
+
407
+ log('\nComparison to baseline:')
408
+ log('-'.repeat(50))
409
+
410
+ for (const suite of currentResults) {
411
+ const baselineSuite = baseline.suites.find(s => s.suite === suite.suite)
412
+ if (!baselineSuite) continue
413
+
414
+ log(`\n${suite.suite}:`)
415
+ for (const result of suite.results) {
416
+ const baselineResult = baselineSuite.results.find(r => r.name === result.name)
417
+ if (!baselineResult) continue
418
+
419
+ const encDiff = ((result.encode.mbPerSec - baselineResult.encode.mbPerSec) / baselineResult.encode.mbPerSec * 100).toFixed(1)
420
+ const decDiff = ((result.decode.mbPerSec - baselineResult.decode.mbPerSec) / baselineResult.decode.mbPerSec * 100).toFixed(1)
421
+ const encSign = encDiff >= 0 ? '+' : ''
422
+ const decSign = decDiff >= 0 ? '+' : ''
423
+
424
+ log(` ${result.name}: encode ${encSign}${encDiff}%, decode ${decSign}${decDiff}%`)
425
+ }
426
+ }
97
427
  }
98
428
 
99
- const targetTime = 1000
100
- const accum = []
101
- accum.push(runWith('rnd-100', 1000, targetTime, 100, { weights: { CID: 0 } }))
102
- accum.push(runWith('rnd-300', 1000, targetTime, 300, { weights: { CID: 0 } }))
103
- accum.push(runWith('rnd-nomap-300', 1000, targetTime, 300, { weights: { CID: 0, map: 0 } }))
104
- accum.push(runWith('rnd-nolist-300', 1000, targetTime, 300, { weights: { CID: 0, list: 0 } }))
105
- accum.push(runWith('rnd-nofloat-300', 1000, targetTime, 300, { weights: { CID: 0, float: 0 } }))
106
- accum.push(runWith('rnd-nomaj7-300', 1000, targetTime, 300, { weights: { CID: 0, float: 0, null: 0, boolean: 0 } }))
107
- accum.push(runWith('rnd-nostr-300', 1000, targetTime, 300, { weights: { CID: 0, string: 0, bytes: 0 } }))
108
- accum.push(runWith('rnd-nostrbyts-300', 1000, targetTime, 300, { weights: { CID: 0, string: 0 } }))
109
- accum.push(runWith('rnd-1000', 1000, targetTime, 1000, { weights: { CID: 0 } }))
110
- accum.push(runWith('rnd-2000', 1000, targetTime, 2000, { weights: { CID: 0 } }))
111
- accum.push(runWith('rnd-fil-100', 1000, targetTime, 100, { weights: { float: 0, map: 0, CID: 0 } }))
112
- accum.push(runWith('rnd-fil-300', 1000, targetTime, 300, { weights: { float: 0, map: 0, CID: 0 } }))
113
- accum.push(runWith('rnd-fil-500', 1000, targetTime, 500, { weights: { float: 0, map: 0, CID: 0 } }))
114
- accum.push(runWith('rnd-fil-1000', 1000, targetTime, 1000, { weights: { float: 0, map: 0, CID: 0 } }))
115
- accum.push(runWith('rnd-fil-2000', 1000, targetTime, 2000, { weights: { float: 0, map: 0, CID: 0 } }))
116
- console.log(`Avg encode: ${Math.round(accum.reduce((p, c) => p + c[0], 0) / accum.length).toLocaleString()} %`)
117
- console.log(`Avg decode: ${Math.round(accum.reduce((p, c) => p + c[1], 0) / accum.length).toLocaleString()} %`)
429
+ // Run
430
+ main().catch(console.error)