@durable-streams/client-conformance-tests 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/README.md +451 -0
  2. package/dist/adapters/typescript-adapter.d.ts +1 -0
  3. package/dist/adapters/typescript-adapter.js +586 -0
  4. package/dist/benchmark-runner-C_Yghc8f.js +1333 -0
  5. package/dist/cli.d.ts +1 -0
  6. package/dist/cli.js +265 -0
  7. package/dist/index.d.ts +508 -0
  8. package/dist/index.js +4 -0
  9. package/dist/protocol-DyEvTHPF.d.ts +472 -0
  10. package/dist/protocol-qb83AeUH.js +120 -0
  11. package/dist/protocol.d.ts +2 -0
  12. package/dist/protocol.js +3 -0
  13. package/package.json +53 -0
  14. package/src/adapters/typescript-adapter.ts +848 -0
  15. package/src/benchmark-runner.ts +860 -0
  16. package/src/benchmark-scenarios.ts +311 -0
  17. package/src/cli.ts +294 -0
  18. package/src/index.ts +50 -0
  19. package/src/protocol.ts +656 -0
  20. package/src/runner.ts +1191 -0
  21. package/src/test-cases.ts +475 -0
  22. package/test-cases/consumer/cache-headers.yaml +150 -0
  23. package/test-cases/consumer/error-handling.yaml +108 -0
  24. package/test-cases/consumer/message-ordering.yaml +209 -0
  25. package/test-cases/consumer/offset-handling.yaml +209 -0
  26. package/test-cases/consumer/offset-resumption.yaml +197 -0
  27. package/test-cases/consumer/read-catchup.yaml +173 -0
  28. package/test-cases/consumer/read-longpoll.yaml +132 -0
  29. package/test-cases/consumer/read-sse.yaml +145 -0
  30. package/test-cases/consumer/retry-resilience.yaml +160 -0
  31. package/test-cases/consumer/streaming-equivalence.yaml +226 -0
  32. package/test-cases/lifecycle/dynamic-headers.yaml +147 -0
  33. package/test-cases/lifecycle/headers-params.yaml +117 -0
  34. package/test-cases/lifecycle/stream-lifecycle.yaml +148 -0
  35. package/test-cases/producer/append-data.yaml +142 -0
  36. package/test-cases/producer/batching.yaml +112 -0
  37. package/test-cases/producer/create-stream.yaml +87 -0
  38. package/test-cases/producer/error-handling.yaml +90 -0
  39. package/test-cases/producer/sequence-ordering.yaml +148 -0
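The hunk below adds the new file package/src/adapters/typescript-adapter.ts: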
@@ -0,0 +1,848 @@
#!/usr/bin/env node
/**
 * TypeScript client adapter for conformance testing.
 *
 * This adapter implements the stdin/stdout protocol for the
 * @durable-streams/client package.
 *
 * Run directly:
 *   npx tsx packages/client-conformance-tests/src/adapters/typescript-adapter.ts
 */
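
// Illustrative exchange, assuming the line-delimited JSON framing implied by
// parseCommand/serializeResult and the readline loop in main() (the server URL
// below is a placeholder):
//   stdin  > {"type":"init","serverUrl":"http://localhost:8080"}
//   stdout < {"type":"init","success":true,"clientName":"@durable-streams/client",...}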

import { createInterface } from "node:readline"
import {
  DurableStream,
  DurableStreamError,
  FetchError,
  stream,
} from "@durable-streams/client"
import {
  ErrorCodes,
  decodeBase64,
  parseCommand,
  serializeResult,
} from "../protocol.js"
import type {
  BenchmarkCommand,
  BenchmarkOperation,
  ErrorCode,
  ReadChunk,
  TestCommand,
  TestResult,
} from "../protocol.js"

// Package version; ideally this would be read from package.json
const CLIENT_VERSION = `0.0.1`

let serverUrl = ``

// Track content-type per stream path for append operations
const streamContentTypes = new Map<string, string>()

// Dynamic headers/params state
interface DynamicValue {
  type: `counter` | `timestamp` | `token`
  counter: number
  tokenValue?: string
}

const dynamicHeaders = new Map<string, DynamicValue>()
const dynamicParams = new Map<string, DynamicValue>()

/** Resolve dynamic headers, returning both the header function map and tracked values */
function resolveDynamicHeaders(): {
  headers: Record<string, () => string>
  values: Record<string, string>
} {
  const headers: Record<string, () => string> = {}
  const values: Record<string, string> = {}

  for (const [name, config] of dynamicHeaders.entries()) {
    // Capture current values for tracking
    let value: string
    switch (config.type) {
      case `counter`:
        config.counter++
        value = config.counter.toString()
        break
      case `timestamp`:
        value = Date.now().toString()
        break
      case `token`:
        value = config.tokenValue ?? ``
        break
    }
    values[name] = value

    // Create closure that returns the value we just computed
    // (For actual dynamic behavior, the client would call this per-request,
    // but for testing we want to track what value was used)
    const capturedValue = value
    headers[name] = () => capturedValue
  }

  return { headers, values }
}

/** Resolve dynamic params */
function resolveDynamicParams(): {
  params: Record<string, () => string>
  values: Record<string, string>
} {
  const params: Record<string, () => string> = {}
  const values: Record<string, string> = {}

  for (const [name, config] of dynamicParams.entries()) {
    let value: string
    switch (config.type) {
      case `counter`:
        config.counter++
        value = config.counter.toString()
        break
      case `timestamp`:
        value = Date.now().toString()
        break
      default:
        value = ``
    }
    values[name] = value

    const capturedValue = value
    params[name] = () => capturedValue
  }

  return { params, values }
}
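
// The header closures produced above are merged into the `headers` option passed to
// DurableStream/stream() in the append and read handlers below; the captured values
// are echoed back as headersSent/paramsSent so the test runner can assert on what
// was actually used for a request.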
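// handleCommand dispatches a single TestCommand to the client under test and maps
// the outcome to a TestResult. Command types handled below: init, create, connect,
// append, read, head, delete, shutdown, benchmark, set-dynamic-header,
// set-dynamic-param, clear-dynamic.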
async function handleCommand(command: TestCommand): Promise<TestResult> {
  switch (command.type) {
    case `init`: {
      serverUrl = command.serverUrl
      // Clear all caches on init
      streamContentTypes.clear()
      dynamicHeaders.clear()
      dynamicParams.clear()
      return {
        type: `init`,
        success: true,
        clientName: `@durable-streams/client`,
        clientVersion: CLIENT_VERSION,
        features: {
          batching: true,
          sse: true,
          longPoll: true,
          streaming: true,
          dynamicHeaders: true,
        },
      }
    }

    case `create`: {
      try {
        const url = `${serverUrl}${command.path}`
        const contentType = command.contentType ?? `application/octet-stream`

        // Check if the stream already exists with a HEAD request first
        let alreadyExists = false
        try {
          await DurableStream.head({ url })
          alreadyExists = true
        } catch {
          // Stream doesn't exist, which is expected for new creates
        }

        const ds = await DurableStream.create({
          url,
          contentType,
          ttlSeconds: command.ttlSeconds,
          expiresAt: command.expiresAt,
          headers: command.headers,
        })

        // Cache the content-type
        streamContentTypes.set(command.path, contentType)

        const head = await ds.head()

        return {
          type: `create`,
          success: true,
          status: alreadyExists ? 200 : 201, // 201 for new, 200 for idempotent
          offset: head.offset,
        }
      } catch (err) {
        return errorResult(`create`, err)
      }
    }

    case `connect`: {
      try {
        const url = `${serverUrl}${command.path}`
        const ds = await DurableStream.connect({
          url,
          headers: command.headers,
        })

        const head = await ds.head()

        // Cache the content-type for this stream
        if (head.contentType) {
          streamContentTypes.set(command.path, head.contentType)
        }

        return {
          type: `connect`,
          success: true,
          status: 200,
          offset: head.offset,
        }
      } catch (err) {
        return errorResult(`connect`, err)
      }
    }

    case `append`: {
      try {
        const url = `${serverUrl}${command.path}`

        // Get content-type from cache or use default
        const contentType =
          streamContentTypes.get(command.path) ?? `application/octet-stream`

        // Resolve dynamic headers/params
        const { headers: dynamicHdrs, values: headersSent } =
          resolveDynamicHeaders()
        const { values: paramsSent } = resolveDynamicParams()

        // Merge command headers with dynamic headers (command takes precedence)
        const mergedHeaders: Record<string, string | (() => string)> = {
          ...dynamicHdrs,
          ...command.headers,
        }

        const ds = new DurableStream({
          url,
          headers: mergedHeaders,
          contentType,
        })

        let body: Uint8Array | string
        if (command.binary) {
          body = decodeBase64(command.data)
        } else {
          body = command.data
        }

        await ds.append(body, { seq: command.seq?.toString() })
        const head = await ds.head()

        return {
          type: `append`,
          success: true,
          status: 200,
          offset: head.offset,
          headersSent:
            Object.keys(headersSent).length > 0 ? headersSent : undefined,
          paramsSent:
            Object.keys(paramsSent).length > 0 ? paramsSent : undefined,
        }
      } catch (err) {
        return errorResult(`append`, err)
      }
    }

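    // The read case supports two paths: a non-live catch-up read that drains the
    // current body with body(), and a live read (long-poll or SSE) that collects
    // chunks via subscribeBytes() until maxChunks, an up-to-date signal (when
    // waitForUpToDate is set), or the timeout is reached.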
    case `read`: {
      try {
        const url = `${serverUrl}${command.path}`

        // Resolve dynamic headers/params
        const { headers: dynamicHdrs, values: headersSent } =
          resolveDynamicHeaders()
        const { values: paramsSent } = resolveDynamicParams()

        // Merge command headers with dynamic headers (command takes precedence)
        const mergedHeaders: Record<string, string | (() => string)> = {
          ...dynamicHdrs,
          ...command.headers,
        }

        // Determine live mode
        let live: `long-poll` | `sse` | false
        if (command.live === `long-poll`) {
          live = `long-poll`
        } else if (command.live === `sse`) {
          live = `sse`
        } else {
          live = false
        }

        // Create abort controller for timeout handling
        const abortController = new AbortController()
        const timeoutMs = command.timeoutMs ?? 5000

        // Set up timeout BEFORE calling stream() - important for long-poll
        // where the first request may block waiting for data
        const timeoutId = setTimeout(() => {
          abortController.abort()
        }, timeoutMs)

        let response: Awaited<ReturnType<typeof stream>>
        try {
          response = await stream({
            url,
            offset: command.offset,
            live,
            headers: mergedHeaders,
            signal: abortController.signal,
          })
        } catch (err) {
          clearTimeout(timeoutId)
          // If we timed out waiting for the initial response, return gracefully
          if (abortController.signal.aborted) {
            return {
              type: `read`,
              success: true,
              status: 200,
              chunks: [],
              offset: command.offset ?? `-1`,
              upToDate: true, // Timed out = caught up (no new data)
              headersSent:
                Object.keys(headersSent).length > 0 ? headersSent : undefined,
              paramsSent:
                Object.keys(paramsSent).length > 0 ? paramsSent : undefined,
            }
          }
          throw err
        }

        // Initial stream() succeeded, clear the outer timeout
        clearTimeout(timeoutId)

        const chunks: Array<ReadChunk> = []
        let finalOffset = command.offset ?? response.offset
        let upToDate = response.upToDate

        // Collect chunks using body() for non-live mode or subscribeBytes() for live
        const maxChunks = command.maxChunks ?? 100

        if (!live) {
          // For non-live mode, use body() to get all data
          try {
            const data = await response.body()
            if (data.length > 0) {
              chunks.push({
                data: new TextDecoder().decode(data),
                offset: response.offset,
              })
            }
            finalOffset = response.offset
            upToDate = response.upToDate
          } catch {
            // If body fails, stream might be empty
          }
        } else {
          // For live mode, use subscribeBytes which provides per-chunk metadata
          const decoder = new TextDecoder()
          const startTime = Date.now()
          let chunkCount = 0
          let done = false

          // Create a promise that resolves when we're done collecting chunks
          await new Promise<void>((resolve) => {
            // Set up subscription timeout
            const subscriptionTimeoutId = setTimeout(() => {
              done = true
              // Abort the underlying fetch to stop the long-poll request
              abortController.abort()
              // Capture final state from response when timing out
              // Important for empty streams that never call subscribeBytes
              // For timeouts with no data, mark as upToDate since we've caught up
              upToDate = true
              finalOffset = response.offset
              resolve()
            }, timeoutMs)

            const unsubscribe = response.subscribeBytes(async (chunk) => {
              // Check if we should stop
              if (done || chunkCount >= maxChunks) {
                return
              }

              // Check timeout
              if (Date.now() - startTime > timeoutMs) {
                done = true
                resolve()
                return
              }

              const hasData = chunk.data.length > 0
              if (hasData) {
                chunks.push({
                  data: decoder.decode(chunk.data),
                  offset: chunk.offset,
                })
                chunkCount++
              }

              finalOffset = chunk.offset
              upToDate = chunk.upToDate

              // For waitForUpToDate, stop when we've reached up-to-date
              if (command.waitForUpToDate && chunk.upToDate) {
                done = true
                clearTimeout(subscriptionTimeoutId)
                resolve()
                return
              }

              // Stop if we've collected enough chunks
              if (chunkCount >= maxChunks) {
                done = true
                clearTimeout(subscriptionTimeoutId)
                resolve()
              }

              // Keep async for backpressure support even though not using await
              await Promise.resolve()
            })

            // Resolve when the underlying response closes
            // Also capture final upToDate state for empty streams
            response.closed
              .then(() => {
                if (!done) {
                  done = true
                  clearTimeout(subscriptionTimeoutId)
                  // For empty streams, capture the final upToDate from response
                  upToDate = response.upToDate
                  finalOffset = response.offset
                  resolve()
                }
              })
              .catch(() => {
                if (!done) {
                  done = true
                  clearTimeout(subscriptionTimeoutId)
                  resolve()
                }
              })

            // unsubscribe is intentionally not called here; response.cancel() below
            // tears the stream down once this promise settles
            void unsubscribe
          })
        }

        // Cancel the response to clean up
        response.cancel()

        return {
          type: `read`,
          success: true,
          status: 200,
          chunks,
          offset: finalOffset,
          upToDate,
          headersSent:
            Object.keys(headersSent).length > 0 ? headersSent : undefined,
          paramsSent:
            Object.keys(paramsSent).length > 0 ? paramsSent : undefined,
        }
      } catch (err) {
        return errorResult(`read`, err)
      }
    }

    case `head`: {
      try {
        const url = `${serverUrl}${command.path}`
        const result = await DurableStream.head({
          url,
          headers: command.headers,
        })

        // Cache content-type
        if (result.contentType) {
          streamContentTypes.set(command.path, result.contentType)
        }

        return {
          type: `head`,
          success: true,
          status: 200,
          offset: result.offset,
          contentType: result.contentType,
          // Note: HeadResult from client doesn't expose TTL info currently
        }
      } catch (err) {
        return errorResult(`head`, err)
      }
    }

    case `delete`: {
      try {
        const url = `${serverUrl}${command.path}`
        await DurableStream.delete({
          url,
          headers: command.headers,
        })

        // Remove from cache
        streamContentTypes.delete(command.path)

        return {
          type: `delete`,
          success: true,
          status: 200,
        }
      } catch (err) {
        return errorResult(`delete`, err)
      }
    }

    case `shutdown`: {
      return {
        type: `shutdown`,
        success: true,
      }
    }

    case `benchmark`: {
      return handleBenchmark(command)
    }

    case `set-dynamic-header`: {
      dynamicHeaders.set(command.name, {
        type: command.valueType,
        counter: 0,
        tokenValue: command.initialValue,
      })
      return {
        type: `set-dynamic-header`,
        success: true,
      }
    }

    case `set-dynamic-param`: {
      dynamicParams.set(command.name, {
        type: command.valueType,
        counter: 0,
      })
      return {
        type: `set-dynamic-param`,
        success: true,
      }
    }

    case `clear-dynamic`: {
      dynamicHeaders.clear()
      dynamicParams.clear()
      return {
        type: `clear-dynamic`,
        success: true,
      }
    }

    default:
      return {
        type: `error`,
        success: false,
        commandType: (command as TestCommand).type,
        errorCode: ErrorCodes.NOT_SUPPORTED,
        message: `Unknown command type: ${(command as { type: string }).type}`,
      }
  }
}

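// Error mapping precedence: DurableStreamError codes first, then FetchError by HTTP
// status (sniffing the message for sequence/offset details), then plain Errors
// (network errors recognized by message), then a stringified fallback.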
function errorResult(
  commandType: TestCommand[`type`],
  err: unknown
): TestResult {
  if (err instanceof DurableStreamError) {
    let errorCode: ErrorCode = ErrorCodes.INTERNAL_ERROR
    let status: number | undefined

    // Map error codes - use actual DurableStreamErrorCode values
    if (err.code === `NOT_FOUND`) {
      errorCode = ErrorCodes.NOT_FOUND
      status = 404
    } else if (err.code === `CONFLICT_EXISTS`) {
      errorCode = ErrorCodes.CONFLICT
      status = 409
    } else if (err.code === `CONFLICT_SEQ`) {
      errorCode = ErrorCodes.SEQUENCE_CONFLICT
      status = 409
    } else if (err.code === `BAD_REQUEST`) {
      errorCode = ErrorCodes.INVALID_OFFSET
      status = 400
    }

    return {
      type: `error`,
      success: false,
      commandType,
      status,
      errorCode,
      message: err.message,
    }
  }

  if (err instanceof FetchError) {
    let errorCode: ErrorCode
    const msg = err.message.toLowerCase()

    if (err.status === 404) {
      errorCode = ErrorCodes.NOT_FOUND
    } else if (err.status === 409) {
      // Check for sequence conflict vs general conflict
      if (msg.includes(`sequence`)) {
        errorCode = ErrorCodes.SEQUENCE_CONFLICT
      } else {
        errorCode = ErrorCodes.CONFLICT
      }
    } else if (err.status === 400) {
      // Check if this is an invalid offset error
      if (msg.includes(`offset`) || msg.includes(`invalid`)) {
        errorCode = ErrorCodes.INVALID_OFFSET
      } else {
        errorCode = ErrorCodes.UNEXPECTED_STATUS
      }
    } else {
      errorCode = ErrorCodes.UNEXPECTED_STATUS
    }

    return {
      type: `error`,
      success: false,
      commandType,
      status: err.status,
      errorCode,
      message: err.message,
    }
  }

  if (err instanceof Error) {
    if (err.message.includes(`ECONNREFUSED`) || err.message.includes(`fetch`)) {
      return {
        type: `error`,
        success: false,
        commandType,
        errorCode: ErrorCodes.NETWORK_ERROR,
        message: err.message,
      }
    }

    return {
      type: `error`,
      success: false,
      commandType,
      errorCode: ErrorCodes.INTERNAL_ERROR,
      message: err.message,
    }
  }

  return {
    type: `error`,
    success: false,
    commandType,
    errorCode: ErrorCodes.INTERNAL_ERROR,
    message: String(err),
  }
}

/**
 * Handle benchmark commands with high-resolution timing.
 */
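// Timing uses process.hrtime.bigint(); the nanosecond duration is reported as a
// string in the result, since BigInt values cannot be serialized to JSON directly.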
async function handleBenchmark(command: BenchmarkCommand): Promise<TestResult> {
  const { iterationId, operation } = command

  try {
    const startTime = process.hrtime.bigint()
    const metrics: { bytesTransferred?: number; messagesProcessed?: number } =
      {}

    switch (operation.op) {
      case `append`: {
        const url = `${serverUrl}${operation.path}`
        const contentType =
          streamContentTypes.get(operation.path) ?? `application/octet-stream`
        const ds = new DurableStream({ url, contentType })

        // Generate payload (using fill for speed - don't want to measure PRNG)
        const payload = new Uint8Array(operation.size).fill(42)

        await ds.append(payload)
        metrics.bytesTransferred = operation.size
        break
      }

      case `read`: {
        const url = `${serverUrl}${operation.path}`
        const res = await stream({ url, offset: operation.offset, live: false })
        const data = await res.body()
        metrics.bytesTransferred = data.length
        break
      }

      case `roundtrip`: {
        const url = `${serverUrl}${operation.path}`
        const contentType = operation.contentType ?? `application/octet-stream`

        // Create stream first
        const ds = await DurableStream.create({ url, contentType })

        // Generate payload (using fill for speed - don't want to measure PRNG)
        const payload = new Uint8Array(operation.size).fill(42)

        // Start reading before appending (to catch the data via live mode)
        const readPromise = (async () => {
          const res = await ds.stream({
            live: operation.live ?? `long-poll`,
          })

          // Wait for data
          return new Promise<Uint8Array>((resolve) => {
            const unsubscribe = res.subscribeBytes(async (chunk) => {
              if (chunk.data.length > 0) {
                unsubscribe()
                res.cancel()
                resolve(chunk.data)
              }
            })
          })
        })()

        // Append the data
        await ds.append(payload)

        // Wait for read to complete
        const readData = await readPromise

        metrics.bytesTransferred = operation.size + readData.length
        break
      }

      case `create`: {
        const url = `${serverUrl}${operation.path}`
        await DurableStream.create({
          url,
          contentType: operation.contentType ?? `application/octet-stream`,
        })
        break
      }

      case `throughput_append`: {
        const url = `${serverUrl}${operation.path}`
        const contentType =
          streamContentTypes.get(operation.path) ?? `application/octet-stream`

        // Ensure stream exists
        try {
          await DurableStream.create({ url, contentType })
        } catch {
          // Stream may already exist
        }

        const ds = new DurableStream({ url, contentType })

        // Generate payload (using fill for speed - don't want to measure PRNG)
        const payload = new Uint8Array(operation.size).fill(42)

        // Submit all messages at once - client batching will handle the rest
        await Promise.all(
          Array.from({ length: operation.count }, () => ds.append(payload))
        )

        metrics.bytesTransferred = operation.count * operation.size
        metrics.messagesProcessed = operation.count
        break
      }

      case `throughput_read`: {
        const url = `${serverUrl}${operation.path}`
        const res = await stream({ url, live: false })
        // Iterate over JSON messages and count them
        let count = 0
        let bytes = 0
        // jsonStream() returns a ReadableStream that can be async iterated
        for await (const msg of res.jsonStream()) {
          count++
          // Rough byte count from JSON
          bytes += JSON.stringify(msg).length
        }
        metrics.bytesTransferred = bytes
        metrics.messagesProcessed = count
        break
      }

      default: {
        return {
          type: `error`,
          success: false,
          commandType: `benchmark`,
          errorCode: ErrorCodes.NOT_SUPPORTED,
          message: `Unknown benchmark operation: ${(operation as BenchmarkOperation).op}`,
        }
      }
    }

    const endTime = process.hrtime.bigint()
    const durationNs = endTime - startTime

    return {
      type: `benchmark`,
      success: true,
      iterationId,
      durationNs: durationNs.toString(),
      metrics,
    }
  } catch (err) {
    return {
      type: `error`,
      success: false,
      commandType: `benchmark`,
      errorCode: ErrorCodes.INTERNAL_ERROR,
      message: err instanceof Error ? err.message : String(err),
    }
  }
}

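// Reads newline-delimited commands from stdin, writes one serialized result per
// line to stdout, and exits after a `shutdown` command or when stdin closes.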
async function main(): Promise<void> {
  const rl = createInterface({
    input: process.stdin,
    output: process.stdout,
    terminal: false,
  })

  for await (const line of rl) {
    if (!line.trim()) continue

    try {
      const command = parseCommand(line)
      const result = await handleCommand(command)
      console.log(serializeResult(result))

      // Exit after shutdown command
      if (command.type === `shutdown`) {
        break
      }
    } catch (err) {
      console.log(
        serializeResult({
          type: `error`,
          success: false,
          commandType: `init`,
          errorCode: ErrorCodes.PARSE_ERROR,
          message: `Failed to parse command: ${err}`,
        })
      )
    }
  }

  process.exit(0)
}

main().catch((err) => {
  console.error(`Fatal error:`, err)
  process.exit(1)
})