@durable-streams/server-conformance-tests 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +2 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1 -1
- package/dist/{src-DK3GDgwo.cjs → src-ChUwq33M.cjs} +485 -20
- package/dist/{src-DcbQ_SIQ.js → src-DWkKYD4d.js} +485 -20
- package/dist/test-runner.cjs +1 -1
- package/dist/test-runner.js +1 -1
- package/package.json +2 -2
- package/src/index.ts +845 -92
package/src/index.ts
CHANGED
@@ -17,6 +17,8 @@ import {
   export interface ConformanceTestOptions {
     /** Base URL of the server to test */
     baseUrl: string
+    /** Timeout for long-poll tests in milliseconds (default: 20000) */
+    longPollTimeoutMs?: number
   }

   /**
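For suite consumers, the new field is simply an optional property on `ConformanceTestOptions`; a minimal usage sketch (the import path, port, and test-runner wiring are illustrative assumptions, not shown in this diff):

```ts
// Hypothetical wiring -- assumes the package root re-exports runConformanceTests.
import { runConformanceTests } from "@durable-streams/server-conformance-tests"

runConformanceTests({
  baseUrl: "http://localhost:4437", // example port; point at the server under test
  longPollTimeoutMs: 30_000, // optional; the suite defaults to 20000 ms
})
```

Further down in this diff, each long-poll test budgets `(longPollTimeoutMs ?? 20_000) + 1_000` ms, so raising the option also raises the per-test timeout.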
@@ -108,7 +110,7 @@ function parseSSEEvents(
   sseText: string
 ): Array<{ type: string; data: string }> {
   const events: Array<{ type: string; data: string }> = []
-  const normalized = sseText.replace(/\r\n/g, `\n`)
+  const normalized = sseText.replace(/\r\n/g, `\n`).replace(/\r/g, `\n`)

   // Split by double newlines (event boundaries)
   const eventBlocks = normalized.split(`\n\n`).filter((block) => block.trim())
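The extra `.replace(/\r/g, ...)` matters because the SSE spec also allows a bare CR as a line terminator; a quick standalone illustration of the normalization (not part of the package):

```ts
// CR-only and CRLF framing should normalize to the same LF-delimited text.
const normalize = (s: string) => s.replace(/\r\n/g, `\n`).replace(/\r/g, `\n`)
const crOnly = `event: data\rdata: hello\r\r`
const crlf = `event: data\r\ndata: hello\r\n\r\n`
console.assert(normalize(crOnly) === normalize(crlf)) // both become "event: data\ndata: hello\n\n"
```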
@@ -144,6 +146,8 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
   // Access options.baseUrl directly instead of destructuring to support
   // mutable config objects (needed for dynamic port assignment)
   const getBaseUrl = () => options.baseUrl
+  const getLongPollTestTimeoutMs = () =>
+    (options.longPollTimeoutMs ?? 20_000) + 1_000

   // ============================================================================
   // Basic Stream Operations
@@ -380,42 +384,47 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
   // ============================================================================

   describe(`Long-Poll Operations`, () => {
-    test(
-
-
-
-
-
+    test(
+      `should wait for new data with long-poll`,
+      async () => {
+        const streamPath = `/v1/stream/longpoll-test-${Date.now()}`
+        const stream = await DurableStream.create({
+          url: `${getBaseUrl()}${streamPath}`,
+          contentType: `text/plain`,
+        })

-
+        const receivedData: Array<string> = []

-
-
-
-
-
-
-
-
-
-
-
-
-
+        // Start reading in long-poll mode
+        const readPromise = (async () => {
+          const res = await stream.stream({ live: `long-poll` })
+          await new Promise<void>((resolve) => {
+            const unsubscribe = res.subscribeBytes((chunk) => {
+              if (chunk.data.length > 0) {
+                receivedData.push(new TextDecoder().decode(chunk.data))
+              }
+              if (receivedData.length >= 1) {
+                unsubscribe()
+                res.cancel()
+                resolve()
+              }
+              return Promise.resolve()
+            })
           })
-        })
-      })()
+        })()

-
-
+        // Wait a bit for the long-poll to be active
+        await new Promise((resolve) => setTimeout(resolve, 500))

-
-
+        // Append data while long-poll is waiting
+        await stream.append(`new data`)

-
+        await readPromise

-
-
+        expect(receivedData).toContain(`new data`)
+      },
+      getLongPollTestTimeoutMs()
+    )

     test(`should return immediately if data already exists`, async () => {
       const streamPath = `/v1/stream/longpoll-immediate-test-${Date.now()}`
@@ -470,7 +479,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         method: `PUT`,
         headers: { "Content-Type": `text/plain` },
       })
-      expect(
+      expect(secondResponse.status).toBe(200)
     })

     test(`should return 409 on PUT with different config`, async () => {
@@ -507,7 +516,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         body: `hello world`,
       })

-      expect(
+      expect(response.status).toBe(204)
       expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()
     })

@@ -784,7 +793,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         body: `second`,
       })

-      expect(
+      expect(response.status).toBe(204)
     })

     test(`should reject duplicate seq values`, async () => {
@@ -820,6 +829,218 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
     })
   })

+  // ============================================================================
+  // Browser Security Headers (Protocol Section 10.7)
+  // ============================================================================
+
+  describe(`Browser Security Headers`, () => {
+    test(`should include X-Content-Type-Options: nosniff on GET responses`, async () => {
+      const streamPath = `/v1/stream/security-get-nosniff-${Date.now()}`
+
+      // Create stream with data
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+        body: `test data`,
+      })
+
+      // Read data
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `GET`,
+      })
+
+      expect(response.status).toBe(200)
+      expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+    })
+
+    test(`should include X-Content-Type-Options: nosniff on PUT responses`, async () => {
+      const streamPath = `/v1/stream/security-put-nosniff-${Date.now()}`
+
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      expect(response.status).toBe(201)
+      expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+    })
+
+    test(`should include X-Content-Type-Options: nosniff on POST responses`, async () => {
+      const streamPath = `/v1/stream/security-post-nosniff-${Date.now()}`
+
+      // Create stream
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      // Append data
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `POST`,
+        headers: { "Content-Type": `text/plain` },
+        body: `data`,
+      })
+
+      expect([200, 204]).toContain(response.status)
+      expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+    })
+
+    test(`should include X-Content-Type-Options: nosniff on HEAD responses`, async () => {
+      const streamPath = `/v1/stream/security-head-nosniff-${Date.now()}`
+
+      // Create stream
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      // HEAD request
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `HEAD`,
+      })
+
+      expect(response.status).toBe(200)
+      expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+    })
+
+    test(`should include Cross-Origin-Resource-Policy header on GET responses`, async () => {
+      const streamPath = `/v1/stream/security-corp-get-${Date.now()}`
+
+      // Create stream with data
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `application/octet-stream` },
+        body: new Uint8Array([1, 2, 3, 4]),
+      })
+
+      // Read data
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `GET`,
+      })
+
+      expect(response.status).toBe(200)
+      const corp = response.headers.get(`cross-origin-resource-policy`)
+      expect(corp).toBeDefined()
+      expect([`cross-origin`, `same-origin`, `same-site`]).toContain(corp)
+    })
+
+    test(`should include Cache-Control: no-store on HEAD responses`, async () => {
+      const streamPath = `/v1/stream/security-head-cache-${Date.now()}`
+
+      // Create stream
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      // HEAD request
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `HEAD`,
+      })
+
+      expect(response.status).toBe(200)
+      const cacheControl = response.headers.get(`cache-control`)
+      expect(cacheControl).toBeDefined()
+      expect(cacheControl).toContain(`no-store`)
+    })
+
+    test(`should include X-Content-Type-Options: nosniff on SSE responses`, async () => {
+      const streamPath = `/v1/stream/security-sse-nosniff-${Date.now()}`
+
+      // Create stream with data
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `application/json` },
+        body: JSON.stringify({ test: `data` }),
+      })
+
+      // Get offset
+      const headResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `HEAD`,
+      })
+      const offset = headResponse.headers.get(STREAM_OFFSET_HEADER) ?? `-1`
+
+      // SSE request with abort controller
+      const controller = new AbortController()
+      const timeoutId = setTimeout(() => controller.abort(), 500)
+
+      try {
+        const response = await fetch(
+          `${getBaseUrl()}${streamPath}?offset=${offset}&live=sse`,
+          {
+            method: `GET`,
+            signal: controller.signal,
+          }
+        )
+
+        expect(response.status).toBe(200)
+        expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+      } catch (e) {
+        // AbortError is expected
+        if (!(e instanceof Error && e.name === `AbortError`)) {
+          throw e
+        }
+      } finally {
+        clearTimeout(timeoutId)
+      }
+    })
+
+    test(`should include X-Content-Type-Options: nosniff on long-poll responses`, async () => {
+      const streamPath = `/v1/stream/security-longpoll-nosniff-${Date.now()}`
+
+      // Create stream with data
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+        body: `initial data`,
+      })
+
+      // Get offset
+      const headResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `HEAD`,
+      })
+      const offset = headResponse.headers.get(STREAM_OFFSET_HEADER) ?? `-1`
+
+      // Long-poll request (will likely return 204 if no new data)
+      const controller = new AbortController()
+      const timeoutId = setTimeout(() => controller.abort(), 500)
+
+      try {
+        const response = await fetch(
+          `${getBaseUrl()}${streamPath}?offset=${offset}&live=long-poll`,
+          {
+            method: `GET`,
+            signal: controller.signal,
+          }
+        )
+
+        // Either 200 (data) or 204 (timeout) - both should have nosniff
+        expect([200, 204]).toContain(response.status)
+        expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+      } catch (e) {
+        // AbortError is acceptable if request times out
+        if (!(e instanceof Error && e.name === `AbortError`)) {
+          throw e
+        }
+      } finally {
+        clearTimeout(timeoutId)
+      }
+    })
+
+    test(`should include security headers on error responses`, async () => {
+      const streamPath = `/v1/stream/security-error-headers-${Date.now()}`
+
+      // Try to read non-existent stream (404)
+      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `GET`,
+      })
+
+      expect(response.status).toBe(404)
+      // Security headers should be present even on error responses
+      expect(response.headers.get(`x-content-type-options`)).toBe(`nosniff`)
+    })
+  })
+
   // ============================================================================
   // TTL and Expiry Validation
   // ============================================================================
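For server authors, this suite only inspects response headers, so a small shared helper applied to every response (including 204 long-poll timeouts and 404 errors) is typically enough to pass; a minimal sketch with an assumed helper name, not part of this package:

```ts
// Hypothetical helper: set the headers the new tests look for on every response.
function applyBrowserSecurityHeaders(headers: Headers): void {
  headers.set("X-Content-Type-Options", "nosniff")
  // The CORP test accepts any of `cross-origin`, `same-origin`, or `same-site`.
  headers.set("Cross-Origin-Resource-Policy", "cross-origin")
  // HEAD responses additionally need a Cache-Control value containing `no-store`.
}
```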
@@ -918,7 +1139,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         body: `test`,
       })

-      expect(
+      expect(response.status).toBe(204)
     })

     test(`should allow idempotent create with different case content-type`, async () => {
@@ -936,7 +1157,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
        method: `PUT`,
        headers: { "Content-Type": `APPLICATION/JSON` },
      })
-      expect(
+      expect(response2.status).toBe(200)
     })

     test(`should accept headers with different casing`, async () => {
@@ -958,7 +1179,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         body: `test`,
       })

-      expect(
+      expect(response.status).toBe(204)
     })
   })

@@ -1002,7 +1223,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         body: `{"test": true}`,
       })

-      expect(
+      expect(response.status).toBe(204)
     })

     test(`should return stream content-type on GET`, async () => {
@@ -1483,7 +1704,10 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
       expect(response.status).toBe(201)
       const location = response.headers.get(`location`)
       expect(location).toBeDefined()
-
+      // Check that Location contains the correct path (host may vary by server config)
+      expect(location!.endsWith(streamPath)).toBe(true)
+      // Verify it's a valid absolute URL
+      expect(() => new URL(location!)).not.toThrow()
     })

     test(`should reject missing Content-Type on POST`, async () => {
@@ -1631,58 +1855,62 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
       expect(parseInt(cursor2!, 10)).toBeGreaterThan(parseInt(cursor1!, 10))
     })

-    test(
-
-
-
-        method: `PUT`,
-        headers: { "Content-Type": `text/plain` },
-      })
-
-      // Get the current tail offset
-      const headResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
-        method: `HEAD`,
-      })
-      const tailOffset = headResponse.headers.get(STREAM_OFFSET_HEADER)
-      expect(tailOffset).toBeDefined()
+    test(
+      `should return Stream-Cursor, Stream-Up-To-Date and Stream-Next-Offset on 204 timeout`,
+      async () => {
+        const streamPath = `/v1/stream/longpoll-204-headers-test-${Date.now()}`

-
-
-
-
+        await fetch(`${getBaseUrl()}${streamPath}`, {
+          method: `PUT`,
+          headers: { "Content-Type": `text/plain` },
+        })

-
-      const
-
-
-
-
-
-
+        // Get the current tail offset
+        const headResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
+          method: `HEAD`,
+        })
+        const tailOffset = headResponse.headers.get(STREAM_OFFSET_HEADER)
+        expect(tailOffset).toBeDefined()
+
+        // Long-poll at tail offset with a short timeout
+        // We use AbortController to limit wait time on our side
+        const controller = new AbortController()
+        const timeoutId = setTimeout(() => controller.abort(), 5000)
+
+        try {
+          const response = await fetch(
+            `${getBaseUrl()}${streamPath}?offset=${tailOffset}&live=long-poll`,
+            {
+              method: `GET`,
+              signal: controller.signal,
+            }
+          )

-
+          clearTimeout(timeoutId)

-
-
-
-
+          // If we get a 204, verify headers
+          if (response.status === 204) {
+            expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()
+            expect(response.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)

-
-
-
-
-
-
-
-
-
-
-
-
+            // Server MUST return Stream-Cursor even on 204 timeout
+            const cursor = response.headers.get(`Stream-Cursor`)
+            expect(cursor).toBeDefined()
+            expect(/^\d+$/.test(cursor!)).toBe(true)
+          }
+          // If we get a 200 (data arrived somehow), that's also valid
+          expect([200, 204]).toContain(response.status)
+        } catch (e) {
+          clearTimeout(timeoutId)
+          // AbortError is expected if server timeout is longer than our 5s
+          if (e instanceof Error && e.name !== `AbortError`) {
+            throw e
+          }
+          // Test passes - server just has a longer timeout than our abort
        }
-
-
-
+      },
+      getLongPollTestTimeoutMs()
+    )
   })

   // ============================================================================
@@ -1815,7 +2043,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
           "Stream-TTL": `3600`,
         },
       })
-      expect(
+      expect(response2.status).toBe(200)
     })

     test(`should reject idempotent PUT with different TTL`, async () => {
@@ -1992,7 +2220,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         headers: { "Content-Type": `text/plain` },
         body: `appended data`,
       })
-      expect(
+      expect(postBefore.status).toBe(204)

       // Wait for TTL to expire
       await sleep(1500)
@@ -2096,7 +2324,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
         headers: { "Content-Type": `text/plain` },
         body: `appended data`,
       })
-      expect(
+      expect(postBefore.status).toBe(204)

       // Wait for expiry time to pass
       await sleep(1500)
@@ -2829,6 +3057,154 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
       expect(received).toContain(`data: line3`)
     })

+    test(`should prevent CRLF injection in payloads - embedded event boundaries become literal data`, async () => {
+      const streamPath = `/v1/stream/sse-crlf-injection-test-${Date.now()}`
+
+      // Payload attempts to inject a fake control event via CRLF sequences
+      // If vulnerable, this would terminate the current event and inject a new one
+      const maliciousPayload = `safe content\r\n\r\nevent: control\r\ndata: {"injected":true}\r\n\r\nmore safe content`
+
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+        body: maliciousPayload,
+      })
+
+      const { response, received } = await fetchSSE(
+        `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
+        { untilContent: `event: control` }
+      )
+
+      expect(response.status).toBe(200)
+
+      // Parse all events from the response
+      const events = parseSSEEvents(received)
+
+      // Should have exactly 1 data event and 1 control event (the real one from server)
+      const dataEvents = events.filter((e) => e.type === `data`)
+      const controlEvents = events.filter((e) => e.type === `control`)
+
+      expect(dataEvents.length).toBe(1)
+      expect(controlEvents.length).toBe(1)
+
+      // The "injected" control event should NOT exist as a real event
+      // Instead, "event: control" should appear as literal text within the data
+      const dataContent = dataEvents[0]!.data
+      expect(dataContent).toContain(`event: control`)
+      expect(dataContent).toContain(`data: {"injected":true}`)
+
+      // The real control event should have server-generated fields, not injected ones
+      const controlContent = JSON.parse(controlEvents[0]!.data)
+      expect(controlContent.injected).toBeUndefined()
+      expect(controlContent.streamNextOffset).toBeDefined()
+    })
+
+    test(`should prevent CRLF injection - LF-only attack vectors`, async () => {
+      const streamPath = `/v1/stream/sse-lf-injection-test-${Date.now()}`
+
+      // Attempt injection using Unix-style line endings only
+      const maliciousPayload = `start\n\nevent: data\ndata: fake-event\n\nend`
+
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+        body: maliciousPayload,
+      })
+
+      const { response, received } = await fetchSSE(
+        `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
+        { untilContent: `event: control` }
+      )
+
+      expect(response.status).toBe(200)
+
+      const events = parseSSEEvents(received)
+      const dataEvents = events.filter((e) => e.type === `data`)
+
+      // Should be exactly 1 data event (the injected one should be escaped)
+      expect(dataEvents.length).toBe(1)
+
+      // The payload should be preserved as literal content, including the
+      // "event: data" and "data: fake-event" as text, not parsed as SSE commands
+      const dataContent = dataEvents[0]!.data
+      expect(dataContent).toContain(`event: data`)
+      expect(dataContent).toContain(`data: fake-event`)
+    })
+
+    test(`should prevent CRLF injection - carriage return only attack vectors`, async () => {
+      const streamPath = `/v1/stream/sse-cr-injection-test-${Date.now()}`
+
+      // Attempt injection using CR-only line endings (per SSE spec, CR is a valid line terminator)
+      const maliciousPayload = `start\r\revent: control\rdata: {"cr_injected":true}\r\rend`
+
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+        body: maliciousPayload,
+      })
+
+      const { response, received } = await fetchSSE(
+        `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
+        { untilContent: `event: control` }
+      )
+
+      expect(response.status).toBe(200)
+
+      const events = parseSSEEvents(received)
+      const controlEvents = events.filter((e) => e.type === `control`)
+
+      // Should have exactly 1 control event (the real one from server)
+      expect(controlEvents.length).toBe(1)
+
+      // The real control event should not contain injected fields
+      const controlContent = JSON.parse(controlEvents[0]!.data)
+      expect(controlContent.cr_injected).toBeUndefined()
+      expect(controlContent.streamNextOffset).toBeDefined()
+    })
+
+    test(`should handle JSON payloads with embedded newlines safely`, async () => {
+      const streamPath = `/v1/stream/sse-json-newline-test-${Date.now()}`
+
+      // JSON content that contains literal newlines in string values
+      // These should be JSON-escaped, but we test that even if they're not,
+      // SSE encoding handles them safely
+      const jsonPayload = JSON.stringify({
+        message: `line1\nline2\nline3`,
+        attack: `try\r\n\r\nevent: control\r\ndata: {"bad":true}`,
+      })
+
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `application/json` },
+        body: jsonPayload,
+      })
+
+      const { response, received } = await fetchSSE(
+        `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
+        { untilContent: `event: control` }
+      )
+
+      expect(response.status).toBe(200)
+
+      const events = parseSSEEvents(received)
+      const dataEvents = events.filter((e) => e.type === `data`)
+      const controlEvents = events.filter((e) => e.type === `control`)
+
+      expect(dataEvents.length).toBe(1)
+      expect(controlEvents.length).toBe(1)
+
+      // Parse the data event - should be valid JSON array wrapping the original object
+      const parsedData = JSON.parse(dataEvents[0]!.data)
+      expect(Array.isArray(parsedData)).toBe(true)
+      expect(parsedData[0].message).toBe(`line1\nline2\nline3`)
+      expect(parsedData[0].attack).toContain(`event: control`)
+
+      // Control event should be the real server-generated one
+      const controlContent = JSON.parse(controlEvents[0]!.data)
+      expect(controlContent.bad).toBeUndefined()
+      expect(controlContent.streamNextOffset).toBeDefined()
+    })
+
     test(`should generate unique, monotonically increasing offsets in SSE mode`, async () => {
       const streamPath = `/v1/stream/sse-monotonic-offset-test-${Date.now()}`

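The injection tests above reduce to one framing rule: payload bytes must never reach the wire as raw SSE lines. A sketch of the kind of encoder that satisfies them (the function name is illustrative, not this package's API):

```ts
// Split the payload on every terminator the SSE spec recognizes (CRLF, CR, LF)
// and re-emit each piece as its own `data:` line, so an embedded
// "event: control" can only ever appear as literal data, never as a new event.
function encodeSSEData(payload: string): string {
  const lines = payload.split(/\r\n|\r|\n/)
  return lines.map((line) => `data: ${line}`).join(`\n`) + `\n\n`
}
```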
@@ -3328,7 +3704,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
          headers: { "Content-Type": `application/octet-stream` },
          body: chunk,
        })
-        expect(
+        expect(response.status).toBe(204)
      }

      // Calculate expected result
@@ -3479,7 +3855,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
            headers: { "Content-Type": `application/octet-stream` },
            body: op.data as BodyInit,
          })
-          expect(
+          expect(response.status).toBe(204)

          // Track what we appended
          appendedData.push(...Array.from(op.data))
@@ -3630,7 +4006,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
              body: data,
            }
          )
-          expect(
+          expect(appendResponse.status).toBe(204)

          // Immediately read back
          const readResponse = await fetch(`${getBaseUrl()}${streamPath}`)
@@ -3789,7 +4165,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
            },
            body: `data-${seq}`,
          })
-          expect(
+          expect(response.status).toBe(204)
        }

        return true
@@ -3826,7 +4202,7 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
        },
        body: `first`,
      })
-      expect(
+      expect(response1.status).toBe(204)

      // Second append with smaller seq should be rejected
      const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
@@ -3846,5 +4222,382 @@ export function runConformanceTests(options: ConformanceTestOptions): void {
      )
    })
  })
+
+  describe(`Concurrent Writer Stress Tests`, () => {
+    test(`concurrent writers with sequence numbers - server handles gracefully`, async () => {
+      const streamPath = `/v1/stream/concurrent-seq-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+      // Create stream
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      // Try to write with same seq from multiple "writers" concurrently
+      const numWriters = 5
+      const seqValue = `seq-001`
+
+      const writePromises = Array.from({ length: numWriters }, (_, i) =>
+        fetch(`${getBaseUrl()}${streamPath}`, {
+          method: `POST`,
+          headers: {
+            "Content-Type": `text/plain`,
+            [STREAM_SEQ_HEADER]: seqValue,
+          },
+          body: `writer-${i}`,
+        })
+      )
+
+      const responses = await Promise.all(writePromises)
+      const statuses = responses.map((r) => r.status)
+
+      // Server should handle concurrent writes gracefully
+      // All responses should be valid (success or conflict)
+      for (const status of statuses) {
+        expect([200, 204, 409]).toContain(status)
+      }
+
+      // At least one should succeed
+      const successes = statuses.filter((s) => s === 200 || s === 204)
+      expect(successes.length).toBeGreaterThanOrEqual(1)
+
+      // Read back - should have exactly one write's data
+      const readResponse = await fetch(`${getBaseUrl()}${streamPath}`)
+      const content = await readResponse.text()
+
+      // Content should contain data from exactly one writer
+      const matchingWriters = Array.from({ length: numWriters }, (_, i) =>
+        content.includes(`writer-${i}`)
+      ).filter(Boolean)
+      expect(matchingWriters.length).toBeGreaterThanOrEqual(1)
+    })
+
+    test(`concurrent writers racing with incrementing seq values`, async () => {
+      await fc.assert(
+        fc.asyncProperty(
+          fc.integer({ min: 3, max: 8 }), // Number of writers
+          async (numWriters) => {
+            const streamPath = `/v1/stream/concurrent-race-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+            // Create stream
+            await fetch(`${getBaseUrl()}${streamPath}`, {
+              method: `PUT`,
+              headers: { "Content-Type": `text/plain` },
+            })
+
+            // Each writer gets a unique seq value (padded for lexicographic ordering)
+            const writePromises = Array.from({ length: numWriters }, (_, i) =>
+              fetch(`${getBaseUrl()}${streamPath}`, {
+                method: `POST`,
+                headers: {
+                  "Content-Type": `text/plain`,
+                  [STREAM_SEQ_HEADER]: String(i).padStart(4, `0`),
+                },
+                body: `data-${i}`,
+              })
+            )
+
+            const responses = await Promise.all(writePromises)
+
+            // With concurrent writes, some may succeed (200/204) and some may conflict (409)
+            // due to out-of-order arrival at the server. All responses should be valid.
+            const successIndices: Array<number> = []
+            for (let i = 0; i < responses.length; i++) {
+              expect([200, 204, 409]).toContain(responses[i]!.status)
+              if (
+                responses[i]!.status === 200 ||
+                responses[i]!.status === 204
+              ) {
+                successIndices.push(i)
+              }
+            }
+
+            // At least one write should succeed
+            expect(successIndices.length).toBeGreaterThanOrEqual(1)
+
+            // Read back and verify successful writes are present
+            const readResponse = await fetch(`${getBaseUrl()}${streamPath}`)
+            const content = await readResponse.text()
+
+            // All successful writes should have their data in the stream
+            for (const i of successIndices) {
+              expect(content).toContain(`data-${i}`)
+            }
+
+            return true
+          }
+        ),
+        { numRuns: 10 }
+      )
+    })
+
+    test(`concurrent appends without seq - all data is persisted`, async () => {
+      const streamPath = `/v1/stream/concurrent-no-seq-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+      // Create stream
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      const numWriters = 10
+      const writePromises = Array.from({ length: numWriters }, (_, i) =>
+        fetch(`${getBaseUrl()}${streamPath}`, {
+          method: `POST`,
+          headers: { "Content-Type": `text/plain` },
+          body: `concurrent-${i}`,
+        })
+      )
+
+      const responses = await Promise.all(writePromises)
+
+      // All should succeed
+      for (const response of responses) {
+        expect([200, 204]).toContain(response.status)
+      }
+
+      // All offsets that are returned should be valid (non-null)
+      const offsets = responses.map((r) =>
+        r.headers.get(STREAM_OFFSET_HEADER)
+      )
+      for (const offset of offsets) {
+        expect(offset).not.toBeNull()
+      }
+
+      // Read back and verify all data is present (the key invariant)
+      const readResponse = await fetch(`${getBaseUrl()}${streamPath}`)
+      const content = await readResponse.text()
+
+      for (let i = 0; i < numWriters; i++) {
+        expect(content).toContain(`concurrent-${i}`)
+      }
+    })
+
+    test(`mixed readers and writers - readers see consistent state`, async () => {
+      const streamPath = `/v1/stream/concurrent-rw-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+      // Create stream with initial data
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `text/plain` },
+      })
+
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `POST`,
+        headers: { "Content-Type": `text/plain` },
+        body: `initial`,
+      })
+
+      // Launch concurrent readers and writers
+      const numOps = 20
+      const operations = Array.from({ length: numOps }, (_, i) => {
+        if (i % 2 === 0) {
+          // Writer
+          return fetch(`${getBaseUrl()}${streamPath}`, {
+            method: `POST`,
+            headers: { "Content-Type": `text/plain` },
+            body: `write-${i}`,
+          })
+        } else {
+          // Reader
+          return fetch(`${getBaseUrl()}${streamPath}`)
+        }
+      })
+
+      const responses = await Promise.all(operations)
+
+      // All operations should succeed
+      // Writers (even indices) return 204, readers (odd indices) return 200
+      responses.forEach((response, i) => {
+        const expectedStatus = i % 2 === 0 ? 204 : 200
+        expect(response.status).toBe(expectedStatus)
+      })
+
+      // Final read should have all writes
+      const finalRead = await fetch(`${getBaseUrl()}${streamPath}`)
+      const content = await finalRead.text()
+
+      // Initial data should be present
+      expect(content).toContain(`initial`)
+
+      // All writes should be present
+      for (let i = 0; i < numOps; i += 2) {
+        expect(content).toContain(`write-${i}`)
+      }
+    })
+  })
+
+  describe(`State Hash Verification`, () => {
+    /**
+     * Simple hash function for content verification.
+     * Uses FNV-1a algorithm for deterministic hashing.
+     */
+    function hashContent(data: Uint8Array): string {
+      let hash = 2166136261 // FNV offset basis
+      for (const byte of data) {
+        hash ^= byte
+        hash = Math.imul(hash, 16777619) // FNV prime
+        hash = hash >>> 0 // Convert to unsigned 32-bit
+      }
+      return hash.toString(16).padStart(8, `0`)
+    }
+
+    test(`replay produces identical content hash`, async () => {
+      await fc.assert(
+        fc.asyncProperty(
+          // Generate a sequence of appends
+          fc.array(fc.uint8Array({ minLength: 1, maxLength: 100 }), {
+            minLength: 1,
+            maxLength: 10,
+          }),
+          async (chunks) => {
+            // Create first stream and append data
+            const streamPath1 = `/v1/stream/hash-verify-1-${Date.now()}-${Math.random().toString(36).slice(2)}`
+            await fetch(`${getBaseUrl()}${streamPath1}`, {
+              method: `PUT`,
+              headers: { "Content-Type": `application/octet-stream` },
+            })
+
+            for (const chunk of chunks) {
+              await fetch(`${getBaseUrl()}${streamPath1}`, {
+                method: `POST`,
+                headers: { "Content-Type": `application/octet-stream` },
+                body: chunk,
+              })
+            }
+
+            // Read and hash first stream
+            const response1 = await fetch(`${getBaseUrl()}${streamPath1}`)
+            const data1 = new Uint8Array(await response1.arrayBuffer())
+            const hash1 = hashContent(data1)
+
+            // Create second stream and replay same operations
+            const streamPath2 = `/v1/stream/hash-verify-2-${Date.now()}-${Math.random().toString(36).slice(2)}`
+            await fetch(`${getBaseUrl()}${streamPath2}`, {
+              method: `PUT`,
+              headers: { "Content-Type": `application/octet-stream` },
+            })
+
+            for (const chunk of chunks) {
+              await fetch(`${getBaseUrl()}${streamPath2}`, {
+                method: `POST`,
+                headers: { "Content-Type": `application/octet-stream` },
+                body: chunk,
+              })
+            }
+
+            // Read and hash second stream
+            const response2 = await fetch(`${getBaseUrl()}${streamPath2}`)
+            const data2 = new Uint8Array(await response2.arrayBuffer())
+            const hash2 = hashContent(data2)
+
+            // Hashes must match
+            expect(hash1).toBe(hash2)
+            expect(data1.length).toBe(data2.length)
+
+            return true
+          }
+        ),
+        { numRuns: 15 }
+      )
+    })
+
+    test(`content hash changes with each append`, async () => {
+      const streamPath = `/v1/stream/hash-changes-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+      await fetch(`${getBaseUrl()}${streamPath}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `application/octet-stream` },
+      })
+
+      const hashes: Array<string> = []
+
+      // Append 5 chunks and verify hash changes each time
+      for (let i = 0; i < 5; i++) {
+        await fetch(`${getBaseUrl()}${streamPath}`, {
+          method: `POST`,
+          headers: { "Content-Type": `application/octet-stream` },
+          body: new Uint8Array([i, i + 1, i + 2]),
+        })
+
+        const response = await fetch(`${getBaseUrl()}${streamPath}`)
+        const data = new Uint8Array(await response.arrayBuffer())
+        hashes.push(hashContent(data))
+      }
+
+      // All hashes should be unique
+      const uniqueHashes = new Set(hashes)
+      expect(uniqueHashes.size).toBe(5)
+    })
+
+    test(`empty stream has consistent hash`, async () => {
+      // Create two empty streams
+      const streamPath1 = `/v1/stream/empty-hash-1-${Date.now()}-${Math.random().toString(36).slice(2)}`
+      const streamPath2 = `/v1/stream/empty-hash-2-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+      await fetch(`${getBaseUrl()}${streamPath1}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `application/octet-stream` },
+      })
+      await fetch(`${getBaseUrl()}${streamPath2}`, {
+        method: `PUT`,
+        headers: { "Content-Type": `application/octet-stream` },
+      })
+
+      // Read both
+      const response1 = await fetch(`${getBaseUrl()}${streamPath1}`)
+      const response2 = await fetch(`${getBaseUrl()}${streamPath2}`)
+
+      const data1 = new Uint8Array(await response1.arrayBuffer())
+      const data2 = new Uint8Array(await response2.arrayBuffer())
+
+      // Both should be empty and have same hash
+      expect(data1.length).toBe(0)
+      expect(data2.length).toBe(0)
+      expect(hashContent(data1)).toBe(hashContent(data2))
+    })
+
+    test(`deterministic ordering - same data in same order produces same hash`, async () => {
+      await fc.assert(
+        fc.asyncProperty(
+          fc.array(fc.uint8Array({ minLength: 1, maxLength: 50 }), {
+            minLength: 2,
+            maxLength: 5,
+          }),
+          async (chunks) => {
+            // Create two streams with same data in same order
+            const hashes: Array<string> = []
+
+            for (let run = 0; run < 2; run++) {
+              const streamPath = `/v1/stream/order-hash-${run}-${Date.now()}-${Math.random().toString(36).slice(2)}`
+
+              await fetch(`${getBaseUrl()}${streamPath}`, {
+                method: `PUT`,
+                headers: { "Content-Type": `application/octet-stream` },
+              })
+
+              // Append in order
+              for (const chunk of chunks) {
+                await fetch(`${getBaseUrl()}${streamPath}`, {
+                  method: `POST`,
+                  headers: { "Content-Type": `application/octet-stream` },
+                  body: chunk,
+                })
+              }
+
+              const response = await fetch(`${getBaseUrl()}${streamPath}`)
+              const data = new Uint8Array(await response.arrayBuffer())
+              hashes.push(hashContent(data))
+            }
+
+            expect(hashes[0]).toBe(hashes[1])
+
+            return true
+          }
+        ),
+        { numRuns: 10 }
+      )
+    })
+  })
  })
}