@durable-streams/server-conformance-tests 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +132 -0
- package/bin/conformance-dev.mjs +27 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +221 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +3 -0
- package/dist/src-DRIMnUPk.js +2326 -0
- package/dist/test-runner.d.ts +1 -0
- package/dist/test-runner.js +8 -0
- package/package.json +43 -0
- package/src/cli.ts +345 -0
- package/src/index.ts +3596 -0
- package/src/test-runner.ts +19 -0
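How a server project consumes this suite (an illustrative sketch, not taken from the package README): src/index.ts below exports runConformanceTests(options), which registers vitest describe/test blocks against options.baseUrl, so a consumer calls it from inside a vitest test file. The import path assumes the package root re-exports the function via dist/index.js, and the base URL is a placeholder.

    // conformance.test.ts (hypothetical consumer test file)
    import { runConformanceTests } from "@durable-streams/server-conformance-tests"

    // Registers the conformance describe/test blocks with vitest; the server
    // under test must already be listening at this (placeholder) base URL.
    runConformanceTests({ baseUrl: "http://localhost:3000" })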
package/src/index.ts
ADDED
@@ -0,0 +1,3596 @@
/**
 * Conformance test suite for Durable Streams server implementations
 *
 * This package provides a standardized test suite that can be run against
 * any server implementation to verify protocol compliance.
 */

import { describe, expect, test } from "vitest"
import * as fc from "fast-check"
import {
  DurableStream,
  STREAM_OFFSET_HEADER,
  STREAM_SEQ_HEADER,
  STREAM_UP_TO_DATE_HEADER,
} from "@durable-streams/client"

export interface ConformanceTestOptions {
  /** Base URL of the server to test */
  baseUrl: string
}

/**
 * Helper to fetch SSE stream and read until a condition is met.
 * Handles AbortController, timeout, and cleanup automatically.
 */
async function fetchSSE(
  url: string,
  opts: {
    timeoutMs?: number
    maxChunks?: number
    untilContent?: string
    signal?: AbortSignal
    headers?: Record<string, string>
  } = {}
): Promise<{ response: Response; received: string }> {
  const {
    timeoutMs = 2000,
    maxChunks = 10,
    untilContent,
    headers = {},
    signal,
  } = opts

  const controller = new AbortController()
  const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
  if (signal) {
    signal.addEventListener(`abort`, () => controller.abort())
  }

  try {
    const response = await fetch(url, {
      method: `GET`,
      headers,
      signal: controller.signal,
    })

    if (!response.body) {
      clearTimeout(timeoutId)
      return { response, received: `` }
    }

    const reader = response.body.getReader()
    const decoder = new TextDecoder()
    let received = ``

    let untilContentIndex = -1
    for (let i = 0; i < maxChunks; i++) {
      const { done, value } = await reader.read()
      if (done) break
      received += decoder.decode(value, { stream: true })
      if (
        untilContent &&
        received.includes(untilContent) &&
        untilContentIndex < 0
      ) {
        untilContentIndex = received.indexOf(untilContent)
      }

      const normalized = received.replace(/\r\n/g, `\n`)
      if (
        untilContentIndex >= 0 &&
        normalized.lastIndexOf(`\n\n`) > untilContentIndex
      ) {
        break
      }
    }

    clearTimeout(timeoutId)
    reader.cancel()

    return { response, received }
  } catch (e) {
    clearTimeout(timeoutId)
    if (e instanceof Error && e.name === `AbortError`) {
      // Return empty result on timeout/abort
      return { response: new Response(), received: `` }
    }
    throw e
  }
}

/**
 * Parse SSE events from raw SSE text.
 * Handles multi-line data correctly by joining data: lines per the SSE spec.
 * Returns an array of parsed events with type and data.
 */
function parseSSEEvents(
  sseText: string
): Array<{ type: string; data: string }> {
  const events: Array<{ type: string; data: string }> = []
  const normalized = sseText.replace(/\r\n/g, `\n`)

  // Split by double newlines (event boundaries)
  const eventBlocks = normalized.split(`\n\n`).filter((block) => block.trim())

  for (const block of eventBlocks) {
    const lines = block.split(`\n`)
    let eventType = ``
    const dataLines: Array<string> = []

    for (const line of lines) {
      if (line.startsWith(`event:`)) {
        eventType = line.slice(6).trim()
      } else if (line.startsWith(`data:`)) {
        // Per SSE spec, strip the optional space after "data:"
        const content = line.slice(5)
        dataLines.push(content.startsWith(` `) ? content.slice(1) : content)
      }
    }

    if (eventType && dataLines.length > 0) {
      // Join data lines with newlines per SSE spec
      events.push({ type: eventType, data: dataLines.join(`\n`) })
    }
  }

  return events
}

/**
 * Run the full conformance test suite against a server
 */
export function runConformanceTests(options: ConformanceTestOptions): void {
  // Access options.baseUrl directly instead of destructuring to support
  // mutable config objects (needed for dynamic port assignment)
  const getBaseUrl = () => options.baseUrl

  // ============================================================================
  // Basic Stream Operations
  // ============================================================================

  describe(`Basic Stream Operations`, () => {
    test(`should create a stream`, async () => {
      const streamPath = `/v1/stream/create-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      expect(stream.url).toBe(`${getBaseUrl()}${streamPath}`)
    })

    test(`should allow idempotent create with same config`, async () => {
      const streamPath = `/v1/stream/duplicate-test-${Date.now()}`

      // Create first stream
      await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      // Create again with same config - should succeed (idempotent)
      await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })
    })

    test(`should reject create with different config (409)`, async () => {
      const streamPath = `/v1/stream/config-mismatch-test-${Date.now()}`

      // Create with text/plain
      await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      // Try to create with different content type - should fail
      await expect(
        DurableStream.create({
          url: `${getBaseUrl()}${streamPath}`,
          contentType: `application/json`,
        })
      ).rejects.toThrow()
    })

    test(`should delete a stream`, async () => {
      const streamPath = `/v1/stream/delete-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      await stream.delete()

      // Verify it's gone by trying to read
      await expect(stream.stream({ live: false })).rejects.toThrow()
    })

    test(`should properly isolate recreated stream after delete`, async () => {
      const streamPath = `/v1/stream/delete-recreate-test-${Date.now()}`

      // Create stream and append data
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `old data`,
      })

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: ` more old data`,
      })

      // Verify old data exists
      const readOld = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const oldText = await readOld.text()
      expect(oldText).toBe(`old data more old data`)

      // Delete the stream
      const deleteResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `DELETE`,
      })
      expect(deleteResponse.status).toBe(204)

      // Immediately recreate at same URL with different data
      const recreateResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `new data`,
      })
      expect(recreateResponse.status).toBe(201)

      // Read the new stream - should only see new data, not old
      const readNew = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const newText = await readNew.text()
      expect(newText).toBe(`new data`)
      expect(newText).not.toContain(`old data`)

      // Verify Stream-Up-To-Date is true (we're caught up on new stream)
      expect(readNew.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)

      // Append to the new stream to verify it's fully functional
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: ` appended`,
      })

      // Read again and verify
      const finalRead = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const finalText = await finalRead.text()
      expect(finalText).toBe(`new data appended`)
    })
  })

  // ============================================================================
  // Append Operations
  // ============================================================================

  describe(`Append Operations`, () => {
    test(`should append string data`, async () => {
      const streamPath = `/v1/stream/append-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      await stream.append(`hello world`)

      const res = await stream.stream({ live: false })
      const text = await res.text()
      expect(text).toBe(`hello world`)
    })

    test(`should append multiple chunks`, async () => {
      const streamPath = `/v1/stream/multi-append-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      await stream.append(`chunk1`)
      await stream.append(`chunk2`)
      await stream.append(`chunk3`)

      const res = await stream.stream({ live: false })
      const text = await res.text()
      expect(text).toBe(`chunk1chunk2chunk3`)
    })

    test(`should enforce sequence ordering with seq`, async () => {
      const streamPath = `/v1/stream/seq-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      await stream.append(`first`, { seq: `001` })
      await stream.append(`second`, { seq: `002` })

      // Trying to append with lower seq should fail
      await expect(stream.append(`invalid`, { seq: `001` })).rejects.toThrow()
    })
  })

  // ============================================================================
  // Read Operations
  // ============================================================================

  describe(`Read Operations`, () => {
    test(`should read empty stream`, async () => {
      const streamPath = `/v1/stream/read-empty-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      const res = await stream.stream({ live: false })
      const body = await res.body()
      expect(body.length).toBe(0)
      expect(res.upToDate).toBe(true)
    })

    test(`should read stream with data`, async () => {
      const streamPath = `/v1/stream/read-data-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      await stream.append(`hello`)

      const res = await stream.stream({ live: false })
      const text = await res.text()
      expect(text).toBe(`hello`)
      expect(res.upToDate).toBe(true)
    })

    test(`should read from offset`, async () => {
      const streamPath = `/v1/stream/read-offset-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      await stream.append(`first`)
      const res1 = await stream.stream({ live: false })
      await res1.text()
      const firstOffset = res1.offset

      await stream.append(`second`)

      const res2 = await stream.stream({ offset: firstOffset, live: false })
      const text = await res2.text()
      expect(text).toBe(`second`)
    })
  })

  // ============================================================================
  // Long-Poll Operations
  // ============================================================================

  describe(`Long-Poll Operations`, () => {
    test(`should wait for new data with long-poll`, async () => {
      const streamPath = `/v1/stream/longpoll-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      const receivedData: Array<string> = []

      // Start reading in long-poll mode
      const readPromise = (async () => {
        const res = await stream.stream({ live: `long-poll` })
        await new Promise<void>((resolve) => {
          const unsubscribe = res.subscribeBytes(async (chunk) => {
            if (chunk.data.length > 0) {
              receivedData.push(new TextDecoder().decode(chunk.data))
            }
            if (receivedData.length >= 1) {
              unsubscribe()
              res.cancel()
              resolve()
            }
          })
        })
      })()

      // Wait a bit for the long-poll to be active
      await new Promise((resolve) => setTimeout(resolve, 500))

      // Append data while long-poll is waiting
      await stream.append(`new data`)

      await readPromise

      expect(receivedData).toContain(`new data`)
    }, 10000)

    test(`should return immediately if data already exists`, async () => {
      const streamPath = `/v1/stream/longpoll-immediate-test-${Date.now()}`
      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `text/plain`,
      })

      // Add data first
      await stream.append(`existing data`)

      // Read should return existing data immediately
      const res = await stream.stream({ live: false })
      const text = await res.text()

      expect(text).toBe(`existing data`)
    })
  })

  // ============================================================================
  // HTTP Protocol Tests
  // ============================================================================

  describe(`HTTP Protocol`, () => {
    test(`should return correct headers on PUT`, async () => {
      const streamPath = `/v1/stream/put-headers-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: {
          "Content-Type": `text/plain`,
        },
      })

      expect(response.status).toBe(201)
      expect(response.headers.get(`content-type`)).toBe(`text/plain`)
      expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()
    })

    test(`should return 200 on idempotent PUT with same config`, async () => {
      const streamPath = `/v1/stream/duplicate-put-test-${Date.now()}`

      // First PUT
      const firstResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })
      expect(firstResponse.status).toBe(201)

      // Second PUT with same config should succeed
      const secondResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })
      expect([200, 204]).toContain(secondResponse.status)
    })

    test(`should return 409 on PUT with different config`, async () => {
      const streamPath = `/v1/stream/config-conflict-test-${Date.now()}`

      // First PUT with text/plain
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Second PUT with different content type should fail
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `application/json` },
      })

      expect(response.status).toBe(409)
    })

    test(`should return correct headers on POST`, async () => {
      const streamPath = `/v1/stream/post-headers-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append data
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `hello world`,
      })

      expect([200, 204]).toContain(response.status)
      expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()
    })

    test(`should return 404 on POST to non-existent stream`, async () => {
      const streamPath = `/v1/stream/post-404-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `data`,
      })

      expect(response.status).toBe(404)
    })

    test(`should return 409 on content-type mismatch`, async () => {
      const streamPath = `/v1/stream/content-type-mismatch-test-${Date.now()}`

      // Create with text/plain
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Try to append with application/json - valid content-type but doesn't match stream
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `application/json` },
        body: `{}`,
      })

      expect(response.status).toBe(409)
    })

    test(`should return correct headers on GET`, async () => {
      const streamPath = `/v1/stream/get-headers-test-${Date.now()}`

      // Create and add data
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test data`,
      })

      // Read data
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })

      expect(response.status).toBe(200)
      expect(response.headers.get(`content-type`)).toBe(`text/plain`)
      const nextOffset = response.headers.get(STREAM_OFFSET_HEADER)
      expect(nextOffset).toBeDefined()
      expect(response.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)
      const etag = response.headers.get(`etag`)
      expect(etag).toBeDefined()

      const text = await response.text()
      expect(text).toBe(`test data`)
    })

    test(`should return empty body with up-to-date for empty stream`, async () => {
      const streamPath = `/v1/stream/get-empty-test-${Date.now()}`

      // Create empty stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Read empty stream
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })

      expect(response.status).toBe(200)
      expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()
      expect(response.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)

      const text = await response.text()
      expect(text).toBe(``)
    })

    test(`should read from offset`, async () => {
      const streamPath = `/v1/stream/get-offset-test-${Date.now()}`

      // Create with data
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `first`,
      })

      // Append more
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `second`,
      })

      // Get the first offset (after "first")
      const firstResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const firstText = await firstResponse.text()
      expect(firstText).toBe(`firstsecond`)

      // Now create fresh and read from middle offset
      const streamPath2 = `/v1/stream/get-offset-test2-${Date.now()}`
      await fetch(`${getBaseUrl()}${streamPath2}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `first`,
      })
      const middleResponse = await fetch(`${getBaseUrl()}${streamPath2}`, {
        method: `GET`,
      })
      const middleOffset = middleResponse.headers.get(STREAM_OFFSET_HEADER)

      // Append more
      await fetch(`${getBaseUrl()}${streamPath2}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `second`,
      })

      // Read from the middle offset
      const response = await fetch(
        `${getBaseUrl()}${streamPath2}?offset=${middleOffset}`,
        {
          method: `GET`,
        }
      )

      expect(response.status).toBe(200)
      const text = await response.text()
      expect(text).toBe(`second`)
    })

    test(`should return 404 on DELETE non-existent stream`, async () => {
      const streamPath = `/v1/stream/delete-404-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `DELETE`,
      })

      expect(response.status).toBe(404)
    })

    test(`should return 204 on successful DELETE`, async () => {
      const streamPath = `/v1/stream/delete-success-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Delete it
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `DELETE`,
      })

      expect(response.status).toBe(204)

      // Verify it's gone
      const readResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      expect(readResponse.status).toBe(404)
    })

    test(`should enforce sequence ordering`, async () => {
      const streamPath = `/v1/stream/seq-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with seq 001
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `001`,
        },
        body: `first`,
      })

      // Append with seq 002
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `002`,
        },
        body: `second`,
      })

      // Try to append with seq 001 (regression) - should fail
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `001`,
        },
        body: `invalid`,
      })

      expect(response.status).toBe(409)
    })

    test(`should enforce lexicographic seq ordering ("2" then "10" rejects)`, async () => {
      const streamPath = `/v1/stream/seq-lexicographic-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with seq "2"
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `2`,
        },
        body: `first`,
      })

      // Try to append with seq "10" - should fail (lexicographically "10" < "2")
      // A numeric implementation would incorrectly accept this (10 > 2)
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `10`,
        },
        body: `second`,
      })

      expect(response.status).toBe(409)
    })

    test(`should allow lexicographic seq ordering ("09" then "10" succeeds)`, async () => {
      const streamPath = `/v1/stream/seq-padded-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with seq "09"
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `09`,
        },
        body: `first`,
      })

      // Append with seq "10" - should succeed (lexicographically "10" > "09")
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `10`,
        },
        body: `second`,
      })

      expect([200, 204]).toContain(response.status)
    })

    test(`should reject duplicate seq values`, async () => {
      const streamPath = `/v1/stream/seq-duplicate-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with seq "001"
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `001`,
        },
        body: `first`,
      })

      // Try to append with same seq "001" - should fail
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `001`,
        },
        body: `duplicate`,
      })

      expect(response.status).toBe(409)
    })
  })

  // ============================================================================
  // TTL and Expiry Validation
  // ============================================================================

  describe(`TTL and Expiry Validation`, () => {
    test(`should reject both TTL and Expires-At (400)`, async () => {
      const streamPath = `/v1/stream/ttl-expires-conflict-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: {
          "Content-Type": `text/plain`,
          "Stream-TTL": `3600`,
          "Stream-Expires-At": new Date(Date.now() + 3600000).toISOString(),
        },
      })

      expect(response.status).toBe(400)
    })

    test(`should reject invalid TTL (non-integer)`, async () => {
      const streamPath = `/v1/stream/ttl-invalid-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: {
          "Content-Type": `text/plain`,
          "Stream-TTL": `abc`,
        },
      })

      expect(response.status).toBe(400)
    })

    test(`should reject negative TTL`, async () => {
      const streamPath = `/v1/stream/ttl-negative-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: {
          "Content-Type": `text/plain`,
          "Stream-TTL": `-1`,
        },
      })

      expect(response.status).toBe(400)
    })

    test(`should accept valid TTL`, async () => {
      const streamPath = `/v1/stream/ttl-valid-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: {
          "Content-Type": `text/plain`,
          "Stream-TTL": `3600`,
        },
      })

      expect([200, 201]).toContain(response.status)
    })

    test(`should accept valid Expires-At`, async () => {
      const streamPath = `/v1/stream/expires-valid-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: {
          "Content-Type": `text/plain`,
          "Stream-Expires-At": new Date(Date.now() + 3600000).toISOString(),
        },
      })

      expect([200, 201]).toContain(response.status)
    })
  })

  // ============================================================================
  // Case-Insensitivity Tests
  // ============================================================================

  describe(`Case-Insensitivity`, () => {
    test(`should treat content-type case-insensitively`, async () => {
      const streamPath = `/v1/stream/case-content-type-test-${Date.now()}`

      // Create with lowercase content-type
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with mixed case - should succeed
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `TEXT/PLAIN` },
        body: `test`,
      })

      expect([200, 204]).toContain(response.status)
    })

    test(`should allow idempotent create with different case content-type`, async () => {
      const streamPath = `/v1/stream/case-idempotent-test-${Date.now()}`

      // Create with lowercase
      const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `application/json` },
      })
      expect(response1.status).toBe(201)

      // PUT again with uppercase - should be idempotent
      const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `APPLICATION/JSON` },
      })
      expect([200, 204]).toContain(response2.status)
    })

    test(`should accept headers with different casing`, async () => {
      const streamPath = `/v1/stream/case-header-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with different header casing (lowercase)
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "content-type": `text/plain`,
          "stream-seq": `001`,
        },
        body: `test`,
      })

      expect([200, 204]).toContain(response.status)
    })
  })

  // ============================================================================
  // Content-Type Validation
  // ============================================================================

  describe(`Content-Type Validation`, () => {
    test(`should enforce content-type match on append`, async () => {
      const streamPath = `/v1/stream/content-type-enforcement-test-${Date.now()}`

      // Create with text/plain
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Try to append with application/json - valid but doesn't match stream (409)
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `application/json` },
        body: `{"test": true}`,
      })

      expect(response.status).toBe(409)
    })

    test(`should allow append with matching content-type`, async () => {
      const streamPath = `/v1/stream/content-type-match-test-${Date.now()}`

      // Create with application/json
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `application/json` },
      })

      // Append with same content-type - should succeed
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `application/json` },
        body: `{"test": true}`,
      })

      expect([200, 204]).toContain(response.status)
    })

    test(`should return stream content-type on GET`, async () => {
      const streamPath = `/v1/stream/content-type-get-test-${Date.now()}`

      // Create with application/json
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `application/json` },
        body: `{"initial": true}`,
      })

      // Read and verify content-type
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })

      expect(response.status).toBe(200)
      expect(response.headers.get(`content-type`)).toBe(`application/json`)
    })
  })

  // ============================================================================
  // HEAD Metadata Tests
  // ============================================================================

  describe(`HEAD Metadata`, () => {
    test(`should return metadata without body`, async () => {
      const streamPath = `/v1/stream/head-test-${Date.now()}`

      // Create stream with data
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test data`,
      })

      // HEAD request
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `HEAD`,
      })

      expect(response.status).toBe(200)
      expect(response.headers.get(`content-type`)).toBe(`text/plain`)
      expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()

      // Body should be empty
      const text = await response.text()
      expect(text).toBe(``)
    })

    test(`should return 404 for non-existent stream`, async () => {
      const streamPath = `/v1/stream/head-404-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `HEAD`,
      })

      expect(response.status).toBe(404)
    })

    test(`should return tail offset`, async () => {
      const streamPath = `/v1/stream/head-offset-test-${Date.now()}`

      // Create empty stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // HEAD should show initial offset
      const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `HEAD`,
      })
      const offset1 = response1.headers.get(STREAM_OFFSET_HEADER)
      expect(offset1).toBeDefined()

      // Append data
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `test`,
      })

      // HEAD should show updated offset
      const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `HEAD`,
      })
      const offset2 = response2.headers.get(STREAM_OFFSET_HEADER)
      expect(offset2).toBeDefined()
      expect(offset2).not.toBe(offset1)
    })
  })

  // ============================================================================
  // Offset Validation and Resumability
  // ============================================================================

  describe(`Offset Validation and Resumability`, () => {
    test(`should accept -1 as sentinel for stream beginning`, async () => {
      const streamPath = `/v1/stream/offset-sentinel-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test data`,
      })

      // Using offset=-1 should return data from the beginning
      const response = await fetch(`${getBaseUrl()}${streamPath}?offset=-1`, {
        method: `GET`,
      })

      expect(response.status).toBe(200)
      const text = await response.text()
      expect(text).toBe(`test data`)
      expect(response.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)
    })

    test(`should return same data for offset=-1 and no offset`, async () => {
      const streamPath = `/v1/stream/offset-sentinel-equiv-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `hello world`,
      })

      // Request without offset
      const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const text1 = await response1.text()

      // Request with offset=-1
      const response2 = await fetch(`${getBaseUrl()}${streamPath}?offset=-1`, {
        method: `GET`,
      })
      const text2 = await response2.text()

      // Both should return the same data
      expect(text1).toBe(text2)
      expect(text1).toBe(`hello world`)
    })

    test(`should reject malformed offset (contains comma)`, async () => {
      const streamPath = `/v1/stream/offset-comma-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test`,
      })

      const response = await fetch(`${getBaseUrl()}${streamPath}?offset=0,1`, {
        method: `GET`,
      })

      expect(response.status).toBe(400)
    })

    test(`should reject offset with spaces`, async () => {
      const streamPath = `/v1/stream/offset-spaces-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test`,
      })

      const response = await fetch(`${getBaseUrl()}${streamPath}?offset=0 1`, {
        method: `GET`,
      })

      expect(response.status).toBe(400)
    })

    test(`should support resumable reads (no duplicate data)`, async () => {
      const streamPath = `/v1/stream/resumable-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append chunk 1
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `chunk1`,
      })

      // Read chunk 1
      const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const text1 = await response1.text()
      const offset1 = response1.headers.get(STREAM_OFFSET_HEADER)

      expect(text1).toBe(`chunk1`)
      expect(offset1).toBeDefined()

      // Append chunk 2
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `chunk2`,
      })

      // Read from offset1 - should only get chunk2
      const response2 = await fetch(
        `${getBaseUrl()}${streamPath}?offset=${offset1}`,
        {
          method: `GET`,
        }
      )
      const text2 = await response2.text()

      expect(text2).toBe(`chunk2`)
    })

    test(`should return empty response when reading from tail offset`, async () => {
      const streamPath = `/v1/stream/tail-read-test-${Date.now()}`

      // Create stream with data
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test`,
      })

      // Read all data
      const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const tailOffset = response1.headers.get(STREAM_OFFSET_HEADER)

      // Read from tail offset - should return empty with up-to-date
      const response2 = await fetch(
        `${getBaseUrl()}${streamPath}?offset=${tailOffset}`,
        {
          method: `GET`,
        }
      )

      expect(response2.status).toBe(200)
      const text = await response2.text()
      expect(text).toBe(``)
      expect(response2.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)
    })
  })

  // ============================================================================
  // Protocol Edge Cases
  // ============================================================================

  describe(`Protocol Edge Cases`, () => {
    test(`should reject empty POST body with 400`, async () => {
      const streamPath = `/v1/stream/empty-append-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Try to append empty body - should fail
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: ``,
      })

      expect(response.status).toBe(400)
    })

    test(`should handle PUT with initial body correctly`, async () => {
      const streamPath = `/v1/stream/put-initial-body-test-${Date.now()}`
      const initialData = `initial stream content`

      // Create stream with initial content
      const putResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: initialData,
      })

      expect(putResponse.status).toBe(201)
      const nextOffset = putResponse.headers.get(STREAM_OFFSET_HEADER)
      expect(nextOffset).toBeDefined()

      // Verify we can read the initial content
      const getResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })

      const text = await getResponse.text()
      expect(text).toBe(initialData)
      expect(getResponse.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)
    })

    test(`should preserve data immutability by position`, async () => {
      const streamPath = `/v1/stream/immutability-test-${Date.now()}`

      // Create and append first chunk
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `chunk1`,
      })

      // Read and save the offset after chunk1
      const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })
      const text1 = await response1.text()
      const offset1 = response1.headers.get(STREAM_OFFSET_HEADER)
      expect(text1).toBe(`chunk1`)

      // Append more chunks
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `chunk2`,
      })

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `chunk3`,
      })

      // Read from the saved offset - should still get chunk2 (position is immutable)
      const response2 = await fetch(
        `${getBaseUrl()}${streamPath}?offset=${offset1}`,
        {
          method: `GET`,
        }
      )
      const text2 = await response2.text()
      expect(text2).toBe(`chunk2chunk3`)
    })

    test(`should generate unique, monotonically increasing offsets`, async () => {
      const streamPath = `/v1/stream/monotonic-offset-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      const offsets: Array<string> = []

      // Append multiple chunks and collect offsets
      for (let i = 0; i < 5; i++) {
        const response = await fetch(`${getBaseUrl()}${streamPath}`, {
          method: `POST`,
          headers: { "Content-Type": `text/plain` },
          body: `chunk${i}`,
        })

        const offset = response.headers.get(STREAM_OFFSET_HEADER)
        expect(offset).toBeDefined()
        offsets.push(offset!)
      }

      // Verify offsets are unique and strictly increasing (lexicographically)
      for (let i = 1; i < offsets.length; i++) {
        expect(offsets[i]! > offsets[i - 1]!).toBe(true)
      }
    })

    test(`should reject empty offset parameter`, async () => {
      const streamPath = `/v1/stream/empty-offset-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test`,
      })

      const response = await fetch(`${getBaseUrl()}${streamPath}?offset=`, {
        method: `GET`,
      })

      expect(response.status).toBe(400)
    })

    test(`should reject multiple offset parameters`, async () => {
      const streamPath = `/v1/stream/multi-offset-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test`,
      })

      const response = await fetch(
        `${getBaseUrl()}${streamPath}?offset=a&offset=b`,
        {
          method: `GET`,
        }
      )

      expect(response.status).toBe(400)
    })

    test(`should enforce case-sensitive seq ordering`, async () => {
      const streamPath = `/v1/stream/case-seq-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Append with seq "a" (lowercase)
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `a`,
        },
        body: `first`,
      })

      // Try to append with seq "B" (uppercase) - should fail
      // Lexicographically: "B" < "a" in byte order (uppercase comes before lowercase in ASCII)
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: {
          "Content-Type": `text/plain`,
          [STREAM_SEQ_HEADER]: `B`,
        },
        body: `second`,
      })

      expect(response.status).toBe(409)
    })

    test(`should handle binary data with integrity`, async () => {
      const streamPath = `/v1/stream/binary-test-${Date.now()}`

      // Create binary stream with various byte values including 0x00 and 0xFF
      const binaryData = new Uint8Array([
        0x00, 0x01, 0x02, 0x7f, 0x80, 0xfe, 0xff,
      ])

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `application/octet-stream` },
        body: binaryData,
      })

      // Read back and verify byte-for-byte
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `GET`,
      })

      const buffer = await response.arrayBuffer()
      const result = new Uint8Array(buffer)

      expect(result.length).toBe(binaryData.length)
      for (let i = 0; i < binaryData.length; i++) {
        expect(result[i]).toBe(binaryData[i])
      }
    })

    test(`should return Location header on 201`, async () => {
      const streamPath = `/v1/stream/location-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      expect(response.status).toBe(201)
      const location = response.headers.get(`location`)
      expect(location).toBeDefined()
      expect(location).toBe(`${getBaseUrl()}${streamPath}`)
    })

    test(`should reject missing Content-Type on POST`, async () => {
      const streamPath = `/v1/stream/missing-ct-post-test-${Date.now()}`

      // Create stream
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
      })

      // Try to append without Content-Type - should fail
      // Note: fetch will try to detect the Content-Type based on the body.
      // Blob with an explicit empty type results in the header being omitted.
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        body: new Blob([`data`], { type: `` }),
      })

      expect(response.status).toBe(400)
    })

    test(`should accept PUT without Content-Type (use default)`, async () => {
      const streamPath = `/v1/stream/no-ct-put-test-${Date.now()}`

      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
      })

      expect([200, 201]).toContain(response.status)
      const contentType = response.headers.get(`content-type`)
      expect(contentType).toBeDefined()
    })

    test(`should ignore unknown query parameters`, async () => {
      const streamPath = `/v1/stream/unknown-param-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `text/plain` },
        body: `test data`,
      })

      // Should work fine with unknown params (use -1 to start from beginning)
      const response = await fetch(
        `${getBaseUrl()}${streamPath}?offset=-1&foo=bar&baz=qux`,
        {
          method: `GET`,
        }
      )

      expect(response.status).toBe(200)
      const text = await response.text()
      expect(text).toBe(`test data`)
    })
  })
|
|
1543
|
+
|
|
1544
|
+
// ============================================================================
|
|
1545
|
+
// Long-Poll Edge Cases
|
|
1546
|
+
// ============================================================================
|
|
1547
|
+
|
|
1548
|
+
describe(`Long-Poll Edge Cases`, () => {
|
|
1549
|
+
test(`should require offset parameter for long-poll`, async () => {
|
|
1550
|
+
const streamPath = `/v1/stream/longpoll-no-offset-test-${Date.now()}`
|
|
1551
|
+
|
|
1552
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
1553
|
+
method: `PUT`,
|
|
1554
|
+
headers: { "Content-Type": `text/plain` },
|
|
1555
|
+
})
|
|
1556
|
+
|
|
1557
|
+
// Try long-poll without offset - protocol says offset MUST be provided
|
|
1558
|
+
const response = await fetch(
|
|
1559
|
+
`${getBaseUrl()}${streamPath}?live=long-poll`,
|
|
1560
|
+
{
|
|
1561
|
+
method: `GET`,
|
|
1562
|
+
}
|
|
1563
|
+
)
|
|
1564
|
+
|
|
1565
|
+
expect(response.status).toBe(400)
|
|
1566
|
+
})
|
|
1567
|
+
|
|
1568
|
+
test(`should generate Stream-Cursor header on long-poll responses`, async () => {
|
|
1569
|
+
const streamPath = `/v1/stream/longpoll-cursor-gen-test-${Date.now()}`
|
|
1570
|
+
|
|
1571
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
1572
|
+
method: `PUT`,
|
|
1573
|
+
headers: { "Content-Type": `text/plain` },
|
|
1574
|
+
body: `test data`,
|
|
1575
|
+
})
|
|
1576
|
+
|
|
1577
|
+
// Long-poll request without cursor - server MUST generate one
|
|
1578
|
+
const response = await fetch(
|
|
1579
|
+
`${getBaseUrl()}${streamPath}?offset=-1&live=long-poll`,
|
|
1580
|
+
{
|
|
1581
|
+
method: `GET`,
|
|
1582
|
+
}
|
|
1583
|
+
)
|
|
1584
|
+
|
|
1585
|
+
expect(response.status).toBe(200)
|
|
1586
|
+
|
|
1587
|
+
// Server MUST return a Stream-Cursor header
|
|
1588
|
+
const cursor = response.headers.get(`Stream-Cursor`)
|
|
1589
|
+
expect(cursor).toBeDefined()
|
|
1590
|
+
expect(cursor).not.toBeNull()
|
|
1591
|
+
|
|
1592
|
+
// Cursor must be a numeric string (interval number)
|
|
1593
|
+
expect(/^\d+$/.test(cursor!)).toBe(true)
|
|
1594
|
+
})
|
|
1595
|
+
|
|
1596
|
+
test(`should echo cursor and handle collision with jitter`, async () => {
|
|
1597
|
+
const streamPath = `/v1/stream/longpoll-cursor-collision-test-${Date.now()}`
|
|
1598
|
+
|
|
1599
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
1600
|
+
method: `PUT`,
|
|
1601
|
+
headers: { "Content-Type": `text/plain` },
|
|
1602
|
+
body: `test data`,
|
|
1603
|
+
})
|
|
1604
|
+
|
|
1605
|
+
// First request to get current cursor
|
|
1606
|
+
const response1 = await fetch(
|
|
1607
|
+
`${getBaseUrl()}${streamPath}?offset=-1&live=long-poll`,
|
|
1608
|
+
{
|
|
1609
|
+
method: `GET`,
|
|
1610
|
+
}
|
|
1611
|
+
)
|
|
1612
|
+
|
|
1613
|
+
expect(response1.status).toBe(200)
|
|
1614
|
+
const cursor1 = response1.headers.get(`Stream-Cursor`)
|
|
1615
|
+
expect(cursor1).toBeDefined()
|
|
1616
|
+
|
|
1617
|
+
// Immediate second request with same cursor - should get advanced cursor due to collision
|
|
1618
|
+
const response2 = await fetch(
|
|
1619
|
+
`${getBaseUrl()}${streamPath}?offset=-1&live=long-poll&cursor=${cursor1}`,
|
|
1620
|
+
{
|
|
1621
|
+
method: `GET`,
|
|
1622
|
+
}
|
|
1623
|
+
)
|
|
1624
|
+
|
|
1625
|
+
expect(response2.status).toBe(200)
|
|
1626
|
+
const cursor2 = response2.headers.get(`Stream-Cursor`)
|
|
1627
|
+
expect(cursor2).toBeDefined()
|
|
1628
|
+
|
|
1629
|
+
// The returned cursor MUST be strictly greater than the one we sent
|
|
1630
|
+
// (monotonic progression prevents cache cycles)
|
|
1631
|
+
expect(parseInt(cursor2!, 10)).toBeGreaterThan(parseInt(cursor1!, 10))
|
|
1632
|
+
})
|
|
1633
|
+
|
|
1634
|
+
test(`should return Stream-Cursor, Stream-Up-To-Date and Stream-Next-Offset on 204 timeout`, async () => {
|
|
1635
|
+
const streamPath = `/v1/stream/longpoll-204-headers-test-${Date.now()}`
|
|
1636
|
+
|
|
1637
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
1638
|
+
method: `PUT`,
|
|
1639
|
+
headers: { "Content-Type": `text/plain` },
|
|
1640
|
+
})
|
|
1641
|
+
|
|
1642
|
+
// Get the current tail offset
|
|
1643
|
+
const headResponse = await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
1644
|
+
method: `HEAD`,
|
|
1645
|
+
})
|
|
1646
|
+
const tailOffset = headResponse.headers.get(STREAM_OFFSET_HEADER)
|
|
1647
|
+
expect(tailOffset).toBeDefined()
|
|
1648
|
+
|
|
1649
|
+
// Long-poll at tail offset with a short timeout
|
|
1650
|
+
// We use AbortController to limit wait time on our side
|
|
1651
|
+
const controller = new AbortController()
|
|
1652
|
+
const timeoutId = setTimeout(() => controller.abort(), 5000)
|
|
1653
|
+
|
|
1654
|
+
try {
|
|
1655
|
+
const response = await fetch(
|
|
1656
|
+
`${getBaseUrl()}${streamPath}?offset=${tailOffset}&live=long-poll`,
|
|
1657
|
+
{
|
|
1658
|
+
method: `GET`,
|
|
1659
|
+
signal: controller.signal,
|
|
1660
|
+
}
|
|
1661
|
+
)
|
|
1662
|
+
|
|
1663
|
+
clearTimeout(timeoutId)
|
|
1664
|
+
|
|
1665
|
+
// If we get a 204, verify headers
|
|
1666
|
+
if (response.status === 204) {
|
|
1667
|
+
expect(response.headers.get(STREAM_OFFSET_HEADER)).toBeDefined()
|
|
1668
|
+
expect(response.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)
|
|
1669
|
+
|
|
1670
|
+
// Server MUST return Stream-Cursor even on 204 timeout
|
|
1671
|
+
const cursor = response.headers.get(`Stream-Cursor`)
|
|
1672
|
+
expect(cursor).toBeDefined()
|
|
1673
|
+
expect(/^\d+$/.test(cursor!)).toBe(true)
|
|
1674
|
+
}
|
|
1675
|
+
// If we get a 200 (data arrived somehow), that's also valid
|
|
1676
|
+
expect([200, 204]).toContain(response.status)
|
|
1677
|
+
} catch (e) {
|
|
1678
|
+
clearTimeout(timeoutId)
|
|
1679
|
+
// AbortError is expected if server timeout is longer than our 5s
|
|
1680
|
+
if (e instanceof Error && e.name !== `AbortError`) {
|
|
1681
|
+
throw e
|
|
1682
|
+
}
|
|
1683
|
+
// Test passes - server just has a longer timeout than our abort
|
|
1684
|
+
}
|
|
1685
|
+
}, 10000)
|
|
1686
|
+
})
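
// Illustrative sketch (not part of the conformance suite): one way a client
// could drive the long-poll mode exercised above. It reuses only the
// offset/cursor query parameters and the Stream-Cursor header checked by
// these tests; the function name and return shape are hypothetical.
export async function longPollOnceSketch(
  streamUrl: string,
  offset: string,
  cursor?: string
): Promise<{
  status: number
  nextOffset: string | null
  cursor: string | null
  body: string
}> {
  const params = new URLSearchParams({ offset, live: `long-poll` })
  if (cursor) params.set(`cursor`, cursor)

  const response = await fetch(`${streamUrl}?${params.toString()}`)

  return {
    status: response.status,
    // Both 200 (data) and 204 (timeout) carry the next offset and a cursor
    nextOffset: response.headers.get(STREAM_OFFSET_HEADER),
    cursor: response.headers.get(`Stream-Cursor`),
    body: response.status === 200 ? await response.text() : ``,
  }
}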

// ============================================================================
// TTL and Expiry Edge Cases
// ============================================================================

describe(`TTL and Expiry Edge Cases`, () => {
  test(`should reject TTL with leading zeros`, async () => {
    const streamPath = `/v1/stream/ttl-leading-zeros-test-${Date.now()}`

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `00060`,
      },
    })

    expect(response.status).toBe(400)
  })

  test(`should reject TTL with plus sign`, async () => {
    const streamPath = `/v1/stream/ttl-plus-test-${Date.now()}`

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `+60`,
      },
    })

    expect(response.status).toBe(400)
  })

  test(`should reject TTL with float value`, async () => {
    const streamPath = `/v1/stream/ttl-float-test-${Date.now()}`

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `60.5`,
      },
    })

    expect(response.status).toBe(400)
  })

  test(`should reject TTL with scientific notation`, async () => {
    const streamPath = `/v1/stream/ttl-scientific-test-${Date.now()}`

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `1e3`,
      },
    })

    expect(response.status).toBe(400)
  })

  test(`should reject invalid Expires-At timestamp`, async () => {
    const streamPath = `/v1/stream/expires-invalid-test-${Date.now()}`

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-Expires-At": `not-a-timestamp`,
      },
    })

    expect(response.status).toBe(400)
  })

  test(`should accept Expires-At with Z timezone`, async () => {
    const streamPath = `/v1/stream/expires-z-test-${Date.now()}`

    const expiresAt = new Date(Date.now() + 3600000).toISOString()

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-Expires-At": expiresAt,
      },
    })

    expect([200, 201]).toContain(response.status)
  })

  test(`should accept Expires-At with timezone offset`, async () => {
    const streamPath = `/v1/stream/expires-offset-test-${Date.now()}`

    // RFC3339 with timezone offset
    const date = new Date(Date.now() + 3600000)
    const expiresAt = date.toISOString().replace(`Z`, `+00:00`)

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-Expires-At": expiresAt,
      },
    })

    expect([200, 201]).toContain(response.status)
  })

  test(`should handle idempotent PUT with same TTL`, async () => {
    const streamPath = `/v1/stream/ttl-idempotent-test-${Date.now()}`

    // Create with TTL
    const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `3600`,
      },
    })
    expect(response1.status).toBe(201)

    // PUT again with same TTL - should be idempotent
    const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `3600`,
      },
    })
    expect([200, 204]).toContain(response2.status)
  })

  test(`should reject idempotent PUT with different TTL`, async () => {
    const streamPath = `/v1/stream/ttl-conflict-test-${Date.now()}`

    // Create with TTL=3600
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `3600`,
      },
    })

    // PUT again with different TTL - should fail
    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `7200`,
      },
    })

    expect(response.status).toBe(409)
  })
})
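
// Illustrative sketch (not part of the conformance suite): the Stream-TTL
// shape implied by the rejection tests above - a plain base-10 integer with
// no sign, no leading zeros, no fractional part and no exponent. Whether any
// particular value (e.g. 0) is accepted remains server-defined; the helper
// name is hypothetical.
export function looksLikeValidStreamTTLSketch(value: string): boolean {
  return /^(0|[1-9][0-9]*)$/.test(value)
}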

// ============================================================================
// HEAD Metadata Edge Cases
// ============================================================================

describe(`HEAD Metadata Edge Cases`, () => {
  test(`should return TTL metadata if configured`, async () => {
    const streamPath = `/v1/stream/head-ttl-metadata-test-${Date.now()}`

    // Create with TTL
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-TTL": `3600`,
      },
    })

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `HEAD`,
    })

    // SHOULD return TTL metadata
    const ttl = response.headers.get(`Stream-TTL`)
    if (ttl) {
      expect(parseInt(ttl)).toBeGreaterThan(0)
      expect(parseInt(ttl)).toBeLessThanOrEqual(3600)
    }
  })

  test(`should return Expires-At metadata if configured`, async () => {
    const streamPath = `/v1/stream/head-expires-metadata-test-${Date.now()}`

    const expiresAt = new Date(Date.now() + 3600000).toISOString()

    // Create with Expires-At
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: {
        "Content-Type": `text/plain`,
        "Stream-Expires-At": expiresAt,
      },
    })

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `HEAD`,
    })

    // SHOULD return Expires-At metadata
    const expiresHeader = response.headers.get(`Stream-Expires-At`)
    if (expiresHeader) {
      expect(expiresHeader).toBeDefined()
    }
  })
})

// ============================================================================
// Caching and ETag Tests
// ============================================================================

describe(`Caching and ETag`, () => {
  test(`should generate ETag on GET responses`, async () => {
    const streamPath = `/v1/stream/etag-generate-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })

    expect(response.status).toBe(200)
    const etag = response.headers.get(`etag`)
    expect(etag).toBeDefined()
    expect(etag!.length).toBeGreaterThan(0)
  })

  test(`should return 304 Not Modified for matching If-None-Match`, async () => {
    const streamPath = `/v1/stream/etag-304-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // First request to get ETag
    const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })

    const etag = response1.headers.get(`etag`)
    expect(etag).toBeDefined()

    // Second request with If-None-Match - MUST return 304
    const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
      headers: {
        "If-None-Match": etag!,
      },
    })

    expect(response2.status).toBe(304)
    // 304 should have empty body
    const text = await response2.text()
    expect(text).toBe(``)
  })

  test(`should return 200 for non-matching If-None-Match`, async () => {
    const streamPath = `/v1/stream/etag-mismatch-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // Request with wrong ETag - should return 200 with data
    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
      headers: {
        "If-None-Match": `"wrong-etag"`,
      },
    })

    expect(response.status).toBe(200)
    const text = await response.text()
    expect(text).toBe(`test data`)
  })

  test(`should return new ETag after data changes`, async () => {
    const streamPath = `/v1/stream/etag-change-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `initial`,
    })

    // Get initial ETag
    const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })
    const etag1 = response1.headers.get(`etag`)

    // Append more data
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: ` more`,
    })

    // Get new ETag
    const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })
    const etag2 = response2.headers.get(`etag`)

    // ETags should be different
    expect(etag1).not.toBe(etag2)

    // Old ETag should now return 200 (not 304)
    const response3 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
      headers: {
        "If-None-Match": etag1!,
      },
    })
    expect(response3.status).toBe(200)
  })
})
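
// Illustrative sketch (not part of the conformance suite): a conditional read
// built on the ETag behaviour verified above. It returns null when the server
// answers 304 Not Modified, otherwise the fresh body plus its new ETag. The
// helper name is hypothetical.
export async function conditionalReadSketch(
  streamUrl: string,
  knownEtag: string | null
): Promise<{ etag: string | null; body: string } | null> {
  const response = await fetch(streamUrl, {
    headers: knownEtag ? { "If-None-Match": knownEtag } : {},
  })

  if (response.status === 304) {
    // The cached copy is still current; no body is transferred
    return null
  }

  return {
    etag: response.headers.get(`etag`),
    body: await response.text(),
  }
}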

// ============================================================================
// Chunking and Large Payloads
// ============================================================================

describe(`Chunking and Large Payloads`, () => {
  test(`should handle chunk-size pagination correctly`, async () => {
    const streamPath = `/v1/stream/chunk-pagination-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `application/octet-stream` },
    })

    // Append a large amount of data (100KB)
    const largeData = new Uint8Array(100 * 1024)
    for (let i = 0; i < largeData.length; i++) {
      largeData[i] = i % 256
    }

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `application/octet-stream` },
      body: largeData,
    })

    // Read back using pagination
    const accumulated: Array<number> = []
    let currentOffset: string | null = null
    let previousOffset: string | null = null
    let iterations = 0
    const maxIterations = 1000

    while (iterations < maxIterations) {
      iterations++

      const url: string = currentOffset
        ? `${getBaseUrl()}${streamPath}?offset=${encodeURIComponent(currentOffset)}`
        : `${getBaseUrl()}${streamPath}`

      const response: Response = await fetch(url, { method: `GET` })
      expect(response.status).toBe(200)

      const buffer = await response.arrayBuffer()
      const data = new Uint8Array(buffer)

      if (data.length > 0) {
        accumulated.push(...Array.from(data))
      }

      const nextOffset: string | null =
        response.headers.get(STREAM_OFFSET_HEADER)
      const upToDate = response.headers.get(STREAM_UP_TO_DATE_HEADER)

      if (upToDate === `true` && data.length === 0) {
        break
      }

      expect(nextOffset).toBeDefined()

      // Verify offset progresses
      if (nextOffset === currentOffset && data.length === 0) {
        break
      }

      // Verify monotonic progression
      if (previousOffset && nextOffset) {
        expect(nextOffset >= previousOffset).toBe(true)
      }

      previousOffset = currentOffset
      currentOffset = nextOffset
    }

    // Verify we got all the data
    const result = new Uint8Array(accumulated)
    expect(result.length).toBe(largeData.length)
    for (let i = 0; i < largeData.length; i++) {
      expect(result[i]).toBe(largeData[i])
    }
  })

  test(`should handle large payload appropriately`, async () => {
    const streamPath = `/v1/stream/large-payload-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `application/octet-stream` },
    })

    // Try to append very large payload (10MB)
    const largeData = new Uint8Array(10 * 1024 * 1024)

    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `application/octet-stream` },
      body: largeData,
    })

    // Server may accept it (200/204) or reject with 413
    expect([200, 204, 413]).toContain(response.status)
  }, 30000)
})

// ============================================================================
// Read-Your-Writes Consistency
// ============================================================================

describe(`Read-Your-Writes Consistency`, () => {
  test(`should immediately read message after append`, async () => {
    const streamPath = `/v1/stream/ryw-test-${Date.now()}`

    // Create stream and append
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `initial`,
    })

    // Immediately read - should see the data
    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })

    const text = await response.text()
    expect(text).toBe(`initial`)
  })

  test(`should immediately read multiple appends`, async () => {
    const streamPath = `/v1/stream/ryw-multi-test-${Date.now()}`

    // Create stream
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
    })

    // Append multiple messages
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `msg1`,
    })

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `msg2`,
    })

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `msg3`,
    })

    // Immediately read - should see all messages
    const response = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })

    const text = await response.text()
    expect(text).toBe(`msg1msg2msg3`)
  })

  test(`should serve offset-based reads immediately after append`, async () => {
    const streamPath = `/v1/stream/ryw-offset-test-${Date.now()}`

    // Create stream with first message
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `first`,
    })

    // Get offset
    const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `GET`,
    })
    const offset1 = response1.headers.get(STREAM_OFFSET_HEADER)!

    // Append more messages immediately
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `second`,
    })

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `third`,
    })

    // Immediately read from offset1 - should see second and third
    const response2 = await fetch(
      `${getBaseUrl()}${streamPath}?offset=${offset1}`,
      {
        method: `GET`,
      }
    )

    const text = await response2.text()
    expect(text).toBe(`secondthird`)
  })
})

// ============================================================================
// SSE (Server-Sent Events) Mode
// ============================================================================

describe(`SSE Mode`, () => {
  test(`should return text/event-stream content-type for SSE requests`, async () => {
    const streamPath = `/v1/stream/sse-content-type-test-${Date.now()}`

    // Create stream with text/plain content type
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // Make SSE request with AbortController to avoid hanging
    const { response } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { headers: { Accept: `text/event-stream` }, maxChunks: 0 }
    )

    expect(response.status).toBe(200)
    expect(response.headers.get(`content-type`)).toBe(`text/event-stream`)
  })

  test(`should accept live=sse query parameter for application/json`, async () => {
    const streamPath = `/v1/stream/sse-json-test-${Date.now()}`

    // Create stream with application/json content type
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `application/json` },
      body: JSON.stringify({ message: `hello` }),
    })

    const { response } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { headers: { Accept: `text/event-stream` }, maxChunks: 0 }
    )

    expect(response.status).toBe(200)
    expect(response.headers.get(`content-type`)).toBe(`text/event-stream`)
  })

  test(`should require offset parameter for SSE mode`, async () => {
    const streamPath = `/v1/stream/sse-no-offset-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
    })

    // SSE without offset should fail (similar to long-poll)
    const response = await fetch(`${getBaseUrl()}${streamPath}?live=sse`, {
      method: `GET`,
    })

    // Should return 400 (offset required for live modes)
    expect(response.status).toBe(400)
  })

  test(`client should reject SSE mode for incompatible content types`, async () => {
    const streamPath = `/v1/stream/sse-binary-test-${Date.now()}`

    // Create stream with binary content type (not SSE compatible)
    const stream = await DurableStream.create({
      url: `${getBaseUrl()}${streamPath}`,
      contentType: `application/octet-stream`,
    })

    // Append some binary data
    await stream.append(new Uint8Array([0x01, 0x02, 0x03]))

    // Trying to read via SSE mode should throw
    await expect(stream.stream({ live: `sse` })).rejects.toThrow()
  })

  test(`should stream data events via SSE`, async () => {
    const streamPath = `/v1/stream/sse-data-stream-test-${Date.now()}`

    // Create stream with text/plain content type
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `message one`,
    })

    // Append more data
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `message two`,
    })

    // Make SSE request and read the response body
    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `message two` }
    )

    expect(response.status).toBe(200)

    // Verify SSE format: should contain event: and data: lines
    expect(received).toContain(`event:`)
    expect(received).toContain(`data:`)
  })

  test(`should send control events with offset`, async () => {
    const streamPath = `/v1/stream/sse-control-event-test-${Date.now()}`

    // Create stream with data
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // Make SSE request
    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `event: control` }
    )

    expect(response.status).toBe(200)

    // Verify control event format (Protocol Section 5.7)
    expect(received).toContain(`event: control`)
    expect(received).toContain(`streamNextOffset`)
  })

  test(`should generate streamCursor in SSE control events`, async () => {
    const streamPath = `/v1/stream/sse-cursor-gen-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // SSE request without cursor - server MUST generate one
    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `streamCursor` }
    )

    expect(response.status).toBe(200)

    // Parse control event to find streamCursor
    const controlMatch = received.match(/event: control\s*\ndata: ({[^}]+})/)
    expect(controlMatch).toBeDefined()

    const controlData = JSON.parse(controlMatch![1] as string)
    expect(controlData.streamCursor).toBeDefined()

    // Cursor must be a numeric string (interval number)
    expect(/^\d+$/.test(controlData.streamCursor)).toBe(true)
  })

  test(`should handle cursor collision with jitter in SSE mode`, async () => {
    const streamPath = `/v1/stream/sse-cursor-collision-test-${Date.now()}`

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // First SSE request to get current cursor
    const { received: received1 } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `streamCursor` }
    )

    const controlMatch1 = received1.match(
      /event: control\s*\ndata: ({[^}]+})/
    )
    expect(controlMatch1).toBeDefined()
    const cursor1 = JSON.parse(controlMatch1![1] as string).streamCursor

    // Second SSE request with same cursor - should get advanced cursor
    const { received: received2 } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse&cursor=${cursor1}`,
      { untilContent: `streamCursor` }
    )

    const controlMatch2 = received2.match(
      /event: control\s*\ndata: ({[^}]+})/
    )
    expect(controlMatch2).toBeDefined()
    const cursor2 = JSON.parse(controlMatch2![1] as string).streamCursor

    // The returned cursor MUST be strictly greater than the one we sent
    // (monotonic progression prevents cache cycles)
    expect(parseInt(cursor2 as string, 10)).toBeGreaterThan(
      parseInt(cursor1 as string, 10)
    )
  })

  test(`should wrap JSON data in arrays for SSE and produce valid JSON`, async () => {
    const streamPath = `/v1/stream/sse-json-wrap-test-${Date.now()}`

    // Create JSON stream
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `application/json` },
      body: JSON.stringify({ id: 1, message: `hello` }),
    })

    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `event: data` }
    )

    expect(response.status).toBe(200)
    expect(received).toContain(`event: data`)

    // Parse SSE events properly (handles multi-line data per SSE spec)
    const events = parseSSEEvents(received)
    const dataEvent = events.find((e) => e.type === `data`)
    expect(dataEvent).toBeDefined()

    // This will throw if JSON is invalid (e.g., trailing comma)
    const parsed = JSON.parse(dataEvent!.data)

    // Verify the structure matches what we sent
    expect(parsed).toEqual([{ id: 1, message: `hello` }])
  })

  test(`should handle SSE for empty stream with correct offset`, async () => {
    const streamPath = `/v1/stream/sse-empty-test-${Date.now()}`

    // Create empty stream
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
    })

    // First, get the offset from HTTP GET (the canonical source)
    const httpResponse = await fetch(`${getBaseUrl()}${streamPath}`)
    const httpOffset = httpResponse.headers.get(`Stream-Next-Offset`)
    expect(httpOffset).toBeDefined()
    expect(httpOffset).not.toBe(`-1`) // Should be the stream's actual offset, not -1

    // Make SSE request
    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `event: control` }
    )
    expect(response.status).toBe(200)

    // Should get a control event even for empty stream
    expect(received).toContain(`event: control`)

    // Parse the control event and verify offset matches HTTP GET
    const controlLine = received
      .split(`\n`)
      .find((l) => l.startsWith(`data: `) && l.includes(`streamNextOffset`))
    expect(controlLine).toBeDefined()

    const controlPayload = controlLine!.slice(`data: `.length)
    const controlData = JSON.parse(controlPayload)

    // SSE control offset should match HTTP GET offset (not -1)
    expect(controlData[`streamNextOffset`]).toBe(httpOffset)
  })

  test(`should send upToDate flag in SSE control events when caught up`, async () => {
    const streamPath = `/v1/stream/sse-uptodate-test-${Date.now()}`

    // Create stream with data
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // Make SSE request and read until we get a control event
    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `"upToDate"` }
    )

    expect(response.status).toBe(200)

    // Parse the control event
    const controlLine = received
      .split(`\n`)
      .find((l) => l.startsWith(`data: `) && l.includes(`streamNextOffset`))
    expect(controlLine).toBeDefined()

    const controlPayload = controlLine!.slice(`data: `.length)
    const controlData = JSON.parse(controlPayload)

    // When client has read all data, server MUST include upToDate: true
    // This is essential for clients to know they've caught up to head
    expect(controlData.upToDate).toBe(true)
  })

  test(`should have correct SSE headers (no Content-Length, proper Cache-Control)`, async () => {
    const streamPath = `/v1/stream/sse-headers-test-${Date.now()}`

    // Create stream with data
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `test data`,
    })

    // Make SSE request
    const { response } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `test data` }
    )

    expect(response.status).toBe(200)

    // SSE MUST have text/event-stream content type
    expect(response.headers.get(`content-type`)).toBe(`text/event-stream`)

    // SSE MUST NOT have Content-Length (it's a streaming response)
    expect(response.headers.get(`content-length`)).toBeNull()

    // SSE SHOULD have Cache-Control: no-cache to prevent proxy buffering
    const cacheControl = response.headers.get(`cache-control`)
    expect(cacheControl).toContain(`no-cache`)
  })

  test(`should handle newlines in text/plain payloads`, async () => {
    const streamPath = `/v1/stream/sse-newline-test-${Date.now()}`

    // Create stream with text containing newlines
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `line1\nline2\nline3`,
    })

    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `event: control` }
    )

    expect(response.status).toBe(200)
    expect(received).toContain(`event: data`)

    // Per SSE spec, multiline data must use multiple "data:" lines
    // Each line should have its own data: prefix
    expect(received).toContain(`data: line1`)
    expect(received).toContain(`data: line2`)
    expect(received).toContain(`data: line3`)
  })

  test(`should generate unique, monotonically increasing offsets in SSE mode`, async () => {
    const streamPath = `/v1/stream/sse-monotonic-offset-test-${Date.now()}`

    // Create stream
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
    })

    // Append multiple messages
    for (let i = 0; i < 5; i++) {
      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `text/plain` },
        body: `message ${i}`,
      })
    }

    // Make SSE request
    const { response, received } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `event: control` }
    )

    expect(response.status).toBe(200)

    // Extract all control event offsets
    const controlLines = received
      .split(`\n`)
      .filter((l) => l.startsWith(`data: `) && l.includes(`streamNextOffset`))

    const offsets: Array<string> = []
    for (const line of controlLines) {
      const payload = line.slice(`data: `.length)
      const data = JSON.parse(payload)
      offsets.push(data[`streamNextOffset`])
    }

    // Verify offsets are unique and strictly increasing (lexicographically)
    for (let i = 1; i < offsets.length; i++) {
      expect(offsets[i]! > offsets[i - 1]!).toBe(true)
    }
  })

  test(`should support reconnection with last known offset`, async () => {
    const streamPath = `/v1/stream/sse-reconnect-test-${Date.now()}`

    // Create stream with initial data
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `PUT`,
      headers: { "Content-Type": `text/plain` },
      body: `message 1`,
    })

    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `message 2`,
    })

    // First SSE connection - get initial data and offset
    let lastOffset: string | null = null
    const { response: response1, received: received1 } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=-1&live=sse`,
      { untilContent: `event: control` }
    )

    expect(response1.status).toBe(200)

    // Extract offset from control event
    const controlLine = received1
      .split(`\n`)
      .find((l) => l.startsWith(`data: `) && l.includes(`streamNextOffset`))
    const controlPayload = controlLine!.slice(`data: `.length)
    lastOffset = JSON.parse(controlPayload)[`streamNextOffset`]

    expect(lastOffset).toBeDefined()

    // Append more data while "disconnected"
    await fetch(`${getBaseUrl()}${streamPath}`, {
      method: `POST`,
      headers: { "Content-Type": `text/plain` },
      body: `message 3`,
    })

    // Reconnect with last known offset
    const { response: response2, received: received2 } = await fetchSSE(
      `${getBaseUrl()}${streamPath}?offset=${lastOffset}&live=sse`,
      { untilContent: `message 3` }
    )

    expect(response2.status).toBe(200)

    // Should receive message 3 (the new one), not duplicates of 1 and 2
    expect(received2).toContain(`message 3`)
    // Should NOT contain message 1 or 2 (already received before disconnect)
    expect(received2).not.toContain(`message 1`)
    expect(received2).not.toContain(`message 2`)
  })
})
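
// Illustrative sketch (not part of the conformance suite): a minimal SSE frame
// parser of the kind the tests above rely on (the suite's own parseSSEEvents
// helper lives elsewhere in this file). Per the SSE format, frames are
// separated by a blank line and multi-line payloads use repeated "data:" lines.
export function parseSSEFramesSketch(
  raw: string
): Array<{ type: string; data: string }> {
  const events: Array<{ type: string; data: string }> = []

  for (const frame of raw.replace(/\r\n/g, `\n`).split(`\n\n`)) {
    let type = `message`
    const dataLines: Array<string> = []

    for (const line of frame.split(`\n`)) {
      if (line.startsWith(`event:`)) {
        type = line.slice(`event:`.length).trim()
      } else if (line.startsWith(`data:`)) {
        // Strip the field name and the single optional leading space
        dataLines.push(line.slice(`data:`.length).replace(/^ /, ``))
      }
    }

    if (dataLines.length > 0) {
      events.push({ type, data: dataLines.join(`\n`) })
    }
  }

  return events
}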
|
|
2678
|
+
|
|
2679
|
+
// ============================================================================
|
|
2680
|
+
// JSON Mode
|
|
2681
|
+
// ============================================================================
|
|
2682
|
+
|
|
2683
|
+
describe(`JSON Mode`, () => {
|
|
2684
|
+
test(`should allow PUT with empty array body (creates empty stream)`, async () => {
|
|
2685
|
+
const streamPath = `/v1/stream/json-put-empty-array-test-${Date.now()}`
|
|
2686
|
+
|
|
2687
|
+
// PUT with empty array should create an empty stream
|
|
2688
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2689
|
+
method: `PUT`,
|
|
2690
|
+
headers: { "Content-Type": `application/json` },
|
|
2691
|
+
body: `[]`,
|
|
2692
|
+
})
|
|
2693
|
+
|
|
2694
|
+
expect(response.status).toBe(201)
|
|
2695
|
+
|
|
2696
|
+
// Reading should return empty array
|
|
2697
|
+
const readResponse = await fetch(`${getBaseUrl()}${streamPath}`)
|
|
2698
|
+
const data = await readResponse.json()
|
|
2699
|
+
expect(data).toEqual([])
|
|
2700
|
+
expect(readResponse.headers.get(STREAM_UP_TO_DATE_HEADER)).toBe(`true`)
|
|
2701
|
+
})
|
|
2702
|
+
|
|
2703
|
+
test(`should reject POST with empty array body`, async () => {
|
|
2704
|
+
const streamPath = `/v1/stream/json-post-empty-array-test-${Date.now()}`
|
|
2705
|
+
|
|
2706
|
+
// Create stream first
|
|
2707
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2708
|
+
method: `PUT`,
|
|
2709
|
+
headers: { "Content-Type": `application/json` },
|
|
2710
|
+
})
|
|
2711
|
+
|
|
2712
|
+
// POST with empty array should be rejected
|
|
2713
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2714
|
+
method: `POST`,
|
|
2715
|
+
headers: { "Content-Type": `application/json` },
|
|
2716
|
+
body: `[]`,
|
|
2717
|
+
})
|
|
2718
|
+
|
|
2719
|
+
expect(response.status).toBe(400)
|
|
2720
|
+
})
|
|
2721
|
+
|
|
2722
|
+
test(`should handle content-type with charset parameter`, async () => {
|
|
2723
|
+
const streamPath = `/v1/stream/json-charset-test-${Date.now()}`
|
|
2724
|
+
|
|
2725
|
+
// Create with charset parameter
|
|
2726
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2727
|
+
method: `PUT`,
|
|
2728
|
+
headers: { "Content-Type": `application/json; charset=utf-8` },
|
|
2729
|
+
})
|
|
2730
|
+
|
|
2731
|
+
// Append JSON
|
|
2732
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2733
|
+
method: `POST`,
|
|
2734
|
+
headers: { "Content-Type": `application/json; charset=utf-8` },
|
|
2735
|
+
body: JSON.stringify({ message: `hello` }),
|
|
2736
|
+
})
|
|
2737
|
+
|
|
2738
|
+
// Read and verify it's treated as JSON mode
|
|
2739
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`)
|
|
2740
|
+
const data = await response.json()
|
|
2741
|
+
|
|
2742
|
+
expect(Array.isArray(data)).toBe(true)
|
|
2743
|
+
expect(data).toEqual([{ message: `hello` }])
|
|
2744
|
+
})
|
|
2745
|
+
|
|
2746
|
+
test(`should wrap single JSON value in array`, async () => {
|
|
2747
|
+
const streamPath = `/v1/stream/json-single-test-${Date.now()}`
|
|
2748
|
+
|
|
2749
|
+
const stream = await DurableStream.create({
|
|
2750
|
+
url: `${getBaseUrl()}${streamPath}`,
|
|
2751
|
+
contentType: `application/json`,
|
|
2752
|
+
})
|
|
2753
|
+
|
|
2754
|
+
await stream.append({ message: `hello` })
|
|
2755
|
+
|
|
2756
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`)
|
|
2757
|
+
const data = await response.json()
|
|
2758
|
+
|
|
2759
|
+
expect(Array.isArray(data)).toBe(true)
|
|
2760
|
+
expect(data).toEqual([{ message: `hello` }])
|
|
2761
|
+
})
|
|
2762
|
+
|
|
2763
|
+
test(`should store arrays as single messages`, async () => {
|
|
2764
|
+
const streamPath = `/v1/stream/json-array-test-${Date.now()}`
|
|
2765
|
+
|
|
2766
|
+
const stream = await DurableStream.create({
|
|
2767
|
+
url: `${getBaseUrl()}${streamPath}`,
|
|
2768
|
+
contentType: `application/json`,
|
|
2769
|
+
})
|
|
2770
|
+
|
|
2771
|
+
// Append array - should be stored as ONE message containing the array
|
|
2772
|
+
await stream.append([{ id: 1 }, { id: 2 }, { id: 3 }])
|
|
2773
|
+
|
|
2774
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`)
|
|
2775
|
+
const data = await response.json()
|
|
2776
|
+
|
|
2777
|
+
expect(Array.isArray(data)).toBe(true)
|
|
2778
|
+
expect(data).toEqual([[{ id: 1 }, { id: 2 }, { id: 3 }]])
|
|
2779
|
+
})
|
|
2780
|
+
|
|
2781
|
+
test(`should concatenate multiple appends into single array`, async () => {
|
|
2782
|
+
const streamPath = `/v1/stream/json-concat-test-${Date.now()}`
|
|
2783
|
+
|
|
2784
|
+
const stream = await DurableStream.create({
|
|
2785
|
+
url: `${getBaseUrl()}${streamPath}`,
|
|
2786
|
+
contentType: `application/json`,
|
|
2787
|
+
})
|
|
2788
|
+
|
|
2789
|
+
await stream.append({ event: `first` })
|
|
2790
|
+
await stream.append({ event: `second` })
|
|
2791
|
+
await stream.append({ event: `third` })
|
|
2792
|
+
|
|
2793
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`)
|
|
2794
|
+
const data = await response.json()
|
|
2795
|
+
|
|
2796
|
+
expect(Array.isArray(data)).toBe(true)
|
|
2797
|
+
expect(data).toEqual([
|
|
2798
|
+
{ event: `first` },
|
|
2799
|
+
{ event: `second` },
|
|
2800
|
+
{ event: `third` },
|
|
2801
|
+
])
|
|
2802
|
+
})
|
|
2803
|
+
|
|
2804
|
+
test(`should handle mixed single values and arrays`, async () => {
|
|
2805
|
+
const streamPath = `/v1/stream/json-mixed-test-${Date.now()}`
|
|
2806
|
+
|
|
2807
|
+
const stream = await DurableStream.create({
|
|
2808
|
+
url: `${getBaseUrl()}${streamPath}`,
|
|
2809
|
+
contentType: `application/json`,
|
|
2810
|
+
})
|
|
2811
|
+
|
|
2812
|
+
await stream.append({ type: `single` })
|
|
2813
|
+
await stream.append([
|
|
2814
|
+
{ type: `array`, id: 1 },
|
|
2815
|
+
{ type: `array`, id: 2 },
|
|
2816
|
+
])
|
|
2817
|
+
await stream.append({ type: `single-again` })
|
|
2818
|
+
|
|
2819
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`)
|
|
2820
|
+
const data = await response.json()
|
|
2821
|
+
|
|
2822
|
+
// Array is stored as ONE message
|
|
2823
|
+
expect(data).toEqual([
|
|
2824
|
+
{ type: `single` },
|
|
2825
|
+
[
|
|
2826
|
+
{ type: `array`, id: 1 },
|
|
2827
|
+
{ type: `array`, id: 2 },
|
|
2828
|
+
],
|
|
2829
|
+
{ type: `single-again` },
|
|
2830
|
+
])
|
|
2831
|
+
})
|
|
2832
|
+
|
|
2833
|
+
test(`should reject invalid JSON with 400`, async () => {
|
|
2834
|
+
const streamPath = `/v1/stream/json-invalid-test-${Date.now()}`
|
|
2835
|
+
|
|
2836
|
+
await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2837
|
+
method: `PUT`,
|
|
2838
|
+
headers: { "Content-Type": `application/json` },
|
|
2839
|
+
})
|
|
2840
|
+
|
|
2841
|
+
// Try to append invalid JSON
|
|
2842
|
+
const response = await fetch(`${getBaseUrl()}${streamPath}`, {
|
|
2843
|
+
method: `POST`,
|
|
2844
|
+
headers: { "Content-Type": `application/json` },
|
|
2845
|
+
body: `{ invalid json }`,
|
|
2846
|
+
})
|
|
2847
|
+
|
|
2848
|
+
expect(response.status).toBe(400)
|
|
2849
|
+
expect(response.ok).toBe(false)
|
|
2850
|
+
})
|
|
2851
|
+
|
|
2852
|
+
    test(`should handle various JSON value types`, async () => {
      const streamPath = `/v1/stream/json-types-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      await stream.append(`string value`)
      await stream.append(42)
      await stream.append(true)
      await stream.append(null)
      await stream.append({ object: `value` })
      await stream.append([1, 2, 3])

      const response = await fetch(`${getBaseUrl()}${streamPath}`)
      const data = await response.json()

      expect(data).toEqual([
        `string value`,
        42,
        true,
        null,
        { object: `value` },
        [1, 2, 3],
      ])
    })

    test(`should preserve JSON structure and nesting`, async () => {
      const streamPath = `/v1/stream/json-nested-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      await stream.append({
        user: {
          id: 123,
          name: `Alice`,
          tags: [`admin`, `verified`],
        },
        timestamp: `2024-01-01T00:00:00Z`,
      })

      const response = await fetch(`${getBaseUrl()}${streamPath}`)
      const data = await response.json()

      expect(data).toEqual([
        {
          user: {
            id: 123,
            name: `Alice`,
            tags: [`admin`, `verified`],
          },
          timestamp: `2024-01-01T00:00:00Z`,
        },
      ])
    })

    test(`should work with client json() iterator`, async () => {
      const streamPath = `/v1/stream/json-iterator-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      await stream.append({ id: 1 })
      await stream.append({ id: 2 })
      await stream.append({ id: 3 })

      const res = await stream.stream<{ id: number }>({ live: false })
      const items = await res.json()

      // All three objects are batched together by the writer
      expect(items).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }])
    })

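    // The `stream({ live: false })` + `res.json()` pair used above is the
    // client's catch-up read path: it returns what is currently stored in the
    // stream and resolves without subscribing for live updates.
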
    test(`should reject empty JSON arrays with 400`, async () => {
      const streamPath = `/v1/stream/json-empty-array-test-${Date.now()}`

      await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `PUT`,
        headers: { "Content-Type": `application/json` },
      })

      // Try to append empty array
      const response = await fetch(`${getBaseUrl()}${streamPath}`, {
        method: `POST`,
        headers: { "Content-Type": `application/json` },
        body: `[]`,
      })

      expect(response.status).toBe(400)
      expect(response.ok).toBe(false)
    })

    test(`should store nested arrays as single messages`, async () => {
      const streamPath = `/v1/stream/json-nested-arrays-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      // Append nested array - stored as ONE message
      await stream.append([
        [1, 2],
        [3, 4],
      ])

      const response = await fetch(`${getBaseUrl()}${streamPath}`)
      const data = await response.json()

      // Should store 1 message containing the nested array
      expect(data).toEqual([
        [
          [1, 2],
          [3, 4],
        ],
      ])
    })

    test(`should store arrays as values when double-wrapped`, async () => {
      const streamPath = `/v1/stream/json-wrapped-array-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      // Append double-wrapped array - stored as ONE message containing the array
      await stream.append([[1, 2, 3]])

      const response = await fetch(`${getBaseUrl()}${streamPath}`)
      const data = await response.json()

      // Should store 1 message containing the single-wrapped array
      expect(data).toEqual([[[1, 2, 3]]])
      expect(data.length).toBe(1)
    })

    test(`should store primitive arrays as single messages`, async () => {
      const streamPath = `/v1/stream/json-primitive-array-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      // Each append stores ONE message
      await stream.append([1, 2, 3])
      await stream.append([`a`, `b`, `c`])

      const response = await fetch(`${getBaseUrl()}${streamPath}`)
      const data = await response.json()

      // Should store 2 messages (2 arrays)
      expect(data).toEqual([
        [1, 2, 3],
        [`a`, `b`, `c`],
      ])
    })

    test(`should handle mixed batching - single values, arrays, and nested arrays`, async () => {
      const streamPath = `/v1/stream/json-mixed-batching-test-${Date.now()}`

      const stream = await DurableStream.create({
        url: `${getBaseUrl()}${streamPath}`,
        contentType: `application/json`,
      })

      await stream.append({ single: 1 }) // 1 message
      await stream.append([{ batch: 2 }, { batch: 3 }]) // 1 message (array)
      await stream.append([[`nested`, `array`]]) // 1 message (nested array)
      await stream.append(42) // 1 message

      const response = await fetch(`${getBaseUrl()}${streamPath}`)
      const data = await response.json()

      expect(data).toEqual([
        { single: 1 },
        [{ batch: 2 }, { batch: 3 }],
        [[`nested`, `array`]],
        42,
      ])
      expect(data.length).toBe(4)
    })
  })

  // ============================================================================
  // Property-Based Tests (fast-check)
  // ============================================================================

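  /*
   * The tests below use fast-check's async properties: `fc.assert` runs the
   * property against `numRuns` generated inputs and reports a shrunk
   * counterexample on failure. The general shape (a minimal sketch, not tied
   * to any particular stream semantics) is:
   *
   *   await fc.assert(
   *     fc.asyncProperty(fc.uint8Array(), async (bytes) => {
   *       // exercise the server with `bytes` and assert an invariant
   *       return true
   *     }),
   *     { numRuns: 20 }
   *   )
   */
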
  describe(`Property-Based Tests (fast-check)`, () => {
    describe(`Byte-Exactness Property`, () => {
      test(`arbitrary byte sequences are preserved exactly`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate 1-10 chunks of arbitrary bytes (1-500 bytes each)
            fc.array(fc.uint8Array({ minLength: 1, maxLength: 500 }), {
              minLength: 1,
              maxLength: 10,
            }),
            async (chunks) => {
              const streamPath = `/v1/stream/fc-byte-exactness-${Date.now()}-${Math.random().toString(36).slice(2)}`

              // Create stream
              const createResponse = await fetch(
                `${getBaseUrl()}${streamPath}`,
                {
                  method: `PUT`,
                  headers: { "Content-Type": `application/octet-stream` },
                }
              )
              expect([200, 201, 204]).toContain(createResponse.status)

              // Append each chunk
              for (const chunk of chunks) {
                const response = await fetch(`${getBaseUrl()}${streamPath}`, {
                  method: `POST`,
                  headers: { "Content-Type": `application/octet-stream` },
                  body: chunk,
                })
                expect([200, 204]).toContain(response.status)
              }

              // Calculate expected result
              const totalLength = chunks.reduce(
                (sum, chunk) => sum + chunk.length,
                0
              )
              const expected = new Uint8Array(totalLength)
              let offset = 0
              for (const chunk of chunks) {
                expected.set(chunk, offset)
                offset += chunk.length
              }

              // Read back entire stream
              const accumulated: Array<number> = []
              let currentOffset: string | null = null
              let iterations = 0

              while (iterations < 100) {
                iterations++

                const url: string = currentOffset
                  ? `${getBaseUrl()}${streamPath}?offset=${encodeURIComponent(currentOffset)}`
                  : `${getBaseUrl()}${streamPath}`

                const response: Response = await fetch(url, { method: `GET` })
                expect(response.status).toBe(200)

                const buffer = await response.arrayBuffer()
                const data = new Uint8Array(buffer)

                if (data.length > 0) {
                  accumulated.push(...Array.from(data))
                }

                const nextOffset: string | null =
                  response.headers.get(STREAM_OFFSET_HEADER)
                const upToDate = response.headers.get(STREAM_UP_TO_DATE_HEADER)

                if (upToDate === `true` && data.length === 0) {
                  break
                }

                if (nextOffset === currentOffset) {
                  break
                }

                currentOffset = nextOffset
              }

              // Verify byte-for-byte exactness
              const result = new Uint8Array(accumulated)
              expect(result.length).toBe(expected.length)
              for (let i = 0; i < expected.length; i++) {
                expect(result[i]).toBe(expected[i])
              }

              return true
            }
          ),
          { numRuns: 20 } // Limit runs since each creates a stream
        )
      })

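      // The read loop above pages through the stream by following the offset
      // header from each response; it stops once the server reports up-to-date
      // with an empty body, or when the offset stops advancing. The cap of 100
      // iterations only guards against a non-conforming server that would
      // otherwise keep the loop spinning forever.
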
      test(`single byte values cover full range (0-255)`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate a byte value from 0-255
            fc.integer({ min: 0, max: 255 }),
            async (byteValue) => {
              const streamPath = `/v1/stream/fc-single-byte-${Date.now()}-${Math.random().toString(36).slice(2)}`

              // Create and append single byte
              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `application/octet-stream` },
              })

              const chunk = new Uint8Array([byteValue])
              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `POST`,
                headers: { "Content-Type": `application/octet-stream` },
                body: chunk,
              })

              // Read back
              const response = await fetch(`${getBaseUrl()}${streamPath}`)
              const buffer = await response.arrayBuffer()
              const result = new Uint8Array(buffer)

              expect(result.length).toBe(1)
              expect(result[0]).toBe(byteValue)

              return true
            }
          ),
          { numRuns: 50 } // Test a good sample of byte values
        )
      })
    })

    describe(`Operation Sequence Properties`, () => {
      // Define operation types for the state machine
      type AppendOp = { type: `append`; data: Uint8Array }
      type ReadOp = { type: `read` }
      type ReadFromOffsetOp = { type: `readFromOffset`; offsetIndex: number }

      test(`random operation sequences maintain stream invariants`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate a sequence of operations
            fc.array(
              fc.oneof(
                // Append operation with random data
                fc
                  .uint8Array({ minLength: 1, maxLength: 200 })
                  .map((data): AppendOp => ({ type: `append`, data })),
                // Full read operation
                fc.constant<ReadOp>({ type: `read` }),
                // Read from a saved offset (index into saved offsets array)
                fc.integer({ min: 0, max: 20 }).map(
                  (idx): ReadFromOffsetOp => ({
                    type: `readFromOffset`,
                    offsetIndex: idx,
                  })
                )
              ),
              { minLength: 5, maxLength: 30 }
            ),
            async (operations) => {
              const streamPath = `/v1/stream/fc-ops-${Date.now()}-${Math.random().toString(36).slice(2)}`

              // Create stream
              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `application/octet-stream` },
              })

              // Track state
              const appendedData: Array<number> = []
              const savedOffsets: Array<string> = []

              for (const op of operations) {
                if (op.type === `append`) {
                  const response = await fetch(`${getBaseUrl()}${streamPath}`, {
                    method: `POST`,
                    headers: { "Content-Type": `application/octet-stream` },
                    body: op.data as BodyInit,
                  })
                  expect([200, 204]).toContain(response.status)

                  // Track what we appended
                  appendedData.push(...Array.from(op.data))

                  // Save the offset for potential later reads
                  const offset = response.headers.get(STREAM_OFFSET_HEADER)
                  if (offset) {
                    savedOffsets.push(offset)
                  }
                } else if (op.type === `read`) {
                  // Full read from beginning - verify all data
                  const accumulated: Array<number> = []
                  let currentOffset: string | null = null
                  let iterations = 0

                  while (iterations < 100) {
                    iterations++

                    const url: string = currentOffset
                      ? `${getBaseUrl()}${streamPath}?offset=${encodeURIComponent(currentOffset)}`
                      : `${getBaseUrl()}${streamPath}`

                    const response: Response = await fetch(url, {
                      method: `GET`,
                    })
                    const buffer = await response.arrayBuffer()
                    const data = new Uint8Array(buffer)

                    if (data.length > 0) {
                      accumulated.push(...Array.from(data))
                    }

                    const nextOffset: string | null =
                      response.headers.get(STREAM_OFFSET_HEADER)
                    const upToDate = response.headers.get(
                      STREAM_UP_TO_DATE_HEADER
                    )

                    if (upToDate === `true` && data.length === 0) {
                      break
                    }

                    if (nextOffset === currentOffset) {
                      break
                    }

                    currentOffset = nextOffset
                  }

                  // Verify we read exactly what was appended
                  expect(accumulated.length).toBe(appendedData.length)
                  for (let i = 0; i < appendedData.length; i++) {
                    expect(accumulated[i]).toBe(appendedData[i])
                  }
                } else {
                  // Read from a previously saved offset (op.type === `readFromOffset`)
                  if (savedOffsets.length === 0) {
                    continue // No offsets saved yet
                  }

                  const offsetIdx = op.offsetIndex % savedOffsets.length
                  const offset = savedOffsets[offsetIdx]!

                  const response = await fetch(
                    `${getBaseUrl()}${streamPath}?offset=${encodeURIComponent(offset)}`,
                    { method: `GET` }
                  )
                  expect(response.status).toBe(200)

                  // Verify offset is monotonically increasing
                  const nextOffset = response.headers.get(STREAM_OFFSET_HEADER)
                  if (nextOffset) {
                    // Offsets should be lexicographically greater or equal
                    expect(nextOffset >= offset).toBe(true)
                  }
                }
              }

              return true
            }
          ),
          { numRuns: 15 }
        )
      })

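      // The property above is a small model-based test: `appendedData` is the
      // in-memory model of what the stream should contain, the server is the
      // system under test, and every full read must reproduce the model
      // byte-for-byte regardless of how appends and reads are interleaved.
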
      test(`offsets are always monotonically increasing`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate multiple chunks to append
            fc.array(fc.uint8Array({ minLength: 1, maxLength: 100 }), {
              minLength: 2,
              maxLength: 15,
            }),
            async (chunks) => {
              const streamPath = `/v1/stream/fc-monotonic-${Date.now()}-${Math.random().toString(36).slice(2)}`

              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `application/octet-stream` },
              })

              const offsets: Array<string> = []

              // Append all chunks and collect offsets
              for (const chunk of chunks) {
                const response = await fetch(`${getBaseUrl()}${streamPath}`, {
                  method: `POST`,
                  headers: { "Content-Type": `application/octet-stream` },
                  body: chunk,
                })

                const offset = response.headers.get(STREAM_OFFSET_HEADER)
                expect(offset).toBeDefined()
                offsets.push(offset!)
              }

              // Verify offsets are strictly increasing (lexicographically)
              for (let i = 1; i < offsets.length; i++) {
                expect(offsets[i]! > offsets[i - 1]!).toBe(true)
              }

              return true
            }
          ),
          { numRuns: 25 }
        )
      })

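      // Note that offsets are compared as plain strings here, so the protocol
      // is assumed to encode them in a form whose lexicographic order matches
      // append order (the same assumption the readFromOffset check above
      // relies on).
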
      test(`read-your-writes: data is immediately visible after append`, async () => {
        await fc.assert(
          fc.asyncProperty(
            fc.uint8Array({ minLength: 1, maxLength: 500 }),
            async (data) => {
              const streamPath = `/v1/stream/fc-ryw-${Date.now()}-${Math.random().toString(36).slice(2)}`

              // Create stream
              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `application/octet-stream` },
              })

              // Append data
              const appendResponse = await fetch(
                `${getBaseUrl()}${streamPath}`,
                {
                  method: `POST`,
                  headers: { "Content-Type": `application/octet-stream` },
                  body: data,
                }
              )
              expect([200, 204]).toContain(appendResponse.status)

              // Immediately read back
              const readResponse = await fetch(`${getBaseUrl()}${streamPath}`)
              expect(readResponse.status).toBe(200)

              const buffer = await readResponse.arrayBuffer()
              const result = new Uint8Array(buffer)

              // Must see the data we just wrote
              expect(result.length).toBe(data.length)
              for (let i = 0; i < data.length; i++) {
                expect(result[i]).toBe(data[i])
              }

              return true
            }
          ),
          { numRuns: 30 }
        )
      })
    })

    describe(`Immutability Properties`, () => {
      test(`data at offset never changes after additional appends`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Initial data and additional data to append
            fc.uint8Array({ minLength: 1, maxLength: 200 }),
            fc.array(fc.uint8Array({ minLength: 1, maxLength: 100 }), {
              minLength: 1,
              maxLength: 5,
            }),
            async (initialData, additionalChunks) => {
              const streamPath = `/v1/stream/fc-immutable-${Date.now()}-${Math.random().toString(36).slice(2)}`

              // Create and append initial data
              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `application/octet-stream` },
              })

              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `POST`,
                headers: { "Content-Type": `application/octet-stream` },
                body: initialData,
              })

              // Read and save the offset after initial data
              const initialRead = await fetch(`${getBaseUrl()}${streamPath}`)
              const initialBuffer = await initialRead.arrayBuffer()
              const initialResult = new Uint8Array(initialBuffer)

              // Append more data
              for (const chunk of additionalChunks) {
                await fetch(`${getBaseUrl()}${streamPath}`, {
                  method: `POST`,
                  headers: { "Content-Type": `application/octet-stream` },
                  body: chunk,
                })
              }

              // Read from beginning again - initial data should be unchanged
              const rereadResponse = await fetch(`${getBaseUrl()}${streamPath}`)
              const rereadBuffer = await rereadResponse.arrayBuffer()
              const rereadResult = new Uint8Array(rereadBuffer)

              // The initial data portion should be identical
              expect(rereadResult.length).toBeGreaterThanOrEqual(
                initialResult.length
              )
              for (let i = 0; i < initialResult.length; i++) {
                expect(rereadResult[i]).toBe(initialResult[i])
              }

              return true
            }
          ),
          { numRuns: 20 }
        )
      })
    })

    describe(`Offset Validation Properties`, () => {
      test(`should reject offsets with invalid characters`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate strings with at least one invalid character
            fc.oneof(
              // Strings with spaces
              fc.tuple(fc.string(), fc.string()).map(([a, b]) => `${a} ${b}`),
              // Strings with path traversal
              fc.string().map((s) => `../${s}`),
              fc.string().map((s) => `${s}/..`),
              // Strings with null bytes
              fc.string().map((s) => `${s}\u0000`),
              // Strings with newlines
              fc.string().map((s) => `${s}\n`),
              fc.string().map((s) => `${s}\r\n`),
              // Strings with commas
              fc.tuple(fc.string(), fc.string()).map(([a, b]) => `${a},${b}`),
              // Strings with slashes
              fc.tuple(fc.string(), fc.string()).map(([a, b]) => `${a}/${b}`)
            ),
            async (badOffset) => {
              const streamPath = `/v1/stream/fc-bad-offset-${Date.now()}-${Math.random().toString(36).slice(2)}`

              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `text/plain` },
                body: `test`,
              })

              const response = await fetch(
                `${getBaseUrl()}${streamPath}?offset=${encodeURIComponent(badOffset)}`,
                { method: `GET` }
              )

              // Should reject with 400
              expect(response.status).toBe(400)

              return true
            }
          ),
          { numRuns: 30 }
        )
      })
    })

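    /*
     * A plausible guard matching what the property above demands of a server
     * (illustrative only; the allowed character set is an assumption, not the
     * normative offset grammar): reject query offsets containing whitespace,
     * control characters, commas, slashes, or path traversal, e.g.
     *
     *   const isValidOffset = (offset: string): boolean =>
     *     /^[0-9A-Za-z_.~-]+$/.test(offset)
     *
     *   if (offsetParam !== null && !isValidOffset(offsetParam)) {
     *     return new Response(`invalid offset`, { status: 400 })
     *   }
     */
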
    describe(`Sequence Ordering Properties`, () => {
      test(`lexicographically ordered seq values are accepted`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate a sorted array of unique lexicographic strings
            fc
              .array(fc.stringMatching(/^[0-9a-zA-Z]+$/), {
                minLength: 2,
                maxLength: 10,
              })
              .map((arr) => [...new Set(arr)].sort())
              .filter((arr) => arr.length >= 2),
            async (seqValues) => {
              const streamPath = `/v1/stream/fc-seq-order-${Date.now()}-${Math.random().toString(36).slice(2)}`

              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `text/plain` },
              })

              // Append with each seq value in order
              for (const seq of seqValues) {
                const response = await fetch(`${getBaseUrl()}${streamPath}`, {
                  method: `POST`,
                  headers: {
                    "Content-Type": `text/plain`,
                    [STREAM_SEQ_HEADER]: seq,
                  },
                  body: `data-${seq}`,
                })
                expect([200, 204]).toContain(response.status)
              }

              return true
            }
          ),
          { numRuns: 20 }
        )
      })

      test(`out-of-order seq values are rejected`, async () => {
        await fc.assert(
          fc.asyncProperty(
            // Generate two strings where the first is lexicographically greater
            fc
              .tuple(
                fc.stringMatching(/^[0-9a-zA-Z]+$/),
                fc.stringMatching(/^[0-9a-zA-Z]+$/)
              )
              .filter(([a, b]) => a > b && a.length > 0 && b.length > 0),
            async ([firstSeq, secondSeq]) => {
              const streamPath = `/v1/stream/fc-seq-reject-${Date.now()}-${Math.random().toString(36).slice(2)}`

              await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `PUT`,
                headers: { "Content-Type": `text/plain` },
              })

              // First append with the larger seq value
              const response1 = await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `POST`,
                headers: {
                  "Content-Type": `text/plain`,
                  [STREAM_SEQ_HEADER]: firstSeq,
                },
                body: `first`,
              })
              expect([200, 204]).toContain(response1.status)

              // Second append with smaller seq should be rejected
              const response2 = await fetch(`${getBaseUrl()}${streamPath}`, {
                method: `POST`,
                headers: {
                  "Content-Type": `text/plain`,
                  [STREAM_SEQ_HEADER]: secondSeq,
                },
                body: `second`,
              })
              expect(response2.status).toBe(409)

              return true
            }
          ),
          { numRuns: 25 }
        )
      })
    })
  })
}