@prisma/streams-server 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/CODE_OF_CONDUCT.md +45 -0
  2. package/CONTRIBUTING.md +68 -0
  3. package/LICENSE +201 -0
  4. package/README.md +39 -2
  5. package/SECURITY.md +33 -0
  6. package/bin/prisma-streams-server +2 -0
  7. package/package.json +29 -34
  8. package/src/app.ts +74 -0
  9. package/src/app_core.ts +1983 -0
  10. package/src/app_local.ts +46 -0
  11. package/src/backpressure.ts +66 -0
  12. package/src/bootstrap.ts +239 -0
  13. package/src/config.ts +251 -0
  14. package/src/db/db.ts +1440 -0
  15. package/src/db/schema.ts +619 -0
  16. package/src/expiry_sweeper.ts +44 -0
  17. package/src/hist.ts +169 -0
  18. package/src/index/binary_fuse.ts +379 -0
  19. package/src/index/indexer.ts +745 -0
  20. package/src/index/run_cache.ts +84 -0
  21. package/src/index/run_format.ts +213 -0
  22. package/src/ingest.ts +655 -0
  23. package/src/lens/lens.ts +501 -0
  24. package/src/manifest.ts +114 -0
  25. package/src/memory.ts +155 -0
  26. package/src/metrics.ts +161 -0
  27. package/src/metrics_emitter.ts +50 -0
  28. package/src/notifier.ts +64 -0
  29. package/src/objectstore/interface.ts +13 -0
  30. package/src/objectstore/mock_r2.ts +269 -0
  31. package/src/objectstore/null.ts +32 -0
  32. package/src/objectstore/r2.ts +128 -0
  33. package/src/offset.ts +70 -0
  34. package/src/reader.ts +454 -0
  35. package/src/runtime/hash.ts +156 -0
  36. package/src/runtime/hash_vendor/LICENSE.hash-wasm +38 -0
  37. package/src/runtime/hash_vendor/NOTICE.md +8 -0
  38. package/src/runtime/hash_vendor/xxhash3.umd.min.cjs +7 -0
  39. package/src/runtime/hash_vendor/xxhash32.umd.min.cjs +7 -0
  40. package/src/runtime/hash_vendor/xxhash64.umd.min.cjs +7 -0
  41. package/src/schema/lens_schema.ts +290 -0
  42. package/src/schema/proof.ts +547 -0
  43. package/src/schema/registry.ts +405 -0
  44. package/src/segment/cache.ts +179 -0
  45. package/src/segment/format.ts +331 -0
  46. package/src/segment/segmenter.ts +326 -0
  47. package/src/segment/segmenter_worker.ts +43 -0
  48. package/src/segment/segmenter_workers.ts +94 -0
  49. package/src/server.ts +326 -0
  50. package/src/sqlite/adapter.ts +164 -0
  51. package/src/stats.ts +205 -0
  52. package/src/touch/engine.ts +41 -0
  53. package/src/touch/interpreter_worker.ts +459 -0
  54. package/src/touch/live_keys.ts +118 -0
  55. package/src/touch/live_metrics.ts +858 -0
  56. package/src/touch/live_templates.ts +619 -0
  57. package/src/touch/manager.ts +1341 -0
  58. package/src/touch/naming.ts +13 -0
  59. package/src/touch/routing_key_notifier.ts +275 -0
  60. package/src/touch/spec.ts +526 -0
  61. package/src/touch/touch_journal.ts +671 -0
  62. package/src/touch/touch_key_id.ts +20 -0
  63. package/src/touch/worker_pool.ts +189 -0
  64. package/src/touch/worker_protocol.ts +58 -0
  65. package/src/types/proper-lockfile.d.ts +1 -0
  66. package/src/uploader.ts +317 -0
  67. package/src/util/base32_crockford.ts +81 -0
  68. package/src/util/bloom256.ts +67 -0
  69. package/src/util/cleanup.ts +22 -0
  70. package/src/util/crc32c.ts +29 -0
  71. package/src/util/ds_error.ts +15 -0
  72. package/src/util/duration.ts +17 -0
  73. package/src/util/endian.ts +53 -0
  74. package/src/util/json_pointer.ts +148 -0
  75. package/src/util/log.ts +25 -0
  76. package/src/util/lru.ts +45 -0
  77. package/src/util/retry.ts +35 -0
  78. package/src/util/siphash.ts +71 -0
  79. package/src/util/stream_paths.ts +31 -0
  80. package/src/util/time.ts +14 -0
  81. package/src/util/yield.ts +3 -0
  82. package/build/index.d.mts +0 -1
  83. package/build/index.d.ts +0 -1
  84. package/build/index.js +0 -0
  85. package/build/index.mjs +0 -1
@@ -0,0 +1,1983 @@
1
+ import { mkdirSync } from "node:fs";
2
+ import type { Config } from "./config";
3
+ import { SqliteDurableStore, STREAM_FLAG_TOUCH } from "./db/db";
4
+ import { IngestQueue, type ProducerInfo, type AppendRow } from "./ingest";
5
+ import type { ObjectStore } from "./objectstore/interface";
6
+ import type { StreamReader, ReadBatch, ReaderError } from "./reader";
7
+ import { StreamNotifier } from "./notifier";
8
+ import { encodeOffset, parseOffsetResult, offsetToSeqOrNeg1, canonicalizeOffset, type ParsedOffset } from "./offset";
9
+ import { parseDurationMsResult } from "./util/duration";
10
+ import { Metrics } from "./metrics";
11
+ import { parseTimestampMsResult } from "./util/time";
12
+ import { cleanupTempSegments } from "./util/cleanup";
13
+ import { MetricsEmitter } from "./metrics_emitter";
14
+ import { SchemaRegistryStore, type SchemaRegistry, type SchemaRegistryMutationError, type SchemaRegistryReadError } from "./schema/registry";
15
+ import { resolvePointerResult } from "./util/json_pointer";
16
+ import { applyLensChainResult } from "./lens/lens";
17
+ import { ExpirySweeper } from "./expiry_sweeper";
18
+ import type { StatsCollector } from "./stats";
19
+ import { BackpressureGate } from "./backpressure";
20
+ import { MemoryGuard } from "./memory";
21
+ import { TouchInterpreterManager } from "./touch/manager";
22
+ import { isTouchEnabled } from "./touch/spec";
23
+ import { resolveTouchStreamName } from "./touch/naming";
24
+ import { RoutingKeyNotifier } from "./touch/routing_key_notifier";
25
+ import { parseTouchCursor } from "./touch/touch_journal";
26
+ import { touchKeyIdFromRoutingKeyResult } from "./touch/touch_key_id";
27
+ import { tableKeyIdFor, templateKeyIdFor } from "./touch/live_keys";
28
+ import type { SegmenterController } from "./segment/segmenter_workers";
29
+ import type { UploaderController } from "./uploader";
30
+ import type { IndexManager } from "./index/indexer";
31
+ import { Result } from "better-result";
32
+
33
+ function withNosniff(headers: HeadersInit = {}): HeadersInit {
34
+ return {
35
+ "x-content-type-options": "nosniff",
36
+ ...headers,
37
+ };
38
+ }
39
+
40
+ function json(status: number, body: any, headers: HeadersInit = {}): Response {
41
+ return new Response(JSON.stringify(body), {
42
+ status,
43
+ headers: {
44
+ "content-type": "application/json; charset=utf-8",
45
+ "cache-control": "no-store",
46
+ ...withNosniff(headers),
47
+ },
48
+ });
49
+ }
50
+
51
+ function internalError(message = "internal server error"): Response {
52
+ return json(500, { error: { code: "internal", message } });
53
+ }
54
+
55
+ function badRequest(msg: string): Response {
56
+ return json(400, { error: { code: "bad_request", message: msg } });
57
+ }
58
+
59
+ function notFound(msg = "not_found"): Response {
60
+ return json(404, { error: { code: "not_found", message: msg } });
61
+ }
62
+
63
+ function readerErrorResponse(err: ReaderError): Response {
64
+ if (err.kind === "not_found") return notFound();
65
+ if (err.kind === "gone") return notFound("stream expired");
66
+ if (err.kind === "internal") return internalError();
67
+ return badRequest(err.message);
68
+ }
69
+
70
+ function schemaMutationErrorResponse(err: SchemaRegistryMutationError): Response {
71
+ if (err.kind === "version_mismatch") return conflict(err.message);
72
+ return badRequest(err.message);
73
+ }
74
+
75
+ function schemaReadErrorResponse(_err: SchemaRegistryReadError): Response {
76
+ return internalError();
77
+ }
78
+
79
+ function conflict(msg: string, headers: HeadersInit = {}): Response {
80
+ return json(409, { error: { code: "conflict", message: msg } }, headers);
81
+ }
82
+
83
+ function tooLarge(msg: string): Response {
84
+ return json(413, { error: { code: "payload_too_large", message: msg } });
85
+ }
86
+
87
+ function normalizeContentType(value: string | null): string | null {
88
+ if (!value) return null;
89
+ const base = value.split(";")[0]?.trim().toLowerCase();
90
+ return base ? base : null;
91
+ }
92
+
93
+ function isJsonContentType(value: string | null): boolean {
94
+ return normalizeContentType(value) === "application/json";
95
+ }
96
+
97
+ function isTextContentType(value: string | null): boolean {
98
+ const norm = normalizeContentType(value);
99
+ return norm === "application/json" || (norm != null && norm.startsWith("text/"));
100
+ }
101
+
102
+ function parseStreamClosedHeader(value: string | null): boolean {
103
+ return value != null && value.trim().toLowerCase() === "true";
104
+ }
105
+
106
+ function parseStreamSeqHeader(value: string | null): Result<string | null, { message: string }> {
107
+ if (value == null) return Result.ok(null);
108
+ const v = value.trim();
109
+ if (v.length === 0) return Result.err({ message: "invalid Stream-Seq" });
110
+ return Result.ok(v);
111
+ }
112
+
113
+ function parseStreamTtlSeconds(value: string): Result<number, { message: string }> {
114
+ const s = value.trim();
115
+ if (/^(0|[1-9][0-9]*)$/.test(s)) return Result.ok(Number(s));
116
+ if (/^(0|[1-9][0-9]*)(ms|s|m|h|d)$/.test(s)) {
117
+ const msRes = parseDurationMsResult(s);
118
+ if (Result.isError(msRes)) return Result.err({ message: msRes.error.message });
119
+ const ms = msRes.value;
120
+ if (ms % 1000 !== 0) return Result.err({ message: "invalid Stream-TTL" });
121
+ return Result.ok(Math.floor(ms / 1000));
122
+ }
123
+ return Result.err({ message: "invalid Stream-TTL" });
124
+ }
125
+
126
+ function parseNonNegativeInt(value: string): number | null {
127
+ if (!/^[0-9]+$/.test(value)) return null;
128
+ const n = Number(value);
129
+ if (!Number.isFinite(n)) return null;
130
+ return n;
131
+ }
132
+
133
+ function splitSseLines(data: string): string[] {
134
+ if (data === "") return [""];
135
+ return data.split(/\r\n|\r|\n/);
136
+ }
137
+
138
+ function encodeSseEvent(eventType: string, data: string): string {
139
+ const lines = splitSseLines(data);
140
+ let out = `event: ${eventType}\n`;
141
+ for (const line of lines) {
142
+ out += `data:${line}\n`;
143
+ }
144
+ out += `\n`;
145
+ return out;
146
+ }
147
+
148
+ function computeCursor(nowMs: number, provided: string | null): string {
149
+ let cursor = Math.floor(nowMs / 1000);
150
+ if (provided && /^[0-9]+$/.test(provided)) {
151
+ const n = Number(provided);
152
+ if (Number.isFinite(n) && n >= cursor) cursor = n + 1;
153
+ }
154
+ return String(cursor);
155
+ }
156
+
157
+ function concatPayloads(parts: Uint8Array[]): Uint8Array {
158
+ let total = 0;
159
+ for (const p of parts) total += p.byteLength;
160
+ const out = new Uint8Array(total);
161
+ let off = 0;
162
+ for (const p of parts) {
163
+ out.set(p, off);
164
+ off += p.byteLength;
165
+ }
166
+ return out;
167
+ }
168
+
169
+ function keyBytesFromString(s: string | null): Uint8Array | null {
170
+ if (s == null) return null;
171
+ return new TextEncoder().encode(s);
172
+ }
173
+
174
+ function extractRoutingKey(reg: SchemaRegistry, value: any): Result<Uint8Array | null, { message: string }> {
175
+ if (!reg.routingKey) return Result.ok(null);
176
+ const { jsonPointer, required } = reg.routingKey;
177
+ const resolvedRes = resolvePointerResult(value, jsonPointer);
178
+ if (Result.isError(resolvedRes)) return Result.err({ message: resolvedRes.error.message });
179
+ const resolved = resolvedRes.value;
180
+ if (!resolved.exists) {
181
+ if (required) return Result.err({ message: "routing key missing" });
182
+ return Result.ok(null);
183
+ }
184
+ if (typeof resolved.value !== "string") return Result.err({ message: "routing key must be string" });
185
+ return Result.ok(keyBytesFromString(resolved.value));
186
+ }
187
+
188
+ function schemaVersionForOffset(reg: SchemaRegistry, offset: bigint): number {
189
+ if (!reg.boundaries || reg.boundaries.length === 0) return 0;
190
+ const off = Number(offset);
191
+ let version = 0;
192
+ for (const b of reg.boundaries) {
193
+ if (b.offset <= off) version = b.version;
194
+ else break;
195
+ }
196
+ return version;
197
+ }
198
+
199
/**
 * The assembled application: an HTTP entry point, a shutdown hook, and the
 * wired dependency graph, exposed via `deps` for composition and tests.
 */
export type App = {
  // Single HTTP handler for all routes (health, metrics, stream CRUD, touch, …).
  fetch: (req: Request) => Promise<Response>;
  // Synchronous shutdown hook.
  close: () => void;
  // The concrete dependencies the app was built with.
  deps: {
    config: Config;
    db: SqliteDurableStore;
    os: ObjectStore;
    ingest: IngestQueue;
    notifier: StreamNotifier;
    touchRoutingKeyNotifier: RoutingKeyNotifier;
    reader: StreamReader;
    segmenter: SegmenterController;
    uploader: UploaderController;
    indexer?: IndexManager;
    metrics: Metrics;
    registry: SchemaRegistryStore;
    touch: TouchInterpreterManager;
    stats?: StatsCollector;
    backpressure?: BackpressureGate;
    memory?: MemoryGuard;
  };
};
221
+
222
/**
 * Dependencies handed to the runtime factory (`CreateAppCoreOptions.createRuntime`)
 * after the core constructs the database, queue, notifiers, and guards.
 */
export type CreateAppRuntimeArgs = {
  config: Config;
  db: SqliteDurableStore;
  ingest: IngestQueue;
  notifier: StreamNotifier;
  touchRoutingKeyNotifier: RoutingKeyNotifier;
  registry: SchemaRegistryStore;
  touch: TouchInterpreterManager;
  stats?: StatsCollector;
  backpressure?: BackpressureGate;
  memory: MemoryGuard;
  metrics: Metrics;
};
235
+
236
/**
 * What a runtime factory must return: storage/read/segment/upload components,
 * a schema-registry upload hook, and a `start()` called during app bootstrap.
 */
type AppRuntimeDeps = {
  store: ObjectStore;
  reader: StreamReader;
  segmenter: SegmenterController;
  uploader: UploaderController;
  indexer?: IndexManager;
  // Persists a stream's schema registry to object storage.
  uploadSchemaRegistry: (stream: string, registry: SchemaRegistry) => Promise<void>;
  start(): void;
};
245
+
246
/**
 * Options for `createAppCore`: an optional stats collector plus the factory
 * that builds the environment-specific runtime (local vs. remote storage).
 */
export type CreateAppCoreOptions = {
  stats?: StatsCollector;
  createRuntime(args: CreateAppRuntimeArgs): AppRuntimeDeps;
};
250
+
251
+ export function createAppCore(cfg: Config, opts: CreateAppCoreOptions): App {
252
+ mkdirSync(cfg.rootDir, { recursive: true });
253
+ cleanupTempSegments(cfg.rootDir);
254
+
255
+ const db = new SqliteDurableStore(cfg.dbPath, { cacheBytes: cfg.sqliteCacheBytes });
256
+ db.resetSegmentInProgress();
257
+ const stats = opts.stats;
258
+ const backpressure =
259
+ cfg.localBacklogMaxBytes > 0
260
+ ? new BackpressureGate(cfg.localBacklogMaxBytes, db.sumPendingBytes() + db.sumPendingSegmentBytes())
261
+ : undefined;
262
+ const memory = new MemoryGuard(cfg.memoryLimitBytes, {
263
+ onSample: (rss, overLimit) => {
264
+ metrics.record("process.rss.bytes", rss, "bytes");
265
+ if (overLimit) metrics.record("process.rss.over_limit", 1, "count");
266
+ },
267
+ heapSnapshotPath: `${cfg.rootDir}/heap.heapsnapshot`,
268
+ });
269
+ memory.start();
270
+ const metrics = new Metrics();
271
+ const ingest = new IngestQueue(cfg, db, stats, backpressure, memory, metrics);
272
+ const notifier = new StreamNotifier();
273
+ const touchRoutingKeyNotifier = new RoutingKeyNotifier();
274
+ const registry = new SchemaRegistryStore(db);
275
+ const touch = new TouchInterpreterManager(cfg, db, ingest, notifier, registry, backpressure, touchRoutingKeyNotifier);
276
+ const runtime = opts.createRuntime({
277
+ config: cfg,
278
+ db,
279
+ ingest,
280
+ notifier,
281
+ touchRoutingKeyNotifier,
282
+ registry,
283
+ touch,
284
+ stats,
285
+ backpressure,
286
+ memory,
287
+ metrics,
288
+ });
289
+ const { store, reader, segmenter, uploader, indexer, uploadSchemaRegistry } = runtime;
290
+ const metricsEmitter = new MetricsEmitter(metrics, ingest, cfg.metricsFlushIntervalMs);
291
+ const expirySweeper = new ExpirySweeper(cfg, db);
292
+
293
+ db.ensureStream("__stream_metrics__", { contentType: "application/json" });
294
+ runtime.start();
295
+ metricsEmitter.start();
296
+ expirySweeper.start();
297
+ touch.start();
298
+
299
+ const buildJsonRows = (
300
+ stream: string,
301
+ bodyBytes: Uint8Array,
302
+ routingKeyHeader: string | null,
303
+ allowEmptyArray: boolean
304
+ ): Result<{ rows: AppendRow[] }, { status: 400 | 500; message: string }> => {
305
+ const regRes = registry.getRegistryResult(stream);
306
+ if (Result.isError(regRes)) {
307
+ return Result.err({ status: 500, message: regRes.error.message });
308
+ }
309
+ const reg = regRes.value;
310
+ const text = new TextDecoder().decode(bodyBytes);
311
+ let arr: any;
312
+ try {
313
+ arr = JSON.parse(text);
314
+ } catch {
315
+ return Result.err({ status: 400, message: "invalid JSON" });
316
+ }
317
+ if (!Array.isArray(arr)) arr = [arr];
318
+ if (arr.length === 0 && !allowEmptyArray) return Result.err({ status: 400, message: "empty JSON array" });
319
+ if (reg.routingKey && routingKeyHeader) {
320
+ return Result.err({ status: 400, message: "Stream-Key not allowed when routingKey is configured" });
321
+ }
322
+
323
+ const validator = reg.currentVersion > 0 ? registry.getValidatorForVersion(reg, reg.currentVersion) : null;
324
+ if (reg.currentVersion > 0 && !validator) {
325
+ return Result.err({ status: 500, message: "schema validator missing" });
326
+ }
327
+
328
+ const rows: AppendRow[] = [];
329
+ for (const v of arr) {
330
+ if (validator && !validator(v)) {
331
+ const msg = validator.errors ? validator.errors.map((e) => e.message).join("; ") : "schema validation failed";
332
+ return Result.err({ status: 400, message: msg });
333
+ }
334
+ const rkRes = reg.routingKey ? extractRoutingKey(reg, v) : Result.ok(keyBytesFromString(routingKeyHeader));
335
+ if (Result.isError(rkRes)) return Result.err({ status: 400, message: rkRes.error.message });
336
+ rows.push({
337
+ routingKey: rkRes.value,
338
+ contentType: "application/json",
339
+ payload: new TextEncoder().encode(JSON.stringify(v)),
340
+ });
341
+ }
342
+ return Result.ok({ rows });
343
+ };
344
+
345
+ const buildAppendRowsResult = (
346
+ stream: string,
347
+ bodyBytes: Uint8Array,
348
+ contentType: string,
349
+ routingKeyHeader: string | null,
350
+ allowEmptyJsonArray: boolean
351
+ ): Result<{ rows: AppendRow[] }, { status: 400 | 500; message: string }> => {
352
+ if (isJsonContentType(contentType)) {
353
+ return buildJsonRows(stream, bodyBytes, routingKeyHeader, allowEmptyJsonArray);
354
+ }
355
+ const regRes = registry.getRegistryResult(stream);
356
+ if (Result.isError(regRes)) return Result.err({ status: 500, message: regRes.error.message });
357
+ const reg = regRes.value;
358
+ if (reg.currentVersion > 0) return Result.err({ status: 400, message: "stream requires JSON" });
359
+ return Result.ok({
360
+ rows: [
361
+ {
362
+ routingKey: keyBytesFromString(routingKeyHeader),
363
+ contentType,
364
+ payload: bodyBytes,
365
+ },
366
+ ],
367
+ });
368
+ };
369
+
370
+ const enqueueAppend = (args: {
371
+ stream: string;
372
+ baseAppendMs: bigint;
373
+ rows: AppendRow[];
374
+ contentType: string | null;
375
+ close: boolean;
376
+ streamSeq?: string | null;
377
+ producer?: ProducerInfo | null;
378
+ }) =>
379
+ ingest.append({
380
+ stream: args.stream,
381
+ baseAppendMs: args.baseAppendMs,
382
+ rows: args.rows,
383
+ contentType: args.contentType,
384
+ streamSeq: args.streamSeq,
385
+ producer: args.producer,
386
+ close: args.close,
387
+ });
388
+
389
+ const recordAppendOutcome = (args: {
390
+ stream: string;
391
+ lastOffset: bigint;
392
+ appendedRows: number;
393
+ metricsBytes: number;
394
+ ingestedBytes: number;
395
+ touched: boolean;
396
+ closed: boolean;
397
+ }): void => {
398
+ if (args.appendedRows > 0) {
399
+ metrics.recordAppend(args.metricsBytes, args.appendedRows);
400
+ notifier.notify(args.stream, args.lastOffset);
401
+ touch.notify(args.stream);
402
+ }
403
+ if (stats) {
404
+ if (args.touched) stats.recordStreamTouched(args.stream);
405
+ if (args.appendedRows > 0) stats.recordIngested(args.ingestedBytes);
406
+ }
407
+ if (args.closed) notifier.notifyClose(args.stream);
408
+ };
409
+
410
+ const decodeJsonRecords = (
411
+ stream: string,
412
+ records: Array<{ offset: bigint; payload: Uint8Array }>
413
+ ): Result<{ values: any[] }, { status: 400 | 500; message: string }> => {
414
+ const regRes = registry.getRegistryResult(stream);
415
+ if (Result.isError(regRes)) return Result.err({ status: 500, message: regRes.error.message });
416
+ const reg = regRes.value;
417
+ const values: any[] = [];
418
+ for (const r of records) {
419
+ try {
420
+ const s = new TextDecoder().decode(r.payload);
421
+ let value: any = JSON.parse(s);
422
+ if (reg.currentVersion > 0) {
423
+ const version = schemaVersionForOffset(reg, r.offset);
424
+ if (version < reg.currentVersion) {
425
+ const chainRes = registry.getLensChainResult(reg, version, reg.currentVersion);
426
+ if (Result.isError(chainRes)) return Result.err({ status: 500, message: chainRes.error.message });
427
+ const chain = chainRes.value;
428
+ const transformedRes = applyLensChainResult(chain, value);
429
+ if (Result.isError(transformedRes)) return Result.err({ status: 400, message: transformedRes.error.message });
430
+ value = transformedRes.value;
431
+ }
432
+ }
433
+ values.push(value);
434
+ } catch (e: any) {
435
+ return Result.err({ status: 400, message: String(e?.message ?? e) });
436
+ }
437
+ }
438
+ return Result.ok({ values });
439
+ };
440
+
441
+ let closing = false;
442
+ const fetch = async (req: Request): Promise<Response> => {
443
+ if (closing) {
444
+ return json(503, { error: { code: "unavailable", message: "server shutting down" } });
445
+ }
446
+ try {
447
+ let url: URL;
448
+ try {
449
+ url = new URL(req.url, "http://localhost");
450
+ } catch {
451
+ return badRequest("invalid url");
452
+ }
453
+ const path = url.pathname;
454
+
455
+ if (path === "/health") {
456
+ return json(200, { ok: true });
457
+ }
458
+ if (path === "/metrics") {
459
+ return json(200, metrics.snapshot());
460
+ }
461
+
462
+ const rejectIfMemoryLimited = (): Response | null => {
463
+ if (!memory || memory.shouldAllow()) return null;
464
+ memory.maybeGc("memory limit");
465
+ memory.maybeHeapSnapshot("memory limit");
466
+ metrics.record("tieredstore.backpressure.over_limit", 1, "count", { reason: "memory" });
467
+ return json(429, { error: { code: "overloaded", message: "ingest queue full" } });
468
+ };
469
+
470
+ // /v1/streams
471
+ if (req.method === "GET" && path === "/v1/streams") {
472
+ const limit = Number(url.searchParams.get("limit") ?? "100");
473
+ const offset = Number(url.searchParams.get("offset") ?? "0");
474
+ const rows = db.listStreams(Math.max(0, Math.min(limit, 1000)), Math.max(0, offset));
475
+ const out = rows.map((r) => ({
476
+ name: r.stream,
477
+ created_at: new Date(Number(r.created_at_ms)).toISOString(),
478
+ expires_at: r.expires_at_ms == null ? null : new Date(Number(r.expires_at_ms)).toISOString(),
479
+ epoch: r.epoch,
480
+ next_offset: r.next_offset.toString(),
481
+ sealed_through: r.sealed_through.toString(),
482
+ uploaded_through: r.uploaded_through.toString(),
483
+ }));
484
+ return json(200, out);
485
+ }
486
+
487
+ // /v1/stream/:name[/_schema] (accept encoded or raw slashes in name)
488
+ const streamPrefix = "/v1/stream/";
489
+ if (path.startsWith(streamPrefix)) {
490
+ const rawRest = path.slice(streamPrefix.length);
491
+ const rest = rawRest.replace(/\/+$/, "");
492
+ if (rest.length === 0) return badRequest("missing stream name");
493
+ const segments = rest.split("/");
494
+ let isSchema = false;
495
+ let pathKeyParam: string | null = null;
496
+ let touchMode:
497
+ | null
498
+ | { kind: "read"; key: string | null }
499
+ | { kind: "meta" }
500
+ | { kind: "wait" }
501
+ | { kind: "templates_activate" } = null;
502
+ if (segments[segments.length - 1] === "_schema") {
503
+ isSchema = true;
504
+ segments.pop();
505
+ } else if (
506
+ segments.length >= 3 &&
507
+ segments[segments.length - 3] === "touch" &&
508
+ segments[segments.length - 2] === "templates" &&
509
+ segments[segments.length - 1] === "activate"
510
+ ) {
511
+ touchMode = { kind: "templates_activate" };
512
+ segments.splice(segments.length - 3, 3);
513
+ } else if (segments.length >= 2 && segments[segments.length - 2] === "touch" && segments[segments.length - 1] === "meta") {
514
+ touchMode = { kind: "meta" };
515
+ segments.splice(segments.length - 2, 2);
516
+ } else if (segments.length >= 2 && segments[segments.length - 2] === "touch" && segments[segments.length - 1] === "wait") {
517
+ touchMode = { kind: "wait" };
518
+ segments.splice(segments.length - 2, 2);
519
+ } else if (segments.length >= 3 && segments[segments.length - 3] === "touch" && segments[segments.length - 2] === "pk") {
520
+ touchMode = { kind: "read", key: decodeURIComponent(segments[segments.length - 1]) };
521
+ segments.splice(segments.length - 3, 3);
522
+ } else if (segments[segments.length - 1] === "touch") {
523
+ touchMode = { kind: "read", key: null };
524
+ segments.pop();
525
+ } else if (segments.length >= 2 && segments[segments.length - 2] === "pk") {
526
+ pathKeyParam = decodeURIComponent(segments[segments.length - 1]);
527
+ segments.splice(segments.length - 2, 2);
528
+ }
529
+ const streamPart = segments.join("/");
530
+ if (streamPart.length === 0) return badRequest("missing stream name");
531
+ const stream = decodeURIComponent(streamPart);
532
+
533
+ if (isSchema) {
534
+ const srow = db.getStream(stream);
535
+ if (!srow || db.isDeleted(srow)) return notFound();
536
+ if (srow.expires_at_ms != null && db.nowMs() > srow.expires_at_ms) return notFound("stream expired");
537
+
538
+ if (req.method === "GET") {
539
+ const regRes = registry.getRegistryResult(stream);
540
+ if (Result.isError(regRes)) return schemaReadErrorResponse(regRes.error);
541
+ return json(200, regRes.value);
542
+ }
543
+ if (req.method === "POST") {
544
+ let body: any;
545
+ try {
546
+ body = await req.json();
547
+ } catch {
548
+ return badRequest("schema update must be valid JSON");
549
+ }
550
+ // Accept incremental update shape ({schema, lens, routingKey}),
551
+ // full registry payload ({schemas, lenses, currentVersion, ...}),
552
+ // and routingKey-only updates (used by the Bluesky demo).
553
+ let update = body;
554
+ const isSchemaObject =
555
+ update &&
556
+ (update.schema === true ||
557
+ update.schema === false ||
558
+ (typeof update.schema === "object" && update.schema !== null && !Array.isArray(update.schema)));
559
+ if (!isSchemaObject && update && typeof update === "object" && update.schemas && typeof update.schemas === "object") {
560
+ const versions = Object.keys(update.schemas)
561
+ .map((v) => Number(v))
562
+ .filter((v) => Number.isFinite(v) && v >= 0);
563
+ const currentVersion =
564
+ typeof update.currentVersion === "number" && Number.isFinite(update.currentVersion)
565
+ ? update.currentVersion
566
+ : versions.length > 0
567
+ ? Math.max(...versions)
568
+ : null;
569
+ if (currentVersion != null) {
570
+ const schema = update.schemas[String(currentVersion)];
571
+ const lens =
572
+ update.lens ??
573
+ (update.lenses && typeof update.lenses === "object" ? update.lenses[String(currentVersion - 1)] : undefined);
574
+ update = {
575
+ schema,
576
+ lens,
577
+ routingKey: update.routingKey,
578
+ interpreter: (update as any).interpreter,
579
+ };
580
+ }
581
+ }
582
+ if (update && typeof update === "object") {
583
+ if (update.schema === null) {
584
+ delete update.schema;
585
+ }
586
+ if (update.routingKey === undefined) {
587
+ const raw = update as any;
588
+ const candidate =
589
+ raw.routing_key ?? raw.routingKeyPointer ?? raw.routing_key_pointer ?? raw.routingKey;
590
+ if (typeof candidate === "string") {
591
+ update.routingKey = { jsonPointer: candidate, required: true };
592
+ } else if (candidate && typeof candidate === "object") {
593
+ const jsonPointer = candidate.jsonPointer ?? candidate.json_pointer;
594
+ if (typeof jsonPointer === "string") {
595
+ update.routingKey = {
596
+ jsonPointer,
597
+ required: typeof candidate.required === "boolean" ? candidate.required : true,
598
+ };
599
+ }
600
+ }
601
+ } else if (update.routingKey && typeof update.routingKey === "object") {
602
+ const rk = update.routingKey as any;
603
+ if (rk.jsonPointer === undefined && typeof rk.json_pointer === "string") {
604
+ update.routingKey = {
605
+ jsonPointer: rk.json_pointer,
606
+ required: typeof rk.required === "boolean" ? rk.required : true,
607
+ };
608
+ }
609
+ }
610
+ }
611
+ if (update.schema === undefined && update.routingKey !== undefined && update.interpreter === undefined) {
612
+ const regRes = registry.updateRoutingKeyResult(stream, update.routingKey ?? null);
613
+ if (Result.isError(regRes)) return schemaMutationErrorResponse(regRes.error);
614
+ try {
615
+ await uploadSchemaRegistry(stream, regRes.value);
616
+ } catch {
617
+ return json(500, { error: { code: "internal", message: "schema upload failed" } });
618
+ }
619
+ return json(200, regRes.value);
620
+ }
621
+ if (update.schema === undefined && update.interpreter !== undefined && update.routingKey === undefined) {
622
+ const regRes = registry.updateInterpreterResult(stream, update.interpreter ?? null);
623
+ if (Result.isError(regRes)) return schemaMutationErrorResponse(regRes.error);
624
+ try {
625
+ await uploadSchemaRegistry(stream, regRes.value);
626
+ } catch {
627
+ return json(500, { error: { code: "internal", message: "schema upload failed" } });
628
+ }
629
+ return json(200, regRes.value);
630
+ }
631
+ if (update.schema === undefined && update.routingKey !== undefined && update.interpreter !== undefined) {
632
+ // Apply both updates, reusing the same endpoint semantics.
633
+ const routingRes = registry.updateRoutingKeyResult(stream, update.routingKey ?? null);
634
+ if (Result.isError(routingRes)) return schemaMutationErrorResponse(routingRes.error);
635
+ const interpreterRes = registry.updateInterpreterResult(stream, update.interpreter ?? null);
636
+ if (Result.isError(interpreterRes)) return schemaMutationErrorResponse(interpreterRes.error);
637
+ try {
638
+ await uploadSchemaRegistry(stream, interpreterRes.value);
639
+ } catch {
640
+ return json(500, { error: { code: "internal", message: "schema upload failed" } });
641
+ }
642
+ return json(200, interpreterRes.value);
643
+ }
644
+ const regRes = registry.updateRegistryResult(stream, srow, update);
645
+ if (Result.isError(regRes)) return schemaMutationErrorResponse(regRes.error);
646
+ try {
647
+ await uploadSchemaRegistry(stream, regRes.value);
648
+ } catch {
649
+ return json(500, { error: { code: "internal", message: "schema upload failed" } });
650
+ }
651
+ return json(200, regRes.value);
652
+ }
653
+ return badRequest("unsupported method");
654
+ }
655
+
656
+ if (touchMode) {
657
+ const srow = db.getStream(stream);
658
+ if (!srow || db.isDeleted(srow)) return notFound();
659
+ if (srow.expires_at_ms != null && db.nowMs() > srow.expires_at_ms) return notFound("stream expired");
660
+
661
+ const regRes = registry.getRegistryResult(stream);
662
+ if (Result.isError(regRes)) return schemaReadErrorResponse(regRes.error);
663
+ const reg = regRes.value;
664
+ if (!isTouchEnabled(reg.interpreter)) return notFound("touch not enabled");
665
+
666
+ const touchCfg = reg.interpreter.touch;
667
+ const touchStorage = touchCfg.storage ?? "memory";
668
+ const derived = resolveTouchStreamName(stream, touchCfg);
669
+
670
+ const ensureTouchStream = (): Result<void, { kind: "touch_stream_content_type_mismatch"; message: string }> => {
671
+ const existing = db.getStream(derived);
672
+ if (existing) {
673
+ if (String(existing.content_type) !== "application/json") {
674
+ return Result.err({
675
+ kind: "touch_stream_content_type_mismatch",
676
+ message: `touch stream content-type mismatch: ${existing.content_type}`,
677
+ });
678
+ }
679
+ if ((existing.stream_flags & STREAM_FLAG_TOUCH) === 0) db.addStreamFlags(derived, STREAM_FLAG_TOUCH);
680
+ return Result.ok(undefined);
681
+ }
682
+ db.ensureStream(derived, { contentType: "application/json", streamFlags: STREAM_FLAG_TOUCH });
683
+ return Result.ok(undefined);
684
+ };
685
+
686
+ if (touchMode.kind === "templates_activate") {
687
+ if (req.method !== "POST") return badRequest("unsupported method");
688
+ let body: any;
689
+ try {
690
+ body = await req.json();
691
+ } catch {
692
+ return badRequest("activate body must be valid JSON");
693
+ }
694
+ const templatesRaw = body?.templates;
695
+ if (!Array.isArray(templatesRaw) || templatesRaw.length === 0) {
696
+ return badRequest("activate.templates must be a non-empty array");
697
+ }
698
+ if (templatesRaw.length > 256) return badRequest("activate.templates too large (max 256)");
699
+
700
+ const ttlRaw = body?.inactivityTtlMs;
701
+ const inactivityTtlMs =
702
+ ttlRaw === undefined
703
+ ? touchCfg.templates?.defaultInactivityTtlMs ?? 60 * 60 * 1000
704
+ : typeof ttlRaw === "number" && Number.isFinite(ttlRaw) && ttlRaw >= 0
705
+ ? Math.floor(ttlRaw)
706
+ : null;
707
+ if (inactivityTtlMs == null) return badRequest("activate.inactivityTtlMs must be a non-negative number (ms)");
708
+
709
+ const templates: Array<{ entity: string; fields: Array<{ name: string; encoding: any }> }> = [];
710
+ for (const t of templatesRaw) {
711
+ const entity = typeof t?.entity === "string" ? t.entity.trim() : "";
712
+ const fieldsRaw = t?.fields;
713
+ if (entity === "" || !Array.isArray(fieldsRaw) || fieldsRaw.length === 0 || fieldsRaw.length > 3) continue;
714
+ const fields: Array<{ name: string; encoding: any }> = [];
715
+ for (const f of fieldsRaw) {
716
+ const name = typeof f?.name === "string" ? f.name.trim() : "";
717
+ const encoding = f?.encoding;
718
+ if (name === "") continue;
719
+ fields.push({ name, encoding });
720
+ }
721
+ if (fields.length !== fieldsRaw.length) continue;
722
+ templates.push({ entity, fields });
723
+ }
724
+ if (templates.length !== templatesRaw.length) return badRequest("activate.templates contains invalid template definitions");
725
+
726
+ const limits = {
727
+ maxActiveTemplatesPerStream: touchCfg.templates?.maxActiveTemplatesPerStream ?? 2048,
728
+ maxActiveTemplatesPerEntity: touchCfg.templates?.maxActiveTemplatesPerEntity ?? 256,
729
+ };
730
+
731
+ let activeFromTouchOffset: string;
732
+ if (touchStorage === "sqlite") {
733
+ const touchRes = ensureTouchStream();
734
+ if (Result.isError(touchRes)) return conflict(touchRes.error.message);
735
+ const trow = db.getStream(derived)!;
736
+ const tailSeq = trow.next_offset - 1n;
737
+ activeFromTouchOffset = encodeOffset(trow.epoch, tailSeq);
738
+ } else {
739
+ activeFromTouchOffset = touch.getOrCreateJournal(derived, touchCfg).getCursor();
740
+ }
741
+
742
+ const res = touch.activateTemplates({
743
+ stream,
744
+ touchCfg,
745
+ baseStreamNextOffset: srow.next_offset,
746
+ activeFromTouchOffset,
747
+ templates,
748
+ inactivityTtlMs,
749
+ });
750
+
751
+ return json(200, { activated: res.activated, denied: res.denied, limits });
752
+ }
753
+
754
+ if (touchMode.kind === "meta") {
755
+ if (req.method !== "GET") return badRequest("unsupported method");
756
+ let activeTemplates = 0;
757
+ try {
758
+ const row = db.db.query(`SELECT COUNT(*) as cnt FROM live_templates WHERE stream=? AND state='active';`).get(stream) as any;
759
+ activeTemplates = Number(row?.cnt ?? 0);
760
+ } catch {
761
+ activeTemplates = 0;
762
+ }
763
+ if (touchStorage === "sqlite") {
764
+ const touchRes = ensureTouchStream();
765
+ if (Result.isError(touchRes)) return conflict(touchRes.error.message);
766
+ const trow = db.getStream(derived)!;
767
+ const tailSeq = trow.next_offset - 1n;
768
+ const currentTouchOffset = encodeOffset(trow.epoch, tailSeq);
769
+ const oldestSeq = db.getWalOldestOffset(derived);
770
+ const oldestCursorSeq = oldestSeq == null ? -1n : oldestSeq - 1n;
771
+ const oldestAvailableTouchOffset = encodeOffset(trow.epoch, oldestCursorSeq);
772
+ const clampBigInt = (v: bigint): number => {
773
+ if (v <= 0n) return 0;
774
+ const max = BigInt(Number.MAX_SAFE_INTEGER);
775
+ return v > max ? Number.MAX_SAFE_INTEGER : Number(v);
776
+ };
777
+ return json(200, {
778
+ mode: "sqlite",
779
+ currentTouchOffset,
780
+ oldestAvailableTouchOffset,
781
+ coarseIntervalMs: touchCfg.coarseIntervalMs ?? 100,
782
+ touchCoalesceWindowMs: touchCfg.touchCoalesceWindowMs ?? 100,
783
+ touchRetentionMs: touchCfg.retention?.maxAgeMs ?? null,
784
+ activeTemplates,
785
+ touchWalRetainedRows: clampBigInt(trow.wal_rows),
786
+ touchWalRetainedBytes: clampBigInt(trow.wal_bytes),
787
+ });
788
+ }
789
+ const meta = touch.getOrCreateJournal(derived, touchCfg).getMeta();
790
+ const runtime = touch.getTouchRuntimeSnapshot({ stream, touchCfg });
791
+ const interp = db.getStreamInterpreter(stream);
792
+ return json(200, {
793
+ ...meta,
794
+ coarseIntervalMs: touchCfg.coarseIntervalMs ?? 100,
795
+ touchCoalesceWindowMs: touchCfg.touchCoalesceWindowMs ?? 100,
796
+ touchRetentionMs: null,
797
+ activeTemplates,
798
+ lagSourceOffsets: runtime.lagSourceOffsets,
799
+ touchMode: runtime.touchMode,
800
+ walScannedThrough: interp ? encodeOffset(srow.epoch, interp.interpreted_through) : null,
801
+ bucketMaxSourceOffsetSeq: meta.bucketMaxSourceOffsetSeq,
802
+ hotFineKeys: runtime.hotFineKeys,
803
+ hotTemplates: runtime.hotTemplates,
804
+ hotFineKeysActive: runtime.hotFineKeysActive,
805
+ hotFineKeysGrace: runtime.hotFineKeysGrace,
806
+ hotTemplatesActive: runtime.hotTemplatesActive,
807
+ hotTemplatesGrace: runtime.hotTemplatesGrace,
808
+ fineWaitersActive: runtime.fineWaitersActive,
809
+ coarseWaitersActive: runtime.coarseWaitersActive,
810
+ broadFineWaitersActive: runtime.broadFineWaitersActive,
811
+ hotKeyFilteringEnabled: runtime.hotKeyFilteringEnabled,
812
+ hotTemplateFilteringEnabled: runtime.hotTemplateFilteringEnabled,
813
+ scanRowsTotal: runtime.scanRowsTotal,
814
+ scanBatchesTotal: runtime.scanBatchesTotal,
815
+ scannedButEmitted0BatchesTotal: runtime.scannedButEmitted0BatchesTotal,
816
+ interpretedThroughDeltaTotal: runtime.interpretedThroughDeltaTotal,
817
+ touchesEmittedTotal: runtime.touchesEmittedTotal,
818
+ touchesTableTotal: runtime.touchesTableTotal,
819
+ touchesTemplateTotal: runtime.touchesTemplateTotal,
820
+ fineTouchesDroppedDueToBudgetTotal: runtime.fineTouchesDroppedDueToBudgetTotal,
821
+ fineTouchesSkippedColdTemplateTotal: runtime.fineTouchesSkippedColdTemplateTotal,
822
+ fineTouchesSkippedColdKeyTotal: runtime.fineTouchesSkippedColdKeyTotal,
823
+ fineTouchesSkippedTemplateBucketTotal: runtime.fineTouchesSkippedTemplateBucketTotal,
824
+ waitTouchedTotal: runtime.waitTouchedTotal,
825
+ waitTimeoutTotal: runtime.waitTimeoutTotal,
826
+ waitStaleTotal: runtime.waitStaleTotal,
827
+ journalFlushesTotal: runtime.journalFlushesTotal,
828
+ journalNotifyWakeupsTotal: runtime.journalNotifyWakeupsTotal,
829
+ journalNotifyWakeMsTotal: runtime.journalNotifyWakeMsTotal,
830
+ journalNotifyWakeMsMax: runtime.journalNotifyWakeMsMax,
831
+ journalTimeoutsFiredTotal: runtime.journalTimeoutsFiredTotal,
832
+ journalTimeoutSweepMsTotal: runtime.journalTimeoutSweepMsTotal,
833
+ });
834
+ }
835
+
836
+ if (touchMode.kind === "wait") {
837
+ if (req.method !== "POST") return badRequest("unsupported method");
838
+ const waitStartMs = Date.now();
839
+ let body: any;
840
+ try {
841
+ body = await req.json();
842
+ } catch {
843
+ return badRequest("wait body must be valid JSON");
844
+ }
845
+ const keysRaw = body?.keys;
846
+ const cursorRaw = body?.cursor;
847
+ const sinceRaw = body?.sinceTouchOffset;
848
+ const timeoutMsRaw = body?.timeoutMs;
849
+ if (keysRaw !== undefined && (!Array.isArray(keysRaw) || !keysRaw.every((k: any) => typeof k === "string" && k.trim() !== ""))) {
850
+ return badRequest("wait.keys must be a non-empty string array when provided");
851
+ }
852
+ const keys = Array.isArray(keysRaw) ? Array.from(new Set(keysRaw.map((k: string) => k.trim()))) : [];
853
+ if (keys.length > 1024) return badRequest("wait.keys too large (max 1024)");
854
+ const keyIdsRaw = body?.keyIds;
855
+ const keyIds =
856
+ Array.isArray(keyIdsRaw) && keyIdsRaw.length > 0
857
+ ? Array.from(
858
+ new Set(
859
+ keyIdsRaw.map((x: any) => Number(x)).filter((n: number) => Number.isFinite(n) && Number.isInteger(n) && n >= 0 && n <= 0xffffffff)
860
+ )
861
+ ).map((n) => n >>> 0)
862
+ : [];
863
+ if (Array.isArray(keyIdsRaw) && keyIds.length !== keyIdsRaw.length) {
864
+ return badRequest("wait.keyIds must be a non-empty uint32 array when provided");
865
+ }
866
+ if (keys.length === 0 && keyIds.length === 0) return badRequest("wait requires keys or keyIds");
867
+ if (keyIds.length > 1024) return badRequest("wait.keyIds too large (max 1024)");
868
+ if (touchStorage === "sqlite" && keys.length === 0) {
869
+ return badRequest("wait.keys must be a non-empty string array in sqlite touch storage mode");
870
+ }
871
+ const cursorOrSince = typeof cursorRaw === "string" && cursorRaw.trim() !== "" ? cursorRaw : sinceRaw;
872
+ if (typeof cursorOrSince !== "string" || cursorOrSince.trim() === "") {
873
+ return badRequest(touchStorage === "memory" ? "wait.cursor must be a non-empty string" : "wait.sinceTouchOffset must be a non-empty string");
874
+ }
875
+
876
+ const timeoutMs =
877
+ timeoutMsRaw === undefined ? 30_000 : typeof timeoutMsRaw === "number" && Number.isFinite(timeoutMsRaw) ? Math.max(0, Math.min(120_000, timeoutMsRaw)) : null;
878
+ if (timeoutMs == null) return badRequest("wait.timeoutMs must be a number (ms)");
879
+
880
+ const templateIdsUsedRaw = body?.templateIdsUsed;
881
+ if (Array.isArray(templateIdsUsedRaw) && !templateIdsUsedRaw.every((x: any) => typeof x === "string" && x.trim() !== "")) {
882
+ return badRequest("wait.templateIdsUsed must be a string array");
883
+ }
884
+ const templateIdsUsed =
885
+ Array.isArray(templateIdsUsedRaw) && templateIdsUsedRaw.length > 0
886
+ ? Array.from(new Set(templateIdsUsedRaw.map((s: any) => (typeof s === "string" ? s.trim() : "")).filter((s: string) => s !== "")))
887
+ : [];
888
+ const interestModeRaw = body?.interestMode;
889
+ if (interestModeRaw !== undefined && interestModeRaw !== "fine" && interestModeRaw !== "coarse") {
890
+ return badRequest("wait.interestMode must be 'fine' or 'coarse'");
891
+ }
892
+ const interestMode: "fine" | "coarse" = interestModeRaw === "coarse" ? "coarse" : "fine";
893
+
894
+ if (interestMode === "fine" && templateIdsUsed.length > 0) {
895
+ touch.heartbeatTemplates({ stream, touchCfg, templateIdsUsed });
896
+ }
897
+
898
+ const declareTemplatesRaw = body?.declareTemplates;
899
+ if (Array.isArray(declareTemplatesRaw) && declareTemplatesRaw.length > 0) {
900
+ if (declareTemplatesRaw.length > 256) return badRequest("wait.declareTemplates too large (max 256)");
901
+ const ttlRaw = body?.inactivityTtlMs;
902
+ const inactivityTtlMs =
903
+ ttlRaw === undefined
904
+ ? touchCfg.templates?.defaultInactivityTtlMs ?? 60 * 60 * 1000
905
+ : typeof ttlRaw === "number" && Number.isFinite(ttlRaw) && ttlRaw >= 0
906
+ ? Math.floor(ttlRaw)
907
+ : null;
908
+ if (inactivityTtlMs == null) return badRequest("wait.inactivityTtlMs must be a non-negative number (ms)");
909
+
910
+ const templates: Array<{ entity: string; fields: Array<{ name: string; encoding: any }> }> = [];
911
+ for (const t of declareTemplatesRaw) {
912
+ const entity = typeof t?.entity === "string" ? t.entity.trim() : "";
913
+ const fieldsRaw = t?.fields;
914
+ if (entity === "" || !Array.isArray(fieldsRaw) || fieldsRaw.length === 0 || fieldsRaw.length > 3) continue;
915
+ const fields: Array<{ name: string; encoding: any }> = [];
916
+ for (const f of fieldsRaw) {
917
+ const name = typeof f?.name === "string" ? f.name.trim() : "";
918
+ const encoding = f?.encoding;
919
+ if (name === "") continue;
920
+ fields.push({ name, encoding });
921
+ }
922
+ if (fields.length !== fieldsRaw.length) continue;
923
+ templates.push({ entity, fields });
924
+ }
925
+ if (templates.length !== declareTemplatesRaw.length) return badRequest("wait.declareTemplates contains invalid template definitions");
926
+ let activeFromTouchOffset: string;
927
+ if (touchStorage === "sqlite") {
928
+ const touchRes = ensureTouchStream();
929
+ if (Result.isError(touchRes)) return conflict(touchRes.error.message);
930
+ const trow = db.getStream(derived)!;
931
+ const tailSeq = trow.next_offset - 1n;
932
+ activeFromTouchOffset = encodeOffset(trow.epoch, tailSeq);
933
+ } else {
934
+ activeFromTouchOffset = touch.getOrCreateJournal(derived, touchCfg).getCursor();
935
+ }
936
+ touch.activateTemplates({
937
+ stream,
938
+ touchCfg,
939
+ baseStreamNextOffset: srow.next_offset,
940
+ activeFromTouchOffset,
941
+ templates,
942
+ inactivityTtlMs,
943
+ });
944
+ }
945
+
946
+ if (touchStorage === "memory") {
947
+ const j = touch.getOrCreateJournal(derived, touchCfg);
948
+ const runtime = touch.getTouchRuntimeSnapshot({ stream, touchCfg });
949
+ let rawFineKeyIds = keyIds;
950
+ if (keyIds.length === 0) {
951
+ const parsedKeyIds: number[] = [];
952
+ for (const key of keys) {
953
+ const keyIdRes = touchKeyIdFromRoutingKeyResult(key);
954
+ if (Result.isError(keyIdRes)) return internalError();
955
+ parsedKeyIds.push(keyIdRes.value);
956
+ }
957
+ rawFineKeyIds = parsedKeyIds;
958
+ }
959
+ const templateWaitKeyIds =
960
+ templateIdsUsed.length > 0
961
+ ? Array.from(new Set(templateIdsUsed.map((templateId) => templateKeyIdFor(templateId) >>> 0)))
962
+ : [];
963
+ let waitKeyIds = rawFineKeyIds;
964
+ let effectiveWaitKind: "fineKey" | "templateKey" | "tableKey" = "fineKey";
965
+
966
+ if (interestMode === "coarse") {
967
+ effectiveWaitKind = "tableKey";
968
+ } else if (runtime.touchMode === "restricted" && templateIdsUsed.length > 0) {
969
+ effectiveWaitKind = "templateKey";
970
+ } else if (runtime.touchMode === "coarseOnly" && templateIdsUsed.length > 0) {
971
+ effectiveWaitKind = "tableKey";
972
+ }
973
+
974
+ if (effectiveWaitKind === "templateKey") {
975
+ waitKeyIds = templateWaitKeyIds;
976
+ } else if (effectiveWaitKind === "tableKey") {
977
+ if (templateIdsUsed.length > 0) {
978
+ const entities = touch.resolveTemplateEntitiesForWait({ stream, templateIdsUsed });
979
+ waitKeyIds = Array.from(new Set(entities.map((entity) => tableKeyIdFor(entity) >>> 0)));
980
+ }
981
+ }
982
+
983
+ // Keep fine waits resilient to runtime mode flips: include template-key
984
+ // fallbacks even when the current mode is fine. This avoids starvation
985
+ // when a long-poll starts in fine mode but DS degrades to restricted
986
+ // before that waiter naturally re-issues.
987
+ if (interestMode === "fine" && effectiveWaitKind === "fineKey" && templateWaitKeyIds.length > 0) {
988
+ const merged = new Set<number>();
989
+ for (const keyId of waitKeyIds) merged.add(keyId >>> 0);
990
+ for (const keyId of templateWaitKeyIds) merged.add(keyId >>> 0);
991
+ waitKeyIds = Array.from(merged);
992
+ }
993
+
994
+ if (waitKeyIds.length === 0) {
995
+ waitKeyIds = rawFineKeyIds;
996
+ effectiveWaitKind = "fineKey";
997
+ }
998
+ const hotInterestKeyIds = interestMode === "fine" ? rawFineKeyIds : waitKeyIds;
999
+ const releaseHotInterest = touch.beginHotWaitInterest({
1000
+ stream,
1001
+ touchCfg,
1002
+ keyIds: hotInterestKeyIds,
1003
+ templateIdsUsed,
1004
+ interestMode,
1005
+ });
1006
+ try {
1007
+ let sinceGen: number;
1008
+ if (cursorOrSince === "now") {
1009
+ sinceGen = j.getGeneration();
1010
+ } else {
1011
+ const parsed = parseTouchCursor(cursorOrSince);
1012
+ if (!parsed) return badRequest("wait.cursor must be in the form <epochHex>:<generation> or 'now'");
1013
+ if (parsed.epoch !== j.getEpoch()) {
1014
+ const latencyMs = Date.now() - waitStartMs;
1015
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: waitKeyIds.length, outcome: "stale", latencyMs });
1016
+ return json(200, {
1017
+ stale: true,
1018
+ cursor: j.getCursor(),
1019
+ epoch: j.getEpoch(),
1020
+ generation: j.getGeneration(),
1021
+ effectiveWaitKind,
1022
+ bucketMaxSourceOffsetSeq: j.getLastFlushedSourceOffsetSeq().toString(),
1023
+ flushAtMs: j.getLastFlushAtMs(),
1024
+ bucketStartMs: j.getLastBucketStartMs(),
1025
+ error: { code: "stale", message: "cursor epoch mismatch; rerun/re-subscribe and start from cursor" },
1026
+ });
1027
+ }
1028
+ sinceGen = parsed.generation;
1029
+ }
1030
+
1031
+ // Clamp bogus future cursors (defensive).
1032
+ const nowGen = j.getGeneration();
1033
+ if (sinceGen > nowGen) sinceGen = nowGen;
1034
+
1035
+ // Fast path: already touched since cursor.
1036
+ if (j.maybeTouchedSinceAny(waitKeyIds, sinceGen)) {
1037
+ const latencyMs = Date.now() - waitStartMs;
1038
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: waitKeyIds.length, outcome: "touched", latencyMs });
1039
+ return json(200, {
1040
+ touched: true,
1041
+ cursor: j.getCursor(),
1042
+ effectiveWaitKind,
1043
+ bucketMaxSourceOffsetSeq: j.getLastFlushedSourceOffsetSeq().toString(),
1044
+ flushAtMs: j.getLastFlushAtMs(),
1045
+ bucketStartMs: j.getLastBucketStartMs(),
1046
+ });
1047
+ }
1048
+
1049
+ const deadline = Date.now() + timeoutMs;
1050
+ const remaining = deadline - Date.now();
1051
+ if (remaining <= 0) {
1052
+ const latencyMs = Date.now() - waitStartMs;
1053
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: waitKeyIds.length, outcome: "timeout", latencyMs });
1054
+ return json(200, {
1055
+ touched: false,
1056
+ cursor: j.getCursor(),
1057
+ effectiveWaitKind,
1058
+ bucketMaxSourceOffsetSeq: j.getLastFlushedSourceOffsetSeq().toString(),
1059
+ flushAtMs: j.getLastFlushAtMs(),
1060
+ bucketStartMs: j.getLastBucketStartMs(),
1061
+ });
1062
+ }
1063
+
1064
+ // Avoid lost-wakeup races by capturing the current generation before waiting.
1065
+ const afterGen = j.getGeneration();
1066
+ const hit = await j.waitForAny({ keys: waitKeyIds, afterGeneration: afterGen, timeoutMs: remaining, signal: req.signal });
1067
+ if (req.signal.aborted) return new Response(null, { status: 204 });
1068
+
1069
+ if (hit == null) {
1070
+ const latencyMs = Date.now() - waitStartMs;
1071
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: waitKeyIds.length, outcome: "timeout", latencyMs });
1072
+ return json(200, {
1073
+ touched: false,
1074
+ cursor: j.getCursor(),
1075
+ effectiveWaitKind,
1076
+ bucketMaxSourceOffsetSeq: j.getLastFlushedSourceOffsetSeq().toString(),
1077
+ flushAtMs: j.getLastFlushAtMs(),
1078
+ bucketStartMs: j.getLastBucketStartMs(),
1079
+ });
1080
+ }
1081
+
1082
+ const latencyMs = Date.now() - waitStartMs;
1083
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: waitKeyIds.length, outcome: "touched", latencyMs });
1084
+ return json(200, {
1085
+ touched: true,
1086
+ cursor: j.getCursor(),
1087
+ effectiveWaitKind,
1088
+ bucketMaxSourceOffsetSeq: hit.bucketMaxSourceOffsetSeq.toString(),
1089
+ flushAtMs: hit.flushAtMs,
1090
+ bucketStartMs: hit.bucketStartMs,
1091
+ });
1092
+ } finally {
1093
+ releaseHotInterest();
1094
+ }
1095
+ }
1096
+
1097
+ // touchStorage === "sqlite"
1098
+ const touchRes = ensureTouchStream();
1099
+ if (Result.isError(touchRes)) return conflict(touchRes.error.message);
1100
+ const trow = db.getStream(derived)!;
1101
+ const tailSeq = trow.next_offset - 1n;
1102
+ const currentTouchOffset = encodeOffset(trow.epoch, tailSeq);
1103
+ const oldestSeq = db.getWalOldestOffset(derived);
1104
+ const oldestCursorSeq = oldestSeq == null ? -1n : oldestSeq - 1n;
1105
+ const oldestAvailableTouchOffset = encodeOffset(trow.epoch, oldestCursorSeq);
1106
+
1107
+ const staleBody = () => ({
1108
+ stale: true,
1109
+ currentTouchOffset,
1110
+ oldestAvailableTouchOffset,
1111
+ error: {
1112
+ code: "stale",
1113
+ message:
1114
+ "offset is older than oldestAvailableTouchOffset; rerun/re-subscribe and start from currentTouchOffset",
1115
+ },
1116
+ });
1117
+
1118
+ let sinceSeq: bigint;
1119
+ if (cursorOrSince === "now") {
1120
+ sinceSeq = tailSeq;
1121
+ } else {
1122
+ const sinceRes = parseOffsetResult(cursorOrSince);
1123
+ if (Result.isError(sinceRes)) return badRequest(sinceRes.error.message);
1124
+ sinceSeq = offsetToSeqOrNeg1(sinceRes.value);
1125
+ }
1126
+
1127
+ if (sinceSeq < oldestCursorSeq) {
1128
+ const latencyMs = Date.now() - waitStartMs;
1129
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: keys.length, outcome: "stale", latencyMs });
1130
+ return json(200, staleBody());
1131
+ }
1132
+
1133
+ const encoder = new TextEncoder();
1134
+ let keyBytes: Uint8Array[] | null = null;
1135
+ const ensureKeyBytes = (): Uint8Array[] => {
1136
+ if (keyBytes) return keyBytes;
1137
+ keyBytes = keys.map((k) => encoder.encode(k));
1138
+ return keyBytes;
1139
+ };
1140
+
1141
+ // Only use in-memory key notifications for small key sets; for huge key
1142
+ // sets this would cause O(keysPerWait) register/unregister overhead.
1143
+ const KEY_NOTIFIER_MAX_KEYS = 32;
1144
+ const useKeyNotifier = keys.length <= KEY_NOTIFIER_MAX_KEYS;
1145
+
1146
+ let cursorSeq = sinceSeq;
1147
+ const deadline = Date.now() + timeoutMs;
1148
+ for (;;) {
1149
+ if (req.signal.aborted) return new Response(null, { status: 204 });
1150
+
1151
+ const latest = db.getStream(derived);
1152
+ if (!latest || db.isDeleted(latest)) return notFound();
1153
+
1154
+ const endSeq = latest.next_offset - 1n;
1155
+ const endTouchOffset = encodeOffset(latest.epoch, endSeq);
1156
+
1157
+ const match = cursorSeq < endSeq ? db.findFirstWalOffsetForRoutingKeys(derived, cursorSeq, endSeq, ensureKeyBytes()) : null;
1158
+ if (match != null) {
1159
+ let touchedKey: string | null = null;
1160
+ try {
1161
+ const row = db.db.query(`SELECT routing_key FROM wal WHERE stream=? AND offset=? LIMIT 1;`).get(derived, match) as any;
1162
+ if (row && row.routing_key) {
1163
+ touchedKey = new TextDecoder().decode(row.routing_key as Uint8Array);
1164
+ }
1165
+ } catch {
1166
+ touchedKey = null;
1167
+ }
1168
+ const latencyMs = Date.now() - waitStartMs;
1169
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: keys.length, outcome: "touched", latencyMs });
1170
+ return json(200, {
1171
+ touched: true,
1172
+ touchOffset: encodeOffset(latest.epoch, match),
1173
+ currentTouchOffset: endTouchOffset,
1174
+ touchedKeys: touchedKey ? [touchedKey] : [],
1175
+ });
1176
+ }
1177
+
1178
+ const remaining = deadline - Date.now();
1179
+ if (remaining <= 0) {
1180
+ // Return the tail as-of the timeout moment, not as-of the last scan.
1181
+ const latest2 = db.getStream(derived);
1182
+ if (!latest2 || db.isDeleted(latest2)) return notFound();
1183
+ const endSeq2 = latest2.next_offset - 1n;
1184
+ const endTouchOffset2 = encodeOffset(latest2.epoch, endSeq2);
1185
+ const latencyMs = Date.now() - waitStartMs;
1186
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: keys.length, outcome: "timeout", latencyMs });
1187
+ return json(200, { touched: false, currentTouchOffset: endTouchOffset2 });
1188
+ }
1189
+
1190
+ if (useKeyNotifier) {
1191
+ const hit = await touchRoutingKeyNotifier.waitForAny({
1192
+ stream: derived,
1193
+ keys,
1194
+ afterSeq: endSeq,
1195
+ timeoutMs: remaining,
1196
+ signal: req.signal,
1197
+ });
1198
+ if (req.signal.aborted) return new Response(null, { status: 204 });
1199
+
1200
+ const latest2 = db.getStream(derived);
1201
+ if (!latest2 || db.isDeleted(latest2)) return notFound();
1202
+ const endSeq2 = latest2.next_offset - 1n;
1203
+ const endTouchOffset2 = encodeOffset(latest2.epoch, endSeq2);
1204
+
1205
+ if (hit == null) {
1206
+ const latencyMs = Date.now() - waitStartMs;
1207
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: keys.length, outcome: "timeout", latencyMs });
1208
+ return json(200, { touched: false, currentTouchOffset: endTouchOffset2 });
1209
+ }
1210
+
1211
+ const latencyMs = Date.now() - waitStartMs;
1212
+ touch.recordWaitMetrics({ stream, touchCfg, keysCount: keys.length, outcome: "touched", latencyMs });
1213
+ return json(200, {
1214
+ touched: true,
1215
+ touchOffset: encodeOffset(latest2.epoch, hit.seq),
1216
+ currentTouchOffset: endTouchOffset2,
1217
+ touchedKeys: [hit.key],
1218
+ });
1219
+ }
1220
+
1221
+ // Fallback: wait for any new touch rows. Cursor advances to the tail we already scanned.
1222
+ await notifier.waitFor(derived, endSeq, remaining, req.signal);
1223
+ if (req.signal.aborted) return new Response(null, { status: 204 });
1224
+ cursorSeq = endSeq;
1225
+ }
1226
+ }
1227
+
1228
+ // touchMode.kind === "read"
1229
+ if (req.method !== "GET") return badRequest("unsupported method");
1230
+ if (touchStorage === "memory") {
1231
+ return notFound("touch stream read not supported in memory mode; use /touch/wait");
1232
+ }
1233
+ const touchRes = ensureTouchStream();
1234
+ if (Result.isError(touchRes)) return conflict(touchRes.error.message);
1235
+ const trow = db.getStream(derived)!;
1236
+ const tailSeq = trow.next_offset - 1n;
1237
+ const currentTouchOffset = encodeOffset(trow.epoch, tailSeq);
1238
+ const oldestSeq = db.getWalOldestOffset(derived);
1239
+ const oldestCursorSeq = oldestSeq == null ? -1n : oldestSeq - 1n;
1240
+ const oldestAvailableTouchOffset = encodeOffset(trow.epoch, oldestCursorSeq);
1241
+
1242
+ const staleBody = () => ({
1243
+ stale: true,
1244
+ currentTouchOffset,
1245
+ oldestAvailableTouchOffset,
1246
+ error: {
1247
+ code: "stale",
1248
+ message:
1249
+ "offset is older than oldestAvailableTouchOffset; rerun/re-subscribe and start from currentTouchOffset",
1250
+ },
1251
+ });
1252
+
1253
+ // Default to "subscribe from now" for companion touches.
1254
+ const nextUrl = new URL(req.url, "http://localhost");
1255
+ if (!nextUrl.searchParams.has("offset")) {
1256
+ const sinceParam = nextUrl.searchParams.get("since");
1257
+ if (sinceParam) {
1258
+ const sinceRes = parseTimestampMsResult(sinceParam);
1259
+ if (Result.isError(sinceRes)) return badRequest(sinceRes.error.message);
1260
+ const key = touchMode.key ?? nextUrl.searchParams.get("key");
1261
+ const computedRes = await reader.seekOffsetByTimestampResult(derived, sinceRes.value, key ?? null);
1262
+ if (Result.isError(computedRes)) return readerErrorResponse(computedRes.error);
1263
+ nextUrl.searchParams.set("offset", computedRes.value);
1264
+ nextUrl.searchParams.delete("since");
1265
+ } else {
1266
+ nextUrl.searchParams.set("offset", "now");
1267
+ }
1268
+ }
1269
+
1270
+ const requestedOffset = nextUrl.searchParams.get("offset") ?? "";
1271
+ if (requestedOffset !== "now") {
1272
+ const requestedRes = parseOffsetResult(requestedOffset);
1273
+ if (Result.isError(requestedRes)) return badRequest(requestedRes.error.message);
1274
+ const seq = offsetToSeqOrNeg1(requestedRes.value);
1275
+ if (seq < oldestCursorSeq) return json(409, staleBody());
1276
+ }
1277
+
1278
+ // Delegate to the standard stream read path for the internal derived stream.
1279
+ const touchPath = touchMode.key
1280
+ ? `${streamPrefix}${encodeURIComponent(derived)}/pk/${encodeURIComponent(touchMode.key)}`
1281
+ : `${streamPrefix}${encodeURIComponent(derived)}`;
1282
+ nextUrl.pathname = touchPath;
1283
+ return fetch(new Request(nextUrl.toString(), req));
1284
+ }
1285
+
1286
+ // Stream lifecycle.
1287
+ if (req.method === "PUT") {
1288
+ const streamClosed = parseStreamClosedHeader(req.headers.get("stream-closed"));
1289
+ const ttlHeader = req.headers.get("stream-ttl");
1290
+ const expiresHeader = req.headers.get("stream-expires-at");
1291
+ if (ttlHeader && expiresHeader) return badRequest("only one of Stream-TTL or Stream-Expires-At is allowed");
1292
+
1293
+ let ttlSeconds: number | null = null;
1294
+ let expiresAtMs: bigint | null = null;
1295
+ if (ttlHeader) {
1296
+ const ttlRes = parseStreamTtlSeconds(ttlHeader);
1297
+ if (Result.isError(ttlRes)) return badRequest(ttlRes.error.message);
1298
+ ttlSeconds = ttlRes.value;
1299
+ expiresAtMs = db.nowMs() + BigInt(ttlSeconds) * 1000n;
1300
+ } else if (expiresHeader) {
1301
+ const expiresRes = parseTimestampMsResult(expiresHeader);
1302
+ if (Result.isError(expiresRes)) return badRequest(expiresRes.error.message);
1303
+ expiresAtMs = expiresRes.value;
1304
+ }
1305
+
1306
+ const contentType = normalizeContentType(req.headers.get("content-type")) ?? "application/octet-stream";
1307
+ const routingKeyHeader = req.headers.get("stream-key");
1308
+
1309
+ const memReject = rejectIfMemoryLimited();
1310
+ if (memReject) return memReject;
1311
+ const ab = await req.arrayBuffer();
1312
+ if (ab.byteLength > cfg.appendMaxBodyBytes) return tooLarge(`body too large (max ${cfg.appendMaxBodyBytes})`);
1313
+ const bodyBytes = new Uint8Array(ab);
1314
+
1315
+ let srow = db.getStream(stream);
1316
+ if (srow && db.isDeleted(srow)) {
1317
+ db.hardDeleteStream(stream);
1318
+ srow = null;
1319
+ }
1320
+ if (srow && srow.expires_at_ms != null && db.nowMs() > srow.expires_at_ms) {
1321
+ db.hardDeleteStream(stream);
1322
+ srow = null;
1323
+ }
1324
+
1325
+ if (srow) {
1326
+ const existingClosed = srow.closed !== 0;
1327
+ const existingContentType = normalizeContentType(srow.content_type) ?? srow.content_type;
1328
+ const ttlMatch =
1329
+ ttlSeconds != null
1330
+ ? srow.ttl_seconds != null && srow.ttl_seconds === ttlSeconds
1331
+ : expiresAtMs != null
1332
+ ? srow.ttl_seconds == null && srow.expires_at_ms != null && srow.expires_at_ms === expiresAtMs
1333
+ : srow.ttl_seconds == null && srow.expires_at_ms == null;
1334
+ if (existingContentType !== contentType || existingClosed !== streamClosed || !ttlMatch) {
1335
+ return conflict("stream config mismatch");
1336
+ }
1337
+
1338
+ const tailOffset = encodeOffset(srow.epoch, srow.next_offset - 1n);
1339
+ const headers: Record<string, string> = {
1340
+ "content-type": existingContentType,
1341
+ "stream-next-offset": tailOffset,
1342
+ };
1343
+ if (existingClosed) headers["stream-closed"] = "true";
1344
+ if (srow.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(srow.expires_at_ms)).toISOString();
1345
+ return new Response(null, { status: 200, headers: withNosniff(headers) });
1346
+ }
1347
+
1348
+ db.ensureStream(stream, { contentType, expiresAtMs, ttlSeconds, closed: false });
1349
+ let lastOffset = -1n;
1350
+ let appendedRows = 0;
1351
+ let closedNow = false;
1352
+
1353
+ if (bodyBytes.byteLength > 0) {
1354
+ const rowsRes = buildAppendRowsResult(stream, bodyBytes, contentType, routingKeyHeader, true);
1355
+ if (Result.isError(rowsRes)) {
1356
+ if (rowsRes.error.status === 500) return internalError();
1357
+ return badRequest(rowsRes.error.message);
1358
+ }
1359
+ const rows = rowsRes.value.rows;
1360
+ appendedRows = rows.length;
1361
+ if (rows.length > 0 || streamClosed) {
1362
+ const appendRes = await enqueueAppend({
1363
+ stream,
1364
+ baseAppendMs: db.nowMs(),
1365
+ rows,
1366
+ contentType,
1367
+ close: streamClosed,
1368
+ });
1369
+ if (Result.isError(appendRes)) {
1370
+ if (appendRes.error.kind === "overloaded") return json(429, { error: { code: "overloaded", message: "ingest queue full" } });
1371
+ return json(500, { error: { code: "internal", message: "append failed" } });
1372
+ }
1373
+ lastOffset = appendRes.value.lastOffset;
1374
+ closedNow = appendRes.value.closed;
1375
+ }
1376
+ } else if (streamClosed) {
1377
+ const appendRes = await enqueueAppend({
1378
+ stream,
1379
+ baseAppendMs: db.nowMs(),
1380
+ rows: [],
1381
+ contentType,
1382
+ close: true,
1383
+ });
1384
+ if (Result.isError(appendRes)) {
1385
+ if (appendRes.error.kind === "overloaded") return json(429, { error: { code: "overloaded", message: "ingest queue full" } });
1386
+ return json(500, { error: { code: "internal", message: "close failed" } });
1387
+ }
1388
+ lastOffset = appendRes.value.lastOffset;
1389
+ closedNow = appendRes.value.closed;
1390
+ }
1391
+
1392
+ recordAppendOutcome({
1393
+ stream,
1394
+ lastOffset,
1395
+ appendedRows,
1396
+ metricsBytes: bodyBytes.byteLength,
1397
+ ingestedBytes: bodyBytes.byteLength,
1398
+ touched: bodyBytes.byteLength > 0 || streamClosed,
1399
+ closed: closedNow,
1400
+ });
1401
+
1402
+ const createdRow = db.getStream(stream)!;
1403
+ const tailOffset = encodeOffset(createdRow.epoch, createdRow.next_offset - 1n);
1404
+ const headers: Record<string, string> = {
1405
+ "content-type": contentType,
1406
+ "stream-next-offset": appendedRows > 0 || streamClosed ? encodeOffset(createdRow.epoch, lastOffset) : tailOffset,
1407
+ location: req.url,
1408
+ };
1409
+ if (streamClosed || closedNow) headers["stream-closed"] = "true";
1410
+ if (createdRow.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(createdRow.expires_at_ms)).toISOString();
1411
+ return new Response(null, { status: 201, headers: withNosniff(headers) });
1412
+ }
1413
+
1414
+ if (req.method === "DELETE") {
1415
+ const deleted = db.deleteStream(stream);
1416
+ if (!deleted) return notFound();
1417
+ await uploader.publishManifest(stream);
1418
+ return new Response(null, { status: 204, headers: withNosniff() });
1419
+ }
1420
+
1421
+ if (req.method === "HEAD") {
1422
+ const srow = db.getStream(stream);
1423
+ if (!srow || db.isDeleted(srow)) return notFound();
1424
+ if (srow.expires_at_ms != null && db.nowMs() > srow.expires_at_ms) return notFound("stream expired");
1425
+ const tailOffset = encodeOffset(srow.epoch, srow.next_offset - 1n);
1426
+ const headers: Record<string, string> = {
1427
+ "content-type": normalizeContentType(srow.content_type) ?? srow.content_type,
1428
+ "stream-next-offset": tailOffset,
1429
+ "stream-end-offset": tailOffset,
1430
+ "cache-control": "no-store",
1431
+ };
1432
+ if (srow.closed !== 0) headers["stream-closed"] = "true";
1433
+ if (srow.ttl_seconds != null && srow.expires_at_ms != null) {
1434
+ const remainingMs = Number(srow.expires_at_ms - db.nowMs());
1435
+ const remaining = Math.max(0, Math.ceil(remainingMs / 1000));
1436
+ headers["stream-ttl"] = String(remaining);
1437
+ }
1438
+ if (srow.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(srow.expires_at_ms)).toISOString();
1439
+ return new Response(null, { status: 200, headers: withNosniff(headers) });
1440
+ }
1441
+
1442
+ if (req.method === "POST") {
1443
+ const srow = db.getStream(stream);
1444
+ if (!srow || db.isDeleted(srow)) return notFound();
1445
+ if (srow.expires_at_ms != null && db.nowMs() > srow.expires_at_ms) return notFound("stream expired");
1446
+
1447
+ const streamClosed = parseStreamClosedHeader(req.headers.get("stream-closed"));
1448
+ const streamContentType = normalizeContentType(srow.content_type) ?? srow.content_type;
1449
+
1450
+ const producerId = req.headers.get("producer-id");
1451
+ const producerEpochHeader = req.headers.get("producer-epoch");
1452
+ const producerSeqHeader = req.headers.get("producer-seq");
1453
+ let producer: ProducerInfo | null = null;
1454
+ if (producerId != null || producerEpochHeader != null || producerSeqHeader != null) {
1455
+ if (!producerId || producerId.trim() === "") return badRequest("invalid Producer-Id");
1456
+ if (!producerEpochHeader || !producerSeqHeader) return badRequest("missing producer headers");
1457
+ const epoch = parseNonNegativeInt(producerEpochHeader);
1458
+ const seq = parseNonNegativeInt(producerSeqHeader);
1459
+ if (epoch == null || seq == null) return badRequest("invalid producer headers");
1460
+ producer = { id: producerId, epoch, seq };
1461
+ }
1462
+
1463
+ let streamSeq: string | null = null;
1464
+ const streamSeqRes = parseStreamSeqHeader(req.headers.get("stream-seq"));
1465
+ if (Result.isError(streamSeqRes)) return badRequest(streamSeqRes.error.message);
1466
+ streamSeq = streamSeqRes.value;
1467
+
1468
+ const tsHdr = req.headers.get("stream-timestamp");
1469
+ let baseAppendMs = db.nowMs();
1470
+ if (tsHdr) {
1471
+ const tsRes = parseTimestampMsResult(tsHdr);
1472
+ if (Result.isError(tsRes)) return badRequest(tsRes.error.message);
1473
+ baseAppendMs = tsRes.value;
1474
+ }
1475
+
1476
+ const memReject = rejectIfMemoryLimited();
1477
+ if (memReject) return memReject;
1478
+ const ab = await req.arrayBuffer();
1479
+ if (ab.byteLength > cfg.appendMaxBodyBytes) return tooLarge(`body too large (max ${cfg.appendMaxBodyBytes})`);
1480
+ const bodyBytes = new Uint8Array(ab);
1481
+
1482
+ const isCloseOnly = streamClosed && bodyBytes.byteLength === 0;
1483
+ if (bodyBytes.byteLength === 0 && !streamClosed) return badRequest("empty body");
1484
+
1485
+ let reqContentType = normalizeContentType(req.headers.get("content-type"));
1486
+ if (!isCloseOnly && !reqContentType) return badRequest("missing content-type");
1487
+
1488
+ const routingKeyHeader = req.headers.get("stream-key");
1489
+ let rows: AppendRow[] = [];
1490
+ if (!isCloseOnly) {
1491
+ const rowsRes = buildAppendRowsResult(stream, bodyBytes, reqContentType!, routingKeyHeader, false);
1492
+ if (Result.isError(rowsRes)) {
1493
+ if (rowsRes.error.status === 500) return internalError();
1494
+ return badRequest(rowsRes.error.message);
1495
+ }
1496
+ rows = rowsRes.value.rows;
1497
+ }
1498
+
1499
+ const appendRes = await enqueueAppend({
1500
+ stream,
1501
+ baseAppendMs,
1502
+ rows,
1503
+ contentType: reqContentType ?? streamContentType,
1504
+ streamSeq,
1505
+ producer,
1506
+ close: streamClosed,
1507
+ });
1508
+
1509
+ if (Result.isError(appendRes)) {
1510
+ const err = appendRes.error;
1511
+ if (err.kind === "overloaded") return json(429, { error: { code: "overloaded", message: "ingest queue full" } });
1512
+ if (err.kind === "gone") return notFound("stream expired");
1513
+ if (err.kind === "not_found") return notFound();
1514
+ if (err.kind === "content_type_mismatch") return conflict("content-type mismatch");
1515
+ if (err.kind === "stream_seq") {
1516
+ return conflict("sequence mismatch", {
1517
+ "stream-expected-seq": err.expected,
1518
+ "stream-received-seq": err.received,
1519
+ });
1520
+ }
1521
+ if (err.kind === "closed") {
1522
+ const headers: Record<string, string> = {
1523
+ "stream-next-offset": encodeOffset(srow.epoch, err.lastOffset),
1524
+ "stream-closed": "true",
1525
+ };
1526
+ return new Response(null, { status: 409, headers: withNosniff(headers) });
1527
+ }
1528
+ if (err.kind === "producer_stale_epoch") {
1529
+ return new Response(null, {
1530
+ status: 403,
1531
+ headers: withNosniff({ "producer-epoch": String(err.producerEpoch) }),
1532
+ });
1533
+ }
1534
+ if (err.kind === "producer_gap") {
1535
+ return new Response(null, {
1536
+ status: 409,
1537
+ headers: withNosniff({
1538
+ "producer-expected-seq": String(err.expected),
1539
+ "producer-received-seq": String(err.received),
1540
+ }),
1541
+ });
1542
+ }
1543
+ if (err.kind === "producer_epoch_seq") return badRequest("invalid producer sequence");
1544
+ return json(500, { error: { code: "internal", message: "append failed" } });
1545
+ }
1546
+ const res = appendRes.value;
1547
+
1548
+ const appendBytes = rows.reduce((acc, r) => acc + r.payload.byteLength, 0);
1549
+ recordAppendOutcome({
1550
+ stream,
1551
+ lastOffset: res.lastOffset,
1552
+ appendedRows: res.appendedRows,
1553
+ metricsBytes: appendBytes,
1554
+ ingestedBytes: bodyBytes.byteLength,
1555
+ touched: true,
1556
+ closed: res.closed,
1557
+ });
1558
+
1559
+ const headers: Record<string, string> = {
1560
+ "stream-next-offset": encodeOffset(srow.epoch, res.lastOffset),
1561
+ };
1562
+ if (res.closed) headers["stream-closed"] = "true";
1563
+ if (producer && res.producer) {
1564
+ headers["producer-epoch"] = String(res.producer.epoch);
1565
+ headers["producer-seq"] = String(res.producer.seq);
1566
+ }
1567
+
1568
+ const status = producer && res.appendedRows > 0 ? 200 : 204;
1569
+ return new Response(null, { status, headers: withNosniff(headers) });
1570
+ }
1571
+
1572
+ if (req.method === "GET") {
1573
+ const srow = db.getStream(stream);
1574
+ if (!srow || db.isDeleted(srow)) return notFound();
1575
+ if (srow.expires_at_ms != null && db.nowMs() > srow.expires_at_ms) return notFound("stream expired");
1576
+
1577
+ const streamContentType = normalizeContentType(srow.content_type) ?? srow.content_type;
1578
+ const isJsonStream = streamContentType === "application/json";
1579
+
1580
+ const fmtParam = url.searchParams.get("format");
1581
+ let format: "raw" | "json" = isJsonStream ? "json" : "raw";
1582
+ if (fmtParam) {
1583
+ if (fmtParam !== "raw" && fmtParam !== "json") return badRequest("invalid format");
1584
+ format = fmtParam as "raw" | "json";
1585
+ }
1586
+ if (format === "json" && !isJsonStream) return badRequest("invalid format");
1587
+
1588
+ const pathKey = pathKeyParam ?? null;
1589
+ const key = pathKey ?? url.searchParams.get("key");
1590
+
1591
+ const liveParam = url.searchParams.get("live") ?? "";
1592
+ const cursorParam = url.searchParams.get("cursor");
1593
+ let mode: "catchup" | "long-poll" | "sse";
1594
+ if (liveParam === "" || liveParam === "false" || liveParam === "0") mode = "catchup";
1595
+ else if (liveParam === "long-poll" || liveParam === "true" || liveParam === "1") mode = "long-poll";
1596
+ else if (liveParam === "sse") mode = "sse";
1597
+ else return badRequest("invalid live mode");
1598
+
1599
+ const timeout = url.searchParams.get("timeout") ?? url.searchParams.get("timeout_ms");
1600
+ let timeoutMs: number | null = null;
1601
+ if (timeout) {
1602
+ if (/^[0-9]+$/.test(timeout)) {
1603
+ timeoutMs = Number(timeout);
1604
+ } else {
1605
+ const timeoutRes = parseDurationMsResult(timeout);
1606
+ if (Result.isError(timeoutRes)) return badRequest("invalid timeout");
1607
+ timeoutMs = timeoutRes.value;
1608
+ }
1609
+ }
1610
+
1611
+ const hasOffsetParam = url.searchParams.has("offset");
1612
+ let offset = url.searchParams.get("offset");
1613
+ if (hasOffsetParam && (!offset || offset.trim() === "")) return badRequest("missing offset");
1614
+ const sinceParam = url.searchParams.get("since");
1615
+ if (!offset && sinceParam) {
1616
+ const sinceRes = parseTimestampMsResult(sinceParam);
1617
+ if (Result.isError(sinceRes)) return badRequest(sinceRes.error.message);
1618
+ const seekRes = await reader.seekOffsetByTimestampResult(stream, sinceRes.value, key ?? null);
1619
+ if (Result.isError(seekRes)) return readerErrorResponse(seekRes.error);
1620
+ offset = seekRes.value;
1621
+ }
1622
+
1623
+ if (!offset) {
1624
+ if (mode === "catchup") offset = "-1";
1625
+ else return badRequest("missing offset");
1626
+ }
1627
+
1628
+ let parsedOffset: ParsedOffset | null = null;
1629
+ if (offset !== "now") {
1630
+ const offsetRes = parseOffsetResult(offset);
1631
+ if (Result.isError(offsetRes)) return badRequest(offsetRes.error.message);
1632
+ parsedOffset = offsetRes.value;
1633
+ }
1634
+
1635
+ const ifNoneMatch = req.headers.get("if-none-match");
1636
+
1637
// Render one ReadBatch as an HTTP response in the requested format.
// Shared by the catch-up and long-poll GET paths. Reads closure state:
// `srow` (stream row at request start), `offset` (the request's start offset,
// non-null by the time this runs), `key`, `format`, `ifNoneMatch`,
// `streamContentType`.
//   - cacheControl: value for the cache-control header, or null to omit it.
//   - includeEtag: when true, emit a weak ETag and honor If-None-Match with 304
//     (used only on the immutable catch-up path; long-poll passes false).
// NOTE(review): closed-at-tail is computed from `srow`, which was snapshotted
// before any long-poll waiting — a close that lands mid-poll is reported by the
// caller's own db.getStream re-check, not here. Confirm that is intentional.
const sendBatch = async (batch: ReadBatch, cacheControl: string | null, includeEtag: boolean): Promise<Response> => {
  // "Up to date" means the reader consumed everything available at read time.
  const upToDate = batch.nextOffsetSeq === batch.endOffsetSeq;
  const closedAtTail = srow.closed !== 0 && upToDate;
  // Weak ETag keyed on (start offset, next offset, key filter, format): the
  // same slice request yields the same tag, enabling 304 revalidation.
  const etag = includeEtag
    ? `W/\"slice:${canonicalizeOffset(offset!)}:${batch.nextOffset}:key=${key ?? ""}:fmt=${format}\"`
    : null;
  const baseHeaders: Record<string, string> = {
    "stream-next-offset": batch.nextOffset,
    "stream-end-offset": batch.endOffset,
    "cross-origin-resource-policy": "cross-origin",
  };
  if (upToDate) baseHeaders["stream-up-to-date"] = "true";
  if (closedAtTail) baseHeaders["stream-closed"] = "true";
  if (cacheControl) baseHeaders["cache-control"] = cacheControl;
  if (etag) baseHeaders["etag"] = etag;
  if (srow.expires_at_ms != null) baseHeaders["stream-expires-at"] = new Date(Number(srow.expires_at_ms)).toISOString();

  // Conditional GET: exact string match only (no `*` / multi-value
  // If-None-Match handling — presumably sufficient for this API's clients).
  // 304 still carries the offset headers so the client can advance.
  if (etag && ifNoneMatch && ifNoneMatch === etag) {
    return new Response(null, { status: 304, headers: withNosniff(baseHeaders) });
  }

  // JSON format: decode each record payload and return a single JSON array.
  if (format === "json") {
    const decoded = decodeJsonRecords(stream, batch.records);
    if (Result.isError(decoded)) {
      if (decoded.error.status === 500) return internalError();
      return badRequest(decoded.error.message);
    }
    const body = JSON.stringify(decoded.value.values);
    metrics.recordRead(body.length, decoded.value.values.length);
    const headers: Record<string, string> = {
      "content-type": "application/json",
      ...baseHeaders,
    };
    return new Response(body, { status: 200, headers: withNosniff(headers) });
  }

  // Raw format: concatenate record payloads verbatim under the stream's
  // content type.
  const outBytes = concatPayloads(batch.records.map((r) => r.payload));
  metrics.recordRead(outBytes.byteLength, batch.records.length);
  const headers: Record<string, string> = {
    "content-type": streamContentType,
    ...baseHeaders,
  };
  // Copy into a fresh buffer before responding — presumably to avoid handing
  // the Response a view over a larger/pooled backing ArrayBuffer; confirm
  // against concatPayloads' contract.
  const outBody = new Uint8Array(outBytes.byteLength);
  outBody.set(outBytes);
  return new Response(outBody, { status: 200, headers: withNosniff(headers) });
};
1683
+
1684
+ if (mode === "sse") {
1685
+ const baseCursor = srow.closed !== 0 ? null : computeCursor(Date.now(), cursorParam);
1686
+ const dataEncoding = isTextContentType(streamContentType) ? "text" : "base64";
1687
+ const startOffsetSeq = offset === "now" ? srow.next_offset - 1n : offsetToSeqOrNeg1(parsedOffset!);
1688
+ const startOffset = offset === "now" ? encodeOffset(srow.epoch, startOffsetSeq) : canonicalizeOffset(offset);
1689
+
1690
+ const encoder = new TextEncoder();
1691
+ let aborted = false;
1692
+ const abortController = new AbortController();
1693
// SSE pump: a ReadableStream whose async loop alternates between reading the
// next batch and blocking on the notifier until new data (or timeout) arrives.
// Each iteration emits zero or one "data" event plus exactly one "control"
// event (next offset / up-to-date / closed / cursor). Reads closure state:
// `offset`, `startOffset`, `startOffsetSeq`, `key`, `format`, `dataEncoding`,
// `baseCursor`, `timeoutMs`, plus `aborted`/`abortController` shared with
// cancel() below.
const streamBody = new ReadableStream({
  start(controller) {
    // The pump runs as a detached async IIFE; the trailing .catch funnels any
    // unexpected rejection into controller.error.
    (async () => {
      // Abort helper: idempotent via the shared `aborted` flag, cancels any
      // in-flight notifier wait through the shared AbortController.
      const fail = (message: string): void => {
        if (aborted) return;
        aborted = true;
        abortController.abort();
        controller.error(new Error(message));
      };
      let currentOffset = startOffset;
      let currentSeq = startOffsetSeq;
      let first = true;
      while (!aborted) {
        let batch: ReadBatch;
        if (offset === "now" && first) {
          // "now" start: skip the initial read entirely and synthesize an
          // empty, already-up-to-date batch anchored at the tail, so the
          // client immediately receives a control frame with its position.
          batch = {
            stream,
            format,
            key: key ?? null,
            requestOffset: startOffset,
            endOffset: startOffset,
            nextOffset: startOffset,
            endOffsetSeq: currentSeq,
            nextOffsetSeq: currentSeq,
            records: [],
          };
        } else {
          const batchRes = await reader.readResult({ stream, offset: currentOffset, key: key ?? null, format });
          if (Result.isError(batchRes)) {
            fail(batchRes.error.message);
            return;
          }
          batch = batchRes.value;
        }
        first = false;

        let ssePayload = "";

        if (batch.records.length > 0) {
          let dataPayload = "";
          if (format === "json") {
            // JSON streams: decode records and send one JSON array per event.
            const decoded = decodeJsonRecords(stream, batch.records);
            if (Result.isError(decoded)) {
              fail(decoded.error.message);
              return;
            }
            dataPayload = JSON.stringify(decoded.value.values);
          } else {
            // Raw streams: concatenated payload bytes; binary content types
            // go out base64-encoded (dataEncoding chosen by the caller from
            // the stream's content type), text goes out as-is.
            const outBytes = concatPayloads(batch.records.map((r) => r.payload));
            dataPayload =
              dataEncoding === "base64"
                ? Buffer.from(outBytes).toString("base64")
                : new TextDecoder().decode(outBytes);
          }
          ssePayload += encodeSseEvent("data", dataPayload);
        }

        const upToDate = batch.nextOffsetSeq === batch.endOffsetSeq;
        // Re-read the stream row: a close may have landed after the snapshot
        // taken at request time. Closed only terminates the SSE session once
        // the client is also caught up.
        const latest = db.getStream(stream);
        const closedNow = !!latest && latest.closed !== 0 && upToDate;

        const control: Record<string, any> = { streamNextOffset: batch.nextOffset };
        if (upToDate) control.upToDate = true;
        if (closedNow) control.streamClosed = true;
        // Cursor is only offered while the stream can still make progress
        // (baseCursor is null when the stream was already closed at start).
        if (!closedNow && baseCursor) control.streamCursor = baseCursor;
        ssePayload += encodeSseEvent("control", JSON.stringify(control));
        // Data + control are enqueued as a single chunk so a consumer never
        // observes a data frame without its control frame.
        controller.enqueue(encoder.encode(ssePayload));

        if (closedNow) break;
        currentOffset = batch.nextOffset;
        currentSeq = batch.nextOffsetSeq;
        // Not caught up yet: loop immediately to drain the backlog.
        if (!upToDate) continue;

        // Caught up: park until new data is appended past currentSeq, the
        // wait times out (default 30s — timeout also serves as a keepalive
        // cadence), or cancel()/fail() aborts the signal.
        const sseWaitMs = timeoutMs == null ? 30_000 : timeoutMs;
        await notifier.waitFor(stream, currentSeq, sseWaitMs, abortController.signal);
      }
      // Normal termination (closed-at-tail): close the SSE body unless an
      // abort already errored/cancelled it.
      if (!aborted) controller.close();
    })().catch((err) => {
      if (!aborted) controller.error(err);
    });
  },
  // Client disconnected: stop the pump loop and wake any pending notifier
  // wait via the shared AbortController.
  cancel() {
    aborted = true;
    abortController.abort();
  },
});
1779
+
1780
+ const headers: Record<string, string> = {
1781
+ "content-type": "text/event-stream",
1782
+ "cache-control": "no-cache",
1783
+ "cross-origin-resource-policy": "cross-origin",
1784
+ "stream-next-offset": startOffset,
1785
+ "stream-end-offset": encodeOffset(srow.epoch, srow.next_offset - 1n),
1786
+ };
1787
+ if (dataEncoding === "base64") headers["stream-sse-data-encoding"] = "base64";
1788
+ return new Response(streamBody, { status: 200, headers: withNosniff(headers) });
1789
+ }
1790
+
1791
+ const defaultLongPollTimeoutMs = 3000;
1792
+
1793
+ if (offset === "now") {
1794
+ const tailOffset = encodeOffset(srow.epoch, srow.next_offset - 1n);
1795
+ if (srow.closed !== 0) {
1796
+ if (mode === "long-poll") {
1797
+ const headers: Record<string, string> = {
1798
+ "stream-next-offset": tailOffset,
1799
+ "stream-end-offset": tailOffset,
1800
+ "stream-up-to-date": "true",
1801
+ "stream-closed": "true",
1802
+ "cache-control": "no-store",
1803
+ };
1804
+ if (srow.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(srow.expires_at_ms)).toISOString();
1805
+ return new Response(null, { status: 204, headers: withNosniff(headers) });
1806
+ }
1807
+ const headers: Record<string, string> = {
1808
+ "content-type": streamContentType,
1809
+ "stream-next-offset": tailOffset,
1810
+ "stream-end-offset": tailOffset,
1811
+ "stream-up-to-date": "true",
1812
+ "stream-closed": "true",
1813
+ "cache-control": "no-store",
1814
+ "cross-origin-resource-policy": "cross-origin",
1815
+ };
1816
+ if (srow.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(srow.expires_at_ms)).toISOString();
1817
+ const body = format === "json" ? "[]" : "";
1818
+ return new Response(body, { status: 200, headers: withNosniff(headers) });
1819
+ }
1820
+
1821
+ if (mode === "long-poll") {
1822
+ const deadline = Date.now() + (timeoutMs ?? defaultLongPollTimeoutMs);
1823
+ let currentOffset = tailOffset;
1824
+ while (true) {
1825
+ const batchRes = await reader.readResult({ stream, offset: currentOffset, key: key ?? null, format });
1826
+ if (Result.isError(batchRes)) return readerErrorResponse(batchRes.error);
1827
+ const batch = batchRes.value;
1828
+ if (batch.records.length > 0) {
1829
+ const cursor = computeCursor(Date.now(), cursorParam);
1830
+ const resp = await sendBatch(batch, "no-store", false);
1831
+ const headers = new Headers(resp.headers);
1832
+ headers.set("stream-cursor", cursor);
1833
+ return new Response(resp.body, { status: resp.status, headers });
1834
+ }
1835
+ const latest = db.getStream(stream);
1836
+ if (latest && latest.closed !== 0 && batch.nextOffsetSeq === batch.endOffsetSeq) {
1837
+ const latestTail = encodeOffset(latest.epoch, latest.next_offset - 1n);
1838
+ const headers: Record<string, string> = {
1839
+ "stream-next-offset": latestTail,
1840
+ "stream-end-offset": latestTail,
1841
+ "stream-up-to-date": "true",
1842
+ "stream-closed": "true",
1843
+ "cache-control": "no-store",
1844
+ };
1845
+ if (latest.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(latest.expires_at_ms)).toISOString();
1846
+ return new Response(null, { status: 204, headers: withNosniff(headers) });
1847
+ }
1848
+ const remaining = deadline - Date.now();
1849
+ if (remaining <= 0) break;
1850
+ currentOffset = batch.nextOffset;
1851
+ await notifier.waitFor(stream, batch.endOffsetSeq, remaining, req.signal);
1852
+ if (req.signal.aborted) return new Response(null, { status: 204 });
1853
+ }
1854
+ const latest = db.getStream(stream);
1855
+ const latestTail = latest ? encodeOffset(latest.epoch, latest.next_offset - 1n) : tailOffset;
1856
+ const headers: Record<string, string> = {
1857
+ "stream-next-offset": latestTail,
1858
+ "stream-end-offset": latestTail,
1859
+ "stream-up-to-date": "true",
1860
+ "cache-control": "no-store",
1861
+ };
1862
+ if (latest && latest.closed !== 0) headers["stream-closed"] = "true";
1863
+ else headers["stream-cursor"] = computeCursor(Date.now(), cursorParam);
1864
+ if (latest && latest.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(latest.expires_at_ms)).toISOString();
1865
+ return new Response(null, { status: 204, headers: withNosniff(headers) });
1866
+ }
1867
+
1868
+ const headers: Record<string, string> = {
1869
+ "content-type": streamContentType,
1870
+ "stream-next-offset": tailOffset,
1871
+ "stream-end-offset": tailOffset,
1872
+ "stream-up-to-date": "true",
1873
+ "cache-control": "no-store",
1874
+ "cross-origin-resource-policy": "cross-origin",
1875
+ };
1876
+ const body = format === "json" ? "[]" : "";
1877
+ return new Response(body, { status: 200, headers: withNosniff(headers) });
1878
+ }
1879
+
1880
+ if (mode === "long-poll") {
1881
+ const deadline = Date.now() + (timeoutMs ?? defaultLongPollTimeoutMs);
1882
+ let currentOffset = offset;
1883
+ while (true) {
1884
+ const batchRes = await reader.readResult({ stream, offset: currentOffset, key: key ?? null, format });
1885
+ if (Result.isError(batchRes)) return readerErrorResponse(batchRes.error);
1886
+ const batch = batchRes.value;
1887
+ if (batch.records.length > 0) {
1888
+ const cursor = computeCursor(Date.now(), cursorParam);
1889
+ const resp = await sendBatch(batch, "no-store", false);
1890
+ const headers = new Headers(resp.headers);
1891
+ headers.set("stream-cursor", cursor);
1892
+ return new Response(resp.body, { status: resp.status, headers });
1893
+ }
1894
+ const latest = db.getStream(stream);
1895
+ if (latest && latest.closed !== 0 && batch.nextOffsetSeq === batch.endOffsetSeq) {
1896
+ const latestTail = encodeOffset(latest.epoch, latest.next_offset - 1n);
1897
+ const headers: Record<string, string> = {
1898
+ "stream-next-offset": latestTail,
1899
+ "stream-end-offset": latestTail,
1900
+ "stream-up-to-date": "true",
1901
+ "stream-closed": "true",
1902
+ "cache-control": "no-store",
1903
+ };
1904
+ if (latest.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(latest.expires_at_ms)).toISOString();
1905
+ return new Response(null, { status: 204, headers: withNosniff(headers) });
1906
+ }
1907
+ const remaining = deadline - Date.now();
1908
+ if (remaining <= 0) break;
1909
+ currentOffset = batch.nextOffset;
1910
+ await notifier.waitFor(stream, batch.endOffsetSeq, remaining, req.signal);
1911
+ if (req.signal.aborted) return new Response(null, { status: 204 });
1912
+ }
1913
+ const latest = db.getStream(stream);
1914
+ const latestTail = latest ? encodeOffset(latest.epoch, latest.next_offset - 1n) : currentOffset;
1915
+ const headers: Record<string, string> = {
1916
+ "stream-next-offset": latestTail,
1917
+ "stream-end-offset": latestTail,
1918
+ "stream-up-to-date": "true",
1919
+ "cache-control": "no-store",
1920
+ };
1921
+ if (latest && latest.closed !== 0) headers["stream-closed"] = "true";
1922
+ else headers["stream-cursor"] = computeCursor(Date.now(), cursorParam);
1923
+ if (latest && latest.expires_at_ms != null) headers["stream-expires-at"] = new Date(Number(latest.expires_at_ms)).toISOString();
1924
+ return new Response(null, { status: 204, headers: withNosniff(headers) });
1925
+ }
1926
+
1927
+ const batchRes = await reader.readResult({ stream, offset, key: key ?? null, format });
1928
+ if (Result.isError(batchRes)) return readerErrorResponse(batchRes.error);
1929
+ const batch = batchRes.value;
1930
+ const cacheControl = "immutable, max-age=31536000";
1931
+ return sendBatch(batch, cacheControl, true);
1932
+ }
1933
+
1934
+ return badRequest("unsupported method");
1935
+ }
1936
+
1937
+ return notFound();
1938
+ } catch (e: any) {
1939
+ const msg = String(e?.message ?? e);
1940
+ if (!closing && !msg.includes("Statement has finalized")) {
1941
+ // eslint-disable-next-line no-console
1942
+ console.error("request failed", e);
1943
+ }
1944
+ return internalError();
1945
+ }
1946
+ };
1947
+
1948
// Shut down the server instance. Sets `closing` first so the request handler's
// catch block stops logging errors from in-flight requests racing teardown
// (e.g. "Statement has finalized" from the closed DB).
// NOTE(review): the stop order appears deliberate — background producers/
// periodic tasks first, then the ingest pipeline, with db.close() strictly
// last since the components above presumably still touch the DB while
// flushing; do not reorder without confirming. The `true` passed to
// uploader.stop / segmenter.stop is an opaque flag here (likely "flush" or
// "force" — confirm against their definitions).
const close = () => {
  closing = true;
  touch.stop();
  uploader.stop(true);
  indexer?.stop();            // indexer is optional (may be disabled by config)
  segmenter.stop(true);
  metricsEmitter.stop();
  expirySweeper.stop();
  ingest.stop();
  memory.stop();
  db.close();                 // last: everything above may still use the DB
};
1960
+
1961
+ return {
1962
+ fetch,
1963
+ close,
1964
+ deps: {
1965
+ config: cfg,
1966
+ db,
1967
+ os: store,
1968
+ ingest,
1969
+ notifier,
1970
+ touchRoutingKeyNotifier,
1971
+ reader,
1972
+ segmenter,
1973
+ uploader,
1974
+ indexer,
1975
+ metrics,
1976
+ registry,
1977
+ touch,
1978
+ stats,
1979
+ backpressure,
1980
+ memory,
1981
+ },
1982
+ };
1983
+ }