@prisma/streams-server 0.0.1 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/CODE_OF_CONDUCT.md +45 -0
  2. package/CONTRIBUTING.md +68 -0
  3. package/LICENSE +201 -0
  4. package/README.md +39 -2
  5. package/SECURITY.md +33 -0
  6. package/bin/prisma-streams-server +2 -0
  7. package/package.json +29 -34
  8. package/src/app.ts +74 -0
  9. package/src/app_core.ts +1706 -0
  10. package/src/app_local.ts +46 -0
  11. package/src/backpressure.ts +66 -0
  12. package/src/bootstrap.ts +239 -0
  13. package/src/config.ts +251 -0
  14. package/src/db/db.ts +1386 -0
  15. package/src/db/schema.ts +625 -0
  16. package/src/expiry_sweeper.ts +44 -0
  17. package/src/hist.ts +169 -0
  18. package/src/index/binary_fuse.ts +379 -0
  19. package/src/index/indexer.ts +745 -0
  20. package/src/index/run_cache.ts +84 -0
  21. package/src/index/run_format.ts +213 -0
  22. package/src/ingest.ts +655 -0
  23. package/src/lens/lens.ts +501 -0
  24. package/src/manifest.ts +114 -0
  25. package/src/memory.ts +155 -0
  26. package/src/metrics.ts +161 -0
  27. package/src/metrics_emitter.ts +50 -0
  28. package/src/notifier.ts +64 -0
  29. package/src/objectstore/interface.ts +13 -0
  30. package/src/objectstore/mock_r2.ts +269 -0
  31. package/src/objectstore/null.ts +32 -0
  32. package/src/objectstore/r2.ts +128 -0
  33. package/src/offset.ts +70 -0
  34. package/src/reader.ts +454 -0
  35. package/src/runtime/hash.ts +156 -0
  36. package/src/runtime/hash_vendor/LICENSE.hash-wasm +38 -0
  37. package/src/runtime/hash_vendor/NOTICE.md +8 -0
  38. package/src/runtime/hash_vendor/xxhash3.umd.min.cjs +7 -0
  39. package/src/runtime/hash_vendor/xxhash32.umd.min.cjs +7 -0
  40. package/src/runtime/hash_vendor/xxhash64.umd.min.cjs +7 -0
  41. package/src/schema/lens_schema.ts +290 -0
  42. package/src/schema/proof.ts +547 -0
  43. package/src/schema/registry.ts +405 -0
  44. package/src/segment/cache.ts +179 -0
  45. package/src/segment/format.ts +331 -0
  46. package/src/segment/segmenter.ts +326 -0
  47. package/src/segment/segmenter_worker.ts +43 -0
  48. package/src/segment/segmenter_workers.ts +94 -0
  49. package/src/server.ts +326 -0
  50. package/src/sqlite/adapter.ts +164 -0
  51. package/src/stats.ts +205 -0
  52. package/src/touch/engine.ts +41 -0
  53. package/src/touch/interpreter_worker.ts +442 -0
  54. package/src/touch/live_keys.ts +118 -0
  55. package/src/touch/live_metrics.ts +827 -0
  56. package/src/touch/live_templates.ts +619 -0
  57. package/src/touch/manager.ts +1199 -0
  58. package/src/touch/spec.ts +456 -0
  59. package/src/touch/touch_journal.ts +671 -0
  60. package/src/touch/touch_key_id.ts +20 -0
  61. package/src/touch/worker_pool.ts +189 -0
  62. package/src/touch/worker_protocol.ts +56 -0
  63. package/src/types/proper-lockfile.d.ts +1 -0
  64. package/src/uploader.ts +317 -0
  65. package/src/util/base32_crockford.ts +81 -0
  66. package/src/util/bloom256.ts +67 -0
  67. package/src/util/cleanup.ts +22 -0
  68. package/src/util/crc32c.ts +29 -0
  69. package/src/util/ds_error.ts +15 -0
  70. package/src/util/duration.ts +17 -0
  71. package/src/util/endian.ts +53 -0
  72. package/src/util/json_pointer.ts +148 -0
  73. package/src/util/log.ts +25 -0
  74. package/src/util/lru.ts +45 -0
  75. package/src/util/retry.ts +35 -0
  76. package/src/util/siphash.ts +71 -0
  77. package/src/util/stream_paths.ts +31 -0
  78. package/src/util/time.ts +14 -0
  79. package/src/util/yield.ts +3 -0
  80. package/build/index.d.mts +0 -1
  81. package/build/index.d.ts +0 -1
  82. package/build/index.js +0 -0
  83. package/build/index.mjs +0 -1
package/src/db/db.ts ADDED
@@ -0,0 +1,1386 @@
1
+ import { initSchema } from "./schema.ts";
2
+ import { openSqliteDatabase, type SqliteDatabase, type SqliteStatement } from "../sqlite/adapter.ts";
3
+ import { Result } from "better-result";
4
+
5
+ export const STREAM_FLAG_DELETED = 1 << 0;
6
+ // Internal companion touch stream. Hidden from listing and not eligible for segmentation.
7
+ export const STREAM_FLAG_TOUCH = 1 << 1;
8
+
9
+ const BASE_WAL_GC_CHUNK_OFFSETS = (() => {
10
+ const raw = process.env.DS_BASE_WAL_GC_CHUNK_OFFSETS;
11
+ if (raw == null || raw.trim() === "") return 100_000;
12
+ const n = Number(raw);
13
+ if (!Number.isFinite(n) || n <= 0) return 100_000;
14
+ return Math.floor(n);
15
+ })();
16
+
17
/**
 * Row shape of the `streams` table (one row per logical stream).
 * 64-bit offset/counter columns surface as bigint; nullable columns as `T | null`.
 */
export type StreamRow = {
  stream: string;
  created_at_ms: bigint;
  updated_at_ms: bigint;
  content_type: string;
  stream_seq: string | null;
  // 0/1 flag: whether the stream has been closed by a producer.
  closed: number;
  closed_producer_id: string | null;
  closed_producer_epoch: number | null;
  closed_producer_seq: number | null;
  ttl_seconds: number | null;
  epoch: number;
  // Offset the next appended record will receive.
  next_offset: bigint;
  // Initialized to -1 by ensureStream (nothing sealed/uploaded yet).
  sealed_through: bigint;
  uploaded_through: bigint;
  uploaded_segment_count: number;
  // Rows/bytes appended but not yet cut into a segment.
  pending_rows: bigint;
  pending_bytes: bigint;
  // Rows/bytes currently retained in the WAL table.
  wal_rows: bigint;
  wal_bytes: bigint;
  last_append_ms: bigint;
  last_segment_cut_ms: bigint;
  // 0/1 claim flag used by tryClaimSegment to serialize segment cutting.
  segment_in_progress: number;
  expires_at_ms: bigint | null;
  // Bitwise OR of STREAM_FLAG_* values.
  stream_flags: number;
};
43
+
44
/**
 * Row shape of the `segments` table: one sealed, locally-materialized segment
 * of a stream. `uploaded_at_ms`/`r2_etag` stay NULL until the uploader marks
 * the segment uploaded (see markSegmentUploaded).
 */
export type SegmentRow = {
  segment_id: string;
  stream: string;
  // Monotonic per-stream index (see nextSegmentIndex).
  segment_index: number;
  // Inclusive offset range covered by this segment.
  start_offset: bigint;
  end_offset: bigint;
  block_count: number;
  last_append_ms: bigint;
  size_bytes: number;
  local_path: string;
  created_at_ms: bigint;
  uploaded_at_ms: bigint | null;
  r2_etag: string | null;
};
58
+
59
/**
 * Row shape of `stream_segment_meta`: compact per-stream segment metadata kept
 * as packed binary columns (appendSegmentMeta grows each blob by one
 * fixed-width entry per segment — see encodeU64Le/encodeU32Le).
 */
export type SegmentMetaRow = {
  stream: string;
  segment_count: number;
  segment_offsets: Uint8Array;
  segment_blocks: Uint8Array;
  segment_last_ts: Uint8Array;
};
66
+
67
/**
 * Row shape of `index_state`: per-stream indexing cursor plus the secret used
 * when building index filters.
 */
export type IndexStateRow = {
  stream: string;
  index_secret: Uint8Array;
  // Highest segment index covered by the index so far.
  indexed_through: number;
  updated_at_ms: bigint;
};
73
+
74
/**
 * Row shape of `index_runs`: one uploaded index run covering a contiguous
 * range of segments. A run is live while `retired_gen` is NULL; retirement
 * (see retireIndexRun) records the generation and timestamp instead of
 * deleting immediately.
 */
export type IndexRunRow = {
  run_id: string;
  stream: string;
  // Compaction level of the run.
  level: number;
  // Inclusive segment range covered.
  start_segment: number;
  end_segment: number;
  object_key: string;
  filter_len: number;
  record_count: number;
  retired_gen: number | null;
  retired_at_ms: bigint | null;
};
86
+
87
+ export class SqliteDurableStore {
88
  /** Underlying SQLite handle; public so callers can run ad-hoc queries. */
  public readonly db: SqliteDatabase;
  // Tri-state probe cache: null = not checked yet.
  // NOTE(review): not referenced in this portion of the file — presumably a
  // lazily-probed `dbstat` virtual-table availability flag; confirm below.
  private dbstatReady: boolean | null = null;

  // Prepared statements.
  private readonly stmts: {
    // Stream row CRUD.
    getStream: SqliteStatement;
    upsertStream: SqliteStatement;
    listStreams: SqliteStatement;
    setDeleted: SqliteStatement;

    // WAL append path.
    insertWal: SqliteStatement;

    updateStreamAppend: SqliteStatement;
    updateStreamAppendSeqCheck: SqliteStatement;

    // Segmentation candidates / expiry sweeping.
    candidateStreams: SqliteStatement;
    candidateStreamsNoInterval: SqliteStatement;
    listExpiredStreams: SqliteStatement;

    // WAL range reads.
    streamWalRange: SqliteStatement;
    streamWalRangeByKey: SqliteStatement;

    // Segment lifecycle.
    createSegment: SqliteStatement;
    listSegmentsForStream: SqliteStatement;
    getSegmentByIndex: SqliteStatement;
    findSegmentForOffset: SqliteStatement;
    nextSegmentIndex: SqliteStatement;
    markSegmentUploaded: SqliteStatement;
    pendingUploadSegments: SqliteStatement;
    countPendingSegments: SqliteStatement;
    tryClaimSegment: SqliteStatement;
    countSegmentsForStream: SqliteStatement;

    // Manifest bookkeeping.
    getManifest: SqliteStatement;
    upsertManifest: SqliteStatement;

    // Index state and runs.
    getIndexState: SqliteStatement;
    upsertIndexState: SqliteStatement;
    updateIndexedThrough: SqliteStatement;
    listIndexRuns: SqliteStatement;
    listIndexRunsAll: SqliteStatement;
    listRetiredIndexRuns: SqliteStatement;
    insertIndexRun: SqliteStatement;
    retireIndexRun: SqliteStatement;
    deleteIndexRun: SqliteStatement;
    countUploadedSegments: SqliteStatement;
    getSegmentMeta: SqliteStatement;
    ensureSegmentMeta: SqliteStatement;
    appendSegmentMeta: SqliteStatement;
    upsertSegmentMeta: SqliteStatement;
    setUploadedSegmentCount: SqliteStatement;

    // WAL garbage collection.
    advanceUploadedThrough: SqliteStatement;
    deleteWalBeforeOffset: SqliteStatement;

    // Schema registry and interpreters.
    getSchemaRegistry: SqliteStatement;
    upsertSchemaRegistry: SqliteStatement;
    getStreamInterpreter: SqliteStatement;
    upsertStreamInterpreter: SqliteStatement;
    deleteStreamInterpreter: SqliteStatement;
    listStreamInterpreters: SqliteStatement;
    countStreams: SqliteStatement;
    sumPendingBytes: SqliteStatement;
    sumPendingSegmentBytes: SqliteStatement;
  };
153
+
154
  /**
   * Open (or create) the store at `path`, run schema init/migrations, apply an
   * optional page-cache size, and prepare every statement used by this class.
   *
   * @param path        SQLite database file path.
   * @param opts.cacheBytes     Page-cache budget in bytes (converted to KiB).
   * @param opts.skipMigrations Forwarded to initSchema.
   */
  constructor(path: string, opts: { cacheBytes?: number; skipMigrations?: boolean } = {}) {
    this.db = openSqliteDatabase(path);
    initSchema(this.db, { skipMigrations: opts.skipMigrations });
    if (opts.cacheBytes && opts.cacheBytes > 0) {
      // SQLite: a negative cache_size is interpreted as a size in KiB.
      const kb = Math.max(1, Math.floor(opts.cacheBytes / 1024));
      this.db.exec(`PRAGMA cache_size = -${kb};`);
    }

    this.stmts = {
      // --- Stream row CRUD ---
      getStream: this.db.query(
        `SELECT stream, created_at_ms, updated_at_ms,
         content_type, stream_seq, closed, closed_producer_id, closed_producer_epoch, closed_producer_seq, ttl_seconds,
         epoch, next_offset, sealed_through, uploaded_through, uploaded_segment_count,
         pending_rows, pending_bytes, wal_rows, wal_bytes, last_append_ms, last_segment_cut_ms, segment_in_progress,
         expires_at_ms, stream_flags
         FROM streams WHERE stream = ? LIMIT 1;`
      ),
      // Note: the conflict clause only refreshes metadata columns (timestamps,
      // TTL, content type, flags) — counters/offsets are NOT overwritten.
      upsertStream: this.db.query(
        `INSERT INTO streams(stream, created_at_ms, updated_at_ms,
         content_type, stream_seq, closed, closed_producer_id, closed_producer_epoch, closed_producer_seq, ttl_seconds,
         epoch, next_offset, sealed_through, uploaded_through, uploaded_segment_count,
         pending_rows, pending_bytes, wal_rows, wal_bytes, last_append_ms, last_segment_cut_ms, segment_in_progress,
         expires_at_ms, stream_flags)
         VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
         ON CONFLICT(stream) DO UPDATE SET
           updated_at_ms=excluded.updated_at_ms,
           expires_at_ms=excluded.expires_at_ms,
           ttl_seconds=excluded.ttl_seconds,
           content_type=excluded.content_type,
           stream_flags=excluded.stream_flags;`
      ),
      listStreams: this.db.query(
        `SELECT stream, created_at_ms, updated_at_ms,
         content_type, stream_seq, closed, closed_producer_id, closed_producer_epoch, closed_producer_seq, ttl_seconds,
         epoch, next_offset, sealed_through, uploaded_through, uploaded_segment_count,
         pending_rows, pending_bytes, wal_rows, wal_bytes, last_append_ms, last_segment_cut_ms, segment_in_progress,
         expires_at_ms, stream_flags
         FROM streams
         WHERE (stream_flags & ?) = 0
           AND (expires_at_ms IS NULL OR expires_at_ms > ?)
         ORDER BY stream
         LIMIT ? OFFSET ?;`
      ),
      setDeleted: this.db.query(`UPDATE streams SET stream_flags = (stream_flags | ?), updated_at_ms=? WHERE stream=?;`),

      // --- WAL append path ---
      insertWal: this.db.query(
        `INSERT INTO wal(stream, offset, ts_ms, payload, payload_len, routing_key, content_type, flags)
         VALUES(?, ?, ?, ?, ?, ?, ?, ?);`
      ),

      updateStreamAppend: this.db.query(
        `UPDATE streams
         SET next_offset = ?, updated_at_ms = ?, last_append_ms = ?,
             pending_rows = pending_rows + ?, pending_bytes = pending_bytes + ?,
             wal_rows = wal_rows + ?, wal_bytes = wal_bytes + ?
         WHERE stream = ? AND (stream_flags & ?) = 0;`
      ),
      // Same as updateStreamAppend but additionally guards on the expected
      // next_offset (optimistic-concurrency check).
      updateStreamAppendSeqCheck: this.db.query(
        `UPDATE streams
         SET next_offset = ?, updated_at_ms = ?, last_append_ms = ?,
             pending_rows = pending_rows + ?, pending_bytes = pending_bytes + ?,
             wal_rows = wal_rows + ?, wal_bytes = wal_bytes + ?
         WHERE stream = ? AND (stream_flags & ?) = 0 AND next_offset = ?;`
      ),

      // --- Segmentation candidates / expiry sweeping ---
      candidateStreams: this.db.query(
        `SELECT stream, pending_bytes, pending_rows, last_segment_cut_ms, sealed_through, next_offset, epoch
         FROM streams
         WHERE (stream_flags & ?) = 0
           AND segment_in_progress = 0
           AND (pending_bytes >= ? OR pending_rows >= ? OR (? - last_segment_cut_ms) >= ?)
         ORDER BY pending_bytes DESC
         LIMIT ?;`
      ),
      candidateStreamsNoInterval: this.db.query(
        `SELECT stream, pending_bytes, pending_rows, last_segment_cut_ms, sealed_through, next_offset, epoch
         FROM streams
         WHERE (stream_flags & ?) = 0
           AND segment_in_progress = 0
           AND (pending_bytes >= ? OR pending_rows >= ?)
         ORDER BY pending_bytes DESC
         LIMIT ?;`
      ),
      listExpiredStreams: this.db.query(
        `SELECT stream
         FROM streams
         WHERE (stream_flags & ?) = 0
           AND expires_at_ms IS NOT NULL
           AND expires_at_ms <= ?
         ORDER BY expires_at_ms ASC
         LIMIT ?;`
      ),

      // --- WAL range reads ---
      streamWalRange: this.db.query(
        `SELECT offset, ts_ms, routing_key, content_type, payload
         FROM wal
         WHERE stream = ? AND offset >= ? AND offset <= ?
         ORDER BY offset ASC;`
      ),
      streamWalRangeByKey: this.db.query(
        `SELECT offset, ts_ms, routing_key, content_type, payload
         FROM wal
         WHERE stream = ? AND offset >= ? AND offset <= ? AND routing_key = ?
         ORDER BY offset ASC;`
      ),

      // --- Segment lifecycle ---
      createSegment: this.db.query(
        `INSERT INTO segments(segment_id, stream, segment_index, start_offset, end_offset, block_count,
         last_append_ms, size_bytes, local_path, created_at_ms, uploaded_at_ms, r2_etag)
         VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NULL, NULL);`
      ),
      listSegmentsForStream: this.db.query(
        `SELECT segment_id, stream, segment_index, start_offset, end_offset, block_count, last_append_ms, size_bytes,
         local_path, created_at_ms, uploaded_at_ms, r2_etag
         FROM segments WHERE stream=? ORDER BY segment_index ASC;`
      ),
      getSegmentByIndex: this.db.query(
        `SELECT segment_id, stream, segment_index, start_offset, end_offset, block_count, last_append_ms, size_bytes,
         local_path, created_at_ms, uploaded_at_ms, r2_etag
         FROM segments WHERE stream=? AND segment_index=? LIMIT 1;`
      ),
      findSegmentForOffset: this.db.query(
        `SELECT segment_id, stream, segment_index, start_offset, end_offset, block_count, last_append_ms, size_bytes,
         local_path, created_at_ms, uploaded_at_ms, r2_etag
         FROM segments
         WHERE stream=? AND start_offset <= ? AND end_offset >= ?
         ORDER BY segment_index DESC
         LIMIT 1;`
      ),
      nextSegmentIndex: this.db.query(
        `SELECT COALESCE(MAX(segment_index)+1, 0) as next_idx FROM segments WHERE stream=?;`
      ),
      markSegmentUploaded: this.db.query(
        `UPDATE segments SET r2_etag=?, uploaded_at_ms=? WHERE segment_id=?;`
      ),
      pendingUploadSegments: this.db.query(
        `SELECT segment_id, stream, segment_index, start_offset, end_offset, block_count, last_append_ms, size_bytes,
         local_path, created_at_ms, uploaded_at_ms, r2_etag
         FROM segments WHERE uploaded_at_ms IS NULL ORDER BY created_at_ms ASC LIMIT ?;`
      ),
      countPendingSegments: this.db.query(`SELECT COUNT(*) as cnt FROM segments WHERE uploaded_at_ms IS NULL;`),
      countSegmentsForStream: this.db.query(`SELECT COUNT(*) as cnt FROM segments WHERE stream=?;`),
      // Claim succeeds only when segment_in_progress was 0 (check affected rows).
      tryClaimSegment: this.db.query(
        `UPDATE streams SET segment_in_progress=1, updated_at_ms=? WHERE stream=? AND segment_in_progress=0;`
      ),

      // --- Manifest bookkeeping ---
      getManifest: this.db.query(`SELECT stream, generation, uploaded_generation, last_uploaded_at_ms, last_uploaded_etag FROM manifests WHERE stream=? LIMIT 1;`),
      upsertManifest: this.db.query(
        `INSERT INTO manifests(stream, generation, uploaded_generation, last_uploaded_at_ms, last_uploaded_etag)
         VALUES(?, ?, ?, ?, ?)
         ON CONFLICT(stream) DO UPDATE SET
           generation=excluded.generation,
           uploaded_generation=excluded.uploaded_generation,
           last_uploaded_at_ms=excluded.last_uploaded_at_ms,
           last_uploaded_etag=excluded.last_uploaded_etag;`
      ),

      // --- Index state and runs ---
      getIndexState: this.db.query(
        `SELECT stream, index_secret, indexed_through, updated_at_ms
         FROM index_state WHERE stream=? LIMIT 1;`
      ),
      upsertIndexState: this.db.query(
        `INSERT INTO index_state(stream, index_secret, indexed_through, updated_at_ms)
         VALUES(?, ?, ?, ?)
         ON CONFLICT(stream) DO UPDATE SET
           index_secret=excluded.index_secret,
           indexed_through=excluded.indexed_through,
           updated_at_ms=excluded.updated_at_ms;`
      ),
      updateIndexedThrough: this.db.query(
        `UPDATE index_state SET indexed_through=?, updated_at_ms=? WHERE stream=?;`
      ),
      // Live (non-retired) runs only.
      listIndexRuns: this.db.query(
        `SELECT run_id, stream, level, start_segment, end_segment, object_key, filter_len, record_count, retired_gen, retired_at_ms
         FROM index_runs WHERE stream=? AND retired_gen IS NULL
         ORDER BY start_segment ASC, level ASC;`
      ),
      listIndexRunsAll: this.db.query(
        `SELECT run_id, stream, level, start_segment, end_segment, object_key, filter_len, record_count, retired_gen, retired_at_ms
         FROM index_runs WHERE stream=?
         ORDER BY start_segment ASC, level ASC;`
      ),
      listRetiredIndexRuns: this.db.query(
        `SELECT run_id, stream, level, start_segment, end_segment, object_key, filter_len, record_count, retired_gen, retired_at_ms
         FROM index_runs WHERE stream=? AND retired_gen IS NOT NULL
         ORDER BY retired_at_ms ASC;`
      ),
      // OR IGNORE: re-inserting an existing run_id is a no-op.
      insertIndexRun: this.db.query(
        `INSERT OR IGNORE INTO index_runs(run_id, stream, level, start_segment, end_segment, object_key, filter_len, record_count, retired_gen, retired_at_ms)
         VALUES(?, ?, ?, ?, ?, ?, ?, ?, NULL, NULL);`
      ),
      retireIndexRun: this.db.query(
        `UPDATE index_runs SET retired_gen=?, retired_at_ms=? WHERE run_id=?;`
      ),
      deleteIndexRun: this.db.query(
        `DELETE FROM index_runs WHERE run_id=?;`
      ),
      // Despite the name, returns the MAX uploaded segment_index (-1 when none).
      countUploadedSegments: this.db.query(
        `SELECT COALESCE(MAX(segment_index), -1) as max_idx
         FROM segments WHERE stream=? AND r2_etag IS NOT NULL;`
      ),
      getSegmentMeta: this.db.query(
        `SELECT stream, segment_count, segment_offsets, segment_blocks, segment_last_ts
         FROM stream_segment_meta WHERE stream=? LIMIT 1;`
      ),
      // Seed an empty meta row (x'' = zero-length blob) if missing.
      ensureSegmentMeta: this.db.query(
        `INSERT INTO stream_segment_meta(stream, segment_count, segment_offsets, segment_blocks, segment_last_ts)
         VALUES(?, 0, x'', x'', x'')
         ON CONFLICT(stream) DO NOTHING;`
      ),
      // || on blobs appends one fixed-width entry per segment.
      appendSegmentMeta: this.db.query(
        `UPDATE stream_segment_meta
         SET segment_count = segment_count + 1,
             segment_offsets = segment_offsets || ?,
             segment_blocks = segment_blocks || ?,
             segment_last_ts = segment_last_ts || ?
         WHERE stream = ?;`
      ),
      upsertSegmentMeta: this.db.query(
        `INSERT INTO stream_segment_meta(stream, segment_count, segment_offsets, segment_blocks, segment_last_ts)
         VALUES(?, ?, ?, ?, ?)
         ON CONFLICT(stream) DO UPDATE SET
           segment_count=excluded.segment_count,
           segment_offsets=excluded.segment_offsets,
           segment_blocks=excluded.segment_blocks,
           segment_last_ts=excluded.segment_last_ts;`
      ),
      setUploadedSegmentCount: this.db.query(
        `UPDATE streams SET uploaded_segment_count=?, updated_at_ms=? WHERE stream=?;`
      ),

      // --- WAL garbage collection ---
      advanceUploadedThrough: this.db.query(
        `UPDATE streams SET uploaded_through=?, updated_at_ms=? WHERE stream=?;`
      ),
      deleteWalBeforeOffset: this.db.query(
        `DELETE FROM wal WHERE stream=? AND offset <= ?;`
      ),

      // --- Schema registry and interpreters ---
      getSchemaRegistry: this.db.query(`SELECT stream, schema_json, updated_at_ms FROM schemas WHERE stream=? LIMIT 1;`),
      upsertSchemaRegistry: this.db.query(
        `INSERT INTO schemas(stream, schema_json, updated_at_ms) VALUES(?, ?, ?)
         ON CONFLICT(stream) DO UPDATE SET schema_json=excluded.schema_json, updated_at_ms=excluded.updated_at_ms;`
      ),
      getStreamInterpreter: this.db.query(
        `SELECT stream, interpreted_through, updated_at_ms
         FROM stream_interpreters WHERE stream=? LIMIT 1;`
      ),
      upsertStreamInterpreter: this.db.query(
        `INSERT INTO stream_interpreters(stream, interpreted_through, updated_at_ms)
         VALUES(?, ?, ?)
         ON CONFLICT(stream) DO UPDATE SET
           interpreted_through=excluded.interpreted_through,
           updated_at_ms=excluded.updated_at_ms;`
      ),
      deleteStreamInterpreter: this.db.query(`DELETE FROM stream_interpreters WHERE stream=?;`),
      listStreamInterpreters: this.db.query(
        `SELECT stream, interpreted_through, updated_at_ms
         FROM stream_interpreters
         ORDER BY stream ASC;`
      ),
      countStreams: this.db.query(`SELECT COUNT(*) as cnt FROM streams WHERE (stream_flags & ?) = 0;`),
      sumPendingBytes: this.db.query(`SELECT COALESCE(SUM(pending_bytes), 0) as total FROM streams;`),
      sumPendingSegmentBytes: this.db.query(`SELECT COALESCE(SUM(size_bytes), 0) as total FROM segments WHERE uploaded_at_ms IS NULL;`),
    };
  }
419
+
420
+ private toBigInt(v: any): bigint {
421
+ return typeof v === "bigint" ? v : BigInt(v);
422
+ }
423
+
424
+ private bindInt(v: bigint): number | string {
425
+ const max = BigInt(Number.MAX_SAFE_INTEGER);
426
+ const min = BigInt(Number.MIN_SAFE_INTEGER);
427
+ if (v <= max && v >= min) return Number(v);
428
+ return v.toString();
429
+ }
430
+
431
+ private encodeU64Le(value: bigint): Uint8Array {
432
+ const buf = new Uint8Array(8);
433
+ const dv = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
434
+ dv.setBigUint64(0, value, true);
435
+ return buf;
436
+ }
437
+
438
+ private encodeU32Le(value: number): Uint8Array {
439
+ const buf = new Uint8Array(4);
440
+ const dv = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
441
+ dv.setUint32(0, value >>> 0, true);
442
+ return buf;
443
+ }
444
+
445
  /**
   * Map a raw SQLite row onto the canonical StreamRow shape: String()/Number()
   * for small columns, bigint for 64-bit counters, null preserved for
   * nullable columns.
   */
  private coerceStreamRow(row: any): StreamRow {
    return {
      stream: String(row.stream),
      created_at_ms: this.toBigInt(row.created_at_ms),
      updated_at_ms: this.toBigInt(row.updated_at_ms),
      content_type: String(row.content_type),
      stream_seq: row.stream_seq == null ? null : String(row.stream_seq),
      closed: Number(row.closed),
      closed_producer_id: row.closed_producer_id == null ? null : String(row.closed_producer_id),
      closed_producer_epoch: row.closed_producer_epoch == null ? null : Number(row.closed_producer_epoch),
      closed_producer_seq: row.closed_producer_seq == null ? null : Number(row.closed_producer_seq),
      ttl_seconds: row.ttl_seconds == null ? null : Number(row.ttl_seconds),
      epoch: Number(row.epoch),
      next_offset: this.toBigInt(row.next_offset),
      sealed_through: this.toBigInt(row.sealed_through),
      uploaded_through: this.toBigInt(row.uploaded_through),
      // `?? 0` fallbacks below: presumably these columns were added in a later
      // schema version and may be NULL on old rows — confirm against schema.ts.
      uploaded_segment_count: Number(row.uploaded_segment_count ?? 0),
      pending_rows: this.toBigInt(row.pending_rows),
      pending_bytes: this.toBigInt(row.pending_bytes),
      wal_rows: this.toBigInt(row.wal_rows ?? 0),
      wal_bytes: this.toBigInt(row.wal_bytes ?? 0),
      last_append_ms: this.toBigInt(row.last_append_ms),
      last_segment_cut_ms: this.toBigInt(row.last_segment_cut_ms),
      segment_in_progress: Number(row.segment_in_progress),
      expires_at_ms: row.expires_at_ms == null ? null : this.toBigInt(row.expires_at_ms),
      stream_flags: Number(row.stream_flags),
    };
  }
473
+
474
  /**
   * Map a raw SQLite row onto the canonical SegmentRow shape (offsets and
   * timestamps as bigint; uploaded_at_ms/r2_etag stay null until upload).
   */
  private coerceSegmentRow(row: any): SegmentRow {
    return {
      segment_id: String(row.segment_id),
      stream: String(row.stream),
      segment_index: Number(row.segment_index),
      start_offset: this.toBigInt(row.start_offset),
      end_offset: this.toBigInt(row.end_offset),
      block_count: Number(row.block_count),
      last_append_ms: this.toBigInt(row.last_append_ms),
      size_bytes: Number(row.size_bytes),
      local_path: String(row.local_path),
      created_at_ms: this.toBigInt(row.created_at_ms),
      uploaded_at_ms: row.uploaded_at_ms == null ? null : this.toBigInt(row.uploaded_at_ms),
      r2_etag: row.r2_etag == null ? null : String(row.r2_etag),
    };
  }
490
+
491
+ close(): void {
492
+ this.db.close();
493
+ }
494
+
495
+ nowMs(): bigint {
496
+ return BigInt(Date.now());
497
+ }
498
+
499
+ isDeleted(row: StreamRow): boolean {
500
+ return (row.stream_flags & STREAM_FLAG_DELETED) !== 0;
501
+ }
502
+
503
+ getStream(stream: string): StreamRow | null {
504
+ const row = this.stmts.getStream.get(stream) as any;
505
+ return row ? this.coerceStreamRow(row) : null;
506
+ }
507
+
508
  /**
   * Return the existing stream row, or create it (plus its manifest row and
   * empty segment-meta row) with zeroed counters and sentinel -1 offsets.
   *
   * NOTE(review): the INSERT below omits wal_rows/wal_bytes, so it relies on
   * schema defaults for those columns — confirm against schema.ts.
   */
  ensureStream(
    stream: string,
    opts?: {
      contentType?: string;
      expiresAtMs?: bigint | null;
      ttlSeconds?: number | null;
      closed?: boolean;
      closedProducer?: { id: string; epoch: number; seq: number } | null;
      streamFlags?: number;
    }
  ): StreamRow {
    const existing = this.getStream(stream);
    if (existing) return existing;

    const now = this.nowMs();
    const epoch = 0;
    const nextOffset = 0n;
    const contentType = opts?.contentType ?? "application/octet-stream";
    const closed = opts?.closed ? 1 : 0;
    const closedProducer = opts?.closedProducer ?? null;
    const expiresAtMs = opts?.expiresAtMs ?? null;
    const ttlSeconds = opts?.ttlSeconds ?? null;
    const streamFlags = opts?.streamFlags ?? 0;

    this.db
      .query(
        `INSERT INTO streams(
          stream, created_at_ms, updated_at_ms,
          content_type, stream_seq, closed, closed_producer_id, closed_producer_epoch, closed_producer_seq, ttl_seconds,
          epoch, next_offset, sealed_through, uploaded_through, uploaded_segment_count,
          pending_rows, pending_bytes, last_append_ms, last_segment_cut_ms, segment_in_progress,
          expires_at_ms, stream_flags
        )
        VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);`
      )
      .run(
        stream,
        now,                                             // created_at_ms
        now,                                             // updated_at_ms
        contentType,
        null,                                            // stream_seq
        closed,
        closedProducer ? closedProducer.id : null,
        closedProducer ? closedProducer.epoch : null,
        closedProducer ? closedProducer.seq : null,
        ttlSeconds,
        epoch,
        nextOffset,
        -1n,                                             // sealed_through: nothing sealed yet
        -1n,                                             // uploaded_through: nothing uploaded yet
        0,                                               // uploaded_segment_count
        0n,                                              // pending_rows
        0n,                                              // pending_bytes
        now,                                             // last_append_ms
        now,                                             // last_segment_cut_ms
        0,                                               // segment_in_progress
        expiresAtMs,
        streamFlags
      );

    // Companion rows: manifest at generation 0 and an empty segment-meta row.
    this.stmts.upsertManifest.run(stream, 0, 0, null, null);
    this.ensureSegmentMeta(stream);
    return this.getStream(stream)!;
  }
572
+
573
  /**
   * Write a previously-captured StreamRow back via the upsertStream statement
   * (positional args must match the 24-column INSERT order exactly).
   *
   * NOTE(review): upsertStream's ON CONFLICT clause only refreshes metadata
   * columns, so when the row already exists, offsets/counters are NOT
   * restored — presumably this is only called when the row is absent; confirm
   * at the call sites.
   */
  restoreStreamRow(row: StreamRow): void {
    this.stmts.upsertStream.run(
      row.stream,
      row.created_at_ms,
      row.updated_at_ms,
      row.content_type,
      row.stream_seq,
      row.closed,
      row.closed_producer_id,
      row.closed_producer_epoch,
      row.closed_producer_seq,
      row.ttl_seconds,
      row.epoch,
      row.next_offset,
      row.sealed_through,
      row.uploaded_through,
      row.uploaded_segment_count,
      row.pending_rows,
      row.pending_bytes,
      row.wal_rows,
      row.wal_bytes,
      row.last_append_ms,
      row.last_segment_cut_ms,
      row.segment_in_progress,
      row.expires_at_ms,
      row.stream_flags
    );
  }
601
+
602
+ listStreams(limit: number, offset: number): StreamRow[] {
603
+ const now = this.nowMs();
604
+ const rows = this.stmts.listStreams.all(STREAM_FLAG_DELETED | STREAM_FLAG_TOUCH, now, limit, offset) as any[];
605
+ return rows.map((r) => this.coerceStreamRow(r));
606
+ }
607
+
608
+ listExpiredStreams(limit: number): string[] {
609
+ const now = this.nowMs();
610
+ const rows = this.stmts.listExpiredStreams.all(STREAM_FLAG_DELETED | STREAM_FLAG_TOUCH, now, limit) as any[];
611
+ return rows.map((r) => String(r.stream));
612
+ }
613
+
614
+ deleteStream(stream: string): boolean {
615
+ const existing = this.getStream(stream);
616
+ if (!existing) return false;
617
+ const now = this.nowMs();
618
+ this.stmts.setDeleted.run(STREAM_FLAG_DELETED, now, stream);
619
+ return true;
620
+ }
621
+
622
+ hardDeleteStream(stream: string): boolean {
623
+ const tx = this.db.transaction(() => {
624
+ const existing = this.getStream(stream);
625
+ if (!existing) return false;
626
+ this.db.query(`DELETE FROM wal WHERE stream=?;`).run(stream);
627
+ this.db.query(`DELETE FROM segments WHERE stream=?;`).run(stream);
628
+ this.db.query(`DELETE FROM manifests WHERE stream=?;`).run(stream);
629
+ this.db.query(`DELETE FROM schemas WHERE stream=?;`).run(stream);
630
+ this.db.query(`DELETE FROM stream_interpreters WHERE stream=?;`).run(stream);
631
+ this.db.query(`DELETE FROM live_templates WHERE stream=?;`).run(stream);
632
+ this.db.query(`DELETE FROM producer_state WHERE stream=?;`).run(stream);
633
+ this.db.query(`DELETE FROM index_state WHERE stream=?;`).run(stream);
634
+ this.db.query(`DELETE FROM index_runs WHERE stream=?;`).run(stream);
635
+ this.db.query(`DELETE FROM stream_segment_meta WHERE stream=?;`).run(stream);
636
+ this.db.query(`DELETE FROM streams WHERE stream=?;`).run(stream);
637
+ return true;
638
+ });
639
+ return tx();
640
+ }
641
+
642
+ getSchemaRegistry(stream: string): { stream: string; registry_json: string; updated_at_ms: bigint } | null {
643
+ const row = this.stmts.getSchemaRegistry.get(stream) as any;
644
+ if (!row) return null;
645
+ return { stream: String(row.stream), registry_json: String(row.schema_json), updated_at_ms: this.toBigInt(row.updated_at_ms) };
646
+ }
647
+
648
+ upsertSchemaRegistry(stream: string, registryJson: string): void {
649
+ this.stmts.upsertSchemaRegistry.run(stream, registryJson, this.nowMs());
650
+ }
651
+
652
+ getStreamInterpreter(stream: string): { stream: string; interpreted_through: bigint; updated_at_ms: bigint } | null {
653
+ const row = this.stmts.getStreamInterpreter.get(stream) as any;
654
+ if (!row) return null;
655
+ return {
656
+ stream: String(row.stream),
657
+ interpreted_through: this.toBigInt(row.interpreted_through),
658
+ updated_at_ms: this.toBigInt(row.updated_at_ms),
659
+ };
660
+ }
661
+
662
+ listStreamInterpreters(): Array<{ stream: string; interpreted_through: bigint; updated_at_ms: bigint }> {
663
+ const rows = this.stmts.listStreamInterpreters.all() as any[];
664
+ return rows.map((row) => ({
665
+ stream: String(row.stream),
666
+ interpreted_through: this.toBigInt(row.interpreted_through),
667
+ updated_at_ms: this.toBigInt(row.updated_at_ms),
668
+ }));
669
+ }
670
+
671
+ ensureStreamInterpreter(stream: string): void {
672
+ const existing = this.getStreamInterpreter(stream);
673
+ if (existing) return;
674
+ const srow = this.getStream(stream);
675
+ const initialThrough = srow ? srow.next_offset - 1n : -1n;
676
+ this.stmts.upsertStreamInterpreter.run(stream, this.bindInt(initialThrough), this.nowMs());
677
+ }
678
+
679
+ updateStreamInterpreterThrough(stream: string, interpretedThrough: bigint): void {
680
+ this.stmts.upsertStreamInterpreter.run(stream, this.bindInt(interpretedThrough), this.nowMs());
681
+ }
682
+
683
+ deleteStreamInterpreter(stream: string): void {
684
+ this.stmts.deleteStreamInterpreter.run(stream);
685
+ }
686
+
687
+ addStreamFlags(stream: string, flags: number): void {
688
+ if (!Number.isFinite(flags) || flags <= 0) return;
689
+ this.db.query(`UPDATE streams SET stream_flags = (stream_flags | ?), updated_at_ms=? WHERE stream=?;`).run(flags, this.nowMs(), stream);
690
+ }
691
+
692
+ getWalOldestOffset(stream: string): bigint | null {
693
+ const row = this.db.query(`SELECT MIN(offset) as min_off FROM wal WHERE stream=?;`).get(stream) as any;
694
+ if (!row || row.min_off == null) return null;
695
+ return this.toBigInt(row.min_off);
696
+ }
697
+
698
  /**
   * Trim a WAL-only stream by age (in ms), leaving at least 1 record if the stream is non-empty.
   *
   * This is primarily intended for internal companion touch streams which are not segmented/uploaded.
   *
   * @param stream - stream name whose WAL rows are trimmed.
   * @param maxAgeMs - rows strictly older than `now - maxAgeMs` are eligible; 0 keeps only the newest row.
   * @returns counts of trimmed rows/bytes (clamped to MAX_SAFE_INTEGER) and the first kept offset, or
   *          null keptFromOffset when nothing was examined (empty stream / invalid age).
   */
  trimWalByAge(stream: string, maxAgeMs: number): { trimmedRows: number; trimmedBytes: number; keptFromOffset: bigint | null } {
    // NaN/Infinity propagate through floor/max and fail the isFinite check -> no-op.
    const ageMs = Math.max(0, Math.floor(maxAgeMs));
    if (!Number.isFinite(ageMs)) return { trimmedRows: 0, trimmedBytes: 0, keptFromOffset: null };

    const tx = this.db.transaction(() => {
      // Newest row anchors the "keep at least one" guarantee.
      const lastRow = this.db.query(`SELECT offset, ts_ms FROM wal WHERE stream=? ORDER BY offset DESC LIMIT 1;`).get(stream) as any;
      if (!lastRow || lastRow.offset == null) return { trimmedRows: 0, trimmedBytes: 0, keptFromOffset: null };
      const lastOffset = this.toBigInt(lastRow.offset);

      let keepFromOffset: bigint;
      if (ageMs === 0) {
        // maxAgeMs=0 means "keep only the newest row" (still leaving 1 record).
        keepFromOffset = lastOffset;
      } else {
        const cutoff = this.nowMs() - BigInt(ageMs);
        // First row young enough to keep; if every row is older than the cutoff,
        // fall back to the newest row so the stream is never fully emptied.
        const keepRow = this.db
          .query(`SELECT offset FROM wal WHERE stream=? AND ts_ms >= ? ORDER BY offset ASC LIMIT 1;`)
          .get(stream, this.bindInt(cutoff)) as any;
        keepFromOffset = keepRow && keepRow.offset != null ? this.toBigInt(keepRow.offset) : lastOffset;
      }

      // keepFromOffset <= 0 means nothing precedes the kept row; nothing to delete.
      if (keepFromOffset <= 0n) return { trimmedRows: 0, trimmedBytes: 0, keptFromOffset: keepFromOffset };

      // Measure what will be deleted before deleting, so counters can be adjusted.
      const stats = this.db
        .query(
          `SELECT COALESCE(SUM(payload_len), 0) as bytes, COUNT(*) as rows
           FROM wal WHERE stream=? AND offset < ?;`
        )
        .get(stream, this.bindInt(keepFromOffset)) as any;
      const bytes = this.toBigInt(stats?.bytes ?? 0);
      const rows = this.toBigInt(stats?.rows ?? 0);
      if (rows <= 0n) return { trimmedRows: 0, trimmedBytes: 0, keptFromOffset: keepFromOffset };

      this.db.query(`DELETE FROM wal WHERE stream=? AND offset < ?;`).run(stream, this.bindInt(keepFromOffset));

      // Touch streams are WAL-only: pending_* tracks WAL payload bytes/rows. Keep it consistent for stats/backpressure.
      // CASE guards clamp at zero so counters never underflow.
      const now = this.nowMs();
      this.db.query(
        `UPDATE streams
           SET pending_bytes = CASE WHEN pending_bytes >= ? THEN pending_bytes - ? ELSE 0 END,
               pending_rows = CASE WHEN pending_rows >= ? THEN pending_rows - ? ELSE 0 END,
               wal_bytes = CASE WHEN wal_bytes >= ? THEN wal_bytes - ? ELSE 0 END,
               wal_rows = CASE WHEN wal_rows >= ? THEN wal_rows - ? ELSE 0 END,
               updated_at_ms = ?
           WHERE stream = ?;`
      ).run(bytes, bytes, rows, rows, bytes, bytes, rows, rows, now, stream);

      // Clamp bigint counters into JS number range for the caller.
      const trimmedBytes = bytes <= BigInt(Number.MAX_SAFE_INTEGER) ? Number(bytes) : Number.MAX_SAFE_INTEGER;
      const trimmedRows = rows <= BigInt(Number.MAX_SAFE_INTEGER) ? Number(rows) : Number.MAX_SAFE_INTEGER;
      return { trimmedRows, trimmedBytes, keptFromOffset: keepFromOffset };
    });
    return tx();
  }
756
+
757
+ countStreams(): number {
758
+ const row = this.stmts.countStreams.get(STREAM_FLAG_DELETED | STREAM_FLAG_TOUCH) as any;
759
+ return row ? Number(row.cnt) : 0;
760
+ }
761
+
762
+ sumPendingBytes(): number {
763
+ const row = this.stmts.sumPendingBytes.get() as any;
764
+ const total = row?.total ?? 0;
765
+ return Number(this.toBigInt(total));
766
+ }
767
+
768
+ sumPendingSegmentBytes(): number {
769
+ const row = this.stmts.sumPendingSegmentBytes.get() as any;
770
+ const total = row?.total ?? 0;
771
+ return Number(this.toBigInt(total));
772
+ }
773
+
774
+ private ensureDbStat(): boolean {
775
+ if (this.dbstatReady != null) return this.dbstatReady;
776
+ try {
777
+ this.db.exec("CREATE VIRTUAL TABLE IF NOT EXISTS temp.dbstat USING dbstat;");
778
+ this.dbstatReady = true;
779
+ } catch {
780
+ this.dbstatReady = false;
781
+ }
782
+ return this.dbstatReady;
783
+ }
784
+
785
+ private estimateWalBytes(): number {
786
+ try {
787
+ const row = this.db.query(
788
+ `SELECT
789
+ COALESCE(SUM(payload_len), 0) as payload,
790
+ COALESCE(SUM(LENGTH(routing_key)), 0) as rk,
791
+ COALESCE(SUM(LENGTH(content_type)), 0) as ct
792
+ FROM wal;`
793
+ ).get() as any;
794
+ return Number(row?.payload ?? 0) + Number(row?.rk ?? 0) + Number(row?.ct ?? 0);
795
+ } catch {
796
+ return 0;
797
+ }
798
+ }
799
+
800
  /**
   * Estimate metadata size by summing the variable-length text/blob columns of
   * the streams, segments, manifests, schemas and producer_state tables.
   * Fixed-width columns are ignored, so this undercounts actual page usage —
   * it is only a fallback when dbstat is unavailable. Best-effort: returns 0 on error.
   */
  private estimateMetaBytes(): number {
    try {
      const streams = this.db.query(
        `SELECT
           COALESCE(SUM(LENGTH(stream)), 0) as stream,
           COALESCE(SUM(LENGTH(content_type)), 0) as content_type,
           COALESCE(SUM(LENGTH(stream_seq)), 0) as stream_seq,
           COALESCE(SUM(LENGTH(closed_producer_id)), 0) as closed_producer_id
         FROM streams;`
      ).get() as any;
      const segments = this.db.query(
        `SELECT
           COALESCE(SUM(LENGTH(segment_id)), 0) as segment_id,
           COALESCE(SUM(LENGTH(stream)), 0) as stream,
           COALESCE(SUM(LENGTH(local_path)), 0) as local_path,
           COALESCE(SUM(LENGTH(r2_etag)), 0) as r2_etag
         FROM segments;`
      ).get() as any;
      const manifests = this.db.query(
        `SELECT
           COALESCE(SUM(LENGTH(stream)), 0) as stream,
           COALESCE(SUM(LENGTH(last_uploaded_etag)), 0) as last_uploaded_etag
         FROM manifests;`
      ).get() as any;
      const schemas = this.db.query(`SELECT COALESCE(SUM(LENGTH(schema_json)), 0) as schema_json FROM schemas;`).get() as any;
      const producers = this.db.query(
        `SELECT
           COALESCE(SUM(LENGTH(stream)), 0) as stream,
           COALESCE(SUM(LENGTH(producer_id)), 0) as producer_id
         FROM producer_state;`
      ).get() as any;
      // Sum every measured column; `?? 0` guards missing rows from empty tables.
      const total =
        Number(streams?.stream ?? 0) +
        Number(streams?.content_type ?? 0) +
        Number(streams?.stream_seq ?? 0) +
        Number(streams?.closed_producer_id ?? 0) +
        Number(segments?.segment_id ?? 0) +
        Number(segments?.stream ?? 0) +
        Number(segments?.local_path ?? 0) +
        Number(segments?.r2_etag ?? 0) +
        Number(manifests?.stream ?? 0) +
        Number(manifests?.last_uploaded_etag ?? 0) +
        Number(schemas?.schema_json ?? 0) +
        Number(producers?.stream ?? 0) +
        Number(producers?.producer_id ?? 0);
      return total;
    } catch {
      return 0;
    }
  }
850
+
851
+ getWalDbSizeBytes(): number {
852
+ if (this.ensureDbStat()) {
853
+ try {
854
+ const row = this.db.query(`SELECT COALESCE(SUM(pgsize), 0) as total FROM temp.dbstat WHERE name = 'wal';`).get() as any;
855
+ return Number(row?.total ?? 0);
856
+ } catch {
857
+ // fall through
858
+ }
859
+ }
860
+ return this.estimateWalBytes();
861
+ }
862
+
863
+ getMetaDbSizeBytes(): number {
864
+ if (this.ensureDbStat()) {
865
+ try {
866
+ const row = this.db
867
+ .query(`SELECT COALESCE(SUM(pgsize), 0) as total FROM temp.dbstat WHERE name != 'wal';`)
868
+ .get() as any;
869
+ return Number(row?.total ?? 0);
870
+ } catch {
871
+ // fall through
872
+ }
873
+ }
874
+ return this.estimateMetaBytes();
875
+ }
876
+
877
  /**
   * Append rows into WAL inside a transaction.
   *
   * Returns the last offset written.
   *
   * Error cases: no_rows (empty batch), stream_missing (absent or flagged
   * deleted), stream_expired (past expires_at_ms), seq_mismatch (caller's
   * expectedOffset differs from the stream's next_offset — the expected next
   * offset is returned so the caller can resync).
   */
  appendWalRows(args: {
    stream: string;
    startOffset: bigint;
    expectedOffset?: bigint;
    baseAppendMs: bigint;
    rows: Array<{ routingKey: Uint8Array | null; contentType: string | null; payload: Uint8Array; appendMs: bigint }>;
  }): Result<
    { lastOffset: bigint },
    { kind: "no_rows" | "stream_missing" | "stream_expired" } | { kind: "seq_mismatch"; expectedNext: bigint }
  > {
    const { stream, startOffset, expectedOffset, rows } = args;
    if (rows.length === 0) return Result.err({ kind: "no_rows" });

    const tx = this.db.transaction(() => {
      // Re-validate stream state inside the transaction so checks and inserts are atomic.
      const st = this.getStream(stream);
      if (!st || this.isDeleted(st)) return Result.err({ kind: "stream_missing" as const });
      if (st.expires_at_ms != null && this.nowMs() > st.expires_at_ms) return Result.err({ kind: "stream_expired" as const });

      // Optimistic-concurrency check: only applied when the caller supplied an expectation.
      if (expectedOffset !== undefined && st.next_offset !== expectedOffset) {
        return Result.err({ kind: "seq_mismatch" as const, expectedNext: st.next_offset });
      }

      // Insert rows at consecutive offsets starting from startOffset.
      let totalBytes = 0n;
      let offset = startOffset;
      for (const r of rows) {
        const payloadLen = r.payload.byteLength;
        totalBytes += BigInt(payloadLen);
        this.stmts.insertWal.run(stream, offset, r.appendMs, r.payload, payloadLen, r.routingKey, r.contentType, 0);
        offset += 1n;
      }

      const lastOffset = offset - 1n;
      const newNextOffset = lastOffset + 1n;
      const now = this.nowMs();
      const pendingRows = BigInt(rows.length);
      const lastAppend = rows[rows.length - 1].appendMs;

      // Advance next_offset and bump pending/WAL counters in one statement
      // (pendingRows/totalBytes are bound twice — presumably for the pending_*
      // and wal_* columns in the prepared SQL; verify against stmt definition).
      this.stmts.updateStreamAppend.run(
        newNextOffset,
        now,
        lastAppend,
        pendingRows,
        totalBytes,
        pendingRows,
        totalBytes,
        stream,
        STREAM_FLAG_DELETED
      );

      return Result.ok({ lastOffset });
    });

    return tx();
  }
936
+
937
  /**
   * Query WAL rows within a range.
   * Uses iterate() for bounded memory.
   *
   * @param startOffset - inclusive lower bound.
   * @param endOffset - inclusive upper bound.
   * @param routingKey - optional exact-match filter on routing_key.
   * @yields raw WAL rows (offset, ts_ms, routing_key, content_type, payload) in offset order.
   */
  *iterWalRange(stream: string, startOffset: bigint, endOffset: bigint, routingKey?: Uint8Array): Generator<any, void, void> {
    const start = this.bindInt(startOffset);
    const end = this.bindInt(endOffset);
    // Two statement shapes: with or without the routing_key predicate.
    const stmt = routingKey
      ? this.db.query(
          `SELECT offset, ts_ms, routing_key, content_type, payload\n FROM wal\n WHERE stream = ? AND offset >= ? AND offset <= ? AND routing_key = ?\n ORDER BY offset ASC;`
        )
      : this.db.query(
          `SELECT offset, ts_ms, routing_key, content_type, payload\n FROM wal\n WHERE stream = ? AND offset >= ? AND offset <= ?\n ORDER BY offset ASC;`
        );
    try {
      const it = routingKey
        ? (stmt.iterate(stream, start, end, routingKey) as any)
        : (stmt.iterate(stream, start, end) as any);
      for (const row of it) {
        yield row;
      }
    } finally {
      // Finalize even if the consumer abandons the generator early; finalize
      // may not exist on all driver versions, hence the optional call + swallow.
      try {
        stmt.finalize?.();
      } catch {
        // ignore
      }
    }
  }
966
+
967
+ nextSegmentIndexForStream(stream: string): number {
968
+ const row = this.stmts.nextSegmentIndex.get(stream) as any;
969
+ return Number(row?.next_idx ?? 0);
970
+ }
971
+
972
  /**
   * Insert a new segment row. Argument order must match the createSegment
   * prepared statement's positional placeholders; created_at is stamped here.
   */
  createSegmentRow(row: {
    segmentId: string;
    stream: string;
    segmentIndex: number;
    startOffset: bigint;
    endOffset: bigint;
    blockCount: number;
    lastAppendMs: bigint;
    sizeBytes: number;
    localPath: string;
  }): void {
    this.stmts.createSegment.run(
      row.segmentId,
      row.stream,
      row.segmentIndex,
      row.startOffset,
      row.endOffset,
      row.blockCount,
      row.lastAppendMs,
      row.sizeBytes,
      row.localPath,
      this.nowMs()
    );
  }
996
+
997
  /**
   * Atomically record a freshly sealed segment: insert its row, append the
   * packed per-segment metadata (end offset + 1, block count, last-append
   * timestamp converted ms -> ns), and advance the stream's sealed_through /
   * pending counters — all in one transaction so readers never see a
   * half-committed seal.
   */
  commitSealedSegment(row: {
    segmentId: string;
    stream: string;
    segmentIndex: number;
    startOffset: bigint;
    endOffset: bigint;
    blockCount: number;
    lastAppendMs: bigint;
    sizeBytes: number;
    localPath: string;
    payloadBytes: bigint;
    rowsSealed: bigint;
  }): void {
    const tx = this.db.transaction(() => {
      this.createSegmentRow(row);
      // Segment meta stores end_offset+1 and nanosecond timestamps (ms * 1e6).
      this.appendSegmentMeta(row.stream, row.endOffset + 1n, row.blockCount, row.lastAppendMs * 1_000_000n);
      this.setStreamSealedThrough(row.stream, row.endOffset, row.payloadBytes, row.rowsSealed);
    });
    tx();
  }
1017
+
1018
+ listSegmentsForStream(stream: string): SegmentRow[] {
1019
+ const rows = this.stmts.listSegmentsForStream.all(stream) as any[];
1020
+ return rows.map((r) => this.coerceSegmentRow(r));
1021
+ }
1022
+
1023
+ getSegmentByIndex(stream: string, segmentIndex: number): SegmentRow | null {
1024
+ const row = this.stmts.getSegmentByIndex.get(stream, segmentIndex) as any;
1025
+ return row ? this.coerceSegmentRow(row) : null;
1026
+ }
1027
+
1028
+ findSegmentForOffset(stream: string, offset: bigint): SegmentRow | null {
1029
+ const bound = this.bindInt(offset);
1030
+ const row = this.stmts.findSegmentForOffset.get(stream, bound, bound) as any;
1031
+ return row ? this.coerceSegmentRow(row) : null;
1032
+ }
1033
+
1034
+ pendingUploadSegments(limit: number): SegmentRow[] {
1035
+ const rows = this.stmts.pendingUploadSegments.all(limit) as any[];
1036
+ return rows.map((r) => this.coerceSegmentRow(r));
1037
+ }
1038
+
1039
+ countPendingSegments(): number {
1040
+ const row = this.stmts.countPendingSegments.get() as any;
1041
+ return row ? Number(row.cnt) : 0;
1042
+ }
1043
+
1044
+ countSegmentsForStream(stream: string): number {
1045
+ const row = this.stmts.countSegmentsForStream.get(stream) as any;
1046
+ return row ? Number(row.cnt) : 0;
1047
+ }
1048
+
1049
+ getSegmentMeta(stream: string): SegmentMetaRow | null {
1050
+ const row = this.stmts.getSegmentMeta.get(stream) as any;
1051
+ if (!row) return null;
1052
+ const offsets = row.segment_offsets instanceof Uint8Array ? row.segment_offsets : new Uint8Array(row.segment_offsets);
1053
+ const blocks = row.segment_blocks instanceof Uint8Array ? row.segment_blocks : new Uint8Array(row.segment_blocks);
1054
+ const lastTs = row.segment_last_ts instanceof Uint8Array ? row.segment_last_ts : new Uint8Array(row.segment_last_ts);
1055
+ return {
1056
+ stream: String(row.stream),
1057
+ segment_count: Number(row.segment_count),
1058
+ segment_offsets: offsets,
1059
+ segment_blocks: blocks,
1060
+ segment_last_ts: lastTs,
1061
+ };
1062
+ }
1063
+
1064
  /** Create the segment-meta row for `stream` if missing (delegates to the prepared statement). */
  ensureSegmentMeta(stream: string): void {
    this.stmts.ensureSegmentMeta.run(stream);
  }
1067
+
1068
+ appendSegmentMeta(stream: string, offsetPlusOne: bigint, blockCount: number, lastAppendNs: bigint): void {
1069
+ this.ensureSegmentMeta(stream);
1070
+ const offsetBytes = this.encodeU64Le(offsetPlusOne);
1071
+ const blockBytes = this.encodeU32Le(blockCount);
1072
+ const tsBytes = this.encodeU64Le(lastAppendNs);
1073
+ this.stmts.appendSegmentMeta.run(offsetBytes, blockBytes, tsBytes, stream);
1074
+ }
1075
+
1076
  /** Replace the full packed segment-meta row (count + three parallel byte arrays) for `stream`. */
  upsertSegmentMeta(stream: string, count: number, offsets: Uint8Array, blocks: Uint8Array, lastTs: Uint8Array): void {
    this.stmts.upsertSegmentMeta.run(stream, count, offsets, blocks, lastTs);
  }
1079
+
1080
  /**
   * Rebuild the packed segment-meta arrays from the segments table (ordered by
   * segment_index) and persist them. Layout: per segment, u64-LE end_offset+1,
   * u32-LE block_count, u64-LE last-append time in ns (ms * 1e6).
   *
   * @returns the freshly built SegmentMetaRow.
   */
  rebuildSegmentMeta(stream: string): SegmentMetaRow {
    const rows = this.db
      .query(
        `SELECT end_offset, block_count, last_append_ms
         FROM segments WHERE stream=? ORDER BY segment_index ASC;`
      )
      .all(stream) as any[];
    const count = rows.length;
    // Three parallel fixed-stride arrays: 8 bytes/offset, 4 bytes/block count, 8 bytes/timestamp.
    const offsets = new Uint8Array(count * 8);
    const blocks = new Uint8Array(count * 4);
    const lastTs = new Uint8Array(count * 8);
    const dvOffsets = new DataView(offsets.buffer, offsets.byteOffset, offsets.byteLength);
    const dvBlocks = new DataView(blocks.buffer, blocks.byteOffset, blocks.byteLength);
    const dvLastTs = new DataView(lastTs.buffer, lastTs.byteOffset, lastTs.byteLength);
    for (let i = 0; i < rows.length; i++) {
      const endOffset = this.toBigInt(rows[i].end_offset);
      const blockCount = Number(rows[i].block_count);
      const lastAppendMs = this.toBigInt(rows[i].last_append_ms);
      // Stored as end_offset+1 (exclusive bound), little-endian.
      dvOffsets.setBigUint64(i * 8, endOffset + 1n, true);
      // >>> 0 coerces to an unsigned 32-bit value.
      dvBlocks.setUint32(i * 4, blockCount >>> 0, true);
      // ms -> ns.
      dvLastTs.setBigUint64(i * 8, lastAppendMs * 1_000_000n, true);
    }
    this.upsertSegmentMeta(stream, count, offsets, blocks, lastTs);
    return { stream, segment_count: count, segment_offsets: offsets, segment_blocks: blocks, segment_last_ts: lastTs };
  }
1105
+
1106
  /** Set the stream's uploaded-segment counter unconditionally, stamping updated_at_ms. */
  setUploadedSegmentCount(stream: string, count: number): void {
    this.stmts.setUploadedSegmentCount.run(count, this.nowMs(), stream);
  }
1109
+
1110
+ advanceUploadedSegmentCount(stream: string): number {
1111
+ const row = this.getStream(stream);
1112
+ if (!row) return 0;
1113
+ let count = row.uploaded_segment_count ?? 0;
1114
+ for (;;) {
1115
+ const seg = this.getSegmentByIndex(stream, count);
1116
+ if (!seg || !seg.r2_etag) break;
1117
+ count += 1;
1118
+ }
1119
+ if (count !== row.uploaded_segment_count) {
1120
+ this.stmts.setUploadedSegmentCount.run(count, this.nowMs(), stream);
1121
+ }
1122
+ return count;
1123
+ }
1124
+
1125
  /** Record a successful upload for `segmentId`: stores the object-store etag and upload timestamp. */
  markSegmentUploaded(segmentId: string, etag: string, uploadedAtMs: bigint): void {
    this.stmts.markSegmentUploaded.run(etag, uploadedAtMs, segmentId);
  }
1128
+
1129
  /**
   * Advance sealed_through for `stream` and subtract the just-sealed
   * bytes/rows from the pending counters (CASE guards clamp at zero to prevent
   * underflow). Also stamps last_segment_cut_ms and updated_at_ms with the
   * same timestamp.
   */
  setStreamSealedThrough(stream: string, sealedThrough: bigint, bytesSealed: bigint, rowsSealed: bigint): void {
    const now = this.nowMs();
    this.db.query(
      `UPDATE streams
         SET sealed_through = ?,
             pending_bytes = CASE WHEN pending_bytes >= ? THEN pending_bytes - ? ELSE 0 END,
             pending_rows = CASE WHEN pending_rows >= ? THEN pending_rows - ? ELSE 0 END,
             last_segment_cut_ms = ?,
             updated_at_ms = ?
         WHERE stream = ?;`
    ).run(sealedThrough, bytesSealed, bytesSealed, rowsSealed, rowsSealed, now, now, stream);
  }
1141
+
1142
  /** Set the segment_in_progress flag/value for `stream` directly (no claim semantics — see tryClaimSegment). */
  setSegmentInProgress(stream: string, inProgress: number): void {
    this.db.query(`UPDATE streams SET segment_in_progress=?, updated_at_ms=? WHERE stream=?;`).run(inProgress, this.nowMs(), stream);
  }
1145
+
1146
+ tryClaimSegment(stream: string): boolean {
1147
+ const res = this.stmts.tryClaimSegment.run(this.nowMs(), stream) as any;
1148
+ const changes = typeof res?.changes === "bigint" ? res.changes : BigInt(Number(res?.changes ?? 0));
1149
+ return changes > 0n;
1150
+ }
1151
+
1152
  /** Clear segment_in_progress on every stream where it is set — presumably a startup/crash-recovery reset; verify against bootstrap caller. */
  resetSegmentInProgress(): void {
    this.db.query(`UPDATE streams SET segment_in_progress=0 WHERE segment_in_progress != 0;`).run();
  }
1155
+
1156
  /** Advance the stream's uploaded_through watermark via the prepared statement, stamping updated_at_ms. */
  advanceUploadedThrough(stream: string, uploadedThrough: bigint): void {
    this.stmts.advanceUploadedThrough.run(uploadedThrough, this.nowMs(), stream);
  }
1159
+
1160
  /**
   * Delete WAL rows with offset <= uploadedThrough inside a transaction,
   * measuring the deleted bytes/rows first so the stream's wal_bytes/wal_rows
   * counters can be decremented consistently (clamped at zero).
   *
   * @returns deleted row/byte counts, clamped to Number.MAX_SAFE_INTEGER.
   */
  deleteWalThrough(stream: string, uploadedThrough: bigint): { deletedRows: number; deletedBytes: number } {
    const through = this.bindInt(uploadedThrough);
    const tx = this.db.transaction(() => {
      // Measure before deleting — the counts drive the counter adjustment below.
      const stats = this.db
        .query(
          `SELECT COALESCE(SUM(payload_len), 0) as bytes, COUNT(*) as rows
           FROM wal WHERE stream=? AND offset <= ?;`
        )
        .get(stream, through) as any;
      const bytes = this.toBigInt(stats?.bytes ?? 0);
      const rows = this.toBigInt(stats?.rows ?? 0);
      if (rows <= 0n) return { deletedRows: 0, deletedBytes: 0 };

      this.stmts.deleteWalBeforeOffset.run(stream, through);

      // Keep retained-WAL counters consistent; CASE guards prevent underflow.
      const now = this.nowMs();
      this.db.query(
        `UPDATE streams
           SET wal_bytes = CASE WHEN wal_bytes >= ? THEN wal_bytes - ? ELSE 0 END,
               wal_rows = CASE WHEN wal_rows >= ? THEN wal_rows - ? ELSE 0 END,
               updated_at_ms = ?
           WHERE stream = ?;`
      ).run(bytes, bytes, rows, rows, now, stream);

      // Clamp bigint counters into JS number range for the caller.
      const deletedBytes = bytes <= BigInt(Number.MAX_SAFE_INTEGER) ? Number(bytes) : Number.MAX_SAFE_INTEGER;
      const deletedRows = rows <= BigInt(Number.MAX_SAFE_INTEGER) ? Number(rows) : Number.MAX_SAFE_INTEGER;
      return { deletedRows, deletedBytes };
    });
    return tx();
  }
1190
+
1191
+ getManifestRow(stream: string): { stream: string; generation: number; uploaded_generation: number; last_uploaded_at_ms: bigint | null; last_uploaded_etag: string | null } {
1192
+ const row = this.stmts.getManifest.get(stream) as any;
1193
+ if (!row) {
1194
+ this.stmts.upsertManifest.run(stream, 0, 0, null, null);
1195
+ const fresh = this.stmts.getManifest.get(stream) as any;
1196
+ return {
1197
+ stream: String(fresh.stream),
1198
+ generation: Number(fresh.generation),
1199
+ uploaded_generation: Number(fresh.uploaded_generation),
1200
+ last_uploaded_at_ms: fresh.last_uploaded_at_ms == null ? null : this.toBigInt(fresh.last_uploaded_at_ms),
1201
+ last_uploaded_etag: fresh.last_uploaded_etag == null ? null : String(fresh.last_uploaded_etag),
1202
+ };
1203
+ }
1204
+ return {
1205
+ stream: String(row.stream),
1206
+ generation: Number(row.generation),
1207
+ uploaded_generation: Number(row.uploaded_generation),
1208
+ last_uploaded_at_ms: row.last_uploaded_at_ms == null ? null : this.toBigInt(row.last_uploaded_at_ms),
1209
+ last_uploaded_etag: row.last_uploaded_etag == null ? null : String(row.last_uploaded_etag),
1210
+ };
1211
+ }
1212
+
1213
  /** Upsert the manifest row for `stream` with explicit generation/upload bookkeeping values. */
  upsertManifestRow(stream: string, generation: number, uploadedGeneration: number, uploadedAtMs: bigint | null, etag: string | null): void {
    this.stmts.upsertManifest.run(stream, generation, uploadedGeneration, uploadedAtMs, etag);
  }
1216
+
1217
+ getIndexState(stream: string): IndexStateRow | null {
1218
+ const row = this.stmts.getIndexState.get(stream) as any;
1219
+ if (!row) return null;
1220
+ return {
1221
+ stream: String(row.stream),
1222
+ index_secret: row.index_secret instanceof Uint8Array ? row.index_secret : new Uint8Array(row.index_secret),
1223
+ indexed_through: Number(row.indexed_through),
1224
+ updated_at_ms: this.toBigInt(row.updated_at_ms),
1225
+ };
1226
+ }
1227
+
1228
  /** Upsert the full index state (secret + indexed_through) for `stream`, stamping updated_at_ms. */
  upsertIndexState(stream: string, indexSecret: Uint8Array, indexedThrough: number): void {
    this.stmts.upsertIndexState.run(stream, indexSecret, indexedThrough, this.nowMs());
  }
1231
+
1232
  /** Advance only the indexed_through watermark for `stream`, stamping updated_at_ms. */
  updateIndexedThrough(stream: string, indexedThrough: number): void {
    this.stmts.updateIndexedThrough.run(indexedThrough, this.nowMs(), stream);
  }
1235
+
1236
+ listIndexRuns(stream: string): IndexRunRow[] {
1237
+ const rows = this.stmts.listIndexRuns.all(stream) as any[];
1238
+ return rows.map((r) => ({
1239
+ run_id: String(r.run_id),
1240
+ stream: String(r.stream),
1241
+ level: Number(r.level),
1242
+ start_segment: Number(r.start_segment),
1243
+ end_segment: Number(r.end_segment),
1244
+ object_key: String(r.object_key),
1245
+ filter_len: Number(r.filter_len),
1246
+ record_count: Number(r.record_count),
1247
+ retired_gen: r.retired_gen == null ? null : Number(r.retired_gen),
1248
+ retired_at_ms: r.retired_at_ms == null ? null : this.toBigInt(r.retired_at_ms),
1249
+ }));
1250
+ }
1251
+
1252
+ listIndexRunsAll(stream: string): IndexRunRow[] {
1253
+ const rows = this.stmts.listIndexRunsAll.all(stream) as any[];
1254
+ return rows.map((r) => ({
1255
+ run_id: String(r.run_id),
1256
+ stream: String(r.stream),
1257
+ level: Number(r.level),
1258
+ start_segment: Number(r.start_segment),
1259
+ end_segment: Number(r.end_segment),
1260
+ object_key: String(r.object_key),
1261
+ filter_len: Number(r.filter_len),
1262
+ record_count: Number(r.record_count),
1263
+ retired_gen: r.retired_gen == null ? null : Number(r.retired_gen),
1264
+ retired_at_ms: r.retired_at_ms == null ? null : this.toBigInt(r.retired_at_ms),
1265
+ }));
1266
+ }
1267
+
1268
+ listRetiredIndexRuns(stream: string): IndexRunRow[] {
1269
+ const rows = this.stmts.listRetiredIndexRuns.all(stream) as any[];
1270
+ return rows.map((r) => ({
1271
+ run_id: String(r.run_id),
1272
+ stream: String(r.stream),
1273
+ level: Number(r.level),
1274
+ start_segment: Number(r.start_segment),
1275
+ end_segment: Number(r.end_segment),
1276
+ object_key: String(r.object_key),
1277
+ filter_len: Number(r.filter_len),
1278
+ record_count: Number(r.record_count),
1279
+ retired_gen: r.retired_gen == null ? null : Number(r.retired_gen),
1280
+ retired_at_ms: r.retired_at_ms == null ? null : this.toBigInt(r.retired_at_ms),
1281
+ }));
1282
+ }
1283
+
1284
  /** Insert a new (non-retired) index run; argument order must match the insertIndexRun prepared statement. */
  insertIndexRun(row: Omit<IndexRunRow, "retired_gen" | "retired_at_ms">): void {
    this.stmts.insertIndexRun.run(
      row.run_id,
      row.stream,
      row.level,
      row.start_segment,
      row.end_segment,
      row.object_key,
      row.filter_len,
      row.record_count
    );
  }
1296
+
1297
+ retireIndexRuns(runIds: string[], retiredGen: number, retiredAtMs: bigint): void {
1298
+ if (runIds.length === 0) return;
1299
+ const tx = this.db.transaction(() => {
1300
+ for (const runId of runIds) {
1301
+ this.stmts.retireIndexRun.run(retiredGen, retiredAtMs, runId);
1302
+ }
1303
+ });
1304
+ tx();
1305
+ }
1306
+
1307
+ deleteIndexRuns(runIds: string[]): void {
1308
+ if (runIds.length === 0) return;
1309
+ const tx = this.db.transaction(() => {
1310
+ for (const runId of runIds) {
1311
+ this.stmts.deleteIndexRun.run(runId);
1312
+ }
1313
+ });
1314
+ tx();
1315
+ }
1316
+
1317
+ countUploadedSegments(stream: string): number {
1318
+ const row = this.stmts.countUploadedSegments.get(stream) as any;
1319
+ const maxIdx = row ? Number(row.max_idx) : -1;
1320
+ return maxIdx >= 0 ? maxIdx + 1 : 0;
1321
+ }
1322
+
1323
  /**
   * Commit a manifest upload in one transaction: record the uploaded
   * generation/etag, advance uploaded_through, then opportunistically garbage
   * collect WAL rows that are both uploaded AND (if an interpreter exists)
   * interpreted. Deletes are chunked by BASE_WAL_GC_CHUNK_OFFSETS to avoid
   * long event-loop stalls; remaining rows are cleaned by periodic GC.
   */
  commitManifest(stream: string, generation: number, etag: string, uploadedAtMs: bigint, uploadedThrough: bigint): void {
    const tx = this.db.transaction(() => {
      // Uploaded generation == generation: this generation is now durably uploaded.
      this.stmts.upsertManifest.run(stream, generation, generation, uploadedAtMs, etag);
      this.stmts.advanceUploadedThrough.run(uploadedThrough, this.nowMs(), stream);
      // GC watermark: min(uploadedThrough, interpretedThrough) when an interpreter exists.
      let gcThrough = uploadedThrough;
      const interp = this.stmts.getStreamInterpreter.get(stream) as any;
      if (interp) {
        const interpretedThrough = this.toBigInt(interp.interpreted_through);
        gcThrough = interpretedThrough < gcThrough ? interpretedThrough : gcThrough;
      }
      if (gcThrough < 0n) return;

      // Chunk deletes to avoid large event-loop stalls on catch-up uploads.
      // (Periodic GC in touch/manager.ts handles interpreter-gated cleanup too.)
      let deleteThrough = gcThrough;
      if (BASE_WAL_GC_CHUNK_OFFSETS > 0) {
        const oldest = this.getWalOldestOffset(stream);
        if (oldest != null) {
          const maxThrough = oldest + BigInt(BASE_WAL_GC_CHUNK_OFFSETS) - 1n;
          if (deleteThrough > maxThrough) deleteThrough = maxThrough;
        }
      }
      if (deleteThrough < 0n) return;

      // Measure before deleting so the stream counters can be adjusted.
      const bound = this.bindInt(deleteThrough);
      const stats = this.db
        .query(
          `SELECT COALESCE(SUM(payload_len), 0) as bytes, COUNT(*) as rows
           FROM wal WHERE stream=? AND offset <= ?;`
        )
        .get(stream, bound) as any;
      const bytes = this.toBigInt(stats?.bytes ?? 0);
      const rows = this.toBigInt(stats?.rows ?? 0);
      if (rows <= 0n) return;

      this.stmts.deleteWalBeforeOffset.run(stream, bound);

      // Keep retained-WAL counters consistent for metrics/debugging.
      const now = this.nowMs();
      this.db.query(
        `UPDATE streams
           SET wal_bytes = CASE WHEN wal_bytes >= ? THEN wal_bytes - ? ELSE 0 END,
               wal_rows = CASE WHEN wal_rows >= ? THEN wal_rows - ? ELSE 0 END,
               updated_at_ms = ?
           WHERE stream = ?;`
      ).run(bytes, bytes, rows, rows, now, stream);
    });
    tx();
  }
1372
+
1373
  /**
   * Find candidates by bytes/rows/interval.
   *
   * Streams flagged deleted or touch are masked out via the flag argument.
   * maxIntervalMs <= 0 disables the time-based trigger and uses the
   * interval-free prepared statement.
   */
  candidates(
    minPendingBytes: bigint,
    minPendingRows: bigint,
    maxIntervalMs: bigint,
    limit: number
  ): Array<{ stream: string; pending_bytes: bigint; pending_rows: bigint; last_segment_cut_ms: bigint; sealed_through: bigint; next_offset: bigint; epoch: number }> {
    if (maxIntervalMs <= 0n) {
      return this.stmts.candidateStreamsNoInterval.all(STREAM_FLAG_DELETED | STREAM_FLAG_TOUCH, minPendingBytes, minPendingRows, limit) as any;
    }
    const now = this.nowMs();
    return this.stmts.candidateStreams.all(STREAM_FLAG_DELETED | STREAM_FLAG_TOUCH, minPendingBytes, minPendingRows, now, maxIntervalMs, limit) as any;
  }
1386
+ }