@prisma/streams-server 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/CODE_OF_CONDUCT.md +45 -0
  2. package/CONTRIBUTING.md +68 -0
  3. package/LICENSE +201 -0
  4. package/README.md +39 -2
  5. package/SECURITY.md +33 -0
  6. package/bin/prisma-streams-server +2 -0
  7. package/package.json +29 -34
  8. package/src/app.ts +74 -0
  9. package/src/app_core.ts +1983 -0
  10. package/src/app_local.ts +46 -0
  11. package/src/backpressure.ts +66 -0
  12. package/src/bootstrap.ts +239 -0
  13. package/src/config.ts +251 -0
  14. package/src/db/db.ts +1440 -0
  15. package/src/db/schema.ts +619 -0
  16. package/src/expiry_sweeper.ts +44 -0
  17. package/src/hist.ts +169 -0
  18. package/src/index/binary_fuse.ts +379 -0
  19. package/src/index/indexer.ts +745 -0
  20. package/src/index/run_cache.ts +84 -0
  21. package/src/index/run_format.ts +213 -0
  22. package/src/ingest.ts +655 -0
  23. package/src/lens/lens.ts +501 -0
  24. package/src/manifest.ts +114 -0
  25. package/src/memory.ts +155 -0
  26. package/src/metrics.ts +161 -0
  27. package/src/metrics_emitter.ts +50 -0
  28. package/src/notifier.ts +64 -0
  29. package/src/objectstore/interface.ts +13 -0
  30. package/src/objectstore/mock_r2.ts +269 -0
  31. package/src/objectstore/null.ts +32 -0
  32. package/src/objectstore/r2.ts +128 -0
  33. package/src/offset.ts +70 -0
  34. package/src/reader.ts +454 -0
  35. package/src/runtime/hash.ts +156 -0
  36. package/src/runtime/hash_vendor/LICENSE.hash-wasm +38 -0
  37. package/src/runtime/hash_vendor/NOTICE.md +8 -0
  38. package/src/runtime/hash_vendor/xxhash3.umd.min.cjs +7 -0
  39. package/src/runtime/hash_vendor/xxhash32.umd.min.cjs +7 -0
  40. package/src/runtime/hash_vendor/xxhash64.umd.min.cjs +7 -0
  41. package/src/schema/lens_schema.ts +290 -0
  42. package/src/schema/proof.ts +547 -0
  43. package/src/schema/registry.ts +405 -0
  44. package/src/segment/cache.ts +179 -0
  45. package/src/segment/format.ts +331 -0
  46. package/src/segment/segmenter.ts +326 -0
  47. package/src/segment/segmenter_worker.ts +43 -0
  48. package/src/segment/segmenter_workers.ts +94 -0
  49. package/src/server.ts +326 -0
  50. package/src/sqlite/adapter.ts +164 -0
  51. package/src/stats.ts +205 -0
  52. package/src/touch/engine.ts +41 -0
  53. package/src/touch/interpreter_worker.ts +459 -0
  54. package/src/touch/live_keys.ts +118 -0
  55. package/src/touch/live_metrics.ts +858 -0
  56. package/src/touch/live_templates.ts +619 -0
  57. package/src/touch/manager.ts +1341 -0
  58. package/src/touch/naming.ts +13 -0
  59. package/src/touch/routing_key_notifier.ts +275 -0
  60. package/src/touch/spec.ts +526 -0
  61. package/src/touch/touch_journal.ts +671 -0
  62. package/src/touch/touch_key_id.ts +20 -0
  63. package/src/touch/worker_pool.ts +189 -0
  64. package/src/touch/worker_protocol.ts +58 -0
  65. package/src/types/proper-lockfile.d.ts +1 -0
  66. package/src/uploader.ts +317 -0
  67. package/src/util/base32_crockford.ts +81 -0
  68. package/src/util/bloom256.ts +67 -0
  69. package/src/util/cleanup.ts +22 -0
  70. package/src/util/crc32c.ts +29 -0
  71. package/src/util/ds_error.ts +15 -0
  72. package/src/util/duration.ts +17 -0
  73. package/src/util/endian.ts +53 -0
  74. package/src/util/json_pointer.ts +148 -0
  75. package/src/util/log.ts +25 -0
  76. package/src/util/lru.ts +45 -0
  77. package/src/util/retry.ts +35 -0
  78. package/src/util/siphash.ts +71 -0
  79. package/src/util/stream_paths.ts +31 -0
  80. package/src/util/time.ts +14 -0
  81. package/src/util/yield.ts +3 -0
  82. package/build/index.d.mts +0 -1
  83. package/build/index.d.ts +0 -1
  84. package/build/index.js +0 -0
  85. package/build/index.mjs +0 -1
@@ -0,0 +1,189 @@
1
+ import { existsSync } from "node:fs";
2
+ import { resolve } from "node:path";
3
+ import { fileURLToPath } from "node:url";
4
+ import { Worker } from "node:worker_threads";
5
+ import { Result } from "better-result";
6
+ import type { Config } from "../config";
7
+ import type { ProcessRequest, ProcessResult, WorkerMessage } from "./worker_protocol";
8
+ import { dsError } from "../util/ds_error.ts";
9
+
10
+ type Pending = {
11
+ resolve: (r: Result<ProcessResult, WorkerPoolProcessError>) => void;
12
+ };
13
+
14
+ export type WorkerPoolProcessError = {
15
+ kind: "worker_pool_unavailable" | "worker_pool_failure";
16
+ message: string;
17
+ };
18
+
19
+ export class TouchInterpreterWorkerPool {
20
+ private readonly cfg: Config;
21
+ private readonly workerCount: number;
22
+ private readonly workers: Array<{ worker: Worker; busy: boolean; currentId: number | null }> = [];
23
+ private started = false;
24
+ private generation = 0;
25
+ private nextId = 1;
26
+ private readonly pending = new Map<number, Pending>();
27
+ private readonly queue: Array<Omit<ProcessRequest, "type" | "id"> & { id: number }> = [];
28
+
29
+ constructor(cfg: Config, workerCount: number) {
30
+ this.cfg = cfg;
31
+ this.workerCount = Math.max(0, Math.floor(workerCount));
32
+ }
33
+
34
+ start(): void {
35
+ if (this.started) return;
36
+ this.started = true;
37
+ this.generation += 1;
38
+ const generation = this.generation;
39
+ for (let i = 0; i < this.workerCount; i++) this.spawnWorker(i, generation);
40
+ }
41
+
42
+ stop(): void {
43
+ if (!this.started) return;
44
+ this.started = false;
45
+ this.generation += 1;
46
+ for (const w of this.workers) {
47
+ try {
48
+ w.worker.postMessage({ type: "stop" });
49
+ } catch {
50
+ // ignore
51
+ }
52
+ void w.worker.terminate();
53
+ }
54
+ this.workers.length = 0;
55
+ this.queue.length = 0;
56
+ for (const [id, p] of this.pending.entries()) {
57
+ p.resolve(Result.err({ kind: "worker_pool_failure", message: "worker pool stopped" }));
58
+ this.pending.delete(id);
59
+ }
60
+ }
61
+
62
+ restart(): void {
63
+ this.stop();
64
+ this.start();
65
+ }
66
+
67
+ async processResult(req: Omit<ProcessRequest, "type" | "id">): Promise<Result<ProcessResult, WorkerPoolProcessError>> {
68
+ if (!this.started) {
69
+ return Result.err({ kind: "worker_pool_unavailable", message: "worker pool not started" });
70
+ }
71
+ if (this.workerCount === 0) {
72
+ return Result.err({ kind: "worker_pool_unavailable", message: "worker pool disabled" });
73
+ }
74
+ const id = this.nextId++;
75
+ const queued = { ...req, id };
76
+ const value = await new Promise<Result<ProcessResult, WorkerPoolProcessError>>((resolve) => {
77
+ this.pending.set(id, { resolve });
78
+ this.queue.push(queued);
79
+ this.pump();
80
+ });
81
+ return value;
82
+ }
83
+
84
+ async process(req: Omit<ProcessRequest, "type" | "id">): Promise<ProcessResult> {
85
+ const res = await this.processResult(req);
86
+ if (Result.isError(res)) throw dsError(res.error.message);
87
+ return res.value;
88
+ }
89
+
90
+ private pump(): void {
91
+ if (!this.started) return;
92
+ if (this.queue.length === 0) return;
93
+ const slot = this.workers.find((w) => !w.busy);
94
+ if (!slot) return;
95
+ const next = this.queue.shift();
96
+ if (!next) return;
97
+ slot.busy = true;
98
+ slot.currentId = next.id;
99
+ slot.worker.postMessage({
100
+ type: "process",
101
+ id: next.id,
102
+ stream: next.stream,
103
+ fromOffset: next.fromOffset,
104
+ toOffset: next.toOffset,
105
+ interpreter: next.interpreter,
106
+ maxRows: next.maxRows,
107
+ maxBytes: next.maxBytes,
108
+ emitFineTouches: next.emitFineTouches,
109
+ fineTouchBudget: next.fineTouchBudget,
110
+ fineGranularity: next.fineGranularity,
111
+ interpretMode: next.interpretMode,
112
+ filterHotTemplates: next.filterHotTemplates,
113
+ hotTemplateIds: next.hotTemplateIds,
114
+ } satisfies ProcessRequest);
115
+ }
116
+
117
+ private spawnWorker(idx: number, generation: number = this.generation): void {
118
+ const workerUrl = new URL("./interpreter_worker.ts", import.meta.url);
119
+ let workerSpec = fileURLToPath(workerUrl);
120
+ if (!existsSync(workerSpec)) {
121
+ const fallback = resolve(process.cwd(), "src/touch/interpreter_worker.ts");
122
+ if (existsSync(fallback)) workerSpec = fallback;
123
+ }
124
+
125
+ const worker = new Worker(workerSpec, {
126
+ workerData: { config: this.cfg },
127
+ type: "module",
128
+ smol: true,
129
+ } as any);
130
+
131
+ const slot = { worker, busy: false, currentId: null };
132
+ this.workers.push(slot);
133
+
134
+ worker.on("message", (msg: WorkerMessage) => {
135
+ if (generation !== this.generation) return;
136
+ if (!msg || typeof msg !== "object") return;
137
+ if (msg.type === "result") {
138
+ const p = this.pending.get(msg.id);
139
+ if (p) {
140
+ this.pending.delete(msg.id);
141
+ slot.busy = false;
142
+ slot.currentId = null;
143
+ p.resolve(Result.ok(msg));
144
+ }
145
+ this.pump();
146
+ return;
147
+ }
148
+ if (msg.type === "error") {
149
+ const p = this.pending.get(msg.id);
150
+ if (p) {
151
+ this.pending.delete(msg.id);
152
+ slot.busy = false;
153
+ slot.currentId = null;
154
+ p.resolve(Result.err({ kind: "worker_pool_failure", message: msg.message }));
155
+ }
156
+ this.pump();
157
+ }
158
+ });
159
+
160
+ worker.on("error", (err) => {
161
+ if (generation !== this.generation) return;
162
+ // eslint-disable-next-line no-console
163
+ console.error(`touch interpreter worker ${idx} error`, err);
164
+ });
165
+
166
+ worker.on("exit", (code) => {
167
+ if (generation !== this.generation || !this.started) return;
168
+ // eslint-disable-next-line no-console
169
+ console.error(`touch interpreter worker ${idx} exited with code ${code}, respawning`);
170
+ if (slot.currentId != null) {
171
+ const p = this.pending.get(slot.currentId);
172
+ if (p) {
173
+ this.pending.delete(slot.currentId);
174
+ p.resolve(Result.err({ kind: "worker_pool_failure", message: "worker exited" }));
175
+ }
176
+ }
177
+ slot.busy = false;
178
+ slot.currentId = null;
179
+ try {
180
+ const widx = this.workers.indexOf(slot);
181
+ if (widx >= 0) this.workers.splice(widx, 1);
182
+ } catch {
183
+ // ignore
184
+ }
185
+ this.spawnWorker(idx, generation);
186
+ this.pump();
187
+ });
188
+ }
189
+ }
@@ -0,0 +1,58 @@
1
+ import type { StreamInterpreterConfig } from "./spec.ts";
2
+
3
+ export type TouchRow = {
4
+ keyId: number;
5
+ key?: string;
6
+ watermark: string; // source stream offset (base-10 string)
7
+ entity: string;
8
+ kind: "table" | "template";
9
+ templateId?: string;
10
+ };
11
+
12
+ export type ProcessRequest = {
13
+ type: "process";
14
+ id: number;
15
+ stream: string;
16
+ fromOffset: bigint;
17
+ toOffset: bigint;
18
+ interpreter: StreamInterpreterConfig;
19
+ maxRows: number;
20
+ maxBytes: number;
21
+ emitFineTouches?: boolean;
22
+ fineTouchBudget?: number | null;
23
+ fineGranularity?: "key" | "template";
24
+ interpretMode?: "full" | "hotTemplatesOnly";
25
+ filterHotTemplates?: boolean;
26
+ hotTemplateIds?: string[] | null;
27
+ };
28
+
29
+ export type ProcessResult = {
30
+ type: "result";
31
+ id: number;
32
+ stream: string;
33
+ derivedStream: string;
34
+ processedThrough: bigint;
35
+ touches: TouchRow[];
36
+ stats: {
37
+ rowsRead: number;
38
+ bytesRead: number;
39
+ changes: number;
40
+ touchesEmitted: number;
41
+ tableTouchesEmitted: number;
42
+ templateTouchesEmitted: number;
43
+ maxSourceTsMs?: number;
44
+ fineTouchesDroppedDueToBudget?: number;
45
+ fineTouchesSuppressedDueToBudget?: boolean;
46
+ fineTouchesSkippedColdTemplate?: number;
47
+ };
48
+ };
49
+
50
+ export type ProcessError = {
51
+ type: "error";
52
+ id: number;
53
+ stream: string;
54
+ message: string;
55
+ stack?: string;
56
+ };
57
+
58
+ export type WorkerMessage = ProcessResult | ProcessError;
@@ -0,0 +1 @@
1
+ declare module "proper-lockfile";
@@ -0,0 +1,317 @@
1
+ import { unlinkSync } from "node:fs";
2
+ import { readFile } from "node:fs/promises";
3
+ import { Result } from "better-result";
4
+ import type { Config } from "./config";
5
+ import type { SqliteDurableStore, SegmentRow } from "./db/db";
6
+ import type { ObjectStore } from "./objectstore/interface";
7
+ import { buildManifestResult } from "./manifest";
8
+ import { manifestObjectKey, segmentObjectKey, streamHash16Hex } from "./util/stream_paths";
9
+ import { readU64LE } from "./util/endian";
10
+ import { SegmentDiskCache } from "./segment/cache";
11
+ import { retry } from "./util/retry";
12
+ import { LruCache } from "./util/lru";
13
+ import type { StatsCollector } from "./stats";
14
+ import type { BackpressureGate } from "./backpressure";
15
+ import { dsError } from "./util/ds_error.ts";
16
+
17
+ export type UploaderController = {
18
+ start(): void;
19
+ stop(hard?: boolean): void;
20
+ countSegmentsWaiting(): number;
21
+ setHooks(hooks: { onSegmentsUploaded?: (stream: string) => void } | undefined): void;
22
+ publishManifest(stream: string): Promise<void>;
23
+ };
24
+
25
+ export class Uploader {
26
+ private readonly config: Config;
27
+ private readonly db: SqliteDurableStore;
28
+ private readonly os: ObjectStore;
29
+ private readonly diskCache?: SegmentDiskCache;
30
+ private readonly stats?: StatsCollector;
31
+ private readonly gate?: BackpressureGate;
32
+ private timer: any | null = null;
33
+ private running = false;
34
+ private stopping = false;
35
+ private readonly inflight = new Set<string>();
36
+ private readonly failures = new FailureTracker(1024);
37
+ private hooks?: { onSegmentsUploaded?: (stream: string) => void };
38
+ private readonly manifestInflight = new Set<string>();
39
+
40
+ constructor(
41
+ config: Config,
42
+ db: SqliteDurableStore,
43
+ os: ObjectStore,
44
+ diskCache?: SegmentDiskCache,
45
+ stats?: StatsCollector,
46
+ gate?: BackpressureGate,
47
+ hooks?: { onSegmentsUploaded?: (stream: string) => void }
48
+ ) {
49
+ this.config = config;
50
+ this.db = db;
51
+ this.os = os;
52
+ this.diskCache = diskCache;
53
+ this.stats = stats;
54
+ this.gate = gate;
55
+ this.hooks = hooks;
56
+ }
57
+
58
+ setHooks(hooks: { onSegmentsUploaded?: (stream: string) => void } | undefined): void {
59
+ this.hooks = hooks;
60
+ }
61
+
62
+ start(): void {
63
+ this.stopping = false;
64
+ if (this.timer) return;
65
+ this.timer = setInterval(() => {
66
+ void this.tick();
67
+ }, this.config.uploadIntervalMs);
68
+ }
69
+
70
+ stop(hard = false): void {
71
+ if (hard) this.stopping = true;
72
+ else this.stopping = false;
73
+ if (this.timer) clearInterval(this.timer);
74
+ this.timer = null;
75
+ }
76
+
77
+ countSegmentsWaiting(): number {
78
+ return this.db.countPendingSegments();
79
+ }
80
+
81
+ private async tick(): Promise<void> {
82
+ if (this.stopping) return;
83
+ if (this.running) return;
84
+ this.running = true;
85
+ try {
86
+ const pending = this.db.pendingUploadSegments(1000);
87
+ if (pending.length === 0) return;
88
+
89
+ // Upload with bounded concurrency.
90
+ const queue = pending.filter((s) => !this.inflight.has(s.segment_id) && !this.failures.shouldSkip(s.stream));
91
+ if (queue.length === 0) return;
92
+ const streams = new Set(queue.map((s) => s.stream));
93
+
94
+ const workers: Promise<void>[] = [];
95
+ for (let i = 0; i < this.config.uploadConcurrency; i++) {
96
+ workers.push(this.uploadWorker(queue));
97
+ }
98
+ await Promise.all(workers);
99
+
100
+ // Notify indexer / listeners.
101
+ if (this.hooks?.onSegmentsUploaded) {
102
+ for (const stream of streams) {
103
+ try {
104
+ this.hooks.onSegmentsUploaded(stream);
105
+ } catch {
106
+ // ignore
107
+ }
108
+ }
109
+ }
110
+
111
+ // Publish manifests for affected streams.
112
+ for (const stream of streams) {
113
+ if (this.failures.shouldSkip(stream)) continue;
114
+ try {
115
+ await this.publishManifest(stream);
116
+ } catch (e) {
117
+ const msg = String((e as any)?.message ?? e);
118
+ const lower = msg.toLowerCase();
119
+ if (!this.stopping && !lower.includes("database has closed") && !lower.includes("closed database") && !lower.includes("statement has finalized")) {
120
+ // eslint-disable-next-line no-console
121
+ console.error("manifest publish failed", stream, e);
122
+ }
123
+ }
124
+ }
125
+ } catch (e) {
126
+ const msg = String((e as any)?.message ?? e);
127
+ const lower = msg.toLowerCase();
128
+ if (!this.stopping && !lower.includes("database has closed") && !lower.includes("closed database") && !lower.includes("statement has finalized")) {
129
+ // eslint-disable-next-line no-console
130
+ console.error("uploader tick error", e);
131
+ }
132
+ } finally {
133
+ this.running = false;
134
+ }
135
+ }
136
+
137
+ private async uploadWorker(queue: SegmentRow[]): Promise<void> {
138
+ while (queue.length > 0) {
139
+ if (this.stopping) return;
140
+ const seg = queue.shift();
141
+ if (!seg) return;
142
+ if (this.inflight.has(seg.segment_id)) continue;
143
+ this.inflight.add(seg.segment_id);
144
+ try {
145
+ try {
146
+ await this.uploadOne(seg);
147
+ this.failures.recordSuccess(seg.stream);
148
+ } catch (e) {
149
+ const msg = String((e as any)?.message ?? e);
150
+ const lower = msg.toLowerCase();
151
+ if (!this.stopping && !lower.includes("database has closed") && !lower.includes("closed database") && !lower.includes("statement has finalized")) {
152
+ // eslint-disable-next-line no-console
153
+ console.error("segment upload failed", seg.segment_id, e);
154
+ }
155
+ }
156
+ } finally {
157
+ this.inflight.delete(seg.segment_id);
158
+ }
159
+ }
160
+ }
161
+
162
+ private async uploadOne(seg: SegmentRow): Promise<void> {
163
+ if (this.stopping) return;
164
+ const shash = streamHash16Hex(seg.stream);
165
+ const objectKey = segmentObjectKey(shash, seg.segment_index);
166
+ try {
167
+ const res = await retry(
168
+ async () => {
169
+ if (this.os.putFile) {
170
+ return this.os.putFile(objectKey, seg.local_path, seg.size_bytes);
171
+ }
172
+ const bytes = new Uint8Array(await readFile(seg.local_path));
173
+ return this.os.put(objectKey, bytes, { contentLength: seg.size_bytes });
174
+ },
175
+ {
176
+ retries: this.config.objectStoreRetries,
177
+ baseDelayMs: this.config.objectStoreBaseDelayMs,
178
+ maxDelayMs: this.config.objectStoreMaxDelayMs,
179
+ timeoutMs: this.config.objectStoreTimeoutMs,
180
+ }
181
+ );
182
+ this.db.markSegmentUploaded(seg.segment_id, res.etag, this.db.nowMs());
183
+ if (this.stats) this.stats.recordUploadedBytes(seg.size_bytes);
184
+ if (this.gate) this.gate.adjustOnUpload(seg.size_bytes);
185
+ } catch (e) {
186
+ this.failures.recordFailure(seg.stream);
187
+ throw e;
188
+ }
189
+ }
190
+
191
+ async publishManifest(stream: string): Promise<void> {
192
+ if (this.stopping) return;
193
+ if (this.manifestInflight.has(stream)) return;
194
+ this.manifestInflight.add(stream);
195
+ try {
196
+ const srow = this.db.getStream(stream);
197
+ if (!srow) return;
198
+
199
+ const prevPrefix = srow.uploaded_segment_count ?? 0;
200
+ let uploadedPrefix = this.db.advanceUploadedSegmentCount(stream);
201
+
202
+ const segCount = this.db.countSegmentsForStream(stream);
203
+ let meta = this.db.getSegmentMeta(stream);
204
+ const needsRebuild =
205
+ !meta ||
206
+ meta.segment_count !== segCount ||
207
+ meta.segment_offsets.byteLength !== segCount * 8 ||
208
+ meta.segment_blocks.byteLength !== segCount * 4 ||
209
+ meta.segment_last_ts.byteLength !== segCount * 8;
210
+ if (needsRebuild) {
211
+ meta = this.db.rebuildSegmentMeta(stream);
212
+ }
213
+ if (!meta) return;
214
+ if (uploadedPrefix > meta.segment_count) {
215
+ uploadedPrefix = meta.segment_count;
216
+ this.db.setUploadedSegmentCount(stream, uploadedPrefix);
217
+ }
218
+
219
+ const uploadedThrough =
220
+ uploadedPrefix === 0 ? -1n : readU64LE(meta.segment_offsets, (uploadedPrefix - 1) * 8) - 1n;
221
+
222
+ const manifestRow = this.db.getManifestRow(stream);
223
+ const generation = manifestRow.generation + 1;
224
+
225
+ const indexState = this.db.getIndexState(stream);
226
+ const indexRuns = this.db.listIndexRuns(stream);
227
+ const retiredRuns = this.db.listRetiredIndexRuns(stream);
228
+ const manifestRes = buildManifestResult({
229
+ streamName: stream,
230
+ streamRow: srow,
231
+ segmentMeta: meta,
232
+ uploadedPrefixCount: uploadedPrefix,
233
+ generation,
234
+ indexState,
235
+ indexRuns,
236
+ retiredRuns,
237
+ });
238
+ if (Result.isError(manifestRes)) {
239
+ this.failures.recordFailure(stream);
240
+ throw dsError(manifestRes.error.message);
241
+ }
242
+ const manifest = manifestRes.value;
243
+
244
+ const shash = streamHash16Hex(stream);
245
+ const mKey = manifestObjectKey(shash);
246
+ const body = new TextEncoder().encode(JSON.stringify(manifest));
247
+ let putRes;
248
+ try {
249
+ putRes = await retry(
250
+ () => this.os.put(mKey, body),
251
+ {
252
+ retries: this.config.objectStoreRetries,
253
+ baseDelayMs: this.config.objectStoreBaseDelayMs,
254
+ maxDelayMs: this.config.objectStoreMaxDelayMs,
255
+ timeoutMs: this.config.objectStoreTimeoutMs,
256
+ }
257
+ );
258
+ } catch (e) {
259
+ this.failures.recordFailure(stream);
260
+ throw e;
261
+ }
262
+
263
+ // Commit point: advance uploaded_through and delete WAL prefix.
264
+ this.db.commitManifest(stream, generation, putRes.etag, this.db.nowMs(), uploadedThrough);
265
+
266
+ // Local disk cleanup: delete newly uploaded segment files.
267
+ if (uploadedPrefix > prevPrefix) {
268
+ for (let i = prevPrefix; i < uploadedPrefix; i++) {
269
+ const seg = this.db.getSegmentByIndex(stream, i);
270
+ if (!seg) continue;
271
+ try {
272
+ const objectKey = segmentObjectKey(shash, seg.segment_index);
273
+ if (this.diskCache && this.diskCache.putFromLocal(objectKey, seg.local_path, seg.size_bytes)) {
274
+ continue;
275
+ }
276
+ unlinkSync(seg.local_path);
277
+ } catch {
278
+ // ignore
279
+ }
280
+ }
281
+ }
282
+ } finally {
283
+ this.manifestInflight.delete(stream);
284
+ }
285
+ }
286
+ }
287
+
288
+ class FailureTracker {
289
+ private readonly cache: LruCache<string, { attempts: number; untilMs: number }>;
290
+
291
+ constructor(maxEntries: number) {
292
+ this.cache = new LruCache(maxEntries);
293
+ }
294
+
295
+ shouldSkip(stream: string): boolean {
296
+ const item = this.cache.get(stream);
297
+ if (!item) return false;
298
+ if (Date.now() >= item.untilMs) {
299
+ this.cache.delete(stream);
300
+ return false;
301
+ }
302
+ return true;
303
+ }
304
+
305
+ recordFailure(stream: string): void {
306
+ const now = Date.now();
307
+ const item = this.cache.get(stream) ?? { attempts: 0, untilMs: now };
308
+ item.attempts += 1;
309
+ const backoff = Math.min(60_000, 500 * 2 ** (item.attempts - 1));
310
+ item.untilMs = now + backoff;
311
+ this.cache.set(stream, item);
312
+ }
313
+
314
+ recordSuccess(stream: string): void {
315
+ this.cache.delete(stream);
316
+ }
317
+ }
@@ -0,0 +1,81 @@
1
+ import { Result } from "better-result";
2
+ import { dsError } from "./ds_error.ts";
3
+ /**
4
+ * Crockford Base32 encoding/decoding.
5
+ *
6
+ * We use this for canonical 128-bit offsets encoded into a fixed 26-character string.
7
+ */
8
+
9
+ const ALPHABET = "0123456789ABCDEFGHJKMNPQRSTVWXYZ";
10
+ const DECODE_MAP: Record<string, number> = (() => {
11
+ const m: Record<string, number> = {};
12
+ for (let i = 0; i < ALPHABET.length; i++) {
13
+ m[ALPHABET[i]] = i;
14
+ m[ALPHABET[i].toLowerCase()] = i;
15
+ }
16
+ // Crockford aliases
17
+ m["O"] = m["o"] = 0;
18
+ m["I"] = m["i"] = 1;
19
+ m["L"] = m["l"] = 1;
20
+ return m;
21
+ })();
22
+
23
+ export type CrockfordBase32Error = {
24
+ kind: "invalid_base32";
25
+ message: string;
26
+ };
27
+
28
+ function invalidBase32<T = never>(message: string): Result<T, CrockfordBase32Error> {
29
+ return Result.err({ kind: "invalid_base32", message });
30
+ }
31
+
32
+ export function encodeCrockfordBase32Fixed26Result(bytes16: Uint8Array): Result<string, CrockfordBase32Error> {
33
+ if (bytes16.byteLength !== 16) return invalidBase32(`expected 16 bytes, got ${bytes16.byteLength}`);
34
+ // Interpret as big-endian 128-bit unsigned integer.
35
+ let n = 0n;
36
+ for (const b of bytes16) n = (n << 8n) | BigInt(b);
37
+ // Pad with 2 zero bits on the left: 128 -> 130 bits.
38
+ n = n << 2n;
39
+
40
+ let out = "";
41
+ // Extract 26 groups of 5 bits from MSB to LSB.
42
+ // Highest bit position is 129.
43
+ for (let i = 0; i < 26; i++) {
44
+ const shift = 5n * BigInt(25 - i);
45
+ const idx = Number((n >> shift) & 31n);
46
+ out += ALPHABET[idx];
47
+ }
48
+ return Result.ok(out);
49
+ }
50
+
51
+ export function encodeCrockfordBase32Fixed26(bytes16: Uint8Array): string {
52
+ const res = encodeCrockfordBase32Fixed26Result(bytes16);
53
+ if (Result.isError(res)) throw dsError(res.error.message);
54
+ return res.value;
55
+ }
56
+
57
+ export function decodeCrockfordBase32Fixed26Result(s: string): Result<Uint8Array, CrockfordBase32Error> {
58
+ if (s === "-1") return invalidBase32("-1 is a sentinel offset and cannot be decoded as base32");
59
+ if (s.length !== 26) return invalidBase32(`expected 26 chars, got ${s.length}`);
60
+ let n = 0n;
61
+ for (const ch of s) {
62
+ const v = DECODE_MAP[ch];
63
+ if (v === undefined) return invalidBase32(`invalid base32 char: ${ch}`);
64
+ n = (n << 5n) | BigInt(v);
65
+ }
66
+ // Remove the 2 padding bits.
67
+ n = n >> 2n;
68
+
69
+ const out = new Uint8Array(16);
70
+ for (let i = 15; i >= 0; i--) {
71
+ out[i] = Number(n & 0xffn);
72
+ n = n >> 8n;
73
+ }
74
+ return Result.ok(out);
75
+ }
76
+
77
+ export function decodeCrockfordBase32Fixed26(s: string): Uint8Array {
78
+ const res = decodeCrockfordBase32Fixed26Result(s);
79
+ if (Result.isError(res)) throw dsError(res.error.message);
80
+ return res.value;
81
+ }