@prisma/streams-server 0.0.1 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CODE_OF_CONDUCT.md +45 -0
- package/CONTRIBUTING.md +68 -0
- package/LICENSE +201 -0
- package/README.md +39 -2
- package/SECURITY.md +33 -0
- package/bin/prisma-streams-server +2 -0
- package/package.json +29 -34
- package/src/app.ts +74 -0
- package/src/app_core.ts +1706 -0
- package/src/app_local.ts +46 -0
- package/src/backpressure.ts +66 -0
- package/src/bootstrap.ts +239 -0
- package/src/config.ts +251 -0
- package/src/db/db.ts +1386 -0
- package/src/db/schema.ts +625 -0
- package/src/expiry_sweeper.ts +44 -0
- package/src/hist.ts +169 -0
- package/src/index/binary_fuse.ts +379 -0
- package/src/index/indexer.ts +745 -0
- package/src/index/run_cache.ts +84 -0
- package/src/index/run_format.ts +213 -0
- package/src/ingest.ts +655 -0
- package/src/lens/lens.ts +501 -0
- package/src/manifest.ts +114 -0
- package/src/memory.ts +155 -0
- package/src/metrics.ts +161 -0
- package/src/metrics_emitter.ts +50 -0
- package/src/notifier.ts +64 -0
- package/src/objectstore/interface.ts +13 -0
- package/src/objectstore/mock_r2.ts +269 -0
- package/src/objectstore/null.ts +32 -0
- package/src/objectstore/r2.ts +128 -0
- package/src/offset.ts +70 -0
- package/src/reader.ts +454 -0
- package/src/runtime/hash.ts +156 -0
- package/src/runtime/hash_vendor/LICENSE.hash-wasm +38 -0
- package/src/runtime/hash_vendor/NOTICE.md +8 -0
- package/src/runtime/hash_vendor/xxhash3.umd.min.cjs +7 -0
- package/src/runtime/hash_vendor/xxhash32.umd.min.cjs +7 -0
- package/src/runtime/hash_vendor/xxhash64.umd.min.cjs +7 -0
- package/src/schema/lens_schema.ts +290 -0
- package/src/schema/proof.ts +547 -0
- package/src/schema/registry.ts +405 -0
- package/src/segment/cache.ts +179 -0
- package/src/segment/format.ts +331 -0
- package/src/segment/segmenter.ts +326 -0
- package/src/segment/segmenter_worker.ts +43 -0
- package/src/segment/segmenter_workers.ts +94 -0
- package/src/server.ts +326 -0
- package/src/sqlite/adapter.ts +164 -0
- package/src/stats.ts +205 -0
- package/src/touch/engine.ts +41 -0
- package/src/touch/interpreter_worker.ts +442 -0
- package/src/touch/live_keys.ts +118 -0
- package/src/touch/live_metrics.ts +827 -0
- package/src/touch/live_templates.ts +619 -0
- package/src/touch/manager.ts +1199 -0
- package/src/touch/spec.ts +456 -0
- package/src/touch/touch_journal.ts +671 -0
- package/src/touch/touch_key_id.ts +20 -0
- package/src/touch/worker_pool.ts +189 -0
- package/src/touch/worker_protocol.ts +56 -0
- package/src/types/proper-lockfile.d.ts +1 -0
- package/src/uploader.ts +317 -0
- package/src/util/base32_crockford.ts +81 -0
- package/src/util/bloom256.ts +67 -0
- package/src/util/cleanup.ts +22 -0
- package/src/util/crc32c.ts +29 -0
- package/src/util/ds_error.ts +15 -0
- package/src/util/duration.ts +17 -0
- package/src/util/endian.ts +53 -0
- package/src/util/json_pointer.ts +148 -0
- package/src/util/log.ts +25 -0
- package/src/util/lru.ts +45 -0
- package/src/util/retry.ts +35 -0
- package/src/util/siphash.ts +71 -0
- package/src/util/stream_paths.ts +31 -0
- package/src/util/time.ts +14 -0
- package/src/util/yield.ts +3 -0
- package/build/index.d.mts +0 -1
- package/build/index.d.ts +0 -1
- package/build/index.js +0 -0
- package/build/index.mjs +0 -1
package/src/app_local.ts
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import type { Config } from "./config";
|
|
2
|
+
import { createAppCore, type App } from "./app_core";
|
|
3
|
+
import type { ObjectStore } from "./objectstore/interface";
|
|
4
|
+
import { NullObjectStore } from "./objectstore/null";
|
|
5
|
+
import { StreamReader } from "./reader";
|
|
6
|
+
import type { StatsCollector } from "./stats";
|
|
7
|
+
import type { UploaderController } from "./uploader";
|
|
8
|
+
import type { SegmenterController } from "./segment/segmenter_workers";
|
|
9
|
+
|
|
10
|
+
class NoopUploader implements UploaderController {
|
|
11
|
+
start(): void {}
|
|
12
|
+
stop(_hard?: boolean): void {}
|
|
13
|
+
countSegmentsWaiting(): number {
|
|
14
|
+
return 0;
|
|
15
|
+
}
|
|
16
|
+
setHooks(_hooks: { onSegmentsUploaded?: (stream: string) => void } | undefined): void {}
|
|
17
|
+
async publishManifest(_stream: string): Promise<void> {}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
const noopSegmenter: SegmenterController = {
|
|
21
|
+
start(): void {},
|
|
22
|
+
stop(_hard?: boolean): void {},
|
|
23
|
+
};
|
|
24
|
+
|
|
25
|
+
export type CreateLocalAppOptions = {
|
|
26
|
+
stats?: StatsCollector;
|
|
27
|
+
};
|
|
28
|
+
|
|
29
|
+
export function createLocalApp(cfg: Config, os?: ObjectStore, opts: CreateLocalAppOptions = {}): App {
|
|
30
|
+
return createAppCore(cfg, {
|
|
31
|
+
stats: opts.stats,
|
|
32
|
+
createRuntime: ({ config, db }) => {
|
|
33
|
+
const store = os ?? new NullObjectStore();
|
|
34
|
+
const reader = new StreamReader(config, db, store);
|
|
35
|
+
|
|
36
|
+
return {
|
|
37
|
+
store,
|
|
38
|
+
reader,
|
|
39
|
+
segmenter: noopSegmenter,
|
|
40
|
+
uploader: new NoopUploader(),
|
|
41
|
+
uploadSchemaRegistry: async (): Promise<void> => {},
|
|
42
|
+
start: (): void => {},
|
|
43
|
+
};
|
|
44
|
+
},
|
|
45
|
+
});
|
|
46
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
export class BackpressureGate {
|
|
2
|
+
private readonly maxBytes: number;
|
|
3
|
+
private currentBytes: number;
|
|
4
|
+
private reservedBytes: number;
|
|
5
|
+
|
|
6
|
+
constructor(maxBytes: number, initialBytes: number) {
|
|
7
|
+
this.maxBytes = maxBytes;
|
|
8
|
+
this.currentBytes = Math.max(0, initialBytes);
|
|
9
|
+
this.reservedBytes = 0;
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
enabled(): boolean {
|
|
13
|
+
return this.maxBytes > 0;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
reserve(bytes: number): boolean {
|
|
17
|
+
if (this.maxBytes <= 0) return true;
|
|
18
|
+
if (bytes <= 0) return true;
|
|
19
|
+
if (this.currentBytes + this.reservedBytes + bytes > this.maxBytes) return false;
|
|
20
|
+
this.reservedBytes += bytes;
|
|
21
|
+
return true;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
commit(bytes: number, reservedBytes: number = bytes): void {
|
|
25
|
+
if (this.maxBytes <= 0) return;
|
|
26
|
+
if (bytes <= 0) return;
|
|
27
|
+
if (reservedBytes > 0) this.reservedBytes = Math.max(0, this.reservedBytes - reservedBytes);
|
|
28
|
+
this.currentBytes += bytes;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
release(bytes: number): void {
|
|
32
|
+
if (this.maxBytes <= 0) return;
|
|
33
|
+
if (bytes <= 0) return;
|
|
34
|
+
this.reservedBytes = Math.max(0, this.reservedBytes - bytes);
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
adjustOnSeal(payloadBytes: number, segmentBytes: number): void {
|
|
38
|
+
if (this.maxBytes <= 0) return;
|
|
39
|
+
const delta = segmentBytes - payloadBytes;
|
|
40
|
+
this.currentBytes = Math.max(0, this.currentBytes + delta);
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
adjustOnUpload(segmentBytes: number): void {
|
|
44
|
+
if (this.maxBytes <= 0) return;
|
|
45
|
+
this.currentBytes = Math.max(0, this.currentBytes - segmentBytes);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
adjustOnWalTrim(payloadBytes: number): void {
|
|
49
|
+
if (this.maxBytes <= 0) return;
|
|
50
|
+
if (payloadBytes <= 0) return;
|
|
51
|
+
this.currentBytes = Math.max(0, this.currentBytes - payloadBytes);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
getCurrentBytes(): number {
|
|
55
|
+
return this.currentBytes;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
getMaxBytes(): number {
|
|
59
|
+
return this.maxBytes;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
isOverLimit(): boolean {
|
|
63
|
+
if (this.maxBytes <= 0) return false;
|
|
64
|
+
return this.currentBytes + this.reservedBytes >= this.maxBytes;
|
|
65
|
+
}
|
|
66
|
+
}
|
package/src/bootstrap.ts
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
import { mkdirSync, rmSync } from "node:fs";
|
|
2
|
+
import { dirname } from "node:path";
|
|
3
|
+
import { zstdDecompressSync } from "node:zlib";
|
|
4
|
+
import type { Config } from "./config";
|
|
5
|
+
import { SqliteDurableStore } from "./db/db";
|
|
6
|
+
import type { ObjectStore } from "./objectstore/interface";
|
|
7
|
+
import { localSegmentPath, schemaObjectKey, segmentObjectKey, streamHash16Hex } from "./util/stream_paths";
|
|
8
|
+
import { retry } from "./util/retry";
|
|
9
|
+
import { dsError } from "./util/ds_error.ts";
|
|
10
|
+
|
|
11
|
+
// Parsed manifest.json payload. Kept as a loose record because fields are
// probed dynamically below (each access site re-validates with typeof checks).
type Manifest = Record<string, any>;
|
|
12
|
+
|
|
13
|
+
/**
 * Rebuild local SQLite state from the object store: lists every
 * `streams/.../manifest.json`, and for each one restores the stream row,
 * segment metadata, per-segment rows, index state/runs, and the schema
 * registry entry. Optionally wipes local data first.
 *
 * @param cfg server configuration (paths, retry/timeouts, cache sizes)
 * @param store object store to bootstrap from
 * @param opts.clearLocal when not explicitly false, deletes the local DB and
 *   the `local`/`cache` directories before restoring
 * @throws when a manifest or referenced segment object is missing, or when
 *   manifest segment arrays disagree in length
 */
export async function bootstrapFromR2(cfg: Config, store: ObjectStore, opts: { clearLocal?: boolean } = {}): Promise<void> {
  if (opts.clearLocal !== false) {
    // Best-effort wipe of prior local state; missing paths are fine.
    try {
      rmSync(cfg.dbPath, { force: true });
    } catch {
      // ignore
    }
    try {
      rmSync(`${cfg.rootDir}/local`, { recursive: true, force: true });
    } catch {
      // ignore
    }
    try {
      rmSync(`${cfg.rootDir}/cache`, { recursive: true, force: true });
    } catch {
      // ignore
    }
  }

  mkdirSync(cfg.rootDir, { recursive: true });

  const db = new SqliteDurableStore(cfg.dbPath, { cacheBytes: cfg.sqliteCacheBytes });
  try {
    // Shared retry policy for all object-store calls below.
    const retryOpts = {
      retries: cfg.objectStoreRetries,
      baseDelayMs: cfg.objectStoreBaseDelayMs,
      maxDelayMs: cfg.objectStoreMaxDelayMs,
      timeoutMs: cfg.objectStoreTimeoutMs,
    };
    const keys = await retry(() => store.list("streams/"), retryOpts);
    const manifestKeys = keys.filter((k) => k.endsWith("/manifest.json"));
    for (const mkey of manifestKeys) {
      // A listed manifest that cannot be fetched is an error (retried).
      const mbytes = await retry(async () => {
        const data = await store.get(mkey);
        if (!data) throw dsError(`missing manifest ${mkey}`);
        return data;
      }, retryOpts);
      const manifest = JSON.parse(new TextDecoder().decode(mbytes)) as Manifest;
      const stream = String(manifest.name ?? "");
      if (!stream) continue; // unnamed manifest: skip silently

      const shash = streamHash16Hex(stream);
      const nowMs = db.nowMs();

      // Each manifest field is individually validated; malformed or missing
      // fields fall back to a safe default rather than failing the restore.
      const createdAtMs = parseIsoMs(manifest.created_at) ?? nowMs;
      const expiresAtMs = parseIsoMs(manifest.expires_at);
      const epoch = typeof manifest.epoch === "number" ? manifest.epoch : 0;
      const nextOffsetNum = typeof manifest.next_offset === "number" ? manifest.next_offset : 0;
      const nextOffset = BigInt(nextOffsetNum);

      const contentType = typeof manifest.content_type === "string" ? manifest.content_type : "application/octet-stream";
      const streamSeq = typeof manifest.stream_seq === "string" ? manifest.stream_seq : null;
      const closed = typeof manifest.closed === "number" ? manifest.closed : 0;
      const closedProducerId = typeof manifest.closed_producer_id === "string" ? manifest.closed_producer_id : null;
      const closedProducerEpoch = typeof manifest.closed_producer_epoch === "number" ? manifest.closed_producer_epoch : null;
      const closedProducerSeq = typeof manifest.closed_producer_seq === "number" ? manifest.closed_producer_seq : null;
      const ttlSeconds = typeof manifest.ttl_seconds === "number" ? manifest.ttl_seconds : null;
      const streamFlags = typeof manifest.stream_flags === "number" ? manifest.stream_flags : 0;

      // Segment arrays are stored base64(zstd(packed LE integers)).
      const segmentOffsetsBytes = decodeZstdBase64(manifest.segment_offsets ?? "");
      const segmentBlocksBytes = decodeZstdBase64(manifest.segment_blocks ?? "");
      const segmentLastTsBytes = decodeZstdBase64(manifest.segment_last_ts ?? "");
      const segmentOffsets = decodeU64LeArray(segmentOffsetsBytes);
      const segmentBlocks = decodeU32LeArray(segmentBlocksBytes);
      const segmentLastTs = decodeU64LeArray(segmentLastTsBytes);
      const segmentCount = typeof manifest.segment_count === "number" ? manifest.segment_count : segmentOffsets.length;

      if (segmentOffsets.length !== segmentCount || segmentBlocks.length !== segmentCount || segmentLastTs.length !== segmentCount) {
        throw dsError(`manifest array length mismatch for ${stream}`);
      }

      // segment_offsets[i] appears to be the exclusive end offset of segment
      // i, so the last inclusive offset is offsets[last] - 1 (-1 when empty).
      const lastEndOffset = segmentCount > 0 ? segmentOffsets[segmentCount - 1] - 1n : -1n;
      const uploadedPrefix = typeof manifest.uploaded_through === "number" ? manifest.uploaded_through : segmentCount;
      const uploadedThrough =
        uploadedPrefix > 0 && uploadedPrefix <= segmentOffsets.length ? segmentOffsets[uploadedPrefix - 1] - 1n : -1n;
      // The /1_000_000n conversion suggests last_ts is in nanoseconds — TODO confirm.
      const lastAppendMs = segmentCount > 0 ? segmentLastTs[segmentCount - 1] / 1_000_000n : nowMs;

      // Pending/WAL counters start at zero: everything restored here is
      // already sealed and uploaded.
      db.restoreStreamRow({
        stream,
        created_at_ms: createdAtMs,
        updated_at_ms: nowMs,
        content_type: contentType,
        stream_seq: streamSeq,
        closed,
        closed_producer_id: closedProducerId,
        closed_producer_epoch: closedProducerEpoch,
        closed_producer_seq: closedProducerSeq,
        ttl_seconds: ttlSeconds,
        epoch,
        next_offset: nextOffset,
        sealed_through: lastEndOffset,
        uploaded_through: uploadedThrough,
        uploaded_segment_count: uploadedPrefix,
        pending_rows: 0n,
        pending_bytes: 0n,
        wal_rows: 0n,
        wal_bytes: 0n,
        last_append_ms: lastAppendMs,
        last_segment_cut_ms: lastAppendMs,
        segment_in_progress: 0,
        expires_at_ms: expiresAtMs,
        stream_flags: streamFlags,
      });

      db.upsertSegmentMeta(stream, segmentCount, segmentOffsetsBytes, segmentBlocksBytes, segmentLastTsBytes);

      // Record the manifest generation and etag so later CAS publishes work.
      const manifestHead = await retry(async () => {
        const head = await store.head(mkey);
        if (!head) throw dsError(`missing manifest head ${mkey}`);
        return head;
      }, retryOpts);
      db.upsertManifestRow(stream, Number(manifest.generation ?? 0), Number(manifest.generation ?? 0), nowMs, manifestHead?.etag ?? null);

      // Recreate one segment row per manifest entry, marked as uploaded.
      // Note: only the local directory is created; segment data itself stays
      // in the object store and is fetched on demand.
      for (let i = 0; i < segmentCount; i++) {
        const startOffset = i === 0 ? 0n : segmentOffsets[i - 1];
        const endOffset = segmentOffsets[i] - 1n;
        const lastTsMs = segmentLastTs[i] / 1_000_000n;
        const localPath = localSegmentPath(cfg.rootDir, shash, i);
        const segmentId = `${shash}-${i}-${startOffset.toString()}-${endOffset.toString()}`;
        mkdirSync(dirname(localPath), { recursive: true });
        const objectKey = segmentObjectKey(shash, i);
        const head = await retry(async () => {
          const h = await store.head(objectKey);
          if (!h) throw dsError(`missing segment ${objectKey}`);
          return h;
        }, retryOpts);
        if (!head) throw dsError(`missing segment ${objectKey}`);
        db.createSegmentRow({
          segmentId,
          stream,
          segmentIndex: i,
          startOffset,
          endOffset,
          blockCount: segmentBlocks[i],
          lastAppendMs: lastTsMs,
          sizeBytes: head.size,
          localPath,
        });
        db.markSegmentUploaded(segmentId, head.etag, nowMs);
      }

      // Restore the per-stream index secret and progress, when present.
      const indexSecretB64 = typeof manifest.index_secret === "string" ? manifest.index_secret : "";
      if (indexSecretB64) {
        const secret = new Uint8Array(Buffer.from(indexSecretB64, "base64"));
        const indexedThrough = typeof manifest.indexed_through === "number" ? manifest.indexed_through : 0;
        db.upsertIndexState(stream, secret, indexedThrough);
      }

      // Active runs are inserted as-is; retired runs are inserted and then
      // immediately marked retired with their recorded generation/time.
      const activeRuns = Array.isArray(manifest.active_runs) ? manifest.active_runs : [];
      const retiredRuns = Array.isArray(manifest.retired_runs) ? manifest.retired_runs : [];
      for (const r of activeRuns) {
        db.insertIndexRun({
          run_id: String(r.run_id),
          stream,
          level: Number(r.level),
          start_segment: Number(r.start_segment),
          end_segment: Number(r.end_segment),
          object_key: String(r.object_key),
          filter_len: Number(r.filter_len ?? 0),
          record_count: Number(r.record_count ?? 0),
        });
      }
      for (const r of retiredRuns) {
        const runId = String(r.run_id);
        db.insertIndexRun({
          run_id: runId,
          stream,
          level: Number(r.level),
          start_segment: Number(r.start_segment),
          end_segment: Number(r.end_segment),
          object_key: String(r.object_key),
          filter_len: Number(r.filter_len ?? 0),
          record_count: Number(r.record_count ?? 0),
        });
        const retiredGen = typeof r.retired_gen === "number" ? r.retired_gen : Number(manifest.generation ?? 0);
        const retiredAtUnix = typeof r.retired_at_unix === "number" ? r.retired_at_unix : Math.floor(Number(nowMs) / 1000);
        db.retireIndexRuns([runId], retiredGen, BigInt(retiredAtUnix) * 1000n);
      }

      // Schema registry blob is optional; absence is not an error.
      const schemaKey = schemaObjectKey(shash);
      const schemaBytes = await retry(async () => {
        const data = await store.get(schemaKey);
        if (!data) return null;
        return data;
      }, retryOpts);
      if (schemaBytes) {
        db.upsertSchemaRegistry(stream, new TextDecoder().decode(schemaBytes));
      }
    }
  } finally {
    db.close();
  }
}
|
|
206
|
+
|
|
207
|
+
function decodeZstdBase64(value: string): Uint8Array {
|
|
208
|
+
if (!value) return new Uint8Array(0);
|
|
209
|
+
const raw = Buffer.from(value, "base64");
|
|
210
|
+
if (raw.byteLength === 0) return new Uint8Array(0);
|
|
211
|
+
return new Uint8Array(zstdDecompressSync(raw));
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
function decodeU64LeArray(bytes: Uint8Array): bigint[] {
|
|
215
|
+
if (bytes.byteLength === 0) return [];
|
|
216
|
+
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
217
|
+
const out: bigint[] = [];
|
|
218
|
+
for (let off = 0; off + 8 <= bytes.byteLength; off += 8) {
|
|
219
|
+
out.push(dv.getBigUint64(off, true));
|
|
220
|
+
}
|
|
221
|
+
return out;
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
function decodeU32LeArray(bytes: Uint8Array): number[] {
|
|
225
|
+
if (bytes.byteLength === 0) return [];
|
|
226
|
+
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
227
|
+
const out: number[] = [];
|
|
228
|
+
for (let off = 0; off + 4 <= bytes.byteLength; off += 4) {
|
|
229
|
+
out.push(dv.getUint32(off, true));
|
|
230
|
+
}
|
|
231
|
+
return out;
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
function parseIsoMs(value: any): bigint | null {
|
|
235
|
+
if (!value || typeof value !== "string") return null;
|
|
236
|
+
const ms = Date.parse(value);
|
|
237
|
+
if (!Number.isFinite(ms)) return null;
|
|
238
|
+
return BigInt(ms);
|
|
239
|
+
}
|
package/src/config.ts
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
import { dsError } from "./util/ds_error.ts";
|
|
2
|
+
/**
 * Runtime configuration resolved from DS_* environment variables by
 * loadConfig(). Byte and millisecond fields are plain numbers; see
 * loadConfig() for the DS_* variable each field is read from and its default.
 */
export type Config = {
  // Network and filesystem roots.
  host: string;
  rootDir: string;
  dbPath: string;
  // Segmenter: when to cut segments and how many workers drive it.
  segmentMaxBytes: number;
  blockMaxBytes: number;
  segmentTargetRows: number;
  segmentMaxIntervalMs: number;
  segmentCheckIntervalMs: number;
  segmenterWorkers: number;
  // Uploader loop cadence and parallelism.
  uploadIntervalMs: number;
  uploadConcurrency: number;
  // Local caches for segment data and footers.
  segmentCacheMaxBytes: number;
  segmentFooterCacheEntries: number;
  // Index runs: caching, build/compaction scheduling, and retirement policy.
  indexRunCacheMaxBytes: number;
  indexRunMemoryCacheBytes: number;
  indexL0SpanSegments: number;
  indexBuildConcurrency: number;
  indexCheckIntervalMs: number;
  indexCompactionFanout: number;
  indexMaxLevel: number;
  indexCompactionConcurrency: number;
  indexRetireGenWindow: number;
  indexRetireMinMs: number;
  // Read-path and append-path limits.
  readMaxBytes: number;
  readMaxRecords: number;
  appendMaxBodyBytes: number;
  // Ingest batching/queueing thresholds.
  ingestFlushIntervalMs: number;
  ingestMaxBatchRequests: number;
  ingestMaxBatchBytes: number;
  ingestMaxQueueRequests: number;
  ingestMaxQueueBytes: number;
  ingestBusyTimeoutMs: number;
  // Memory budgets (0 generally means disabled/derive — see loadConfig).
  localBacklogMaxBytes: number;
  memoryLimitBytes: number;
  sqliteCacheBytes: number;
  // Object-store call timeouts and retry/backoff policy.
  objectStoreTimeoutMs: number;
  objectStoreRetries: number;
  objectStoreBaseDelayMs: number;
  objectStoreMaxDelayMs: number;
  // Background sweeps and metrics flushing.
  expirySweepIntervalMs: number;
  expirySweepBatchLimit: number;
  metricsFlushIntervalMs: number;
  // Touch interpreter workers and batching.
  interpreterWorkers: number;
  interpreterCheckIntervalMs: number;
  interpreterMaxBatchRows: number;
  interpreterMaxBatchBytes: number;
  // HTTP listen port (PORT env).
  port: number;
};
|
|
51
|
+
|
|
52
|
+
// Every DS_*-prefixed environment variable recognized by the server and its
// bench/stress tooling. Anything DS_*-prefixed but absent from this set
// triggers a one-time warning below — this catches misspelled overrides.
const KNOWN_DS_ENVS = new Set<string>([
  "DS_ROOT",
  "DS_HOST",
  "DS_DB_PATH",
  "DS_SEGMENT_MAX_BYTES",
  "DS_BLOCK_MAX_BYTES",
  "DS_SEGMENT_TARGET_ROWS",
  "DS_SEGMENT_MAX_INTERVAL_MS",
  "DS_SEGMENT_CHECK_MS",
  "DS_SEGMENTER_WORKERS",
  "DS_UPLOAD_CHECK_MS",
  "DS_UPLOAD_CONCURRENCY",
  "DS_SEGMENT_CACHE_MAX_BYTES",
  "DS_SEGMENT_FOOTER_CACHE_ENTRIES",
  "DS_INDEX_RUN_CACHE_MAX_BYTES",
  "DS_INDEX_RUN_MEM_CACHE_BYTES",
  "DS_INDEX_L0_SPAN",
  "DS_INDEX_BUILD_CONCURRENCY",
  "DS_INDEX_CHECK_MS",
  "DS_INDEX_COMPACTION_FANOUT",
  "DS_INDEX_MAX_LEVEL",
  "DS_INDEX_COMPACT_CONCURRENCY",
  "DS_INDEX_RETIRE_GEN_WINDOW",
  "DS_INDEX_RETIRE_MIN_MS",
  "DS_READ_MAX_BYTES",
  "DS_READ_MAX_RECORDS",
  "DS_APPEND_MAX_BODY_BYTES",
  "DS_INGEST_FLUSH_MS",
  "DS_INGEST_MAX_BATCH_REQS",
  "DS_INGEST_MAX_BATCH_BYTES",
  "DS_INGEST_MAX_QUEUE_REQS",
  "DS_INGEST_MAX_QUEUE_BYTES",
  "DS_INGEST_BUSY_MS",
  "DS_LOCAL_BACKLOG_MAX_BYTES",
  "DS_MEMORY_LIMIT_BYTES",
  "DS_MEMORY_LIMIT_MB",
  "DS_SQLITE_CACHE_BYTES",
  "DS_SQLITE_CACHE_MB",
  "DS_OBJECTSTORE_TIMEOUT_MS",
  "DS_OBJECTSTORE_RETRIES",
  "DS_OBJECTSTORE_RETRY_BASE_MS",
  "DS_OBJECTSTORE_RETRY_MAX_MS",
  "DS_LOCAL_DATA_ROOT",
  "DS_EXPIRY_SWEEP_MS",
  "DS_EXPIRY_SWEEP_LIMIT",
  "DS_METRICS_FLUSH_MS",
  "DS_INTERPRETER_WORKERS",
  "DS_INTERPRETER_CHECK_MS",
  "DS_INTERPRETER_MAX_BATCH_ROWS",
  "DS_INTERPRETER_MAX_BATCH_BYTES",
  "DS_STATS_INTERVAL_MS",
  "DS_BACKPRESSURE_BUDGET_MS",
  // Mock R2 / bench / stress variables are accepted here so tooling runs
  // do not spam warnings, even though loadConfig itself does not read them.
  "DS_MOCK_R2_MAX_INMEM_BYTES",
  "DS_MOCK_R2_MAX_INMEM_MB",
  "DS_MOCK_R2_SPILL_DIR",
  "DS_BENCH_URL",
  "DS_BENCH_DURATION_MS",
  "DS_BENCH_INTERVAL_MS",
  "DS_BENCH_PAYLOAD_BYTES",
  "DS_BENCH_CONCURRENCY",
  "DS_BENCH_REQUEST_TIMEOUT_MS",
  "DS_BENCH_DRAIN_TIMEOUT_MS",
  "DS_BENCH_PAUSE_BACKGROUND",
  "DS_BENCH_YIELD_EVERY",
  "DS_BENCH_DEBUG",
  "DS_BENCH_SCENARIOS",
  "DS_MEMORY_STRESS_LIMITS_MB",
  "DS_MEMORY_STRESS_STATS_MS",
  "DS_MEMORY_STRESS_PORT_BASE",
  "DS_RK_EVENTS_MAX",
  "DS_RK_EVENTS_STEP",
  "DS_RK_PAYLOAD_BYTES",
  "DS_RK_APPEND_BATCH",
  "DS_RK_KEYS",
  "DS_RK_HOT_KEYS",
  "DS_RK_HOT_PCT",
  "DS_RK_PAYLOAD_POOL",
  "DS_RK_READ_ENTRIES",
  "DS_RK_WARM_READS",
  "DS_RK_SEGMENT_BYTES",
  "DS_RK_BLOCK_BYTES",
  "DS_RK_SEED",
  "DS_RK_R2_GET_DELAY_MS",
]);

// Ensures the unknown-env warning is printed at most once per process.
let warnedUnknownEnv = false;

/**
 * Warn (once) about DS_*-prefixed environment variables that are not in
 * KNOWN_DS_ENVS — typically misspelled configuration overrides that would
 * otherwise be silently ignored.
 */
function warnUnknownEnv(): void {
  if (warnedUnknownEnv) return;
  warnedUnknownEnv = true;
  const unknown: string[] = [];
  for (const key of Object.keys(process.env)) {
    if (!key.startsWith("DS_")) continue;
    if (KNOWN_DS_ENVS.has(key)) continue;
    unknown.push(key);
  }
  if (unknown.length > 0) {
    unknown.sort(); // deterministic warning output
    console.warn(`[config] unknown DS_* environment variables: ${unknown.join(", ")}`);
  }
}
|
|
153
|
+
|
|
154
|
+
function envNum(name: string, def: number): number {
|
|
155
|
+
const v = process.env[name];
|
|
156
|
+
if (!v) return def;
|
|
157
|
+
const n = Number(v);
|
|
158
|
+
if (!Number.isFinite(n)) throw dsError(`invalid ${name}: ${v}`);
|
|
159
|
+
return n;
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
function envBytes(name: string): number | null {
|
|
163
|
+
const v = process.env[name];
|
|
164
|
+
if (!v) return null;
|
|
165
|
+
const n = Number(v);
|
|
166
|
+
if (!Number.isFinite(n)) throw dsError(`invalid ${name}: ${v}`);
|
|
167
|
+
return Math.max(0, Math.floor(n));
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
function clampBytes(value: number, min: number, max: number): number {
|
|
171
|
+
if (!Number.isFinite(value)) return min;
|
|
172
|
+
if (value < min) return min;
|
|
173
|
+
if (value > max) return max;
|
|
174
|
+
return value;
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
/**
 * Resolve the full server Config from DS_* environment variables, applying
 * defaults and deriving memory-dependent budgets.
 *
 * Derivations: when DS_MEMORY_LIMIT_BYTES/_MB is set, the SQLite cache
 * defaults to 25% of that limit and the in-memory index-run cache to 5%
 * (clamped to [8 MiB, 128 MiB]); explicit DS_SQLITE_CACHE_* /
 * DS_INDEX_RUN_MEM_CACHE_BYTES overrides always win.
 *
 * @throws via envNum/envBytes when any set variable is not a finite number.
 */
export function loadConfig(): Config {
  warnUnknownEnv(); // surface typo'd DS_* overrides once per process
  const rootDir = process.env.DS_ROOT ?? "./ds-data";
  // `||` (not `??`) so an all-whitespace DS_HOST also falls back.
  const host = process.env.DS_HOST?.trim() || "127.0.0.1";
  const bytesOverride = envBytes("DS_MEMORY_LIMIT_BYTES");
  const mbOverride = envBytes("DS_MEMORY_LIMIT_MB");
  // Bytes override beats MB override; 0 means "no memory limit configured".
  const memoryLimitBytes = bytesOverride ?? (mbOverride != null ? mbOverride * 1024 * 1024 : 0);
  const backlogOverride = envBytes("DS_LOCAL_BACKLOG_MAX_BYTES");
  const sqliteCacheBytesOverride = envBytes("DS_SQLITE_CACHE_BYTES");
  const sqliteCacheMbOverride = envBytes("DS_SQLITE_CACHE_MB");
  const indexMemOverride = envBytes("DS_INDEX_RUN_MEM_CACHE_BYTES");
  const indexDiskOverride = envBytes("DS_INDEX_RUN_CACHE_MAX_BYTES");
  // Default local backlog budget: 10 GiB.
  const localBacklogMaxBytes = backlogOverride ?? 10 * 1024 * 1024 * 1024;
  // SQLite cache: explicit bytes > explicit MB > 25% of memory limit > 0.
  const sqliteCacheBytes =
    sqliteCacheBytesOverride ??
    (sqliteCacheMbOverride != null
      ? sqliteCacheMbOverride * 1024 * 1024
      : memoryLimitBytes > 0
        ? Math.floor(memoryLimitBytes * 0.25)
        : 0);
  // Index-run memory cache: explicit override > 5% of memory limit clamped
  // to [8 MiB, 128 MiB] > 64 MiB flat default.
  const tunedIndexMem =
    indexMemOverride ??
    (memoryLimitBytes > 0
      ? clampBytes(Math.floor(memoryLimitBytes * 0.05), 8 * 1024 * 1024, 128 * 1024 * 1024)
      : 64 * 1024 * 1024);
  return {
    host,
    rootDir,
    dbPath: process.env.DS_DB_PATH ?? `${rootDir}/wal.sqlite`,
    segmentMaxBytes: envNum("DS_SEGMENT_MAX_BYTES", 16 * 1024 * 1024),
    blockMaxBytes: envNum("DS_BLOCK_MAX_BYTES", 256 * 1024),
    segmentTargetRows: envNum("DS_SEGMENT_TARGET_ROWS", 50_000),
    segmentMaxIntervalMs: envNum("DS_SEGMENT_MAX_INTERVAL_MS", 0),
    segmentCheckIntervalMs: envNum("DS_SEGMENT_CHECK_MS", 250),
    segmenterWorkers: envNum("DS_SEGMENTER_WORKERS", 0),
    uploadIntervalMs: envNum("DS_UPLOAD_CHECK_MS", 250),
    uploadConcurrency: envNum("DS_UPLOAD_CONCURRENCY", 4),
    segmentCacheMaxBytes: envNum("DS_SEGMENT_CACHE_MAX_BYTES", 256 * 1024 * 1024),
    segmentFooterCacheEntries: envNum("DS_SEGMENT_FOOTER_CACHE_ENTRIES", 2048),
    indexRunCacheMaxBytes: indexDiskOverride ?? 256 * 1024 * 1024,
    indexRunMemoryCacheBytes: tunedIndexMem,
    indexL0SpanSegments: envNum("DS_INDEX_L0_SPAN", 16),
    indexBuildConcurrency: envNum("DS_INDEX_BUILD_CONCURRENCY", 4),
    indexCheckIntervalMs: envNum("DS_INDEX_CHECK_MS", 1000),
    indexCompactionFanout: envNum("DS_INDEX_COMPACTION_FANOUT", 16),
    indexMaxLevel: envNum("DS_INDEX_MAX_LEVEL", 4),
    indexCompactionConcurrency: envNum("DS_INDEX_COMPACT_CONCURRENCY", 4),
    indexRetireGenWindow: envNum("DS_INDEX_RETIRE_GEN_WINDOW", 2),
    indexRetireMinMs: envNum("DS_INDEX_RETIRE_MIN_MS", 5 * 60 * 1000),
    readMaxBytes: envNum("DS_READ_MAX_BYTES", 1 * 1024 * 1024),
    readMaxRecords: envNum("DS_READ_MAX_RECORDS", 1000),
    appendMaxBodyBytes: envNum("DS_APPEND_MAX_BODY_BYTES", 10 * 1024 * 1024),
    ingestFlushIntervalMs: envNum("DS_INGEST_FLUSH_MS", 10),
    ingestMaxBatchRequests: envNum("DS_INGEST_MAX_BATCH_REQS", 200),
    ingestMaxBatchBytes: envNum("DS_INGEST_MAX_BATCH_BYTES", 8 * 1024 * 1024),
    ingestMaxQueueRequests: envNum("DS_INGEST_MAX_QUEUE_REQS", 50_000),
    ingestMaxQueueBytes: envNum("DS_INGEST_MAX_QUEUE_BYTES", 64 * 1024 * 1024),
    ingestBusyTimeoutMs: envNum("DS_INGEST_BUSY_MS", 5000),
    localBacklogMaxBytes,
    memoryLimitBytes,
    sqliteCacheBytes,
    objectStoreTimeoutMs: envNum("DS_OBJECTSTORE_TIMEOUT_MS", 5000),
    objectStoreRetries: envNum("DS_OBJECTSTORE_RETRIES", 3),
    objectStoreBaseDelayMs: envNum("DS_OBJECTSTORE_RETRY_BASE_MS", 50),
    objectStoreMaxDelayMs: envNum("DS_OBJECTSTORE_RETRY_MAX_MS", 2000),
    expirySweepIntervalMs: envNum("DS_EXPIRY_SWEEP_MS", 60_000),
    expirySweepBatchLimit: envNum("DS_EXPIRY_SWEEP_LIMIT", 100),
    metricsFlushIntervalMs: envNum("DS_METRICS_FLUSH_MS", 10_000),
    interpreterWorkers: envNum("DS_INTERPRETER_WORKERS", 1),
    interpreterCheckIntervalMs: envNum("DS_INTERPRETER_CHECK_MS", 250),
    interpreterMaxBatchRows: envNum("DS_INTERPRETER_MAX_BATCH_ROWS", 500),
    interpreterMaxBatchBytes: envNum("DS_INTERPRETER_MAX_BATCH_BYTES", 4 * 1024 * 1024),
    // Note: PORT is the conventional platform variable, not DS_-prefixed.
    port: envNum("PORT", 8080),
  };
}
|