@rce-mcp/data-plane 0.1.0

@@ -0,0 +1,137 @@
+ import { createRequire } from "node:module";
+
+ export type SqliteBindValue = string | number | bigint | Uint8Array | null;
+
+ export interface SqliteRunResult {
+   changes?: number;
+ }
+
+ export interface SqliteStatement {
+   run(...params: SqliteBindValue[]): SqliteRunResult;
+   get(...params: SqliteBindValue[]): unknown;
+   all(...params: SqliteBindValue[]): unknown[];
+ }
+
+ export interface SqliteDatabase {
+   exec(sql: string): void;
+   prepare(sql: string): SqliteStatement;
+   close(): void;
+ }
+
+ type SqliteDriverName = "node:sqlite" | "bun:sqlite";
+
+ interface SqliteRuntimeDriver {
+   name: SqliteDriverName;
+   open(dbPath: string): SqliteDatabase;
+ }
+
+ const require = createRequire(import.meta.url);
+
+ function loadNodeSqliteDriver(): SqliteRuntimeDriver {
+   const mod = require("node:sqlite") as {
+     DatabaseSync: new (path: string) => {
+       exec(sql: string): void;
+       prepare(sql: string): {
+         run(...params: SqliteBindValue[]): SqliteRunResult;
+         get(...params: SqliteBindValue[]): unknown;
+         all(...params: SqliteBindValue[]): unknown[];
+       };
+       close(): void;
+     };
+   };
+
+   return {
+     name: "node:sqlite",
+     open(dbPath: string): SqliteDatabase {
+       const db = new mod.DatabaseSync(dbPath);
+       return {
+         exec(sql) {
+           db.exec(sql);
+         },
+         prepare(sql) {
+           return db.prepare(sql);
+         },
+         close() {
+           db.close();
+         }
+       };
+     }
+   };
+ }
+
+ function loadBunSqliteDriver(): SqliteRuntimeDriver {
+   const mod = require("bun:sqlite") as {
+     Database?: new (path: string) => {
+       exec(sql: string): void;
+       query(sql: string): {
+         run(...params: SqliteBindValue[]): SqliteRunResult;
+         get(...params: SqliteBindValue[]): unknown;
+         all(...params: SqliteBindValue[]): unknown[];
+       };
+       close(): void;
+     };
+     default?: {
+       Database?: new (path: string) => {
+         exec(sql: string): void;
+         query(sql: string): {
+           run(...params: SqliteBindValue[]): SqliteRunResult;
+           get(...params: SqliteBindValue[]): unknown;
+           all(...params: SqliteBindValue[]): unknown[];
+         };
+         close(): void;
+       };
+     };
+   };
+
+   const BunDatabase = mod.Database ?? mod.default?.Database;
+   if (!BunDatabase) {
+     throw new Error("bun:sqlite Database export is unavailable");
+   }
+
+   return {
+     name: "bun:sqlite",
+     open(dbPath: string): SqliteDatabase {
+       const db = new BunDatabase(dbPath);
+       return {
+         exec(sql) {
+           db.exec(sql);
+         },
+         prepare(sql) {
+           const statement = db.query(sql);
+           return {
+             run(...params) {
+               return statement.run(...params);
+             },
+             get(...params) {
+               return statement.get(...params);
+             },
+             all(...params) {
+               return statement.all(...params);
+             }
+           };
+         },
+         close() {
+           db.close();
+         }
+       };
+     }
+   };
+ }
+
+ function loadSqliteRuntimeDriver(): SqliteRuntimeDriver {
+   const isBun = typeof process.versions.bun === "string" && process.versions.bun.length > 0;
+   if (isBun) {
+     return loadBunSqliteDriver();
+   }
+   return loadNodeSqliteDriver();
+ }
+
+ const sqliteRuntimeDriver = loadSqliteRuntimeDriver();
+
+ export function sqliteRuntimeDriverName(): SqliteDriverName {
+   return sqliteRuntimeDriver.name;
+ }
+
+ export function openSqliteDatabase(dbPath: string): SqliteDatabase {
+   return sqliteRuntimeDriver.open(dbPath);
+ }
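
For orientation only, and not part of the published diff itself: a minimal sketch of how the two exports above might be consumed. The import path assumes the package re-exports them from its entry point, and the database path, table, and values are illustrative placeholders rather than anything defined by the package.

import { openSqliteDatabase, sqliteRuntimeDriverName } from "@rce-mcp/data-plane";

// Reports "bun:sqlite" under Bun and "node:sqlite" under Node.js, per the loader above.
console.log(`sqlite driver: ${sqliteRuntimeDriverName()}`);

// Illustrative path and schema; the package defines its own tables elsewhere.
const db = openSqliteDatabase("example.sqlite");
db.exec("CREATE TABLE IF NOT EXISTS kv (key TEXT PRIMARY KEY, value TEXT)");
db.prepare("INSERT OR REPLACE INTO kv (key, value) VALUES (?, ?)").run("greeting", "hello");
console.log(db.prepare("SELECT value FROM kv WHERE key = ?").get("greeting"));
db.close();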
@@ -0,0 +1 @@
+ declare module "ioredis-mock";
@@ -0,0 +1,129 @@
+ import { afterEach, describe, expect, it } from "vitest";
+ import Redis from "ioredis-mock";
+ import { RedisIndexJobQueue } from "../src/index.js";
+
+ function delay(ms: number): Promise<void> {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+
+ describe("redis queue integration", () => {
+   const clients: Array<{ quit(): Promise<string> }> = [];
+
+   afterEach(async () => {
+     while (clients.length > 0) {
+       const client = clients.pop();
+       if (client) {
+         await client.quit();
+       }
+     }
+   });
+
+   it("retries jobs and then dead-letters after max attempts", async () => {
+     const redis = new Redis();
+     clients.push(redis);
+
+     const queue = new RedisIndexJobQueue(redis as unknown as any, {
+       keyPrefix: `test:index_jobs:${Date.now()}`,
+       maxAttempts: 2
+     });
+
+     await queue.enqueue({
+       tenant_id: "tenant-a",
+       workspace_id: "ws-a",
+       index_version: "idx-1",
+       manifest_key: "tenant-a/ws-a/idx-1/manifest.json"
+     });
+
+     const firstClaim = await queue.claimNext(0);
+     expect(firstClaim).toBeDefined();
+     await queue.retryOrDeadLetter(firstClaim!, "upstream timeout");
+
+     expect(await queue.pendingCount()).toBe(1);
+     expect(await queue.deadLetterCount()).toBe(0);
+
+     const secondClaim = await queue.claimNext(0);
+     expect(secondClaim?.payload.attempts).toBe(1);
+     await queue.retryOrDeadLetter(secondClaim!, "upstream timeout again");
+
+     expect(await queue.pendingCount()).toBe(0);
+     expect(await queue.deadLetterCount()).toBe(1);
+
+     const deadLetters = await queue.listDeadLetters();
+     expect(deadLetters[0]?.attempts).toBe(2);
+     expect(deadLetters[0]?.last_error).toContain("upstream timeout again");
+   });
+
+   it("reclaims orphaned processing jobs after lease expiry", async () => {
+     const redis = new Redis();
+     clients.push(redis);
+
+     const queue = new RedisIndexJobQueue(redis as unknown as any, {
+       keyPrefix: `test:index_jobs:reclaim:${Date.now()}`,
+       maxAttempts: 2,
+       claimLeaseSeconds: 1
+     });
+
+     await queue.enqueue({
+       tenant_id: "tenant-a",
+       workspace_id: "ws-a",
+       index_version: "idx-lease-1",
+       manifest_key: "tenant-a/ws-a/idx-lease-1/manifest.json"
+     });
+
+     const claimed = await queue.claimNext(0);
+     expect(claimed).toBeDefined();
+     expect(await queue.processingCount()).toBe(1);
+
+     expect(await queue.reclaimOrphaned(1)).toBe(0);
+     await delay(1_100);
+
+     expect(await queue.reclaimOrphaned(1)).toBe(1);
+     expect(await queue.pendingCount()).toBe(1);
+     expect(await queue.processingCount()).toBe(0);
+   });
+
+   it("reconnects and retries queue operations after transient redis disconnects", async () => {
+     const redis = new Redis();
+     clients.push(redis);
+
+     const queue = new RedisIndexJobQueue(redis as unknown as any, {
+       keyPrefix: `test:index_jobs:reconnect:${Date.now()}`,
+       maxAttempts: 2,
+       reconnectRetries: 2,
+       reconnectDelayMs: 0
+     });
+
+     let failRpush = true;
+     const originalRpush = redis.rpush.bind(redis);
+     redis.rpush = (async (...args: [string, ...string[]]) => {
+       if (failRpush) {
+         failRpush = false;
+         throw new Error("Connection is closed.");
+       }
+       return originalRpush(...args);
+     }) as unknown as typeof redis.rpush;
+
+     let failClaim = true;
+     const originalBrpoplpush = redis.brpoplpush.bind(redis);
+     redis.brpoplpush = (async (...args: [string, string, number]) => {
+       if (failClaim) {
+         failClaim = false;
+         throw new Error("read ECONNRESET");
+       }
+       return originalBrpoplpush(...args);
+     }) as unknown as typeof redis.brpoplpush;
+
+     await queue.enqueue({
+       tenant_id: "tenant-a",
+       workspace_id: "ws-a",
+       index_version: "idx-reconnect",
+       manifest_key: "tenant-a/ws-a/idx-reconnect/manifest.json"
+     });
+
+     const claimed = await queue.claimNext(0);
+     expect(claimed).toBeDefined();
+     await queue.ack(claimed!);
+     expect(await queue.pendingCount()).toBe(0);
+     expect(await queue.processingCount()).toBe(0);
+   });
+ });
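
To show how the queue operations exercised above fit together, here is a hedged sketch of a worker loop. Only the RedisIndexJobQueue constructor options, its methods (claimNext, ack, retryOrDeadLetter, reclaimOrphaned), and the enqueue fields appear in the tests above; the loop shape, the handleJob placeholder, the use of a real ioredis client, the assumption that claimed.payload carries manifest_key, and the assumption that the timeout arguments are in seconds are all illustrative.

import Redis from "ioredis";
import { RedisIndexJobQueue } from "@rce-mcp/data-plane";

// Hypothetical placeholder for the actual indexing work.
async function handleJob(manifestKey: string): Promise<void> {
  console.log(`indexing ${manifestKey}`);
}

async function runWorker(): Promise<void> {
  const queue = new RedisIndexJobQueue(new Redis() as any, {
    keyPrefix: "index_jobs",
    maxAttempts: 2
  });

  for (;;) {
    // Return jobs whose processing lease has expired to the pending list.
    await queue.reclaimOrphaned(30);

    // Block briefly waiting for the next job (argument assumed to be seconds).
    const claimed = await queue.claimNext(5);
    if (!claimed) {
      continue;
    }

    try {
      await handleJob(claimed.payload.manifest_key);
      await queue.ack(claimed);
    } catch (error) {
      // Requeue with the error recorded, or dead-letter once maxAttempts is reached.
      await queue.retryOrDeadLetter(claimed, String(error));
    }
  }
}

runWorker().catch((error) => {
  console.error(error);
  process.exit(1);
});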
@@ -0,0 +1,56 @@
+ import { describe, expect, it } from "vitest";
+ import { resolveRuntimeMode, sqliteRuntimeDriverName } from "../src/index.js";
+
+ describe("runtime mode resolution", () => {
+   it("defaults to hybrid and falls back to local when cloud env is incomplete", () => {
+     const resolved = resolveRuntimeMode({});
+     expect(resolved.requested_mode).toBe("hybrid");
+     expect(resolved.effective_mode).toBe("local");
+     expect(resolved.cloud_configured).toBe(false);
+     expect(resolved.missing_cloud_vars.length).toBeGreaterThan(0);
+   });
+
+   it("selects cloud in hybrid mode when all cloud env vars are present", () => {
+     const resolved = resolveRuntimeMode({
+       RCE_RUNTIME_MODE: "hybrid",
+       DATABASE_URL: "postgres://localhost/db",
+       REDIS_URL: "redis://localhost:6379",
+       S3_BUCKET: "bucket",
+       S3_REGION: "us-east-1",
+       S3_ACCESS_KEY_ID: "key",
+       S3_SECRET_ACCESS_KEY: "secret"
+     });
+
+     expect(resolved.requested_mode).toBe("hybrid");
+     expect(resolved.effective_mode).toBe("cloud");
+     expect(resolved.cloud_configured).toBe(true);
+     expect(resolved.missing_cloud_vars).toHaveLength(0);
+   });
+
+   it("forces local mode without requiring cloud env", () => {
+     const resolved = resolveRuntimeMode({
+       RCE_RUNTIME_MODE: "local"
+     });
+
+     expect(resolved.requested_mode).toBe("local");
+     expect(resolved.effective_mode).toBe("local");
+   });
+
+   it("throws for cloud mode when required cloud env vars are missing", () => {
+     expect(() =>
+       resolveRuntimeMode({
+         RCE_RUNTIME_MODE: "cloud",
+         DATABASE_URL: "postgres://localhost/db"
+       })
+     ).toThrow(/RCE_RUNTIME_MODE=cloud requires/i);
+   });
+
+   it("selects a runtime-compatible sqlite driver", () => {
+     const driver = sqliteRuntimeDriverName();
+     if (process.versions.bun) {
+       expect(driver).toBe("bun:sqlite");
+       return;
+     }
+     expect(driver).toBe("node:sqlite");
+   });
+ });
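
As a brief, hedged illustration of how the resolution tested above could be wired in at process start: the field names and mode values come from the assertions above, while reading process.env directly (and assuming the function accepts a process.env-shaped record), the package import path, and the log messages are assumptions.

import { resolveRuntimeMode } from "@rce-mcp/data-plane";

// Resolve once at startup; hybrid (the default) selects cloud only when all
// cloud variables are present, otherwise it falls back to local.
const runtime = resolveRuntimeMode(process.env);

if (runtime.effective_mode === "cloud") {
  console.log("data plane: cloud (Postgres/Redis/S3)");
} else {
  console.log(
    `data plane: local sqlite (missing cloud vars: ${runtime.missing_cloud_vars.join(", ") || "none"})`
  );
}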
@@ -0,0 +1,54 @@
+ import { mkdtemp, rm } from "node:fs/promises";
+ import { tmpdir } from "node:os";
+ import { join } from "node:path";
+ import { afterEach, describe, expect, it } from "vitest";
+ import { SqliteIndexJobQueue } from "../src/index.js";
+
+ function delay(ms: number): Promise<void> {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+
+ describe("sqlite queue integration", () => {
+   const dirs: string[] = [];
+
+   afterEach(async () => {
+     while (dirs.length > 0) {
+       const dir = dirs.pop();
+       if (dir) {
+         await rm(dir, { recursive: true, force: true });
+       }
+     }
+   });
+
+   it("reclaims stale processing jobs", async () => {
+     const root = await mkdtemp(join(tmpdir(), "rce-sqlite-queue-"));
+     dirs.push(root);
+     const sqlitePath = join(root, "jobs.sqlite");
+     const queue = new SqliteIndexJobQueue(sqlitePath, {
+       maxAttempts: 2,
+       claimTtlSeconds: 1
+     });
+
+     try {
+       await queue.enqueue({
+         tenant_id: "tenant-a",
+         workspace_id: "ws-a",
+         index_version: "idx-1",
+         manifest_key: "tenant-a/ws-a/idx-1/manifest.json"
+       });
+
+       const claimed = await queue.claimNext(0);
+       expect(claimed).toBeDefined();
+       expect(await queue.processingCount()).toBe(1);
+
+       expect(await queue.reclaimOrphaned(1)).toBe(0);
+       await delay(1_100);
+
+       expect(await queue.reclaimOrphaned(1)).toBe(1);
+       expect(await queue.pendingCount()).toBe(1);
+       expect(await queue.processingCount()).toBe(0);
+     } finally {
+       queue.close();
+     }
+   });
+ });
@@ -0,0 +1,71 @@
+ import { mkdtemp, rm } from "node:fs/promises";
+ import { tmpdir } from "node:os";
+ import { join } from "node:path";
+ import { afterEach, describe, expect, it } from "vitest";
+ import { SqliteUsageMeterStore } from "../src/index.js";
+
+ describe("usage metering store", () => {
+   const dirs: string[] = [];
+
+   afterEach(async () => {
+     while (dirs.length > 0) {
+       const dir = dirs.pop();
+       if (dir) {
+         await rm(dir, { recursive: true, force: true });
+       }
+     }
+   });
+
+   it("persists usage summaries and audit events", async () => {
+     const root = await mkdtemp(join(tmpdir(), "rce-usage-metering-"));
+     dirs.push(root);
+     const sqlitePath = join(root, "metering.sqlite");
+
+     const store = new SqliteUsageMeterStore(sqlitePath);
+     await store.migrate();
+
+     try {
+       await store.recordUsage({
+         tenant_id: "tenant-a",
+         workspace_id: "ws-a",
+         tool_name: "search_context",
+         trace_id: "trc-1",
+         status: "success",
+         latency_ms: 42,
+         result_count: 3,
+         units: 3
+       });
+       await store.recordUsage({
+         tenant_id: "tenant-a",
+         workspace_id: "ws-a",
+         tool_name: "search_context",
+         trace_id: "trc-2",
+         status: "error",
+         latency_ms: 84,
+         result_count: 0,
+         units: 1
+       });
+       await store.recordAuditEvent({
+         tenant_id: "tenant-a",
+         subject: "user-1",
+         action: "search_context",
+         resource: "workspace:ws-a",
+         status: "success",
+         trace_id: "trc-1",
+         details: { statusCode: 200 }
+       });
+
+       const usage = await store.listUsageSummary({ tenant_id: "tenant-a" });
+       expect(usage.length).toBe(1);
+       expect(usage[0]?.request_count).toBe(2);
+       expect(usage[0]?.error_count).toBe(1);
+       expect(usage[0]?.total_units).toBe(4);
+
+       const audit = await store.listAuditEvents({ tenant_id: "tenant-a", limit: 10 });
+       expect(audit.length).toBe(1);
+       expect(audit[0]?.action).toBe("search_context");
+     } finally {
+       store.close();
+     }
+   });
+ });
@@ -0,0 +1,13 @@
+ {
+   "extends": "../../tsconfig.base.json",
+   "compilerOptions": {
+     "composite": true,
+     "declaration": true,
+     "declarationMap": false,
+     "rootDir": "src",
+     "outDir": "dist",
+     "tsBuildInfoFile": "dist/.tsbuildinfo"
+   },
+   "include": ["src/**/*.ts", "src/**/*.d.ts"],
+   "references": [{ "path": "../contracts/tsconfig.build.json" }]
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "extends": "../../tsconfig.base.json",
+   "include": ["src/**/*.ts", "src/**/*.d.ts", "test/**/*.ts", "test/**/*.d.ts"]
+ }