bunsane 0.2.8 → 0.2.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +26 -0
- package/core/App.ts +97 -0
- package/core/remote/CircuitBreaker.ts +115 -0
- package/core/remote/OutboxWorker.ts +176 -0
- package/core/remote/RemoteManager.ts +400 -0
- package/core/remote/RpcCaller.ts +310 -0
- package/core/remote/StreamConsumer.ts +535 -0
- package/core/remote/decorators.ts +121 -0
- package/core/remote/health.ts +139 -0
- package/core/remote/index.ts +37 -0
- package/core/remote/metrics.ts +99 -0
- package/core/remote/outboxSchema.ts +41 -0
- package/core/remote/types.ts +151 -0
- package/core/scheduler/DistributedLock.ts +309 -266
- package/docs/SCALABILITY_PLAN.md +3 -3
- package/package.json +1 -1
- package/query/FilterBuilder.ts +25 -0
- package/query/Query.ts +5 -1
- package/query/builders/JsonbArrayBuilder.ts +116 -0
- package/query/index.ts +28 -2
- package/tests/helpers/MockRedisClient.ts +113 -0
- package/tests/helpers/MockRedisStreamServer.ts +448 -0
- package/tests/integration/query/Query.exec.test.ts +67 -14
- package/tests/integration/query/Query.jsonbArray.test.ts +214 -0
- package/tests/integration/remote/dlq.test.ts +175 -0
- package/tests/integration/remote/event-dispatch.test.ts +114 -0
- package/tests/integration/remote/outbox.test.ts +130 -0
- package/tests/integration/remote/rpc.test.ts +177 -0
- package/tests/pglite-setup.ts +1 -0
- package/tests/unit/query/JsonbArrayBuilder.test.ts +178 -0
- package/tests/unit/remote/CircuitBreaker.test.ts +159 -0
- package/tests/unit/remote/RemoteError.test.ts +55 -0
- package/tests/unit/remote/decorators.test.ts +195 -0
- package/tests/unit/remote/metrics.test.ts +115 -0
- package/tests/unit/remote/mockRedisStreamServer.test.ts +104 -0
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* JSONB Array Filter Builders
|
|
3
|
+
*
|
|
4
|
+
* Provides PostgreSQL JSONB array containment and existence operators
|
|
5
|
+
* as custom filter builders for the BunSane Query system.
|
|
6
|
+
*
|
|
7
|
+
* Operators:
|
|
8
|
+
* - CONTAINS (@>) — array contains value(s)
|
|
9
|
+
* - CONTAINED_BY (<@) — array is subset of given values
|
|
10
|
+
* - HAS_ANY (?|) — array has any of the given values
|
|
11
|
+
* - HAS_ALL (?&) — array has all of the given values
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import type { FilterBuilder, FilterBuilderOptions } from "../FilterBuilder";
|
|
15
|
+
import { buildJSONBPath } from "../FilterBuilder";
|
|
16
|
+
import type { QueryFilter } from "../QueryContext";
|
|
17
|
+
import type { QueryContext } from "../QueryContext";
|
|
18
|
+
|
|
19
|
+
/**
 * Operator name constants for the JSONB array filter builders.
 * These strings are the keys under which the builders are registered
 * (see query/index.ts) and therefore the operator names callers pass
 * to Query.filter.
 */
export const JSONB_ARRAY_OPS = {
  CONTAINS: "CONTAINS", // @>  — jsonb array contains the given value(s)
  CONTAINED_BY: "CONTAINED_BY", // <@ — jsonb array is a subset of the given values
  HAS_ANY: "HAS_ANY", // ?|  — array has any of the given values (text-level match)
  HAS_ALL: "HAS_ALL", // ?&  — array has all of the given values (text-level match)
} as const;
|
|
25
|
+
|
|
26
|
+
function normalizeToArray(value: any): any[] {
|
|
27
|
+
return Array.isArray(value) ? value : [value];
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
function validateJsonbArrayFilter(filter: QueryFilter): boolean {
|
|
31
|
+
if (filter.value === null || filter.value === undefined) return false;
|
|
32
|
+
const arr = normalizeToArray(filter.value);
|
|
33
|
+
return arr.length > 0 && arr.every(
|
|
34
|
+
(v: any) => typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean'
|
|
35
|
+
);
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* CONTAINS (@>) — "array contains value(s)"
|
|
40
|
+
*
|
|
41
|
+
* Single value: Query.filter("tags", FilterOp.CONTAINS, "urgent")
|
|
42
|
+
* Multiple: Query.filter("tags", FilterOp.CONTAINS, ["urgent", "high"])
|
|
43
|
+
*/
|
|
44
|
+
export const jsonbContainsBuilder: FilterBuilder = (
|
|
45
|
+
filter: QueryFilter, alias: string, context: QueryContext
|
|
46
|
+
): { sql: string; addedParams: number } => {
|
|
47
|
+
const jsonbPath = buildJSONBPath(filter.field, alias);
|
|
48
|
+
const values = normalizeToArray(filter.value);
|
|
49
|
+
const paramIndex = context.addParam(values);
|
|
50
|
+
return {
|
|
51
|
+
sql: `${jsonbPath} @> $${paramIndex}::jsonb`,
|
|
52
|
+
addedParams: 1,
|
|
53
|
+
};
|
|
54
|
+
};
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* CONTAINED_BY (<@) — "array is subset of given values"
|
|
58
|
+
*
|
|
59
|
+
* Query.filter("tags", FilterOp.CONTAINED_BY, ["urgent", "high", "low"])
|
|
60
|
+
*/
|
|
61
|
+
export const jsonbContainedByBuilder: FilterBuilder = (
|
|
62
|
+
filter: QueryFilter, alias: string, context: QueryContext
|
|
63
|
+
): { sql: string; addedParams: number } => {
|
|
64
|
+
const jsonbPath = buildJSONBPath(filter.field, alias);
|
|
65
|
+
const values = normalizeToArray(filter.value);
|
|
66
|
+
const paramIndex = context.addParam(values);
|
|
67
|
+
return {
|
|
68
|
+
sql: `${jsonbPath} <@ $${paramIndex}::jsonb`,
|
|
69
|
+
addedParams: 1,
|
|
70
|
+
};
|
|
71
|
+
};
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* HAS_ANY (?|) — "array has any of the given values"
|
|
75
|
+
*
|
|
76
|
+
* Query.filter("tags", FilterOp.HAS_ANY, ["urgent", "high"])
|
|
77
|
+
*
|
|
78
|
+
* Note: ?| operates on text[], so values are cast to strings.
|
|
79
|
+
*/
|
|
80
|
+
export const jsonbHasAnyBuilder: FilterBuilder = (
|
|
81
|
+
filter: QueryFilter, alias: string, context: QueryContext
|
|
82
|
+
): { sql: string; addedParams: number } => {
|
|
83
|
+
const jsonbPath = buildJSONBPath(filter.field, alias);
|
|
84
|
+
const values = normalizeToArray(filter.value).map(String);
|
|
85
|
+
const paramIndex = context.addParam(values);
|
|
86
|
+
return {
|
|
87
|
+
sql: `${jsonbPath} ?| $${paramIndex}::text[]`,
|
|
88
|
+
addedParams: 1,
|
|
89
|
+
};
|
|
90
|
+
};
|
|
91
|
+
|
|
92
|
+
/**
|
|
93
|
+
* HAS_ALL (?&) — "array has all of the given values"
|
|
94
|
+
*
|
|
95
|
+
* Query.filter("tags", FilterOp.HAS_ALL, ["urgent", "high"])
|
|
96
|
+
*
|
|
97
|
+
* Note: ?& operates on text[], so values are cast to strings.
|
|
98
|
+
*/
|
|
99
|
+
export const jsonbHasAllBuilder: FilterBuilder = (
|
|
100
|
+
filter: QueryFilter, alias: string, context: QueryContext
|
|
101
|
+
): { sql: string; addedParams: number } => {
|
|
102
|
+
const jsonbPath = buildJSONBPath(filter.field, alias);
|
|
103
|
+
const values = normalizeToArray(filter.value).map(String);
|
|
104
|
+
const paramIndex = context.addParam(values);
|
|
105
|
+
return {
|
|
106
|
+
sql: `${jsonbPath} ?& $${paramIndex}::text[]`,
|
|
107
|
+
addedParams: 1,
|
|
108
|
+
};
|
|
109
|
+
};
|
|
110
|
+
|
|
111
|
+
/**
 * Shared registration options for all four JSONB array builders.
 * NOTE(review): the exact semantics of these flags live in
 * FilterBuilderOptions (not visible here) — confirm against FilterBuilder.ts.
 */
export const jsonbArrayOptions: FilterBuilderOptions = {
  supportsLateral: true,
  requiresIndex: false,
  complexityScore: 1,
  // Rejects null/undefined, empty arrays, and non-primitive elements.
  validate: validateJsonbArrayFilter,
};
|
package/query/index.ts
CHANGED
|
@@ -17,5 +17,31 @@ export type FilterSchema<T = any> = {
|
|
|
17
17
|
|
|
18
18
|
// Custom Filter Builder exports
|
|
19
19
|
export type { FilterBuilder, FilterResult, FilterBuilderOptions } from "./FilterBuilder";
|
|
20
|
-
export { buildJSONPath } from "./FilterBuilder";
|
|
21
|
-
export { FilterBuilderRegistry } from "./FilterBuilderRegistry";
|
|
20
|
+
export { buildJSONPath, buildJSONBPath } from "./FilterBuilder";
|
|
21
|
+
export { FilterBuilderRegistry } from "./FilterBuilderRegistry";
|
|
22
|
+
|
|
23
|
+
// JSONB Array Builder exports
|
|
24
|
+
export {
|
|
25
|
+
jsonbContainsBuilder,
|
|
26
|
+
jsonbContainedByBuilder,
|
|
27
|
+
jsonbHasAnyBuilder,
|
|
28
|
+
jsonbHasAllBuilder,
|
|
29
|
+
jsonbArrayOptions,
|
|
30
|
+
JSONB_ARRAY_OPS,
|
|
31
|
+
} from "./builders/JsonbArrayBuilder";
|
|
32
|
+
|
|
33
|
+
// Auto-register JSONB array builders (core framework feature).
// Registration is a module-load side effect: importing this module anywhere
// makes CONTAINS / CONTAINED_BY / HAS_ANY / HAS_ALL usable in Query.filter.
import { FilterBuilderRegistry } from "./FilterBuilderRegistry";
import {
  jsonbContainsBuilder,
  jsonbContainedByBuilder,
  jsonbHasAnyBuilder,
  jsonbHasAllBuilder,
  jsonbArrayOptions,
  JSONB_ARRAY_OPS,
} from "./builders/JsonbArrayBuilder";

// All four share the same options object; the last two arguments appear to be
// a source label and version — confirm against FilterBuilderRegistry.register.
FilterBuilderRegistry.register(JSONB_ARRAY_OPS.CONTAINS, jsonbContainsBuilder, jsonbArrayOptions, "bunsane-jsonb-array", "1.0.0");
FilterBuilderRegistry.register(JSONB_ARRAY_OPS.CONTAINED_BY, jsonbContainedByBuilder, jsonbArrayOptions, "bunsane-jsonb-array", "1.0.0");
FilterBuilderRegistry.register(JSONB_ARRAY_OPS.HAS_ANY, jsonbHasAnyBuilder, jsonbArrayOptions, "bunsane-jsonb-array", "1.0.0");
FilterBuilderRegistry.register(JSONB_ARRAY_OPS.HAS_ALL, jsonbHasAllBuilder, jsonbArrayOptions, "bunsane-jsonb-array", "1.0.0");
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ioredis-shaped client backed by a MockRedisStreamServer.
|
|
3
|
+
*
|
|
4
|
+
* Cast the returned instance to `Redis` (from "ioredis") when passing into
|
|
5
|
+
* the remote subsystem via `redisFactory`. Only methods the remote layer
|
|
6
|
+
* touches are implemented; others throw on use.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import type { MockRedisStreamServer } from "./MockRedisStreamServer";
|
|
10
|
+
|
|
11
|
+
export class MockRedisClient {
|
|
12
|
+
private server: MockRedisStreamServer;
|
|
13
|
+
private connected = true;
|
|
14
|
+
private listeners = new Map<string, Array<(...args: any[]) => void>>();
|
|
15
|
+
|
|
16
|
+
constructor(server: MockRedisStreamServer) {
|
|
17
|
+
this.server = server;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
on(event: string, listener: (...args: any[]) => void): this {
|
|
21
|
+
const arr = this.listeners.get(event) ?? [];
|
|
22
|
+
arr.push(listener);
|
|
23
|
+
this.listeners.set(event, arr);
|
|
24
|
+
return this;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
async xadd(key: string, ...args: any[]): Promise<string | null> {
|
|
28
|
+
this.ensureConnected();
|
|
29
|
+
try {
|
|
30
|
+
return this.server.xadd(key, ...args);
|
|
31
|
+
} catch (err: any) {
|
|
32
|
+
throw err;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
async xgroup(...args: any[]): Promise<string> {
|
|
37
|
+
this.ensureConnected();
|
|
38
|
+
const [op, key, group, id, mk] = args;
|
|
39
|
+
return this.server.xgroup(op, key, group, id, mk);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
async xreadgroup(...args: any[]): Promise<any> {
|
|
43
|
+
this.ensureConnected();
|
|
44
|
+
return this.server.xreadgroup(...args);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
async xread(...args: any[]): Promise<any> {
|
|
48
|
+
this.ensureConnected();
|
|
49
|
+
return this.server.xread(...args);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
async xack(key: string, group: string, msgId: string): Promise<number> {
|
|
53
|
+
this.ensureConnected();
|
|
54
|
+
return this.server.xack(key, group, msgId);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
async xpending(...args: any[]): Promise<any> {
|
|
58
|
+
this.ensureConnected();
|
|
59
|
+
const [key, group, ...rest] = args;
|
|
60
|
+
return this.server.xpending(key, group, ...rest);
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
async xautoclaim(...args: any[]): Promise<any> {
|
|
64
|
+
this.ensureConnected();
|
|
65
|
+
return this.server.xautoclaim(
|
|
66
|
+
args[0],
|
|
67
|
+
args[1],
|
|
68
|
+
args[2],
|
|
69
|
+
Number(args[3]),
|
|
70
|
+
args[4],
|
|
71
|
+
...args.slice(5)
|
|
72
|
+
);
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
async xlen(key: string): Promise<number> {
|
|
76
|
+
this.ensureConnected();
|
|
77
|
+
return this.server.xlen(key);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
async xrange(...args: any[]): Promise<any> {
|
|
81
|
+
this.ensureConnected();
|
|
82
|
+
return this.server.xrange(
|
|
83
|
+
args[0],
|
|
84
|
+
args[1],
|
|
85
|
+
args[2],
|
|
86
|
+
...args.slice(3)
|
|
87
|
+
);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
async ping(): Promise<string> {
|
|
91
|
+
this.ensureConnected();
|
|
92
|
+
return this.server.ping();
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
disconnect(): void {
|
|
96
|
+
this.connected = false;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
async quit(): Promise<string> {
|
|
100
|
+
this.connected = false;
|
|
101
|
+
return "OK";
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
private ensureConnected(): void {
|
|
105
|
+
if (!this.connected) {
|
|
106
|
+
throw new Error("Connection is closed");
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
export function createMockRedisFactory(server: MockRedisStreamServer) {
|
|
112
|
+
return (_blocking: boolean) => new MockRedisClient(server);
|
|
113
|
+
}
|
|
@@ -0,0 +1,448 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* In-memory Redis Streams shim for Tier 2 integration tests.
|
|
3
|
+
*
|
|
4
|
+
* Implements only the commands the remote subsystem issues:
|
|
5
|
+
* xadd, xreadgroup, xread, xack, xgroup CREATE, xpending,
|
|
6
|
+
* xautoclaim, xlen, xrange, ping
|
|
7
|
+
*
|
|
8
|
+
* Shared server: multiple MockRedisClient instances pointing at the same
|
|
9
|
+
* server simulate separate app processes talking to one Redis.
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
/** One message stored in a stream. */
interface StreamEntry {
  id: string; // "<ms>-<seq>" as generated by MockRedisStreamServer.generateId
  fields: string[]; // flat [k,v,k,v,...] — same wire shape ioredis returns
}

/** One record in a group's Pending Entries List (PEL). */
interface PelEntry {
  msgId: string; // stream entry id this record tracks
  consumer: string; // consumer currently holding the message
  deliveredAt: number; // epoch ms of the most recent delivery/claim
  deliveryCount: number; // incremented on each redelivery or claim
}

/** Per-group state: membership, pending messages, and read cursor. */
interface ConsumerGroup {
  name: string;
  consumers: Set<string>; // consumers seen via xreadgroup
  pel: Map<string, PelEntry>; // keyed by message id
  /** Highest ID delivered via ">" — next read starts after this. */
  lastDeliveredId: string;
}

/** Full in-memory state for one stream key. */
interface Stream {
  key: string;
  entries: StreamEntry[]; // kept in id order (append-only, trimmed from front)
  groups: Map<string, ConsumerGroup>;
  lastGeneratedTs: number; // ms timestamp of the most recent generated id
  seqWithinMs: number; // sequence counter reset whenever the ms changes
}

// Smallest possible stream id; used as the "read from the beginning" cursor.
const MIN_ID = "0-0";
|
|
41
|
+
|
|
42
|
+
function parseId(id: string): [number, number] {
|
|
43
|
+
const dash = id.indexOf("-");
|
|
44
|
+
if (dash < 0) return [Number(id), 0];
|
|
45
|
+
return [Number(id.slice(0, dash)), Number(id.slice(dash + 1))];
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
function idLess(a: string, b: string): boolean {
|
|
49
|
+
const [at, as] = parseId(a);
|
|
50
|
+
const [bt, bs] = parseId(b);
|
|
51
|
+
if (at !== bt) return at < bt;
|
|
52
|
+
return as < bs;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
function idGreater(a: string, b: string): boolean {
|
|
56
|
+
const [at, as] = parseId(a);
|
|
57
|
+
const [bt, bs] = parseId(b);
|
|
58
|
+
if (at !== bt) return at > bt;
|
|
59
|
+
return as > bs;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
/**
 * In-memory Redis Streams server shared by any number of MockRedisClient
 * instances (each client simulates a separate process talking to one Redis).
 *
 * Blocking reads are emulated by polling every 10ms until the BLOCK deadline.
 * Ids, PEL bookkeeping, and group cursors follow Redis Streams semantics
 * closely enough for the remote-subsystem integration tests.
 */
export class MockRedisStreamServer {
  private streams = new Map<string, Stream>();
  /** Fault injection for tests that need to simulate XADD failures. */
  public xaddShouldFail = false;

  // Poll interval helper for the BLOCK-emulation loops in xread/xreadgroup.
  private sleep(ms: number): Promise<void> {
    return new Promise((r) => setTimeout(r, ms));
  }

  // Lazily create stream state on first touch (XADD, or XGROUP MKSTREAM).
  private getOrCreateStream(key: string): Stream {
    let s = this.streams.get(key);
    if (!s) {
      s = {
        key,
        entries: [],
        groups: new Map(),
        lastGeneratedTs: 0,
        seqWithinMs: 0,
      };
      this.streams.set(key, s);
    }
    return s;
  }

  // "<ms>-<seq>": seq increments within one millisecond, resets otherwise.
  // NOTE(review): if the wall clock ever steps backwards, generated ids can
  // regress — acceptable for short-lived tests, but worth confirming.
  private generateId(stream: Stream): string {
    const now = Date.now();
    if (now === stream.lastGeneratedTs) {
      stream.seqWithinMs++;
    } else {
      stream.lastGeneratedTs = now;
      stream.seqWithinMs = 0;
    }
    return `${now}-${stream.seqWithinMs}`;
  }

  /**
   * XADD key [MAXLEN [~] N] * field value [field value ...]
   * Returns the generated id. Only "*" (auto-id) is supported; MAXLEN trims
   * the oldest entries immediately (exact, even when "~" was passed).
   * Throws "MOCK_XADD_FAIL" while `xaddShouldFail` is set.
   */
  xadd(key: string, ...args: any[]): string {
    if (this.xaddShouldFail) {
      throw new Error("MOCK_XADD_FAIL");
    }
    const stream = this.getOrCreateStream(key);

    // Parse leading options (MAXLEN with optional "~"/"=" modifier).
    let i = 0;
    let maxLen: number | null = null;
    if (args[i] === "MAXLEN") {
      i++;
      if (args[i] === "~" || args[i] === "=") i++;
      maxLen = Number(args[i]);
      i++;
    }

    // Expect "*" — explicit ids are not implemented.
    if (args[i] !== "*") {
      throw new Error(`MockRedis xadd: only "*" auto-id supported, got ${args[i]}`);
    }
    i++;

    // Remaining args are the flat field/value list; stringified like Redis.
    const fields: string[] = [];
    for (; i < args.length; i++) {
      fields.push(String(args[i]));
    }

    const id = this.generateId(stream);
    stream.entries.push({ id, fields });

    // Trim from the front so the newest maxLen entries survive.
    if (maxLen !== null && stream.entries.length > maxLen) {
      stream.entries.splice(0, stream.entries.length - maxLen);
    }

    return id;
  }

  /**
   * XGROUP CREATE stream group id [MKSTREAM]
   * id "$" = start from latest, "0" / "0-0" = start from beginning.
   * Throws "ERR no such key" without MKSTREAM on a missing stream, and a
   * BUSYGROUP error when the group already exists (both mirror Redis).
   */
  xgroup(op: string, key: string, groupName: string, startId: string, mkstream?: string): string {
    if (op !== "CREATE") {
      throw new Error(`MockRedis xgroup: op "${op}" not supported`);
    }
    const hasStream = this.streams.has(key);
    if (!hasStream && mkstream !== "MKSTREAM") {
      throw new Error("ERR no such key");
    }
    const stream = this.getOrCreateStream(key);
    if (stream.groups.has(groupName)) {
      const err = new Error(
        `BUSYGROUP Consumer Group name already exists`
      );
      throw err;
    }
    // "$" snapshots the current tail; "0"/"0-0" means deliver everything;
    // any other id is taken verbatim as the starting cursor.
    const lastDeliveredId =
      startId === "$"
        ? stream.entries.length > 0
          ? stream.entries[stream.entries.length - 1]!.id
          : MIN_ID
        : startId === "0" || startId === "0-0"
          ? MIN_ID
          : startId;
    stream.groups.set(groupName, {
      name: groupName,
      consumers: new Set(),
      pel: new Map(),
      lastDeliveredId,
    });
    return "OK";
  }

  /**
   * XREADGROUP GROUP g consumer [COUNT n] [BLOCK ms] STREAMS s ">"
   * Returns [[streamKey, [[id, fields], ...]]] or null on timeout.
   * Without BLOCK the deadline is "now", i.e. a single non-blocking attempt.
   */
  async xreadgroup(...args: any[]): Promise<any> {
    let i = 0;
    if (args[i] !== "GROUP") throw new Error("expected GROUP");
    i++;
    const groupName = String(args[i++]);
    const consumer = String(args[i++]);
    let count = Infinity; // no COUNT → deliver everything available
    let blockMs = 0;
    while (args[i] !== "STREAMS") {
      const opt = String(args[i++]).toUpperCase();
      if (opt === "COUNT") count = Number(args[i++]);
      else if (opt === "BLOCK") blockMs = Number(args[i++]);
      else throw new Error(`unknown XREADGROUP opt ${opt}`);
    }
    i++; // skip STREAMS
    // After STREAMS: first half of the remaining args are keys, second half ids.
    const streams: string[] = [];
    const ids: string[] = [];
    const remaining = args.slice(i);
    const half = remaining.length / 2;
    for (let k = 0; k < half; k++) {
      streams.push(String(remaining[k]));
      ids.push(String(remaining[k + half]));
    }

    // Emulate BLOCK by polling until something is deliverable or we time out.
    const deadline = Date.now() + blockMs;
    while (true) {
      const result = this.readGroupOnce(groupName, consumer, count, streams, ids);
      if (result) return result;
      if (Date.now() >= deadline) return null;
      await this.sleep(10);
    }
  }

  // One non-blocking XREADGROUP attempt across all requested streams.
  // ">" delivers new messages (advancing the group cursor and updating the
  // PEL); any other id re-reads this consumer's own pending messages.
  private readGroupOnce(
    groupName: string,
    consumer: string,
    count: number,
    streams: string[],
    ids: string[]
  ): any[] | null {
    const out: any[] = [];
    for (let s = 0; s < streams.length; s++) {
      const streamKey = streams[s]!;
      const id = ids[s]!;
      const stream = this.streams.get(streamKey);
      if (!stream) continue;
      const group = stream.groups.get(groupName);
      if (!group) continue;
      // Consumers are auto-created on first read, like Redis.
      group.consumers.add(consumer);

      let newEntries: StreamEntry[];
      if (id === ">") {
        // New messages only: everything past the group's delivery cursor.
        newEntries = stream.entries.filter((e) =>
          idGreater(e.id, group.lastDeliveredId)
        );
        if (newEntries.length > count) {
          newEntries = newEntries.slice(0, count);
        }
        // Advance the cursor and record (or bump) each PEL entry as we go.
        for (const entry of newEntries) {
          group.lastDeliveredId = entry.id;
          const existing = group.pel.get(entry.id);
          if (existing) {
            existing.deliveryCount++;
            existing.deliveredAt = Date.now();
            existing.consumer = consumer;
          } else {
            group.pel.set(entry.id, {
              msgId: entry.id,
              consumer,
              deliveredAt: Date.now(),
              deliveryCount: 1,
            });
          }
        }
      } else {
        // Re-read this consumer's PEL: pending entries it owns with ids
        // strictly greater than the supplied cursor. No PEL mutation here.
        newEntries = stream.entries.filter((e) => {
          const p = group.pel.get(e.id);
          return p && p.consumer === consumer && idGreater(e.id, id);
        });
        if (newEntries.length > count) {
          newEntries = newEntries.slice(0, count);
        }
      }

      if (newEntries.length > 0) {
        out.push([
          streamKey,
          newEntries.map((e) => [e.id, e.fields]),
        ]);
      }
    }
    // Redis returns nil when nothing matched; callers rely on the null.
    return out.length > 0 ? out : null;
  }

  /**
   * XREAD [COUNT n] [BLOCK ms] STREAMS s id
   * Group-less read; "$" is resolved to the current tail once, up front.
   * Returns [[streamKey, [[id, fields], ...]]] or null on timeout.
   */
  async xread(...args: any[]): Promise<any> {
    let i = 0;
    let count = Infinity;
    let blockMs = 0;
    while (args[i] !== "STREAMS") {
      const opt = String(args[i++]).toUpperCase();
      if (opt === "COUNT") count = Number(args[i++]);
      else if (opt === "BLOCK") blockMs = Number(args[i++]);
      else throw new Error(`unknown XREAD opt ${opt}`);
    }
    i++;
    const remaining = args.slice(i);
    const half = remaining.length / 2;
    const streams: string[] = [];
    const ids: string[] = [];
    for (let k = 0; k < half; k++) {
      streams.push(String(remaining[k]));
      ids.push(String(remaining[k + half]));
    }

    // Resolve "$" to the current last id per stream once, up front.
    // Subsequent polls compare against that snapshot so new entries get
    // delivered exactly once.
    const resolvedIds = ids.map((id, k) => {
      if (id !== "$") return id;
      const stream = this.streams.get(streams[k]!);
      if (!stream || stream.entries.length === 0) return MIN_ID;
      return stream.entries[stream.entries.length - 1]!.id;
    });

    const deadline = Date.now() + blockMs;
    while (true) {
      const out: any[] = [];
      for (let s = 0; s < streams.length; s++) {
        const streamKey = streams[s]!;
        const afterId = resolvedIds[s]!;
        const stream = this.streams.get(streamKey);
        if (!stream) continue;
        const matching = stream.entries
          .filter((e) => idGreater(e.id, afterId))
          .slice(0, count);
        if (matching.length > 0) {
          out.push([
            streamKey,
            matching.map((e) => [e.id, e.fields]),
          ]);
        }
      }
      if (out.length > 0) return out;
      if (Date.now() >= deadline) return null;
      await this.sleep(10);
    }
  }

  // XACK: drop the message from the group's PEL. 1 if it was pending, else 0.
  // The entry itself stays in the stream (matches Redis).
  xack(key: string, groupName: string, msgId: string): number {
    const stream = this.streams.get(key);
    if (!stream) return 0;
    const group = stream.groups.get(groupName);
    if (!group) return 0;
    return group.pel.delete(msgId) ? 1 : 0;
  }

  /**
   * Two forms:
   *   XPENDING key group                              -> summary
   *   XPENDING key group minId maxId count [consumer] -> detail
   * Summary: [total, smallestId, largestId, [[consumer, countStr], ...]].
   * Detail rows: [msgId, consumer, idleMs, deliveryCount].
   * NOTE(review): the detail form ignores its count limit (`_count`) and the
   * optional consumer filter — confirm no caller depends on either.
   */
  xpending(key: string, groupName: string, ...args: any[]): any {
    const stream = this.streams.get(key);
    if (!stream) return [0, null, null, null];
    const group = stream.groups.get(groupName);
    if (!group) return [0, null, null, null];

    if (args.length === 0) {
      // Summary form: sort pending ids to report the min/max bounds.
      const ids = Array.from(group.pel.keys()).sort((a, b) =>
        idLess(a, b) ? -1 : idGreater(a, b) ? 1 : 0
      );
      if (ids.length === 0) return [0, null, null, null];
      const byConsumer = new Map<string, number>();
      for (const p of group.pel.values()) {
        byConsumer.set(
          p.consumer,
          (byConsumer.get(p.consumer) ?? 0) + 1
        );
      }
      return [
        ids.length,
        ids[0],
        ids[ids.length - 1],
        // Redis reports per-consumer counts as strings.
        Array.from(byConsumer.entries()).map(([c, n]) => [c, String(n)]),
      ];
    }

    // Detail form: every PEL entry within [minId, maxId], insertion order.
    const [minId, maxId, _count] = args;
    const out: any[] = [];
    for (const p of group.pel.values()) {
      if (idLess(p.msgId, minId) || idGreater(p.msgId, maxId)) continue;
      out.push([
        p.msgId,
        p.consumer,
        Date.now() - p.deliveredAt,
        p.deliveryCount,
      ]);
    }
    return out;
  }

  /**
   * XAUTOCLAIM stream group consumer idleMs cursor [COUNT n]
   * Returns [nextCursor, entries]
   * Claims every PEL entry idle for at least idleMs, reassigning it to
   * `consumer` and bumping its delivery count. Always returns cursor "0-0"
   * (a full scan), and ignores COUNT.
   * NOTE(review): entries trimmed out of the stream are still re-claimed in
   * the PEL but omitted from the result — real Redis deletes such PEL
   * entries; confirm tests don't depend on that difference.
   */
  xautoclaim(
    key: string,
    groupName: string,
    consumer: string,
    idleMs: number,
    cursor: string,
    ..._rest: any[]
  ): any {
    const stream = this.streams.get(key);
    if (!stream) return ["0-0", []];
    const group = stream.groups.get(groupName);
    if (!group) return ["0-0", []];

    const now = Date.now();
    const claimed: any[] = [];
    for (const p of group.pel.values()) {
      if (now - p.deliveredAt < idleMs) continue;
      // A non-"0-0" cursor skips entries below it (resume mid-scan).
      if (idLess(p.msgId, cursor) && cursor !== "0-0") continue;
      p.consumer = consumer;
      p.deliveryCount++;
      p.deliveredAt = now;
      const entry = stream.entries.find((e) => e.id === p.msgId);
      if (entry) claimed.push([entry.id, entry.fields]);
    }
    return ["0-0", claimed];
  }

  // XLEN: entry count for the stream, 0 when the key doesn't exist.
  xlen(key: string): number {
    return this.streams.get(key)?.entries.length ?? 0;
  }

  // XRANGE: inclusive id range; "-" / "+" map to the min/max sentinels.
  // Ignores any COUNT argument.
  xrange(key: string, start: string, end: string, ..._rest: any[]): any[] {
    const stream = this.streams.get(key);
    if (!stream) return [];
    const lo = start === "-" ? MIN_ID : start;
    const hi = end === "+" ? "9999999999999-9999" : end;
    return stream.entries
      .filter(
        (e) =>
          !idLess(e.id, lo) && !idGreater(e.id, hi)
      )
      .map((e) => [e.id, e.fields]);
  }

  // Health-check used by the clients' ping().
  ping(): string {
    return "PONG";
  }

  /** Helper for tests: total PEL entries across a group. */
  getPelSize(streamKey: string, groupName: string): number {
    return (
      this.streams.get(streamKey)?.groups.get(groupName)?.pel.size ?? 0
    );
  }

  /** Helper for tests: raw stream entry count ignoring groups. */
  getStreamLength(key: string): number {
    return this.xlen(key);
  }
}
|