node-event-stream 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -0
- package/dist/consumer/consumer.js +1 -0
- package/dist/core/broker.js +410 -0
- package/dist/interface/interface.js +2 -0
- package/dist/producer/producer.js +1 -0
- package/dist/retention/retentionEngine.js +78 -0
- package/dist/server.js +2 -0
- package/dist/storage/bufferMemory.js +40 -0
- package/dist/storage/diskStorage.js +227 -0
- package/dist/storage/ringBuffer.js +40 -0
- package/dist/storage/secondaryIndex.js +62 -0
- package/dist/storage/storage.js +1 -0
- package/dist/utils/helper.js +6 -0
- package/package.json +19 -0
- package/src/consumer/consumer.ts +0 -0
- package/src/core/broker.ts +469 -0
- package/src/interface/interface.ts +87 -0
- package/src/producer/producer.ts +0 -0
- package/src/retention/retentionEngine.ts +100 -0
- package/src/server.ts +1 -0
- package/src/storage/bufferMemory.ts +39 -0
- package/src/storage/diskStorage.ts +210 -0
- package/src/storage/secondaryIndex.ts +71 -0
- package/src/utils/helper.ts +1 -0
- package/tsconfig.json +12 -0
package/README.md
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
# Log Flow
|
|
2
|
+
|
|
3
|
+
Log Flow is a lightweight **event-driven pub/sub library for Node.js**.
|
|
4
|
+
It allows applications to publish and subscribe to events in a simple and efficient way, similar to a minimal streaming system.
|
|
5
|
+
|
|
6
|
+
The goal of Log Flow is to provide a **simple alternative to heavy message brokers** when you only need lightweight event streaming inside your Node.js services.
|
|
7
|
+
|
|
8
|
+
---
|
|
9
|
+
|
|
10
|
+
## Features
|
|
11
|
+
|
|
12
|
+
- Simple **Producer / Consumer** model
|
|
13
|
+
- Lightweight **Pub/Sub event system**
|
|
14
|
+
- Retrieve previous events by **key or time**
|
|
15
|
+
- Offset-based event reading
|
|
16
|
+
- Minimal setup
|
|
17
|
+
- Designed for **high-throughput event flow**
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
|
|
21
|
+
## Installation
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
npm install log-flow
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
---
|
|
28
|
+
|
|
29
|
+
## Basic Usage
|
|
30
|
+
|
|
31
|
+
### Producer
|
|
32
|
+
|
|
33
|
+
```javascript
|
|
34
|
+
const { Producer } = require('log-flow');
|
|
35
|
+
|
|
36
|
+
const producer = new Producer();
|
|
37
|
+
|
|
38
|
+
producer.send('user-location', {
|
|
39
|
+
pilgrim_id: 1,
|
|
40
|
+
lat: 23.78,
|
|
41
|
+
lon: 90.41,
|
|
42
|
+
});
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
---
|
|
46
|
+
|
|
47
|
+
### Consumer
|
|
48
|
+
|
|
49
|
+
```javascript
|
|
50
|
+
const { Consumer } = require('log-flow');
|
|
51
|
+
|
|
52
|
+
const consumer = new Consumer();
|
|
53
|
+
|
|
54
|
+
consumer.subscribe('user-location', (message) => {
|
|
55
|
+
console.log('New Event:', message);
|
|
56
|
+
});
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
---
|
|
60
|
+
|
|
61
|
+
## Event Structure
|
|
62
|
+
|
|
63
|
+
Example event stored in the system:
|
|
64
|
+
|
|
65
|
+
```json
|
|
66
|
+
{
|
|
67
|
+
"pilgrim_id": 1,
|
|
68
|
+
"lat": 23.78,
|
|
69
|
+
"lon": 90.41,
|
|
70
|
+
"created_at": "2026-03-07T10:20:00Z"
|
|
71
|
+
}
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
---
|
|
75
|
+
|
|
76
|
+
## Use Cases
|
|
77
|
+
|
|
78
|
+
Log Flow can be useful for:
|
|
79
|
+
|
|
80
|
+
- Real-time location tracking
|
|
81
|
+
- Event-driven microservices
|
|
82
|
+
- Activity logging systems
|
|
83
|
+
- Lightweight streaming pipelines
|
|
84
|
+
- Notification systems
|
|
85
|
+
|
|
86
|
+
---
|
|
87
|
+
|
|
88
|
+
## Project Goal
|
|
89
|
+
|
|
90
|
+
Log Flow was created to explore **event streaming concepts similar to distributed log systems**, but in a much simpler and lightweight form suitable for Node.js applications.
|
|
91
|
+
|
|
92
|
+
---
|
|
93
|
+
|
|
94
|
+
## License
|
|
95
|
+
|
|
96
|
+
MIT License
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"use strict";
|
|
@@ -0,0 +1,410 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// ── TypeScript-emitted CommonJS interop helpers ──────────────────────────────
// Boilerplate generated by the TypeScript compiler to emulate ES-module
// import semantics on top of CommonJS. Do not edit by hand.

// Re-exports property `k` of module `m` as `k2` on target `o`, preserving a
// live getter when the source descriptor allows it.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches `v` as the `default` property of namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns from ...` for CommonJS modules: copies every own
// key except "default" onto a fresh namespace object, then sets `default`.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// Implements `import x from ...` for CommonJS modules by wrapping a plain
// export object in a `{ default: ... }` shell.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.MessageBus = exports.Topic = void 0;
|
|
40
|
+
const stream_1 = require("stream");
|
|
41
|
+
const retentionEngine_1 = __importDefault(require("../retention/retentionEngine"));
|
|
42
|
+
const diskStorage_1 = __importDefault(require("../storage/diskStorage"));
|
|
43
|
+
const bufferMemory_1 = __importDefault(require("../storage/bufferMemory"));
|
|
44
|
+
const secondaryIndex_1 = __importDefault(require("../storage/secondaryIndex"));
|
|
45
|
+
const path_1 = __importDefault(require("path"));
|
|
46
|
+
const fs = __importStar(require("fs"));
|
|
47
|
+
/**
 * Per-topic state: an in-memory ring buffer of recent records, a secondary
 * index, a disk store, the registered consumers, and a writes/sec gauge.
 * On construction, previously persisted records are replayed from disk to
 * rebuild the index and restore the next offset.
 */
class Topic {
    constructor(name, dataDir) {
        this.name = name;
        this.nextOffset = 0;
        // Hot path: the most recent records live in a fixed-size ring buffer.
        this.memory = new bufferMemory_1.default(50000);
        this.index = new secondaryIndex_1.default();
        this.consumers = new Map();
        // Write-rate gauge state (refreshed roughly once per second).
        this.writeCount = 0;
        this.lastStatTs = Date.now();
        this.writesPerSec = 0;
        this.disk = new diskStorage_1.default(dataDir, name);
        this.replayFromDisk();
    }

    /**
     * Rebuild the secondary index from every record persisted on disk and
     * advance nextOffset past the highest offset seen.
     */
    replayFromDisk() {
        let highest = -1;
        for (const { offset, timestamp, key } of this.disk.replayAll()) {
            this.index.insert({ offset, timestamp, key });
            highest = Math.max(highest, offset);
        }
        if (highest >= 0) {
            this.nextOffset = highest + 1;
        }
    }

    /**
     * Record one write; once at least a second has elapsed since the last
     * sample, recompute writesPerSec and reset the counter.
     */
    trackWrite() {
        this.writeCount += 1;
        const now = Date.now();
        const windowMs = now - this.lastStatTs;
        if (windowMs < 1000) {
            return;
        }
        this.writesPerSec = Math.round((this.writeCount * 1000) / windowMs);
        this.writeCount = 0;
        this.lastStatTs = now;
    }
}
|
|
85
|
+
exports.Topic = Topic;
|
|
86
|
+
/**
 * Central broker: owns all topics, routes produced records to subscribed
 * consumer groups and live query streams, persists consumer-group offsets to
 * a JSON file in `dataDir`, and (optionally) runs a periodic retention
 * cleanup driven by a RetentionEngine policy.
 */
class MessageBus {
    /**
     * @param {string} dataDir  directory for segments and the offsets file
     *                          (created recursively if missing)
     * @param {object} options  may carry `retention` (a policy object) and
     *                          `cleanupIntervalMinutes` (default 60; the
     *                          timer is only started when retention is set)
     */
    constructor(dataDir = './msgbus-data', options = {}) {
        var _a;
        this.dataDir = dataDir;
        this.options = options;
        this.topics = new Map();
        this.retention = null;
        this.cleanupTimer = null;
        // topicName -> (key or '*') -> Set of live stream callbacks
        this.streamListeners = new Map();
        fs.mkdirSync(dataDir, { recursive: true });
        this.offsetFile = path_1.default.join(dataDir, '_consumer-offsets.json');
        if (options.retention) {
            this.retention = new retentionEngine_1.default(options.retention);
        }
        const intervalMin = (_a = options.cleanupIntervalMinutes) !== null && _a !== void 0 ? _a : 60;
        if (intervalMin > 0 && this.retention) {
            const ms = intervalMin * 60000;
            this.cleanupTimer = setInterval(() => {
                console.log('[bus] auto-cleanup running...');
                this.cleanup();
            }, ms);
            // unref() so the timer does not keep the process alive (guarded
            // for environments whose timers lack unref).
            if (this.cleanupTimer.unref)
                this.cleanupTimer.unref();
            console.log(`[bus] auto-cleanup scheduled every ${intervalMin} min`);
        }
    }
    /** Return the Topic for `name`, lazily creating (and disk-replaying) it. */
    getTopic(name) {
        if (!this.topics.has(name))
            this.topics.set(name, new Topic(name, this.dataDir));
        return this.topics.get(name);
    }
    /**
     * Read the persisted consumer-offset snapshot.
     * Shape: { [topicName]: { [groupId]: offset } }. Any read/parse failure
     * is deliberately swallowed and treated as "no saved offsets".
     */
    loadOffsets() {
        try {
            if (fs.existsSync(this.offsetFile))
                return JSON.parse(fs.readFileSync(this.offsetFile, 'utf-8'));
        }
        catch (_a) { }
        return {};
    }
    /**
     * Snapshot every consumer group's current offset to disk.
     * NOTE(review): synchronous write on every delivered record — presumably
     * acceptable at this library's scale, but a potential hot-path cost.
     */
    saveOffsets() {
        const snap = {};
        for (const [name, topic] of this.topics) {
            snap[name] = {};
            for (const [gid, c] of topic.consumers)
                snap[name][gid] = c.offset;
        }
        fs.writeFileSync(this.offsetFile, JSON.stringify(snap), 'utf-8');
    }
    /**
     * Append one record to a topic: assign the next offset, store it in the
     * ring buffer, the secondary index, and the disk queue, then deliver it
     * to consumers/stream listeners. Returns the assigned offset.
     */
    produce(opts) {
        const topic = this.getTopic(opts.topic);
        const record = {
            offset: topic.nextOffset++,
            topic: opts.topic,
            key: opts.key,
            timestamp: Date.now(),
            data: opts.data,
        };
        topic.memory.push(record);
        topic.index.insert({
            offset: record.offset,
            timestamp: record.timestamp,
            key: record.key,
        });
        // Hand the record to disk storage (enqueue — buffering behaviour is
        // defined by DiskStorage, not visible here).
        topic.disk.enqueue(record);
        this.deliver(topic, record);
        topic.trackWrite();
        return record.offset;
    }
    /** Produce several records sequentially; returns their offsets. */
    produceBatch(records) {
        return records.map((r) => this.produce(r));
    }
    /**
     * Register a consumer group handler on a topic. The starting offset is,
     * in order of precedence: the persisted offset for this group, else 0
     * when `fromBeginning`, else the topic's current tail. If the group is
     * behind, a catch-up replay is scheduled on the next tick.
     * Returns an unsubscribe function (which also persists offsets).
     */
    subscribe(opts, handler) {
        var _a, _b;
        const topic = this.getTopic(opts.topic);
        const saved = this.loadOffsets();
        const offset = (_b = (_a = saved[opts.topic]) === null || _a === void 0 ? void 0 : _a[opts.groupId]) !== null && _b !== void 0 ? _b : (opts.fromBeginning ? 0 : topic.nextOffset);
        const consumer = { groupId: opts.groupId, handler, offset };
        topic.consumers.set(opts.groupId, consumer);
        if (consumer.offset < topic.nextOffset) {
            setImmediate(() => this.catchUp(topic, consumer));
        }
        return () => {
            topic.consumers.delete(opts.groupId);
            this.saveOffsets();
        };
    }
    /**
     * Push a freshly produced record to every consumer whose offset is
     * exactly at this record (lagging consumers are handled by catchUp).
     * Handler errors are logged, never rethrown; offsets are persisted after
     * each successful (or async-settled) handler call. Stream listeners are
     * always notified afterwards.
     */
    deliver(topic, record) {
        for (const consumer of topic.consumers.values()) {
            if (consumer.offset === record.offset) {
                // Advance before invoking the handler so a re-entrant produce
                // inside the handler cannot double-deliver this offset.
                consumer.offset++;
                try {
                    const p = consumer.handler(record);
                    if (p instanceof Promise)
                        p.then(() => this.saveOffsets()).catch((e) => console.error(`[bus][${consumer.groupId}]`, e));
                    else
                        this.saveOffsets();
                }
                catch (e) {
                    console.error(`[bus][${consumer.groupId}]`, e);
                }
            }
        }
        this.notifyStreamListeners(topic.name, record);
    }
    /**
     * Fan a record out to live queryStream listeners: first those registered
     * for the record's exact key, then the '*' wildcard listeners.
     */
    notifyStreamListeners(topicName, record) {
        const topicMap = this.streamListeners.get(topicName);
        if (!topicMap)
            return;
        const keyListeners = topicMap.get(record.key);
        if (keyListeners)
            for (const fn of keyListeners)
                fn(record);
        const wildcardListeners = topicMap.get('*');
        if (wildcardListeners)
            for (const fn of wildcardListeners)
                fn(record);
    }
    /**
     * Register a live listener for (topic, key) — key may be '*'.
     * Returns a removal function.
     */
    addStreamListener(topicName, key, fn) {
        if (!this.streamListeners.has(topicName))
            this.streamListeners.set(topicName, new Map());
        const topicMap = this.streamListeners.get(topicName);
        if (!topicMap.has(key))
            topicMap.set(key, new Set());
        topicMap.get(key).add(fn);
        return () => { var _a; return (_a = topicMap.get(key)) === null || _a === void 0 ? void 0 : _a.delete(fn); };
    }
    /**
     * Replay records a lagging consumer has missed: offsets below the ring
     * buffer's minimum are read from disk, the rest from memory. The
     * consumer's offset advances past each replayed record; handler errors
     * are logged and replay continues. Offsets are persisted at the end.
     */
    catchUp(topic, consumer) {
        const ringMin = topic.memory.minOffset;
        const fromDisk = new Set();
        if (ringMin !== -1 && consumer.offset < ringMin) {
            for (let o = consumer.offset; o < ringMin; o++)
                fromDisk.add(o);
        }
        for (const rec of topic.disk.readByOffsets(fromDisk)) {
            consumer.offset = rec.offset + 1;
            try {
                consumer.handler(rec);
            }
            catch (e) {
                console.error(`[bus][${consumer.groupId}] catchup`, e);
            }
        }
        for (const rec of topic.memory.from(consumer.offset)) {
            consumer.offset = rec.offset + 1;
            try {
                consumer.handler(rec);
            }
            catch (e) {
                console.error(`[bus][${consumer.groupId}] catchup`, e);
            }
        }
        this.saveOffsets();
    }
    // ── Query ─────────────────────────────────────────────────────────────────
    /**
     * Query historical records by key and/or time window with pagination.
     * The index returns matching offsets (limit+skip are fetched, then
     * sliced, because the index call itself is asked to skip 0); records are
     * then materialized from the ring buffer where possible and from disk
     * otherwise, and finally sorted by offset in the requested order
     * (default 'desc', limit 100, skip 0).
     */
    query(opts) {
        var _a, _b, _c;
        const topic = this.getTopic(opts.topic);
        const limit = (_a = opts.limit) !== null && _a !== void 0 ? _a : 100;
        const skip = (_b = opts.skip) !== null && _b !== void 0 ? _b : 0;
        const order = (_c = opts.order) !== null && _c !== void 0 ? _c : 'desc';
        const offsets = topic.index.query({
            key: opts.key,
            fromTime: opts.fromTime,
            toTime: opts.toTime,
            limit: limit + skip,
            skip: 0,
            order,
        });
        const paginated = offsets.slice(skip, skip + limit);
        if (paginated.length === 0)
            return [];
        const set = new Set(paginated);
        const ringMin = topic.memory.minOffset;
        const fromRing = new Set();
        const fromDisk = new Set();
        for (const off of set) {
            if (ringMin !== -1 && off >= ringMin)
                fromRing.add(off);
            else
                fromDisk.add(off);
        }
        const results = [];
        for (const rec of topic.memory) {
            if (fromRing.has(rec.offset))
                results.push(rec);
        }
        for (const rec of topic.disk.readByOffsets(fromDisk)) {
            results.push(rec);
        }
        results.sort((a, b) => order === 'asc' ? a.offset - b.offset : b.offset - a.offset);
        return results;
    }
    /**
     * Return a Readable stream of newline-delimited JSON records: first up
     * to `historySize` (default 1000) historical matches, then live records
     * as they are produced. The live listener is attached only after history
     * has been flushed, and is removed when the stream closes.
     * NOTE(review): the time-window guards use truthiness, so a
     * fromTime/toTime of 0 is treated as "unset".
     */
    queryStream(opts) {
        var _a, _b, _c;
        const historySize = (_a = opts.historySize) !== null && _a !== void 0 ? _a : 1000;
        const order = (_b = opts.order) !== null && _b !== void 0 ? _b : 'asc';
        const listenerKey = (_c = opts.key) !== null && _c !== void 0 ? _c : '*';
        let removeLive = null;
        const stream = new stream_1.Readable({ objectMode: false, read() { } });
        const push = (record) => {
            if (stream.destroyed)
                return;
            if (opts.fromTime && record.timestamp < opts.fromTime)
                return;
            if (opts.toTime && record.timestamp > opts.toTime)
                return;
            stream.push(JSON.stringify(record) + '\n');
        };
        setImmediate(() => {
            if (stream.destroyed)
                return;
            const history = this.query({
                topic: opts.topic,
                key: opts.key,
                limit: historySize,
                skip: 0,
                order,
                fromTime: opts.fromTime,
                toTime: opts.toTime,
            });
            for (const record of history) {
                if (stream.destroyed)
                    return;
                push(record);
            }
            if (!stream.destroyed) {
                removeLive = this.addStreamListener(opts.topic, listenerKey, push);
            }
        });
        stream.on('close', () => removeLive === null || removeLive === void 0 ? void 0 : removeLive());
        return stream;
    }
    // ── Retention / Cleanup ───────────────────────────────────────────────────
    /**
     * Apply the retention policy (or a one-off override) to one topic or to
     * all topics. Segments selected by the engine are deleted from disk,
     * consumers stranded below the earliest surviving segment are skipped
     * forward ("delete anyway"), the in-memory index is purged, and a report
     * per topic is returned. Returns [] when no policy is configured.
     */
    cleanup(topicName, policyOverride) {
        const engine = policyOverride
            ? new retentionEngine_1.default(policyOverride)
            : this.retention;
        if (!engine) {
            console.warn('[bus] cleanup() called but no retention policy is set.');
            return [];
        }
        const targets = topicName
            ? [this.getTopic(topicName)]
            : [...this.topics.values()];
        const reports = [];
        for (const topic of targets) {
            // Flush pending writes so segment metadata reflects reality.
            topic.disk.flush();
            const metas = topic.disk.buildSegmentMetas();
            const { toDelete, reasons } = engine.evaluate(metas);
            if (toDelete.length === 0) {
                reports.push({
                    topic: topic.name,
                    segmentsDeleted: 0,
                    recordsDropped: 0,
                    bytesFreed: 0,
                    reasons: ['nothing to delete'],
                    skippedConsumers: [],
                });
                continue;
            }
            const deletedBases = new Set(toDelete.map((s) => s.baseOffset));
            const surviving = metas.filter((m) => !deletedBases.has(m.baseOffset));
            // If nothing survives, the earliest valid position is the tail.
            const earliestSurviving = surviving.length > 0
                ? Math.min(...surviving.map((m) => m.baseOffset))
                : topic.nextOffset;
            // Advance any slow consumers past the gap — "delete anyway" behaviour
            const skippedConsumers = [];
            for (const consumer of topic.consumers.values()) {
                if (consumer.offset < earliestSurviving) {
                    console.warn(`[bus][${topic.name}] consumer "${consumer.groupId}" at offset ${consumer.offset} ` +
                        `skipped to ${earliestSurviving} (segment deleted)`);
                    skippedConsumers.push({
                        groupId: consumer.groupId,
                        skippedToOffset: earliestSurviving,
                    });
                    consumer.offset = earliestSurviving;
                }
            }
            // Delete segments and purge memory index
            let bytesFreed = 0;
            let recordsDropped = 0;
            for (const seg of toDelete) {
                bytesFreed += topic.disk.deleteSegment(seg.baseOffset);
                recordsDropped += seg.recordCount;
            }
            topic.index.purgeBelow(earliestSurviving);
            this.saveOffsets();
            const report = {
                topic: topic.name,
                segmentsDeleted: toDelete.length,
                recordsDropped,
                bytesFreed,
                reasons,
                skippedConsumers,
            };
            console.log(`[bus] cleanup [${topic.name}]: ${toDelete.length} segments deleted, ` +
                `${(bytesFreed / 1e6).toFixed(1)} MB freed, ${recordsDropped} records dropped`);
            reports.push(report);
        }
        return reports;
    }
    // ── Stats ─────────────────────────────────────────────────────────────────
    /**
     * Snapshot of a topic's counters (records, offsets, consumers, memory,
     * disk usage, write rate). Note: uses getTopic, so asking for an unknown
     * topic creates it as a side effect.
     */
    stats(topicName) {
        const t = this.getTopic(topicName);
        return {
            topic: topicName,
            totalRecords: t.index.totalEntries,
            latestOffset: t.nextOffset - 1,
            consumers: [...t.consumers.keys()],
            indexedKeys: t.index.totalKeys,
            memoryRecords: t.memory.size,
            writesPerSec: t.writesPerSec,
            diskSegments: t.disk.segmentCount,
            diskSizeBytes: t.disk.totalDiskBytes,
        };
    }
    /** Stop the cleanup timer, persist offsets, and close all disk stores. */
    close() {
        if (this.cleanupTimer)
            clearInterval(this.cleanupTimer);
        this.saveOffsets();
        for (const topic of this.topics.values())
            topic.disk.close();
    }
}
|
|
410
|
+
exports.MessageBus = MessageBus;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"use strict";
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
3
|
+
// RETENTION ENGINE
|
|
4
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
/**
 * Evaluates a retention policy against disk-segment metadata and decides
 * which closed segments should be deleted. Three independent rules may each
 * mark segments: maximum age (days or hours), maximum total size (GB or MB),
 * and maximum total record count. The active segment is never a candidate.
 */
class RetentionEngine {
    constructor(policy) {
        this.policy = policy;
    }

    /**
     * @param {Array} metas  segment descriptors ({ baseOffset, recordCount,
     *                       sizeBytes, newestTimestamp, isActive })
     * @returns {{toDelete: Array, reasons: string[]}} segments to drop
     *          (oldest first) plus a human-readable reason per marking
     */
    evaluate(metas) {
        var _a;
        const candidates = metas.filter((m) => !m.isActive);
        const deleteSet = new Set();
        const reasons = [];

        // ── Rule 1: Time-based ────────────────────────────────────────────────
        // Days take precedence over hours when both are configured.
        let maxAgeMs = null;
        if (this.policy.maxAgeDays != null) {
            maxAgeMs = this.policy.maxAgeDays * 86400000;
        }
        else if (this.policy.maxAgeHours != null) {
            maxAgeMs = this.policy.maxAgeHours * 3600000;
        }
        if (maxAgeMs !== null) {
            const cutoff = Date.now() - maxAgeMs;
            for (const seg of candidates) {
                // A segment is stale only when even its NEWEST record is past
                // the cutoff (timestamp 0 means "unknown" and is skipped).
                const stale = seg.newestTimestamp > 0 && seg.newestTimestamp < cutoff;
                if (!stale)
                    continue;
                deleteSet.add(seg.baseOffset);
                const ageDays = ((Date.now() - seg.newestTimestamp) /
                    86400000).toFixed(1);
                reasons.push(`[time] segment ${seg.baseOffset}: newest record is ${ageDays} days old (limit: ${(_a = this.policy.maxAgeDays) !== null && _a !== void 0 ? _a : (this.policy.maxAgeHours / 24).toFixed(1)} days)`);
            }
        }

        // ── Rule 2: Size-based ────────────────────────────────────────────────
        // GB takes precedence over MB when both are configured.
        let maxBytes = null;
        if (this.policy.maxSizeGB != null) {
            maxBytes = this.policy.maxSizeGB * 1073741824;
        }
        else if (this.policy.maxSizeMB != null) {
            maxBytes = this.policy.maxSizeMB * 1048576;
        }
        if (maxBytes !== null) {
            // Total counts ALL segments (active included); only candidates
            // may be deleted, oldest first, until under the limit.
            let totalBytes = metas.reduce((sum, m) => sum + m.sizeBytes, 0);
            const oldestFirst = [...candidates].sort((a, b) => a.baseOffset - b.baseOffset);
            for (const seg of oldestFirst) {
                if (totalBytes <= maxBytes)
                    break;
                if (!deleteSet.has(seg.baseOffset)) {
                    deleteSet.add(seg.baseOffset);
                    reasons.push(`[size] segment ${seg.baseOffset}: total ${(totalBytes / 1e9).toFixed(2)} GB ` +
                        `> limit ${(maxBytes / 1e9).toFixed(2)} GB — freeing ${(seg.sizeBytes / 1e6).toFixed(1)} MB`);
                }
                totalBytes -= seg.sizeBytes;
            }
        }

        // ── Rule 3: Record count ──────────────────────────────────────────────
        if (this.policy.maxRecords != null) {
            const total = metas.reduce((sum, m) => sum + m.recordCount, 0);
            if (total > this.policy.maxRecords) {
                let excess = total - this.policy.maxRecords;
                const oldestFirst = [...candidates].sort((a, b) => a.baseOffset - b.baseOffset);
                for (const seg of oldestFirst) {
                    if (excess <= 0)
                        break;
                    if (!deleteSet.has(seg.baseOffset)) {
                        deleteSet.add(seg.baseOffset);
                        reasons.push(`[records] segment ${seg.baseOffset}: ${total.toLocaleString()} records ` +
                            `> limit ${this.policy.maxRecords.toLocaleString()} — dropping ${seg.recordCount} records`);
                    }
                    excess -= seg.recordCount;
                }
            }
        }

        const toDelete = candidates
            .filter((m) => deleteSet.has(m.baseOffset))
            .sort((a, b) => a.baseOffset - b.baseOffset);
        return { toDelete, reasons };
    }
}
|
|
78
|
+
exports.default = RetentionEngine;
|
package/dist/server.js
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
/**
 * Fixed-capacity circular buffer over the most recent records. Once full,
 * each push overwrites the oldest entry. Iteration always runs oldest-first.
 */
class BufferMemory {
    constructor(capacity) {
        this.capacity = capacity;
        this.head = 0;              // next slot to write
        this._size = 0;             // occupied slots (never exceeds capacity)
        this.buffer = new Array(capacity);
    }

    /** Append a record, overwriting the oldest one when at capacity. */
    push(record) {
        this.buffer[this.head] = record;
        this.head = (this.head + 1) % this.capacity;
        if (this._size !== this.capacity) {
            this._size += 1;
        }
    }

    /** Iterate stored records oldest-first. */
    *[Symbol.iterator]() {
        if (this._size === 0)
            return;
        // Before the first wrap-around the oldest record sits at index 0;
        // afterwards it sits at `head` (the slot about to be overwritten).
        const oldest = this._size === this.capacity ? this.head : 0;
        for (let step = 0; step < this._size; step += 1) {
            const slot = this.buffer[(oldest + step) % this.capacity];
            if (slot !== undefined)
                yield slot;
        }
    }

    /** Yield records whose offset is >= fromOffset, oldest-first. */
    *from(fromOffset) {
        for (const record of this) {
            if (record.offset < fromOffset)
                continue;
            yield record;
        }
    }

    /** Number of records currently retained. */
    get size() {
        return this._size;
    }

    /** Offset of the oldest retained record, or -1 when empty. */
    get minOffset() {
        for (const record of this)
            return record.offset;
        return -1;
    }
}
|
|
40
|
+
exports.default = BufferMemory;
|