node-event-stream 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -0
- package/dist/consumer/consumer.js +1 -0
- package/dist/core/broker.js +410 -0
- package/dist/interface/interface.js +2 -0
- package/dist/producer/producer.js +1 -0
- package/dist/retention/retentionEngine.js +78 -0
- package/dist/server.js +2 -0
- package/dist/storage/bufferMemory.js +40 -0
- package/dist/storage/diskStorage.js +227 -0
- package/dist/storage/ringBuffer.js +40 -0
- package/dist/storage/secondaryIndex.js +62 -0
- package/dist/storage/storage.js +1 -0
- package/dist/utils/helper.js +6 -0
- package/package.json +19 -0
- package/src/consumer/consumer.ts +0 -0
- package/src/core/broker.ts +469 -0
- package/src/interface/interface.ts +87 -0
- package/src/producer/producer.ts +0 -0
- package/src/retention/retentionEngine.ts +100 -0
- package/src/server.ts +1 -0
- package/src/storage/bufferMemory.ts +39 -0
- package/src/storage/diskStorage.ts +210 -0
- package/src/storage/secondaryIndex.ts +71 -0
- package/src/utils/helper.ts +1 -0
- package/tsconfig.json +12 -0
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
import { LogRecord } from '../interface/interface';
|
|
4
|
+
|
|
5
|
+
interface SegmentMeta {
|
|
6
|
+
filePath: string;
|
|
7
|
+
baseOffset: number;
|
|
8
|
+
lastOffset: number;
|
|
9
|
+
sizeBytes: number;
|
|
10
|
+
oldestTimestamp: number;
|
|
11
|
+
newestTimestamp: number;
|
|
12
|
+
recordCount: number;
|
|
13
|
+
isActive: boolean;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
const MAX_SEGMENT_BYTES = 256 * 1024 * 1024;
|
|
17
|
+
const FLUSH_INTERVAL_MS = 50;
|
|
18
|
+
const FLUSH_BATCH_SIZE = 500;
|
|
19
|
+
|
|
20
|
+
export default class DiskStore {
|
|
21
|
+
private writeBuffer: string[] = [];
|
|
22
|
+
private currentFd: number = -1;
|
|
23
|
+
private currentSegmentSize = 0;
|
|
24
|
+
private currentSegmentBase = 0;
|
|
25
|
+
private flushTimer: ReturnType<typeof setInterval> | null = null;
|
|
26
|
+
private isFlushing = false;
|
|
27
|
+
public totalFlushed = 0;
|
|
28
|
+
|
|
29
|
+
constructor(
|
|
30
|
+
private readonly dir: string,
|
|
31
|
+
private readonly topic: string,
|
|
32
|
+
) {
|
|
33
|
+
fs.mkdirSync(this.segDir, { recursive: true });
|
|
34
|
+
this.openOrCreateActiveSegment();
|
|
35
|
+
this.startFlushLoop();
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
private get segDir(): string {
|
|
39
|
+
return path.join(this.dir, this.topic);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
private listPaths(): string[] {
|
|
43
|
+
return fs
|
|
44
|
+
.readdirSync(this.segDir)
|
|
45
|
+
.filter((f) => f.endsWith('.log'))
|
|
46
|
+
.sort()
|
|
47
|
+
.map((f) => path.join(this.segDir, f));
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
private openSegment(base: number): void {
|
|
51
|
+
if (this.currentFd !== -1) fs.closeSync(this.currentFd);
|
|
52
|
+
const file = path.join(
|
|
53
|
+
this.segDir,
|
|
54
|
+
String(base).padStart(10, '0') + '.log',
|
|
55
|
+
);
|
|
56
|
+
this.currentFd = fs.openSync(file, 'a');
|
|
57
|
+
this.currentSegmentBase = base;
|
|
58
|
+
this.currentSegmentSize = 0;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
private openOrCreateActiveSegment(): void {
|
|
62
|
+
const files = this.listPaths();
|
|
63
|
+
if (files.length === 0) {
|
|
64
|
+
this.openSegment(0);
|
|
65
|
+
return;
|
|
66
|
+
}
|
|
67
|
+
const last = files[files.length - 1];
|
|
68
|
+
const base = parseInt(path.basename(last, '.log'), 10);
|
|
69
|
+
const size = fs.statSync(last).size;
|
|
70
|
+
if (size >= MAX_SEGMENT_BYTES) this.openSegment(base + 1);
|
|
71
|
+
else {
|
|
72
|
+
this.currentSegmentBase = base;
|
|
73
|
+
this.currentSegmentSize = size;
|
|
74
|
+
this.currentFd = fs.openSync(last, 'a');
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
enqueue(record: LogRecord): void {
|
|
79
|
+
this.writeBuffer.push(JSON.stringify(record) + '\n');
|
|
80
|
+
if (this.writeBuffer.length >= FLUSH_BATCH_SIZE) this.flush();
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
private startFlushLoop(): void {
|
|
84
|
+
this.flushTimer = setInterval(() => this.flush(), FLUSH_INTERVAL_MS);
|
|
85
|
+
if ((this.flushTimer as any).unref) (this.flushTimer as any).unref();
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
flush(): void {
|
|
89
|
+
if (this.isFlushing || this.writeBuffer.length === 0) return;
|
|
90
|
+
this.isFlushing = true;
|
|
91
|
+
const batch = this.writeBuffer.splice(0);
|
|
92
|
+
const payload = batch.join('');
|
|
93
|
+
const byteLength = Buffer.byteLength(payload, 'utf-8');
|
|
94
|
+
if (this.currentSegmentSize + byteLength >= MAX_SEGMENT_BYTES)
|
|
95
|
+
this.openSegment(this.currentSegmentBase + 1);
|
|
96
|
+
fs.writeSync(this.currentFd, payload, null, 'utf-8');
|
|
97
|
+
this.currentSegmentSize += byteLength;
|
|
98
|
+
this.totalFlushed += batch.length;
|
|
99
|
+
this.isFlushing = false;
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
*readByOffsets(offsets: Set<number>): Generator<LogRecord> {
|
|
103
|
+
if (offsets.size === 0) return;
|
|
104
|
+
const remaining = new Set(offsets);
|
|
105
|
+
for (const file of this.listPaths()) {
|
|
106
|
+
if (remaining.size === 0) break;
|
|
107
|
+
const content = fs.readFileSync(file, 'utf-8');
|
|
108
|
+
for (const line of content.split('\n')) {
|
|
109
|
+
if (!line.trim()) continue;
|
|
110
|
+
try {
|
|
111
|
+
const rec: LogRecord = JSON.parse(line);
|
|
112
|
+
if (remaining.has(rec.offset)) {
|
|
113
|
+
yield rec;
|
|
114
|
+
remaining.delete(rec.offset);
|
|
115
|
+
}
|
|
116
|
+
} catch {
|
|
117
|
+
/* skip */
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
*replayAll(): Generator<LogRecord> {
|
|
124
|
+
for (const file of this.listPaths()) {
|
|
125
|
+
const content = fs.readFileSync(file, 'utf-8');
|
|
126
|
+
for (const line of content.split('\n')) {
|
|
127
|
+
if (!line.trim()) continue;
|
|
128
|
+
try {
|
|
129
|
+
yield JSON.parse(line) as LogRecord;
|
|
130
|
+
} catch {
|
|
131
|
+
/* skip */
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
buildSegmentMetas(): SegmentMeta[] {
|
|
138
|
+
const files = this.listPaths();
|
|
139
|
+
return files.map((file) => {
|
|
140
|
+
const base = parseInt(path.basename(file, '.log'), 10);
|
|
141
|
+
const stat = fs.statSync(file);
|
|
142
|
+
let first: LogRecord | null = null;
|
|
143
|
+
let last: LogRecord | null = null;
|
|
144
|
+
let count = 0;
|
|
145
|
+
|
|
146
|
+
const content = fs.readFileSync(file, 'utf-8');
|
|
147
|
+
for (const line of content.split('\n')) {
|
|
148
|
+
if (!line.trim()) continue;
|
|
149
|
+
try {
|
|
150
|
+
const rec: LogRecord = JSON.parse(line);
|
|
151
|
+
if (!first) first = rec;
|
|
152
|
+
last = rec;
|
|
153
|
+
count++;
|
|
154
|
+
} catch {
|
|
155
|
+
/* skip */
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
return {
|
|
160
|
+
filePath: file,
|
|
161
|
+
baseOffset: base,
|
|
162
|
+
lastOffset: last?.offset ?? -1,
|
|
163
|
+
sizeBytes: stat.size,
|
|
164
|
+
oldestTimestamp: first?.timestamp ?? 0,
|
|
165
|
+
newestTimestamp: last?.timestamp ?? 0,
|
|
166
|
+
recordCount: count,
|
|
167
|
+
isActive: base === this.currentSegmentBase,
|
|
168
|
+
};
|
|
169
|
+
});
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
deleteSegment(baseOffset: number): number {
|
|
173
|
+
const file = path.join(
|
|
174
|
+
this.segDir,
|
|
175
|
+
String(baseOffset).padStart(10, '0') + '.log',
|
|
176
|
+
);
|
|
177
|
+
if (baseOffset === this.currentSegmentBase) {
|
|
178
|
+
console.warn(`[bus] skipping active segment ${baseOffset}`);
|
|
179
|
+
return 0;
|
|
180
|
+
}
|
|
181
|
+
try {
|
|
182
|
+
const size = fs.statSync(file).size;
|
|
183
|
+
fs.unlinkSync(file);
|
|
184
|
+
return size;
|
|
185
|
+
} catch (e) {
|
|
186
|
+
console.error(`[bus] failed to delete segment ${file}:`, e);
|
|
187
|
+
return 0;
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
get totalDiskBytes(): number {
|
|
192
|
+
return this.listPaths().reduce((s, f) => {
|
|
193
|
+
try {
|
|
194
|
+
return s + fs.statSync(f).size;
|
|
195
|
+
} catch {
|
|
196
|
+
return s;
|
|
197
|
+
}
|
|
198
|
+
}, 0);
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
get segmentCount(): number {
|
|
202
|
+
return this.listPaths().length;
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
close(): void {
|
|
206
|
+
this.flush();
|
|
207
|
+
if (this.flushTimer) clearInterval(this.flushTimer);
|
|
208
|
+
if (this.currentFd !== -1) fs.closeSync(this.currentFd);
|
|
209
|
+
}
|
|
210
|
+
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
interface IndexEntry {
|
|
2
|
+
offset: number;
|
|
3
|
+
timestamp: number;
|
|
4
|
+
key: string;
|
|
5
|
+
}
|
|
6
|
+
|
|
7
|
+
export default class SecondaryIndex {
|
|
8
|
+
private keyIndex = new Map<string, number[]>();
|
|
9
|
+
private timeIndex: Array<[number, number]> = [];
|
|
10
|
+
private offsetMap = new Map<number, IndexEntry>();
|
|
11
|
+
|
|
12
|
+
insert(entry: IndexEntry): void {
|
|
13
|
+
if (!this.keyIndex.has(entry.key)) this.keyIndex.set(entry.key, []);
|
|
14
|
+
this.keyIndex.get(entry.key)!.push(entry.offset);
|
|
15
|
+
this.timeIndex.push([entry.timestamp, entry.offset]);
|
|
16
|
+
this.offsetMap.set(entry.offset, entry);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
purgeBelow(minOffset: number): number {
|
|
20
|
+
let dropped = 0;
|
|
21
|
+
for (const [offset] of this.offsetMap) {
|
|
22
|
+
if (offset < minOffset) {
|
|
23
|
+
this.offsetMap.delete(offset);
|
|
24
|
+
dropped++;
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
for (const [key, offsets] of this.keyIndex) {
|
|
28
|
+
const filtered = offsets.filter((o) => o >= minOffset);
|
|
29
|
+
if (filtered.length === 0) this.keyIndex.delete(key);
|
|
30
|
+
else this.keyIndex.set(key, filtered);
|
|
31
|
+
}
|
|
32
|
+
this.timeIndex = this.timeIndex.filter(([, o]) => o >= minOffset);
|
|
33
|
+
return dropped;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
query(opts: {
|
|
37
|
+
key?: string;
|
|
38
|
+
fromTime?: number;
|
|
39
|
+
toTime?: number;
|
|
40
|
+
limit: number;
|
|
41
|
+
skip: number;
|
|
42
|
+
order: 'asc' | 'desc';
|
|
43
|
+
}): number[] {
|
|
44
|
+
let candidates: number[] =
|
|
45
|
+
opts.key !== undefined
|
|
46
|
+
? (this.keyIndex.get(opts.key) ?? [])
|
|
47
|
+
: this.timeIndex.map(([, o]) => o);
|
|
48
|
+
|
|
49
|
+
if (opts.fromTime !== undefined || opts.toTime !== undefined) {
|
|
50
|
+
candidates = candidates.filter((offset) => {
|
|
51
|
+
const e = this.offsetMap.get(offset);
|
|
52
|
+
if (!e) return false;
|
|
53
|
+
if (opts.fromTime !== undefined && e.timestamp < opts.fromTime)
|
|
54
|
+
return false;
|
|
55
|
+
if (opts.toTime !== undefined && e.timestamp > opts.toTime)
|
|
56
|
+
return false;
|
|
57
|
+
return true;
|
|
58
|
+
});
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
if (opts.order === 'desc') candidates = [...candidates].reverse();
|
|
62
|
+
return candidates.slice(opts.skip, opts.skip + opts.limit);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
get totalKeys(): number {
|
|
66
|
+
return this.keyIndex.size;
|
|
67
|
+
}
|
|
68
|
+
get totalEntries(): number {
|
|
69
|
+
return this.offsetMap.size;
|
|
70
|
+
}
|
|
71
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// NOTE(review): empty placeholder class — presumably reserved for future shared
// helpers (file maps to src/utils/helper.ts); confirm it is still needed, and
// prefer plain exported functions over a class-as-namespace if it grows.
export class UtilsHelper {}
|
package/tsconfig.json
ADDED