tina4-nodejs 3.0.0-rc.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/BENCHMARK_REPORT.md +96 -0
- package/CARBONAH.md +140 -0
- package/CLAUDE.md +599 -0
- package/COMPARISON.md +194 -0
- package/README.md +595 -0
- package/package.json +59 -0
- package/packages/cli/src/bin.ts +110 -0
- package/packages/cli/src/commands/init.ts +194 -0
- package/packages/cli/src/commands/migrate.ts +96 -0
- package/packages/cli/src/commands/migrateCreate.ts +59 -0
- package/packages/cli/src/commands/routes.ts +61 -0
- package/packages/cli/src/commands/serve.ts +58 -0
- package/packages/cli/src/commands/test.ts +83 -0
- package/packages/core/gallery/auth/meta.json +1 -0
- package/packages/core/gallery/auth/src/routes/api/gallery/auth/login/post.ts +22 -0
- package/packages/core/gallery/auth/src/routes/api/gallery/auth/verify/get.ts +16 -0
- package/packages/core/gallery/auth/src/routes/gallery/auth/get.ts +97 -0
- package/packages/core/gallery/database/meta.json +1 -0
- package/packages/core/gallery/database/src/routes/api/gallery/db/notes/get.ts +13 -0
- package/packages/core/gallery/database/src/routes/api/gallery/db/notes/post.ts +17 -0
- package/packages/core/gallery/database/src/routes/api/gallery/db/tables/get.ts +23 -0
- package/packages/core/gallery/error-overlay/meta.json +1 -0
- package/packages/core/gallery/error-overlay/src/routes/api/gallery/crash/get.ts +17 -0
- package/packages/core/gallery/orm/meta.json +1 -0
- package/packages/core/gallery/orm/src/routes/api/gallery/products/get.ts +12 -0
- package/packages/core/gallery/orm/src/routes/api/gallery/products/post.ts +7 -0
- package/packages/core/gallery/queue/meta.json +1 -0
- package/packages/core/gallery/queue/src/routes/api/gallery/queue/produce/post.ts +16 -0
- package/packages/core/gallery/queue/src/routes/api/gallery/queue/status/get.ts +10 -0
- package/packages/core/gallery/rest-api/meta.json +1 -0
- package/packages/core/gallery/rest-api/src/routes/api/gallery/hello/get.ts +6 -0
- package/packages/core/gallery/rest-api/src/routes/api/gallery/hello/post.ts +7 -0
- package/packages/core/gallery/templates/meta.json +1 -0
- package/packages/core/gallery/templates/src/routes/gallery/page/get.ts +15 -0
- package/packages/core/gallery/templates/src/templates/gallery_page.twig +257 -0
- package/packages/core/public/css/tina4.css +2463 -0
- package/packages/core/public/css/tina4.min.css +1 -0
- package/packages/core/public/favicon.ico +0 -0
- package/packages/core/public/images/logo.svg +5 -0
- package/packages/core/public/images/tina4-logo-icon.webp +0 -0
- package/packages/core/public/js/frond.min.js +420 -0
- package/packages/core/public/js/tina4-dev-admin.min.js +327 -0
- package/packages/core/public/js/tina4.min.js +93 -0
- package/packages/core/public/swagger/index.html +90 -0
- package/packages/core/public/swagger/oauth2-redirect.html +63 -0
- package/packages/core/src/ai.ts +359 -0
- package/packages/core/src/api.ts +248 -0
- package/packages/core/src/auth.ts +287 -0
- package/packages/core/src/cache.ts +121 -0
- package/packages/core/src/constants.ts +48 -0
- package/packages/core/src/container.ts +90 -0
- package/packages/core/src/devAdmin.ts +2024 -0
- package/packages/core/src/devMailbox.ts +316 -0
- package/packages/core/src/dotenv.ts +172 -0
- package/packages/core/src/errorOverlay.test.ts +122 -0
- package/packages/core/src/errorOverlay.ts +278 -0
- package/packages/core/src/events.ts +112 -0
- package/packages/core/src/fakeData.ts +309 -0
- package/packages/core/src/graphql.ts +812 -0
- package/packages/core/src/health.ts +31 -0
- package/packages/core/src/htmlElement.ts +172 -0
- package/packages/core/src/i18n.ts +136 -0
- package/packages/core/src/index.ts +88 -0
- package/packages/core/src/logger.ts +226 -0
- package/packages/core/src/messenger.ts +822 -0
- package/packages/core/src/middleware.ts +138 -0
- package/packages/core/src/queue.ts +481 -0
- package/packages/core/src/queueBackends/kafkaBackend.ts +348 -0
- package/packages/core/src/queueBackends/rabbitmqBackend.ts +479 -0
- package/packages/core/src/rateLimiter.ts +107 -0
- package/packages/core/src/request.ts +189 -0
- package/packages/core/src/response.ts +146 -0
- package/packages/core/src/routeDiscovery.ts +87 -0
- package/packages/core/src/router.ts +398 -0
- package/packages/core/src/scss.ts +366 -0
- package/packages/core/src/server.ts +610 -0
- package/packages/core/src/service.ts +380 -0
- package/packages/core/src/session.ts +480 -0
- package/packages/core/src/sessionHandlers/mongoHandler.ts +286 -0
- package/packages/core/src/sessionHandlers/valkeyHandler.ts +184 -0
- package/packages/core/src/static.ts +58 -0
- package/packages/core/src/testing.ts +233 -0
- package/packages/core/src/types.ts +98 -0
- package/packages/core/src/watcher.ts +37 -0
- package/packages/core/src/websocket.ts +408 -0
- package/packages/core/src/wsdl.ts +546 -0
- package/packages/core/templates/errors/302.twig +14 -0
- package/packages/core/templates/errors/401.twig +9 -0
- package/packages/core/templates/errors/403.twig +29 -0
- package/packages/core/templates/errors/404.twig +29 -0
- package/packages/core/templates/errors/500.twig +38 -0
- package/packages/core/templates/errors/502.twig +9 -0
- package/packages/core/templates/errors/503.twig +12 -0
- package/packages/core/templates/errors/base.twig +37 -0
- package/packages/frond/src/engine.ts +1475 -0
- package/packages/frond/src/index.ts +2 -0
- package/packages/orm/src/adapters/firebird.ts +455 -0
- package/packages/orm/src/adapters/mssql.ts +440 -0
- package/packages/orm/src/adapters/mysql.ts +355 -0
- package/packages/orm/src/adapters/postgres.ts +362 -0
- package/packages/orm/src/adapters/sqlite.ts +270 -0
- package/packages/orm/src/autoCrud.ts +231 -0
- package/packages/orm/src/baseModel.ts +536 -0
- package/packages/orm/src/database.ts +321 -0
- package/packages/orm/src/fakeData.ts +118 -0
- package/packages/orm/src/index.ts +49 -0
- package/packages/orm/src/migration.ts +392 -0
- package/packages/orm/src/model.ts +56 -0
- package/packages/orm/src/query.ts +113 -0
- package/packages/orm/src/seeder.ts +120 -0
- package/packages/orm/src/sqlTranslation.ts +272 -0
- package/packages/orm/src/types.ts +110 -0
- package/packages/orm/src/validation.ts +93 -0
- package/packages/swagger/src/generator.ts +189 -0
- package/packages/swagger/src/index.ts +2 -0
- package/packages/swagger/src/ui.ts +48 -0
- package/skills/tina4-developer.skill +0 -0
- package/skills/tina4-js.skill +0 -0
- package/skills/tina4-maintainer.skill +0 -0
|
@@ -0,0 +1,348 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Tina4 Kafka Queue Backend — Kafka protocol via raw TCP, zero dependencies.
|
|
3
|
+
*
|
|
4
|
+
* Implements the same interface as the file-based queue but uses Apache Kafka
|
|
5
|
+
* for message storage and delivery.
|
|
6
|
+
*
|
|
7
|
+
* Configure via environment variables:
|
|
8
|
+
* TINA4_KAFKA_BROKERS (default: "localhost:9092")
|
|
9
|
+
* TINA4_KAFKA_GROUP_ID (default: "tina4_consumer_group")
|
|
10
|
+
*/
|
|
11
|
+
import { execFileSync } from "node:child_process";
import { randomUUID } from "node:crypto";
import net from "node:net";

import type { QueueJob } from "../queue.js";
|
|
14
|
+
|
|
15
|
+
// ── Types ────────────────────────────────────────────────────
|
|
16
|
+
|
|
17
|
+
/**
 * Configuration options for the Kafka queue backend.
 *
 * Any field left unset falls back to its TINA4_KAFKA_* environment
 * variable, then to a built-in default (see the KafkaBackend constructor).
 */
export interface KafkaConfig {
  /** Comma-separated broker list, e.g. "localhost:9092" (the default). */
  brokers?: string;
  /** Consumer group id; defaults to "tina4_consumer_group". */
  groupId?: string;
}
|
|
21
|
+
|
|
22
|
+
/**
 * Minimal queue-backend contract, mirroring the interface of the core
 * file-based queue so backends can be swapped without touching callers.
 */
export interface QueueBackend {
  /**
   * Enqueue a payload on the named queue.
   * @param delay optional visibility delay — NOTE(review): units are not
   *   established here, and backends may ignore it entirely (KafkaBackend
   *   in this file does); confirm against the file-based queue.
   * @returns the generated job id
   */
  push(queue: string, payload: unknown, delay?: number): string;
  /** Retrieve the next job from the queue, or null when none is available. */
  pop(queue: string): QueueJob | null;
  /** Number of jobs currently waiting on the queue. */
  size(queue: string): number;
  /** Remove all pending jobs from the queue. */
  clear(queue: string): void;
}
|
|
28
|
+
|
|
29
|
+
// ── Kafka Protocol Constants ─────────────────────────────────
|
|
30
|
+
|
|
31
|
+
// Kafka wire-protocol API keys (numeric request identifiers from the
// Apache Kafka protocol specification). Only the produce/fetch keys are
// exercised by the backend below; the group/offset keys are declared for
// the consumer-group APIs but are not yet referenced in this file.
const API_PRODUCE = 0;           // Produce: append messages to a topic
const API_FETCH = 1;             // Fetch: read messages from a topic
const API_LIST_OFFSETS = 2;      // ListOffsets: query partition offsets
const API_METADATA = 3;          // Metadata: broker/topic discovery
const API_OFFSET_COMMIT = 8;     // OffsetCommit: persist consumer offsets
const API_OFFSET_FETCH = 9;      // OffsetFetch: read committed offsets
const API_FIND_COORDINATOR = 10; // FindCoordinator: locate the group coordinator
const API_JOIN_GROUP = 11;       // JoinGroup: join a consumer group
const API_HEARTBEAT = 12;        // Heartbeat: keep group membership alive
const API_LEAVE_GROUP = 13;      // LeaveGroup: exit a consumer group
const API_SYNC_GROUP = 14;       // SyncGroup: distribute partition assignments
|
|
42
|
+
|
|
43
|
+
// ── Kafka Backend ────────────────────────────────────────────
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Kafka queue backend using raw Kafka protocol over TCP.
|
|
47
|
+
*
|
|
48
|
+
* Uses synchronous-style communication by spawning a child process
|
|
49
|
+
* for each operation, similar to the Redis session handler pattern.
|
|
50
|
+
*/
|
|
51
|
+
export class KafkaBackend implements QueueBackend {
|
|
52
|
+
private brokers: string;
|
|
53
|
+
private groupId: string;
|
|
54
|
+
|
|
55
|
+
constructor(config?: KafkaConfig) {
|
|
56
|
+
this.brokers = config?.brokers ?? process.env.TINA4_KAFKA_BROKERS ?? "localhost:9092";
|
|
57
|
+
this.groupId = config?.groupId ?? process.env.TINA4_KAFKA_GROUP_ID ?? "tina4_consumer_group";
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Parse broker string into host:port.
|
|
62
|
+
*/
|
|
63
|
+
private parseBroker(): { host: string; port: number } {
|
|
64
|
+
const parts = this.brokers.split(",")[0].trim().split(":");
|
|
65
|
+
return {
|
|
66
|
+
host: parts[0] ?? "localhost",
|
|
67
|
+
port: parts[1] ? parseInt(parts[1], 10) : 9092,
|
|
68
|
+
};
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Execute a Kafka operation synchronously via a child process.
|
|
73
|
+
*/
|
|
74
|
+
private execSync(operation: string, topic: string, data?: string): string {
|
|
75
|
+
const { execFileSync } = require("node:child_process");
|
|
76
|
+
const broker = this.parseBroker();
|
|
77
|
+
|
|
78
|
+
const script = `
|
|
79
|
+
const net = require("node:net");
|
|
80
|
+
const host = ${JSON.stringify(broker.host)};
|
|
81
|
+
const port = ${broker.port};
|
|
82
|
+
const operation = ${JSON.stringify(operation)};
|
|
83
|
+
const topic = ${JSON.stringify(topic)};
|
|
84
|
+
const groupId = ${JSON.stringify(this.groupId)};
|
|
85
|
+
const data = ${JSON.stringify(data ?? "")};
|
|
86
|
+
let correlationId = 0;
|
|
87
|
+
|
|
88
|
+
// Kafka wire protocol helpers
|
|
89
|
+
function writeInt32(buf, offset, val) {
|
|
90
|
+
buf.writeInt32BE(val, offset);
|
|
91
|
+
return offset + 4;
|
|
92
|
+
}
|
|
93
|
+
function writeInt16(buf, offset, val) {
|
|
94
|
+
buf.writeInt16BE(val, offset);
|
|
95
|
+
return offset + 2;
|
|
96
|
+
}
|
|
97
|
+
function writeString(buf, offset, str) {
|
|
98
|
+
if (str === null) {
|
|
99
|
+
buf.writeInt16BE(-1, offset);
|
|
100
|
+
return offset + 2;
|
|
101
|
+
}
|
|
102
|
+
const len = Buffer.byteLength(str, "utf-8");
|
|
103
|
+
buf.writeInt16BE(len, offset);
|
|
104
|
+
buf.write(str, offset + 2, len, "utf-8");
|
|
105
|
+
return offset + 2 + len;
|
|
106
|
+
}
|
|
107
|
+
function writeBytes(buf, offset, bytes) {
|
|
108
|
+
if (bytes === null) {
|
|
109
|
+
buf.writeInt32BE(-1, offset);
|
|
110
|
+
return offset + 4;
|
|
111
|
+
}
|
|
112
|
+
buf.writeInt32BE(bytes.length, offset);
|
|
113
|
+
bytes.copy(buf, offset + 4);
|
|
114
|
+
return offset + 4 + bytes.length;
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
function buildProduceRequest(topicName, messageBytes) {
|
|
118
|
+
correlationId++;
|
|
119
|
+
const clientId = "tina4";
|
|
120
|
+
const topicBuf = Buffer.from(topicName, "utf-8");
|
|
121
|
+
const clientBuf = Buffer.from(clientId, "utf-8");
|
|
122
|
+
|
|
123
|
+
// Build message set (MessageV0)
|
|
124
|
+
const msgSize = 4 + 1 + 1 + 4 + 4 + messageBytes.length; // crc + magic + attrs + key(-1) + value
|
|
125
|
+
const msgBuf = Buffer.alloc(12 + msgSize); // offset(8) + size(4) + message
|
|
126
|
+
let o = 0;
|
|
127
|
+
// Offset (8 bytes, 0 for produce)
|
|
128
|
+
msgBuf.writeBigInt64BE(0n, o); o += 8;
|
|
129
|
+
// Message size
|
|
130
|
+
msgBuf.writeInt32BE(msgSize, o); o += 4;
|
|
131
|
+
// CRC placeholder (will be 0 — Kafka accepts for some versions)
|
|
132
|
+
msgBuf.writeInt32BE(0, o); o += 4;
|
|
133
|
+
// Magic byte
|
|
134
|
+
msgBuf.writeInt8(0, o); o += 1;
|
|
135
|
+
// Attributes
|
|
136
|
+
msgBuf.writeInt8(0, o); o += 1;
|
|
137
|
+
// Key (null = -1)
|
|
138
|
+
msgBuf.writeInt32BE(-1, o); o += 4;
|
|
139
|
+
// Value
|
|
140
|
+
msgBuf.writeInt32BE(messageBytes.length, o); o += 4;
|
|
141
|
+
messageBytes.copy(msgBuf, o); o += messageBytes.length;
|
|
142
|
+
|
|
143
|
+
// Build request
|
|
144
|
+
const reqSize = 2 + 2 + 4 + 2 + clientBuf.length + 2 + 4 + 4 + 2 + topicBuf.length + 4 + 4 + 4 + msgBuf.length;
|
|
145
|
+
const req = Buffer.alloc(4 + reqSize);
|
|
146
|
+
let pos = 0;
|
|
147
|
+
req.writeInt32BE(reqSize, pos); pos += 4;
|
|
148
|
+
// API key (Produce = 0)
|
|
149
|
+
req.writeInt16BE(API_PRODUCE, pos); pos += 2;
|
|
150
|
+
// API version
|
|
151
|
+
req.writeInt16BE(0, pos); pos += 2;
|
|
152
|
+
// Correlation ID
|
|
153
|
+
req.writeInt32BE(correlationId, pos); pos += 4;
|
|
154
|
+
// Client ID
|
|
155
|
+
req.writeInt16BE(clientBuf.length, pos); pos += 2;
|
|
156
|
+
clientBuf.copy(req, pos); pos += clientBuf.length;
|
|
157
|
+
// Required acks
|
|
158
|
+
req.writeInt16BE(1, pos); pos += 2;
|
|
159
|
+
// Timeout
|
|
160
|
+
req.writeInt32BE(5000, pos); pos += 4;
|
|
161
|
+
// Topic count
|
|
162
|
+
req.writeInt32BE(1, pos); pos += 4;
|
|
163
|
+
// Topic name
|
|
164
|
+
req.writeInt16BE(topicBuf.length, pos); pos += 2;
|
|
165
|
+
topicBuf.copy(req, pos); pos += topicBuf.length;
|
|
166
|
+
// Partition count
|
|
167
|
+
req.writeInt32BE(1, pos); pos += 4;
|
|
168
|
+
// Partition index
|
|
169
|
+
req.writeInt32BE(0, pos); pos += 4;
|
|
170
|
+
// Message set size
|
|
171
|
+
req.writeInt32BE(msgBuf.length, pos); pos += 4;
|
|
172
|
+
msgBuf.copy(req, pos);
|
|
173
|
+
|
|
174
|
+
return req;
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
function buildFetchRequest(topicName, fetchOffset) {
|
|
178
|
+
correlationId++;
|
|
179
|
+
const clientId = "tina4";
|
|
180
|
+
const topicBuf = Buffer.from(topicName, "utf-8");
|
|
181
|
+
const clientBuf = Buffer.from(clientId, "utf-8");
|
|
182
|
+
|
|
183
|
+
const reqSize = 2 + 2 + 4 + 2 + clientBuf.length + 4 + 4 + 4 + 4 + 2 + topicBuf.length + 4 + 4 + 8 + 4;
|
|
184
|
+
const req = Buffer.alloc(4 + reqSize);
|
|
185
|
+
let pos = 0;
|
|
186
|
+
req.writeInt32BE(reqSize, pos); pos += 4;
|
|
187
|
+
req.writeInt16BE(API_FETCH, pos); pos += 2;
|
|
188
|
+
req.writeInt16BE(0, pos); pos += 2;
|
|
189
|
+
req.writeInt32BE(correlationId, pos); pos += 4;
|
|
190
|
+
req.writeInt16BE(clientBuf.length, pos); pos += 2;
|
|
191
|
+
clientBuf.copy(req, pos); pos += clientBuf.length;
|
|
192
|
+
// Replica ID (-1 for consumer)
|
|
193
|
+
req.writeInt32BE(-1, pos); pos += 4;
|
|
194
|
+
// Max wait time
|
|
195
|
+
req.writeInt32BE(1000, pos); pos += 4;
|
|
196
|
+
// Min bytes
|
|
197
|
+
req.writeInt32BE(1, pos); pos += 4;
|
|
198
|
+
// Topic count
|
|
199
|
+
req.writeInt32BE(1, pos); pos += 4;
|
|
200
|
+
// Topic name
|
|
201
|
+
req.writeInt16BE(topicBuf.length, pos); pos += 2;
|
|
202
|
+
topicBuf.copy(req, pos); pos += topicBuf.length;
|
|
203
|
+
// Partition count
|
|
204
|
+
req.writeInt32BE(1, pos); pos += 4;
|
|
205
|
+
// Partition
|
|
206
|
+
req.writeInt32BE(0, pos); pos += 4;
|
|
207
|
+
// Fetch offset
|
|
208
|
+
req.writeBigInt64BE(BigInt(fetchOffset), pos); pos += 8;
|
|
209
|
+
// Max bytes
|
|
210
|
+
req.writeInt32BE(1048576, pos); pos += 4;
|
|
211
|
+
|
|
212
|
+
return req;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
const sock = net.createConnection({ host, port }, () => {
|
|
216
|
+
if (operation === "publish") {
|
|
217
|
+
const msgBytes = Buffer.from(data, "utf-8");
|
|
218
|
+
const req = buildProduceRequest(topic, msgBytes);
|
|
219
|
+
sock.write(req);
|
|
220
|
+
} else if (operation === "get") {
|
|
221
|
+
const req = buildFetchRequest(topic, 0);
|
|
222
|
+
sock.write(req);
|
|
223
|
+
} else {
|
|
224
|
+
process.stdout.write("__UNSUPPORTED__");
|
|
225
|
+
sock.destroy();
|
|
226
|
+
}
|
|
227
|
+
});
|
|
228
|
+
|
|
229
|
+
let buffer = Buffer.alloc(0);
|
|
230
|
+
sock.on("data", (chunk) => {
|
|
231
|
+
buffer = Buffer.concat([buffer, chunk]);
|
|
232
|
+
|
|
233
|
+
if (buffer.length >= 4) {
|
|
234
|
+
const respSize = buffer.readInt32BE(0);
|
|
235
|
+
if (buffer.length >= 4 + respSize) {
|
|
236
|
+
if (operation === "publish") {
|
|
237
|
+
process.stdout.write("__PUBLISHED__");
|
|
238
|
+
} else if (operation === "get") {
|
|
239
|
+
// Parse fetch response to extract message value
|
|
240
|
+
try {
|
|
241
|
+
// Skip response header and topic metadata to find message
|
|
242
|
+
let pos = 4 + 4; // size + correlation_id
|
|
243
|
+
const topicCount = buffer.readInt32BE(pos); pos += 4;
|
|
244
|
+
if (topicCount > 0) {
|
|
245
|
+
const topicLen = buffer.readInt16BE(pos); pos += 2 + topicLen;
|
|
246
|
+
const partCount = buffer.readInt32BE(pos); pos += 4;
|
|
247
|
+
if (partCount > 0) {
|
|
248
|
+
const partId = buffer.readInt32BE(pos); pos += 4;
|
|
249
|
+
const errCode = buffer.readInt16BE(pos); pos += 2;
|
|
250
|
+
const hwm = buffer.readBigInt64BE(pos); pos += 8;
|
|
251
|
+
const msgSetSize = buffer.readInt32BE(pos); pos += 4;
|
|
252
|
+
|
|
253
|
+
if (msgSetSize > 0 && errCode === 0) {
|
|
254
|
+
// Parse first message in message set
|
|
255
|
+
const msgOffset = buffer.readBigInt64BE(pos); pos += 8;
|
|
256
|
+
const msgSize = buffer.readInt32BE(pos); pos += 4;
|
|
257
|
+
const crc = buffer.readInt32BE(pos); pos += 4;
|
|
258
|
+
const magic = buffer.readInt8(pos); pos += 1;
|
|
259
|
+
const attrs = buffer.readInt8(pos); pos += 1;
|
|
260
|
+
const keyLen = buffer.readInt32BE(pos); pos += 4;
|
|
261
|
+
if (keyLen > 0) pos += keyLen;
|
|
262
|
+
const valLen = buffer.readInt32BE(pos); pos += 4;
|
|
263
|
+
if (valLen > 0) {
|
|
264
|
+
const val = buffer.subarray(pos, pos + valLen).toString("utf-8");
|
|
265
|
+
process.stdout.write(val);
|
|
266
|
+
} else {
|
|
267
|
+
process.stdout.write("__EMPTY__");
|
|
268
|
+
}
|
|
269
|
+
} else {
|
|
270
|
+
process.stdout.write("__EMPTY__");
|
|
271
|
+
}
|
|
272
|
+
} else {
|
|
273
|
+
process.stdout.write("__EMPTY__");
|
|
274
|
+
}
|
|
275
|
+
} else {
|
|
276
|
+
process.stdout.write("__EMPTY__");
|
|
277
|
+
}
|
|
278
|
+
} catch (e) {
|
|
279
|
+
process.stdout.write("__EMPTY__");
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
sock.destroy();
|
|
283
|
+
}
|
|
284
|
+
}
|
|
285
|
+
});
|
|
286
|
+
|
|
287
|
+
sock.on("error", (err) => {
|
|
288
|
+
process.stderr.write(err.message);
|
|
289
|
+
process.exit(1);
|
|
290
|
+
});
|
|
291
|
+
|
|
292
|
+
setTimeout(() => { sock.destroy(); process.exit(1); }, 10000);
|
|
293
|
+
`;
|
|
294
|
+
|
|
295
|
+
try {
|
|
296
|
+
const result = execFileSync(process.execPath, ["-e", script], {
|
|
297
|
+
encoding: "utf-8",
|
|
298
|
+
timeout: 15000,
|
|
299
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
300
|
+
});
|
|
301
|
+
return result;
|
|
302
|
+
} catch {
|
|
303
|
+
return "";
|
|
304
|
+
}
|
|
305
|
+
}
|
|
306
|
+
|
|
307
|
+
push(queue: string, payload: unknown, _delay?: number): string {
|
|
308
|
+
const id = randomUUID();
|
|
309
|
+
const now = new Date().toISOString();
|
|
310
|
+
|
|
311
|
+
const job: QueueJob = {
|
|
312
|
+
id,
|
|
313
|
+
payload,
|
|
314
|
+
status: "pending",
|
|
315
|
+
createdAt: now,
|
|
316
|
+
attempts: 0,
|
|
317
|
+
delayUntil: null,
|
|
318
|
+
};
|
|
319
|
+
|
|
320
|
+
const result = this.execSync("publish", queue, JSON.stringify(job));
|
|
321
|
+
if (!result.includes("__PUBLISHED__")) {
|
|
322
|
+
throw new Error("Kafka publish failed");
|
|
323
|
+
}
|
|
324
|
+
return id;
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
pop(queue: string): QueueJob | null {
|
|
328
|
+
const result = this.execSync("get", queue);
|
|
329
|
+
if (!result || result === "__EMPTY__" || result === "__UNSUPPORTED__") return null;
|
|
330
|
+
|
|
331
|
+
try {
|
|
332
|
+
return JSON.parse(result) as QueueJob;
|
|
333
|
+
} catch {
|
|
334
|
+
return null;
|
|
335
|
+
}
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
size(_queue: string): number {
|
|
339
|
+
// Kafka doesn't have a simple "queue size" concept — return 0
|
|
340
|
+
// Real implementation would need to compare committed offset vs log end offset
|
|
341
|
+
return 0;
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
clear(_queue: string): void {
|
|
345
|
+
// Kafka topics are cleared via retention policies, not purging
|
|
346
|
+
// This is a no-op for Kafka
|
|
347
|
+
}
|
|
348
|
+
}
|