@mereb/shared-packages 0.0.32 → 0.0.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/messaging/kafka.d.ts
CHANGED

@@ -1,6 +1,25 @@
-import { Kafka, type KafkaConfig, type Producer
+import { type Consumer, Kafka, type KafkaConfig, type Producer } from 'kafkajs';
 export declare function getKafka(config: KafkaConfig): Kafka;
 export declare function getProducer(config: KafkaConfig): Promise<Producer>;
 export declare function createConsumer(config: KafkaConfig, groupId: string): Promise<Consumer>;
+export type KafkaEnvConfigOptions = {
+    /**
+     * Override clientId; defaults to process.env.KAFKA_CLIENT_ID or 'app'
+     */
+    clientId?: string;
+    /**
+     * Environment variable to read brokers from; defaults to KAFKA_BROKERS
+     */
+    brokersEnvVar?: string;
+    /**
+     * Default value when env KAFKA_SSL is not set
+     */
+    sslDefault?: boolean;
+    /**
+     * Default value when env KAFKA_SSL_INSECURE is not set
+     */
+    sslInsecureDefault?: boolean;
+};
+export declare function buildKafkaConfigFromEnv(opts?: KafkaEnvConfigOptions): KafkaConfig | null;
 export declare function disconnectProducer(): Promise<void>;
 //# sourceMappingURL=kafka.d.ts.map
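
Taken together, the new declarations let a service decide at startup whether Kafka is configured at all: buildKafkaConfigFromEnv returns null when no brokers are present, and createConsumer now has a public Consumer-typed declaration. A minimal sketch of the intended call pattern, assuming these helpers are re-exported from the package root (the topic name and group id are invented for illustration):

import { buildKafkaConfigFromEnv, createConsumer } from '@mereb/shared-packages';

async function startOrdersConsumer(): Promise<void> {
    // null means no brokers were configured; skip Kafka wiring entirely.
    const config = buildKafkaConfigFromEnv({ clientId: 'orders-service' });
    if (!config) {
        console.warn('KAFKA_BROKERS not set; consumer disabled');
        return;
    }
    const consumer = await createConsumer(config, 'orders-group'); // returned already connected
    await consumer.subscribe({ topics: ['orders'] });
    await consumer.run({
        eachMessage: async ({ message }) => {
            console.log(message.value?.toString());
        },
    });
}
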
package/dist/messaging/kafka.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"kafka.d.ts","sourceRoot":"","sources":["../../src/messaging/kafka.ts"],"names":[],"mappings":"AAAA,OAAO,
+{"version":3,"file":"kafka.d.ts","sourceRoot":"","sources":["../../src/messaging/kafka.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,KAAK,QAAQ,EAAE,KAAK,EAAE,KAAK,WAAW,EAAgB,KAAK,QAAQ,EAAC,MAAM,SAAS,CAAC;AAK5F,wBAAgB,QAAQ,CAAC,MAAM,EAAE,WAAW,SAG3C;AAED,wBAAsB,WAAW,CAAC,MAAM,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC,CASxE;AAED,wBAAsB,cAAc,CAAC,MAAM,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,QAAQ,CAAC,CAK5F;AAED,MAAM,MAAM,qBAAqB,GAAG;IAChC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAChC,CAAC;AAEF,wBAAgB,uBAAuB,CACnC,IAAI,GAAE,qBAA0B,GACjC,WAAW,GAAG,IAAI,CA8BpB;AAED,wBAAsB,kBAAkB,kBAKvC"}

package/dist/messaging/kafka.js
CHANGED

@@ -1,15 +1,16 @@
-import { Kafka } from 'kafkajs';
+import { Kafka, Partitioners } from 'kafkajs';
 let kafkaInstance;
 let producerInstance;
 export function getKafka(config) {
-    if (!kafkaInstance) {
-        kafkaInstance = new Kafka(config);
-    }
+    kafkaInstance ??= new Kafka(config);
     return kafkaInstance;
 }
 export async function getProducer(config) {
     if (!producerInstance) {
-        producerInstance = getKafka(config).producer();
+        // Use legacy partitioner to avoid warning and retain old hashing behaviour
+        producerInstance = getKafka(config).producer({
+            createPartitioner: Partitioners.LegacyPartitioner
+        });
         await producerInstance.connect();
     }
     return producerInstance;
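
For context on the getProducer change above: kafkajs 2.x replaced the default partitioner with a Java-client-compatible one, which both changes which partition a given message key hashes to and logs a one-time warning when a producer is created without an explicit partitioner choice. Pinning Partitioners.LegacyPartitioner preserves the pre-2.x key-to-partition mapping. The equivalent call in standalone code, as a sketch (broker address and clientId invented):

import { Kafka, Partitioners } from 'kafkajs';

const kafka = new Kafka({ clientId: 'app', brokers: ['localhost:9092'] });

// Same choice getProducer now makes: keep kafkajs v1 key hashing and
// suppress the partitioner warning that kafkajs 2.x emits by default.
const producer = kafka.producer({
    createPartitioner: Partitioners.LegacyPartitioner,
});

await producer.connect();
await producer.send({
    topic: 'orders',
    messages: [{ key: 'order-1', value: '{"status":"created"}' }],
});
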
@@ -20,6 +21,30 @@ export async function createConsumer(config, groupId) {
     await consumer.connect();
     return consumer;
 }
+export function buildKafkaConfigFromEnv(opts = {}) {
+    const brokersEnv = opts.brokersEnvVar ?? 'KAFKA_BROKERS';
+    const brokersRaw = process.env[brokersEnv] ?? '';
+    const brokers = brokersRaw
+        .split(',')
+        .map((b) => b.trim())
+        .filter(Boolean);
+    if (brokers.length === 0) {
+        return null;
+    }
+    const clientId = opts.clientId ?? process.env.KAFKA_CLIENT_ID ?? 'app';
+    const sslEnabled = (process.env.KAFKA_SSL ?? String(opts.sslDefault ?? false)) === 'true';
+    const sslInsecure = (process.env.KAFKA_SSL_INSECURE ?? String(opts.sslInsecureDefault ?? false)) ===
+        'true';
+    return {
+        clientId,
+        brokers,
+        ssl: sslEnabled
+            ? {
+                rejectUnauthorized: !sslInsecure
+            }
+            : undefined
+    };
+}
 export async function disconnectProducer() {
     if (producerInstance) {
         await producerInstance.disconnect();
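
Note the precedence in buildKafkaConfigFromEnv: the environment variable always wins over the option default, and both SSL flags compare against the literal string 'true', so values like '1' or 'TRUE' leave the flag off. A usage sketch with invented env values, again assuming a package-root re-export:

import { buildKafkaConfigFromEnv } from '@mereb/shared-packages';

process.env.KAFKA_BROKERS = ' broker1:9092, broker2:9092 '; // whitespace around entries is trimmed
process.env.KAFKA_CLIENT_ID = 'payments';
process.env.KAFKA_SSL = 'true';

const config = buildKafkaConfigFromEnv({ sslInsecureDefault: false });
// => {
//      clientId: 'payments',
//      brokers: ['broker1:9092', 'broker2:9092'],
//      ssl: { rejectUnauthorized: true }   // KAFKA_SSL_INSECURE unset, default false
//    }
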
package/dist/transports/splunk-transport.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"splunk-transport.d.ts","sourceRoot":"","sources":["../../src/transports/splunk-transport.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAG5C,KAAK,sBAAsB,GAAG;IAC1B,GAAG,EAAE,MAAM,CAAC;IACZ,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;CACtB,CAAC;
+{"version":3,"file":"splunk-transport.d.ts","sourceRoot":"","sources":["../../src/transports/splunk-transport.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,yBAAyB,CAAC;AAG5C,KAAK,sBAAsB,GAAG;IAC1B,GAAG,EAAE,MAAM,CAAC;IACZ,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;CACtB,CAAC;AA0CF,wBAA8B,eAAe,CAAC,IAAI,EAAE,sBAAsB,yDAsDzE"}

package/dist/transports/splunk-transport.js
CHANGED

@@ -9,25 +9,31 @@ const warnOnce = () => {
         }
     };
 };
-const normalizeChunk = (chunk) => {
-    if (typeof chunk === 'string')
-        return chunk;
-    const candidate = chunk?.toString?.();
-    return typeof candidate === 'string' ? candidate : null;
-};
 const parsePayload = (chunk, onWarn) => {
-    const normalized = normalizeChunk(chunk);
-    if (normalized == null) {
-        onWarn('Splunk transport dropped log: non-string chunk');
-        return null;
+    if (chunk != null && typeof chunk === 'object') {
+        return { success: true, payload: chunk }; // already structured JSON from pino
     }
-    try {
-        return JSON.parse(normalized);
+    if (typeof chunk === 'string') {
+        try {
+            return { success: true, payload: JSON.parse(chunk) };
+        }
+        catch {
+            onWarn('Splunk transport dropped log: JSON parse failed');
+            return { success: false };
+        }
     }
-    catch {
-        onWarn('Splunk transport dropped log: JSON parse failed');
-        return null;
+    const candidate = chunk?.toString?.();
+    if (typeof candidate === 'string') {
+        try {
+            return { success: true, payload: JSON.parse(candidate) };
+        }
+        catch {
+            onWarn('Splunk transport dropped log: JSON parse failed');
+            return { success: false };
+        }
     }
+    onWarn('Splunk transport dropped log: non-string chunk');
+    return { success: false };
 };
 export default async function splunkTransport(opts) {
     const url = opts.url;
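
The rewritten parsePayload replaces the old null sentinel with a small discriminated result, and adds a fast path for chunks that are already objects (as pino delivers when the transport stream is in object mode). Spelled out as a TypeScript type, under a name we are inventing for illustration:

// 'ParseResult' is our label for this shape; the package does not export it.
type ParseResult =
    | { success: true; payload: unknown } // structured object, or JSON parsed from a string
    | { success: false };                 // dropped; a warning was already sent via onWarn

// Behaviour under the new logic:
//   parsePayload({ level: 30, msg: 'ok' }, warn) -> { success: true, payload: { level: 30, msg: 'ok' } }
//   parsePayload('{"level":30}', warn)           -> { success: true, payload: { level: 30 } }
//   parsePayload('not json', warn)               -> { success: false } (warn: JSON parse failed)
//   parsePayload(undefined, warn)                -> { success: false } (warn: non-string chunk)
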
@@ -42,8 +48,8 @@ export default async function splunkTransport(opts) {
     return build(async (stream) => {
         const warn = warnOnce();
         for await (const chunk of stream) {
-            const payload = parsePayload(chunk, warn);
-            if (payload == null)
+            const parsed = parsePayload(chunk, warn);
+            if (!parsed.success)
                 continue;
             const controller = AbortSignal.timeout(timeoutMs);
             const requestId = randomUUID();
@@ -55,7 +61,7 @@ export default async function splunkTransport(opts) {
                 'X-Splunk-Request-Channel': requestId
             },
             body: JSON.stringify({
-                event: payload,
+                event: parsed.payload,
                 index,
                 source,
                 sourcetype
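
Since splunk-transport.js is a default-export async function that appears to follow the pino worker-transport pattern (wrapping build(...) from pino-abstract-transport and POSTing each event to a Splunk HEC endpoint), it would normally be loaded via pino.transport. A sketch; the target path and every option value below, beyond the option names visible in this diff (url, index, source, sourcetype), are assumptions:

import pino from 'pino';

// Target path and option values are assumptions based on the diff, not documented API.
const logger = pino(
    pino.transport({
        target: '@mereb/shared-packages/dist/transports/splunk-transport.js',
        options: {
            url: 'https://splunk.example.com:8088/services/collector/event',
            index: 'app-logs',
            source: 'payments-service',
            sourcetype: '_json',
        },
    }),
);

logger.info({ orderId: 'abc-123' }, 'order processed'); // shipped to Splunk as the HEC event body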