@thirdweb-dev/service-utils 0.8.4 → 0.8.5-nightly-ccf6329810c1426fadc4483660e154ae38a2927b-20250205043811
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/core/usageV2.js +2 -2
- package/dist/cjs/core/usageV2.js.map +1 -1
- package/dist/cjs/node/kafka.js +120 -0
- package/dist/cjs/node/kafka.js.map +1 -0
- package/dist/cjs/node/usageV2.js +23 -100
- package/dist/cjs/node/usageV2.js.map +1 -1
- package/dist/esm/core/usageV2.js +2 -2
- package/dist/esm/core/usageV2.js.map +1 -1
- package/dist/esm/node/kafka.js +116 -0
- package/dist/esm/node/kafka.js.map +1 -0
- package/dist/esm/node/usageV2.js +24 -101
- package/dist/esm/node/usageV2.js.map +1 -1
- package/dist/types/core/usageV2.d.ts +3 -2
- package/dist/types/core/usageV2.d.ts.map +1 -1
- package/dist/types/node/kafka.d.ts +55 -0
- package/dist/types/node/kafka.d.ts.map +1 -0
- package/dist/types/node/usageV2.d.ts +8 -22
- package/dist/types/node/usageV2.d.ts.map +1 -1
- package/package.json +3 -3
package/dist/cjs/core/usageV2.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getTopicName = getTopicName;
-function getTopicName(
-    return `usage_v2.raw_${
+function getTopicName(source) {
+    return `usage_v2.raw_${source}`;
 }
 //# sourceMappingURL=usageV2.js.map
package/dist/cjs/core/usageV2.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/core/usageV2.ts"],"names":[],"mappings":";;
+{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/core/usageV2.ts"],"names":[],"mappings":";;AAyDA,oCAEC;AAFD,SAAgB,YAAY,CAAC,MAAqB;IAChD,OAAO,gBAAgB,MAAM,EAAE,CAAC;AAClC,CAAC"}
package/dist/cjs/node/kafka.js
ADDED
@@ -0,0 +1,120 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.KafkaProducer = void 0;
+const node_tls_1 = require("node:tls");
+const kafkajs_1 = require("kafkajs");
+const lz4js_1 = require("lz4js");
+// CompressionCodecs is not exported properly in kafkajs. Source: https://github.com/tulios/kafkajs/issues/1391
+const kafkajs_2 = require("kafkajs");
+const { CompressionCodecs } = kafkajs_2.default;
+/**
+ * Creates a KafkaProducer which opens a persistent TCP connection.
+ * This class is thread-safe so your service should re-use one instance.
+ *
+ * Example:
+ * ```ts
+ * kafka = new KafkaProducer(...)
+ * await kafka.send(topic, events)
+ * // Non-blocking:
+ * // void kafka.send(topic, events).catch((e) => console.error(e))
+ * ```
+ */
+class KafkaProducer {
+    constructor(config) {
+        Object.defineProperty(this, "kafka", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "producer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "compression", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        const { producerName, environment, shouldCompress = true, username, password, } = config;
+        this.kafka = new kafkajs_1.Kafka({
+            clientId: `${producerName}-${environment}`,
+            brokers: environment === "production"
+                ? ["warpstream.thirdweb.xyz:9092"]
+                : ["warpstream-dev.thirdweb.xyz:9092"],
+            ssl: {
+                checkServerIdentity(hostname, cert) {
+                    return (0, node_tls_1.checkServerIdentity)(hostname.toLowerCase(), cert);
+                },
+            },
+            sasl: {
+                mechanism: "plain",
+                username,
+                password,
+            },
+        });
+        if (shouldCompress) {
+            this.compression = kafkajs_1.CompressionTypes.LZ4;
+            CompressionCodecs[kafkajs_1.CompressionTypes.LZ4] = () => ({
+                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                compress: (encoder) => {
+                    const compressed = (0, lz4js_1.compress)(encoder.buffer);
+                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                    return Buffer.from(compressed);
+                },
+                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                decompress: (buffer) => {
+                    const decompressed = (0, lz4js_1.decompress)(buffer);
+                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                    return Buffer.from(decompressed);
+                },
+            });
+        }
+        else {
+            this.compression = kafkajs_1.CompressionTypes.None;
+        }
+    }
+    /**
+     * Send messages to a Kafka topic.
+     * This method may throw. To call this non-blocking:
+     * @param topic
+     * @param messages
+     * @param configOverrides
+     */
+    async send(topic, messages,
+    /**
+     * Reference: https://kafka.js.org/docs/producing#producing-messages
+     */
+    options) {
+        if (!this.producer) {
+            this.producer = this.kafka.producer({
+                allowAutoTopicCreation: options?.allowAutoTopicCreation ?? false,
+            });
+            await this.producer.connect();
+        }
+        await this.producer.send({
+            topic,
+            messages: messages.map((m) => ({
+                value: JSON.stringify(m),
+            })),
+            compression: this.compression,
+            acks: options?.acks ?? -1, // Default: All brokers must acknowledge
+            timeout: options?.timeout ?? 10_000, // Default: 10 seconds
+        });
+    }
+    /**
+     * Disconnects KafkaProducer.
+     * Useful when shutting down the service to flush in-flight events.
+     */
+    async disconnect() {
+        if (this.producer) {
+            await this.producer.disconnect();
+            this.producer = null;
+        }
+    }
+}
+exports.KafkaProducer = KafkaProducer;
+//# sourceMappingURL=kafka.js.map
package/dist/cjs/node/kafka.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"kafka.js","sourceRoot":"","sources":["../../../src/node/kafka.ts"],"names":[],"mappings":";;;AAAA,uCAA+C;AAC/C,qCAAiE;AACjE,iCAA6C;AAE7C,+GAA+G;AAC/G,qCAA8B;AAC9B,MAAM,EAAE,iBAAiB,EAAE,GAAG,iBAAO,CAAC;AAEtC;;;;;;;;;;;GAWG;AACH,MAAa,aAAa;IAKxB,YAAY,MAgBX;QApBO;;;;;WAAa;QACb;;;;mBAA4B,IAAI;WAAC;QACjC;;;;;WAA8B;QAmBpC,MAAM,EACJ,YAAY,EACZ,WAAW,EACX,cAAc,GAAG,IAAI,EACrB,QAAQ,EACR,QAAQ,GACT,GAAG,MAAM,CAAC;QAEX,IAAI,CAAC,KAAK,GAAG,IAAI,eAAK,CAAC;YACrB,QAAQ,EAAE,GAAG,YAAY,IAAI,WAAW,EAAE;YAC1C,OAAO,EACL,WAAW,KAAK,YAAY;gBAC1B,CAAC,CAAC,CAAC,8BAA8B,CAAC;gBAClC,CAAC,CAAC,CAAC,kCAAkC,CAAC;YAC1C,GAAG,EAAE;gBACH,mBAAmB,CAAC,QAAQ,EAAE,IAAI;oBAChC,OAAO,IAAA,8BAAmB,EAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,IAAI,CAAC,CAAC;gBAC3D,CAAC;aACF;YACD,IAAI,EAAE;gBACJ,SAAS,EAAE,OAAO;gBAClB,QAAQ;gBACR,QAAQ;aACT;SACF,CAAC,CAAC;QAEH,IAAI,cAAc,EAAE,CAAC;YACnB,IAAI,CAAC,WAAW,GAAG,0BAAgB,CAAC,GAAG,CAAC;YAExC,iBAAiB,CAAC,0BAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC;gBAC/C,wEAAwE;gBACxE,QAAQ,EAAE,CAAC,OAA2B,EAAE,EAAE;oBACxC,MAAM,UAAU,GAAG,IAAA,gBAAQ,EAAC,OAAO,CAAC,MAAM,CAAC,CAAC;oBAC5C,wEAAwE;oBACxE,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACjC,CAAC;gBACD,wEAAwE;gBACxE,UAAU,EAAE,CAAC,MAAc,EAAE,EAAE;oBAC7B,MAAM,YAAY,GAAG,IAAA,kBAAU,EAAC,MAAM,CAAC,CAAC;oBACxC,wEAAwE;oBACxE,OAAO,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACnC,CAAC;aACF,CAAC,CAAC;QACL,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,WAAW,GAAG,0BAAgB,CAAC,IAAI,CAAC;QAC3C,CAAC;IACH,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,IAAI,CACR,KAAa,EACb,QAAmC;IACnC;;OAEG;IACH,OAIC;QAED,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;YACnB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAClC,sBAAsB,EAAE,OAAO,EAAE,sBAAsB,IAAI,KAAK;aACjE,CAAC,CAAC;YACH,MAAM,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC;QAChC,CAAC;QAED,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC;YACvB,KAAK;YACL,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;gBAC7B,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC;aACzB,CAAC,CAAC;YACH,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,IAAI,EAAE,OAAO,EAAE,IAAI,IAAI,CAAC,CAAC,EAAE,wCAAwC;YACnE,OAAO,EAAE,OAAO,EAAE,OAAO,IAAI,MAAM,EAAE,sBAAsB;SAC5D,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,UAAU;QACd,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE,CAAC;YACjC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;QACvB,CAAC;IACH,CAAC;CACF;AArHD,sCAqHC"}
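The new KafkaProducer class added above is a generic producer that UsageV2Producer (next file) now delegates to. A minimal usage sketch, assuming the class is re-exported from a Node entry point of the package; the import path, the "storage-server" name, and the KAFKA_USERNAME / KAFKA_PASSWORD environment variables are illustrative assumptions:

```ts
import { KafkaProducer } from "@thirdweb-dev/service-utils/node"; // import path assumed

// Re-use one instance per service; the TCP connection is opened lazily on the first send().
const kafka = new KafkaProducer({
  producerName: "storage-server", // descriptive service name (example value)
  environment: "development", // or "production"
  shouldCompress: true, // default: true (LZ4)
  username: process.env.KAFKA_USERNAME as string, // assumed env vars
  password: process.env.KAFKA_PASSWORD as string,
});

// Messages are JSON-stringified inside send(); acks and timeout default to -1 and 10_000 ms.
await kafka.send("usage_v2.raw_sdk", [{ team_id: "abc", action: "upload" }]);

// Flush any in-flight events on shutdown.
await kafka.disconnect();
```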
package/dist/cjs/node/usageV2.js
CHANGED
@@ -2,10 +2,8 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.UsageV2Producer = void 0;
 const node_crypto_1 = require("node:crypto");
-const node_tls_1 = require("node:tls");
-const kafkajs_1 = require("kafkajs");
-const lz4js_1 = require("lz4js");
 const usageV2_js_1 = require("../core/usageV2.js");
+const kafka_js_1 = require("./kafka.js");
 /**
  * Creates a UsageV2Producer which opens a persistent TCP connection.
  * This class is thread-safe so your service should re-use one instance.
@@ -13,136 +11,61 @@ const usageV2_js_1 = require("../core/usageV2.js");
  * Example:
  * ```ts
  * usageV2 = new UsageV2Producer(..)
- * await usageV2.init()
  * await usageV2.sendEvents(events)
  * // Non-blocking:
- * // void usageV2.sendEvents(events).catch(console.error)
+ * // void usageV2.sendEvents(events).catch((e) => console.error(e))
  * ```
  */
 class UsageV2Producer {
     constructor(config) {
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "kafkaProducer", {
             enumerable: true,
             configurable: true,
             writable: true,
             value: void 0
         });
-        Object.defineProperty(this, "producer", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: null
-        });
         Object.defineProperty(this, "topic", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
-
-
-
-
-
+        this.kafkaProducer = new kafka_js_1.KafkaProducer({
+            producerName: config.producerName,
+            environment: config.environment,
+            shouldCompress: config.shouldCompress,
+            username: config.username,
+            password: config.password,
         });
-
-        this.kafka = new kafkajs_1.Kafka({
-            clientId: `${producerName}-${environment}`,
-            brokers: environment === "production"
-                ? ["warpstream.thirdweb.xyz:9092"]
-                : ["warpstream-dev.thirdweb.xyz:9092"],
-            ssl: {
-                checkServerIdentity(hostname, cert) {
-                    return (0, node_tls_1.checkServerIdentity)(hostname.toLowerCase(), cert);
-                },
-            },
-            sasl: {
-                mechanism: "plain",
-                username,
-                password,
-            },
-        });
-        this.topic = (0, usageV2_js_1.getTopicName)(productName);
-        this.compression = shouldCompress
-            ? kafkajs_1.CompressionTypes.LZ4
-            : kafkajs_1.CompressionTypes.None;
-    }
-    /**
-     * Connect the producer.
-     * This must be called before calling `sendEvents()`.
-     */
-    async init(configOverrides) {
-        if (this.compression === kafkajs_1.CompressionTypes.LZ4) {
-            kafkajs_1.CompressionCodecs[kafkajs_1.CompressionTypes.LZ4] = () => ({
-                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                compress: (encoder) => {
-                    const compressed = (0, lz4js_1.compress)(encoder.buffer);
-                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                    return Buffer.from(compressed);
-                },
-                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                decompress: (buffer) => {
-                    const decompressed = (0, lz4js_1.decompress)(buffer);
-                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                    return Buffer.from(decompressed);
-                },
-            });
-        }
-        this.producer = this.kafka.producer({
-            allowAutoTopicCreation: false,
-            ...configOverrides,
-        });
-        await this.producer.connect();
+        this.topic = (0, usageV2_js_1.getTopicName)(config.source);
     }
     /**
      * Send usageV2 events.
      * This method may throw. To call this non-blocking:
-     *
-     * ```ts
-     * usageV2 = new UsageV2Producer(...)
-     * void usageV2.sendEvents(events).catch(console.error)
-     *
-     * @param events - The events to send.
+     * @param events
      */
     async sendEvents(events,
     /**
     * Reference: https://kafka.js.org/docs/producing#producing-messages
     */
-
-
-
-
-
-
-
-
-
-
-
-                ? event.team_id.slice(5)
-                : event.team_id,
-            };
-        });
-        await this.producer.send({
-            topic: this.topic,
-            messages: parsedEvents.map((event) => ({
-                value: JSON.stringify(event),
-            })),
-            acks: -1, // All brokers must acknowledge
-            timeout: 10_000, // 10 seconds
-            compression: this.compression,
-            ...configOverrides,
-        });
+    options) {
+        const parsedEvents = events.map((event) => ({
+            ...event,
+            id: event.id ?? (0, node_crypto_1.randomUUID)(),
+            created_at: event.created_at ?? new Date(),
+            // Remove the "team_" prefix, if any.
+            team_id: event.team_id.startsWith("team_")
+                ? event.team_id.slice(5)
+                : event.team_id,
+        }));
+        await this.kafkaProducer.send(this.topic, parsedEvents, options);
     }
     /**
      * Disconnects UsageV2Producer.
      * Useful when shutting down the service to flush in-flight events.
      */
     async disconnect() {
-
-        await this.producer.disconnect();
-        this.producer = null;
-    }
+        await this.kafkaProducer.disconnect();
     }
 }
 exports.UsageV2Producer = UsageV2Producer;
package/dist/cjs/node/usageV2.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/node/usageV2.ts"],"names":[],"mappings":";;;AAAA,6CAAyC;AACzC,
{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/node/usageV2.ts"],"names":[],"mappings":";;;AAAA,6CAAyC;AACzC,mDAI4B;AAC5B,yCAA2C;AAE3C;;;;;;;;;;;GAWG;AACH,MAAa,eAAe;IAI1B,YAAY,MAoBX;QAvBO;;;;;WAA6B;QAC7B;;;;;WAAc;QAuBpB,IAAI,CAAC,aAAa,GAAG,IAAI,wBAAa,CAAC;YACrC,YAAY,EAAE,MAAM,CAAC,YAAY;YACjC,WAAW,EAAE,MAAM,CAAC,WAAW;YAC/B,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,QAAQ,EAAE,MAAM,CAAC,QAAQ;SAC1B,CAAC,CAAC;QACH,IAAI,CAAC,KAAK,GAAG,IAAA,yBAAY,EAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAC3C,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,UAAU,CACd,MAAsB;IACtB;;OAEG;IACH,OAIC;QAED,MAAM,YAAY,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;YAC1C,GAAG,KAAK;YACR,EAAE,EAAE,KAAK,CAAC,EAAE,IAAI,IAAA,wBAAU,GAAE;YAC5B,UAAU,EAAE,KAAK,CAAC,UAAU,IAAI,IAAI,IAAI,EAAE;YAC1C,qCAAqC;YACrC,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC;gBACxC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;gBACxB,CAAC,CAAC,KAAK,CAAC,OAAO;SAClB,CAAC,CAAC,CAAC;QACJ,MAAM,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,YAAY,EAAE,OAAO,CAAC,CAAC;IACnE,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,UAAU;QACd,MAAM,IAAI,CAAC,aAAa,CAAC,UAAU,EAAE,CAAC;IACxC,CAAC;CACF;AAtED,0CAsEC"}
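Taken together, these changes drop the separate init() step and the old product-based constructor option: UsageV2Producer now builds a shared KafkaProducer in its constructor, takes a `source` field instead, and connects lazily on the first sendEvents() call. A hedged sketch of what the calling code looks like on this version; the import path, the "storage" source value, and the credentials are assumptions, and the event fields are abbreviated:

```ts
import { UsageV2Producer } from "@thirdweb-dev/service-utils/node"; // import path assumed

const usageV2 = new UsageV2Producer({
  producerName: "storage-server",
  environment: "development",
  source: "storage", // replaces the old `productName`; must be a ServiceName or "sdk"
  username: process.env.KAFKA_USERNAME as string, // assumed env vars
  password: process.env.KAFKA_PASSWORD as string,
});

// 0.8.4 required `await usageV2.init()` here; in this version the first send connects.
await usageV2.sendEvents([
  {
    team_id: "team_abc123", // the "team_" prefix is stripped before the event is sent
    // id and created_at default to a random UUID and the current time
    // ...other UsageV2Event fields for your product
  },
]);

await usageV2.disconnect(); // flush in-flight events on shutdown
```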
package/dist/esm/core/usageV2.js
CHANGED
package/dist/esm/core/usageV2.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/core/usageV2.ts"],"names":[],"mappings":"
+{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/core/usageV2.ts"],"names":[],"mappings":"AAyDA,MAAM,UAAU,YAAY,CAAC,MAAqB;IAChD,OAAO,gBAAgB,MAAM,EAAE,CAAC;AAClC,CAAC"}
package/dist/esm/node/kafka.js
ADDED
@@ -0,0 +1,116 @@
+import { checkServerIdentity } from "node:tls";
+import { CompressionTypes, Kafka } from "kafkajs";
+import { compress, decompress } from "lz4js";
+// CompressionCodecs is not exported properly in kafkajs. Source: https://github.com/tulios/kafkajs/issues/1391
+import KafkaJS from "kafkajs";
+const { CompressionCodecs } = KafkaJS;
+/**
+ * Creates a KafkaProducer which opens a persistent TCP connection.
+ * This class is thread-safe so your service should re-use one instance.
+ *
+ * Example:
+ * ```ts
+ * kafka = new KafkaProducer(...)
+ * await kafka.send(topic, events)
+ * // Non-blocking:
+ * // void kafka.send(topic, events).catch((e) => console.error(e))
+ * ```
+ */
+export class KafkaProducer {
+    constructor(config) {
+        Object.defineProperty(this, "kafka", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "producer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "compression", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        const { producerName, environment, shouldCompress = true, username, password, } = config;
+        this.kafka = new Kafka({
+            clientId: `${producerName}-${environment}`,
+            brokers: environment === "production"
+                ? ["warpstream.thirdweb.xyz:9092"]
+                : ["warpstream-dev.thirdweb.xyz:9092"],
+            ssl: {
+                checkServerIdentity(hostname, cert) {
+                    return checkServerIdentity(hostname.toLowerCase(), cert);
+                },
+            },
+            sasl: {
+                mechanism: "plain",
+                username,
+                password,
+            },
+        });
+        if (shouldCompress) {
+            this.compression = CompressionTypes.LZ4;
+            CompressionCodecs[CompressionTypes.LZ4] = () => ({
+                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                compress: (encoder) => {
+                    const compressed = compress(encoder.buffer);
+                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                    return Buffer.from(compressed);
+                },
+                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                decompress: (buffer) => {
+                    const decompressed = decompress(buffer);
+                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
+                    return Buffer.from(decompressed);
+                },
+            });
+        }
+        else {
+            this.compression = CompressionTypes.None;
+        }
+    }
+    /**
+     * Send messages to a Kafka topic.
+     * This method may throw. To call this non-blocking:
+     * @param topic
+     * @param messages
+     * @param configOverrides
+     */
+    async send(topic, messages,
+    /**
+     * Reference: https://kafka.js.org/docs/producing#producing-messages
+     */
+    options) {
+        if (!this.producer) {
+            this.producer = this.kafka.producer({
+                allowAutoTopicCreation: options?.allowAutoTopicCreation ?? false,
+            });
+            await this.producer.connect();
+        }
+        await this.producer.send({
+            topic,
+            messages: messages.map((m) => ({
+                value: JSON.stringify(m),
+            })),
+            compression: this.compression,
+            acks: options?.acks ?? -1, // Default: All brokers must acknowledge
+            timeout: options?.timeout ?? 10_000, // Default: 10 seconds
+        });
+    }
+    /**
+     * Disconnects KafkaProducer.
+     * Useful when shutting down the service to flush in-flight events.
+     */
+    async disconnect() {
+        if (this.producer) {
+            await this.producer.disconnect();
+            this.producer = null;
+        }
+    }
+}
+//# sourceMappingURL=kafka.js.map
package/dist/esm/node/kafka.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"kafka.js","sourceRoot":"","sources":["../../../src/node/kafka.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,UAAU,CAAC;AAC/C,OAAO,EAAE,gBAAgB,EAAE,KAAK,EAAiB,MAAM,SAAS,CAAC;AACjE,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAE7C,+GAA+G;AAC/G,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,MAAM,EAAE,iBAAiB,EAAE,GAAG,OAAO,CAAC;AAEtC;;;;;;;;;;;GAWG;AACH,MAAM,OAAO,aAAa;IAKxB,YAAY,MAgBX;QApBO;;;;;WAAa;QACb;;;;mBAA4B,IAAI;WAAC;QACjC;;;;;WAA8B;QAmBpC,MAAM,EACJ,YAAY,EACZ,WAAW,EACX,cAAc,GAAG,IAAI,EACrB,QAAQ,EACR,QAAQ,GACT,GAAG,MAAM,CAAC;QAEX,IAAI,CAAC,KAAK,GAAG,IAAI,KAAK,CAAC;YACrB,QAAQ,EAAE,GAAG,YAAY,IAAI,WAAW,EAAE;YAC1C,OAAO,EACL,WAAW,KAAK,YAAY;gBAC1B,CAAC,CAAC,CAAC,8BAA8B,CAAC;gBAClC,CAAC,CAAC,CAAC,kCAAkC,CAAC;YAC1C,GAAG,EAAE;gBACH,mBAAmB,CAAC,QAAQ,EAAE,IAAI;oBAChC,OAAO,mBAAmB,CAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,IAAI,CAAC,CAAC;gBAC3D,CAAC;aACF;YACD,IAAI,EAAE;gBACJ,SAAS,EAAE,OAAO;gBAClB,QAAQ;gBACR,QAAQ;aACT;SACF,CAAC,CAAC;QAEH,IAAI,cAAc,EAAE,CAAC;YACnB,IAAI,CAAC,WAAW,GAAG,gBAAgB,CAAC,GAAG,CAAC;YAExC,iBAAiB,CAAC,gBAAgB,CAAC,GAAG,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC;gBAC/C,wEAAwE;gBACxE,QAAQ,EAAE,CAAC,OAA2B,EAAE,EAAE;oBACxC,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;oBAC5C,wEAAwE;oBACxE,OAAO,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACjC,CAAC;gBACD,wEAAwE;gBACxE,UAAU,EAAE,CAAC,MAAc,EAAE,EAAE;oBAC7B,MAAM,YAAY,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;oBACxC,wEAAwE;oBACxE,OAAO,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACnC,CAAC;aACF,CAAC,CAAC;QACL,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,WAAW,GAAG,gBAAgB,CAAC,IAAI,CAAC;QAC3C,CAAC;IACH,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,IAAI,CACR,KAAa,EACb,QAAmC;IACnC;;OAEG;IACH,OAIC;QAED,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;YACnB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAClC,sBAAsB,EAAE,OAAO,EAAE,sBAAsB,IAAI,KAAK;aACjE,CAAC,CAAC;YACH,MAAM,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC;QAChC,CAAC;QAED,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC;YACvB,KAAK;YACL,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;gBAC7B,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC;aACzB,CAAC,CAAC;YACH,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,IAAI,EAAE,OAAO,EAAE,IAAI,IAAI,CAAC,CAAC,EAAE,wCAAwC;YACnE,OAAO,EAAE,OAAO,EAAE,OAAO,IAAI,MAAM,EAAE,sBAAsB;SAC5D,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,UAAU;QACd,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE,CAAC;YACjC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;QACvB,CAAC;IACH,CAAC;CACF"}
package/dist/esm/node/usageV2.js
CHANGED
@@ -1,8 +1,6 @@
 import { randomUUID } from "node:crypto";
-import {
-import {
-import { compress, decompress } from "lz4js";
-import { getTopicName } from "../core/usageV2.js";
+import { getTopicName, } from "../core/usageV2.js";
+import { KafkaProducer } from "./kafka.js";
 /**
  * Creates a UsageV2Producer which opens a persistent TCP connection.
  * This class is thread-safe so your service should re-use one instance.
@@ -10,136 +8,61 @@ import { getTopicName } from "../core/usageV2.js";
  * Example:
  * ```ts
  * usageV2 = new UsageV2Producer(..)
- * await usageV2.init()
  * await usageV2.sendEvents(events)
  * // Non-blocking:
- * // void usageV2.sendEvents(events).catch(console.error)
+ * // void usageV2.sendEvents(events).catch((e) => console.error(e))
  * ```
  */
 export class UsageV2Producer {
     constructor(config) {
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "kafkaProducer", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
-        Object.defineProperty(this, "producer", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: null
-        });
        Object.defineProperty(this, "topic", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
-
-
-
-
-
+        this.kafkaProducer = new KafkaProducer({
+            producerName: config.producerName,
+            environment: config.environment,
+            shouldCompress: config.shouldCompress,
+            username: config.username,
+            password: config.password,
         });
-
-        this.kafka = new Kafka({
-            clientId: `${producerName}-${environment}`,
-            brokers: environment === "production"
-                ? ["warpstream.thirdweb.xyz:9092"]
-                : ["warpstream-dev.thirdweb.xyz:9092"],
-            ssl: {
-                checkServerIdentity(hostname, cert) {
-                    return checkServerIdentity(hostname.toLowerCase(), cert);
-                },
-            },
-            sasl: {
-                mechanism: "plain",
-                username,
-                password,
-            },
-        });
-        this.topic = getTopicName(productName);
-        this.compression = shouldCompress
-            ? CompressionTypes.LZ4
-            : CompressionTypes.None;
-    }
-    /**
-     * Connect the producer.
-     * This must be called before calling `sendEvents()`.
-     */
-    async init(configOverrides) {
-        if (this.compression === CompressionTypes.LZ4) {
-            CompressionCodecs[CompressionTypes.LZ4] = () => ({
-                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                compress: (encoder) => {
-                    const compressed = compress(encoder.buffer);
-                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                    return Buffer.from(compressed);
-                },
-                // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                decompress: (buffer) => {
-                    const decompressed = decompress(buffer);
-                    // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-                    return Buffer.from(decompressed);
-                },
-            });
-        }
-        this.producer = this.kafka.producer({
-            allowAutoTopicCreation: false,
-            ...configOverrides,
-        });
-        await this.producer.connect();
+        this.topic = getTopicName(config.source);
     }
     /**
      * Send usageV2 events.
      * This method may throw. To call this non-blocking:
-     *
-     * ```ts
-     * usageV2 = new UsageV2Producer(...)
-     * void usageV2.sendEvents(events).catch(console.error)
-     *
-     * @param events - The events to send.
+     * @param events
      */
     async sendEvents(events,
     /**
     * Reference: https://kafka.js.org/docs/producing#producing-messages
     */
-
-
-
-
-
-
-
-
-
-
-
-                ? event.team_id.slice(5)
-                : event.team_id,
-            };
-        });
-        await this.producer.send({
-            topic: this.topic,
-            messages: parsedEvents.map((event) => ({
-                value: JSON.stringify(event),
-            })),
-            acks: -1, // All brokers must acknowledge
-            timeout: 10_000, // 10 seconds
-            compression: this.compression,
-            ...configOverrides,
-        });
+    options) {
+        const parsedEvents = events.map((event) => ({
+            ...event,
+            id: event.id ?? randomUUID(),
+            created_at: event.created_at ?? new Date(),
+            // Remove the "team_" prefix, if any.
+            team_id: event.team_id.startsWith("team_")
+                ? event.team_id.slice(5)
+                : event.team_id,
+        }));
+        await this.kafkaProducer.send(this.topic, parsedEvents, options);
     }
     /**
      * Disconnects UsageV2Producer.
      * Useful when shutting down the service to flush in-flight events.
      */
     async disconnect() {
-
-        await this.producer.disconnect();
-        this.producer = null;
-    }
+        await this.kafkaProducer.disconnect();
     }
 }
 //# sourceMappingURL=usageV2.js.map
package/dist/esm/node/usageV2.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/node/usageV2.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,
{"version":3,"file":"usageV2.js","sourceRoot":"","sources":["../../../src/node/usageV2.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAGL,YAAY,GACb,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC;AAE3C;;;;;;;;;;;GAWG;AACH,MAAM,OAAO,eAAe;IAI1B,YAAY,MAoBX;QAvBO;;;;;WAA6B;QAC7B;;;;;WAAc;QAuBpB,IAAI,CAAC,aAAa,GAAG,IAAI,aAAa,CAAC;YACrC,YAAY,EAAE,MAAM,CAAC,YAAY;YACjC,WAAW,EAAE,MAAM,CAAC,WAAW;YAC/B,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,QAAQ,EAAE,MAAM,CAAC,QAAQ;SAC1B,CAAC,CAAC;QACH,IAAI,CAAC,KAAK,GAAG,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAC3C,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,UAAU,CACd,MAAsB;IACtB;;OAEG;IACH,OAIC;QAED,MAAM,YAAY,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;YAC1C,GAAG,KAAK;YACR,EAAE,EAAE,KAAK,CAAC,EAAE,IAAI,UAAU,EAAE;YAC5B,UAAU,EAAE,KAAK,CAAC,UAAU,IAAI,IAAI,IAAI,EAAE;YAC1C,qCAAqC;YACrC,OAAO,EAAE,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC;gBACxC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;gBACxB,CAAC,CAAC,KAAK,CAAC,OAAO;SAClB,CAAC,CAAC,CAAC;QACJ,MAAM,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,YAAY,EAAE,OAAO,CAAC,CAAC;IACnE,CAAC;IAED;;;OAGG;IACH,KAAK,CAAC,UAAU;QACd,MAAM,IAAI,CAAC,aAAa,CAAC,UAAU,EAAE,CAAC;IACxC,CAAC;CACF"}
package/dist/types/core/usageV2.d.ts
CHANGED
@@ -1,4 +1,5 @@
-import type { ServiceName } from "
+import type { ServiceName } from "./services.js";
+export type UsageV2Source = ServiceName | "sdk";
 export interface UsageV2Event {
     /**
      * A unique identifier for the event. Defaults to a random UUID.
@@ -51,5 +52,5 @@ export interface UsageV2Event {
      */
     [key: string]: boolean | number | string | Date | null | undefined;
 }
-export declare function getTopicName(
+export declare function getTopicName(source: UsageV2Source): string;
 //# sourceMappingURL=usageV2.d.ts.map
package/dist/types/core/usageV2.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"usageV2.d.ts","sourceRoot":"","sources":["../../../src/core/usageV2.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,
{"version":3,"file":"usageV2.d.ts","sourceRoot":"","sources":["../../../src/core/usageV2.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAEjD,MAAM,MAAM,aAAa,GAAG,WAAW,GAAG,KAAK,CAAC;AAEhD,MAAM,WAAW,YAAY;IAC3B;;;OAGG;IACH,EAAE,CAAC,EAAE,GAAG,MAAM,IAAI,MAAM,IAAI,MAAM,IAAI,MAAM,IAAI,MAAM,EAAE,CAAC;IACzD;;OAEG;IACH,UAAU,CAAC,EAAE,IAAI,CAAC;IAClB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB;;;OAGG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,IAAI,GAAG,IAAI,GAAG,SAAS,CAAC;CACpE;AAED,wBAAgB,YAAY,CAAC,MAAM,EAAE,aAAa,UAEjD"}
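The type-level change mirrors the runtime one: getTopicName now takes a UsageV2Source (any ServiceName, or "sdk") rather than a product name, and topics keep the `usage_v2.raw_<source>` format. A small sketch of the consuming side; the import path is an assumption:

```ts
import { getTopicName, type UsageV2Source } from "@thirdweb-dev/service-utils"; // path assumed

const source: UsageV2Source = "sdk";
const topic = getTopicName(source); // "usage_v2.raw_sdk"
```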
package/dist/types/node/kafka.d.ts
ADDED
@@ -0,0 +1,55 @@
+/**
+ * Creates a KafkaProducer which opens a persistent TCP connection.
+ * This class is thread-safe so your service should re-use one instance.
+ *
+ * Example:
+ * ```ts
+ * kafka = new KafkaProducer(...)
+ * await kafka.send(topic, events)
+ * // Non-blocking:
+ * // void kafka.send(topic, events).catch((e) => console.error(e))
+ * ```
+ */
+export declare class KafkaProducer {
+    private kafka;
+    private producer;
+    private compression;
+    constructor(config: {
+        /**
+         * A descriptive name for your service. Example: "storage-server"
+         */
+        producerName: string;
+        /**
+         * The environment the service is running in.
+         */
+        environment: "development" | "production";
+        /**
+         * Whether to compress the events.
+         */
+        shouldCompress?: boolean;
+        username: string;
+        password: string;
+    });
+    /**
+     * Send messages to a Kafka topic.
+     * This method may throw. To call this non-blocking:
+     * @param topic
+     * @param messages
+     * @param configOverrides
+     */
+    send(topic: string, messages: Record<string, unknown>[],
+    /**
+     * Reference: https://kafka.js.org/docs/producing#producing-messages
+     */
+    options?: {
+        acks?: number;
+        timeout?: number;
+        allowAutoTopicCreation?: boolean;
+    }): Promise<void>;
+    /**
+     * Disconnects KafkaProducer.
+     * Useful when shutting down the service to flush in-flight events.
+     */
+    disconnect(): Promise<void>;
+}
+//# sourceMappingURL=kafka.d.ts.map
package/dist/types/node/kafka.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"kafka.d.ts","sourceRoot":"","sources":["../../../src/node/kafka.ts"],"names":[],"mappings":"AAQA;;;;;;;;;;;GAWG;AACH,qBAAa,aAAa;IACxB,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,QAAQ,CAAyB;IACzC,OAAO,CAAC,WAAW,CAAmB;gBAE1B,MAAM,EAAE;QAClB;;WAEG;QACH,YAAY,EAAE,MAAM,CAAC;QACrB;;WAEG;QACH,WAAW,EAAE,aAAa,GAAG,YAAY,CAAC;QAC1C;;WAEG;QACH,cAAc,CAAC,EAAE,OAAO,CAAC;QAEzB,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,MAAM,CAAC;KAClB;IAiDD;;;;;;OAMG;IACG,IAAI,CACR,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE;IACnC;;OAEG;IACH,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,EAAE,MAAM,CAAC;QACd,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,sBAAsB,CAAC,EAAE,OAAO,CAAC;KAClC,GACA,OAAO,CAAC,IAAI,CAAC;IAmBhB;;;OAGG;IACG,UAAU;CAMjB"}
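Per the declaration above, send() (and sendEvents(), which forwards to it) now accepts its per-call overrides directly instead of the old init(configOverrides) / spread-into-send pattern. A sketch of overriding them on a single call, reusing the hypothetical `kafka` instance from the earlier sketch; the `events` variable and the option values are illustrative:

```ts
// Wait for the leader broker only and fail faster than the 10s default;
// allowAutoTopicCreation can be convenient in local development.
await kafka.send("usage_v2.raw_sdk", events, {
  acks: 1,
  timeout: 5_000,
  allowAutoTopicCreation: true,
});
```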
package/dist/types/node/usageV2.d.ts
CHANGED
@@ -1,6 +1,4 @@
-import { type
-import type { ServiceName } from "../core/services.js";
-import { type UsageV2Event } from "../core/usageV2.js";
+import { type UsageV2Event, type UsageV2Source } from "../core/usageV2.js";
 /**
  * Creates a UsageV2Producer which opens a persistent TCP connection.
  * This class is thread-safe so your service should re-use one instance.
@@ -8,17 +6,14 @@ import { type UsageV2Event } from "../core/usageV2.js";
  * Example:
  * ```ts
  * usageV2 = new UsageV2Producer(..)
- * await usageV2.init()
  * await usageV2.sendEvents(events)
  * // Non-blocking:
- * // void usageV2.sendEvents(events).catch(console.error)
+ * // void usageV2.sendEvents(events).catch((e) => console.error(e))
  * ```
  */
 export declare class UsageV2Producer {
-    private
-    private producer;
+    private kafkaProducer;
     private topic;
-    private compression;
     constructor(config: {
         /**
          * A descriptive name for your service. Example: "storage-server"
@@ -29,9 +24,9 @@ export declare class UsageV2Producer {
         */
        environment: "development" | "production";
        /**
-         * The product
+         * The product where usage is coming from.
         */
-
+        source: UsageV2Source;
        /**
         * Whether to compress the events.
         */
@@ -39,28 +34,19 @@ export declare class UsageV2Producer {
        username: string;
        password: string;
     });
-    /**
-     * Connect the producer.
-     * This must be called before calling `sendEvents()`.
-     */
-    init(configOverrides?: ProducerConfig): Promise<void>;
     /**
      * Send usageV2 events.
      * This method may throw. To call this non-blocking:
-     *
-     * ```ts
-     * usageV2 = new UsageV2Producer(...)
-     * void usageV2.sendEvents(events).catch(console.error)
-     *
-     * @param events - The events to send.
+     * @param events
     */
     sendEvents(events: UsageV2Event[],
     /**
     * Reference: https://kafka.js.org/docs/producing#producing-messages
     */
-
+    options?: {
        acks?: number;
        timeout?: number;
+        allowAutoTopicCreation?: boolean;
     }): Promise<void>;
     /**
      * Disconnects UsageV2Producer.
package/dist/types/node/usageV2.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"usageV2.d.ts","sourceRoot":"","sources":["../../../src/node/usageV2.ts"],"names":[],"mappings":"
{"version":3,"file":"usageV2.d.ts","sourceRoot":"","sources":["../../../src/node/usageV2.ts"],"names":[],"mappings":"AACA,OAAO,EACL,KAAK,YAAY,EACjB,KAAK,aAAa,EAEnB,MAAM,oBAAoB,CAAC;AAG5B;;;;;;;;;;;GAWG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,KAAK,CAAS;gBAEV,MAAM,EAAE;QAClB;;WAEG;QACH,YAAY,EAAE,MAAM,CAAC;QACrB;;WAEG;QACH,WAAW,EAAE,aAAa,GAAG,YAAY,CAAC;QAC1C;;WAEG;QACH,MAAM,EAAE,aAAa,CAAC;QACtB;;WAEG;QACH,cAAc,CAAC,EAAE,OAAO,CAAC;QAEzB,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,MAAM,CAAC;KAClB;IAWD;;;;OAIG;IACG,UAAU,CACd,MAAM,EAAE,YAAY,EAAE;IACtB;;OAEG;IACH,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,EAAE,MAAM,CAAC;QACd,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,sBAAsB,CAAC,EAAE,OAAO,CAAC;KAClC,GACA,OAAO,CAAC,IAAI,CAAC;IAahB;;;OAGG;IACG,UAAU;CAGjB"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@thirdweb-dev/service-utils",
-  "version": "0.8.
+  "version": "0.8.5-nightly-ccf6329810c1426fadc4483660e154ae38a2927b-20250205043811",
   "type": "module",
   "main": "dist/cjs/index.js",
   "module": "dist/esm/index.js",
@@ -47,12 +47,12 @@
   "dependencies": {
     "aws4fetch": "1.0.20",
     "kafkajs": "2.2.4",
-    "lz4js": "
+    "lz4js": "0.2.0",
     "zod": "3.24.1"
   },
   "devDependencies": {
     "@cloudflare/workers-types": "4.20250129.0",
-    "@types/lz4js": "
+    "@types/lz4js": "0.2.1",
     "@types/node": "22.13.0",
     "typescript": "5.7.3",
     "vitest": "3.0.4"