samanbayaka 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,181 @@
1
+ /*esm-loading.mjs*/
2
+
3
+ import { fileURLToPath, pathToFileURL } from "url"
4
+ import * as path from "path"
5
+ import * as fs from "fs"
6
+ import { createRequire } from 'module'
7
+
8
+ import chokidar from "chokidar"
9
+
10
+ import {isHotReloadEnabled, serviceName, nodeUid} from '#hUti/node-argv.mjs'
11
+
12
+ const __filename = fileURLToPath(import.meta.url)
13
+ const __dirname = path.dirname(__filename)
14
+ const require = createRequire(import.meta.url)
15
+
16
+
17
// Directory holding the *.mjs configuration modules, taken from the
// SBK_CONFIG_PATH environment variable (undefined when not exported).
const CONFIG_PATH = process.env.SBK_CONFIG_PATH // Environment variable of configuration path

/**
 * Project absolute path (two directory levels above this file).
 */
const ABSOLUTE_PATH = path
    .join(
        __dirname,
        "..", // back to parent directory
        "..", // back to parent directory
    )
28
+
29
/**
 * Load a configuration object by dynamically importing an ESM module
 * from the directory given by the SBK_CONFIG_PATH environment variable.
 * @param {string} fileName - configuration module base name (without ".mjs")
 * @return {Promise<object>} the module's default export
 * @throws {Error} when SBK_CONFIG_PATH is not set
 */
export const getConfig = async (fileName) => {
    // Fail fast with a clear message instead of a cryptic
    // "path must be of type string" TypeError from path.join.
    if (!CONFIG_PATH) {
        throw new Error("SBK_CONFIG_PATH environment variable is not set")
    }
    const configFile = path.join(CONFIG_PATH, `${fileName}.mjs`)
    return (await import(pathToFileURL(configFile).href)).default
}
43
+
44
+
45
/**
 * Server configuration loaded from "<SBK_CONFIG_PATH>/server.mjs"
 * (top-level await: module loading blocks until the config is resolved).
 * @type {object}
 */
const CONFIG = await getConfig('server')
50
+
51
+
52
/**
 * Load the moleculer service for this process and (optionally) hot-reload it
 * on file changes. Services are *.mjs modules imported dynamically here
 * because moleculer-repl only supports cjs files.
 * @param {object} broker - moleculer ServiceBroker instance
 * @return {Promise<void>}
 */
export const loadServices = async (broker) => {
    /**
     * Service directory derived from the server configuration.
     * @type {string}
     */
    const SERVICES_DIR = path.join(ABSOLUTE_PATH, CONFIG.servicesDir, serviceName)

    const filePath = path.join(SERVICES_DIR, CONFIG.serviceMain)

    // Strip everything before "/<servicesDir>/" so log messages show a short,
    // project-relative path. Falls back to the raw path when the pattern
    // does not match (the original indexed .match() without a null guard).
    const toRelativePath = (p) => {
        const regex = new RegExp(`(\\/${CONFIG.servicesDir}\\/.*)$`)
        const match = pathToFileURL(p).href.match(regex)
        return match ? match[1] : p
    }

    /**
     * Dynamic import with cache busting so a fresh copy is always loaded.
     * @type {object}
     */
    const module = await import(`${pathToFileURL(filePath)}?t=${Date.now()}`)
    const schema = module.default || module

    if (!schema || typeof schema !== "object" || !schema.name || schema.name != serviceName) {
        // Log before stopping the broker so the message is not lost, and exit
        // non-zero: the original exited 0, masking the failure from
        // supervisors/orchestrators.
        broker.logger.fatal('❖ SBK', `Service start failed: invalid service name or schema in ${toRelativePath(filePath)}`)
        await broker.stop()
        process.exit(1)
    }

    broker.createService(schema)
    broker.logger.info('❖ SBK', `Hot-reload: ${isHotReloadEnabled ? "ON" : "OFF"}`)

    const watcher = chokidar.watch(SERVICES_DIR, {
        ignoreInitial: true,
        awaitWriteFinish: {
            stabilityThreshold: 200, // wait 200ms after last change
            pollInterval: 50
        },
        ignored: [
            /(^|[\/\\])\../, // dot-files
            /~$/,            // editor backup files
            /\.swp$/,        // vim swap files
            /\.tmp$/         // temporary files
        ]
    })

    /**
     * Handle a watched-file event: hot-reload or remove the service when the
     * main service file changed, otherwise warn that a restart is required.
     * @param {string} wfPath - path of the changed file
     * @param {boolean} isUnlink - true when the file was deleted
     */
    const handleWatchFile = async (wfPath, isUnlink) => {
        const rfPath = toRelativePath(wfPath)

        // Guard clauses: anything that cannot be hot-reloaded needs a restart.
        if (!isHotReloadEnabled || path.basename(filePath) !== path.basename(wfPath)) {
            broker.logger.warn('❖ SBK', `Changes detected in the file "${rfPath}". Restart required to apply updates.`)
            return
        }

        const oldServiceFullName = (broker.services.find(s => s.name === serviceName))?.fullName
        const oldService = broker.getLocalService(oldServiceFullName)

        if (isUnlink) {
            // Main file deleted: remove the already loaded service if it exists.
            // (The original compared `event === "unlink"` — `event` was never
            // defined, which threw a ReferenceError on every delete.)
            if (oldService) {
                await broker.destroyService(oldService)
                broker.logger.info('❖ SBK', `Service '${oldServiceFullName}' removed.`)
            }
            return
        }

        try {
            /**
             * Dynamic import with cache busting.
             */
            const serviceModule = await import(`${pathToFileURL(filePath)}?t=${Date.now()}`)
            const newSchema = serviceModule.default || serviceModule

            if (!newSchema || typeof newSchema !== "object" || !newSchema.name || newSchema.name != serviceName) {
                broker.logger.error('❖ SBK', `Hot-reloading terminated due to an invalid service name or schema for ${filePath}`)
                return
            }

            // Remove the already loaded service before re-creating it.
            if (oldService) {
                await broker.destroyService(oldService)
            }

            broker.createService(newSchema)
            const newServiceFullName = newSchema.version === undefined
                ? newSchema.name
                : `${newSchema.version}.${newSchema.name}`
            broker.logger.info('❖ SBK', `Service '${newServiceFullName}' reloaded.`)
        } catch (err) {
            // Use serviceName here: the new schema's full name may never have
            // been computed when the import itself fails (the original
            // referenced an out-of-scope const and threw a ReferenceError).
            broker.logger.error('❖ SBK', `Hot-reloading of service '${serviceName}' failed..`, err)
        }
    }

    watcher.on("add", (fp) => handleWatchFile(fp, false))
    watcher.on("change", (fp) => handleWatchFile(fp, false))
    watcher.on("unlink", (fp) => handleWatchFile(fp, true))
}
170
+
171
+
172
/**
 * Assets path
 */
export const assetPath = {
    // Project root joined with the configured asset directory.
    rootFolder: path.join(ABSOLUTE_PATH, CONFIG.assetPath),
}
@@ -0,0 +1,235 @@
1
+ /*RedpandaTransporter.mjs*/
2
+ import { Transporters } from "moleculer"
3
+ import { Kafka, logLevel } from "kafkajs"
4
+
5
+ /**
6
+ * Custom KafkaJS-backed Moleculer Transporter
7
+ *
8
+ * Topics used (prefix = MOL by default):
9
+ * - <prefix>.<CMD> (fanout/control)
10
+ * - <prefix>.<CMD>.<nodeID> (directed)
11
+ *
12
+ * Examples:
13
+ * - MOL.DISCOVER, MOL.INFO, MOL.HEARTBEAT, MOL.PING, MOL.PONG
14
+ * - MOL.REQ (requests)
15
+ * - MOL.RES.<nodeID> (responses back to caller node)
16
+ */
17
+ export default class RedpandaTransporter extends Transporters.Base {
18
+ constructor(opts) {
19
+ super(opts)
20
+
21
+ this.opts = {
22
+ ...{
23
+ // visit for documentation of KafkaJs https://kafka.js.org/docs/getting-started
24
+
25
+ // KafkaJs Client options.
26
+ client: {
27
+ // A logical identifier of an application. Can be used by brokers to apply quotas or trace requests to a specific application. Example: booking-events-processor.
28
+ clientId: "moleculer-kafkajs",
29
+ brokers: ["redpanda1:9092", "redpanda2:9092"],
30
+
31
+ // // The ssl option can be used to configure the TLS sockets. The options are passed directly to tls.connect and used to create the TLS Secure Context, all options are accepted.
32
+ // ssl: {
33
+ // rejectUnauthorized: false,
34
+ // ca: [fs.readFileSync('/my/custom/ca.crt', 'utf-8')],
35
+ // key: fs.readFileSync('/my/custom/client-key.pem', 'utf-8'),
36
+ // cert: fs.readFileSync('/my/custom/client-cert.pem', 'utf-8')
37
+ // },
38
+
39
+ // // Kafka has support for using SASL to authenticate clients. The sasl option can be used to configure the authentication mechanism. Currently, KafkaJS supports PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, and AWS mechanisms.
40
+ // authenticationTimeout: 10000,
41
+ // reauthenticationThreshold: 10000,
42
+ // sasl: {
43
+ // mechanism: 'plain', // scram-sha-256 or scram-sha-512
44
+ // username: 'my-username',
45
+ // password: 'my-password'
46
+ // },
47
+
48
+ // Time in milliseconds to wait for a successful connection. The default value is: 1000.
49
+ connectionTimeout: 3000,
50
+
51
+ // Time in milliseconds to wait for a successful request. The default value is: 30000.
52
+ requestTimeout: 25000,
53
+
54
+ // The request timeout can be disabled by setting enforceRequestTimeout to false.
55
+ enforceRequestTimeout: false,
56
+
57
+
58
+ // The retry option can be used to set the configuration of the retry mechanism, which is used to retry connections and API calls to Kafka (when using producers or consumers).
59
+ retry: {
60
+ initialRetryTime: 100,
61
+ retries: 8
62
+ },
63
+
64
+ // KafkaJS has a built-in STDOUT logger which outputs JSON. It also accepts a custom log creator which allows you to integrate your favorite logger library. There are 5 log levels available: NOTHING, ERROR, WARN, INFO, and DEBUG. INFO is configured by default.
65
+ logLevel: logLevel.INFO,
66
+ },
67
+
68
+ // KafkaJs Producer options.
69
+ producer: {
70
+ // If enabled producer will ensure each message is written exactly once. Acks must be set to -1 ("all"). Retries will default to MAX_SAFE_INTEGER.
71
+ idempotent: true,
72
+
73
+ // Max number of requests that may be in progress at any time. If falsey then no limit.
74
+ maxInFlightRequests: 1,
75
+ },
76
+ customPartitioner: undefined,
77
+
78
+ // KafkaJs ConsumerGroup options.
79
+ consumer: {
80
+ controlGroupId: null,
81
+ dataGroupId: null,
82
+ },
83
+
84
+ // Advanced options for `send`.
85
+ publish: {
86
+ partition: 0,
87
+ attributes: 0
88
+ },
89
+
90
+ prefix: "MOL",
91
+ },
92
+ ...opts
93
+ }
94
+
95
+ this.client = null
96
+ this.producer = null
97
+ this.consumer = null
98
+ }
99
+
100
+
101
+ /**
102
+ * Init transporter
103
+ */
104
+ init(...args) {
105
+ super.init(...args)
106
+ this.prefix = this.opts.prefix || "MOL"
107
+
108
+ this.opts.client.logLevel =
109
+ typeof this.opts.client.logLevel == "string"
110
+ ? logLevel[this.opts.client.logLevel]
111
+ : this.opts.client.logLevel
112
+
113
+ this.logger.info(`RedpandaTransporter initialized with topic prefix=${this.prefix}`)
114
+ }
115
+
116
+
117
+ /**
118
+ * Connect to Kafka broker
119
+ */
120
+ async connect() {
121
+ this.client = new Kafka(this.opts.client)
122
+
123
+ this.producer = this.client.producer()
124
+ await this.producer.connect()
125
+
126
+ // Consumer group is important for balancing
127
+ const groupId = this.opts.groupId || this.nodeID //.split(".")[0]
128
+
129
+ this.consumer = this.client.consumer({ groupId })
130
+ await this.consumer.connect()
131
+
132
+ this.connected = true
133
+ this.logger.info("Kafka transporter connected.")
134
+
135
+ return this.onConnected(false)
136
+ }
137
+
138
+ /**
139
+ * Disconnect from Kafka
140
+ */
141
+ async disconnect() {
142
+ if (this.producer) await this.producer.disconnect()
143
+ if (this.consumer) await this.consumer.disconnect()
144
+ this.connected = false
145
+ this.logger.info("Kafka transporter disconnected.")
146
+ }
147
+
148
+ /**
149
+ * Subscribe to a topic (for non-balanced commands)
150
+ * @param {String} cmd
151
+ * @param {String} nodeID
152
+ */
153
+ async subscribe(cmd, nodeID) {
154
+ if (!this.connected) {
155
+ throw new BrokerDisconnectedError()
156
+ }
157
+
158
+ const topic = this.getTopicName(cmd, nodeID)
159
+
160
+ await this.consumer.subscribe({ topic, fromBeginning: false })
161
+
162
+ if (!this.consumerRunning) {
163
+ this.consumerRunning = true
164
+ // Single run handler for all topics
165
+ await this.consumer.run({
166
+ eachMessage: async ({ topic, message }) => {
167
+ try {
168
+ // Extract command from topic name
169
+ const parts = topic.split(".")
170
+ const cmd = parts[1] // e.g., INFO, HEARTBEAT, REQ, RES, EVENT
171
+ this.receive(cmd, message.value)
172
+ } catch (err) {
173
+ this.logger.error("Kafka message handling failed", err)
174
+ }
175
+ },
176
+ })
177
+ }
178
+
179
+ this.logger.debug(`Subscribed to topic '${topic}'`)
180
+ }
181
+
182
+ /**
183
+ * Subscribe to balanced requests
184
+ * @param {String} action
185
+ */
186
+ async subscribeBalancedRequest(action) {
187
+ const topic = `${this.prefix}.REQB.${action}`
188
+ await this.consumer.subscribe({ topic, fromBeginning: false })
189
+
190
+ await this.consumer.run({
191
+ eachMessage: async ({ topic, message }) => {
192
+ this.receive("REQ", message.value)
193
+ },
194
+ })
195
+
196
+ this.logger.debug(`Subscribed to balanced request '${action}'`)
197
+ }
198
+
199
+ /**
200
+ * Subscribe to balanced events
201
+ * @param {String} event
202
+ * @param {String} group
203
+ */
204
+ async subscribeBalancedEvent(event, group) {
205
+ const topic = `${this.prefix}.EVTB.${group}.${event}`
206
+ await this.consumer.subscribe({ topic, fromBeginning: false })
207
+
208
+ await this.consumer.run({
209
+ eachMessage: async ({ topic, message }) => {
210
+ this.receive("EVENT", message.value)
211
+ },
212
+ })
213
+
214
+ this.logger.debug(`Subscribed to balanced event '${event}' (group: ${group})`)
215
+ }
216
+
217
+ /**
218
+ * Send a message to Kafka
219
+ * @param {String} topic
220
+ * @param {Buffer} data
221
+ * @param {Object} meta
222
+ */
223
+ async send(topic, data, meta) {
224
+ if (!this.connected) {
225
+ throw new BrokerDisconnectedError()
226
+ }
227
+
228
+ await this.producer.send({
229
+ topic,
230
+ messages: [{ value: data }],
231
+ })
232
+
233
+ this.logger.debug(`Message published to '${topic}'`, meta?.packet?.type)
234
+ }
235
+ }
@@ -0,0 +1,230 @@
1
+ /*RedpandaTransporter.mjs*/
2
+ import { Transporters } from "moleculer"
3
+ import { Kafka, logLevel } from "kafkajs"
4
+
5
/**
 * Custom KafkaJS-backed Moleculer Transporter
 *
 * Topics used (prefix = MOL by default):
 *   - <prefix>.<CMD>           (fanout/control)
 *   - <prefix>.<CMD>.<nodeID>  (directed)
 *
 * Examples:
 *   - MOL.DISCOVER, MOL.INFO, MOL.HEARTBEAT, MOL.PING, MOL.PONG
 *   - MOL.REQ           (requests)
 *   - MOL.RES.<nodeID>  (responses back to caller node)
 *
 * NOTE(review): this chunk looks like an earlier/duplicate revision of the
 * RedpandaTransporter in the previous chunk — confirm which one the package
 * actually ships; keeping both is confusing.
 */
export default class RedpandaTransporter extends Transporters.Base {
    /**
     * @param {object} opts - transporter options, shallow-spread over the
     *   defaults below (a caller-provided `client`/`producer` object replaces
     *   the whole default object; it is not merged key-by-key).
     */
    constructor(opts) {
        super(opts)

        this.opts = {
            ...{
                // See KafkaJS documentation: https://kafka.js.org/docs/getting-started

                // KafkaJS client options.
                client: {
                    // Logical application identifier (brokers may use it for quotas/tracing).
                    clientId: "moleculer-kafkajs",
                    brokers: ["redpanda1:9092", "redpanda2:9092"],

                    // TLS (`ssl`) and SASL (`sasl`) options can be added here when
                    // needed; they are passed straight through to KafkaJS.

                    // Time in ms to wait for a successful connection (KafkaJS default: 1000).
                    connectionTimeout: 3000,

                    // Time in ms to wait for a successful request (KafkaJS default: 30000).
                    requestTimeout: 25000,

                    // Disable the request timeout entirely.
                    enforceRequestTimeout: false,

                    // Retry policy for connections and API calls.
                    retry: {
                        initialRetryTime: 100,
                        retries: 8
                    },

                    // Levels: NOTHING, ERROR, WARN, INFO (default), DEBUG.
                    logLevel: logLevel.INFO,
                },

                // KafkaJS producer options.
                producer: {
                    // Exactly-once writes; acks forced to -1, retries default to MAX_SAFE_INTEGER.
                    idempotent: true,

                    // Max number of in-flight requests; falsey means no limit.
                    maxInFlightRequests: 1,
                },
                customPartitioner: undefined,

                // KafkaJS consumer-group options.
                // NOTE(review): controlGroupId/dataGroupId are never read in this
                // class — connect() uses `opts.groupId` instead. Confirm intent.
                consumer: {
                    controlGroupId: null,
                    dataGroupId: null,
                },

                // Advanced options for `send`.
                // NOTE(review): `publish` is also never read by this implementation.
                publish: {
                    partition: 0,
                    attributes: 0
                },

                prefix: "MOL",
            },
            ...opts
        }

        this.client = null
        this.producer = null
        this.consumer = null
    }


    /**
     * Init transporter: caches the topic prefix and normalizes a string
     * `client.logLevel` (e.g. "INFO") to the kafkajs numeric enum.
     */
    init(...args) {
        super.init(...args)
        this.prefix = this.opts.prefix || "MOL"

        this.opts.client.logLevel =
            typeof this.opts.client.logLevel == "string"
                ? logLevel[this.opts.client.logLevel]
                : this.opts.client.logLevel

        this.logger.info(`RedpandaTransporter initialized with topic prefix=${this.prefix}`)
    }


    /**
     * Connect to the Kafka/Redpanda brokers: create the client, connect a
     * producer and a consumer, then notify Moleculer via onConnected.
     */
    async connect() {
        this.client = new Kafka(this.opts.client)

        this.producer = this.client.producer()
        await this.producer.connect()

        // Consumer group is important for balancing.
        const groupId = this.opts.groupId || this.nodeID
        this.consumer = this.client.consumer({ groupId })
        await this.consumer.connect()

        this.connected = true
        this.logger.info("Kafka transporter connected.")

        return this.onConnected(false)
    }

    /**
     * Disconnect producer and consumer from Kafka.
     */
    async disconnect() {
        if (this.producer) await this.producer.disconnect()
        if (this.consumer) await this.consumer.disconnect()
        this.connected = false
        this.logger.info("Kafka transporter disconnected.")
    }

    /**
     * Subscribe to a topic (for non-balanced commands).
     * NOTE(review): kafkajs allows consumer.run() to be called only once per
     * consumer and forbids subscribe() while the consumer is running —
     * Moleculer subscribes to many command topics, so every call after the
     * first will throw. Needs a single lazily-started run loop (see the
     * revision in the previous chunk).
     * @param {String} cmd
     * @param {String} nodeID
     */
    async subscribe(cmd, nodeID) {
        const topic = this.getTopicName(cmd, nodeID)

        await this.consumer.subscribe({ topic, fromBeginning: false })

        // Single run handler for all topics
        await this.consumer.run({
            eachMessage: async ({ topic, message }) => {
                try {
                    // Extract command from topic name
                    // NOTE(review): breaks if the configured prefix contains dots.
                    const parts = topic.split(".")
                    const cmd = parts[1] // e.g., INFO, HEARTBEAT, REQ, RES, EVENT
                    this.receive(cmd, message.value)
                } catch (err) {
                    this.logger.error("Kafka message handling failed", err)
                }
            },
        })

        this.logger.debug(`Subscribed to topic '${topic}'`)
    }

    /**
     * Subscribe to balanced requests.
     * NOTE(review): second consumer.run() call — see note on subscribe().
     * @param {String} action
     */
    async subscribeBalancedRequest(action) {
        const topic = `${this.prefix}.REQB.${action}`
        await this.consumer.subscribe({ topic, fromBeginning: false })

        await this.consumer.run({
            eachMessage: async ({ topic, message }) => {
                this.receive("REQ", message.value)
            },
        })

        this.logger.debug(`Subscribed to balanced request '${action}'`)
    }

    /**
     * Subscribe to balanced events.
     * NOTE(review): another consumer.run() call — see note on subscribe().
     * @param {String} event
     * @param {String} group
     */
    async subscribeBalancedEvent(event, group) {
        const topic = `${this.prefix}.EVTB.${group}.${event}`
        await this.consumer.subscribe({ topic, fromBeginning: false })

        await this.consumer.run({
            eachMessage: async ({ topic, message }) => {
                this.receive("EVENT", message.value)
            },
        })

        this.logger.debug(`Subscribed to balanced event '${event}' (group: ${group})`)
    }

    /**
     * Send a raw packet to a Kafka topic.
     * NOTE(review): BrokerDisconnectedError is not imported in this module —
     * throwing it raises a ReferenceError. Import it from moleculer's Errors
     * (or throw a plain Error) to get the intended behavior.
     * @param {String} topic
     * @param {Buffer} data
     * @param {Object} meta
     */
    async send(topic, data, meta) {
        if (!this.connected) {
            throw new BrokerDisconnectedError()
        }

        await this.producer.send({
            topic,
            messages: [{ value: data }],
        })

        this.logger.debug(`Message published to '${topic}'`, meta?.packet?.type)
    }
}
@@ -0,0 +1,8 @@
1
+ #check redpanda running status
2
+ $rpk cluster info
3
+
4
+ #view all topics
5
+ $rpk --brokers redpanda:9092 topic list
6
+
7
+ #delete all topics
8
+ $rpk topic list --brokers redpanda:9092 | tail -n +2 | grep -v '^__' | xargs -n1 rpk topic delete --brokers redpanda:9092