@opra/kafka 1.26.3 → 1.26.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,3 +1,26 @@
1
- # @opra/core
1
+ # @opra/kafka
2
2
 
3
- OPRA schema package.
3
+ [![NPM Version][npm-image]][npm-url]
4
+ [![NPM Downloads][downloads-image]][downloads-url]
5
+ [![CI Tests][ci-test-image]][ci-test-url]
6
+ [![Test Coverage][coveralls-image]][coveralls-url]
7
+
8
+
9
+ ## Support
10
+ You can report bugs and discuss features on the [GitHub issues](https://github.com/panates/opra/issues) page.
11
+
12
+ ## Node Compatibility
13
+ - node >= 20.x
14
+
15
+
16
+ ## License
17
+ Available under [MIT](LICENSE) license.
18
+
19
+ [npm-image]: https://img.shields.io/npm/v/@opra/kafka
20
+ [npm-url]: https://npmjs.org/package/@opra/kafka
21
+ [downloads-image]: https://img.shields.io/npm/dm/@opra/kafka.svg
22
+ [downloads-url]: https://npmjs.org/package/@opra/kafka
23
+ [ci-test-image]: https://github.com/panates/opra/actions/workflows/test.yml/badge.svg
24
+ [ci-test-url]: https://github.com/panates/opra/actions/workflows/test.yml
25
+ [coveralls-image]: https://coveralls.io/repos/github/panates/opra/badge.svg?branch=main
26
+ [coveralls-url]: https://coveralls.io/github/panates/opra?branch=main
@@ -2,6 +2,12 @@ import '@opra/core';
2
2
  import { KafkaAdapter } from '../kafka-adapter.js';
3
3
  declare module '@opra/common' {
4
4
  interface MQOperationDecorator {
5
+ /**
6
+ * Decorator for configuring Kafka-specific options for an MQ operation.
7
+ *
8
+ * @param config - The Kafka operation options or a resolver function.
9
+ * @returns The decorator instance.
10
+ */
5
11
  Kafka(config: KafkaAdapter.OperationOptions | (() => KafkaAdapter.OperationOptions | Promise<KafkaAdapter.OperationOptions>)): this;
6
12
  }
7
13
  }
@@ -1,7 +1,7 @@
1
1
  import '@opra/core';
2
2
  import { classes } from '@opra/common';
3
3
  import { KAFKA_OPERATION_METADATA, KAFKA_OPERATION_METADATA_RESOLVER, } from '../constants.js';
4
- /** Implementation **/
4
+ /* Implementation */
5
5
  classes.MQOperationDecoratorFactory.augment((decorator, decoratorChain) => {
6
6
  decorator.Kafka = (config) => {
7
7
  decoratorChain.push((_, target, propertyKey) => {
@@ -18,8 +18,9 @@ interface HandlerArguments {
18
18
  topics: (string | RegExp)[];
19
19
  }
20
20
  /**
21
- *
22
- * @class KafkaAdapter
21
+ * Adapter for integrating Kafka into the Opra platform.
22
+ * It manages Kafka consumers, handles message routing to controllers,
23
+ * and provides integration with the Opra execution context.
23
24
  */
24
25
  export declare class KafkaAdapter extends PlatformAdapter<KafkaAdapter.Events> {
25
26
  static readonly PlatformName = "kafka";
@@ -33,53 +34,107 @@ export declare class KafkaAdapter extends PlatformAdapter<KafkaAdapter.Events> {
33
34
  readonly platform = "kafka";
34
35
  readonly interceptors: (KafkaAdapter.InterceptorFunction | KafkaAdapter.IKafkaInterceptor)[];
35
36
  /**
37
+ * Initializes a new instance of the KafkaAdapter.
36
38
  *
37
- * @param document
38
- * @param config
39
- * @constructor
39
+ * @param document - The API document that defines the Kafka services and controllers.
40
+ * @param config - The configuration options for the Kafka adapter.
41
+ * @throws {@link TypeError} Throws if the document does not expose a Kafka API.
40
42
  */
41
43
  constructor(document: ApiDocument, config: KafkaAdapter.Config);
44
+ /**
45
+ * Gets the MQ API defined in the document.
46
+ */
42
47
  get api(): MQApi;
48
+ /**
49
+ * Gets the Kafka client instance.
50
+ */
43
51
  get kafka(): Kafka;
52
+ /**
53
+ * Gets the configuration scope for the adapter.
54
+ */
44
55
  get scope(): string | undefined;
56
+ /**
57
+ * Gets the current status of the adapter.
58
+ */
45
59
  get status(): KafkaAdapter.Status;
60
+ /**
61
+ * Initializes the Kafka client and all defined consumers.
62
+ * This method is called automatically by {@link start} if not already initialized.
63
+ */
46
64
  initialize(): Promise<void>;
47
65
  /**
48
- * Starts the service
66
+ * Starts the Kafka adapter, connecting all consumers and subscribing to topics.
67
+ *
68
+ * @throws {@link Error} Throws if a consumer fails to connect or subscribe.
49
69
  */
50
70
  start(): Promise<void>;
51
71
  /**
52
- * Closes all connections and stops the service
72
+ * Closes all active Kafka consumers and clears internal caches.
73
+ * This effectively stops the service and returns it to the idle state.
53
74
  */
54
75
  close(): Promise<void>;
76
+ /**
77
+ * Retrieves a controller instance by its path.
78
+ *
79
+ * @param controllerPath - The unique path of the controller.
80
+ * @returns The controller instance or undefined if not found.
81
+ */
55
82
  getControllerInstance<T>(controllerPath: string): T | undefined;
56
83
  /**
84
+ * Resolves the configuration for a specific MQ operation.
57
85
  *
58
- * @param controller
59
- * @param instance
60
- * @param operation
86
+ * @param controller - The MQ controller containing the operation.
87
+ * @param instance - The actual instance of the controller class.
88
+ * @param operation - The MQ operation being configured.
89
+ * @returns A promise that resolves to the operation configuration or undefined if not applicable.
61
90
  * @protected
62
91
  */
63
92
  protected _getOperationConfig(controller: MQController, instance: any, operation: MQOperation): Promise<OperationConfig | undefined>;
64
93
  /**
65
- *
94
+ * Creates and prepares all consumers defined in the API document.
66
95
  * @protected
67
96
  */
68
97
  protected _createAllConsumers(): Promise<void>;
69
98
  /**
99
+ * Creates a Kafka consumer for the given handler arguments if it doesn't already exist.
70
100
  *
71
- * @param args
101
+ * @param args - The handler arguments containing configuration and state.
102
+ * @throws {@link Error} Throws if a self-consumer for the group ID already exists.
72
103
  * @protected
73
104
  */
74
105
  protected _createConsumer(args: HandlerArguments): Promise<void>;
75
106
  /**
107
+ * Creates a message handler for a specific MQ operation.
108
+ * This handler processes incoming Kafka messages, decodes them, and executes the operation.
76
109
  *
77
- * @param args
110
+ * @param args - The handler arguments for the operation.
78
111
  * @protected
79
112
  */
80
113
  protected _createHandler(args: HandlerArguments): void;
114
+ /**
115
+ * Emits an error event and logs the error.
116
+ *
117
+ * @param error - The error that occurred.
118
+ * @param context - The optional Kafka execution context.
119
+ * @protected
120
+ */
81
121
  protected _emitError(error: any, context?: KafkaContext): void;
122
+ /**
123
+ * Wraps multiple exceptions into an array of {@link OpraException}.
124
+ *
125
+ * @param exceptions - The array of exceptions to wrap.
126
+ * @returns An array of wrapped exceptions.
127
+ * @protected
128
+ */
82
129
  protected _wrapExceptions(exceptions: any[]): OpraException[];
130
+ /**
131
+ * Creates a KafkaJS log creator that redirects logs to the provided logger.
132
+ *
133
+ * @param logger - The logger instance to use.
134
+ * @param logExtra - Whether to include additional metadata in the logs.
135
+ * @returns A log creator function for KafkaJS.
136
+ * @protected
137
+ */
83
138
  protected _createLogCreator(logger: ILogger, logExtra?: boolean): ({ namespace, level, log }: {
84
139
  namespace: any;
85
140
  level: any;
@@ -87,11 +142,20 @@ export declare class KafkaAdapter extends PlatformAdapter<KafkaAdapter.Events> {
87
142
  }) => any;
88
143
  }
89
144
  /**
90
- * @namespace KafkaAdapter
145
+ * Namespace for KafkaAdapter related types and interfaces.
91
146
  */
92
147
  export declare namespace KafkaAdapter {
148
+ /**
149
+ * Callback function for the next middleware in the interceptor chain.
150
+ */
93
151
  type NextCallback = () => Promise<any>;
152
+ /**
153
+ * Represents the operational status of the Kafka adapter.
154
+ */
94
155
  type Status = 'idle' | 'starting' | 'started';
156
+ /**
157
+ * Configuration options for the Kafka adapter.
158
+ */
95
159
  interface Config extends PlatformAdapter.Options {
96
160
  client: StrictOmit<KafkaConfig, 'logCreator' | 'logLevel'>;
97
161
  consumers?: Record<string, StrictOmit<ConsumerConfig, 'groupId'>>;
@@ -105,25 +169,41 @@ export declare namespace KafkaAdapter {
105
169
  interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
106
170
  logExtra?: boolean;
107
171
  }
172
+ /**
173
+ * Options for a specific Kafka operation.
174
+ */
108
175
  interface OperationOptions {
109
176
  /**
110
- * groupId or ConsumerConfig
177
+ * Group ID or consumer configuration.
111
178
  */
112
179
  consumer?: string | ConsumerConfig;
180
+ /**
181
+ * Subscription options for the topic.
182
+ */
113
183
  subscribe?: {
114
184
  fromBeginning?: boolean;
115
185
  };
116
186
  }
117
187
  /**
118
- * @type InterceptorFunction
188
+ * Type definition for a Kafka interceptor function.
119
189
  */
120
190
  type InterceptorFunction = IKafkaInterceptor['intercept'];
121
191
  /**
122
- * @interface IKafkaInterceptor
192
+ * Interface for a Kafka interceptor class.
123
193
  */
124
- type IKafkaInterceptor = {
194
+ interface IKafkaInterceptor {
195
+ /**
196
+ * Intercepts the execution of a Kafka operation.
197
+ *
198
+ * @param context - The Kafka execution context.
199
+ * @param next - The next function in the chain.
200
+ * @returns A promise that resolves to the result of the operation.
201
+ */
125
202
  intercept(context: KafkaContext, next: NextCallback): Promise<any>;
126
- };
203
+ }
204
+ /**
205
+ * Event definitions for the Kafka adapter.
206
+ */
127
207
  interface Events {
128
208
  error: [error: Error, context: KafkaContext | undefined];
129
209
  finish: [context: KafkaContext, result: any];
package/kafka-adapter.js CHANGED
@@ -10,8 +10,9 @@ const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
10
10
  const kGroupId = Symbol('kGroupId');
11
11
  const noOp = () => undefined;
12
12
  /**
13
- *
14
- * @class KafkaAdapter
13
+ * Adapter for integrating Kafka into the Opra platform.
14
+ * It manages Kafka consumers, handles message routing to controllers,
15
+ * and provides integration with the Opra execution context.
15
16
  */
16
17
  export class KafkaAdapter extends PlatformAdapter {
17
18
  static PlatformName = 'kafka';
@@ -24,10 +25,11 @@ export class KafkaAdapter extends PlatformAdapter {
24
25
  platform = KafkaAdapter.PlatformName;
25
26
  interceptors;
26
27
  /**
28
+ * Initializes a new instance of the KafkaAdapter.
27
29
  *
28
- * @param document
29
- * @param config
30
- * @constructor
30
+ * @param document - The API document that defines the Kafka services and controllers.
31
+ * @param config - The configuration options for the Kafka adapter.
32
+ * @throws {@link TypeError} Throws if the document does not expose a Kafka API.
31
33
  */
32
34
  constructor(document, config) {
33
35
  super(config);
@@ -49,18 +51,34 @@ export class KafkaAdapter extends PlatformAdapter {
49
51
  process.once(type, () => this.close());
50
52
  });
51
53
  }
54
+ /**
55
+ * Gets the MQ API defined in the document.
56
+ */
52
57
  get api() {
53
58
  return this.document.getMqApi();
54
59
  }
60
+ /**
61
+ * Gets the Kafka client instance.
62
+ */
55
63
  get kafka() {
56
64
  return this._kafka;
57
65
  }
66
+ /**
67
+ * Gets the configuration scope for the adapter.
68
+ */
58
69
  get scope() {
59
70
  return this._config.scope;
60
71
  }
72
+ /**
73
+ * Gets the current status of the adapter.
74
+ */
61
75
  get status() {
62
76
  return this._status;
63
77
  }
78
+ /**
79
+ * Initializes the Kafka client and all defined consumers.
80
+ * This method is called automatically by {@link start} if not already initialized.
81
+ */
64
82
  async initialize() {
65
83
  if (this._kafka)
66
84
  return;
@@ -73,7 +91,9 @@ export class KafkaAdapter extends PlatformAdapter {
73
91
  await this._createAllConsumers();
74
92
  }
75
93
  /**
76
- * Starts the service
94
+ * Starts the Kafka adapter, connecting all consumers and subscribing to topics.
95
+ *
96
+ * @throws {@link Error} Throws if a consumer fails to connect or subscribe.
77
97
  */
78
98
  async start() {
79
99
  if (this.status !== 'idle')
@@ -81,14 +101,14 @@ export class KafkaAdapter extends PlatformAdapter {
81
101
  await this.initialize();
82
102
  this._status = 'starting';
83
103
  try {
84
- /** Connect all consumers */
104
+ /* Connect all consumers */
85
105
  for (const consumer of this._consumers.values()) {
86
106
  await consumer.connect().catch(e => {
87
107
  this._emitError(e);
88
108
  throw e;
89
109
  });
90
110
  }
91
- /** Subscribe to channels */
111
+ /* Subscribe to channels */
92
112
  for (const args of this._handlerArgs) {
93
113
  const { consumer, operation, operationConfig } = args;
94
114
  args.topics = Array.isArray(operation.channel)
@@ -105,7 +125,7 @@ export class KafkaAdapter extends PlatformAdapter {
105
125
  });
106
126
  this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
107
127
  }
108
- /** Start consumer listeners */
128
+ /* Start consumer listeners */
109
129
  const topicMap = new Map();
110
130
  for (const consumer of this._consumers.values()) {
111
131
  const groupId = consumer[kGroupId];
@@ -126,7 +146,7 @@ export class KafkaAdapter extends PlatformAdapter {
126
146
  }
127
147
  topicMap.set(topicCacheKey, handlerArgsArray);
128
148
  }
129
- /** Iterate and call all matching handlers */
149
+ /* Iterate and call all matching handlers */
130
150
  for (const args of handlerArgsArray) {
131
151
  try {
132
152
  await args.handler(payload);
@@ -150,7 +170,8 @@ export class KafkaAdapter extends PlatformAdapter {
150
170
  }
151
171
  }
152
172
  /**
153
- * Closes all connections and stops the service
173
+ * Closes all active Kafka consumers and clears internal caches.
174
+ * This effectively stops the service and returns it to the idle state.
154
175
  */
155
176
  async close() {
156
177
  await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.disconnect()));
@@ -158,15 +179,23 @@ export class KafkaAdapter extends PlatformAdapter {
158
179
  this._controllerInstances.clear();
159
180
  this._status = 'idle';
160
181
  }
182
+ /**
183
+ * Retrieves a controller instance by its path.
184
+ *
185
+ * @param controllerPath - The unique path of the controller.
186
+ * @returns The controller instance or undefined if not found.
187
+ */
161
188
  getControllerInstance(controllerPath) {
162
189
  const controller = this.api.findController(controllerPath);
163
190
  return controller && this._controllerInstances.get(controller);
164
191
  }
165
192
  /**
193
+ * Resolves the configuration for a specific MQ operation.
166
194
  *
167
- * @param controller
168
- * @param instance
169
- * @param operation
195
+ * @param controller - The MQ controller containing the operation.
196
+ * @param instance - The actual instance of the controller class.
197
+ * @param operation - The MQ operation being configured.
198
+ * @returns A promise that resolves to the operation configuration or undefined if not applicable.
170
199
  * @protected
171
200
  */
172
201
  async _getOperationConfig(controller, instance, operation) {
@@ -217,7 +246,7 @@ export class KafkaAdapter extends PlatformAdapter {
217
246
  return operationConfig;
218
247
  }
219
248
  /**
220
- *
249
+ * Creates and prepares all consumers defined in the API document.
221
250
  * @protected
222
251
  */
223
252
  async _createAllConsumers() {
@@ -228,7 +257,7 @@ export class KafkaAdapter extends PlatformAdapter {
228
257
  if (!instance)
229
258
  continue;
230
259
  this._controllerInstances.set(controller, instance);
231
- /** Build HandlerData array */
260
+ /* Build HandlerData array */
232
261
  for (const operation of controller.operations.values()) {
233
262
  const operationConfig = await this._getOperationConfig(controller, instance, operation);
234
263
  if (!operationConfig)
@@ -246,14 +275,16 @@ export class KafkaAdapter extends PlatformAdapter {
246
275
  this._handlerArgs.push(args);
247
276
  }
248
277
  }
249
- /** Initialize consumers */
278
+ /* Initialize consumers */
250
279
  for (const args of this._handlerArgs) {
251
280
  await this._createConsumer(args);
252
281
  }
253
282
  }
254
283
  /**
284
+ * Creates a Kafka consumer for the given handler arguments if it doesn't already exist.
255
285
  *
256
- * @param args
286
+ * @param args - The handler arguments containing configuration and state.
287
+ * @throws {@link Error} Throws if a self-consumer for the group ID already exists.
257
288
  * @protected
258
289
  */
259
290
  async _createConsumer(args) {
@@ -262,7 +293,7 @@ export class KafkaAdapter extends PlatformAdapter {
262
293
  if (consumer && operationConfig.selfConsumer) {
263
294
  throw new Error(`Operation consumer for groupId (${operationConfig.consumer.groupId}) already exists`);
264
295
  }
265
- /** Create consumers */
296
+ /* Create consumers */
266
297
  if (!consumer) {
267
298
  consumer = this.kafka.consumer(operationConfig.consumer);
268
299
  consumer[kGroupId] = operationConfig.consumer.groupId;
@@ -271,16 +302,18 @@ export class KafkaAdapter extends PlatformAdapter {
271
302
  args.consumer = consumer;
272
303
  }
273
304
  /**
305
+ * Creates a message handler for a specific MQ operation.
306
+ * This handler processes incoming Kafka messages, decodes them, and executes the operation.
274
307
  *
275
- * @param args
308
+ * @param args - The handler arguments for the operation.
276
309
  * @protected
277
310
  */
278
311
  _createHandler(args) {
279
312
  const { controller, instance, operation } = args;
280
- /** Prepare parsers */
313
+ /* Prepare parsers */
281
314
  const parseKey = RequestParser.STRING;
282
315
  const parsePayload = RequestParser.STRING;
283
- /** Prepare decoders */
316
+ /* Prepare decoders */
284
317
  const decodeKey = operation.generateKeyCodec('decode', {
285
318
  scope: this.scope,
286
319
  ignoreReadonlyFields: true,
@@ -305,17 +338,17 @@ export class KafkaAdapter extends PlatformAdapter {
305
338
  let payload;
306
339
  const headers = {};
307
340
  try {
308
- /** Parse and decode `key` */
341
+ /* Parse and decode `key` */
309
342
  if (message.key) {
310
343
  const s = parseKey(message.key);
311
344
  key = decodeKey(s);
312
345
  }
313
- /** Parse and decode `payload` */
346
+ /* Parse and decode `payload` */
314
347
  if (message.value != null) {
315
348
  const s = parsePayload(message.value);
316
349
  payload = decodePayload(s);
317
350
  }
318
- /** Parse and decode `headers` */
351
+ /* Parse and decode `headers` */
319
352
  if (message.headers) {
320
353
  for (const [k, v] of Object.entries(message.headers)) {
321
354
  const header = operation.findHeader(k);
@@ -328,7 +361,7 @@ export class KafkaAdapter extends PlatformAdapter {
328
361
  this._emitError(e);
329
362
  return;
330
363
  }
331
- /** Create context */
364
+ /* Create context */
332
365
  const context = new KafkaContext({
333
366
  __adapter: this,
334
367
  __contDef: controller,
@@ -346,7 +379,7 @@ export class KafkaAdapter extends PlatformAdapter {
346
379
  });
347
380
  await this.emitAsync('execute', context);
348
381
  try {
349
- /** Call operation handler */
382
+ /* Call operation handler */
350
383
  const result = await operationHandler.call(instance, context);
351
384
  await this.emitAsync('finish', context, result);
352
385
  }
@@ -355,6 +388,13 @@ export class KafkaAdapter extends PlatformAdapter {
355
388
  }
356
389
  };
357
390
  }
391
+ /**
392
+ * Emits an error event and logs the error.
393
+ *
394
+ * @param error - The error that occurred.
395
+ * @param context - The optional Kafka execution context.
396
+ * @protected
397
+ */
358
398
  _emitError(error, context) {
359
399
  Promise.resolve()
360
400
  .then(async () => {
@@ -379,12 +419,27 @@ export class KafkaAdapter extends PlatformAdapter {
379
419
  })
380
420
  .catch(noOp);
381
421
  }
422
+ /**
423
+ * Wraps multiple exceptions into an array of {@link OpraException}.
424
+ *
425
+ * @param exceptions - The array of exceptions to wrap.
426
+ * @returns An array of wrapped exceptions.
427
+ * @protected
428
+ */
382
429
  _wrapExceptions(exceptions) {
383
430
  const wrappedErrors = exceptions.map(e => e instanceof OpraException ? e : new OpraException(e));
384
431
  if (!wrappedErrors.length)
385
432
  wrappedErrors.push(new OpraException('Internal Server Error'));
386
433
  return wrappedErrors;
387
434
  }
435
+ /**
436
+ * Creates a KafkaJS log creator that redirects logs to the provided logger.
437
+ *
438
+ * @param logger - The logger instance to use.
439
+ * @param logExtra - Whether to include additional metadata in the logs.
440
+ * @returns A log creator function for KafkaJS.
441
+ * @protected
442
+ */
388
443
  _createLogCreator(logger, logExtra) {
389
444
  return ({ namespace, level, log }) => {
390
445
  const { message, error, ...extra } = log;
@@ -22,8 +22,9 @@ export declare class KafkaContext extends ExecutionContext implements AsyncEvent
22
22
  readonly heartbeat: () => Promise<void>;
23
23
  readonly pause: () => void;
24
24
  /**
25
- * Constructor
26
- * @param init the context options
25
+ * Initializes a new instance of the KafkaContext.
26
+ *
27
+ * @param init - The initialization options for the context.
27
28
  */
28
29
  constructor(init: KafkaContext.Initiator);
29
30
  }
package/kafka-context.js CHANGED
@@ -13,8 +13,9 @@ export class KafkaContext extends ExecutionContext {
13
13
  heartbeat;
14
14
  pause;
15
15
  /**
16
- * Constructor
17
- * @param init the context options
16
+ * Initializes a new instance of the KafkaContext.
17
+ *
18
+ * @param init - The initialization options for the context.
18
19
  */
19
20
  constructor(init) {
20
21
  super({
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@opra/kafka",
3
- "version": "1.26.3",
3
+ "version": "1.26.4",
4
4
  "description": "Opra Kafka adapter",
5
5
  "author": "Panates",
6
6
  "license": "MIT",
@@ -10,8 +10,8 @@
10
10
  "valgen": "^6.0.3"
11
11
  },
12
12
  "peerDependencies": {
13
- "@opra/common": "^1.26.3",
14
- "@opra/core": "^1.26.3",
13
+ "@opra/common": "^1.26.4",
14
+ "@opra/core": "^1.26.4",
15
15
  "kafkajs": ">=2.2.4 <3"
16
16
  },
17
17
  "exports": {
@@ -1,2 +1,8 @@
1
1
  import type { RequestParseFunction } from '../request-parser.js';
2
+ /**
3
+ * A parser function that returns the input buffer as-is.
4
+ *
5
+ * @param buffer - The buffer to be returned.
6
+ * @returns The original buffer.
7
+ */
2
8
  export declare const binaryParser: RequestParseFunction;
@@ -1,3 +1,9 @@
1
+ /**
2
+ * A parser function that returns the input buffer as-is.
3
+ *
4
+ * @param buffer - The buffer to be returned.
5
+ * @returns The original buffer.
6
+ */
1
7
  export const binaryParser = function (buffer) {
2
8
  return buffer;
3
9
  };
@@ -1,2 +1,8 @@
1
1
  import type { RequestParseFunction } from '../request-parser.js';
2
+ /**
3
+ * A parser function that converts the input buffer to a string.
4
+ *
5
+ * @param buffer - The buffer to be converted.
6
+ * @returns The buffer content as a string.
7
+ */
2
8
  export declare const stringParser: RequestParseFunction;
@@ -1,3 +1,9 @@
1
+ /**
2
+ * A parser function that converts the input buffer to a string.
3
+ *
4
+ * @param buffer - The buffer to be converted.
5
+ * @returns The buffer content as a string.
6
+ */
1
7
  export const stringParser = function (buffer) {
2
8
  return buffer.toString();
3
9
  };
@@ -1,2 +1,11 @@
1
+ /**
2
+ * Type definition for a function that parses a Buffer into a specific format.
3
+ *
4
+ * @param buffer - The buffer to be parsed.
5
+ * @returns The parsed result.
6
+ */
1
7
  export type RequestParseFunction = (buffer: Buffer) => any;
8
+ /**
9
+ * A registry of pre-defined request parsing functions.
10
+ */
2
11
  export declare const RequestParser: Record<string, RequestParseFunction>;
package/request-parser.js CHANGED
@@ -1,6 +1,11 @@
1
1
  import { binaryParser } from './parsers/binary.parser.js';
2
2
  import { stringParser } from './parsers/string.parser.js';
3
+ /**
4
+ * A registry of pre-defined request parsing functions.
5
+ */
3
6
  export const RequestParser = {
7
+ /* Parses the buffer as raw binary data (returns the buffer as-is). */
4
8
  BINARY: binaryParser,
9
+ /* Parses the buffer as a UTF-8 string. */
5
10
  STRING: stringParser,
6
11
  };