@crossdelta/cloudevents 0.5.7 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +20 -0
  2. package/dist/index.cjs +1602 -0
  3. package/dist/index.d.mts +812 -0
  4. package/dist/index.d.ts +812 -9
  5. package/dist/index.js +1574 -6
  6. package/package.json +20 -18
  7. package/dist/adapters/cloudevents/cloudevents.d.ts +0 -14
  8. package/dist/adapters/cloudevents/cloudevents.js +0 -58
  9. package/dist/adapters/cloudevents/index.d.ts +0 -8
  10. package/dist/adapters/cloudevents/index.js +0 -7
  11. package/dist/adapters/cloudevents/parsers/binary-mode.d.ts +0 -5
  12. package/dist/adapters/cloudevents/parsers/binary-mode.js +0 -32
  13. package/dist/adapters/cloudevents/parsers/pubsub.d.ts +0 -5
  14. package/dist/adapters/cloudevents/parsers/pubsub.js +0 -54
  15. package/dist/adapters/cloudevents/parsers/raw-event.d.ts +0 -5
  16. package/dist/adapters/cloudevents/parsers/raw-event.js +0 -17
  17. package/dist/adapters/cloudevents/parsers/structured-mode.d.ts +0 -5
  18. package/dist/adapters/cloudevents/parsers/structured-mode.js +0 -18
  19. package/dist/adapters/cloudevents/types.d.ts +0 -29
  20. package/dist/adapters/cloudevents/types.js +0 -1
  21. package/dist/domain/contract-helper.d.ts +0 -63
  22. package/dist/domain/contract-helper.js +0 -61
  23. package/dist/domain/discovery.d.ts +0 -24
  24. package/dist/domain/discovery.js +0 -201
  25. package/dist/domain/handler-factory.d.ts +0 -49
  26. package/dist/domain/handler-factory.js +0 -169
  27. package/dist/domain/index.d.ts +0 -6
  28. package/dist/domain/index.js +0 -4
  29. package/dist/domain/types.d.ts +0 -108
  30. package/dist/domain/types.js +0 -6
  31. package/dist/domain/validation.d.ts +0 -37
  32. package/dist/domain/validation.js +0 -53
  33. package/dist/infrastructure/errors.d.ts +0 -53
  34. package/dist/infrastructure/errors.js +0 -54
  35. package/dist/infrastructure/index.d.ts +0 -4
  36. package/dist/infrastructure/index.js +0 -2
  37. package/dist/infrastructure/logging.d.ts +0 -18
  38. package/dist/infrastructure/logging.js +0 -27
  39. package/dist/middlewares/cloudevents-middleware.d.ts +0 -171
  40. package/dist/middlewares/cloudevents-middleware.js +0 -276
  41. package/dist/middlewares/index.d.ts +0 -1
  42. package/dist/middlewares/index.js +0 -1
  43. package/dist/processing/dlq-safe.d.ts +0 -82
  44. package/dist/processing/dlq-safe.js +0 -108
  45. package/dist/processing/handler-cache.d.ts +0 -36
  46. package/dist/processing/handler-cache.js +0 -94
  47. package/dist/processing/idempotency.d.ts +0 -51
  48. package/dist/processing/idempotency.js +0 -112
  49. package/dist/processing/index.d.ts +0 -4
  50. package/dist/processing/index.js +0 -4
  51. package/dist/processing/validation.d.ts +0 -41
  52. package/dist/processing/validation.js +0 -48
  53. package/dist/publishing/index.d.ts +0 -2
  54. package/dist/publishing/index.js +0 -2
  55. package/dist/publishing/nats.publisher.d.ts +0 -19
  56. package/dist/publishing/nats.publisher.js +0 -115
  57. package/dist/publishing/pubsub.publisher.d.ts +0 -39
  58. package/dist/publishing/pubsub.publisher.js +0 -84
  59. package/dist/transports/nats/base-message-processor.d.ts +0 -44
  60. package/dist/transports/nats/base-message-processor.js +0 -118
  61. package/dist/transports/nats/index.d.ts +0 -5
  62. package/dist/transports/nats/index.js +0 -5
  63. package/dist/transports/nats/jetstream-consumer.d.ts +0 -217
  64. package/dist/transports/nats/jetstream-consumer.js +0 -367
  65. package/dist/transports/nats/jetstream-message-processor.d.ts +0 -9
  66. package/dist/transports/nats/jetstream-message-processor.js +0 -32
  67. package/dist/transports/nats/nats-consumer.d.ts +0 -36
  68. package/dist/transports/nats/nats-consumer.js +0 -84
  69. package/dist/transports/nats/nats-message-processor.d.ts +0 -11
  70. package/dist/transports/nats/nats-message-processor.js +0 -32
package/dist/index.js CHANGED
@@ -1,6 +1,1574 @@
1
- export { parseEventFromContext } from './adapters/cloudevents';
2
- export { createContract, eventSchema, extractTypeFromSchema, handleEvent } from './domain';
3
- export { clearHandlerCache, cloudEvents } from './middlewares';
4
- export { checkAndMarkProcessed, createInMemoryIdempotencyStore, getDefaultIdempotencyStore, resetDefaultIdempotencyStore, } from './processing/idempotency';
5
- export * from './publishing';
6
- export { consumeJetStreamEvents, consumeJetStreamStreams, consumeJetStreams, consumeNatsEvents, ensureJetStreamStream, ensureJetStreamStreams, ensureJetStreams, } from './transports/nats';
1
+ import { HTTP } from 'cloudevents';
2
+ import { existsSync } from 'fs';
3
+ import { dirname, join } from 'path';
4
+ import { fileURLToPath } from 'url';
5
+ import { glob } from 'glob';
6
+ import { z } from 'zod';
7
+ import { StringCodec, connect } from 'nats';
8
+
9
// esbuild bundler runtime helpers.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wraps a module initializer so it runs at most once: the first call invokes
// the single function stored in `fn`, clears `fn` to 0, and memoizes `res`;
// later calls just return the memoized `res`.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Defines lazy, enumerable getters on `target` for every key in `all`
// (esbuild's export helper for live bindings).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
18
+
19
// src/infrastructure/errors.ts
// Lazily-initialized error helpers; assigned when init_errors runs via __esm.
var handleErrorWithContext, createValidationError, createCloudEventParseError;
var init_errors = __esm({
  "src/infrastructure/errors.ts"() {
    // Serializes an error together with the request context (url, method,
    // headers) into a plain object, logs it to console.error, and returns it.
    handleErrorWithContext = (error, context, additionalInfo) => {
      const errorInfo = {
        message: error.message,
        stack: error.stack,
        url: context.req.url,
        method: context.req.method,
        headers: Object.fromEntries(context.req.raw.headers.entries()),
        ...additionalInfo
      };
      console.error("[Error Handler]", JSON.stringify(errorInfo, null, 2));
      return errorInfo;
    };
    // NOTE: these build plain error-shaped objects, not Error instances —
    // callers discriminate on the `type` field.
    createValidationError = (eventType, details) => ({
      type: "ValidationError",
      eventType,
      message: `Validation failed for event type: ${eventType}`,
      details
    });
    createCloudEventParseError = (message, cause) => ({
      type: "CloudEventParseError",
      message,
      cause
    });
  }
});
48
+
49
// src/infrastructure/logging.ts
var LOG_PREFIX, createLogger, logger;
var init_logging = __esm({
  "src/infrastructure/logging.ts"() {
    LOG_PREFIX = "cloudevents";
    // Builds a console-backed logger. `enabled` gates log/info/warn only;
    // error and debug always emit.
    createLogger = (enabled) => {
      const logWithArgs = (consoleFn, message, args) => {
        const formattedMessage = `[${LOG_PREFIX}] ${message}`;
        // Skip the second console argument entirely when no args were given,
        // to avoid printing a trailing "undefined".
        args !== void 0 ? consoleFn(formattedMessage, args) : consoleFn(formattedMessage);
      };
      return {
        log: (message, args) => enabled && logWithArgs(console.log, message, args),
        info: (message, args) => enabled && logWithArgs(console.info, message, args),
        warn: (message, error) => enabled && logWithArgs(console.warn, message, error),
        error: (message, error) => logWithArgs(console.error, message, error),
        debug: (message, args) => logWithArgs(console.debug, message, args)
      };
    };
    // Shared module-level logger with verbose levels enabled.
    logger = createLogger(true);
  }
});
70
+
71
// src/infrastructure/index.ts
// Barrel initializer: running it initializes the errors and logging modules.
var init_infrastructure = __esm({
  "src/infrastructure/index.ts"() {
    init_errors();
    init_logging();
  }
});
78
+
79
// src/adapters/cloudevents/parsers/structured-mode.ts
var structured_mode_exports = {};
__export(structured_mode_exports, {
  parseStructuredMode: () => parseStructuredMode
});
var parseStructuredMode;
var init_structured_mode = __esm({
  "src/adapters/cloudevents/parsers/structured-mode.ts"() {
    init_infrastructure();
    // Parses a structured-mode CloudEvent, where the whole JSON body IS the
    // event. Missing attributes fall back to placeholder defaults; returns
    // { event: null, cloudEventData } (no SDK event object in this mode).
    parseStructuredMode = (body) => {
      logger.info("Structured mode CloudEvent detected");
      const cloudEventData = {
        id: String(body.id || "unknown"),
        type: String(body.type || "unknown.event"),
        specversion: String(body.specversion || "1.0"),
        source: String(body.source || "//unknown"),
        subject: body.subject ? String(body.subject) : void 0,
        // FIX: previously `String(body.time) || fallback` — when body.time was
        // absent, String(undefined) produced the truthy string "undefined",
        // so the current-time fallback never applied.
        time: body.time ? String(body.time) : (/* @__PURE__ */ new Date()).toISOString(),
        data: body.data
      };
      return { event: null, cloudEventData };
    };
  }
});
103
+
104
// src/adapters/cloudevents/parsers/pubsub.ts
var pubsub_exports = {};
__export(pubsub_exports, {
  parsePubSubMessage: () => parsePubSubMessage
});
var parsePubSubMessage;
var init_pubsub = __esm({
  "src/adapters/cloudevents/parsers/pubsub.ts"() {
    init_infrastructure();
    // Converts a Google Pub/Sub push envelope into CloudEvent-shaped data.
    // The event type is resolved in priority order: message attribute
    // `eventType`, attribute `type`, then a `type` field inside the decoded
    // payload; otherwise the generic messagePublished type is kept.
    parsePubSubMessage = async (rawBody, headers) => {
      const body = rawBody;
      if (!body.message?.data) {
        throw new Error("Invalid PubSub message: missing message.data");
      }
      let eventType = "google.cloud.pubsub.topic.v1.messagePublished";
      let decodedData = body;
      try {
        // Pub/Sub push delivers the payload base64-encoded.
        const base64Data = body.message.data;
        const decodedString = Buffer.from(base64Data, "base64").toString("utf8");
        const parsedData = JSON.parse(decodedString);
        if (body.message.attributes?.eventType) {
          eventType = body.message.attributes.eventType;
          decodedData = parsedData;
        } else if (body.message.attributes?.type) {
          eventType = body.message.attributes.type;
          decodedData = parsedData;
        } else if (parsedData.type) {
          eventType = parsedData.type;
          decodedData = parsedData;
        } else {
          decodedData = parsedData;
        }
      } catch (error) {
        // Best-effort: on decode/parse failure keep the raw envelope and the
        // generic event type instead of failing the request.
        logger.warn("Failed to extract event type from PubSub message:", error);
      }
      const messageId = body.message.messageId || "unknown";
      // ce-* headers, if present on the push request, override the defaults.
      const eventSource = headers["ce-source"] || "//pubsub.googleapis.com";
      const subject = headers["ce-subject"];
      const publishTime = headers["ce-time"];
      const cloudEventData = {
        id: messageId,
        type: eventType,
        specversion: "1.0",
        source: eventSource,
        subject,
        time: publishTime || (/* @__PURE__ */ new Date()).toISOString(),
        data: decodedData
      };
      return { event: null, cloudEventData };
    };
  }
});
156
+
157
// src/adapters/cloudevents/parsers/binary-mode.ts
var binary_mode_exports = {};
__export(binary_mode_exports, {
  parseBinaryMode: () => parseBinaryMode
});
var parseBinaryMode;
var init_binary_mode = __esm({
  "src/adapters/cloudevents/parsers/binary-mode.ts"() {
    init_infrastructure();
    // Parses a binary-mode CloudEvent (attributes in ce-* headers, payload in
    // the body) using the CloudEvents SDK. On failure it throws a
    // CloudEventParseError-shaped plain object (not an Error instance).
    parseBinaryMode = async (headers, rawBody) => {
      // HTTP.toEvent expects string header values; drop anything else.
      const normalizedHeaders = {};
      Object.entries(headers).forEach(([key, value]) => {
        if (typeof value === "string") {
          normalizedHeaders[key] = value;
        }
      });
      const body = rawBody ?? "";
      try {
        const event = HTTP.toEvent({ headers: normalizedHeaders, body });
        // toEvent may return a batch; only the first event is used here.
        const finalEvent = Array.isArray(event) ? event[0] : event;
        const cloudEventData = {
          id: finalEvent.id,
          type: finalEvent.type,
          specversion: finalEvent.specversion,
          source: finalEvent.source,
          subject: finalEvent.subject,
          time: finalEvent.time || (/* @__PURE__ */ new Date()).toISOString(),
          data: finalEvent.data
        };
        return { event: finalEvent, cloudEventData };
      } catch (error) {
        logger.error("Binary mode parsing failed:", error);
        throw createCloudEventParseError(`Binary mode parsing failed: ${error}`);
      }
    };
  }
});
194
+
195
// src/adapters/cloudevents/parsers/raw-event.ts
var raw_event_exports = {};
__export(raw_event_exports, {
  parseRawEvent: () => parseRawEvent
});
var parseRawEvent;
var init_raw_event = __esm({
  "src/adapters/cloudevents/parsers/raw-event.ts"() {
    init_infrastructure();
    // Fallback parser: wraps an arbitrary POST body in a synthetic CloudEvent
    // envelope (id "raw-<timestamp>", source "//direct.post"). The body's own
    // `type` field is used when present.
    parseRawEvent = (body) => {
      logger.info("Creating CloudEvent wrapper for raw data");
      const cloudEventData = {
        id: `raw-${Date.now()}`,
        type: body.type || "unknown.event",
        specversion: "1.0",
        source: "//direct.post",
        subject: void 0,
        time: (/* @__PURE__ */ new Date()).toISOString(),
        data: body
      };
      return { event: null, cloudEventData };
    };
  }
});
219
+
220
// src/adapters/cloudevents/cloudevents.ts
init_infrastructure();
// True when any header name (case-insensitive) starts with "ce-".
var hasCloudEventHeaders = (headers) => Object.keys(headers).some((key) => key.toLowerCase().startsWith("ce-"));
var isRecord = (value) => typeof value === "object" && value !== null;
// Detects the incoming event encoding and dispatches to the matching parser:
// structured-mode JSON ("specversion" in body), Pub/Sub push ("message" in
// body), binary mode (ce-* headers), or the raw-body fallback. Parsers are
// loaded lazily through the bundler's __esm init thunks.
var parseEventFromContext = async (context) => {
  try {
    const headers = context.req.header();
    const rawBody = await context.req.text();
    let parsedBody;
    if (rawBody?.length) {
      try {
        parsedBody = JSON.parse(rawBody);
      } catch {
        // Non-JSON body: fall through to the raw-event parser below.
        parsedBody = void 0;
      }
    }
    const bodyObject = isRecord(parsedBody) ? parsedBody : void 0;
    if (bodyObject && "specversion" in bodyObject) {
      const { parseStructuredMode: parseStructuredMode2 } = await Promise.resolve().then(() => (init_structured_mode(), structured_mode_exports));
      return parseStructuredMode2(bodyObject);
    }
    if (bodyObject && "message" in bodyObject) {
      const { parsePubSubMessage: parsePubSubMessage2 } = await Promise.resolve().then(() => (init_pubsub(), pubsub_exports));
      return await parsePubSubMessage2(bodyObject, headers);
    }
    if (hasCloudEventHeaders(headers)) {
      const { parseBinaryMode: parseBinaryMode2 } = await Promise.resolve().then(() => (init_binary_mode(), binary_mode_exports));
      return await parseBinaryMode2(headers, rawBody);
    }
    const { parseRawEvent: parseRawEvent2 } = await Promise.resolve().then(() => (init_raw_event(), raw_event_exports));
    if (bodyObject) {
      return parseRawEvent2(bodyObject);
    }
    // Non-object or unparseable body: wrap the raw text.
    return parseRawEvent2({ raw: rawBody });
  } catch (error) {
    logger.error("Failed to parse event:", error);
    throw new Error(`Failed to parse CloudEvent: ${error instanceof Error ? error.message : "Unknown error"}`);
  }
};
259
+
260
// src/domain/contract-helper.ts
/**
 * Builds an event contract from the supplied options. When a channel is
 * configured, its subject defaults to the event type; the schema is mirrored
 * onto `_schema` for internal consumers. All other options pass through.
 */
function createContract(options) {
  let channel;
  if (options.channel) {
    const { stream, subject } = options.channel;
    channel = { stream, subject: subject ?? options.type };
  }
  return { ...options, _schema: options.schema, channel };
}
272
+
273
+ // src/domain/discovery.ts
274
+ init_infrastructure();
275
+
276
// src/domain/validation.ts
// Narrowing helpers for pulling a literal event type off a schema object
// without committing to Zod's internal types.
var hasShape = (schema) => {
  if (!("shape" in schema)) return false;
  const { shape } = schema;
  return shape !== null && shape !== void 0 && typeof shape === "object";
};
var hasTypeField = (shape) => {
  if (!("type" in shape)) return false;
  const field = shape.type;
  return field !== null && field !== void 0 && typeof field === "object" && "value" in field;
};
var extractTypeValue = (typeField) => {
  const { value } = typeField;
  return typeof value === "string" ? value : void 0;
};
var safeExtractType = (schema) => {
  if (hasShape(schema) && hasTypeField(schema.shape)) {
    return extractTypeValue(schema.shape.type);
  }
  return void 0;
};
// A handler is either a function carrying __eventarcMetadata (built by
// handleEvent) or a class whose prototype defines handle().
function isValidHandler(value) {
  if (typeof value !== "function") return false;
  const candidate = value;
  return Boolean(candidate.__eventarcMetadata || candidate.prototype?.handle);
}
// Never throws: any access error on a malformed schema yields undefined.
var extractTypeFromSchema = (schema) => {
  try {
    return safeExtractType(schema);
  } catch {
    return void 0;
  }
};
297
+
298
// src/domain/discovery.ts
// Candidate roots for handler discovery: the process CWD, this module's own
// directory, and (when running from a bundle) the package root above /dist/.
// Only directories that exist on disk are returned.
var getSearchDirectories = () => {
  const directories = /* @__PURE__ */ new Set();
  directories.add(process.cwd());
  try {
    const currentDir = dirname(fileURLToPath(import.meta.url));
    directories.add(currentDir);
    if (currentDir.includes("/dist/")) {
      const pkgRoot = currentDir.split("/dist/")[0];
      if (pkgRoot) directories.add(pkgRoot);
    }
  } catch {
    // import.meta.url may be unusable in some runtimes; CWD still applies.
  }
  return [...directories].filter((dir) => existsSync(dir));
};
313
// Dynamically imports a module and returns its exports that pass
// isValidHandler (optionally narrowed by `filter`). Anonymous named exports
// get their export name attached so deduplication by name works. Any failure
// is logged verbosely and swallowed (returns []), so one bad module does not
// abort discovery.
var loadHandlers = async (filePath, filter) => {
  try {
    if (!existsSync(filePath)) {
      return [];
    }
    const module = await import(filePath);
    return Object.entries(module).filter(([name, handler]) => isValidHandler(handler) && (!filter || filter(name, handler))).map(([name, handler]) => {
      const HandlerClass = handler;
      // Give nameless (or placeholder-named) classes their export name, so
      // later name-based dedupe behaves predictably.
      const hasNoMeaningfulName = !HandlerClass.name || HandlerClass.name === "" || HandlerClass.name === "HandlerClass";
      const isNamedExport = name !== "default";
      if (hasNoMeaningfulName && isNamedExport) {
        return Object.defineProperty(HandlerClass, "name", { value: name, configurable: true });
      }
      return HandlerClass;
    });
  } catch (error) {
    // Deliberately verbose: import errors here are hard to diagnose, so dump
    // everything we can about the failure.
    logger?.warn(`Failed to load ${filePath}`);
    logger?.warn(`Error type: ${typeof error}`);
    logger?.warn(`Error constructor: ${error?.constructor?.name}`);
    if (error instanceof Error) {
      logger?.warn(`Error message: ${error.message}`);
      if (error.stack) {
        logger?.warn(`Stack trace:
${error.stack}`);
      }
    } else {
      logger?.warn(`Error value: ${String(error)}`);
      try {
        logger?.warn(`Error JSON: ${JSON.stringify(error, null, 2)}`);
      } catch {
        logger?.warn("(Error is not JSON-serializable)");
      }
    }
    return [];
  }
};
349
// Removes handlers whose name was already seen, keeping the first occurrence
// (order preserved). Uses a Set of seen names for O(n) instead of the
// previous O(n^2) acc.find scan on every element.
var deduplicateHandlers = (handlers) => {
  const seenNames = new Set();
  const unique = [];
  for (const handler of handlers) {
    if (!seenNames.has(handler.name)) {
      seenNames.add(handler.name);
      unique.push(handler);
    }
  }
  return unique;
};
356
var EXTENSION_FALLBACKS = ["js", "mjs", "cjs"];
/**
 * Expands a glob pattern targeting TypeScript sources into the ordered list
 * of patterns to try. Non-TS patterns pass through untouched. TS patterns
 * gain one variant per compiled-JS extension; `preferCompiled` decides
 * whether compiled variants come before or after the original pattern.
 */
var expandPatternVariants = (pattern, preferCompiled) => {
  if (!/\.tsx?\b/.test(pattern)) {
    return [pattern];
  }
  const compiledVariants = EXTENSION_FALLBACKS.map((ext) =>
    pattern.replace(/\.tsx?\b/g, `.${ext}`)
  );
  const ordered = preferCompiled
    ? [...compiledVariants, pattern]
    : [pattern, ...compiledVariants];
  return [...new Set(ordered)];
};
366
// Cheap pre-check before running glob: the base path must exist, and if the
// pattern begins with a literal directory segment (no glob metacharacters),
// that subdirectory must exist too.
var shouldScanDirectory = (basePath, variant) => {
  if (!existsSync(basePath)) {
    return false;
  }
  // Leading path segment containing none of *, {, [ — i.e. a literal dir.
  const dirPrefix = variant.match(/^([^*{[]+)\//)?.[1];
  if (dirPrefix) {
    const fullDirPath = join(basePath, dirPrefix);
    return existsSync(fullDirPath);
  }
  return true;
};
377
// Globs for handler files under `basePath`. Each pattern is tried both as
// given and prefixed with common source/output directories; TypeScript
// patterns are expanded to compiled-JS variants. Glob errors are silently
// skipped (missing dirs are expected); results are deduplicated.
var discoverFiles = async (pattern, basePath, preferCompiled) => {
  const isTestPattern = pattern.startsWith("test/");
  const prefixedPattern = isTestPattern ? pattern : `{src,dist,build,lib,out}/${pattern}`;
  // Order matters: in compiled environments, check output dirs first.
  const patterns = preferCompiled ? [prefixedPattern, pattern] : [pattern, prefixedPattern];
  const allFiles = [];
  for (const globPattern of patterns) {
    const variants = expandPatternVariants(globPattern, preferCompiled);
    for (const variant of variants) {
      try {
        if (!shouldScanDirectory(basePath, variant)) {
          continue;
        }
        const files = await glob(variant, {
          cwd: basePath,
          absolute: true,
          nodir: true,
          windowsPathsNoEscape: true
        });
        // Guard against races between glob and load: keep only files that
        // still exist.
        const existingFiles = files.filter((f) => existsSync(f));
        allFiles.push(...existingFiles);
      } catch {
        // Ignore glob failures for individual variants; others may match.
      }
    }
  }
  return [...new Set(allFiles)];
};
403
// Top-level handler discovery: collects matching files from every search
// directory, imports them in parallel, and returns the deduplicated handler
// classes. Returns [] (never throws) on failure.
var discoverHandlers = async (pattern, options = {}) => {
  const { filter } = options;
  const searchDirectories = getSearchDirectories();
  // If any search root lives under dist/, assume a compiled deployment and
  // prefer compiled-JS pattern variants.
  const preferCompiled = searchDirectories.some((dir) => dir.includes("/dist/") || dir.includes("\\dist\\"));
  try {
    const uniqueFiles = /* @__PURE__ */ new Set();
    for (const basePath of searchDirectories) {
      const discoveredFiles = await discoverFiles(pattern, basePath, preferCompiled);
      for (const file of discoveredFiles) {
        uniqueFiles.add(file);
      }
    }
    if (uniqueFiles.size === 0) {
      logger.warn(`No files found matching pattern: ${pattern}`);
      return [];
    }
    // Import all discovered files concurrently.
    const handlers = await Promise.all([...uniqueFiles].map((file) => loadHandlers(file, filter)));
    const flatHandlers = handlers.flat();
    const uniqueHandlers = deduplicateHandlers(flatHandlers);
    return uniqueHandlers;
  } catch (error) {
    logger.error("Discovery failed:", error);
    return [];
  }
};
428
// Extraction of a literal event type from a schema's `type` field. Two
// representations are supported: a direct `.value` property/getter, and a
// Zod-style `_def` carrying either an enum `values` array or a literal
// `value` string.
function extractFromValueGetter(typeField) {
  return "value" in typeField && typeof typeField.value === "string"
    ? typeField.value
    : void 0;
}
function extractFromTypeDef(typeField) {
  if (!("_def" in typeField)) {
    return void 0;
  }
  const def = typeField._def;
  if (Array.isArray(def.values) && def.values.length > 0) {
    // Enum schema: the first member is taken as the event type.
    return String(def.values[0]);
  }
  return typeof def.value === "string" ? def.value : void 0;
}
function extractFromTypeField(shape) {
  const candidate = shape.type;
  if (!candidate || typeof candidate !== "object") {
    return void 0;
  }
  return extractFromValueGetter(candidate) ?? extractFromTypeDef(candidate);
}
function extractEventTypeFromSchema(schema) {
  const usableShape = "shape" in schema && schema.shape && typeof schema.shape === "object";
  return usableShape ? extractFromTypeField(schema.shape) : void 0;
}
459
/**
 * Returns a predicate accepting only events whose tenantId is one of the
 * configured ids (a single id or an array). Events without a tenantId are
 * rejected.
 */
function createTenantMatcher(tenantId) {
  const allowed = new Set(Array.isArray(tenantId) ? tenantId : [tenantId]);
  return (event) => Boolean(event.tenantId) && allowed.has(event.tenantId);
}
// Combines predicates into one that passes only when every predicate passes.
function combineMatchers(matchers) {
  return (event) => {
    for (const matcher of matchers) {
      if (!matcher(event)) return false;
    }
    return true;
  };
}
469
/**
 * Builds a handler class from either a schema (plus optional handler-level
 * eventType) or an options object { schema, type?, tenantId?, match?,
 * safeParse? }. The event type is resolved in order: explicit option,
 * eventType argument, literal extracted from the schema, then the
 * "unknown.event" fallback. The returned class carries discovery metadata on
 * a static __eventarcMetadata field and delegates handle() to the supplied
 * handler function.
 */
function handleEvent(schemaOrOptions, handler, eventType) {
  let schema;
  let finalEventType = eventType;
  let matchFn;
  let safeParse = false;
  if (schemaOrOptions && typeof schemaOrOptions === "object" && "schema" in schemaOrOptions) {
    const options = schemaOrOptions;
    schema = options.schema;
    finalEventType = options.type || eventType;
    safeParse = options.safeParse ?? false;
    // Tenant filter and custom matcher are combined with AND semantics.
    const matchers = [];
    if (options.tenantId) {
      matchers.push(createTenantMatcher(options.tenantId));
    }
    if (options.match) {
      matchers.push(options.match);
    }
    if (matchers.length === 1) {
      matchFn = matchers[0];
    } else if (matchers.length > 1) {
      matchFn = combineMatchers(matchers);
    }
  } else {
    schema = schemaOrOptions;
  }
  if (!finalEventType) {
    finalEventType = extractEventTypeFromSchema(schema);
  }
  if (!finalEventType) {
    finalEventType = "unknown.event";
  }
  // Derive a readable class name from the last two dotted segments,
  // e.g. "user.created" -> "UserCreatedHandler".
  const handlerName = `${finalEventType.split(".").slice(-2).map((s) => s.charAt(0).toUpperCase() + s.slice(1)).join("")}Handler`;
  const HandlerClass = class extends Object {
    static __eventarcMetadata = {
      schema,
      declaredType: finalEventType,
      match: matchFn,
      safeParse
    };
    async handle(payload, context) {
      // Promise.resolve allows handler to be sync or async.
      await Promise.resolve(handler(payload, context));
    }
  };
  Object.defineProperty(HandlerClass, "name", {
    value: handlerName,
    configurable: true
  });
  return HandlerClass;
}
518
// Convenience wrapper: turns a raw Zod shape object into a z.object schema.
function eventSchema(schema) {
  return z.object(schema);
}
521
+
522
// src/middlewares/cloudevents-middleware.ts
init_infrastructure();

// src/processing/dlq-safe.ts
// Repeated init calls are harmless: __esm memoizes the initializer.
init_infrastructure();
init_infrastructure();
// NATS string codec plus a process-wide memoized connection promise
// (shared by getNatsConnection below, reset by __resetNatsPublisher).
var sc = StringCodec();
var natsConnectionPromise = null;
530
/**
 * Naive English pluralization used to derive NATS subject roots:
 * "user" -> "users", "city" -> "cities", "box" -> "boxes", "day" -> "days".
 */
var pluralize = (word) => {
  const vowels = ["a", "e", "i", "o", "u"];
  const endsInConsonantY = word.endsWith("y") && !vowels.includes(word[word.length - 2]);
  if (endsInConsonantY) {
    return `${word.slice(0, -1)}ies`;
  }
  const isSibilant = ["s", "sh", "ch", "x"].some((suffix) => word.endsWith(suffix));
  if (isSibilant) {
    return `${word}es`;
  }
  return `${word}s`;
};
539
/**
 * Derives a NATS subject from an event type by pluralizing the leading
 * domain segment: "user.created" -> "users.created". Types without a dot
 * are returned unchanged.
 */
var deriveSubjectFromEventType = (eventType) => {
  const [domain, ...rest] = eventType.split(".");
  if (rest.length === 0) {
    return eventType;
  }
  return `${pluralize(domain)}.${rest.join(".")}`;
};
547
// Returns the process-wide NATS connection, creating it on first use.
// Server URL resolution order: explicit argument, NATS_URL env var, then the
// localhost default. On connection failure the cached promise is cleared so
// the next caller retries; the error is rethrown to the current caller.
var getNatsConnection = async (servers) => {
  if (!natsConnectionPromise) {
    const url = servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
    natsConnectionPromise = connect({ servers: url }).then((connection) => {
      logger.debug(`[NATS] connected to ${url}`);
      return connection;
    }).catch((error) => {
      logger.error("[NATS] connection error", error);
      natsConnectionPromise = null;
      throw error;
    });
  }
  return natsConnectionPromise;
};
561
/**
 * Maps an event type to a NATS subject via config.typeToSubjectMap, matching
 * the longest configured type prefix. Without a map (or when nothing
 * matches), falls back to defaultSubjectPrefix + type, or the bare type.
 */
var deriveSubjectFromType = (eventType, config) => {
  const withDefaultPrefix = () =>
    config?.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
  const mapping = config?.typeToSubjectMap;
  if (!mapping) {
    return withDefaultPrefix();
  }
  // Longest prefix first so "user.profile" wins over "user".
  const prefixes = Object.keys(mapping).sort((a, b) => b.length - a.length);
  const matched = prefixes.find((prefix) => eventType.startsWith(prefix));
  if (matched === void 0) {
    return withDefaultPrefix();
  }
  const remainder = eventType.slice(matched.length).replace(/^\./, "");
  return remainder ? `${mapping[matched]}.${remainder}` : mapping[matched];
};
576
/**
 * Resolves the JetStream stream for an event type via config.typeToStreamMap
 * using longest-prefix matching; undefined when no map or no prefix matches.
 */
var deriveStreamFromType = (eventType, config) => {
  const mapping = config?.typeToStreamMap;
  if (!mapping) {
    return void 0;
  }
  const byLengthDesc = Object.keys(mapping).sort((a, b) => b.length - a.length);
  const matched = byLengthDesc.find((prefix) => eventType.startsWith(prefix));
  return matched === void 0 ? void 0 : mapping[matched];
};
586
// Wraps `eventData` in a CloudEvents 1.0 envelope and publishes it to the
// given NATS subject without schema validation. Returns the generated event
// id. Note: nc.publish is fire-and-forget; delivery is not awaited here.
var publishNatsRawEvent = async (subjectName, eventType, eventData, options) => {
  const cloudEvent = {
    specversion: "1.0",
    type: eventType,
    source: options?.source || "hono-service",
    id: crypto.randomUUID(),
    time: (/* @__PURE__ */ new Date()).toISOString(),
    datacontenttype: "application/json",
    data: eventData,
    // Optional CloudEvent subject and (extension) tenant attribute.
    ...options?.subject && { subject: options.subject },
    ...options?.tenantId && { tenantid: options.tenantId }
  };
  const data = JSON.stringify(cloudEvent);
  const nc = await getNatsConnection(options?.servers);
  nc.publish(subjectName, sc.encode(data));
  logger.debug(`Published CloudEvent ${eventType} to NATS subject ${subjectName} (id=${cloudEvent.id})`);
  return cloudEvent.id;
};
604
// Validates `eventData` against the schema, then publishes it via
// publishNatsRawEvent. The event type is taken from the schema's literal
// `type` field. Throws a ValidationError-shaped object (not an Error) with
// per-issue details when validation fails.
var publishNatsEvent = async (subjectName, schema, eventData, options) => {
  const eventType = extractTypeFromSchema(schema);
  if (!eventType) {
    throw new Error("Could not extract event type from schema. Make sure your schema has proper metadata.");
  }
  const validationResult = schema.safeParse(eventData);
  if (!validationResult.success) {
    // Flatten Zod issues into serializable detail records.
    const validationDetails = validationResult.error.issues.map((issue) => ({
      code: issue.code,
      message: issue.message,
      // Symbol path elements are not JSON-serializable; drop them.
      path: issue.path.filter((p) => typeof p !== "symbol"),
      expected: "expected" in issue ? String(issue.expected) : void 0,
      received: "received" in issue ? String(issue.received) : void 0
    }));
    const handlerValidationError = {
      handlerName: `NatsPublisher:${eventType}`,
      validationErrors: validationDetails
    };
    throw createValidationError(eventType, [handlerValidationError]);
  }
  // Publish the parsed (possibly transformed/defaulted) data, not the input.
  return publishNatsRawEvent(subjectName, eventType, validationResult.data, options);
};
626
// Contract-aware publish entry point. Accepts either a bare event-type
// string (subject derived by pluralizing the domain segment) or a contract
// object (subject taken from its channel, falling back to its type).
var publish = async (eventTypeOrContract, eventData, options) => {
  const eventType = typeof eventTypeOrContract === "string" ? eventTypeOrContract : eventTypeOrContract.type;
  const natsSubject = typeof eventTypeOrContract === "string" ? deriveSubjectFromEventType(eventTypeOrContract) : eventTypeOrContract.channel?.subject ?? eventTypeOrContract.type;
  return publishNatsRawEvent(natsSubject, eventType, eventData, options);
};
631
// Test hook: drops the memoized NATS connection promise so the next publish
// establishes a fresh connection.
var __resetNatsPublisher = () => {
  natsConnectionPromise = null;
};
634
+
635
// src/publishing/pubsub.publisher.ts
init_infrastructure();
// Validates `eventData` against the schema, then publishes to Google
// Pub/Sub via publishRawEvent. Mirrors publishNatsEvent: event type comes
// from the schema's literal `type`; a ValidationError-shaped object (not an
// Error instance) is thrown with per-issue details on failure.
async function publishEvent(topicName, schema, eventData, options) {
  const eventType = extractTypeFromSchema(schema);
  if (!eventType) {
    throw new Error("Could not extract event type from schema. Make sure your schema has proper metadata.");
  }
  const validationResult = schema.safeParse(eventData);
  if (!validationResult.success) {
    const validationDetails = validationResult.error.issues.map((issue) => ({
      code: issue.code,
      message: issue.message,
      // Symbol path elements are not JSON-serializable; drop them.
      path: issue.path.filter((p) => typeof p !== "symbol"),
      expected: "expected" in issue ? String(issue.expected) : void 0,
      received: "received" in issue ? String(issue.received) : void 0
    }));
    const handlerValidationError = {
      handlerName: `Publisher:${eventType}`,
      validationErrors: validationDetails
    };
    throw createValidationError(eventType, [handlerValidationError]);
  }
  // Publish the parsed (possibly transformed/defaulted) data, not the input.
  return publishRawEvent(topicName, eventType, validationResult.data, options);
}
659
// Wraps `eventData` in a CloudEvents 1.0 envelope and publishes it to a
// Google Pub/Sub topic. Returns the Pub/Sub message id. The @google-cloud/
// pubsub client is imported lazily so it stays an optional dependency.
// NOTE(review): a new PubSub client is constructed per call — presumably
// acceptable for low publish rates; consider caching if this becomes hot.
async function publishRawEvent(topicName, eventType, eventData, options) {
  const { PubSub } = await import('@google-cloud/pubsub');
  const pubsub = new PubSub({
    projectId: options?.projectId,
    keyFilename: options?.keyFilename
  });
  const cloudEvent = {
    specversion: "1.0",
    type: eventType,
    source: options?.source || "hono-service",
    id: crypto.randomUUID(),
    time: (/* @__PURE__ */ new Date()).toISOString(),
    datacontenttype: "application/json",
    data: eventData,
    ...options?.subject && { subject: options.subject }
  };
  const data = JSON.stringify(cloudEvent);
  const dataBuffer = Buffer.from(data);
  const topic = pubsub.topic(topicName);
  const messageId = await topic.publishMessage({
    data: dataBuffer,
    attributes: {
      // Marks the payload as a structured-mode CloudEvent for consumers.
      "content-type": "application/cloudevents+json",
      ...options?.attributes
    }
  });
  logger.debug(`Published CloudEvent ${eventType} with ID: ${messageId}`);
  return messageId;
}
688
+
689
// src/processing/dlq-safe.ts
// DLQ-safe mode is active when a quarantine topic or an error topic is set.
var isDlqSafeMode = (options) => Boolean(options.quarantineTopic || options.errorTopic);
693
// Publishes a poison message to the configured quarantine topic, preserving
// the original event plus a serialized error. Never throws: publish failures
// are logged and swallowed so quarantine cannot itself crash processing.
var quarantineMessage = async (processingContext, reason, options, error) => {
  if (!options.quarantineTopic) {
    logger.warn("No quarantine topic configured, skipping quarantine");
    return;
  }
  try {
    // Serialize the error by shape: plain ValidationError objects and
    // anything else JSON-stringify directly; Error instances need explicit
    // field extraction (their properties are non-enumerable).
    let serializedError;
    if (error) {
      if (typeof error === "object" && error !== null && "type" in error && error.type === "ValidationError") {
        serializedError = JSON.stringify(error, null, 2);
      } else if (error instanceof Error) {
        serializedError = JSON.stringify({
          name: error.name,
          message: error.message,
          stack: error.stack
        }, null, 2);
      } else {
        serializedError = JSON.stringify(error, null, 2);
      }
    }
    const quarantineData = {
      originalMessageId: processingContext.messageId,
      originalEventType: processingContext.eventType,
      originalEventData: processingContext.eventData,
      originalEventContext: processingContext.eventContext,
      originalCloudEvent: processingContext.originalCloudEvent,
      quarantinedAt: (/* @__PURE__ */ new Date()).toISOString(),
      quarantineReason: reason,
      error: serializedError
    };
    await publishRawEvent(options.quarantineTopic, "hono.cloudevents.quarantined", quarantineData, {
      projectId: options.projectId,
      subject: `quarantine.${reason}`,
      source: options.source || "hono-cloudevents",
      attributes: {
        "quarantine-reason": reason
      }
    });
    logger.info(`Message ${processingContext.messageId} quarantined: ${reason}`);
  } catch (publishError) {
    const errorMessage = publishError instanceof Error ? publishError.message : "Unknown error";
    logger.error(`Failed to quarantine message ${processingContext.messageId}: ${errorMessage}`);
  }
};
737
// Publishes a recoverable processing error to the configured error topic so it
// can be retried/inspected downstream. No-op with a warning when unset;
// publish failures are logged, never rethrown.
var publishRecoverableError = async (processingContext, error, options) => {
  if (!options.errorTopic) {
    logger.warn("No error topic configured, skipping error publishing");
    return;
  }
  try {
    const isError = error instanceof Error;
    const errorData = {
      originalMessageId: processingContext.messageId,
      originalEventType: processingContext.eventType,
      originalEventData: processingContext.eventData,
      originalEventContext: processingContext.eventContext,
      originalCloudEvent: processingContext.originalCloudEvent,
      errorTimestamp: new Date().toISOString(),
      error: {
        message: isError ? error.message : String(error),
        stack: isError ? error.stack : void 0,
        type: isError ? error.constructor.name : typeof error
      }
    };
    await publishRawEvent(options.errorTopic, "hono.cloudevents.processing-error", errorData, {
      projectId: options.projectId,
      subject: "processing.error",
      source: options.source || "hono-cloudevents"
    });
    logger.info(`Processing error published for message ${processingContext.messageId}`);
  } catch (publishError) {
    const reason = publishError instanceof Error ? publishError.message : "Unknown error";
    logger.error(`Failed to publish error for message ${processingContext.messageId}: ${reason}`);
  }
};
767
// Builds the normalized processing-context snapshot that DLQ/error publishing
// attaches to quarantined and failed messages.
var createProcessingContext = (eventType, eventData, context, originalCloudEvent) => {
  const messageId = context?.messageId || "unknown";
  return {
    messageId,
    eventType,
    eventData,
    eventContext: context,
    timestamp: new Date().toISOString(),
    originalCloudEvent
  };
};
775
+
776
+ // src/processing/handler-cache.ts
777
+ init_logging();
778
// Module-level state: discovered handler lists keyed by cache key, plus the
// set of message ids already seen (duplicate suppression bookkeeping).
var createEmptyCache = () => new Map();
var createEmptyMessageIds = () => new Set();
var handlerCache = createEmptyCache();
var processedMessageIds = createEmptyMessageIds();
// Resets all cached handlers and processed-message bookkeeping.
var clearHandlerCache = () => {
  handlerCache = createEmptyCache();
  processedMessageIds = createEmptyMessageIds();
};
var getCachedHandlers = (key) => handlerCache.get(key) || [];
// Copy-on-write update: readers holding the old map never observe a partial write.
var setCachedHandlers = (key, handlers) => {
  const next = new Map(handlerCache);
  next.set(key, handlers);
  handlerCache = next;
};
var hasCachedHandlers = (key) => handlerCache.has(key);
// Copy-on-write insert into the processed-message set.
var addProcessedMessage = (messageId) => {
  const next = new Set(processedMessageIds);
  next.add(messageId);
  processedMessageIds = next;
};
794
// Normalizes a decorated handler class into the runtime handler shape used by
// the dispatchers. Returns null when the class carries no eventarc metadata or
// no event type can be determined.
var processHandler = (HandlerClass) => {
  const metadata = HandlerClass.__eventarcMetadata;
  if (!metadata) return null;
  // Instantiated before the type check, matching construction-time side effects.
  const instance = new HandlerClass();
  const eventType = metadata.declaredType || extractTypeFromSchema(metadata.schema);
  if (!eventType) return null;
  const handle = async (payload, context) => {
    await instance.handle(payload, context);
  };
  return {
    type: eventType,
    name: HandlerClass.name,
    schema: metadata.schema,
    handle,
    match: metadata.match,
    safeParse: metadata.safeParse || false
  };
};
811
// Deterministic cache key: environment + discovery glob + sorted handler names,
// so the same configuration always maps to the same cached handler list.
var createCacheKey = (discover, handlers = []) => {
  const envPart = process.env.NODE_ENV || "development";
  const discoverPart = discover || "no-discover";
  const sortedNames = handlers.map((h) => h.name).sort();
  const handlersPart = sortedNames.join(",") || "no-handlers";
  return [envPart, discoverPart, handlersPart].join(":");
};
817
// Populates the handler cache for `cacheKey` once: discovers handler classes
// (or uses the explicit list), normalizes them, and optionally logs the result.
// On failure an empty list is cached so discovery is not retried per request.
var setupHandlers = async (cacheKey, discover, handlers = [], log = false) => {
  if (hasCachedHandlers(cacheKey)) return;
  // Reports the discovery outcome in the configured format ("pretty" or structured).
  const reportDiscovery = (discovered) => {
    if (!log || discovered.length === 0) return;
    if (log === "pretty") {
      const plural = discovered.length === 1 ? "" : "s";
      logger.info(`Discovered ${discovered.length} handler${plural}:`);
      for (const { name } of discovered) {
        logger.info(` \u2022 ${name}`);
      }
    } else {
      logger.info("Handler discovery completed", {
        handlerCount: discovered.length,
        handlers: discovered.map((h) => h.name)
      });
    }
  };
  try {
    const candidates = discover ? await discoverHandlers(discover, { log: Boolean(log) }) : handlers;
    const discovered = candidates.map(processHandler).filter(Boolean);
    setCachedHandlers(cacheKey, discovered);
    reportDiscovery(discovered);
  } catch (error) {
    logger.error("Failed to setup handlers:", error);
    setCachedHandlers(cacheKey, []);
  }
};
841
+
842
+ // src/processing/idempotency.ts
843
// In-memory idempotency store with per-entry TTL and LRU-style eviction.
// `has` refreshes recency on hit; `add` evicts expired entries first and, if
// still full, drops the oldest ~10% of entries.
function createInMemoryIdempotencyStore(options = {}) {
  const { maxSize = 1e4, defaultTtlMs = 24 * 60 * 60 * 1e3 } = options;
  const entries = new Map();
  // Removes every entry whose deadline has passed.
  const purgeExpired = () => {
    const now = Date.now();
    for (const [id, entry] of entries) {
      if (entry.expiresAt <= now) entries.delete(id);
    }
  };
  // Drops the first `count` keys in insertion/recency order.
  const dropOldest = (count) => {
    let remaining = count;
    for (const id of entries.keys()) {
      if (remaining <= 0) break;
      entries.delete(id);
      remaining -= 1;
    }
  };
  return {
    has(messageId) {
      const entry = entries.get(messageId);
      if (!entry) return false;
      if (entry.expiresAt <= Date.now()) {
        entries.delete(messageId);
        return false;
      }
      // Delete + re-insert moves the key to the back of iteration order (LRU touch).
      entries.delete(messageId);
      entries.set(messageId, entry);
      return true;
    },
    add(messageId, ttlMs) {
      if (entries.size >= maxSize) purgeExpired();
      if (entries.size >= maxSize) {
        dropOldest(Math.max(1, Math.floor(maxSize * 0.1)));
      }
      entries.set(messageId, {
        expiresAt: Date.now() + (ttlMs ?? defaultTtlMs)
      });
    },
    clear() {
      entries.clear();
    }
  };
}
889
// Process-wide default idempotency store, created lazily on first access.
var defaultStore = null;
function getDefaultIdempotencyStore() {
  if (defaultStore === null) {
    defaultStore = createInMemoryIdempotencyStore();
  }
  return defaultStore;
}
// Clears and discards the default store (primarily for tests).
function resetDefaultIdempotencyStore() {
  if (defaultStore) {
    defaultStore.clear?.();
  }
  defaultStore = null;
}
900
// Returns true when `messageId` has not been processed before (and records it
// with the given TTL); false when it is a duplicate that should be skipped.
async function checkAndMarkProcessed(store, messageId, ttlMs) {
  if (await store.has(messageId)) {
    return false;
  }
  await store.add(messageId, ttlMs);
  return true;
}
908
+
909
+ // src/processing/validation.ts
910
+ init_infrastructure();
911
// Converts a Zod-style error into plain, serializable issue records
// (symbol path segments are stripped so the result is JSON-safe).
var createValidationDetails = (error) => {
  const toDetail = (issue) => ({
    code: issue.code,
    message: issue.message,
    path: issue.path.filter((segment) => typeof segment !== "symbol"),
    expected: String(issue.expected),
    received: String(issue.received)
  });
  return error.issues.map(toDetail);
};
918
// Validates the payload against the handler's schema. On success returns
// { success: true }; on failure returns the structured error plus whether the
// handler opted into skipping invalid payloads (safeParse).
var validateEventData = (handler, eventData) => {
  const parsed = handler.schema.safeParse(eventData);
  if (parsed.success) {
    return { success: true };
  }
  const handlerValidationError = {
    handlerName: handler.name,
    validationErrors: createValidationDetails(parsed.error)
  };
  return {
    error: handlerValidationError,
    shouldSkip: handler.safeParse || false
  };
};
933
// Curried predicate: does `handler` accept this event type, and (when present)
// does its custom match() accept the enriched event?
var isHandlerMatching = (eventType, eventData, context) => (handler) => {
  if (handler.type !== eventType) return false;
  if (!handler.match) return true;
  const enrichedEvent = { data: eventData, ...context };
  return handler.match(enrichedEvent);
};
941
// Wraps a single handler validation failure in the shared ValidationError shape and throws it.
var throwValidationError = (handlerType, error) => {
  const validationError = createValidationError(handlerType, [error]);
  throw validationError;
};
944
+
945
+ // src/middlewares/cloudevents-middleware.ts
946
// Logs the middleware's startup configuration, in "pretty" (human) or
// structured form, depending on whether DLQ-safe mode is active.
var logInitialization = (options, log) => {
  const pretty = log === "pretty";
  if (!isDlqSafeMode(options)) {
    if (pretty) {
      logger.info("DLQ-Safe mode disabled - using standard error handling");
    } else {
      logger.info("CloudEvents middleware initialized", {
        mode: "Standard",
        note: "Using standard error handling"
      });
    }
    return;
  }
  const quarantine = options.quarantineTopic || "not configured";
  const errorTopic = options.errorTopic || "not configured";
  const projectId = options.projectId || "default";
  const source = options.source || "auto-detected";
  if (pretty) {
    logger.info("DLQ-Safe mode enabled");
    logger.info(` \u2022 Quarantine topic: ${quarantine}`);
    logger.info(` \u2022 Error topic: ${errorTopic}`);
    logger.info(` \u2022 Project ID: ${projectId}`);
    logger.info(` \u2022 Source: ${source}`);
  } else {
    logger.info("CloudEvents middleware initialized", {
      mode: "DLQ-Safe",
      quarantineTopic: quarantine,
      errorTopic,
      projectId,
      source
    });
  }
};
978
// Runs one handler; failures are logged with the handler name and rethrown so
// the caller can route them (quarantine, error topic, or HTTP error).
var executeHandler = async (handler, eventData, context) => {
  try {
    await handler.handle(eventData, context);
  } catch (cause) {
    logger.error(`Handler execution failed for ${handler.name}:`, cause);
    throw cause;
  }
};
986
// Validates and runs a single handler against the event payload.
// In DLQ-safe mode failures are published to the quarantine/error topics off
// the request path (via queueMicrotask) and the call returns normally;
// otherwise validation failures and handler errors propagate to the caller.
var processHandlerSafely = async (handler, eventData, ctx, processingContext, options) => {
  const validationResult = validateEventData(handler, eventData);
  if ("error" in validationResult) {
    // Handlers registered with safeParse opt into silently skipping invalid payloads.
    if (validationResult.shouldSkip) {
      return;
    }
    if (isDlqSafeMode(options)) {
      // Deferred so quarantining never delays or fails the HTTP response.
      queueMicrotask(() => {
        quarantineMessage(processingContext, "validation_error", options, validationResult.error).catch((error) => {
          logger.error("Failed to quarantine validation error:", error);
        });
      });
      return;
    }
    throwValidationError(handler.name, validationResult.error);
  }
  try {
    await executeHandler(handler, eventData, ctx);
  } catch (error) {
    if (isDlqSafeMode(options)) {
      // Handler crashed: publish a recoverable-error event instead of failing the request.
      queueMicrotask(() => {
        publishRecoverableError(processingContext, error, options).catch((publishError) => {
          logger.error("Failed to publish recoverable error:", publishError);
        });
      });
      return;
    }
    throw error;
  }
};
1016
// Dispatches the event to every matching handler concurrently. When no handler
// matches: quarantine (DLQ-safe mode) or throw.
var executeMatchingHandlers = async (handlers, eventType, eventData, ctx, processingContext, options) => {
  const matching = handlers.filter(isHandlerMatching(eventType, eventData, ctx));
  if (matching.length === 0) {
    const errorMessage = `No handlers found for event type: ${eventType}`;
    if (!isDlqSafeMode(options)) {
      throw new Error(errorMessage);
    }
    // Fire-and-forget: quarantining must not block (or fail) the response path.
    queueMicrotask(() => {
      quarantineMessage(processingContext, "no_handler", options, new Error(errorMessage)).catch((error) => {
        logger.error("Failed to quarantine no_handler error:", error);
      });
    });
    return;
  }
  const runs = matching.map(
    (handler) => processHandlerSafely(handler, eventData, ctx, processingContext, options)
  );
  await Promise.all(runs);
};
1034
// Parses a CloudEvent from the incoming request, deduplicates it, and runs all
// matching handlers. Responds 204 (DLQ-safe) or 200 with a status body;
// ValidationErrors map to 422, everything else to 500.
var processCloudEvent = async (ctx, options) => {
  const cacheKey = createCacheKey(options.discover, options.handlers);
  const handlers = getCachedHandlers(cacheKey);
  try {
    const { event, cloudEventData } = await parseEventFromContext(ctx);
    const eventData = cloudEventData || event;
    if (!eventData) {
      throw new Error("Failed to parse CloudEvent from request");
    }
    const eventType = eventData.type;
    const eventPayload = eventData.data;
    const context = cloudEventData || {};
    const messageId = eventData.id || "unknown";
    // NOTE(review): when options.processedMessageIds is not supplied, dedupeSet
    // is a fresh per-request Set, so duplicates are only detected across requests
    // if the caller passes a shared set; the module-level set updated by
    // addProcessedMessage is not consulted here — confirm this is intended.
    const dedupeSet = options.processedMessageIds || /* @__PURE__ */ new Set();
    if (dedupeSet.has(messageId)) {
      logger.info(`Duplicate message ${messageId} detected, skipping processing`);
      return ctx.json({ status: "duplicate", messageId }, 200);
    }
    addProcessedMessage(messageId);
    dedupeSet.add(messageId);
    const processingContext = createProcessingContext(eventType, eventPayload, context, event || eventData);
    if (options.log) {
      logger.info(`Processing event type: ${eventType}`);
    }
    await executeMatchingHandlers(handlers, eventType, eventPayload, context, processingContext, options);
    // DLQ-safe mode always acks with 204 so the broker never retries.
    return isDlqSafeMode(options) ? ctx.body(null, 204) : ctx.json({ status: "processed", eventType, messageId }, 200);
  } catch (error) {
    // Structured validation failures are surfaced to the caller as 422.
    if (error && typeof error === "object" && "type" in error && error.type === "ValidationError") {
      return ctx.json({ error }, 422);
    }
    handleErrorWithContext(error, ctx, { isDlqSafe: isDlqSafeMode(options) });
    return ctx.json({ error: "Internal Server Error" }, 500);
  }
};
// Hono middleware factory: sets up handler discovery (eagerly when logging is
// enabled, otherwise lazily on the first POST) and processes CloudEvents POSTs.
// Non-POST requests pass through to the next middleware.
function cloudEvents(options = {}) {
  const { discover, handlers = [], log = false } = options;
  const cacheKey = createCacheKey(discover, handlers);
  if (log) {
    // Eager warm-up so the initialization banner appears at startup; errors are
    // logged, and the per-request setupHandlers call below retries if needed.
    setupHandlers(cacheKey, discover, handlers, log).then(() => logInitialization(options, log)).catch((error) => logger.error("Failed to setup handlers during initialization:", error));
  }
  return async (ctx, next) => {
    if (ctx.req.method !== "POST") {
      return next();
    }
    // No-op when the cache key is already populated.
    await setupHandlers(cacheKey, discover, handlers, log);
    try {
      return await processCloudEvent(ctx, options);
    } catch (error) {
      if (isDlqSafeMode(options)) {
        logger.error("Error during request processing (DLQ-safe mode):", error);
        // Quarantine off the response path; the event context is unknown here
        // because the failure may have happened before parsing.
        queueMicrotask(async () => {
          try {
            const processingContext = createProcessingContext("unknown", null, null, null);
            await quarantineMessage(processingContext, "processing_error", options, error);
          } catch (quarantineError) {
            const errorMessage = quarantineError instanceof Error ? quarantineError.message : "Unknown error";
            logger.error(`Failed to quarantine error: ${errorMessage}`);
          }
        });
        return ctx.body(null, 204);
      }
      throw error;
    }
  };
}
1099
+
1100
+ // src/transports/nats/base-message-processor.ts
1101
// Reads the tenant-id CloudEvents extension, tolerating the three spellings
// producers use (lowercase per the spec, camelCase, snake_case) in that priority order.
function extractTenantId(ce) {
  const { tenantid, tenantId, tenant_id } = ce;
  return tenantid ?? tenantId ?? tenant_id;
}
1105
// Transport-agnostic CloudEvent processing pipeline shared by the NATS and
// JetStream consumers. The transport layer supplies decode(), ack semantics
// and redelivery counts; this factory returns the composable steps.
function createBaseMessageProcessor(deps) {
  const { name, dlqEnabled, options, processedHandlers, decode, logger: logger2 } = deps;
  // Projects a parsed CloudEvent into the enriched shape handlers receive.
  const toEnrichedEvent = (ce) => ({
    eventType: ce.type,
    source: ce.source,
    subject: ce.subject,
    time: ce.time ?? (/* @__PURE__ */ new Date()).toISOString(),
    messageId: ce.id,
    data: ce.data,
    tenantId: extractTenantId(ce)
  });
  const createContext = (event, ce) => createProcessingContext(event.eventType, event.data, event, ce);
  // Assumes the wire payload is a JSON-encoded (structured-mode) CloudEvent.
  const parseCloudEvent = (data) => {
    return JSON.parse(decode(data));
  };
  // First handler whose type matches and whose optional match() accepts the event.
  const findHandler = (event) => processedHandlers.find(
    (handler) => handler.type === event.eventType && (!handler.match || handler.match(event))
  );
  // No handler: quarantine when DLQ is on; always ack so the message is not redelivered forever.
  const handleMissingHandler = async (context, eventType) => {
    logger2.warn(`[${name}] no handler for event type: ${eventType}`);
    if (dlqEnabled) {
      await quarantineMessage(context, "no_handler", options, new Error(`No handler for event type ${eventType}`));
    }
    return { handled: true, shouldAck: true };
  };
  // Validation failure: quarantine+ack in DLQ mode; skip+ack for safeParse
  // handlers; otherwise throw a ValidationError.
  const handleValidationFailure = async (validationResult, handler, context) => {
    logger2.error(
      `[${name}] validation failed for handler ${handler.name}`,
      JSON.stringify(validationResult.error, null, 2)
    );
    if (dlqEnabled) {
      await quarantineMessage(context, "validation_error", options, validationResult.error);
      return { handled: true, shouldAck: true };
    }
    if (validationResult.shouldSkip) {
      return { handled: true, shouldAck: true };
    }
    throwValidationError(handler.name, validationResult.error);
    // Unreachable (throwValidationError always throws); kept for an explicit return shape.
    return { handled: true, shouldAck: false };
  };
  // Runs the handler. In DLQ mode a handler crash is published as a
  // recoverable-error event and still counts as success (message is acked).
  const executeHandler2 = async (handler, enriched, context) => {
    try {
      await handler.handle(enriched.data, enriched);
      return { success: true };
    } catch (error) {
      if (dlqEnabled) {
        await publishRecoverableError(context, error, options);
        return { success: true };
      }
      throw error;
    }
  };
  // Full pipeline for one parsed event; resolves to "should this message be acked?".
  const processEvent = async (cloudEvent, enriched) => {
    const context = createContext(enriched, cloudEvent);
    const handler = findHandler(enriched);
    if (!handler) {
      const result2 = await handleMissingHandler(context, enriched.eventType);
      return result2.shouldAck;
    }
    const validationResult = validateEventData(handler, enriched.data);
    if ("error" in validationResult) {
      const result2 = await handleValidationFailure(validationResult, handler, context);
      return result2.shouldAck;
    }
    const result = await executeHandler2(handler, enriched, context);
    return result.success;
  };
  // Parse failure: quarantine (→ ack) in DLQ mode; otherwise only give up
  // (ack) after the third delivery attempt.
  const handleParseError = async (error, context, redeliveryCount = 0) => {
    logger2.error(`[${name}] failed to parse CloudEvent (attempt ${redeliveryCount + 1})`, error);
    if (dlqEnabled) {
      await quarantineMessage(context, "parse_error", options, error);
      return true;
    }
    return redeliveryCount >= 2;
  };
  // Last-resort hook for errors escaping the pipeline; acks only after a
  // successful quarantine so the message is not silently lost.
  const handleUnhandledError = async (error, context, ackFn) => {
    logger2.error(`[${name}] unhandled processing error`, error);
    if (dlqEnabled) {
      try {
        await quarantineMessage(context, "unhandled_error", options, error);
        ackFn?.();
      } catch (quarantineError) {
        logger2.error(`[${name}] failed to quarantine unhandled error`, quarantineError);
      }
    }
  };
  return {
    toEnrichedEvent,
    createContext,
    parseCloudEvent,
    findHandler,
    processEvent,
    handleParseError,
    handleUnhandledError
  };
}
// Side effect: (re)initializes logging for the transport modules below.
init_logging();
1202
+
1203
+ // src/transports/nats/jetstream-message-processor.ts
1204
// JetStream-specific wrapper around the base message processor: supplies the
// stream/sequence-derived message id and redelivery count.
var createJetStreamMessageProcessor = (deps) => {
  const { decode } = deps;
  const base = createBaseMessageProcessor(deps);
  // Minimal enriched-event stand-in used when the payload cannot be parsed.
  const toUnknownContext = (msg) => ({
    eventType: "unknown",
    source: `jetstream://${msg.info.stream}`,
    subject: msg.subject,
    time: new Date().toISOString(),
    messageId: `${msg.info.stream}:${msg.seq}`,
    data: decode(msg.data)
  });
  const fallbackContext = (msg) => createProcessingContext("unknown", decode(msg.data), toUnknownContext(msg), void 0);
  // Resolves to "should ack?". Only synchronous parse/enrich failures are
  // caught here; processEvent's promise is returned un-awaited so its
  // rejections surface to the caller (matching the original behavior).
  const handleMessage = async (msg) => {
    try {
      const cloudEvent = base.parseCloudEvent(msg.data);
      return base.processEvent(cloudEvent, base.toEnrichedEvent(cloudEvent));
    } catch (error) {
      return base.handleParseError(error, fallbackContext(msg), msg.info.deliveryCount);
    }
  };
  const handleUnhandledProcessingError = async (msg, error) => {
    await base.handleUnhandledError(error, fallbackContext(msg), () => msg.ack());
  };
  return { handleMessage, handleUnhandledProcessingError };
};
1233
+
1234
// src/transports/nats/jetstream-consumer.ts
// JetStream policy literals (string values mirrored from the nats.js enums).
var ACK_EXPLICIT = "explicit";
var DELIVER_ALL = "all";
var DELIVER_LAST = "last";
var DELIVER_NEW = "new";
var DELIVER_START_TIME = "by_start_time";
var RETENTION_LIMITS = "limits";
var STORAGE_FILE = "file";
// Shared string codec for encoding/decoding NATS payloads in this module.
var sc2 = StringCodec();
// Defaults applied when a stream has to be created (maxAge in ms, maxBytes in bytes).
var DEFAULT_STREAM_CONFIG = {
  maxAge: 7 * 24 * 60 * 60 * 1e3,
  // 7 days in ms
  maxBytes: 1024 * 1024 * 1024,
  // 1 GB
  replicas: 1
};
1250
// Opens a short-lived admin connection, guarantees the configured JetStream
// stream exists (creating or extending it via ensureStream), then closes the
// connection even on failure.
async function ensureJetStreamStream(options) {
  const servers = options.servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
  const user = options.user ?? process.env.NATS_USER;
  const pass = options.pass ?? process.env.NATS_PASSWORD;
  // Credentials are only sent when both parts are present.
  const auth = user && pass ? { user, pass } : {};
  const nc = await connect({ servers, ...auth });
  try {
    const jsm = await nc.jetstreamManager();
    await ensureStream(jsm, options.stream, options.subjects, options.config);
  } finally {
    await nc.close();
  }
}
1265
// Idempotently ensures a stream exists with (at least) the given subjects:
// extends an existing stream's subject list, or creates the stream with the
// merged default/override config when info() fails.
async function ensureStream(jsm, name, subjects, config = {}) {
  const { maxAge, maxBytes, replicas } = { ...DEFAULT_STREAM_CONFIG, ...config };
  const createStream = async () => {
    await jsm.streams.add({
      name,
      subjects,
      retention: RETENTION_LIMITS,
      storage: STORAGE_FILE,
      // JetStream expects nanoseconds; maxAge is configured in milliseconds.
      max_age: maxAge * 1e6,
      max_bytes: maxBytes,
      num_replicas: replicas
    });
    logger.info(`[jetstream] created stream ${name} with subjects: ${subjects.join(", ")}`);
  };
  try {
    const existing = await jsm.streams.info(name);
    const known = new Set(existing.config.subjects);
    const missing = subjects.filter((subject) => !known.has(subject));
    if (missing.length === 0) return;
    await jsm.streams.update(name, {
      subjects: [...existing.config.subjects, ...missing]
    });
    logger.info(`[jetstream] updated stream ${name} with subjects: ${missing.join(", ")}`);
  } catch {
    // info() throws when the stream does not exist; update failures also land
    // here and fall back to create, matching the original behavior.
    await createStream();
  }
}
1291
// Batch variant of ensureJetStreamStream: one admin connection, each stream
// spec ensured sequentially, connection closed even on failure.
async function ensureJetStreamStreams(options) {
  const servers = options.servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
  const user = options.user ?? process.env.NATS_USER;
  const pass = options.pass ?? process.env.NATS_PASSWORD;
  const auth = user && pass ? { user, pass } : {};
  const nc = await connect({ servers, ...auth });
  try {
    const jsm = await nc.jetstreamManager();
    for (const spec of options.streams) {
      await ensureStream(jsm, spec.stream, spec.subjects, spec.config);
    }
  } finally {
    await nc.close();
  }
}
1308
// Idempotently ensures a durable consumer exists on the stream. When
// startFrom is a Date, delivery begins at that time; otherwise "all"/"last"
// map to their policies and anything else means "new".
async function ensureConsumer(jsm, streamName, consumerName, options) {
  const { startFrom } = options;
  let deliverPolicy = DELIVER_NEW;
  if (startFrom === "all") {
    deliverPolicy = DELIVER_ALL;
  } else if (startFrom === "last") {
    deliverPolicy = DELIVER_LAST;
  }
  const optStartTime = startFrom instanceof Date ? startFrom : void 0;
  try {
    await jsm.consumers.info(streamName, consumerName);
    return; // Consumer already exists; nothing to do.
  } catch {
    // info() throws when the consumer is missing — fall through and create it.
  }
  await jsm.consumers.add(streamName, {
    durable_name: consumerName,
    ack_policy: ACK_EXPLICIT,
    deliver_policy: optStartTime ? DELIVER_START_TIME : deliverPolicy,
    opt_start_time: optStartTime?.toISOString(),
    // replay_policy defaults to 'instant', no need to specify explicitly
    // ack_wait is configured in ms; JetStream expects nanoseconds.
    ack_wait: (options.ackWait ?? 3e4) * 1e6,
    max_deliver: options.maxDeliver ?? 3,
    // Filter subjects at consumer level (optional)
    filter_subjects: options.filterSubjects
  });
  logger.info(`[jetstream] created durable consumer ${consumerName} on stream ${streamName}`);
}
1338
// Discovers handlers, connects to NATS, ensures the durable consumer, and
// starts a background consume loop over one stream. Returns the messages
// iterator so callers can stop consumption.
async function consumeJetStreamEvents(options) {
  const name = options.consumer;
  const servers = options.servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
  const user = options.user ?? process.env.NATS_USER;
  const pass = options.pass ?? process.env.NATS_PASSWORD;
  const handlerConstructors = await discoverHandlers(options.discover);
  const processedHandlers = handlerConstructors.map(processHandler).filter((h) => h !== null);
  const handlerNames = processedHandlers.map((h) => h.name).join(", ");
  logger.info(`[${name}] discovered ${processedHandlers.length} handler(s): ${handlerNames}`);
  const nc = await connect({
    servers,
    ...user && pass ? { user, pass } : {}
  });
  logger.info(`[${name}] connected to NATS: ${servers}${user ? " (authenticated)" : ""}`);
  const jsm = await nc.jetstreamManager();
  const js = nc.jetstream();
  await ensureConsumer(jsm, options.stream, name, options);
  const consumer = await js.consumers.get(options.stream, name);
  const messages = await consumer.consume({
    max_messages: options.maxMessages ?? 100
  });
  logger.info(`[${name}] consuming from stream ${options.stream}`);
  const dlqEnabled = Boolean(options.quarantineTopic || options.errorTopic);
  // Idempotency is on by default; pass `false` to disable, or supply a custom store.
  const idempotencyStore = options.idempotencyStore === false ? null : options.idempotencyStore ?? getDefaultIdempotencyStore();
  const idempotencyTtl = options.idempotencyTtl;
  const { handleMessage, handleUnhandledProcessingError } = createJetStreamMessageProcessor({
    name,
    dlqEnabled,
    options,
    processedHandlers,
    decode: (data) => sc2.decode(data),
    logger
  });
  // Duplicate messages (keyed by stream:sequence) are acked immediately and skipped.
  const checkIdempotency = async (msg) => {
    if (!idempotencyStore) return true;
    const messageId = `${msg.info.stream}:${msg.seq}`;
    const shouldProcess = await checkAndMarkProcessed(idempotencyStore, messageId, idempotencyTtl);
    if (!shouldProcess) {
      logger.debug(`[${name}] skipping duplicate message: ${messageId}`);
      msg.ack();
    }
    return shouldProcess;
  };
  // ack on success, nak (redeliver) otherwise; unexpected errors go to the
  // unhandled hook, which may quarantine and ack.
  const processSingleMessage = async (msg) => {
    const shouldProcess = await checkIdempotency(msg);
    if (!shouldProcess) return;
    try {
      const success = await handleMessage(msg);
      if (success) {
        msg.ack();
      } else {
        msg.nak();
      }
    } catch (error) {
      await handleUnhandledProcessingError(msg, error);
    }
  };
  // Background loop: messages are processed sequentially; a crash of the loop
  // itself is logged but does not reject this function's returned promise.
  (async () => {
    try {
      for await (const msg of messages) {
        await processSingleMessage(msg);
      }
    } catch (err) {
      logger.error(`[${name}] message processing loop crashed`, err);
    }
  })();
  return messages;
}
1406
// Multi-stream variant of consumeJetStreamEvents: a single connection and
// handler set, with one durable consumer and background loop per stream.
// Returns the list of messages iterators (one per stream).
async function consumeJetStreamStreams(options) {
  const name = options.consumer;
  const servers = options.servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
  const user = options.user ?? process.env.NATS_USER;
  const pass = options.pass ?? process.env.NATS_PASSWORD;
  const handlerConstructors = await discoverHandlers(options.discover);
  const processedHandlers = handlerConstructors.map(processHandler).filter((h) => h !== null);
  const handlerNames = processedHandlers.map((h) => h.name).join(", ");
  logger.info(`[${name}] discovered ${processedHandlers.length} handler(s): ${handlerNames}`);
  const nc = await connect({
    servers,
    ...user && pass ? { user, pass } : {}
  });
  logger.info(`[${name}] connected to NATS: ${servers}${user ? " (authenticated)" : ""}`);
  const jsm = await nc.jetstreamManager();
  const js = nc.jetstream();
  const dlqEnabled = Boolean(options.quarantineTopic || options.errorTopic);
  // Idempotency is on by default; pass `false` to disable, or supply a custom store.
  const idempotencyStore = options.idempotencyStore === false ? null : options.idempotencyStore ?? getDefaultIdempotencyStore();
  const idempotencyTtl = options.idempotencyTtl;
  const { handleMessage, handleUnhandledProcessingError } = createJetStreamMessageProcessor({
    name,
    dlqEnabled,
    options,
    processedHandlers,
    decode: (data) => sc2.decode(data),
    logger
  });
  // Duplicate messages (keyed by stream:sequence) are acked immediately and skipped.
  const checkIdempotency = async (msg) => {
    if (!idempotencyStore) return true;
    const messageId = `${msg.info.stream}:${msg.seq}`;
    const shouldProcess = await checkAndMarkProcessed(idempotencyStore, messageId, idempotencyTtl);
    if (!shouldProcess) {
      logger.debug(`[${name}] skipping duplicate message: ${messageId}`);
      msg.ack();
    }
    return shouldProcess;
  };
  // ack on success, nak (redeliver) otherwise; unexpected errors go to the
  // unhandled hook, which may quarantine and ack.
  const processSingleMessage = async (msg) => {
    const shouldProcess = await checkIdempotency(msg);
    if (!shouldProcess) return;
    try {
      const success = await handleMessage(msg);
      if (success) {
        msg.ack();
      } else {
        msg.nak();
      }
    } catch (error) {
      await handleUnhandledProcessingError(msg, error);
    }
  };
  const allMessages = [];
  for (const stream of options.streams) {
    // Shallow copy of the shared options, applied per stream.
    const consumerOpts = {
      ...options};
    await ensureConsumer(jsm, stream, name, consumerOpts);
    const consumer = await js.consumers.get(stream, name);
    const messages = await consumer.consume({
      max_messages: options.maxMessages ?? 100
    });
    logger.info(`[${name}] consuming from stream ${stream}`);
    // Background loop per stream; loop crashes are logged, not rethrown.
    (async () => {
      try {
        for await (const msg of messages) {
          await processSingleMessage(msg);
        }
      } catch (err) {
        logger.error(`[${name}] message processing loop crashed for stream ${stream}`, err);
      }
    })();
    allMessages.push(messages);
  }
  return allMessages;
}
// Shorter aliases for the plural helpers.
var ensureJetStreams = ensureJetStreamStreams;
var consumeJetStreams = consumeJetStreamStreams;
// Side effect: (re)initializes logging for the core-NATS modules below.
init_logging();
1483
+
1484
+ // src/transports/nats/nats-message-processor.ts
1485
// Core-NATS wrapper around the base message processor: message id comes from
// the Nats-Msg-Id header (core NATS has no sequence numbers) and there is no
// redelivery count.
var createNatsMessageProcessor = (deps) => {
  const { subject, decode } = deps;
  const base = createBaseMessageProcessor(deps);
  // Minimal enriched-event stand-in used when the payload cannot be parsed.
  const toUnknownContext = (msg) => ({
    eventType: "unknown",
    source: `nats://${subject}`,
    subject,
    time: new Date().toISOString(),
    messageId: msg.headers?.get("Nats-Msg-Id") ?? "unknown",
    data: decode(msg.data)
  });
  const fallbackContext = (msg) => createProcessingContext("unknown", decode(msg.data), toUnknownContext(msg), void 0);
  // Unlike the JetStream variant, processEvent is awaited here, so its
  // rejections are also routed through handleParseError.
  const handleMessage = async (msg) => {
    try {
      const cloudEvent = base.parseCloudEvent(msg.data);
      await base.processEvent(cloudEvent, base.toEnrichedEvent(cloudEvent));
    } catch (error) {
      await base.handleParseError(error, fallbackContext(msg));
    }
  };
  const handleUnhandledProcessingError = async (msg, error) => {
    await base.handleUnhandledError(error, fallbackContext(msg));
  };
  return { handleMessage, handleUnhandledProcessingError };
};
1514
+
1515
// src/transports/nats/nats-consumer.ts
// String codec for core-NATS payloads in this module.
var sc3 = StringCodec();
// globalThis key under which live subscriptions are tracked, so reloaded
// modules can find and clean up consumers created by a previous instance.
var CONSUMER_REGISTRY_KEY = "__crossdelta_nats_consumers__";
1518
// Returns the process-wide consumer registry, creating it on first access.
// Stored on globalThis so it survives module re-evaluation.
function getConsumerRegistry() {
  const existing = globalThis[CONSUMER_REGISTRY_KEY];
  if (existing) return existing;
  const registry = new Map();
  globalThis[CONSUMER_REGISTRY_KEY] = registry;
  return registry;
}
1524
/**
 * Tear down a previously registered consumer: unsubscribe its subscription,
 * drain its connection, and remove it from the global registry.
 * No-op when no consumer is registered under `name`.
 *
 * @param {string} name - registry key the consumer was stored under.
 * @returns {Promise<void>}
 */
async function cleanupConsumer(name) {
  const registry = getConsumerRegistry();
  const consumer = registry.get(name);
  if (!consumer) {
    return;
  }
  logger.info(`[${name}] cleaning up subscription...`);
  try {
    consumer.subscription.unsubscribe();
    await consumer.connection.drain();
  } finally {
    // FIX: always drop the registry entry. Previously a throwing drain()
    // left a stale record behind, so the dead consumer could never be
    // cleaned up or re-registered under the same name.
    registry.delete(name);
  }
}
1534
/**
 * Discover event handlers, connect to core NATS and start consuming
 * `options.subject` in a background loop.
 *
 * Config falls back to env vars (NATS_URL, NATS_USER, NATS_PASSWORD) and
 * then to `nats://localhost:4222`. Any consumer previously registered under
 * the same name is torn down first, so repeated calls are safe.
 *
 * @param {object} options - subject, servers, consumerName, user, pass,
 *   discover, quarantineTopic, errorTopic (see package types).
 * @returns {Promise<object>} the live NATS subscription (callers may
 *   unsubscribe it to stop consumption).
 */
async function consumeNatsEvents(options) {
  const servers = options.servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
  const subject = options.subject;
  const name = options.consumerName ?? `nats-consumer:${subject}`;
  const user = options.user ?? process.env.NATS_USER;
  const pass = options.pass ?? process.env.NATS_PASSWORD;
  // Replace any consumer left over from a previous call/reload.
  await cleanupConsumer(name);
  const handlerConstructors = await discoverHandlers(options.discover);
  const processedHandlers = handlerConstructors.map(processHandler).filter((h) => h !== null);
  const handlerNames = processedHandlers.map((h) => h.name).join(", ");
  logger.info(`[${name}] discovered ${processedHandlers.length} handler(s): ${handlerNames}`);
  // Credentials are only forwarded when BOTH user and pass are present.
  const authenticated = Boolean(user && pass);
  const nc = await connect({
    servers,
    ...(authenticated ? { user, pass } : {})
  });
  // FIX: the log previously appended "(authenticated)" whenever `user` was
  // set, even though credentials are only sent when `pass` exists too.
  logger.info(`[${name}] connected to NATS: ${servers}${authenticated ? " (authenticated)" : ""}`);
  const sub = nc.subscribe(subject);
  logger.info(`[${name}] subscribed to subject: ${subject}`);
  getConsumerRegistry().set(name, { subscription: sub, connection: nc });
  const dlqEnabled = Boolean(options.quarantineTopic || options.errorTopic);
  const { handleMessage, handleUnhandledProcessingError } = createNatsMessageProcessor({
    name,
    subject,
    dlqEnabled,
    options,
    processedHandlers,
    decode: (data) => sc3.decode(data),
    logger
  });
  // Pump the subscription in the background. Per-message failures go through
  // the last-resort handler; a crash of the loop itself is logged so the
  // floating promise never rejects unhandled.
  const processSubscription = async () => {
    for await (const msg of sub) {
      await handleMessage(msg).catch((error) => handleUnhandledProcessingError(msg, error));
    }
  };
  processSubscription().catch((err) => {
    logger.error(`[${name}] subscription loop crashed`, err);
  });
  return sub;
}
1573
+
1574
+ export { __resetNatsPublisher, checkAndMarkProcessed, clearHandlerCache, cloudEvents, consumeJetStreamEvents, consumeJetStreamStreams, consumeJetStreams, consumeNatsEvents, createContract, createInMemoryIdempotencyStore, deriveStreamFromType, deriveSubjectFromType, ensureJetStreamStream, ensureJetStreamStreams, ensureJetStreams, eventSchema, extractTypeFromSchema, getDefaultIdempotencyStore, handleEvent, parseEventFromContext, publish, publishEvent, publishNatsEvent, publishNatsRawEvent, publishRawEvent, resetDefaultIdempotencyStore };