@crossdelta/cloudevents 0.6.2 → 0.6.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,10 @@
1
1
  import { HTTP } from 'cloudevents';
2
- import { existsSync } from 'fs';
3
- import { dirname, join } from 'path';
2
+ import pluralizeLib from 'pluralize';
3
+ import * as fs from 'fs';
4
+ import { existsSync, readdirSync, statSync } from 'fs';
5
+ import * as path from 'path';
6
+ import { join, dirname } from 'path';
7
+ import { createFlow, createGenerationResult, input, initGenerationContext, trackChange, printGenerationSummary, change, runFlow } from '@crossdelta/flowcore';
4
8
  import { fileURLToPath } from 'url';
5
9
  import { glob } from 'glob';
6
10
  import { z } from 'zod';
@@ -17,9 +21,26 @@ var __export = (target, all) => {
17
21
  };
18
22
 
19
23
  // src/infrastructure/errors.ts
20
- var handleErrorWithContext, createValidationError, createCloudEventParseError;
24
+ var ValidationError, CloudEventParseError, handleErrorWithContext, createValidationError, createCloudEventParseError;
21
25
  var init_errors = __esm({
22
26
  "src/infrastructure/errors.ts"() {
27
+ ValidationError = class extends Error {
28
+ constructor(eventType, details) {
29
+ super(`Validation failed for event type: ${eventType}`);
30
+ this.eventType = eventType;
31
+ this.details = details;
32
+ this.name = "ValidationError";
33
+ }
34
+ type = "ValidationError";
35
+ };
36
+ CloudEventParseError = class extends Error {
37
+ constructor(message, cause) {
38
+ super(message);
39
+ this.cause = cause;
40
+ this.name = "CloudEventParseError";
41
+ }
42
+ type = "CloudEventParseError";
43
+ };
23
44
  handleErrorWithContext = (error, context, additionalInfo) => {
24
45
  const errorInfo = {
25
46
  message: error.message,
@@ -32,17 +53,8 @@ var init_errors = __esm({
32
53
  console.error("[Error Handler]", JSON.stringify(errorInfo, null, 2));
33
54
  return errorInfo;
34
55
  };
35
- createValidationError = (eventType, details) => ({
36
- type: "ValidationError",
37
- eventType,
38
- message: `Validation failed for event type: ${eventType}`,
39
- details
40
- });
41
- createCloudEventParseError = (message, cause) => ({
42
- type: "CloudEventParseError",
43
- message,
44
- cause
45
- });
56
+ createValidationError = (eventType, details) => new ValidationError(eventType, details);
57
+ createCloudEventParseError = (message, cause) => new CloudEventParseError(message, cause);
46
58
  }
47
59
  });
48
60
 
@@ -216,46 +228,842 @@ var init_raw_event = __esm({
216
228
  };
217
229
  }
218
230
  });
231
+ var pluralize, singularize;
232
+ var init_pluralize = __esm({
233
+ "src/utils/pluralize.ts"() {
234
+ pluralize = (word) => pluralizeLib.plural(word);
235
+ singularize = (word) => pluralizeLib.singular(word);
236
+ }
237
+ });
219
238
 
220
- // src/adapters/cloudevents/cloudevents.ts
221
- init_infrastructure();
222
- var hasCloudEventHeaders = (headers) => Object.keys(headers).some((key) => key.toLowerCase().startsWith("ce-"));
223
- var isRecord = (value) => typeof value === "object" && value !== null;
224
- var parseEventFromContext = async (context) => {
225
- try {
226
- const headers = context.req.header();
227
- const rawBody = await context.req.text();
228
- let parsedBody;
229
- if (rawBody?.length) {
239
+ // src/utils/index.ts
240
+ var init_utils = __esm({
241
+ "src/utils/index.ts"() {
242
+ init_pluralize();
243
+ }
244
+ });
245
+
246
+ // src/domain/naming.ts
247
+ var toKebabCase, toPascalCase, validateEventType, isValidEventType, deriveEventNames, getContractPaths, getHandlerPath, getStreamName, parseEventTypeFromHandler, parseEventTypeFromContract, normalizeSubject;
248
+ var init_naming = __esm({
249
+ "src/domain/naming.ts"() {
250
+ init_utils();
251
+ toKebabCase = (str) => str.replace(/\./g, "-").toLowerCase();
252
+ toPascalCase = (str) => str.split(/[-.]/).map((part) => part.charAt(0).toUpperCase() + part.slice(1).toLowerCase()).join("");
253
+ validateEventType = (eventType) => {
254
+ const errors = [];
255
+ if (!eventType) {
256
+ errors.push("Event type is required");
257
+ return { valid: false, errors };
258
+ }
259
+ if (!eventType.includes(".")) {
260
+ errors.push("Event type must include a namespace separator (e.g., order.created)");
261
+ }
262
+ if (eventType !== eventType.toLowerCase()) {
263
+ errors.push("Event type must be lowercase");
264
+ }
265
+ if (eventType.startsWith(".") || eventType.endsWith(".")) {
266
+ errors.push("Event type must not start or end with a dot");
267
+ }
268
+ if (eventType.includes("..")) {
269
+ errors.push("Event type must not contain consecutive dots");
270
+ }
271
+ const validChars = /^[a-z0-9.-]+$/;
272
+ if (!validChars.test(eventType)) {
273
+ errors.push("Event type must only contain lowercase letters, numbers, dots, and dashes");
274
+ }
275
+ return {
276
+ valid: errors.length === 0,
277
+ errors
278
+ };
279
+ };
280
+ isValidEventType = (eventType) => validateEventType(eventType).valid;
281
+ deriveEventNames = (eventType) => {
282
+ const parts = eventType.split(".");
283
+ const namespace = parts[0];
284
+ const action = parts.slice(1).join("-");
285
+ const kebab = toKebabCase(eventType);
286
+ const pascal = toPascalCase(eventType);
287
+ const pluralNamespace = pluralize(namespace);
288
+ const pluralPascal = toPascalCase(`${pluralNamespace}.${action}`);
289
+ return {
290
+ eventType,
291
+ kebab,
292
+ pascal,
293
+ schemaName: `${pascal}Schema`,
294
+ typeName: `${pascal}Data`,
295
+ contractName: `${pluralPascal}Contract`,
296
+ handlerFile: `${kebab}.handler.ts`,
297
+ streamName: pluralNamespace.toUpperCase(),
298
+ domain: pluralNamespace.toLowerCase(),
299
+ action,
300
+ namespace
301
+ };
302
+ };
303
+ getContractPaths = (eventType) => {
304
+ const { domain, action } = deriveEventNames(eventType);
305
+ return {
306
+ relativePath: `events/${domain}/${action}.ts`,
307
+ folder: domain,
308
+ filename: action
309
+ };
310
+ };
311
+ getHandlerPath = (eventType) => {
312
+ const { handlerFile } = deriveEventNames(eventType);
313
+ return `events/${handlerFile}`;
314
+ };
315
+ getStreamName = (eventType) => deriveEventNames(eventType).streamName;
316
+ parseEventTypeFromHandler = (filename) => {
317
+ const match = filename.match(/^(.+)\.handler\.ts$/);
318
+ if (!match) return null;
319
+ const kebab = match[1];
320
+ const parts = kebab.split("-");
321
+ if (parts.length < 2) return null;
322
+ const action = parts.at(-1);
323
+ if (!action) return null;
324
+ const namespace = parts.slice(0, -1).join("-");
325
+ return `${namespace}.${action}`;
326
+ };
327
+ parseEventTypeFromContract = (path2) => {
328
+ const cleanPath = path2.replace(/^events\//, "");
329
+ const match = cleanPath.match(/^([^/]+)\/(.+)\.ts$/);
330
+ if (!match) return null;
331
+ const [, folder, filename] = match;
332
+ const namespace = singularize(folder);
333
+ return `${namespace}.${filename}`;
334
+ };
335
+ normalizeSubject = (eventType) => {
336
+ const { domain, action } = deriveEventNames(eventType);
337
+ return `${domain}.${action}`;
338
+ };
339
+ }
340
+ });
341
+
342
+ // src/effects/types.ts
343
+ var streamWired, contractCreated, handlerCreated;
344
+ var init_types = __esm({
345
+ "src/effects/types.ts"() {
346
+ streamWired = (stream, servicePath) => ({
347
+ kind: "stream.wired",
348
+ stream,
349
+ servicePath
350
+ });
351
+ contractCreated = (path2, eventType) => ({
352
+ kind: "contract.created",
353
+ path: path2,
354
+ eventType
355
+ });
356
+ handlerCreated = (path2, eventType, servicePath) => ({
357
+ kind: "handler.created",
358
+ path: path2,
359
+ eventType,
360
+ servicePath
361
+ });
362
+ }
363
+ });
364
+
365
+ // src/effects/index.ts
366
+ var init_effects = __esm({
367
+ "src/effects/index.ts"() {
368
+ init_types();
369
+ }
370
+ });
371
+ var createNodeFileSystem, createMemoryFileSystem;
372
+ var init_types2 = __esm({
373
+ "src/generators/types.ts"() {
374
+ createNodeFileSystem = () => {
375
+ return {
376
+ readFile: (filePath) => {
377
+ try {
378
+ return fs.readFileSync(filePath, "utf-8");
379
+ } catch {
380
+ return null;
381
+ }
382
+ },
383
+ writeFile: (filePath, content) => {
384
+ const dir = path.dirname(filePath);
385
+ if (!fs.existsSync(dir)) {
386
+ fs.mkdirSync(dir, { recursive: true });
387
+ }
388
+ fs.writeFileSync(filePath, content, "utf-8");
389
+ },
390
+ exists: (filePath) => fs.existsSync(filePath),
391
+ mkdir: (dirPath) => fs.mkdirSync(dirPath, { recursive: true })
392
+ };
393
+ };
394
+ createMemoryFileSystem = (initialFiles = {}) => {
395
+ const files = { ...initialFiles };
396
+ return {
397
+ files,
398
+ readFile: (path2) => files[path2] ?? null,
399
+ writeFile: (path2, content) => {
400
+ files[path2] = content;
401
+ },
402
+ exists: (path2) => path2 in files,
403
+ mkdir: () => {
404
+ }
405
+ };
406
+ };
407
+ }
408
+ });
409
+ var fieldTypeToZod, generateSchemaFields, generateContractContent, getContractFilePath, generateContract;
410
+ var init_contract = __esm({
411
+ "src/generators/contract.ts"() {
412
+ init_naming();
413
+ init_types2();
414
+ fieldTypeToZod = (type, optional) => {
415
+ const baseTypes = {
416
+ string: "z.string()",
417
+ number: "z.number()",
418
+ boolean: "z.boolean()",
419
+ date: "z.string().datetime()",
420
+ datetime: "z.string().datetime()",
421
+ array: "z.array(z.unknown())",
422
+ object: "z.record(z.unknown())"
423
+ };
424
+ const base = baseTypes[type];
425
+ return optional ? `${base}.optional()` : base;
426
+ };
427
+ generateSchemaFields = (fields) => {
428
+ if (!fields || fields.length === 0) {
429
+ return ` id: z.string(),
430
+ createdAt: z.string(),`;
431
+ }
432
+ return fields.map((field) => ` ${field.name}: ${fieldTypeToZod(field.type, field.optional)},`).join("\n");
433
+ };
434
+ generateContractContent = (options) => {
435
+ const { eventType, fields } = options;
436
+ const names = deriveEventNames(eventType);
437
+ const schemaFields = generateSchemaFields(fields);
438
+ const schemaBody = schemaFields ? `{
439
+ ${schemaFields}
440
+ }` : "{}";
441
+ return `import { createContract } from '@crossdelta/cloudevents'
442
+ import { z } from 'zod'
443
+
444
+ export const ${names.schemaName} = z.object(${schemaBody})
445
+
446
+ export const ${names.contractName} = createContract({
447
+ type: '${eventType}',
448
+ channel: { stream: '${names.streamName}' },
449
+ schema: ${names.schemaName},
450
+ })
451
+
452
+ export type ${names.typeName} = z.infer<typeof ${names.contractName}.schema>
453
+ `;
454
+ };
455
+ getContractFilePath = (eventType, basePath) => {
456
+ const paths = getContractPaths(eventType);
457
+ return `${basePath}/${paths.relativePath}`;
458
+ };
459
+ generateContract = (options, fs2 = createNodeFileSystem()) => {
460
+ const filePath = getContractFilePath(options.eventType, options.basePath);
461
+ const content = generateContractContent(options);
462
+ const exists = fs2.exists(filePath);
463
+ if (!exists) {
464
+ fs2.writeFile(filePath, content);
465
+ return change.created(filePath);
466
+ }
467
+ if (options.force) {
468
+ fs2.writeFile(filePath, content);
469
+ return change.updated(filePath);
470
+ }
471
+ return change.skipped("File already exists");
472
+ };
473
+ }
474
+ });
475
+
476
+ // src/generators/exports.ts
477
+ var hasExport, addExportToIndex, ensureDomainExport;
478
+ var init_exports = __esm({
479
+ "src/generators/exports.ts"() {
480
+ init_types2();
481
+ hasExport = (content, exportPath) => {
482
+ const patterns = [
483
+ new RegExp(`^\\s*export \\* from ['"]${exportPath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}['"]`, "m"),
484
+ new RegExp(`^\\s*export \\* from ['"]${exportPath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\.ts['"]`, "m")
485
+ ];
486
+ return patterns.some((p) => p.test(content));
487
+ };
488
+ addExportToIndex = (options, fs2 = createNodeFileSystem()) => {
489
+ const { indexPath, exportPath, force } = options;
490
+ if (!fs2.exists(indexPath)) {
491
+ fs2.writeFile(indexPath, `export * from '${exportPath}'
492
+ `);
493
+ return { added: true, path: indexPath };
494
+ }
495
+ const content = fs2.readFile(indexPath);
496
+ if (!content) {
497
+ fs2.writeFile(indexPath, `export * from '${exportPath}'
498
+ `);
499
+ return { added: true, path: indexPath };
500
+ }
501
+ if (!force && hasExport(content, exportPath)) {
502
+ return { added: false, path: indexPath, alreadyExists: true };
503
+ }
504
+ const commentedExport = new RegExp(`//\\s*export \\* from ['"]${exportPath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}['"]`);
505
+ let newContent;
506
+ if (commentedExport.test(content)) {
507
+ newContent = content.replace(commentedExport, `export * from '${exportPath}'`);
508
+ } else {
509
+ const trimmed = content.trimEnd();
510
+ newContent = trimmed.length > 0 ? `${trimmed}
511
+ export * from '${exportPath}'
512
+ ` : `export * from '${exportPath}'
513
+ `;
514
+ }
515
+ fs2.writeFile(indexPath, newContent);
516
+ return { added: true, path: indexPath };
517
+ };
518
+ ensureDomainExport = (options, fs2 = createNodeFileSystem()) => {
519
+ const results = [];
520
+ const { contractsBasePath, domain, eventFile } = options;
521
+ const eventsIndexPath = `${contractsBasePath}/events/index.ts`;
522
+ const domainExportResult = addExportToIndex(
523
+ {
524
+ indexPath: eventsIndexPath,
525
+ exportPath: `./${domain}`
526
+ },
527
+ fs2
528
+ );
529
+ results.push(domainExportResult);
530
+ const domainIndexPath = `${contractsBasePath}/events/${domain}/index.ts`;
531
+ const eventExportResult = addExportToIndex(
532
+ {
533
+ indexPath: domainIndexPath,
534
+ exportPath: `./${eventFile}`
535
+ },
536
+ fs2
537
+ );
538
+ results.push(eventExportResult);
539
+ const mainIndexPath = `${contractsBasePath}/index.ts`;
540
+ const mainExportResult = addExportToIndex(
541
+ {
542
+ indexPath: mainIndexPath,
543
+ exportPath: "./events"
544
+ },
545
+ fs2
546
+ );
547
+ results.push(mainExportResult);
548
+ return results;
549
+ };
550
+ }
551
+ });
552
+ var generateEventHandlerContent, getHandlerFilePath, generateEventHandler;
553
+ var init_handler = __esm({
554
+ "src/generators/handler.ts"() {
555
+ init_naming();
556
+ init_types2();
557
+ generateEventHandlerContent = (options) => {
558
+ const { eventType, contractsPackage = "@crossdelta/contracts" } = options;
559
+ const names = deriveEventNames(eventType);
560
+ return `import { handleEvent } from '@crossdelta/cloudevents'
561
+ import { ${names.contractName}, type ${names.typeName} } from '${contractsPackage}'
562
+
563
+ export default handleEvent(${names.contractName}, async (data: ${names.typeName}) => {
564
+ // TODO: Implement event handling logic
565
+ console.log('Received ${eventType}:', data)
566
+ })
567
+ `;
568
+ };
569
+ getHandlerFilePath = (eventType, basePath) => {
570
+ const handlerPath = getHandlerPath(eventType);
571
+ return `${basePath}/${handlerPath}`;
572
+ };
573
+ generateEventHandler = (options, fs2 = createNodeFileSystem()) => {
574
+ const filePath = getHandlerFilePath(options.eventType, options.basePath);
575
+ const content = generateEventHandlerContent(options);
576
+ const exists = fs2.exists(filePath);
577
+ if (!exists) {
578
+ fs2.writeFile(filePath, content);
579
+ return change.created(filePath);
580
+ }
581
+ if (options.force) {
582
+ fs2.writeFile(filePath, content);
583
+ return change.updated(filePath);
584
+ }
585
+ return change.skipped("File already exists");
586
+ };
587
+ }
588
+ });
589
+ var fakerInstance, fakerLoadAttempted, tryLoadFaker, getFaker, initFaker, createFieldGenerators, generateByFieldName, generateByFormat, generateByZodType, generateMockValue, generateMockFields, generateMockContent, getMockFilePath, generateMock, generateDefaultMockFields, processMockField, generateJsonMockData, parseSchemaFromSource, getJsonMockPath, jsonMockExists, loadJsonMock, generateJsonMock, generateJsonMockFromContract;
590
+ var init_mock = __esm({
591
+ "src/generators/mock.ts"() {
592
+ init_naming();
593
+ init_types2();
594
+ fakerInstance = null;
595
+ fakerLoadAttempted = false;
596
+ tryLoadFaker = async () => {
597
+ if (fakerLoadAttempted) return fakerInstance;
598
+ fakerLoadAttempted = true;
230
599
  try {
231
- parsedBody = JSON.parse(rawBody);
600
+ const { faker } = await import('@faker-js/faker');
601
+ fakerInstance = faker;
602
+ return faker;
232
603
  } catch {
233
- parsedBody = void 0;
604
+ return null;
234
605
  }
235
- }
236
- const bodyObject = isRecord(parsedBody) ? parsedBody : void 0;
237
- if (bodyObject && "specversion" in bodyObject) {
238
- const { parseStructuredMode: parseStructuredMode2 } = await Promise.resolve().then(() => (init_structured_mode(), structured_mode_exports));
239
- return parseStructuredMode2(bodyObject);
240
- }
241
- if (bodyObject && "message" in bodyObject) {
242
- const { parsePubSubMessage: parsePubSubMessage2 } = await Promise.resolve().then(() => (init_pubsub(), pubsub_exports));
243
- return await parsePubSubMessage2(bodyObject, headers);
244
- }
245
- if (hasCloudEventHeaders(headers)) {
246
- const { parseBinaryMode: parseBinaryMode2 } = await Promise.resolve().then(() => (init_binary_mode(), binary_mode_exports));
247
- return await parseBinaryMode2(headers, rawBody);
248
- }
249
- const { parseRawEvent: parseRawEvent2 } = await Promise.resolve().then(() => (init_raw_event(), raw_event_exports));
250
- if (bodyObject) {
251
- return parseRawEvent2(bodyObject);
252
- }
253
- return parseRawEvent2({ raw: rawBody });
254
- } catch (error) {
255
- logger.error("Failed to parse event:", error);
256
- throw new Error(`Failed to parse CloudEvent: ${error instanceof Error ? error.message : "Unknown error"}`);
606
+ };
607
+ getFaker = () => fakerInstance;
608
+ initFaker = async () => {
609
+ const faker = await tryLoadFaker();
610
+ return faker !== null;
611
+ };
612
+ createFieldGenerators = () => [
613
+ { pattern: "email", generate: () => "user@example.com", fakerGenerate: () => getFaker()?.internet.email(), hint: "email" },
614
+ { pattern: "id", generate: () => crypto.randomUUID(), fakerGenerate: () => getFaker()?.string.uuid(), hint: "uuid" },
615
+ { pattern: "firstname", generate: () => "John", fakerGenerate: () => getFaker()?.person.firstName(), hint: "firstName" },
616
+ { pattern: "lastname", generate: () => "Doe", fakerGenerate: () => getFaker()?.person.lastName(), hint: "lastName" },
617
+ { pattern: "name", generate: () => "John Doe", fakerGenerate: () => getFaker()?.person.fullName(), hint: "fullName" },
618
+ { pattern: "phone", generate: () => "+1-555-123-4567", fakerGenerate: () => getFaker()?.phone.number(), hint: "phoneNumber" },
619
+ { pattern: "address", generate: () => "123 Main St", fakerGenerate: () => getFaker()?.location.streetAddress(), hint: "streetAddress" },
620
+ { pattern: "street", generate: () => "123 Main St", fakerGenerate: () => getFaker()?.location.streetAddress(), hint: "streetAddress" },
621
+ { pattern: "city", generate: () => "New York", fakerGenerate: () => getFaker()?.location.city(), hint: "city" },
622
+ { pattern: "country", generate: () => "USA", fakerGenerate: () => getFaker()?.location.country(), hint: "country" },
623
+ { pattern: "price", generate: () => 99.99, fakerGenerate: () => getFaker()?.number.float({ min: 10, max: 1e3, fractionDigits: 2 }), hint: "price" },
624
+ { pattern: "amount", generate: () => 99.99, fakerGenerate: () => getFaker()?.number.float({ min: 10, max: 1e3, fractionDigits: 2 }), hint: "price" },
625
+ { pattern: "total", generate: () => 99.99, fakerGenerate: () => getFaker()?.number.float({ min: 10, max: 1e3, fractionDigits: 2 }), hint: "price" },
626
+ { pattern: "quantity", generate: () => 1, fakerGenerate: () => getFaker()?.number.int({ min: 1, max: 10 }), hint: "quantity" },
627
+ { pattern: "date", generate: () => (/* @__PURE__ */ new Date()).toISOString(), fakerGenerate: () => getFaker()?.date.recent().toISOString(), hint: "date" },
628
+ { pattern: "createdat", generate: () => (/* @__PURE__ */ new Date()).toISOString(), fakerGenerate: () => getFaker()?.date.recent().toISOString(), hint: "date" },
629
+ { pattern: "updatedat", generate: () => (/* @__PURE__ */ new Date()).toISOString(), fakerGenerate: () => getFaker()?.date.recent().toISOString(), hint: "date" }
630
+ ];
631
+ generateByFieldName = (fieldName, useFaker = false) => {
632
+ const generators = createFieldGenerators();
633
+ const lowerField = fieldName.toLowerCase();
634
+ const matched = generators.find((g) => lowerField.includes(g.pattern));
635
+ if (!matched) return null;
636
+ const value = (useFaker ? matched.fakerGenerate?.() : void 0) ?? matched.generate();
637
+ return { value, hint: matched.hint };
638
+ };
639
+ generateByFormat = (format, useFaker = false) => {
640
+ const faker = getFaker();
641
+ const formatGenerators = {
642
+ email: {
643
+ static: () => "user@example.com",
644
+ faker: faker ? () => faker.internet.email() : void 0
645
+ },
646
+ datetime: {
647
+ static: () => (/* @__PURE__ */ new Date()).toISOString(),
648
+ faker: faker ? () => faker.date.recent().toISOString() : void 0
649
+ },
650
+ url: {
651
+ static: () => "https://example.com",
652
+ faker: faker ? () => faker.internet.url() : void 0
653
+ },
654
+ uuid: {
655
+ static: () => crypto.randomUUID(),
656
+ faker: faker ? () => faker.string.uuid() : void 0
657
+ },
658
+ date: {
659
+ static: () => (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
660
+ faker: faker ? () => faker.date.recent().toISOString().split("T")[0] : void 0
661
+ },
662
+ time: {
663
+ static: () => (/* @__PURE__ */ new Date()).toISOString().split("T")[1]?.split(".")[0],
664
+ faker: faker ? () => faker.date.recent().toISOString().split("T")[1]?.split(".")[0] : void 0
665
+ }
666
+ };
667
+ const gen = formatGenerators[format];
668
+ if (!gen) return "example";
669
+ return useFaker && gen.faker ? gen.faker() : gen.static();
670
+ };
671
+ generateByZodType = (zodType, useFaker = false) => {
672
+ const faker = getFaker();
673
+ const typeGenerators = {
674
+ string: {
675
+ static: () => "example",
676
+ faker: faker ? () => faker.lorem.word() : void 0
677
+ },
678
+ number: {
679
+ static: () => 42,
680
+ faker: faker ? () => faker.number.int({ min: 1, max: 100 }) : void 0
681
+ },
682
+ boolean: {
683
+ static: () => true,
684
+ faker: faker ? () => faker.datatype.boolean() : void 0
685
+ }
686
+ };
687
+ const gen = typeGenerators[zodType];
688
+ if (!gen) return void 0;
689
+ return useFaker && gen.faker ? gen.faker() : gen.static();
690
+ };
691
+ generateMockValue = (type) => {
692
+ const mockValues = {
693
+ string: "'mock-string'",
694
+ number: "42",
695
+ boolean: "true",
696
+ date: "new Date().toISOString()",
697
+ datetime: "new Date().toISOString()",
698
+ array: "[]",
699
+ object: "{}"
700
+ };
701
+ return mockValues[type];
702
+ };
703
+ generateMockFields = (fields) => {
704
+ if (!fields || fields.length === 0) {
705
+ return ` id: 'mock-id',
706
+ createdAt: new Date().toISOString(),`;
707
+ }
708
+ return fields.filter((field) => !field.optional).map((field) => ` ${field.name}: ${generateMockValue(field.type)},`).join("\n");
709
+ };
710
+ generateMockContent = (options) => {
711
+ const { eventType, fields } = options;
712
+ const names = deriveEventNames(eventType);
713
+ return `import type { ${names.typeName} } from '../${names.domain}/${names.action}'
714
+
715
+ export const mock${names.pascal}: ${names.typeName} = {
716
+ ${generateMockFields(fields)}
717
+ }
718
+
719
+ export const create${names.pascal}Mock = (overrides: Partial<${names.typeName}> = {}): ${names.typeName} => ({
720
+ ...mock${names.pascal},
721
+ ...overrides,
722
+ })
723
+ `;
724
+ };
725
+ getMockFilePath = (eventType, basePath) => {
726
+ const names = deriveEventNames(eventType);
727
+ return `${basePath}/${names.kebab}.mock.ts`;
728
+ };
729
+ generateMock = (options, fs2 = createNodeFileSystem()) => {
730
+ const filePath = getMockFilePath(options.eventType, options.basePath);
731
+ const content = generateMockContent(options);
732
+ const exists = fs2.exists(filePath);
733
+ if (!exists) {
734
+ fs2.writeFile(filePath, content);
735
+ return change.created(filePath);
736
+ }
737
+ if (options.force) {
738
+ fs2.writeFile(filePath, content);
739
+ return change.updated(filePath);
740
+ }
741
+ return change.skipped("File already exists");
742
+ };
743
+ generateDefaultMockFields = (useFaker) => ({
744
+ data: {
745
+ id: (useFaker ? getFaker()?.string.uuid() : void 0) ?? crypto.randomUUID(),
746
+ createdAt: (useFaker ? getFaker()?.date.recent().toISOString() : void 0) ?? (/* @__PURE__ */ new Date()).toISOString()
747
+ },
748
+ faker: { id: "uuid", createdAt: "date" }
749
+ });
750
+ processMockField = (field, useFaker, data, fakerHints) => {
751
+ const byName = generateByFieldName(field.name, useFaker);
752
+ if (byName) {
753
+ data[field.name] = byName.value;
754
+ if (byName.hint) fakerHints[field.name] = byName.hint;
755
+ } else {
756
+ data[field.name] = generateByZodType(field.type, useFaker) ?? "example";
757
+ }
758
+ };
759
+ generateJsonMockData = (options = {}) => {
760
+ const { fields, useFaker = false } = options;
761
+ if (!fields || fields.length === 0) {
762
+ return generateDefaultMockFields(useFaker);
763
+ }
764
+ const data = {};
765
+ const fakerHints = {};
766
+ for (const field of fields) {
767
+ if (!field.optional) processMockField(field, useFaker, data, fakerHints);
768
+ }
769
+ return { data, faker: fakerHints };
770
+ };
771
+ parseSchemaFromSource = (schemaBody, useFaker = false) => {
772
+ const data = {};
773
+ const fakerHints = {};
774
+ const fieldRegex = /(\w+):\s*z\.(\w+)\(/g;
775
+ const matches = Array.from(schemaBody.matchAll(fieldRegex));
776
+ for (const [, fieldName, zodType] of matches) {
777
+ if (zodType === "array" || zodType === "object") continue;
778
+ const byName = generateByFieldName(fieldName, useFaker);
779
+ if (byName) {
780
+ data[fieldName] = byName.value;
781
+ if (byName.hint) fakerHints[fieldName] = byName.hint;
782
+ } else {
783
+ data[fieldName] = generateByZodType(zodType, useFaker) ?? "example";
784
+ }
785
+ }
786
+ const chainedRegex = /(\w+):\s*z\.string\(\)\.(datetime|email|url|uuid)\(\)/g;
787
+ const chainedMatches = Array.from(schemaBody.matchAll(chainedRegex));
788
+ for (const [, fieldName, method] of chainedMatches) {
789
+ data[fieldName] = generateByFormat(method, useFaker);
790
+ fakerHints[fieldName] = method === "datetime" ? "date" : method;
791
+ }
792
+ return { data, faker: fakerHints };
793
+ };
794
+ getJsonMockPath = (eventType, contractsPath) => {
795
+ const names = deriveEventNames(eventType);
796
+ return `${contractsPath}/events/${names.domain}/${names.action}.mock.json`;
797
+ };
798
+ jsonMockExists = (eventType, contractsPath, fs2 = createNodeFileSystem()) => fs2.exists(getJsonMockPath(eventType, contractsPath));
799
+ loadJsonMock = (eventType, contractsPath, fs2 = createNodeFileSystem()) => {
800
+ const mockPath = getJsonMockPath(eventType, contractsPath);
801
+ const content = fs2.readFile(mockPath);
802
+ if (!content) return null;
803
+ try {
804
+ return JSON.parse(content);
805
+ } catch {
806
+ return null;
807
+ }
808
+ };
809
+ generateJsonMock = (options, fs2 = createNodeFileSystem()) => {
810
+ const { eventType, contractsPath, fields, force, useFaker = false } = options;
811
+ const mockPath = getJsonMockPath(eventType, contractsPath);
812
+ const exists = fs2.exists(mockPath);
813
+ const { data, faker: fakerHints } = generateJsonMockData({ fields, useFaker });
814
+ const mockData = {
815
+ eventName: eventType,
816
+ description: `Mock data for ${eventType} event`,
817
+ data
818
+ };
819
+ if (Object.keys(fakerHints).length > 0) {
820
+ mockData.faker = fakerHints;
821
+ }
822
+ const content = JSON.stringify(mockData, null, 2);
823
+ if (!exists) {
824
+ fs2.writeFile(mockPath, content);
825
+ return change.created(mockPath);
826
+ }
827
+ if (force) {
828
+ fs2.writeFile(mockPath, content);
829
+ return change.updated(mockPath);
830
+ }
831
+ return change.skipped("File already exists");
832
+ };
833
+ generateJsonMockFromContract = (eventType, contractsPath, options = {}, fs2 = createNodeFileSystem()) => {
834
+ const { useFaker = false } = options;
835
+ const names = deriveEventNames(eventType);
836
+ const contractPath = `${contractsPath}/events/${names.domain}/${names.action}.ts`;
837
+ const contractContent = fs2.readFile(contractPath);
838
+ if (!contractContent) return null;
839
+ const schemaMatch = contractContent.match(/export const \w+Schema\s*=\s*z\.object\(\s*\{([\s\S]*?)\}\s*\)/m);
840
+ if (!schemaMatch) return null;
841
+ const { data, faker: fakerHints } = parseSchemaFromSource(schemaMatch[1], useFaker);
842
+ const mockPath = getJsonMockPath(eventType, contractsPath);
843
+ const mockData = {
844
+ eventName: eventType,
845
+ description: `Mock data for ${eventType} event`,
846
+ data
847
+ };
848
+ if (Object.keys(fakerHints).length > 0) {
849
+ mockData.faker = fakerHints;
850
+ }
851
+ const content = JSON.stringify(mockData, null, 2);
852
+ fs2.writeFile(mockPath, content);
853
+ return change.created(mockPath);
854
+ };
257
855
  }
258
- };
856
+ });
857
+
858
+ // src/generators/index.ts
859
+ var generators_exports = {};
860
+ __export(generators_exports, {
861
+ addExportToIndex: () => addExportToIndex,
862
+ createMemoryFileSystem: () => createMemoryFileSystem,
863
+ createNodeFileSystem: () => createNodeFileSystem,
864
+ ensureDomainExport: () => ensureDomainExport,
865
+ generateContract: () => generateContract,
866
+ generateContractContent: () => generateContractContent,
867
+ generateEventHandler: () => generateEventHandler,
868
+ generateEventHandlerContent: () => generateEventHandlerContent,
869
+ generateJsonMock: () => generateJsonMock,
870
+ generateJsonMockFromContract: () => generateJsonMockFromContract,
871
+ generateMock: () => generateMock,
872
+ generateMockContent: () => generateMockContent,
873
+ getContractFilePath: () => getContractFilePath,
874
+ getHandlerFilePath: () => getHandlerFilePath,
875
+ getJsonMockPath: () => getJsonMockPath,
876
+ getMockFilePath: () => getMockFilePath,
877
+ initFaker: () => initFaker,
878
+ jsonMockExists: () => jsonMockExists,
879
+ loadJsonMock: () => loadJsonMock
880
+ });
881
+ var init_generators = __esm({
882
+ "src/generators/index.ts"() {
883
+ init_contract();
884
+ init_exports();
885
+ init_handler();
886
+ init_mock();
887
+ init_types2();
888
+ }
889
+ });
890
+
891
+ // src/flows/create-event.flow.ts
892
+ var create_event_flow_exports = {};
893
+ __export(create_event_flow_exports, {
894
+ createEventFlowSteps: () => createEventFlowSteps,
895
+ parseFieldsInput: () => parseFieldsInput
896
+ });
897
+ var parseField, parseFieldsInput, generateContractTask, addExportsTask, generateMockTask, generateHandlerTask, flow, isInvalidEventType, getEventTypeErrorMessage, ensureInfraStep, ensureGenerationStep, eventTypeStep, validateEventTypeStep, schemaFieldsStep, serviceSelectionStep, generateFilesStep, printSummary, printSummaryStep, createEventFlowSteps;
898
+ var init_create_event_flow = __esm({
899
+ "src/flows/create-event.flow.ts"() {
900
+ init_naming();
901
+ init_effects();
902
+ init_generators();
903
+ parseField = (field) => {
904
+ const [nameWithOptional, type = "string"] = field.split(":").map((s) => s.trim());
905
+ const optional = nameWithOptional.endsWith("?");
906
+ const name = optional ? nameWithOptional.slice(0, -1) : nameWithOptional;
907
+ return { name, type, optional };
908
+ };
909
+ parseFieldsInput = (input2) => input2.trim() === "" ? [] : input2.split(",").map((f) => f.trim()).filter(Boolean).map(parseField);
910
+ generateContractTask = (ctx) => {
911
+ const generation = initGenerationContext(ctx);
912
+ const result = generateContract(
913
+ {
914
+ eventType: ctx.eventType,
915
+ basePath: ctx.config.contractsPath,
916
+ fields: ctx.fields ?? [],
917
+ force: ctx.force ?? false
918
+ },
919
+ ctx._fs
920
+ );
921
+ trackChange(result).record(generation.artifacts, "Contract").onCreated((path2) => {
922
+ generation.addEffect(contractCreated(path2, ctx.eventType));
923
+ });
924
+ };
925
+ addExportsTask = (ctx) => {
926
+ const names = deriveEventNames(ctx.eventType);
927
+ ensureDomainExport(
928
+ {
929
+ contractsBasePath: ctx.config.contractsPath,
930
+ domain: names.domain,
931
+ eventFile: names.action
932
+ },
933
+ ctx._fs
934
+ );
935
+ };
936
+ generateMockTask = (ctx) => {
937
+ const generation = initGenerationContext(ctx);
938
+ const result = generateJsonMock(
939
+ {
940
+ eventType: ctx.eventType,
941
+ contractsPath: ctx.config.contractsPath,
942
+ fields: ctx.fields ?? [],
943
+ force: ctx.force ?? false
944
+ },
945
+ ctx._fs
946
+ );
947
+ trackChange(result).record(generation.artifacts, "Mock");
948
+ };
949
+ generateHandlerTask = (ctx) => {
950
+ if (!ctx.servicePath) return;
951
+ const generation = initGenerationContext(ctx);
952
+ const names = deriveEventNames(ctx.eventType);
953
+ const handlerBasePath = ctx.servicePath.endsWith("/src") ? ctx.servicePath : `${ctx.servicePath}/src`;
954
+ const result = generateEventHandler(
955
+ {
956
+ eventType: ctx.eventType,
957
+ basePath: handlerBasePath,
958
+ contractsPackage: ctx.config.contractsPackage ?? "",
959
+ force: ctx.force ?? false
960
+ },
961
+ ctx._fs
962
+ );
963
+ trackChange(result).record(generation.artifacts, "Handler").onCreated((path2) => {
964
+ if (ctx.servicePath) {
965
+ generation.addEffect(handlerCreated(path2, ctx.eventType, ctx.servicePath));
966
+ generation.addEffect(streamWired(names.streamName, ctx.servicePath));
967
+ }
968
+ });
969
+ };
970
+ flow = createFlow();
971
+ isInvalidEventType = (ctx) => !isValidEventType(ctx.eventType);
972
+ getEventTypeErrorMessage = (ctx) => `Invalid event type: ${validateEventType(ctx.eventType).errors.join(", ")}`;
973
+ ensureInfraStep = flow.ensure("_fs", (ctx) => ctx.config.fs ?? createNodeFileSystem()).build();
974
+ ensureGenerationStep = flow.ensure("_generation", () => createGenerationResult()).build();
975
+ eventTypeStep = flow.input("eventType").prompt({
976
+ title: "Event Type",
977
+ message: "Enter event type (e.g., order.created):"
978
+ }).validateBy(validateEventType).skipIfSet().build();
979
+ validateEventTypeStep = flow.abortIf(isInvalidEventType, getEventTypeErrorMessage).build();
980
+ schemaFieldsStep = input("fieldsInput").prompt({
981
+ title: "Schema Fields",
982
+ message: "Schema fields (e.g., orderId:string,total:number) or leave empty:"
983
+ }).mapTo("fields", parseFieldsInput).skipWhen((ctx) => ctx.fields !== void 0).build();
984
+ serviceSelectionStep = flow.select("servicePath").title("Service Selection").prompt("Select service to create handler in:").choicesFrom((ctx) => (ctx.availableServices ?? []).map((s) => ({ name: s, value: s }))).optional("Skip handler creation").skipIfSet().build();
985
+ generateFilesStep = flow.task("Generate files").steps([generateContractTask, addExportsTask, generateMockTask, generateHandlerTask]).produces("fields", "_generation").build();
986
+ printSummary = (ctx) => {
987
+ printGenerationSummary(initGenerationContext(ctx));
988
+ };
989
+ printSummaryStep = flow.task("Print summary").steps([printSummary]).build();
990
+ createEventFlowSteps = [
991
+ ensureInfraStep,
992
+ ensureGenerationStep,
993
+ eventTypeStep,
994
+ validateEventTypeStep,
995
+ schemaFieldsStep,
996
+ serviceSelectionStep,
997
+ generateFilesStep,
998
+ printSummaryStep
999
+ ];
1000
+ }
1001
+ });
1002
+
1003
+ // src/flows/list-events.flow.ts
1004
+ var list_events_flow_exports = {};
1005
+ __export(list_events_flow_exports, {
1006
+ discoverEventTypes: () => discoverEventTypes,
1007
+ listEventsFlowSteps: () => listEventsFlowSteps
1008
+ });
1009
+ var isContractFile, makeRelativePath, safeReadDir, safeStat, processEntry, findContractFiles, parseEventType, matchesPattern, discoverEventTypes, flow2, listEventsFlowSteps;
1010
+ var init_list_events_flow = __esm({
1011
+ "src/flows/list-events.flow.ts"() {
1012
+ init_naming();
1013
+ isContractFile = (filename) => filename.endsWith(".ts") && !filename.endsWith(".test.ts") && filename !== "index.ts";
1014
+ makeRelativePath = (contractsPath, fullPath) => fullPath.replace(`${contractsPath}/`, "").replace(/\\/g, "/");
1015
+ safeReadDir = (dir) => {
1016
+ try {
1017
+ return readdirSync(dir);
1018
+ } catch {
1019
+ return [];
1020
+ }
1021
+ };
1022
+ safeStat = (path2) => {
1023
+ try {
1024
+ const stat = statSync(path2);
1025
+ return { isDirectory: stat.isDirectory() };
1026
+ } catch {
1027
+ return null;
1028
+ }
1029
+ };
1030
+ processEntry = (dir, entry, contractsPath) => {
1031
+ const fullPath = join(dir, entry);
1032
+ const stat = safeStat(fullPath);
1033
+ if (!stat) return [];
1034
+ if (stat.isDirectory) {
1035
+ return findContractFiles(fullPath, contractsPath);
1036
+ }
1037
+ if (isContractFile(entry)) {
1038
+ return [makeRelativePath(contractsPath, fullPath)];
1039
+ }
1040
+ return [];
1041
+ };
1042
+ findContractFiles = (dir, contractsPath) => safeReadDir(dir).flatMap((entry) => processEntry(dir, entry, contractsPath));
1043
+ parseEventType = (filePath) => parseEventTypeFromContract(filePath);
1044
+ matchesPattern = (pattern) => (eventType) => !pattern || pattern.trim() === "" || eventType.includes(pattern);
1045
+ discoverEventTypes = (contractsPath, pattern) => {
1046
+ const eventsDir = join(contractsPath, "events");
1047
+ if (!existsSync(eventsDir)) {
1048
+ return [];
1049
+ }
1050
+ return findContractFiles(eventsDir, contractsPath).map(parseEventType).filter((eventType) => eventType !== null).filter(matchesPattern(pattern)).sort();
1051
+ };
1052
+ flow2 = createFlow();
1053
+ listEventsFlowSteps = [
1054
+ flow2.input("pattern").prompt({
1055
+ title: "Filter Pattern",
1056
+ message: "Filter by pattern (leave empty for all):"
1057
+ }).skipWhen((ctx) => ctx.pattern != null).build(),
1058
+ flow2.task("Discover events").steps([
1059
+ (ctx) => {
1060
+ const filterPattern = ctx.pattern && ctx.pattern.trim() !== "" ? ctx.pattern : void 0;
1061
+ ctx.eventTypes = discoverEventTypes(ctx.config.contractsPath, filterPattern);
1062
+ }
1063
+ ]).produces("eventTypes").build()
1064
+ ];
1065
+ }
1066
+ });
259
1067
 
260
1068
  // src/domain/contract-helper.ts
261
1069
  function createContract(options) {
@@ -269,162 +1077,173 @@ function createContract(options) {
269
1077
  channel: resolvedChannel
270
1078
  };
271
1079
  }
272
-
273
- // src/domain/discovery.ts
274
- init_infrastructure();
1080
+ var init_contract_helper = __esm({
1081
+ "src/domain/contract-helper.ts"() {
1082
+ }
1083
+ });
275
1084
 
276
1085
  // src/domain/validation.ts
277
- var hasShape = (schema) => "shape" in schema && schema.shape !== null && schema.shape !== void 0 && typeof schema.shape === "object";
278
- var hasTypeField = (shape) => "type" in shape && shape.type !== null && shape.type !== void 0 && typeof shape.type === "object" && "value" in shape.type;
279
- var extractTypeValue = (typeField) => typeof typeField.value === "string" ? typeField.value : void 0;
280
- var safeExtractType = (schema) => {
281
- if (!hasShape(schema)) return void 0;
282
- if (!hasTypeField(schema.shape)) return void 0;
283
- return extractTypeValue(schema.shape.type);
284
- };
285
1086
  function isValidHandler(value) {
286
1087
  if (typeof value !== "function") return false;
287
1088
  const handler = value;
288
1089
  return !!(handler.__eventarcMetadata || handler.prototype?.handle);
289
1090
  }
290
- var extractTypeFromSchema = (schema) => {
291
- try {
292
- return safeExtractType(schema);
293
- } catch {
294
- return void 0;
295
- }
296
- };
297
-
298
- // src/domain/discovery.ts
299
- var getSearchDirectories = () => {
300
- const directories = /* @__PURE__ */ new Set();
301
- directories.add(process.cwd());
302
- try {
303
- const currentDir = dirname(fileURLToPath(import.meta.url));
304
- directories.add(currentDir);
305
- if (currentDir.includes("/dist/")) {
306
- const pkgRoot = currentDir.split("/dist/")[0];
307
- if (pkgRoot) directories.add(pkgRoot);
308
- }
309
- } catch {
310
- }
311
- return [...directories].filter((dir) => existsSync(dir));
312
- };
313
- var loadHandlers = async (filePath, filter) => {
314
- try {
315
- if (!existsSync(filePath)) {
316
- return [];
317
- }
318
- const module = await import(filePath);
319
- return Object.entries(module).filter(([name, handler]) => isValidHandler(handler) && (!filter || filter(name, handler))).map(([name, handler]) => {
320
- const HandlerClass = handler;
321
- const hasNoMeaningfulName = !HandlerClass.name || HandlerClass.name === "" || HandlerClass.name === "HandlerClass";
322
- const isNamedExport = name !== "default";
323
- if (hasNoMeaningfulName && isNamedExport) {
324
- return Object.defineProperty(HandlerClass, "name", { value: name, configurable: true });
325
- }
326
- return HandlerClass;
327
- });
328
- } catch (error) {
329
- logger?.warn(`Failed to load ${filePath}`);
330
- logger?.warn(`Error type: ${typeof error}`);
331
- logger?.warn(`Error constructor: ${error?.constructor?.name}`);
332
- if (error instanceof Error) {
333
- logger?.warn(`Error message: ${error.message}`);
334
- if (error.stack) {
335
- logger?.warn(`Stack trace:
336
- ${error.stack}`);
337
- }
338
- } else {
339
- logger?.warn(`Error value: ${String(error)}`);
1091
+ var hasShape, hasTypeField, extractTypeValue, safeExtractType, extractTypeFromSchema;
1092
+ var init_validation = __esm({
1093
+ "src/domain/validation.ts"() {
1094
+ hasShape = (schema) => "shape" in schema && schema.shape !== null && schema.shape !== void 0 && typeof schema.shape === "object";
1095
+ hasTypeField = (shape) => "type" in shape && shape.type !== null && shape.type !== void 0 && typeof shape.type === "object" && "value" in shape.type;
1096
+ extractTypeValue = (typeField) => typeof typeField.value === "string" ? typeField.value : void 0;
1097
+ safeExtractType = (schema) => {
1098
+ if (!hasShape(schema)) return void 0;
1099
+ if (!hasTypeField(schema.shape)) return void 0;
1100
+ return extractTypeValue(schema.shape.type);
1101
+ };
1102
+ extractTypeFromSchema = (schema) => {
340
1103
  try {
341
- logger?.warn(`Error JSON: ${JSON.stringify(error, null, 2)}`);
1104
+ return safeExtractType(schema);
342
1105
  } catch {
343
- logger?.warn("(Error is not JSON-serializable)");
1106
+ return void 0;
344
1107
  }
345
- }
346
- return [];
347
- }
348
- };
349
- var deduplicateHandlers = (handlers) => handlers.reduce((acc, handler) => {
350
- const existing = acc.find((h) => h.name === handler.name);
351
- if (!existing) {
352
- acc.push(handler);
353
- }
354
- return acc;
355
- }, []);
356
- var EXTENSION_FALLBACKS = ["js", "mjs", "cjs"];
357
- var expandPatternVariants = (pattern, preferCompiled) => {
358
- if (!/\.tsx?\b/.test(pattern)) {
359
- return [pattern];
360
- }
361
- const basePattern = pattern;
362
- const compiledVariants = EXTENSION_FALLBACKS.map((ext) => basePattern.replace(/\.tsx?\b/g, `.${ext}`));
363
- const ordered = preferCompiled ? [...compiledVariants, basePattern] : [basePattern, ...compiledVariants];
364
- return [...new Set(ordered)];
365
- };
366
- var shouldScanDirectory = (basePath, variant) => {
367
- if (!existsSync(basePath)) {
368
- return false;
369
- }
370
- const dirPrefix = variant.match(/^([^*{[]+)\//)?.[1];
371
- if (dirPrefix) {
372
- const fullDirPath = join(basePath, dirPrefix);
373
- return existsSync(fullDirPath);
1108
+ };
374
1109
  }
375
- return true;
376
- };
377
- var discoverFiles = async (pattern, basePath, preferCompiled) => {
378
- const isTestPattern = pattern.startsWith("test/");
379
- const prefixedPattern = isTestPattern ? pattern : `{src,dist,build,lib,out}/${pattern}`;
380
- const patterns = preferCompiled ? [prefixedPattern, pattern] : [pattern, prefixedPattern];
381
- const allFiles = [];
382
- for (const globPattern of patterns) {
383
- const variants = expandPatternVariants(globPattern, preferCompiled);
384
- for (const variant of variants) {
1110
+ });
1111
+ var getSearchDirectories, loadHandlers, deduplicateHandlers, EXTENSION_FALLBACKS, expandPatternVariants, shouldScanDirectory, discoverFiles, discoverHandlers;
1112
+ var init_discovery = __esm({
1113
+ "src/domain/discovery.ts"() {
1114
+ init_infrastructure();
1115
+ init_validation();
1116
+ getSearchDirectories = () => {
1117
+ const directories = /* @__PURE__ */ new Set();
1118
+ directories.add(process.cwd());
385
1119
  try {
386
- if (!shouldScanDirectory(basePath, variant)) {
387
- continue;
1120
+ const currentDir = dirname(fileURLToPath(import.meta.url));
1121
+ directories.add(currentDir);
1122
+ if (currentDir.includes("/dist/")) {
1123
+ const pkgRoot = currentDir.split("/dist/")[0];
1124
+ if (pkgRoot) directories.add(pkgRoot);
388
1125
  }
389
- const files = await glob(variant, {
390
- cwd: basePath,
391
- absolute: true,
392
- nodir: true,
393
- windowsPathsNoEscape: true
394
- });
395
- const existingFiles = files.filter((f) => existsSync(f));
396
- allFiles.push(...existingFiles);
397
1126
  } catch {
398
1127
  }
399
- }
400
- }
401
- return [...new Set(allFiles)];
402
- };
403
- var discoverHandlers = async (pattern, options = {}) => {
404
- const { filter } = options;
405
- const searchDirectories = getSearchDirectories();
406
- const preferCompiled = searchDirectories.some((dir) => dir.includes("/dist/") || dir.includes("\\dist\\"));
407
- try {
408
- const uniqueFiles = /* @__PURE__ */ new Set();
409
- for (const basePath of searchDirectories) {
410
- const discoveredFiles = await discoverFiles(pattern, basePath, preferCompiled);
411
- for (const file of discoveredFiles) {
412
- uniqueFiles.add(file);
1128
+ return [...directories].filter((dir) => existsSync(dir));
1129
+ };
1130
+ loadHandlers = async (filePath, filter) => {
1131
+ try {
1132
+ if (!existsSync(filePath)) {
1133
+ return [];
1134
+ }
1135
+ const module = await import(filePath);
1136
+ return Object.entries(module).filter(([name, handler]) => isValidHandler(handler) && (!filter || filter(name, handler))).map(([name, handler]) => {
1137
+ const HandlerClass = handler;
1138
+ const hasNoMeaningfulName = !HandlerClass.name || HandlerClass.name === "" || HandlerClass.name === "HandlerClass";
1139
+ const isNamedExport = name !== "default";
1140
+ if (hasNoMeaningfulName && isNamedExport) {
1141
+ return Object.defineProperty(HandlerClass, "name", { value: name, configurable: true });
1142
+ }
1143
+ return HandlerClass;
1144
+ });
1145
+ } catch (error) {
1146
+ logger?.warn(`Failed to load ${filePath}`);
1147
+ logger?.warn(`Error type: ${typeof error}`);
1148
+ logger?.warn(`Error constructor: ${error?.constructor?.name}`);
1149
+ if (error instanceof Error) {
1150
+ logger?.warn(`Error message: ${error.message}`);
1151
+ if (error.stack) {
1152
+ logger?.warn(`Stack trace:
1153
+ ${error.stack}`);
1154
+ }
1155
+ } else {
1156
+ logger?.warn(`Error value: ${String(error)}`);
1157
+ try {
1158
+ logger?.warn(`Error JSON: ${JSON.stringify(error, null, 2)}`);
1159
+ } catch {
1160
+ logger?.warn("(Error is not JSON-serializable)");
1161
+ }
1162
+ }
1163
+ return [];
413
1164
  }
414
- }
415
- if (uniqueFiles.size === 0) {
416
- logger.warn(`No files found matching pattern: ${pattern}`);
417
- return [];
418
- }
419
- const handlers = await Promise.all([...uniqueFiles].map((file) => loadHandlers(file, filter)));
420
- const flatHandlers = handlers.flat();
421
- const uniqueHandlers = deduplicateHandlers(flatHandlers);
422
- return uniqueHandlers;
423
- } catch (error) {
424
- logger.error("Discovery failed:", error);
425
- return [];
1165
+ };
1166
+ deduplicateHandlers = (handlers) => handlers.reduce((acc, handler) => {
1167
+ const existing = acc.find((h) => h.name === handler.name);
1168
+ if (!existing) {
1169
+ acc.push(handler);
1170
+ }
1171
+ return acc;
1172
+ }, []);
1173
+ EXTENSION_FALLBACKS = ["js", "mjs", "cjs"];
1174
+ expandPatternVariants = (pattern, preferCompiled) => {
1175
+ if (!/\.tsx?\b/.test(pattern)) {
1176
+ return [pattern];
1177
+ }
1178
+ const basePattern = pattern;
1179
+ const compiledVariants = EXTENSION_FALLBACKS.map((ext) => basePattern.replace(/\.tsx?\b/g, `.${ext}`));
1180
+ const ordered = preferCompiled ? [...compiledVariants, basePattern] : [basePattern, ...compiledVariants];
1181
+ return [...new Set(ordered)];
1182
+ };
1183
+ shouldScanDirectory = (basePath, variant) => {
1184
+ if (!existsSync(basePath)) {
1185
+ return false;
1186
+ }
1187
+ const dirPrefix = variant.match(/^([^*{[]+)\//)?.[1];
1188
+ if (dirPrefix) {
1189
+ const fullDirPath = join(basePath, dirPrefix);
1190
+ return existsSync(fullDirPath);
1191
+ }
1192
+ return true;
1193
+ };
1194
+ discoverFiles = async (pattern, basePath, preferCompiled) => {
1195
+ const isTestPattern = pattern.startsWith("test/");
1196
+ const prefixedPattern = isTestPattern ? pattern : `{src,dist,build,lib,out}/${pattern}`;
1197
+ const patterns = preferCompiled ? [prefixedPattern, pattern] : [pattern, prefixedPattern];
1198
+ const allFiles = [];
1199
+ for (const globPattern of patterns) {
1200
+ const variants = expandPatternVariants(globPattern, preferCompiled);
1201
+ for (const variant of variants) {
1202
+ try {
1203
+ if (!shouldScanDirectory(basePath, variant)) {
1204
+ continue;
1205
+ }
1206
+ const files = await glob(variant, {
1207
+ cwd: basePath,
1208
+ absolute: true,
1209
+ nodir: true,
1210
+ windowsPathsNoEscape: true
1211
+ });
1212
+ const existingFiles = files.filter((f) => existsSync(f));
1213
+ allFiles.push(...existingFiles);
1214
+ } catch {
1215
+ }
1216
+ }
1217
+ }
1218
+ return [...new Set(allFiles)];
1219
+ };
1220
+ discoverHandlers = async (pattern, options = {}) => {
1221
+ const { filter } = options;
1222
+ const searchDirectories = getSearchDirectories();
1223
+ const preferCompiled = searchDirectories.some((dir) => dir.includes("/dist/") || dir.includes("\\dist\\"));
1224
+ try {
1225
+ const uniqueFiles = /* @__PURE__ */ new Set();
1226
+ for (const basePath of searchDirectories) {
1227
+ const discoveredFiles = await discoverFiles(pattern, basePath, preferCompiled);
1228
+ for (const file of discoveredFiles) {
1229
+ uniqueFiles.add(file);
1230
+ }
1231
+ }
1232
+ if (uniqueFiles.size === 0) {
1233
+ logger.warn(`No files found matching pattern: ${pattern}`);
1234
+ return [];
1235
+ }
1236
+ const handlers = await Promise.all([...uniqueFiles].map((file) => loadHandlers(file, filter)));
1237
+ const flatHandlers = handlers.flat();
1238
+ const uniqueHandlers = deduplicateHandlers(flatHandlers);
1239
+ return uniqueHandlers;
1240
+ } catch (error) {
1241
+ logger.error("Discovery failed:", error);
1242
+ return [];
1243
+ }
1244
+ };
426
1245
  }
427
- };
1246
+ });
428
1247
  function extractFromValueGetter(typeField) {
429
1248
  if ("value" in typeField && typeof typeField.value === "string") {
430
1249
  return typeField.value;
@@ -518,160 +1337,353 @@ function handleEvent(schemaOrOptions, handler, eventType) {
518
1337
  function eventSchema(schema) {
519
1338
  return z.object(schema);
520
1339
  }
521
-
522
- // src/middlewares/cloudevents-middleware.ts
523
- init_infrastructure();
524
-
525
- // src/processing/dlq-safe.ts
526
- init_infrastructure();
527
- init_infrastructure();
528
-
529
- // src/utils/pluralize.ts
530
- var pluralize = (word) => {
531
- if (word.endsWith("y") && !["a", "e", "i", "o", "u"].includes(word[word.length - 2])) {
532
- return `${word.slice(0, -1)}ies`;
533
- }
534
- if (word.endsWith("s") || word.endsWith("sh") || word.endsWith("ch") || word.endsWith("x")) {
535
- return `${word}es`;
1340
+ var init_handler_factory = __esm({
1341
+ "src/domain/handler-factory.ts"() {
536
1342
  }
537
- return `${word}s`;
538
- };
539
- var singularize = (word) => {
540
- if (word.endsWith("ies")) {
541
- return `${word.slice(0, -3)}y`;
542
- }
543
- if (word.endsWith("xes") || word.endsWith("shes") || word.endsWith("ches") || word.endsWith("ses")) {
544
- return word.slice(0, -2);
545
- }
546
- if (word.endsWith("s")) {
547
- return word.slice(0, -1);
1343
+ });
1344
+
1345
+ // src/domain/index.ts
1346
+ var init_domain = __esm({
1347
+ "src/domain/index.ts"() {
1348
+ init_contract_helper();
1349
+ init_discovery();
1350
+ init_handler_factory();
1351
+ init_naming();
1352
+ init_validation();
548
1353
  }
549
- return word;
550
- };
1354
+ });
551
1355
 
552
1356
  // src/publishing/nats.publisher.ts
553
- var sc = StringCodec();
554
- var natsConnectionPromise = null;
555
- var deriveSubjectFromEventType = (eventType) => {
556
- const parts = eventType.split(".");
557
- if (parts.length < 2) return eventType;
558
- const domain = parts[0];
559
- const action = parts.slice(1).join(".");
560
- const pluralDomain = pluralize(domain);
561
- return `${pluralDomain}.${action}`;
562
- };
563
- var getNatsConnection = async (servers) => {
564
- if (!natsConnectionPromise) {
565
- const url = servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
566
- natsConnectionPromise = connect({ servers: url }).then((connection) => {
567
- logger.debug(`[NATS] connected to ${url}`);
568
- return connection;
569
- }).catch((error) => {
570
- logger.error("[NATS] connection error", error);
571
- natsConnectionPromise = null;
572
- throw error;
573
- });
1357
+ var nats_publisher_exports = {};
1358
+ __export(nats_publisher_exports, {
1359
+ __resetNatsPublisher: () => __resetNatsPublisher,
1360
+ closeConnection: () => closeConnection,
1361
+ deriveStreamFromType: () => deriveStreamFromType,
1362
+ deriveSubjectFromType: () => deriveSubjectFromType,
1363
+ publish: () => publish,
1364
+ publishNatsEvent: () => publishNatsEvent,
1365
+ publishNatsRawEvent: () => publishNatsRawEvent
1366
+ });
1367
+ var sc, natsConnectionPromise, deriveSubjectFromEventType, getNatsConnection, closeConnection, __resetNatsPublisher, deriveSubjectFromType, deriveStreamFromType, publishNatsRawEvent, publishNatsEvent, publish;
1368
+ var init_nats_publisher = __esm({
1369
+ "src/publishing/nats.publisher.ts"() {
1370
+ init_domain();
1371
+ init_infrastructure();
1372
+ init_utils();
1373
+ sc = StringCodec();
1374
+ natsConnectionPromise = null;
1375
+ deriveSubjectFromEventType = (eventType) => {
1376
+ const parts = eventType.split(".");
1377
+ if (parts.length < 2) return eventType;
1378
+ const domain = parts[0];
1379
+ const action = parts.slice(1).join(".");
1380
+ const pluralDomain = pluralize(domain);
1381
+ return `${pluralDomain}.${action}`;
1382
+ };
1383
+ getNatsConnection = async (servers) => {
1384
+ if (!natsConnectionPromise) {
1385
+ const url = servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
1386
+ natsConnectionPromise = connect({ servers: url }).then((connection) => {
1387
+ logger.debug(`[NATS] connected to ${url}`);
1388
+ return connection;
1389
+ }).catch((error) => {
1390
+ logger.error("[NATS] connection error", error);
1391
+ natsConnectionPromise = null;
1392
+ throw error;
1393
+ });
1394
+ }
1395
+ return natsConnectionPromise;
1396
+ };
1397
+ closeConnection = async () => {
1398
+ if (natsConnectionPromise) {
1399
+ try {
1400
+ const nc = await natsConnectionPromise;
1401
+ if (nc && typeof nc.drain === "function") {
1402
+ await nc.drain();
1403
+ logger.debug("[NATS] connection closed");
1404
+ }
1405
+ } catch {
1406
+ } finally {
1407
+ natsConnectionPromise = null;
1408
+ }
1409
+ }
1410
+ };
1411
+ __resetNatsPublisher = async () => {
1412
+ await closeConnection();
1413
+ };
1414
+ deriveSubjectFromType = (eventType, config) => {
1415
+ if (!config?.typeToSubjectMap) {
1416
+ return config?.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
1417
+ }
1418
+ const sortedPrefixes = Object.keys(config.typeToSubjectMap).sort((a, b) => b.length - a.length);
1419
+ for (const prefix of sortedPrefixes) {
1420
+ if (eventType.startsWith(prefix)) {
1421
+ const suffix = eventType.slice(prefix.length);
1422
+ const mappedPrefix = config.typeToSubjectMap[prefix];
1423
+ const cleanSuffix = suffix.startsWith(".") ? suffix.slice(1) : suffix;
1424
+ return cleanSuffix ? `${mappedPrefix}.${cleanSuffix}` : mappedPrefix;
1425
+ }
1426
+ }
1427
+ return config.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
1428
+ };
1429
+ deriveStreamFromType = (eventType, config) => {
1430
+ if (!config?.typeToStreamMap) return void 0;
1431
+ const sortedPrefixes = Object.keys(config.typeToStreamMap).sort((a, b) => b.length - a.length);
1432
+ for (const prefix of sortedPrefixes) {
1433
+ if (eventType.startsWith(prefix)) {
1434
+ return config.typeToStreamMap[prefix];
1435
+ }
1436
+ }
1437
+ return void 0;
1438
+ };
1439
+ publishNatsRawEvent = async (subjectName, eventType, eventData, options) => {
1440
+ const cloudEvent = {
1441
+ specversion: "1.0",
1442
+ type: eventType,
1443
+ source: options?.source || "hono-service",
1444
+ id: crypto.randomUUID(),
1445
+ time: (/* @__PURE__ */ new Date()).toISOString(),
1446
+ datacontenttype: "application/json",
1447
+ data: eventData,
1448
+ ...options?.subject && { subject: options.subject },
1449
+ ...options?.tenantId && { tenantid: options.tenantId }
1450
+ };
1451
+ const data = JSON.stringify(cloudEvent);
1452
+ const nc = await getNatsConnection(options?.servers);
1453
+ nc.publish(subjectName, sc.encode(data));
1454
+ logger.debug(`Published CloudEvent ${eventType} to NATS subject ${subjectName} (id=${cloudEvent.id})`);
1455
+ if (options?.closeAfterPublish) {
1456
+ await closeConnection();
1457
+ }
1458
+ return cloudEvent.id;
1459
+ };
1460
+ publishNatsEvent = async (subjectName, schema, eventData, options) => {
1461
+ const eventType = extractTypeFromSchema(schema);
1462
+ if (!eventType) {
1463
+ throw new Error("Could not extract event type from schema. Make sure your schema has proper metadata.");
1464
+ }
1465
+ const validationResult = schema.safeParse(eventData);
1466
+ if (!validationResult.success) {
1467
+ const validationDetails = validationResult.error.issues.map((issue) => ({
1468
+ code: issue.code,
1469
+ message: issue.message,
1470
+ path: issue.path.filter((p) => typeof p !== "symbol"),
1471
+ expected: "expected" in issue ? String(issue.expected) : void 0,
1472
+ received: "received" in issue ? String(issue.received) : void 0
1473
+ }));
1474
+ const handlerValidationError = {
1475
+ handlerName: `NatsPublisher:${eventType}`,
1476
+ validationErrors: validationDetails
1477
+ };
1478
+ throw createValidationError(eventType, [handlerValidationError]);
1479
+ }
1480
+ return publishNatsRawEvent(subjectName, eventType, validationResult.data, options);
1481
+ };
1482
+ publish = async (eventTypeOrContract, eventData, options) => {
1483
+ const eventType = typeof eventTypeOrContract === "string" ? eventTypeOrContract : eventTypeOrContract.type;
1484
+ const natsSubject = typeof eventTypeOrContract === "string" ? deriveSubjectFromEventType(eventTypeOrContract) : eventTypeOrContract.channel?.subject ?? eventTypeOrContract.type;
1485
+ return publishNatsRawEvent(natsSubject, eventType, eventData, options);
1486
+ };
574
1487
  }
575
- return natsConnectionPromise;
576
- };
577
- var deriveSubjectFromType = (eventType, config) => {
578
- if (!config?.typeToSubjectMap) {
579
- return config?.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
580
- }
581
- const sortedPrefixes = Object.keys(config.typeToSubjectMap).sort((a, b) => b.length - a.length);
582
- for (const prefix of sortedPrefixes) {
583
- if (eventType.startsWith(prefix)) {
584
- const suffix = eventType.slice(prefix.length);
585
- const mappedPrefix = config.typeToSubjectMap[prefix];
586
- const cleanSuffix = suffix.startsWith(".") ? suffix.slice(1) : suffix;
587
- return cleanSuffix ? `${mappedPrefix}.${cleanSuffix}` : mappedPrefix;
588
- }
1488
+ });
1489
+
1490
+ // src/flows/publish-event.flow.ts
1491
+ var publish_event_flow_exports = {};
1492
+ __export(publish_event_flow_exports, {
1493
+ parseDataInput: () => parseDataInput,
1494
+ publishEventFlowSteps: () => publishEventFlowSteps
1495
+ });
1496
+ var parseDataInput, loadDataFromInput, publishToNats, hasExplicitData, isInvalidEventType2, getEventTypeErrorMessage2, flow3, publishEventFlowSteps;
1497
+ var init_publish_event_flow = __esm({
1498
+ "src/flows/publish-event.flow.ts"() {
1499
+ init_naming();
1500
+ init_generators();
1501
+ parseDataInput = (input2) => {
1502
+ if (!input2.trim()) return {};
1503
+ try {
1504
+ return JSON.parse(input2);
1505
+ } catch {
1506
+ return null;
1507
+ }
1508
+ };
1509
+ loadDataFromInput = async (ctx) => {
1510
+ if (ctx.dataInput !== void 0 && ctx.dataInput.trim() !== "") {
1511
+ const parsed = parseDataInput(ctx.dataInput);
1512
+ if (parsed === null) {
1513
+ throw new Error("Invalid JSON data");
1514
+ }
1515
+ ctx.data = parsed;
1516
+ return;
1517
+ }
1518
+ const { loadJsonMock: loadJsonMock2 } = await Promise.resolve().then(() => (init_generators(), generators_exports));
1519
+ const mock = loadJsonMock2(ctx.eventType, ctx.config.contractsPath);
1520
+ if (mock?.data) {
1521
+ ctx.data = mock.data;
1522
+ ctx.usedMock = true;
1523
+ return;
1524
+ }
1525
+ ctx.data = {};
1526
+ };
1527
+ publishToNats = async (ctx) => {
1528
+ const { publish: publish2 } = await Promise.resolve().then(() => (init_nats_publisher(), nats_publisher_exports));
1529
+ await publish2(ctx.eventType, ctx.data ?? {}, {
1530
+ source: "pf-cli",
1531
+ closeAfterPublish: true
1532
+ });
1533
+ ctx.published = true;
1534
+ const mockHint = ctx.usedMock ? " (using mock data)" : "";
1535
+ console.log(`\u2705 Published ${ctx.eventType}${mockHint}`);
1536
+ };
1537
+ hasExplicitData = (ctx) => ctx.data !== void 0 || jsonMockExists(ctx.eventType, ctx.config.contractsPath);
1538
+ isInvalidEventType2 = (ctx) => !isValidEventType(ctx.eventType);
1539
+ getEventTypeErrorMessage2 = (ctx) => `Invalid event type: ${validateEventType(ctx.eventType).errors.join(", ")}`;
1540
+ flow3 = createFlow();
1541
+ publishEventFlowSteps = [
1542
+ flow3.input("eventType").prompt({
1543
+ title: "Event Type",
1544
+ message: "Event type to publish (e.g., order.created):"
1545
+ }).validateBy(validateEventType).skipIfSet().build(),
1546
+ flow3.abortIf(isInvalidEventType2, getEventTypeErrorMessage2).build(),
1547
+ flow3.input("dataInput").prompt({
1548
+ title: "Event Data",
1549
+ message: "Event data as JSON (or leave empty for mock data):"
1550
+ }).skipWhen(hasExplicitData).build(),
1551
+ flow3.task("Publish event").steps([
1552
+ loadDataFromInput,
1553
+ publishToNats
1554
+ ]).produces("data", "published", "usedMock").build()
1555
+ ];
589
1556
  }
590
- return config.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
591
- };
592
- var deriveStreamFromType = (eventType, config) => {
593
- if (!config?.typeToStreamMap) return void 0;
594
- const sortedPrefixes = Object.keys(config.typeToStreamMap).sort((a, b) => b.length - a.length);
595
- for (const prefix of sortedPrefixes) {
596
- if (eventType.startsWith(prefix)) {
597
- return config.typeToStreamMap[prefix];
1557
+ });
1558
+
1559
+ // src/adapters/cloudevents/cloudevents.ts
1560
+ init_infrastructure();
1561
+ var hasCloudEventHeaders = (headers) => Object.keys(headers).some((key) => key.toLowerCase().startsWith("ce-"));
1562
+ var isRecord = (value) => typeof value === "object" && value !== null;
1563
+ var parseEventFromContext = async (context) => {
1564
+ try {
1565
+ const headers = context.req.header();
1566
+ const rawBody = await context.req.text();
1567
+ let parsedBody;
1568
+ if (rawBody?.length) {
1569
+ try {
1570
+ parsedBody = JSON.parse(rawBody);
1571
+ } catch {
1572
+ parsedBody = void 0;
1573
+ }
1574
+ }
1575
+ const bodyObject = isRecord(parsedBody) ? parsedBody : void 0;
1576
+ if (bodyObject && "specversion" in bodyObject) {
1577
+ const { parseStructuredMode: parseStructuredMode2 } = await Promise.resolve().then(() => (init_structured_mode(), structured_mode_exports));
1578
+ return parseStructuredMode2(bodyObject);
1579
+ }
1580
+ if (bodyObject && "message" in bodyObject) {
1581
+ const { parsePubSubMessage: parsePubSubMessage2 } = await Promise.resolve().then(() => (init_pubsub(), pubsub_exports));
1582
+ return await parsePubSubMessage2(bodyObject, headers);
1583
+ }
1584
+ if (hasCloudEventHeaders(headers)) {
1585
+ const { parseBinaryMode: parseBinaryMode2 } = await Promise.resolve().then(() => (init_binary_mode(), binary_mode_exports));
1586
+ return await parseBinaryMode2(headers, rawBody);
1587
+ }
1588
+ const { parseRawEvent: parseRawEvent2 } = await Promise.resolve().then(() => (init_raw_event(), raw_event_exports));
1589
+ if (bodyObject) {
1590
+ return parseRawEvent2(bodyObject);
598
1591
  }
1592
+ return parseRawEvent2({ raw: rawBody });
1593
+ } catch (error) {
1594
+ logger.error("Failed to parse event:", error);
1595
+ throw new Error(`Failed to parse CloudEvent: ${error instanceof Error ? error.message : "Unknown error"}`);
599
1596
  }
600
- return void 0;
601
1597
  };
602
- var publishNatsRawEvent = async (subjectName, eventType, eventData, options) => {
603
- const cloudEvent = {
604
- specversion: "1.0",
605
- type: eventType,
606
- source: options?.source || "hono-service",
607
- id: crypto.randomUUID(),
608
- time: (/* @__PURE__ */ new Date()).toISOString(),
609
- datacontenttype: "application/json",
610
- data: eventData,
611
- ...options?.subject && { subject: options.subject },
612
- ...options?.tenantId && { tenantid: options.tenantId }
1598
+
1599
+ // src/flows/index.ts
1600
+ init_create_event_flow();
1601
+ init_list_events_flow();
1602
+ init_publish_event_flow();
1603
+
1604
+ // src/api.ts
1605
+ var createEvent = async (eventType, options = {}) => {
1606
+ const { fields, service, contractsPath, mocksPath, force } = options;
1607
+ const result = await runFlow(createEventFlowSteps, {
1608
+ initialContext: {
1609
+ eventType,
1610
+ fields: typeof fields === "string" ? parseFieldsInput(fields) : fields,
1611
+ servicePath: service,
1612
+ availableServices: [],
1613
+ // No services in simple API
1614
+ force,
1615
+ config: {
1616
+ contractsPath: contractsPath ?? "packages/contracts/src",
1617
+ mocksPath
1618
+ }
1619
+ },
1620
+ inputs: {},
1621
+ // Non-interactive mode
1622
+ exitOnError: true
1623
+ });
1624
+ if (!result.success) {
1625
+ throw new Error(result.abortReason ?? "Failed to create event");
1626
+ }
1627
+ return {
1628
+ created: result.context._generation?.created ?? [],
1629
+ skipped: result.context._generation?.skipped ?? [],
1630
+ effects: result.context._generation?.effects ?? []
613
1631
  };
614
- const data = JSON.stringify(cloudEvent);
615
- const nc = await getNatsConnection(options?.servers);
616
- nc.publish(subjectName, sc.encode(data));
617
- logger.debug(`Published CloudEvent ${eventType} to NATS subject ${subjectName} (id=${cloudEvent.id})`);
618
- return cloudEvent.id;
619
1632
  };
620
- var publishNatsEvent = async (subjectName, schema, eventData, options) => {
621
- const eventType = extractTypeFromSchema(schema);
622
- if (!eventType) {
623
- throw new Error("Could not extract event type from schema. Make sure your schema has proper metadata.");
624
- }
625
- const validationResult = schema.safeParse(eventData);
626
- if (!validationResult.success) {
627
- const validationDetails = validationResult.error.issues.map((issue) => ({
628
- code: issue.code,
629
- message: issue.message,
630
- path: issue.path.filter((p) => typeof p !== "symbol"),
631
- expected: "expected" in issue ? String(issue.expected) : void 0,
632
- received: "received" in issue ? String(issue.received) : void 0
633
- }));
634
- const handlerValidationError = {
635
- handlerName: `NatsPublisher:${eventType}`,
636
- validationErrors: validationDetails
637
- };
638
- throw createValidationError(eventType, [handlerValidationError]);
1633
+ var listEvents = async (options = {}) => {
1634
+ const { pattern, contractsPath } = options;
1635
+ const result = await runFlow(listEventsFlowSteps, {
1636
+ initialContext: {
1637
+ pattern: pattern ?? "",
1638
+ config: {
1639
+ contractsPath: contractsPath ?? "packages/contracts/src"
1640
+ }
1641
+ },
1642
+ exitOnError: true
1643
+ });
1644
+ if (!result.success) {
1645
+ throw new Error(result.abortReason ?? "Failed to list events");
639
1646
  }
640
- return publishNatsRawEvent(subjectName, eventType, validationResult.data, options);
641
- };
642
- var publish = async (eventTypeOrContract, eventData, options) => {
643
- const eventType = typeof eventTypeOrContract === "string" ? eventTypeOrContract : eventTypeOrContract.type;
644
- const natsSubject = typeof eventTypeOrContract === "string" ? deriveSubjectFromEventType(eventTypeOrContract) : eventTypeOrContract.channel?.subject ?? eventTypeOrContract.type;
645
- return publishNatsRawEvent(natsSubject, eventType, eventData, options);
1647
+ return result.context.eventTypes ?? [];
646
1648
  };
647
- var __resetNatsPublisher = () => {
648
- natsConnectionPromise = null;
1649
+ var publishEvent = async (eventType, data, options = {}) => {
1650
+ const { natsUrl, contractsPath } = options;
1651
+ const result = await runFlow(publishEventFlowSteps, {
1652
+ initialContext: {
1653
+ eventType,
1654
+ data,
1655
+ config: {
1656
+ contractsPath: contractsPath ?? "packages/contracts/src",
1657
+ natsUrl: natsUrl ?? "nats://localhost:4222"
1658
+ },
1659
+ published: false
1660
+ },
1661
+ exitOnError: true
1662
+ });
1663
+ if (!result.success) {
1664
+ throw new Error(result.abortReason ?? "Failed to publish event");
1665
+ }
1666
+ return { success: true };
649
1667
  };
650
1668
 
1669
+ // src/index.ts
1670
+ init_domain();
1671
+ init_naming();
1672
+ init_effects();
1673
+ init_generators();
1674
+
1675
+ // src/middlewares/cloudevents-middleware.ts
1676
+ init_infrastructure();
1677
+
1678
+ // src/processing/dlq-safe.ts
1679
+ init_infrastructure();
1680
+
1681
+ // src/publishing/index.ts
1682
+ init_nats_publisher();
1683
+
651
1684
  // src/publishing/pubsub.publisher.ts
1685
+ init_domain();
652
1686
  init_infrastructure();
653
- async function publishEvent(topicName, schema, eventData, options) {
654
- const eventType = extractTypeFromSchema(schema);
655
- if (!eventType) {
656
- throw new Error("Could not extract event type from schema. Make sure your schema has proper metadata.");
657
- }
658
- const validationResult = schema.safeParse(eventData);
659
- if (!validationResult.success) {
660
- const validationDetails = validationResult.error.issues.map((issue) => ({
661
- code: issue.code,
662
- message: issue.message,
663
- path: issue.path.filter((p) => typeof p !== "symbol"),
664
- expected: "expected" in issue ? String(issue.expected) : void 0,
665
- received: "received" in issue ? String(issue.received) : void 0
666
- }));
667
- const handlerValidationError = {
668
- handlerName: `Publisher:${eventType}`,
669
- validationErrors: validationDetails
670
- };
671
- throw createValidationError(eventType, [handlerValidationError]);
672
- }
673
- return publishRawEvent(topicName, eventType, validationResult.data, options);
674
- }
675
1687
  async function publishRawEvent(topicName, eventType, eventData, options) {
676
1688
  const { PubSub } = await import('@google-cloud/pubsub');
677
1689
  const pubsub = new PubSub({
@@ -790,6 +1802,7 @@ var createProcessingContext = (eventType, eventData, context, originalCloudEvent
790
1802
  });
791
1803
 
792
1804
  // src/processing/handler-cache.ts
1805
+ init_domain();
793
1806
  init_logging();
794
1807
  var createEmptyCache = () => /* @__PURE__ */ new Map();
795
1808
  var createEmptyMessageIds = () => /* @__PURE__ */ new Set();
@@ -1113,6 +2126,142 @@ function cloudEvents(options = {}) {
1113
2126
  };
1114
2127
  }
1115
2128
 
2129
+ // package.json
2130
+ var package_default = {
2131
+ name: "@crossdelta/cloudevents",
2132
+ version: "0.6.4",
2133
+ description: "CloudEvents toolkit for TypeScript - Zod validation, handler discovery, NATS JetStream"};
2134
+
2135
+ // src/plugin.ts
2136
+ var createAddCommand = (config) => ({
2137
+ name: "add",
2138
+ description: "Add a new event contract, handler, and mock",
2139
+ args: [{ name: "eventType", description: "Event type (e.g., order.created)", required: false }],
2140
+ options: [
2141
+ { flags: "-s, --service <path>", description: "Service path for handler" },
2142
+ { flags: "-f, --fields <fields>", description: "Schema fields (e.g., orderId:string,total:number)" },
2143
+ { flags: "--force", description: "Overwrite existing files", default: false }
2144
+ ],
2145
+ run: async (args, opts) => {
2146
+ const { runFlow: runFlow2 } = await import('@crossdelta/flowcore');
2147
+ const { createEventFlowSteps: createEventFlowSteps2, parseFieldsInput: parseFieldsInput2 } = await Promise.resolve().then(() => (init_create_event_flow(), create_event_flow_exports));
2148
+ const result = await runFlow2(createEventFlowSteps2, {
2149
+ initialContext: {
2150
+ eventType: args.eventType,
2151
+ fields: opts.fields ? parseFieldsInput2(opts.fields) : void 0,
2152
+ servicePath: opts.service,
2153
+ availableServices: config.availableServices,
2154
+ force: opts.force,
2155
+ config: {
2156
+ contractsPath: config.contractsPath,
2157
+ contractsPackage: config.contractsPackage,
2158
+ mocksPath: config.mocksPath
2159
+ }
2160
+ },
2161
+ exitOnError: true
2162
+ });
2163
+ if (!result.success) {
2164
+ throw new Error(result.abortReason ?? "Unknown error");
2165
+ }
2166
+ return { effects: result.context._generation?.effects ?? [] };
2167
+ }
2168
+ });
2169
+ var createListCommand = (config) => ({
2170
+ name: "list",
2171
+ description: "List event types in the workspace",
2172
+ args: [],
2173
+ options: [{ flags: "-p, --pattern <pattern>", description: "Filter by pattern" }],
2174
+ run: async (_args, opts) => {
2175
+ const { runFlow: runFlow2 } = await import('@crossdelta/flowcore');
2176
+ const { listEventsFlowSteps: listEventsFlowSteps2 } = await Promise.resolve().then(() => (init_list_events_flow(), list_events_flow_exports));
2177
+ const pattern = opts.pattern !== void 0 ? opts.pattern : "";
2178
+ const result = await runFlow2(listEventsFlowSteps2, {
2179
+ initialContext: {
2180
+ pattern,
2181
+ config: { contractsPath: config.contractsPath }
2182
+ },
2183
+ exitOnError: true
2184
+ });
2185
+ if (!result.success) {
2186
+ throw new Error(result.abortReason ?? "Unknown error");
2187
+ }
2188
+ const eventTypes = result.context.eventTypes ?? [];
2189
+ const output = eventTypes.length > 0 ? `
2190
+ Event types:
2191
+ ${eventTypes.map((e) => ` \u2022 ${e}`).join("\n")}` : "\n\u2139\uFE0F No event types found";
2192
+ return { effects: [], output };
2193
+ }
2194
+ });
2195
+ var createPublishCommand = (config) => ({
2196
+ name: "publish",
2197
+ description: "Publish an event to NATS",
2198
+ args: [{ name: "eventType", description: "Event type to publish", required: true }],
2199
+ options: [{ flags: "-d, --data <json>", description: "Event data as JSON" }],
2200
+ run: async (args, opts) => {
2201
+ const { runFlow: runFlow2 } = await import('@crossdelta/flowcore');
2202
+ const { publishEventFlowSteps: publishEventFlowSteps2, parseDataInput: parseDataInput2 } = await Promise.resolve().then(() => (init_publish_event_flow(), publish_event_flow_exports));
2203
+ const data = opts.data ? parseDataInput2(opts.data) ?? void 0 : void 0;
2204
+ const result = await runFlow2(publishEventFlowSteps2, {
2205
+ initialContext: {
2206
+ eventType: args.eventType,
2207
+ data,
2208
+ config: { contractsPath: config.contractsPath, natsUrl: config.natsUrl },
2209
+ published: false
2210
+ },
2211
+ exitOnError: true
2212
+ });
2213
+ if (!result.success) {
2214
+ throw new Error(result.abortReason ?? "Unknown error");
2215
+ }
2216
+ return { effects: [] };
2217
+ }
2218
+ });
2219
+ var createFlows = () => [
2220
+ {
2221
+ name: "create-event",
2222
+ description: "Interactive flow for creating event artifacts",
2223
+ getSteps: async () => {
2224
+ const { createEventFlowSteps: createEventFlowSteps2 } = await Promise.resolve().then(() => (init_create_event_flow(), create_event_flow_exports));
2225
+ return createEventFlowSteps2;
2226
+ }
2227
+ },
2228
+ {
2229
+ name: "list-events",
2230
+ description: "Interactive flow for listing events",
2231
+ getSteps: async () => {
2232
+ const { listEventsFlowSteps: listEventsFlowSteps2 } = await Promise.resolve().then(() => (init_list_events_flow(), list_events_flow_exports));
2233
+ return listEventsFlowSteps2;
2234
+ }
2235
+ },
2236
+ {
2237
+ name: "publish-event",
2238
+ description: "Interactive flow for publishing events",
2239
+ getSteps: async () => {
2240
+ const { publishEventFlowSteps: publishEventFlowSteps2 } = await Promise.resolve().then(() => (init_publish_event_flow(), publish_event_flow_exports));
2241
+ return publishEventFlowSteps2;
2242
+ }
2243
+ }
2244
+ ];
2245
+ var createPfPlugin = (options = {}) => {
2246
+ const config = {
2247
+ contractsPath: options.contractsPath ?? "packages/contracts/src",
2248
+ contractsPackage: options.contractsPackage,
2249
+ mocksPath: options.mocksPath,
2250
+ availableServices: options.availableServices ?? [],
2251
+ natsUrl: options.natsUrl ?? "nats://localhost:4222"
2252
+ };
2253
+ return {
2254
+ name: package_default.name.replace("@crossdelta/", ""),
2255
+ version: package_default.version,
2256
+ description: package_default.description,
2257
+ commands: [createAddCommand(config), createListCommand(config), createPublishCommand(config)],
2258
+ flows: createFlows(),
2259
+ setup: (context) => {
2260
+ context.logger.debug("cloudevents plugin loaded");
2261
+ }
2262
+ };
2263
+ };
2264
+
1116
2265
  // src/transports/nats/base-message-processor.ts
1117
2266
  function extractTenantId(ce) {
1118
2267
  const extensions = ce;
@@ -1136,11 +2285,7 @@ function createBaseMessageProcessor(deps) {
1136
2285
  const findHandler = (event) => processedHandlers.find(
1137
2286
  (handler) => handler.type === event.eventType && (!handler.match || handler.match(event))
1138
2287
  );
1139
- const handleMissingHandler = async (context, eventType) => {
1140
- logger2.warn(`[${name}] no handler for event type: ${eventType}`);
1141
- if (dlqEnabled) {
1142
- await quarantineMessage(context, "no_handler", options, new Error(`No handler for event type ${eventType}`));
1143
- }
2288
+ const handleMissingHandler = async (_context, _eventType) => {
1144
2289
  return { handled: true, shouldAck: true };
1145
2290
  };
1146
2291
  const handleValidationFailure = async (validationResult, handler, context) => {
@@ -1214,6 +2359,9 @@ function createBaseMessageProcessor(deps) {
1214
2359
  handleUnhandledError
1215
2360
  };
1216
2361
  }
2362
+
2363
+ // src/transports/nats/jetstream-consumer.ts
2364
+ init_domain();
1217
2365
  init_logging();
1218
2366
 
1219
2367
  // src/transports/nats/jetstream-message-processor.ts
@@ -1495,6 +2643,9 @@ async function consumeJetStreamStreams(options) {
1495
2643
  }
1496
2644
  var ensureJetStreams = ensureJetStreamStreams;
1497
2645
  var consumeJetStreams = consumeJetStreamStreams;
2646
+
2647
+ // src/transports/nats/nats-consumer.ts
2648
+ init_domain();
1498
2649
  init_logging();
1499
2650
 
1500
2651
  // src/transports/nats/nats-message-processor.ts
@@ -1587,4 +2738,7 @@ async function consumeNatsEvents(options) {
1587
2738
  return sub;
1588
2739
  }
1589
2740
 
1590
- export { __resetNatsPublisher, checkAndMarkProcessed, clearHandlerCache, cloudEvents, consumeJetStreamEvents, consumeJetStreamStreams, consumeJetStreams, consumeNatsEvents, createContract, createInMemoryIdempotencyStore, deriveStreamFromType, deriveSubjectFromType, ensureJetStreamStream, ensureJetStreamStreams, ensureJetStreams, eventSchema, extractTypeFromSchema, getDefaultIdempotencyStore, handleEvent, parseEventFromContext, pluralize, publish, publishEvent, publishNatsEvent, publishNatsRawEvent, publishRawEvent, resetDefaultIdempotencyStore, singularize };
2741
+ // src/index.ts
2742
+ init_utils();
2743
+
2744
+ export { __resetNatsPublisher, checkAndMarkProcessed, clearHandlerCache, closeConnection, cloudEvents, consumeJetStreamEvents, consumeJetStreamStreams, consumeJetStreams, consumeNatsEvents, contractCreated, createContract, createEvent, createEventFlowSteps, createInMemoryIdempotencyStore, createMemoryFileSystem, createPfPlugin, deriveEventNames, deriveStreamFromType, deriveSubjectFromType, discoverEventTypes, ensureJetStreamStream, ensureJetStreamStreams, ensureJetStreams, eventSchema, extractTypeFromSchema, generateContract, generateContractContent, generateEventHandler, generateEventHandlerContent, generateJsonMock, generateJsonMockFromContract, generateMock, generateMockContent, getContractFilePath, getContractPaths, getDefaultIdempotencyStore, getHandlerFilePath, getHandlerPath, getJsonMockPath, getMockFilePath, getStreamName, handleEvent, handlerCreated, initFaker, isValidEventType, jsonMockExists, listEvents, listEventsFlowSteps, loadJsonMock, normalizeSubject, parseDataInput, parseEventFromContext, parseEventTypeFromContract, parseEventTypeFromHandler, parseFieldsInput, pluralize, publish, publishEvent, publishEventFlowSteps, publishNatsEvent, publishNatsRawEvent, publishRawEvent, resetDefaultIdempotencyStore, singularize, streamWired, toKebabCase, toPascalCase, validateEventType };