@crossdelta/cloudevents 0.6.2 → 0.6.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +1226 -0
- package/dist/index.cjs +1508 -338
- package/dist/{index.d.mts → index.d.cts} +705 -52
- package/dist/index.d.ts +705 -52
- package/dist/index.js +1485 -331
- package/package.json +24 -10
package/bin/cli.js
ADDED
|
@@ -0,0 +1,1226 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import pluralizeLib from 'pluralize';
|
|
3
|
+
import * as fs from 'fs';
|
|
4
|
+
import { existsSync, readdirSync, statSync } from 'fs';
|
|
5
|
+
import * as path from 'path';
|
|
6
|
+
import { join } from 'path';
|
|
7
|
+
import { createFlow, createGenerationResult, input, initGenerationContext, trackChange, printGenerationSummary, change, runFlow } from '@crossdelta/flowcore';
|
|
8
|
+
import 'url';
|
|
9
|
+
import 'glob';
|
|
10
|
+
import 'zod';
|
|
11
|
+
import { StringCodec, connect } from 'nats';
|
|
12
|
+
|
|
13
|
+
// esbuild-style module glue: lazily runs a module's init function once and
// wires up live re-exports.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Wraps a module initializer so it executes at most once. `fn` is an object
// whose single property is the init function; after the first call `fn` is
// set to 0 so subsequent calls just return the cached `res`.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Defines enumerable getter-based exports on `target`, one per entry in `all`,
// so exported bindings stay live rather than being copied by value.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
|
|
22
|
+
var pluralize, singularize;
// src/utils/pluralize.ts — thin wrappers around the `pluralize` package.
var init_pluralize = __esm({
  "src/utils/pluralize.ts"() {
    // Convert a word to its plural form, e.g. "order" -> "orders".
    pluralize = function (word) {
      return pluralizeLib.plural(word);
    };
    // Convert a word to its singular form, e.g. "orders" -> "order".
    singularize = function (word) {
      return pluralizeLib.singular(word);
    };
  }
});
|
|
29
|
+
|
|
30
|
+
// src/utils/index.ts
// Barrel initializer: initializing utils just initializes the pluralize module.
var init_utils = __esm({
  "src/utils/index.ts"() {
    init_pluralize();
  }
});
|
|
36
|
+
|
|
37
|
+
// src/domain/naming.ts
// Naming conventions for dotted event types ("order.created") and the files
// derived from them (contracts, handlers, mocks).
var toKebabCase, toPascalCase, validateEventType, isValidEventType, deriveEventNames, getContractPaths, getHandlerPath, parseEventTypeFromContract;
var init_naming = __esm({
  "src/domain/naming.ts"() {
    init_utils();
    // "order.created" -> "order-created"
    toKebabCase = (str) => str.replace(/\./g, "-").toLowerCase();
    // "order.created" / "order-created" -> "OrderCreated"
    toPascalCase = (str) => str.split(/[-.]/).map((part) => part.charAt(0).toUpperCase() + part.slice(1).toLowerCase()).join("");
    // Checks the dotted event-type convention and collects every violation.
    // Returns { valid, errors }; an empty/missing type short-circuits.
    validateEventType = (eventType) => {
      const errors = [];
      if (!eventType) {
        errors.push("Event type is required");
        return { valid: false, errors };
      }
      if (!eventType.includes(".")) {
        errors.push("Event type must include a namespace separator (e.g., order.created)");
      }
      if (eventType !== eventType.toLowerCase()) {
        errors.push("Event type must be lowercase");
      }
      if (eventType.startsWith(".") || eventType.endsWith(".")) {
        errors.push("Event type must not start or end with a dot");
      }
      if (eventType.includes("..")) {
        errors.push("Event type must not contain consecutive dots");
      }
      const validChars = /^[a-z0-9.-]+$/;
      if (!validChars.test(eventType)) {
        errors.push("Event type must only contain lowercase letters, numbers, dots, and dashes");
      }
      return {
        valid: errors.length === 0,
        errors
      };
    };
    isValidEventType = (eventType) => validateEventType(eventType).valid;
    // Derives every name/path variant used by the generators from one event type.
    // namespace = first dot segment, action = remaining segments joined with "-".
    deriveEventNames = (eventType) => {
      const parts = eventType.split(".");
      const namespace = parts[0];
      const action = parts.slice(1).join("-");
      const kebab = toKebabCase(eventType);
      const pascal = toPascalCase(eventType);
      const pluralNamespace = pluralize(namespace);
      const pluralPascal = toPascalCase(`${pluralNamespace}.${action}`);
      return {
        eventType,
        kebab,
        pascal,
        schemaName: `${pascal}Schema`,
        typeName: `${pascal}Data`,
        contractName: `${pluralPascal}Contract`,
        handlerFile: `${kebab}.handler.ts`,
        streamName: pluralNamespace.toUpperCase(),
        domain: pluralNamespace.toLowerCase(),
        action,
        namespace
      };
    };
    // Contract file location relative to the contracts root: events/<domain>/<action>.ts
    getContractPaths = (eventType) => {
      const { domain, action } = deriveEventNames(eventType);
      return {
        relativePath: `events/${domain}/${action}.ts`,
        folder: domain,
        filename: action
      };
    };
    // Handler file location relative to the handlers root.
    getHandlerPath = (eventType) => {
      const { handlerFile } = deriveEventNames(eventType);
      return `events/${handlerFile}`;
    };
    // Inverse of getContractPaths: recovers "namespace.action" from a contract
    // path like "events/orders/line-added.ts". Returns null when the path does
    // not match the expected layout.
    parseEventTypeFromContract = (path2) => {
      const cleanPath = path2.replace(/^events\//, "");
      const match = cleanPath.match(/^([^/]+)\/(.+)\.ts$/);
      if (!match) return null;
      const [, folder, filename] = match;
      const namespace = singularize(folder);
      // BUG FIX: previously returned the literal string `${namespace}.$(unknown)`
      // and never used the parsed filename. Rebuild the dotted action from the
      // kebab filename — the inverse of the join("-") in deriveEventNames.
      // (Lossy if an original action segment itself contained a dash.)
      return `${namespace}.${filename.replace(/-/g, ".")}`;
    };
  }
});
|
|
116
|
+
var createNodeFileSystem, createMemoryFileSystem;
// src/generators/types.ts — pluggable filesystem adapters used by the generators.
var init_types = __esm({
  "src/generators/types.ts"() {
    // Real filesystem adapter backed by Node's fs module.
    // readFile returns null (rather than throwing) when the file is unreadable.
    createNodeFileSystem = () => ({
      readFile(filePath) {
        try {
          return fs.readFileSync(filePath, "utf-8");
        } catch {
          return null;
        }
      },
      // Creates any missing parent directories before writing.
      writeFile(filePath, content) {
        const dir = path.dirname(filePath);
        if (!fs.existsSync(dir)) {
          fs.mkdirSync(dir, { recursive: true });
        }
        fs.writeFileSync(filePath, content, "utf-8");
      },
      exists(filePath) {
        return fs.existsSync(filePath);
      },
      mkdir(dirPath) {
        return fs.mkdirSync(dirPath, { recursive: true });
      }
    });
    // In-memory adapter for tests; exposes its backing `files` map.
    createMemoryFileSystem = (initialFiles = {}) => {
      const files = { ...initialFiles };
      return {
        files,
        readFile(p) {
          return files[p] ?? null;
        },
        writeFile(p, content) {
          files[p] = content;
        },
        exists(p) {
          return p in files;
        },
        // Directories are implicit in the in-memory model; nothing to do.
        mkdir() {
        }
      };
    };
  }
});
|
|
154
|
+
var fieldTypeToZod, generateSchemaFields, generateContractContent, getContractFilePath, generateContract;
// src/generators/contract.ts — generates zod-based contract source files.
var init_contract = __esm({
  "src/generators/contract.ts"() {
    init_naming();
    init_types();
    // Maps a declared field type to its zod builder expression (as source text).
    fieldTypeToZod = (type, optional) => {
      const baseTypes = {
        string: "z.string()",
        number: "z.number()",
        boolean: "z.boolean()",
        date: "z.string().datetime()",
        datetime: "z.string().datetime()",
        array: "z.array(z.unknown())",
        object: "z.record(z.unknown())"
      };
      // BUG FIX: an unrecognized type previously produced the literal text
      // "undefined" (or "undefined.optional()") in the generated file; fall
      // back to z.unknown() instead.
      const base = baseTypes[type] ?? "z.unknown()";
      return optional ? `${base}.optional()` : base;
    };
    // Renders the body of the z.object({...}) — one "  name: builder," line per
    // field; with no fields, a default id/createdAt pair.
    generateSchemaFields = (fields) => {
      if (!fields || fields.length === 0) {
        return `  id: z.string(),
  createdAt: z.string(),`;
      }
      return fields.map((field) => `  ${field.name}: ${fieldTypeToZod(field.type, field.optional)},`).join("\n");
    };
    // Full contract-module source: schema, contract, and inferred data type.
    generateContractContent = (options) => {
      const { eventType, fields } = options;
      const names = deriveEventNames(eventType);
      const schemaFields = generateSchemaFields(fields);
      const schemaBody = schemaFields ? `{
${schemaFields}
}` : "{}";
      return `import { createContract } from '@crossdelta/cloudevents'
import { z } from 'zod'

export const ${names.schemaName} = z.object(${schemaBody})

export const ${names.contractName} = createContract({
  type: '${eventType}',
  channel: { stream: '${names.streamName}' },
  schema: ${names.schemaName},
})

export type ${names.typeName} = z.infer<typeof ${names.contractName}.schema>
`;
    };
    getContractFilePath = (eventType, basePath) => {
      const paths = getContractPaths(eventType);
      return `${basePath}/${paths.relativePath}`;
    };
    // Writes the contract file. Returns change.created / change.updated
    // (with --force over an existing file) / change.skipped.
    generateContract = (options, fs2 = createNodeFileSystem()) => {
      const filePath = getContractFilePath(options.eventType, options.basePath);
      const exists = fs2.exists(filePath);
      if (exists && !options.force) {
        return change.skipped("File already exists");
      }
      fs2.writeFile(filePath, generateContractContent(options));
      return exists ? change.updated(filePath) : change.created(filePath);
    };
  }
});
|
|
220
|
+
|
|
221
|
+
// src/generators/exports.ts
// Maintains the barrel index.ts files that re-export generated contracts.
var hasExport, addExportToIndex, ensureDomainExport;
var init_exports = __esm({
  "src/generators/exports.ts"() {
    init_types();
    // Escape regex metacharacters so an export path can be embedded in a
    // pattern. Previously this replace() was copy-pasted three times inline.
    const escapeRegExp = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    // True if `content` already re-exports `exportPath` (with or without a
    // trailing ".ts" in the specifier).
    hasExport = (content, exportPath) => {
      const escaped = escapeRegExp(exportPath);
      const patterns = [
        new RegExp(`^\\s*export \\* from ['"]${escaped}['"]`, "m"),
        new RegExp(`^\\s*export \\* from ['"]${escaped}\\.ts['"]`, "m")
      ];
      return patterns.some((p) => p.test(content));
    };
    // Ensures `export * from '<exportPath>'` is present in the index file,
    // creating the file if needed and un-commenting a commented-out export if
    // one exists. Returns { added, path, alreadyExists? }.
    addExportToIndex = (options, fs2 = createNodeFileSystem()) => {
      const { indexPath, exportPath, force } = options;
      const exportLine = `export * from '${exportPath}'
`;
      // Missing file and empty/unreadable file are handled identically
      // (previously two duplicated branches).
      const content = fs2.exists(indexPath) ? fs2.readFile(indexPath) : null;
      if (!content) {
        fs2.writeFile(indexPath, exportLine);
        return { added: true, path: indexPath };
      }
      if (!force && hasExport(content, exportPath)) {
        return { added: false, path: indexPath, alreadyExists: true };
      }
      const commentedExport = new RegExp(`//\\s*export \\* from ['"]${escapeRegExp(exportPath)}['"]`);
      let newContent;
      if (commentedExport.test(content)) {
        // Re-activate a commented-out export in place.
        newContent = content.replace(commentedExport, `export * from '${exportPath}'`);
      } else {
        const trimmed = content.trimEnd();
        newContent = trimmed.length > 0 ? `${trimmed}
${exportLine}` : exportLine;
      }
      fs2.writeFile(indexPath, newContent);
      return { added: true, path: indexPath };
    };
    // Wires up the three barrel files for one event: events/index.ts exports
    // the domain folder, events/<domain>/index.ts exports the event file, and
    // the package root index.ts exports ./events. Returns one result per file.
    ensureDomainExport = (options, fs2 = createNodeFileSystem()) => {
      const { contractsBasePath, domain, eventFile } = options;
      const targets = [
        { indexPath: `${contractsBasePath}/events/index.ts`, exportPath: `./${domain}` },
        { indexPath: `${contractsBasePath}/events/${domain}/index.ts`, exportPath: `./${eventFile}` },
        { indexPath: `${contractsBasePath}/index.ts`, exportPath: "./events" }
      ];
      return targets.map((target) => addExportToIndex(target, fs2));
    };
  }
});
|
|
297
|
+
var generateEventHandlerContent, getHandlerFilePath, generateEventHandler;
// src/generators/handler.ts — generates event-handler stub files.
var init_handler = __esm({
  "src/generators/handler.ts"() {
    init_naming();
    init_types();
    // Source text for a handler stub that wires the contract to handleEvent.
    generateEventHandlerContent = (options) => {
      const { eventType, contractsPackage = "@crossdelta/contracts" } = options;
      const names = deriveEventNames(eventType);
      return `import { handleEvent } from '@crossdelta/cloudevents'
import { ${names.contractName}, type ${names.typeName} } from '${contractsPackage}'

export default handleEvent(${names.contractName}, async (data: ${names.typeName}) => {
  // TODO: Implement event handling logic
  console.log('Received ${eventType}:', data)
})
`;
    };
    // Handler file location under the given base path.
    getHandlerFilePath = (eventType, basePath) => `${basePath}/${getHandlerPath(eventType)}`;
    // Writes the handler stub: created when new, updated when forced,
    // otherwise skipped.
    generateEventHandler = (options, fs2 = createNodeFileSystem()) => {
      const targetPath = getHandlerFilePath(options.eventType, options.basePath);
      const body = generateEventHandlerContent(options);
      const alreadyThere = fs2.exists(targetPath);
      if (!alreadyThere) {
        fs2.writeFile(targetPath, body);
        return change.created(targetPath);
      }
      if (!options.force) {
        return change.skipped("File already exists");
      }
      fs2.writeFile(targetPath, body);
      return change.updated(targetPath);
    };
  }
});
|
|
334
|
+
var fakerInstance, fakerLoadAttempted, tryLoadFaker, getFaker, initFaker, createFieldGenerators, generateByFieldName, generateByFormat, generateByZodType, generateMockValue, generateMockFields, generateMockContent, getMockFilePath, generateMock, generateDefaultMockFields, processMockField, generateJsonMockData, parseSchemaFromSource, getJsonMockPath, jsonMockExists, loadJsonMock, generateJsonMock, generateJsonMockFromContract;
// src/generators/mock.ts — mock-data generation for event contracts.
// @faker-js/faker is an optional dependency loaded lazily; every generator has
// a deterministic static fallback used when faker is unavailable.
var init_mock = __esm({
  "src/generators/mock.ts"() {
    init_naming();
    init_types();
    // Module-level faker cache; loaded at most once per process.
    fakerInstance = null;
    fakerLoadAttempted = false;
    // Attempts the dynamic import once; returns the faker instance or null.
    tryLoadFaker = async () => {
      if (fakerLoadAttempted) return fakerInstance;
      fakerLoadAttempted = true;
      try {
        const { faker } = await import('@faker-js/faker');
        fakerInstance = faker;
        return faker;
      } catch {
        // Optional dependency not installed — fall back to static values.
        return null;
      }
    };
    getFaker = () => fakerInstance;
    // Returns true when faker is available.
    initFaker = async () => {
      const faker = await tryLoadFaker();
      return faker !== null;
    };
    // Name-based heuristics: each entry matches a substring of the (lowercased)
    // field name and supplies a static value, an optional faker value, and a
    // hint string recorded in the mock's "faker" metadata. Order matters:
    // earlier patterns win (e.g. "firstname" before "name").
    createFieldGenerators = () => [
      { pattern: "email", generate: () => "user@example.com", fakerGenerate: () => getFaker()?.internet.email(), hint: "email" },
      { pattern: "id", generate: () => crypto.randomUUID(), fakerGenerate: () => getFaker()?.string.uuid(), hint: "uuid" },
      { pattern: "firstname", generate: () => "John", fakerGenerate: () => getFaker()?.person.firstName(), hint: "firstName" },
      { pattern: "lastname", generate: () => "Doe", fakerGenerate: () => getFaker()?.person.lastName(), hint: "lastName" },
      { pattern: "name", generate: () => "John Doe", fakerGenerate: () => getFaker()?.person.fullName(), hint: "fullName" },
      { pattern: "phone", generate: () => "+1-555-123-4567", fakerGenerate: () => getFaker()?.phone.number(), hint: "phoneNumber" },
      { pattern: "address", generate: () => "123 Main St", fakerGenerate: () => getFaker()?.location.streetAddress(), hint: "streetAddress" },
      { pattern: "street", generate: () => "123 Main St", fakerGenerate: () => getFaker()?.location.streetAddress(), hint: "streetAddress" },
      { pattern: "city", generate: () => "New York", fakerGenerate: () => getFaker()?.location.city(), hint: "city" },
      { pattern: "country", generate: () => "USA", fakerGenerate: () => getFaker()?.location.country(), hint: "country" },
      { pattern: "price", generate: () => 99.99, fakerGenerate: () => getFaker()?.number.float({ min: 10, max: 1e3, fractionDigits: 2 }), hint: "price" },
      { pattern: "amount", generate: () => 99.99, fakerGenerate: () => getFaker()?.number.float({ min: 10, max: 1e3, fractionDigits: 2 }), hint: "price" },
      { pattern: "total", generate: () => 99.99, fakerGenerate: () => getFaker()?.number.float({ min: 10, max: 1e3, fractionDigits: 2 }), hint: "price" },
      { pattern: "quantity", generate: () => 1, fakerGenerate: () => getFaker()?.number.int({ min: 1, max: 10 }), hint: "quantity" },
      { pattern: "date", generate: () => (/* @__PURE__ */ new Date()).toISOString(), fakerGenerate: () => getFaker()?.date.recent().toISOString(), hint: "date" },
      { pattern: "createdat", generate: () => (/* @__PURE__ */ new Date()).toISOString(), fakerGenerate: () => getFaker()?.date.recent().toISOString(), hint: "date" },
      { pattern: "updatedat", generate: () => (/* @__PURE__ */ new Date()).toISOString(), fakerGenerate: () => getFaker()?.date.recent().toISOString(), hint: "date" }
    ];
    // Generates a value for a field by matching its name against the pattern
    // table; returns { value, hint } or null when no pattern matches.
    generateByFieldName = (fieldName, useFaker = false) => {
      const generators = createFieldGenerators();
      const lowerField = fieldName.toLowerCase();
      const matched = generators.find((g) => lowerField.includes(g.pattern));
      if (!matched) return null;
      // Faker value is preferred when requested and available; ?? falls back
      // to the static generator when faker returns undefined.
      const value = (useFaker ? matched.fakerGenerate?.() : void 0) ?? matched.generate();
      return { value, hint: matched.hint };
    };
    // Generates a value for a zod string format (email/datetime/url/uuid/date/time).
    // Unknown formats yield the placeholder "example".
    generateByFormat = (format, useFaker = false) => {
      const faker = getFaker();
      const formatGenerators = {
        email: {
          static: () => "user@example.com",
          faker: faker ? () => faker.internet.email() : void 0
        },
        datetime: {
          static: () => (/* @__PURE__ */ new Date()).toISOString(),
          faker: faker ? () => faker.date.recent().toISOString() : void 0
        },
        url: {
          static: () => "https://example.com",
          faker: faker ? () => faker.internet.url() : void 0
        },
        uuid: {
          static: () => crypto.randomUUID(),
          faker: faker ? () => faker.string.uuid() : void 0
        },
        date: {
          static: () => (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
          faker: faker ? () => faker.date.recent().toISOString().split("T")[0] : void 0
        },
        time: {
          static: () => (/* @__PURE__ */ new Date()).toISOString().split("T")[1]?.split(".")[0],
          faker: faker ? () => faker.date.recent().toISOString().split("T")[1]?.split(".")[0] : void 0
        }
      };
      const gen = formatGenerators[format];
      if (!gen) return "example";
      return useFaker && gen.faker ? gen.faker() : gen.static();
    };
    // Generates a value for a plain zod primitive type; undefined for
    // unsupported types (caller decides the fallback).
    generateByZodType = (zodType, useFaker = false) => {
      const faker = getFaker();
      const typeGenerators = {
        string: {
          static: () => "example",
          faker: faker ? () => faker.lorem.word() : void 0
        },
        number: {
          static: () => 42,
          faker: faker ? () => faker.number.int({ min: 1, max: 100 }) : void 0
        },
        boolean: {
          static: () => true,
          faker: faker ? () => faker.datatype.boolean() : void 0
        }
      };
      const gen = typeGenerators[zodType];
      if (!gen) return void 0;
      return useFaker && gen.faker ? gen.faker() : gen.static();
    };
    // Returns mock values as *source text* for the generated .mock.ts file.
    // NOTE(review): an unknown type yields undefined, which would render as the
    // literal "undefined" in generated code — confirm callers restrict types.
    generateMockValue = (type) => {
      const mockValues = {
        string: "'mock-string'",
        number: "42",
        boolean: "true",
        date: "new Date().toISOString()",
        datetime: "new Date().toISOString()",
        array: "[]",
        object: "{}"
      };
      return mockValues[type];
    };
    // Renders the body of the mock object literal; optional fields are omitted.
    generateMockFields = (fields) => {
      if (!fields || fields.length === 0) {
        return `  id: 'mock-id',
  createdAt: new Date().toISOString(),`;
      }
      return fields.filter((field) => !field.optional).map((field) => `  ${field.name}: ${generateMockValue(field.type)},`).join("\n");
    };
    // Full .mock.ts module source: a constant mock plus an overridable factory.
    generateMockContent = (options) => {
      const { eventType, fields } = options;
      const names = deriveEventNames(eventType);
      return `import type { ${names.typeName} } from '../${names.domain}/${names.action}'

export const mock${names.pascal}: ${names.typeName} = {
${generateMockFields(fields)}
}

export const create${names.pascal}Mock = (overrides: Partial<${names.typeName}> = {}): ${names.typeName} => ({
  ...mock${names.pascal},
  ...overrides,
})
`;
    };
    getMockFilePath = (eventType, basePath) => {
      const names = deriveEventNames(eventType);
      return `${basePath}/${names.kebab}.mock.ts`;
    };
    // Writes the .mock.ts file: created / updated (force) / skipped.
    generateMock = (options, fs2 = createNodeFileSystem()) => {
      const filePath = getMockFilePath(options.eventType, options.basePath);
      const content = generateMockContent(options);
      const exists = fs2.exists(filePath);
      if (!exists) {
        fs2.writeFile(filePath, content);
        return change.created(filePath);
      }
      if (options.force) {
        fs2.writeFile(filePath, content);
        return change.updated(filePath);
      }
      return change.skipped("File already exists");
    };
    // Default JSON mock payload (id + createdAt) with faker hints.
    generateDefaultMockFields = (useFaker) => ({
      data: {
        id: (useFaker ? getFaker()?.string.uuid() : void 0) ?? crypto.randomUUID(),
        createdAt: (useFaker ? getFaker()?.date.recent().toISOString() : void 0) ?? (/* @__PURE__ */ new Date()).toISOString()
      },
      faker: { id: "uuid", createdAt: "date" }
    });
    // Fills one field of the JSON mock, mutating the passed-in accumulators:
    // name-based heuristics first, then the zod-type fallback.
    processMockField = (field, useFaker, data, fakerHints) => {
      const byName = generateByFieldName(field.name, useFaker);
      if (byName) {
        data[field.name] = byName.value;
        if (byName.hint) fakerHints[field.name] = byName.hint;
      } else {
        data[field.name] = generateByZodType(field.type, useFaker) ?? "example";
      }
    };
    // Builds { data, faker } for a JSON mock from declared fields; optional
    // fields are skipped, no fields means the default id/createdAt payload.
    generateJsonMockData = (options = {}) => {
      const { fields, useFaker = false } = options;
      if (!fields || fields.length === 0) {
        return generateDefaultMockFields(useFaker);
      }
      const data = {};
      const fakerHints = {};
      for (const field of fields) {
        if (!field.optional) processMockField(field, useFaker, data, fakerHints);
      }
      return { data, faker: fakerHints };
    };
    // Best-effort regex scrape of a z.object({...}) body from contract source.
    // First pass: plain `name: z.type(` fields; second pass overwrites entries
    // for chained string formats like `z.string().email()`.
    parseSchemaFromSource = (schemaBody, useFaker = false) => {
      const data = {};
      const fakerHints = {};
      const fieldRegex = /(\w+):\s*z\.(\w+)\(/g;
      const matches = Array.from(schemaBody.matchAll(fieldRegex));
      for (const [, fieldName, zodType] of matches) {
        // Containers are skipped — no sensible scalar mock for them here.
        if (zodType === "array" || zodType === "object") continue;
        const byName = generateByFieldName(fieldName, useFaker);
        if (byName) {
          data[fieldName] = byName.value;
          if (byName.hint) fakerHints[fieldName] = byName.hint;
        } else {
          data[fieldName] = generateByZodType(zodType, useFaker) ?? "example";
        }
      }
      const chainedRegex = /(\w+):\s*z\.string\(\)\.(datetime|email|url|uuid)\(\)/g;
      const chainedMatches = Array.from(schemaBody.matchAll(chainedRegex));
      for (const [, fieldName, method] of chainedMatches) {
        data[fieldName] = generateByFormat(method, useFaker);
        fakerHints[fieldName] = method === "datetime" ? "date" : method;
      }
      return { data, faker: fakerHints };
    };
    // JSON mock location: events/<domain>/<action>.mock.json
    getJsonMockPath = (eventType, contractsPath) => {
      const names = deriveEventNames(eventType);
      return `${contractsPath}/events/${names.domain}/${names.action}.mock.json`;
    };
    jsonMockExists = (eventType, contractsPath, fs2 = createNodeFileSystem()) => fs2.exists(getJsonMockPath(eventType, contractsPath));
    // Reads and parses the JSON mock; null when missing or invalid JSON.
    loadJsonMock = (eventType, contractsPath, fs2 = createNodeFileSystem()) => {
      const mockPath = getJsonMockPath(eventType, contractsPath);
      const content = fs2.readFile(mockPath);
      if (!content) return null;
      try {
        return JSON.parse(content);
      } catch {
        return null;
      }
    };
    // Writes the JSON mock file: created / updated (force) / skipped.
    generateJsonMock = (options, fs2 = createNodeFileSystem()) => {
      const { eventType, contractsPath, fields, force, useFaker = false } = options;
      const mockPath = getJsonMockPath(eventType, contractsPath);
      const exists = fs2.exists(mockPath);
      const { data, faker: fakerHints } = generateJsonMockData({ fields, useFaker });
      const mockData = {
        eventName: eventType,
        description: `Mock data for ${eventType} event`,
        data
      };
      if (Object.keys(fakerHints).length > 0) {
        mockData.faker = fakerHints;
      }
      const content = JSON.stringify(mockData, null, 2);
      if (!exists) {
        fs2.writeFile(mockPath, content);
        return change.created(mockPath);
      }
      if (force) {
        fs2.writeFile(mockPath, content);
        return change.updated(mockPath);
      }
      return change.skipped("File already exists");
    };
    // Derives a JSON mock from an existing contract's source by scraping its
    // schema; returns null when the contract or its schema cannot be found.
    // NOTE(review): always overwrites and reports change.created even when the
    // mock file already existed — confirm that is intended.
    generateJsonMockFromContract = (eventType, contractsPath, options = {}, fs2 = createNodeFileSystem()) => {
      const { useFaker = false } = options;
      const names = deriveEventNames(eventType);
      const contractPath = `${contractsPath}/events/${names.domain}/${names.action}.ts`;
      const contractContent = fs2.readFile(contractPath);
      if (!contractContent) return null;
      const schemaMatch = contractContent.match(/export const \w+Schema\s*=\s*z\.object\(\s*\{([\s\S]*?)\}\s*\)/m);
      if (!schemaMatch) return null;
      const { data, faker: fakerHints } = parseSchemaFromSource(schemaMatch[1], useFaker);
      const mockPath = getJsonMockPath(eventType, contractsPath);
      const mockData = {
        eventName: eventType,
        description: `Mock data for ${eventType} event`,
        data
      };
      if (Object.keys(fakerHints).length > 0) {
        mockData.faker = fakerHints;
      }
      const content = JSON.stringify(mockData, null, 2);
      fs2.writeFile(mockPath, content);
      return change.created(mockPath);
    };
  }
});
|
|
602
|
+
|
|
603
|
+
// src/generators/index.ts
// Namespace object for the generators module; __export wires live getters so
// consumers see the bindings assigned by the init_* functions below.
var generators_exports = {};
__export(generators_exports, {
  addExportToIndex: () => addExportToIndex,
  createMemoryFileSystem: () => createMemoryFileSystem,
  createNodeFileSystem: () => createNodeFileSystem,
  ensureDomainExport: () => ensureDomainExport,
  generateContract: () => generateContract,
  generateContractContent: () => generateContractContent,
  generateEventHandler: () => generateEventHandler,
  generateEventHandlerContent: () => generateEventHandlerContent,
  generateJsonMock: () => generateJsonMock,
  generateJsonMockFromContract: () => generateJsonMockFromContract,
  generateMock: () => generateMock,
  generateMockContent: () => generateMockContent,
  getContractFilePath: () => getContractFilePath,
  getHandlerFilePath: () => getHandlerFilePath,
  getJsonMockPath: () => getJsonMockPath,
  getMockFilePath: () => getMockFilePath,
  initFaker: () => initFaker,
  jsonMockExists: () => jsonMockExists,
  loadJsonMock: () => loadJsonMock
});
// Barrel initializer: runs every generator submodule's init exactly once.
var init_generators = __esm({
  "src/generators/index.ts"() {
    init_contract();
    init_exports();
    init_handler();
    init_mock();
    init_types();
  }
});
|
|
635
|
+
|
|
636
|
+
// src/domain/contract-helper.ts
// Type-only module in the original TypeScript source: nothing survives
// compilation, so its initializer is intentionally empty.
var init_contract_helper = __esm({
  "src/domain/contract-helper.ts"() {
  }
});
|
|
641
|
+
|
|
642
|
+
// src/infrastructure/errors.ts
// Domain error type raised when an event payload fails schema validation.
var ValidationError, createValidationError;
var init_errors = __esm({
  "src/infrastructure/errors.ts"() {
    ValidationError = class extends Error {
      // Discriminant field for structural narrowing without instanceof.
      type = "ValidationError";
      /**
       * @param eventType the event type whose payload failed validation
       * @param details   validator-specific failure details
       */
      constructor(eventType, details) {
        super(`Validation failed for event type: ${eventType}`);
        this.name = "ValidationError";
        this.eventType = eventType;
        this.details = details;
      }
    };
    // Factory wrapper for call sites that prefer functions over `new`.
    createValidationError = function (eventType, details) {
      return new ValidationError(eventType, details);
    };
  }
});
|
|
658
|
+
|
|
659
|
+
// src/infrastructure/logging.ts
// Minimal console logger with a "[cloudevents]" prefix.
var LOG_PREFIX, createLogger, logger;
var init_logging = __esm({
  "src/infrastructure/logging.ts"() {
    LOG_PREFIX = "cloudevents";
    // Builds a logger; `enabled` gates log/info/warn only.
    // NOTE(review): error and debug ignore `enabled` and always emit. Always-on
    // error is plausibly deliberate; always-on debug looks like an oversight —
    // confirm before changing published behavior.
    createLogger = (enabled) => {
      // Forwards to the given console method, omitting the second argument
      // entirely when it was not supplied (so console doesn't print "undefined").
      const logWithArgs = (consoleFn, message, args) => {
        const formattedMessage = `[${LOG_PREFIX}] ${message}`;
        args !== void 0 ? consoleFn(formattedMessage, args) : consoleFn(formattedMessage);
      };
      return {
        log: (message, args) => enabled && logWithArgs(console.log, message, args),
        info: (message, args) => enabled && logWithArgs(console.info, message, args),
        warn: (message, error) => enabled && logWithArgs(console.warn, message, error),
        error: (message, error) => logWithArgs(console.error, message, error),
        debug: (message, args) => logWithArgs(console.debug, message, args)
      };
    };
    // Shared default logger, always enabled.
    logger = createLogger(true);
  }
});
|
|
680
|
+
|
|
681
|
+
// src/infrastructure/index.ts
// Barrel initializer for the infrastructure layer (errors + logging).
var init_infrastructure = __esm({
  "src/infrastructure/index.ts"() {
    init_errors();
    init_logging();
  }
});
|
|
688
|
+
|
|
689
|
+
// src/domain/validation.ts
// Duck-typed helpers for pulling the literal `type` value out of an
// object schema (schema.shape.type.value) without importing zod types.
var hasShape, hasTypeField, extractTypeValue, safeExtractType, extractTypeFromSchema;
var init_validation = __esm({
  "src/domain/validation.ts"() {
    // True when `schema` exposes a non-null object `shape` property.
    hasShape = (schema) => {
      if (!("shape" in schema)) return false;
      const { shape } = schema;
      return shape != null && typeof shape === "object";
    };
    // True when `shape` has a non-null object `type` field carrying `value`.
    hasTypeField = (shape) => {
      if (!("type" in shape)) return false;
      const typeField = shape.type;
      return typeField != null && typeof typeField === "object" && "value" in typeField;
    };
    // Return the literal value only when it is a string; otherwise undefined.
    extractTypeValue = (typeField) => {
      const { value } = typeField;
      return typeof value === "string" ? value : undefined;
    };
    // Structure-checked extraction; undefined when the shape doesn't match.
    safeExtractType = (schema) => {
      if (!hasShape(schema)) return undefined;
      if (!hasTypeField(schema.shape)) return undefined;
      return extractTypeValue(schema.shape.type);
    };
    // Never-throwing wrapper (guards against exotic getters on the schema).
    extractTypeFromSchema = (schema) => {
      try {
        return safeExtractType(schema);
      } catch {
        return undefined;
      }
    };
  }
});
|
|
710
|
+
// Lazy initializer for src/domain/discovery.ts. Discovery relies on the
// infrastructure (logger/errors) and validation modules being initialized.
var init_discovery = __esm({
  "src/domain/discovery.ts"() {
    init_infrastructure();
    init_validation();
  }
});
|
|
716
|
+
// Lazy initializer for src/domain/handler-factory.ts. The emitted body is
// empty: no runtime initialization for this source module.
var init_handler_factory = __esm({
  "src/domain/handler-factory.ts"() {
  }
});
|
|
720
|
+
|
|
721
|
+
// src/domain/index.ts
// Domain barrel initializer: brings every domain submodule online once.
var init_domain = __esm({
  "src/domain/index.ts"() {
    init_contract_helper();
    init_discovery();
    init_handler_factory();
    init_naming();
    init_validation();
  }
});
|
|
731
|
+
|
|
732
|
+
// src/publishing/nats.publisher.ts
// NATS publisher module (lazy-init). Owns a single cached connection
// promise, derives subjects/streams from CloudEvent types, and publishes
// JSON-encoded CloudEvents — schema-validated or raw.
var nats_publisher_exports = {};
__export(nats_publisher_exports, {
  __resetNatsPublisher: () => __resetNatsPublisher,
  closeConnection: () => closeConnection,
  deriveStreamFromType: () => deriveStreamFromType,
  deriveSubjectFromType: () => deriveSubjectFromType,
  publish: () => publish,
  publishNatsEvent: () => publishNatsEvent,
  publishNatsRawEvent: () => publishNatsRawEvent
});
var sc, natsConnectionPromise, deriveSubjectFromEventType, getNatsConnection, closeConnection, __resetNatsPublisher, deriveSubjectFromType, deriveStreamFromType, publishNatsRawEvent, publishNatsEvent, publish;
var init_nats_publisher = __esm({
  "src/publishing/nats.publisher.ts"() {
    init_domain();
    init_infrastructure();
    init_utils();
    sc = StringCodec();
    // Cached connection promise: null until first use; reset to null on
    // connect failure or after close so the next publish reconnects.
    natsConnectionPromise = null;
    // "order.created" -> "orders.created": pluralize the first (domain)
    // segment. Types without a "." pass through unchanged.
    deriveSubjectFromEventType = (eventType) => {
      const parts = eventType.split(".");
      if (parts.length < 2) return eventType;
      const domain = parts[0];
      const action = parts.slice(1).join(".");
      const pluralDomain = pluralize(domain);
      return `${pluralDomain}.${action}`;
    };
    // Resolve the shared connection, creating it on first use.
    // Server precedence: explicit arg > NATS_URL env var > localhost.
    getNatsConnection = async (servers) => {
      if (!natsConnectionPromise) {
        const url = servers ?? process.env.NATS_URL ?? "nats://localhost:4222";
        natsConnectionPromise = connect({ servers: url }).then((connection) => {
          logger.debug(`[NATS] connected to ${url}`);
          return connection;
        }).catch((error) => {
          logger.error("[NATS] connection error", error);
          // Clear the cache so a later call can retry connecting.
          natsConnectionPromise = null;
          throw error;
        });
      }
      return natsConnectionPromise;
    };
    // Drain (flush + close) the cached connection. Errors are swallowed
    // deliberately (best-effort shutdown); the cache is always cleared.
    closeConnection = async () => {
      if (natsConnectionPromise) {
        try {
          const nc = await natsConnectionPromise;
          if (nc && typeof nc.drain === "function") {
            await nc.drain();
            logger.debug("[NATS] connection closed");
          }
        } catch {
        } finally {
          natsConnectionPromise = null;
        }
      }
    };
    // Test hook: reset module-level connection state.
    __resetNatsPublisher = async () => {
      await closeConnection();
    };
    // Map an event type to a NATS subject via config.typeToSubjectMap.
    // Longest matching prefix wins; falls back to defaultSubjectPrefix
    // (or the raw type) when nothing matches.
    deriveSubjectFromType = (eventType, config) => {
      if (!config?.typeToSubjectMap) {
        return config?.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
      }
      const sortedPrefixes = Object.keys(config.typeToSubjectMap).sort((a, b) => b.length - a.length);
      for (const prefix of sortedPrefixes) {
        if (eventType.startsWith(prefix)) {
          const suffix = eventType.slice(prefix.length);
          const mappedPrefix = config.typeToSubjectMap[prefix];
          const cleanSuffix = suffix.startsWith(".") ? suffix.slice(1) : suffix;
          return cleanSuffix ? `${mappedPrefix}.${cleanSuffix}` : mappedPrefix;
        }
      }
      return config.defaultSubjectPrefix ? `${config.defaultSubjectPrefix}.${eventType}` : eventType;
    };
    // Map an event type to a stream name (longest prefix wins); undefined
    // when no mapping is configured or none matches.
    deriveStreamFromType = (eventType, config) => {
      if (!config?.typeToStreamMap) return void 0;
      const sortedPrefixes = Object.keys(config.typeToStreamMap).sort((a, b) => b.length - a.length);
      for (const prefix of sortedPrefixes) {
        if (eventType.startsWith(prefix)) {
          return config.typeToStreamMap[prefix];
        }
      }
      return void 0;
    };
    // Build a CloudEvents 1.0 envelope and publish it without validation.
    // Returns the generated event id. NOTE(review): core NATS publish is
    // buffered; the buffer is only flushed here when closeAfterPublish
    // triggers drain() — confirm callers flush/close before process exit.
    publishNatsRawEvent = async (subjectName, eventType, eventData, options) => {
      const cloudEvent = {
        specversion: "1.0",
        type: eventType,
        source: options?.source || "hono-service",
        id: crypto.randomUUID(),
        time: (/* @__PURE__ */ new Date()).toISOString(),
        datacontenttype: "application/json",
        data: eventData,
        ...options?.subject && { subject: options.subject },
        ...options?.tenantId && { tenantid: options.tenantId }
      };
      const data = JSON.stringify(cloudEvent);
      const nc = await getNatsConnection(options?.servers);
      nc.publish(subjectName, sc.encode(data));
      logger.debug(`Published CloudEvent ${eventType} to NATS subject ${subjectName} (id=${cloudEvent.id})`);
      if (options?.closeAfterPublish) {
        await closeConnection();
      }
      return cloudEvent.id;
    };
    // Validate eventData against the schema (whose literal `type` names the
    // event) before publishing. Throws ValidationError with per-issue
    // details on failure.
    publishNatsEvent = async (subjectName, schema, eventData, options) => {
      const eventType = extractTypeFromSchema(schema);
      if (!eventType) {
        throw new Error("Could not extract event type from schema. Make sure your schema has proper metadata.");
      }
      const validationResult = schema.safeParse(eventData);
      if (!validationResult.success) {
        const validationDetails = validationResult.error.issues.map((issue) => ({
          code: issue.code,
          message: issue.message,
          // Symbols cannot be serialized; drop them from the path.
          path: issue.path.filter((p) => typeof p !== "symbol"),
          expected: "expected" in issue ? String(issue.expected) : void 0,
          received: "received" in issue ? String(issue.received) : void 0
        }));
        const handlerValidationError = {
          handlerName: `NatsPublisher:${eventType}`,
          validationErrors: validationDetails
        };
        throw createValidationError(eventType, [handlerValidationError]);
      }
      return publishNatsRawEvent(subjectName, eventType, validationResult.data, options);
    };
    // Convenience publisher: accepts either a type string (subject derived
    // by pluralizing the domain segment) or a contract object (uses
    // channel.subject, falling back to the contract's type).
    publish = async (eventTypeOrContract, eventData, options) => {
      const eventType = typeof eventTypeOrContract === "string" ? eventTypeOrContract : eventTypeOrContract.type;
      const natsSubject = typeof eventTypeOrContract === "string" ? deriveSubjectFromEventType(eventTypeOrContract) : eventTypeOrContract.channel?.subject ?? eventTypeOrContract.type;
      return publishNatsRawEvent(natsSubject, eventType, eventData, options);
    };
  }
});
|
|
865
|
+
|
|
866
|
+
// src/flows/create-event.flow.ts
// Naming helpers (deriveEventNames, validateEventType, ...) must be
// initialized before the flow definitions below reference them.
init_naming();
|
|
868
|
+
|
|
869
|
+
// src/effects/types.ts
// Factories for the structured side-effect records collected during
// generation flows; `kind` is the discriminant field.

// Effect: a NATS stream was wired into a service.
var streamWired = function (stream, servicePath) {
  return { kind: "stream.wired", stream, servicePath };
};
// Effect: an event contract file was created.
var contractCreated = function (path2, eventType) {
  return { kind: "contract.created", path: path2, eventType };
};
// Effect: an event handler file was created inside a service.
var handlerCreated = function (path2, eventType, servicePath) {
  return { kind: "handler.created", path: path2, eventType, servicePath };
};
|
|
886
|
+
|
|
887
|
+
// src/flows/create-event.flow.ts
// Initialize the generator module (generateContract, generateJsonMock,
// generateEventHandler, ensureDomainExport) used by the tasks below.
init_generators();
|
|
889
|
+
// Parse a single "name?:type" spec into { name, type, optional }.
// A trailing "?" on the name marks it optional; type defaults to "string".
var parseField = (field) => {
  const segments = field.split(":").map((segment) => segment.trim());
  const rawName = segments[0];
  const type = segments[1] === undefined ? "string" : segments[1];
  const optional = rawName.endsWith("?");
  return {
    name: optional ? rawName.slice(0, -1) : rawName,
    type,
    optional
  };
};
// Parse a comma-separated field list; blank input yields an empty list and
// empty segments are dropped.
var parseFieldsInput = (input2) => {
  if (input2.trim() === "") return [];
  return input2
    .split(",")
    .map((field) => field.trim())
    .filter(Boolean)
    .map(parseField);
};
|
|
896
|
+
// Generate the event contract file and record the outcome in the flow's
// generation summary; emits a contract.created effect on creation.
var generateContractTask = (ctx) => {
  const generation = initGenerationContext(ctx);
  const result = generateContract(
    {
      eventType: ctx.eventType,
      basePath: ctx.config.contractsPath,
      fields: ctx.fields ?? [],
      force: ctx.force ?? false
    },
    ctx._fs
  );
  trackChange(result).record(generation.artifacts, "Contract").onCreated((path2) => {
    generation.addEffect(contractCreated(path2, ctx.eventType));
  });
};
// Ensure the domain's barrel file re-exports the new event contract.
var addExportsTask = (ctx) => {
  const names = deriveEventNames(ctx.eventType);
  ensureDomainExport(
    {
      contractsBasePath: ctx.config.contractsPath,
      domain: names.domain,
      eventFile: names.action
    },
    ctx._fs
  );
};
// Generate a JSON mock payload for the event and record the outcome.
var generateMockTask = (ctx) => {
  const generation = initGenerationContext(ctx);
  const result = generateJsonMock(
    {
      eventType: ctx.eventType,
      contractsPath: ctx.config.contractsPath,
      fields: ctx.fields ?? [],
      force: ctx.force ?? false
    },
    ctx._fs
  );
  trackChange(result).record(generation.artifacts, "Mock");
};
// Generate an event handler inside the selected service. No-op when no
// service was chosen; on creation, emits handler.created and stream.wired
// effects for the generation summary.
var generateHandlerTask = (ctx) => {
  if (!ctx.servicePath) return;
  const generation = initGenerationContext(ctx);
  const names = deriveEventNames(ctx.eventType);
  // Handlers live under <service>/src; accept paths given either way.
  const handlerBasePath = ctx.servicePath.endsWith("/src") ? ctx.servicePath : `${ctx.servicePath}/src`;
  const result = generateEventHandler(
    {
      eventType: ctx.eventType,
      basePath: handlerBasePath,
      contractsPackage: ctx.config.contractsPackage ?? "",
      force: ctx.force ?? false
    },
    ctx._fs
  );
  trackChange(result).record(generation.artifacts, "Handler").onCreated((path2) => {
    if (ctx.servicePath) {
      generation.addEffect(handlerCreated(path2, ctx.eventType, ctx.servicePath));
      generation.addEffect(streamWired(names.streamName, ctx.servicePath));
    }
  });
};
|
|
956
|
+
// Declarative step definitions for the create-event flow.
var flow = createFlow();
var isInvalidEventType = (ctx) => !isValidEventType(ctx.eventType);
var getEventTypeErrorMessage = (ctx) => `Invalid event type: ${validateEventType(ctx.eventType).errors.join(", ")}`;
// Provide a filesystem adapter (config override or the Node default).
var ensureInfraStep = flow.ensure("_fs", (ctx) => ctx.config.fs ?? createNodeFileSystem()).build();
// Provide a fresh generation result accumulator.
var ensureGenerationStep = flow.ensure("_generation", () => createGenerationResult()).build();
// Prompt for the event type unless it was supplied programmatically.
var eventTypeStep = flow.input("eventType").prompt({
  title: "Event Type",
  message: "Enter event type (e.g., order.created):"
}).validateBy(validateEventType).skipIfSet().build();
var validateEventTypeStep = flow.abortIf(isInvalidEventType, getEventTypeErrorMessage).build();
// Prompt for schema fields; the raw string is parsed into field specs.
var schemaFieldsStep = input("fieldsInput").prompt({
  title: "Schema Fields",
  message: "Schema fields (e.g., orderId:string,total:number) or leave empty:"
}).mapTo("fields", parseFieldsInput).skipWhen((ctx) => ctx.fields !== void 0).build();
// Optionally pick a service to receive a generated handler.
var serviceSelectionStep = flow.select("servicePath").title("Service Selection").prompt("Select service to create handler in:").choicesFrom((ctx) => (ctx.availableServices ?? []).map((s) => ({ name: s, value: s }))).optional("Skip handler creation").skipIfSet().build();
var generateFilesStep = flow.task("Generate files").steps([generateContractTask, addExportsTask, generateMockTask, generateHandlerTask]).produces("fields", "_generation").build();
// Print the created/skipped artifact summary at the end of the flow.
var printSummary = (ctx) => {
  printGenerationSummary(initGenerationContext(ctx));
};
var printSummaryStep = flow.task("Print summary").steps([printSummary]).build();
// Ordered step list consumed by runFlow().
var createEventFlowSteps = [
  ensureInfraStep,
  ensureGenerationStep,
  eventTypeStep,
  validateEventTypeStep,
  schemaFieldsStep,
  serviceSelectionStep,
  generateFilesStep,
  printSummaryStep
];
|
|
986
|
+
|
|
987
|
+
// src/flows/list-events.flow.ts
// Naming helpers (parseEventTypeFromContract) must be initialized first.
init_naming();
|
|
989
|
+
// True for TypeScript contract sources; excludes test files and barrels.
var isContractFile = (filename) => {
  if (!filename.endsWith(".ts")) return false;
  if (filename.endsWith(".test.ts")) return false;
  return filename !== "index.ts";
};
// Strip the contracts root prefix and normalize Windows path separators.
var makeRelativePath = (contractsPath, fullPath) => {
  const stripped = fullPath.replace(`${contractsPath}/`, "");
  return stripped.replace(/\\/g, "/");
};
|
|
991
|
+
// Read a directory, treating any fs error (missing dir, permissions) as
// "no entries".
var safeReadDir = (dir) => {
  try {
    return readdirSync(dir);
  } catch {
    return [];
  }
};
// Stat a path, returning null instead of throwing when it is unreadable.
var safeStat = (path2) => {
  try {
    return { isDirectory: statSync(path2).isDirectory() };
  } catch {
    return null;
  }
};
// Resolve one directory entry to zero or more relative contract paths,
// recursing into subdirectories.
var processEntry = (dir, entry, contractsPath) => {
  const fullPath = join(dir, entry);
  const stat = safeStat(fullPath);
  if (stat === null) {
    return [];
  }
  if (stat.isDirectory) {
    return findContractFiles(fullPath, contractsPath);
  }
  return isContractFile(entry) ? [makeRelativePath(contractsPath, fullPath)] : [];
};
// Recursively collect contract file paths (relative to contractsPath).
var findContractFiles = (dir, contractsPath) => {
  const entries = safeReadDir(dir);
  return entries.flatMap((entry) => processEntry(dir, entry, contractsPath));
};
// Derive the event type encoded in a contract file path.
var parseEventType = (filePath) => parseEventTypeFromContract(filePath);
// Curried substring filter; an empty/blank/absent pattern matches all.
var matchesPattern = (pattern) => (eventType) => {
  if (!pattern) return true;
  if (pattern.trim() === "") return true;
  return eventType.includes(pattern);
};
// Walk <contractsPath>/events and return the sorted, optionally filtered
// list of event types; empty when the events directory does not exist.
var discoverEventTypes = (contractsPath, pattern) => {
  const eventsDir = join(contractsPath, "events");
  if (!existsSync(eventsDir)) {
    return [];
  }
  return findContractFiles(eventsDir, contractsPath)
    .map(parseEventType)
    .filter((eventType) => eventType !== null)
    .filter(matchesPattern(pattern))
    .sort();
};
|
|
1028
|
+
// Declarative step definitions for the list-events flow.
var flow2 = createFlow();
var listEventsFlowSteps = [
  // Prompt for a filter pattern unless one (possibly "") was supplied.
  flow2.input("pattern").prompt({
    title: "Filter Pattern",
    message: "Filter by pattern (leave empty for all):"
  }).skipWhen((ctx) => ctx.pattern != null).build(),
  // Scan the contracts tree; a blank pattern means "no filter".
  flow2.task("Discover events").steps([
    (ctx) => {
      const filterPattern = ctx.pattern && ctx.pattern.trim() !== "" ? ctx.pattern : void 0;
      ctx.eventTypes = discoverEventTypes(ctx.config.contractsPath, filterPattern);
    }
  ]).produces("eventTypes").build()
];
|
|
1041
|
+
|
|
1042
|
+
// src/flows/publish-event.flow.ts
// Event-type validation and mock loading come from these lazy modules.
init_naming();
init_generators();
|
|
1045
|
+
// Parse optional JSON event data: blank input yields an empty object,
// invalid JSON yields null (the caller treats null as a user error).
var parseDataInput = (input2) => {
  if (input2.trim() === "") {
    return {};
  }
  try {
    return JSON.parse(input2);
  } catch {
    return null;
  }
};
|
|
1053
|
+
// Resolve the event payload for publishing. Precedence: explicit JSON
// input > JSON mock for the event type > empty object. Sets ctx.data and
// (when a mock is used) ctx.usedMock.
var loadDataFromInput = async (ctx) => {
  if (ctx.dataInput !== void 0 && ctx.dataInput.trim() !== "") {
    const parsed = parseDataInput(ctx.dataInput);
    if (parsed === null) {
      throw new Error("Invalid JSON data");
    }
    ctx.data = parsed;
    return;
  }
  // Lazily initialize and import the generators module (bundler pattern).
  const { loadJsonMock: loadJsonMock2 } = await Promise.resolve().then(() => (init_generators(), generators_exports));
  const mock = loadJsonMock2(ctx.eventType, ctx.config.contractsPath);
  if (mock?.data) {
    ctx.data = mock.data;
    ctx.usedMock = true;
    return;
  }
  ctx.data = {};
};
// Publish ctx.data to NATS (connection closed after publish) and report
// success on stdout, noting when mock data was used.
var publishToNats = async (ctx) => {
  // Lazily initialize and import the NATS publisher module.
  const { publish: publish2 } = await Promise.resolve().then(() => (init_nats_publisher(), nats_publisher_exports));
  await publish2(ctx.eventType, ctx.data ?? {}, {
    source: "pf-cli",
    closeAfterPublish: true
  });
  ctx.published = true;
  const mockHint = ctx.usedMock ? " (using mock data)" : "";
  console.log(`\u2705 Published ${ctx.eventType}${mockHint}`);
};
|
|
1081
|
+
// Skip the data prompt when data was supplied or a JSON mock exists.
var hasExplicitData = (ctx) => ctx.data !== void 0 || jsonMockExists(ctx.eventType, ctx.config.contractsPath);
var isInvalidEventType2 = (ctx) => !isValidEventType(ctx.eventType);
var getEventTypeErrorMessage2 = (ctx) => `Invalid event type: ${validateEventType(ctx.eventType).errors.join(", ")}`;
// Declarative step definitions for the publish-event flow.
var flow3 = createFlow();
var publishEventFlowSteps = [
  flow3.input("eventType").prompt({
    title: "Event Type",
    message: "Event type to publish (e.g., order.created):"
  }).validateBy(validateEventType).skipIfSet().build(),
  flow3.abortIf(isInvalidEventType2, getEventTypeErrorMessage2).build(),
  flow3.input("dataInput").prompt({
    title: "Event Data",
    message: "Event data as JSON (or leave empty for mock data):"
  }).skipWhen(hasExplicitData).build(),
  flow3.task("Publish event").steps([
    loadDataFromInput,
    publishToNats
  ]).produces("data", "published", "usedMock").build()
];
|
|
1100
|
+
|
|
1101
|
+
// src/api.ts
// Programmatic (non-interactive) API: create an event contract plus mock
// and optional service handler. `fields` may be a spec string or a parsed
// list. Throws on flow failure; returns the generation summary.
var createEvent = async (eventType, options = {}) => {
  const { fields, service, contractsPath, mocksPath, force } = options;
  const result = await runFlow(createEventFlowSteps, {
    initialContext: {
      eventType,
      fields: typeof fields === "string" ? parseFieldsInput(fields) : fields,
      servicePath: service,
      availableServices: [],
      // No services in simple API
      force,
      config: {
        contractsPath: contractsPath ?? "packages/contracts/src",
        mocksPath
      }
    },
    inputs: {},
    // Non-interactive mode
    exitOnError: true
  });
  if (!result.success) {
    throw new Error(result.abortReason ?? "Failed to create event");
  }
  return {
    created: result.context._generation?.created ?? [],
    skipped: result.context._generation?.skipped ?? [],
    effects: result.context._generation?.effects ?? []
  };
};
|
|
1130
|
+
// Programmatic API: list discovered event types, optionally filtered by a
// substring pattern. Throws on flow failure.
var listEvents = async (options = {}) => {
  const { pattern, contractsPath } = options;
  const result = await runFlow(listEventsFlowSteps, {
    initialContext: {
      // "" (not undefined) so the interactive prompt step is skipped.
      pattern: pattern ?? "",
      config: {
        contractsPath: contractsPath ?? "packages/contracts/src"
      }
    },
    exitOnError: true
  });
  if (!result.success) {
    throw new Error(result.abortReason ?? "Failed to list events");
  }
  return result.context.eventTypes ?? [];
};
|
|
1146
|
+
// Programmatic API: publish an event to NATS. When `data` is undefined the
// flow falls back to the event's JSON mock. Throws on flow failure.
var publishEvent = async (eventType, data, options = {}) => {
  const { natsUrl, contractsPath } = options;
  const result = await runFlow(publishEventFlowSteps, {
    initialContext: {
      eventType,
      data,
      config: {
        contractsPath: contractsPath ?? "packages/contracts/src",
        natsUrl: natsUrl ?? "nats://localhost:4222"
      },
      published: false
    },
    exitOnError: true
  });
  if (!result.success) {
    throw new Error(result.abortReason ?? "Failed to publish event");
  }
  return { success: true };
};
|
|
1165
|
+
|
|
1166
|
+
// src/cli.ts
// Print the CLI usage banner, one console.log call per line as before.
var showUsage = () => {
  const lines = [
    "Usage: cloudevents <command> [options]\n",
    "Commands:",
    " list List all event types",
    " create <type> Create event contract with Zod schema",
    " publish <type> <data> Publish event (data as JSON string)"
  ];
  for (const line of lines) {
    console.log(line);
  }
};
|
|
1174
|
+
// `cloudevents list`: print every discovered event type as a bullet list.
var handleList = async () => {
  const events = await listEvents();
  console.log("\u{1F4CB} Available event types:\n");
  for (const event of events) {
    console.log(` \u2022 ${event}`);
  }
};
|
|
1181
|
+
// `cloudevents create <type>`: generate the contract for one event type.
// Exits with code 1 when the type argument is missing.
var handleCreate = async ([type]) => {
  if (!type) {
    console.error("\u274C Error: Event type required");
    console.log("Usage: cloudevents create <type>");
    process.exit(1);
  }
  await createEvent(type);
  console.log(`\u2705 Created event contract: ${type}`);
};
|
|
1190
|
+
// `cloudevents publish <type> <data>`: publish one event with a JSON
// payload. Exits with code 1 when arguments are missing; throws (caught by
// the top-level handler) with a CLI-friendly message on malformed JSON
// instead of surfacing a raw SyntaxError.
var handlePublish = async ([type, dataJson]) => {
  if (!type || !dataJson) {
    console.error("\u274C Error: Type and data required");
    console.log("Usage: cloudevents publish <type> <data>");
    console.log(`Example: cloudevents publish order.created '{"orderId":"123"}'`);
    process.exit(1);
  }
  let data;
  try {
    data = JSON.parse(dataJson);
  } catch (error) {
    // Preserve the parser error as `cause` while giving the user a message
    // that points at the offending argument.
    throw new Error(`Invalid JSON for <data>: ${dataJson}`, { cause: error });
  }
  await publishEvent(type, data);
  console.log(`\u2705 Published event: ${type}`);
};
|
|
1201
|
+
// Command-name -> handler dispatch table; each handler receives the
// remaining argv entries as an array.
var commands = {
  list: handleList,
  create: handleCreate,
  publish: handlePublish
};
|
|
1206
|
+
// Dispatch one CLI command; exits with code 1 on an unknown command name.
var runCommand = async (command, args) => {
  const handler = commands[command];
  if (!handler) {
    console.error(`\u274C Unknown command: ${command}`);
    process.exit(1);
  }
  await handler(args);
};
|
|
1214
|
+
// Terminal error handler: print the message and exit non-zero.
var handleError = (error) => {
  console.error("\u274C Error:", error.message);
  process.exit(1);
};
|
|
1218
|
+
// CLI entry point: argv[2] is the command, the rest are its arguments.
// Shows usage and exits 1 when no command is given.
var main = async () => {
  const [, , command, ...args] = process.argv;
  if (!command) {
    showUsage();
    process.exit(1);
  }
  await runCommand(command, args).catch(handleError);
};
// Guard the top-level promise as well: a rejection escaping main() (e.g.
// thrown before runCommand's .catch attaches) would otherwise surface as
// an unhandled rejection instead of a clean error exit.
main().catch(handleError);
|