@synode/core 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +35 -0
- package/dist/engine-CgLY6SKJ.cjs +593 -0
- package/dist/engine-CgLY6SKJ.cjs.map +1 -0
- package/dist/engine-SRByMZvP.mjs +515 -0
- package/dist/engine-SRByMZvP.mjs.map +1 -0
- package/dist/execution/worker.cjs +125 -0
- package/dist/execution/worker.cjs.map +1 -0
- package/dist/execution/worker.d.cts +1 -0
- package/dist/execution/worker.d.mts +1 -0
- package/dist/execution/worker.mjs +125 -0
- package/dist/execution/worker.mjs.map +1 -0
- package/dist/index.cjs +1163 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +1142 -0
- package/dist/index.d.mts +1142 -0
- package/dist/index.mjs +1093 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +60 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1163 @@
|
|
|
1
|
+
//#region rolldown:runtime
// Bundler-generated CommonJS <-> ESM interop runtime (rolldown). Do not edit by hand.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies every own property of `from` onto `to` as a live getter (so later
// re-assignments inside the source module stay visible), skipping the key
// named by `except` and any key `to` already owns. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
      key = keys[i];
      if (!__hasOwnProp.call(to, key) && key !== except) {
        __defProp(to, key, {
          // Bind the current key so the getter reads `from[key]` lazily,
          // not the loop variable's final value.
          get: ((k) => from[k]).bind(null, key),
          // Preserve enumerability; treat a missing descriptor as enumerable.
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
      }
    }
  }
  return to;
};
// Wraps a CJS module as an ESM namespace object: when the module is not
// already an ES module (no __esModule flag), expose the whole module object
// as `default` and mirror its named properties onto the namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
  value: mod,
  enumerable: true
}) : target, mod));

//#endregion
|
|
28
|
+
const require_engine = require('./engine-CgLY6SKJ.cjs');
|
|
29
|
+
let node_os = require("node:os");
|
|
30
|
+
let node_worker_threads = require("node:worker_threads");
|
|
31
|
+
let node_path = require("node:path");
|
|
32
|
+
node_path = __toESM(node_path);
|
|
33
|
+
let node_fs_promises = require("node:fs/promises");
|
|
34
|
+
node_fs_promises = __toESM(node_fs_promises);
|
|
35
|
+
let node_fs = require("node:fs");
|
|
36
|
+
let node_url = require("node:url");
|
|
37
|
+
let zod = require("zod");
|
|
38
|
+
|
|
39
|
+
//#region src/generators/builder.ts
|
|
40
|
+
/**
 * Defines a new journey.
 *
 * Pure identity helper: exists so callers get editor/type assistance while
 * authoring the configuration object, which is returned unchanged.
 *
 * @param config The journey configuration.
 * @returns The configured journey object (same reference as `config`).
 * @see {@link Journey}
 */
function defineJourney(config) {
  return config;
}
|
|
49
|
+
/**
 * Defines a new adventure.
 *
 * Pure identity helper: exists so callers get editor/type assistance while
 * authoring the configuration object, which is returned unchanged.
 *
 * @param config The adventure configuration.
 * @returns The configured adventure object (same reference as `config`).
 * @see {@link Adventure}
 */
function defineAdventure(config) {
  return config;
}
|
|
58
|
+
/**
 * Defines a new action.
 *
 * If a custom `handler` is supplied it is used as-is. Otherwise a handler is
 * synthesized that evaluates each entry in `config.fields` (functions are
 * awaited with the context and the payload built so far; other values are
 * used verbatim) and emits a single event carrying the resulting payload.
 *
 * @param config The action configuration.
 * @returns The configured action object.
 * @see {@link Action}
 * @see {@link ActionDefinition}
 */
function defineAction(config) {
  const { id, name, timeSpan, bounceChance } = config;
  if (config.handler) {
    return {
      id,
      name,
      handler: config.handler,
      timeSpan,
      bounceChance
    };
  }
  // Synthesized handler: build the payload field-by-field, then emit one event.
  const synthesizeEvents = async (context) => {
    const payload = {};
    if (config.fields) {
      for (const [key, generator] of Object.entries(config.fields)) {
        // Field generators may consult earlier fields via the partial payload.
        payload[key] = typeof generator === "function" ? await generator(context, payload) : generator;
      }
    }
    return [{
      id: context.generateId("event"),
      userId: context.userId,
      sessionId: context.sessionId,
      name,
      timestamp: context.now(),
      payload
    }];
  };
  return {
    id,
    name,
    timeSpan,
    bounceChance,
    handler: synthesizeEvents
  };
}
|
|
93
|
+
|
|
94
|
+
//#endregion
|
|
95
|
+
//#region src/generators/fields.ts
|
|
96
|
+
/**
 * Returns a value generated by Faker.js using the context's locale.
 *
 * @param generator Function that takes a Faker instance and returns a value.
 * @returns A field generator that invokes `generator` with `context.faker`.
 */
function fake(generator) {
  return (context) => generator(context.faker);
}
|
|
105
|
+
/**
 * Returns one of the provided options randomly (uniform over the array).
 */
function oneOf(options) {
  return () => {
    const index = Math.floor(Math.random() * options.length);
    return options[index];
  };
}
|
|
113
|
+
/**
 * Returns true with the given probability (0-1).
 */
function chance(probability) {
  return () => Math.random() < probability;
}
|
|
121
|
+
/**
 * Returns a value based on weighted probabilities.
 *
 * @param options Map of value to weight (weights should sum to 1, but will be normalized if not)
 */
function weighted(options) {
  return () => {
    const entries = Object.entries(options);
    const totalWeight = entries.reduce((sum, [, weight]) => sum + weight, 0);
    let remaining = Math.random() * totalWeight;
    for (const [value, weight] of entries) {
      remaining -= weight;
      if (remaining <= 0) {
        return value;
      }
    }
    // Floating-point safety net: fall back to the last entry.
    return entries[entries.length - 1][0];
  };
}
|
|
137
|
+
|
|
138
|
+
//#endregion
|
|
139
|
+
//#region src/execution/pool.ts
|
|
140
|
+
// CommonJS stand-in for ESM directory resolution: round-trips __filename
// through a file:// URL so `new URL(".", …)` yields this module's directory,
// then converts back to a filesystem path (trailing separator included).
const __dirname$1 = (0, node_url.fileURLToPath)(new URL(".", require("url").pathToFileURL(__filename).href));
/**
 * Resolves the worker script path. Uses the compiled `.mjs` in dist when available,
 * falling back to the TypeScript source for development with tsx.
 *
 * Checks two locations for the compiled worker:
 * 1. Same directory as pool (works when running from dist/)
 * 2. Equivalent dist/ path (works when running from src/ during dev/test)
 *
 * @returns Absolute path to the worker script
 */
function resolveWorkerScript() {
  const localMjs = (0, node_path.join)(__dirname$1, "worker.mjs");
  try {
    // accessSync throws if the file is missing; an empty catch means
    // "not here — try the next candidate".
    (0, node_fs.accessSync)(localMjs);
    return localMjs;
  } catch {}
  // NOTE(review): replaces only the FIRST "src/" (or "src\") segment and
  // always substitutes a forward slash; assumed safe for this package's
  // layout — verify on Windows and on paths containing "src" more than once.
  const distMjs = __dirname$1.replace(/src[\\/]/, "dist/") + "worker.mjs";
  try {
    (0, node_fs.accessSync)(distMjs);
    return distMjs;
  } catch {}
  // Last resort: the TypeScript source, runnable under tsx in development.
  return (0, node_path.join)(__dirname$1, "worker.ts");
}
|
|
164
|
+
/**
 * Serializes a Map of datasets into plain objects for structured clone transfer.
 *
 * Only `id`, `name`, and `rows` are carried over; any other properties on a
 * dataset are dropped.
 *
 * @param datasets - Map of dataset ID to Dataset (may be undefined)
 * @returns Array of serialized datasets (empty when no datasets are given)
 */
function serializeDatasets(datasets) {
  const serialized = [];
  if (datasets) {
    for (const { id, name, rows } of datasets.values()) {
      serialized.push({ id, name, rows });
    }
  }
  return serialized;
}
|
|
180
|
+
/**
 * Rehydrates a Date value that may have been stringified during structured clone.
 * Structured clone preserves Dates natively, but this provides a safety net
 * for edge cases where timestamps arrive as ISO strings.
 *
 * @param value - A Date object or ISO string timestamp
 * @returns A proper Date instance (the same object when already a Date)
 */
function rehydrateDate(value) {
  return value instanceof Date ? value : new Date(value);
}
|
|
192
|
+
/**
 * Manages a pool of worker threads for parallel user generation.
 * Distributes users evenly across workers, collects events via message passing,
 * and writes them through the configured output adapter.
 */
var WorkerPool = class {
  options;
  /**
   * @param options - Pool configuration including module path, user count, and adapter
   */
  constructor(options) {
    this.options = options;
  }
  /**
   * Validates the worker module path exists on disk.
   *
   * @throws Error if the module path does not exist
   */
  async validateModule() {
    try {
      await (0, node_fs_promises.access)(this.options.workerModule);
    } catch {
      throw new Error(`Worker module not found: ${this.options.workerModule}`);
    }
  }
  /**
   * Spawns all worker threads, distributes user ranges, collects events,
   * and waits for completion.
   *
   * @throws Error if any worker fails with an unrecoverable error
   */
  async run() {
    await this.validateModule();
    const { workerModule, userCount, workerCount, adapter, telemetry, startDate, endDate } = this.options;
    const serializedDatasets = serializeDatasets(this.options.preGeneratedDatasets);
    const usersPerWorker = Math.ceil(userCount / workerCount);
    const workerScript = resolveWorkerScript();
    const isTypeScript = (0, node_path.extname)(workerScript) === ".ts";
    const errors = [];
    const workers = [];
    const workerPromises = Array.from({ length: workerCount }, (_, i) => {
      const userStart = i * usersPerWorker;
      const userEnd = Math.min(userStart + usersPerWorker, userCount);
      // More workers than users: this slot has nothing to do.
      if (userStart >= userCount) return Promise.resolve();
      const worker = new node_worker_threads.Worker(workerScript, {
        workerData: {
          workerModule,
          userStart,
          userEnd,
          startDate,
          endDate,
          serializedDatasets
        },
        // tsx is required when the bootstrap script itself is TypeScript;
        // otherwise strip types only when the user-supplied module is TS.
        execArgv: isTypeScript ? ["--import", "tsx"] : (0, node_path.extname)(workerModule) === ".ts" ? ["--experimental-strip-types"] : []
      });
      workers.push(worker);
      return this.listenToWorker(worker, adapter, telemetry ?? null, errors);
    });
    const results = await Promise.allSettled(workerPromises);
    for (const result of results) if (result.status === "rejected") {
      const reason = result.reason instanceof Error ? result.reason : new Error(String(result.reason));
      // Bug fix: listenToWorker pushes the same Error object into `errors`
      // before rejecting with it, so the rejection reason used to be counted
      // twice in the final message. Only record reasons not already collected.
      if (!errors.includes(reason)) errors.push(reason);
    }
    if (errors.length > 0) {
      const combined = errors.map((e) => e.message).join("\n");
      throw new Error(`Worker pool failed with ${String(errors.length)} error(s):\n${combined}`);
    }
  }
  /**
   * Listens to a single worker's messages and routes them to the adapter/telemetry.
   *
   * @param worker - The Worker thread to listen to
   * @param adapter - Output adapter for writing events
   * @param telemetry - Optional telemetry collector
   * @param errors - Mutable array to collect errors from failed workers
   * @returns Promise that resolves when the worker sends 'done' or rejects on error
   */
  listenToWorker(worker, adapter, telemetry, errors) {
    return new Promise((resolve, reject) => {
      worker.on("message", (msg) => {
        switch (msg.type) {
          case "events":
            for (const event of msg.events) {
              event.timestamp = rehydrateDate(event.timestamp);
              // NOTE(review): writes are fire-and-forget; a write failure is
              // recorded in `errors` but may land after run() has already
              // checked them, and 'done' does not wait for writes to flush —
              // confirm adapters tolerate this before relying on ordering.
              Promise.resolve(adapter.write(event)).catch((err) => {
                const writeError = /* @__PURE__ */ new Error(`Adapter write failed: ${err instanceof Error ? err.message : String(err)}`);
                errors.push(writeError);
              });
            }
            if (telemetry) {
              let remaining = msg.events.length;
              while (remaining-- > 0) telemetry.recordEvent();
            }
            break;
          case "user-started":
            if (telemetry) telemetry.recordUserStarted();
            break;
          case "user-completed":
            if (telemetry) telemetry.recordUserCompleted();
            break;
          case "error": {
            // Worker reported a fatal error: reconstruct it (including the
            // original stack when available) and tear the worker down.
            const workerError = new Error(msg.message);
            if (msg.stack) workerError.stack = msg.stack;
            errors.push(workerError);
            worker.terminate();
            reject(workerError);
            break;
          }
          case "done":
            resolve();
            break;
        }
      });
      worker.on("error", (err) => {
        errors.push(err);
        reject(err);
      });
      worker.on("exit", (code) => {
        // A non-zero exit after 'done' is harmless: reject() on a settled
        // promise is a no-op, but the error is still recorded.
        if (code !== 0) {
          const exitError = /* @__PURE__ */ new Error(`Worker exited with code ${String(code)}`);
          errors.push(exitError);
          reject(exitError);
        }
      });
    });
  }
};
|
|
319
|
+
|
|
320
|
+
//#endregion
|
|
321
|
+
//#region src/adapters/console.ts
|
|
322
|
+
/**
 * Adapter that writes events to the console as pretty-printed JSON.
 *
 * @example
 * ```ts
 * await generate(journey, { users: 10, adapter: new ConsoleAdapter() });
 * ```
 */
var ConsoleAdapter = class {
  /** @inheritdoc */
  write(event) {
    const serialized = JSON.stringify(event, null, 2);
    console.log(serialized);
  }
};
|
|
336
|
+
|
|
337
|
+
//#endregion
|
|
338
|
+
//#region src/monitoring/telemetry.ts
|
|
339
|
+
/**
 * Collects telemetry data during generation runs: per-second snapshots,
 * event/user counters, and aggregated validation results.
 */
var TelemetryCollector = class {
  startTime;
  snapshots = [];
  intervalHandle = null;
  currentSecondEvents = 0;
  totalEvents = 0;
  activeUsers = 0;
  completedUsers = 0;
  lanes;
  eventsValidated = 0;
  eventsValid = 0;
  eventsInvalid = 0;
  validationErrors = [];
  constructor(lanes) {
    this.lanes = lanes;
    this.startTime = new Date();
  }
  /**
   * Start collecting telemetry data every second.
   */
  start() {
    this.intervalHandle = setInterval(() => this.captureSnapshot(), 1e3);
  }
  /**
   * Stop collecting telemetry data (captures one final snapshot).
   */
  stop() {
    if (this.intervalHandle) {
      clearInterval(this.intervalHandle);
      this.intervalHandle = null;
    }
    this.captureSnapshot();
  }
  /**
   * Record that an event was generated.
   */
  recordEvent() {
    this.currentSecondEvents += 1;
    this.totalEvents += 1;
  }
  /**
   * Record that a user started processing.
   */
  recordUserStarted() {
    this.activeUsers += 1;
  }
  /**
   * Record that a user completed processing.
   */
  recordUserCompleted() {
    this.activeUsers -= 1;
    this.completedUsers += 1;
  }
  /**
   * Merge a validation summary from a lane or journey into the collector totals.
   * Stored errors are capped at 50 to bound memory use.
   *
   * @param summary - Aggregated validation counts and errors to record.
   */
  recordValidationSummary(summary) {
    this.eventsValidated += summary.eventsValidated;
    this.eventsValid += summary.eventsValid;
    this.eventsInvalid += summary.eventsInvalid;
    for (const err of summary.validationErrors) {
      if (this.validationErrors.length >= 50) break;
      this.validationErrors.push(err);
    }
  }
  /**
   * Generate the final telemetry report.
   */
  getReport() {
    const endTime = new Date();
    const durationMs = endTime.getTime() - this.startTime.getTime();
    // Guard against a zero-duration run to avoid dividing by zero.
    const averageEventsPerSecond = durationMs > 0 ? this.totalEvents / durationMs * 1e3 : this.totalEvents;
    return {
      startTime: this.startTime.toISOString(),
      endTime: endTime.toISOString(),
      durationMs,
      totalUsers: this.completedUsers,
      totalEvents: this.totalEvents,
      lanes: this.lanes,
      averageEventsPerSecond: Number(averageEventsPerSecond.toFixed(2)),
      snapshots: this.snapshots,
      activeUsers: this.activeUsers,
      completedUsers: this.completedUsers,
      eventsValidated: this.eventsValidated,
      eventsValid: this.eventsValid,
      eventsInvalid: this.eventsInvalid,
      validationErrors: this.validationErrors
    };
  }
  /**
   * Save the telemetry report to a JSON file.
   */
  async saveReport(filePath) {
    const report = this.getReport();
    await (0, node_fs_promises.writeFile)(filePath, JSON.stringify(report, null, 2), "utf-8");
  }
  // Append one per-second snapshot and reset the rolling events counter.
  captureSnapshot() {
    const now = new Date();
    this.snapshots.push({
      timestamp: now.toISOString(),
      elapsedMs: now.getTime() - this.startTime.getTime(),
      eventsPerSecond: this.currentSecondEvents,
      totalEvents: this.totalEvents,
      activeUsers: this.activeUsers,
      completedUsers: this.completedUsers,
      lanes: this.lanes
    });
    this.currentSecondEvents = 0;
  }
};
|
|
454
|
+
|
|
455
|
+
//#endregion
|
|
456
|
+
//#region src/monitoring/event-validation.ts
|
|
457
|
+
const MAX_STORED_ERRORS = 50;
|
|
458
|
+
/**
 * Error thrown when an event fails schema validation in strict mode.
 * Carries the offending event and the individual validation issues.
 */
var SynodeValidationError = class extends Error {
  event;
  issues;
  constructor(options) {
    super(options.message);
    this.name = "SynodeValidationError";
    this.issues = options.issues;
    this.event = options.event;
  }
};
|
|
471
|
+
/**
 * Wraps `z.object()` for defining event payload schemas.
 *
 * @param shape - Zod raw shape describing the expected payload fields
 * @returns A ZodObject schema
 *
 * @example
 * ```typescript
 * const pageViewSchema = defineEventSchema({
 *   url: z.string().url(),
 *   referrer: z.string().optional(),
 * });
 * ```
 */
function defineEventSchema(shape) {
  const { z } = zod;
  return z.object(shape);
}
|
|
488
|
+
/**
 * Creates a fresh zeroed validation summary.
 *
 * Each call returns a brand-new object (and error array) so summaries can be
 * mutated independently.
 *
 * @returns An empty ValidationSummary ready for accumulation
 */
function createValidationSummary() {
  const summary = {
    eventsValidated: 0,
    eventsValid: 0,
    eventsInvalid: 0,
    validationErrors: []
  };
  return summary;
}
|
|
501
|
+
/**
 * Converts Zod v4 issues to ValidationIssue instances.
 * Symbol path segments are stringified; string and number segments pass through.
 */
function toValidationIssues(zodIssues) {
  const issues = [];
  for (const issue of zodIssues) {
    const path = issue.path.map((segment) => typeof segment === "symbol" ? String(segment) : segment);
    issues.push({
      path,
      message: issue.message,
      code: issue.code
    });
  }
  return issues;
}
|
|
511
|
+
/**
 * Resolves the correct schema for an event based on the config.
 * A single ZodType applies to every event; otherwise `config.schema` is a
 * per-event-name map. Returns undefined if no schema applies to this event.
 */
function resolveSchema(event, config) {
  const { schema } = config;
  if (schema instanceof zod.z.ZodType) {
    return schema;
  }
  return schema[event.name];
}
|
|
519
|
+
/**
 * Records a validation failure in the summary, respecting the MAX_STORED_ERRORS cap.
 * Always bumps `eventsInvalid`; stored error details stop once the cap is reached.
 */
function recordFailure(summary, event, issues) {
  summary.eventsInvalid += 1;
  for (const { path, message } of issues) {
    if (summary.validationErrors.length >= MAX_STORED_ERRORS) break;
    summary.validationErrors.push({
      eventName: event.name,
      path: path.join("."),
      message
    });
  }
}
|
|
530
|
+
/**
 * Validates an event against its configured schema.
 *
 * Behavior depends on the configured mode:
 * - `strict` (default): throws {@link SynodeValidationError} on first failure
 * - `warn`: returns the event but records failure in the summary
 * - `skip`: returns `undefined` (event is dropped) and records failure in the summary
 *
 * Events with no matching schema in a per-name map are passed through without validation.
 *
 * @param event - The event to validate
 * @param config - Schema and mode configuration
 * @param summary - Mutable summary accumulating validation statistics
 * @returns The event if it passes or is kept (warn mode), or undefined if skipped
 * @throws SynodeValidationError in strict mode when validation fails
 */
function validateEvent(event, config, summary) {
  const schema = resolveSchema(event, config);
  if (!schema) {
    // No schema applies to this event name: pass it through unvalidated.
    return event;
  }
  summary.eventsValidated++;
  const parsed = schema.safeParse(event.payload);
  if (parsed.success) {
    summary.eventsValid++;
    return event;
  }
  const issues = toValidationIssues(parsed.error.issues);
  const mode = config.mode ?? "strict";
  // Every failure is recorded regardless of mode.
  recordFailure(summary, event, issues);
  if (mode === "strict") {
    throw new SynodeValidationError({
      message: `Event '${event.name}' failed schema validation: ${issues.map((i) => i.message).join("; ")}`,
      event,
      issues
    });
  }
  if (mode === "warn") {
    return event;
  }
  // skip (or any other mode): drop the event.
  return void 0;
}
|
|
571
|
+
|
|
572
|
+
//#endregion
|
|
573
|
+
//#region src/execution/runner.ts
|
|
574
|
+
/**
 * Returns a random Date between start (inclusive) and end (inclusive).
 */
function randomDateInRange(start, end) {
  const spanMs = end.getTime() - start.getTime();
  return new Date(start.getTime() + Math.random() * spanMs);
}
|
|
582
|
+
/**
 * Pre-generates datasets from definitions and merges with preloaded datasets.
 * Preloaded datasets win on ID collision (they are applied last).
 *
 * @param datasets - Optional dataset definitions to generate
 * @param preloaded - Optional pre-populated datasets to include
 * @returns Map of dataset ID to generated/preloaded Dataset
 */
async function prepareDatasets(datasets, preloaded) {
  const byId = /* @__PURE__ */ new Map();
  if (datasets && datasets.length > 0) {
    // One throwaway context is shared by all dataset generations.
    const tempContext = new require_engine.SynodeContext();
    for (const datasetDef of datasets) {
      const generated = await require_engine.generateDataset(datasetDef, tempContext);
      byId.set(generated.id, generated);
    }
  }
  if (preloaded) {
    for (const dataset of preloaded) {
      byId.set(dataset.id, dataset);
    }
  }
  return byId;
}
|
|
601
|
+
/**
 * Creates a SynodeContext for a single user, hydrating persona attributes
 * and registering pre-generated datasets.
 *
 * @param persona - Optional persona definition for generating user attributes
 * @param preGeneratedDatasets - Pre-generated datasets to register with the context
 * @param startDate - Optional start of date range for random start time
 * @param endDate - Optional end of date range for random start time
 * @returns A fully initialized SynodeContext
 */
async function createUserContext(persona, preGeneratedDatasets, startDate, endDate) {
  // Pick a random start time within the range when one is configured; else "now".
  const userStartTime = startDate && endDate ? randomDateInRange(startDate, endDate) : /* @__PURE__ */ new Date();
  let context;
  if (!persona) {
    context = new require_engine.SynodeContext(userStartTime);
  } else {
    const personaData = await require_engine.generatePersona(persona, new require_engine.SynodeContext());
    const locale = typeof personaData.attributes.locale === "string" ? personaData.attributes.locale : "en";
    context = new require_engine.SynodeContext(userStartTime, void 0, locale);
    for (const [key, value] of Object.entries(personaData.attributes)) {
      context.set(key, value);
    }
  }
  for (const dataset of preGeneratedDatasets.values()) {
    context.registerDataset(dataset);
  }
  return context;
}
|
|
622
|
+
/**
 * Runs all journeys on a context, writing events to the adapter.
 * Wraps adapter.write() failures in SynodeError with code ADAPTER_WRITE_ERROR.
 *
 * @param journeys - Journeys to execute
 * @param context - The user's execution context
 * @param adapter - Output adapter to write events to
 * @param telemetry - Optional telemetry collector for recording events
 * @param eventSchema - Optional event schema validation configuration
 * @param summary - Optional mutable validation summary for accumulating results
 */
async function processUser(journeys, context, adapter, telemetry, eventSchema, summary) {
  for (const journey of journeys) {
    const engine = new require_engine.Engine(journey);
    for await (const event of engine.run(context)) {
      // In skip mode validateEvent returns undefined: drop the event.
      if (eventSchema && summary && !validateEvent(event, eventSchema, summary)) {
        continue;
      }
      try {
        await adapter.write(event);
      } catch (error) {
        const detail = error instanceof Error ? error.message : String(error);
        throw new require_engine.SynodeError({
          code: "ADAPTER_WRITE_ERROR",
          message: `Adapter write failed for event '${event.name}': ${detail}`,
          path: [journey.id],
          suggestion: "Check the output adapter for write errors or capacity issues",
          cause: error
        });
      }
      if (telemetry) {
        telemetry.recordEvent();
      }
    }
  }
}
|
|
655
|
+
/**
 * Generates synthetic data based on the provided journey configuration.
 *
 * @param journey - A single journey or an array of journeys to execute
 * @param options - Generation options: user count, lanes, adapter, date range,
 *   persona, datasets, event schema validation, worker configuration, debug.
 * @throws Error when option combinations are invalid (date range, counts,
 *   worker settings) or when generation itself fails.
 */
async function generate(journey, options) {
  const journeys = Array.isArray(journey) ? journey : [journey];
  // --- option validation: fail fast, before any resources are created ---
  if (options.startDate && !options.endDate) throw new Error("startDate requires endDate to be provided");
  if (options.endDate && !options.startDate) throw new Error("endDate requires startDate to be provided");
  if (options.startDate && options.endDate && options.startDate.getTime() >= options.endDate.getTime()) throw new Error("startDate must be before endDate");
  if (!Number.isFinite(options.users) || options.users < 0 || options.users > 1e7) throw new Error("users must be a finite number between 0 and 10,000,000");
  if (options.lanes !== void 0 && (!Number.isFinite(options.lanes) || options.lanes < 1 || options.lanes > 1e3)) throw new Error("lanes must be a finite number between 1 and 1,000");
  if (options.workers !== void 0 && !options.workerModule) throw new Error("workers option requires workerModule to be set");
  if (options.workers !== void 0 && (!Number.isFinite(options.workers) || options.workers < 1 || options.workers > 1024)) throw new Error("workers must be a finite number between 1 and 1,024");
  const adapter = options.adapter ?? new ConsoleAdapter();
  const userCount = options.users;
  const lanes = options.lanes ?? 1;
  const debug = options.debug ?? false;
  const telemetryPath = options.telemetryPath ?? "./telemetry-report.json";
  const telemetry = debug ? new TelemetryCollector(lanes) : null;
  if (telemetry) telemetry.start();
  try {
    const preGeneratedDatasets = await prepareDatasets(options.datasets, options.preloadedDatasets);
    const summary = options.eventSchema ? createValidationSummary() : null;
    // Dispatch to worker threads, in-process lanes, or plain sequential execution.
    if (options.workerModule) {
      await new WorkerPool({
        workerModule: options.workerModule,
        userCount,
        workerCount: options.workers ?? (0, node_os.cpus)().length,
        adapter,
        telemetry,
        startDate: options.startDate?.toISOString(),
        endDate: options.endDate?.toISOString(),
        preGeneratedDatasets
      }).run();
    } else if (lanes > 1) {
      await runParallel(journeys, userCount, lanes, options.persona, preGeneratedDatasets, adapter, telemetry, options.startDate, options.endDate, options.eventSchema, summary);
    } else {
      await runSequential(journeys, userCount, options.persona, preGeneratedDatasets, adapter, telemetry, options.startDate, options.endDate, options.eventSchema, summary);
    }
    if (summary && options.eventSchema?.mode === "warn" && summary.eventsInvalid > 0) console.error(`[synode] Validation: ${String(summary.eventsValid)} passed, ${String(summary.eventsInvalid)} failed out of ${String(summary.eventsValidated)} checked`);
    if (telemetry && summary) telemetry.recordValidationSummary(summary);
    if (adapter.close) await adapter.close();
  } catch (error) {
    // Bug fix: telemetry.start() registers a 1s setInterval, and stop() was
    // previously only reached on the success path — a failure during
    // generation leaked the interval and kept the process alive.
    if (telemetry) telemetry.stop();
    throw error;
  }
  if (telemetry) {
    telemetry.stop();
    await telemetry.saveReport(telemetryPath);
  }
}
|
|
696
|
+
/**
 * Run generation sequentially in the main thread, one user at a time.
 */
async function runSequential(journeys, userCount, persona, preGeneratedDatasets, adapter, telemetry, startDate, endDate, eventSchema, summary) {
  for (let userIndex = 0; userIndex < userCount; userIndex++) {
    if (telemetry) telemetry.recordUserStarted();
    const context = await createUserContext(persona, preGeneratedDatasets, startDate, endDate);
    await processUser(journeys, context, adapter, telemetry, eventSchema, summary ?? void 0);
    if (telemetry) telemetry.recordUserCompleted();
  }
}
|
|
706
|
+
/**
 * Run generation in parallel using concurrent async execution.
 * Note: This uses Promise.all for concurrent execution in the main thread.
 * For true multi-core parallelism, worker threads would require serializable
 * journey definitions (e.g., loaded from file paths rather than in-memory objects).
 */
async function runParallel(journeys, userCount, lanes, persona, preGeneratedDatasets, adapter, telemetry, startDate, endDate, eventSchema, summary) {
	const usersPerLane = Math.ceil(userCount / lanes);
	// Process one contiguous slice of the user range sequentially.
	const runLane = async (firstUser, lastUser) => {
		for (let user = firstUser; user < lastUser; user++) {
			telemetry?.recordUserStarted();
			const context = await createUserContext(persona, preGeneratedDatasets, startDate, endDate);
			await processUser(journeys, context, adapter, telemetry, eventSchema, summary ?? void 0);
			telemetry?.recordUserCompleted();
		}
	};
	const lanePromises = [];
	for (let laneIndex = 0; laneIndex < lanes; laneIndex++) {
		const firstUser = laneIndex * usersPerLane;
		// Stop spawning lanes once the user range is exhausted.
		if (firstUser >= userCount) break;
		lanePromises.push(runLane(firstUser, Math.min(firstUser + usersPerLane, userCount)));
	}
	await Promise.all(lanePromises);
}
|
|
730
|
+
|
|
731
|
+
//#endregion
|
|
732
|
+
//#region src/monitoring/validation.ts
|
|
733
|
+
/** Zod schema for a single Action: the atomic unit that emits events. */
const ActionSchema = zod.z.object({
	id: zod.z.string().min(1).describe("Unique identifier for this action"),
	name: zod.z.string().min(1).describe('Event name this action generates (e.g., "product_viewed", "checkout_completed")'),
	handler: zod.z.function().describe("Function returning Event[] or Promise<Event[]>. Receives Context as argument.")
});
/** Zod schema for an Adventure: an ordered sequence of actions. */
const AdventureSchema = zod.z.object({
	id: zod.z.string().min(1).describe("Unique identifier for this adventure within its journey"),
	name: zod.z.string().min(1).describe('Human-readable adventure name (e.g., "Browsing Session")'),
	actions: zod.z.array(ActionSchema).describe("Ordered list of actions to execute in this adventure. Actions run sequentially.")
});
/** Zod schema for a Journey: an ordered sequence of adventures. */
const JourneySchema = zod.z.object({
	id: zod.z.string().min(1).describe('Unique identifier for this journey. Referenced by other journeys via "requires".'),
	name: zod.z.string().min(1).describe('Human-readable journey name (e.g., "First Purchase Flow")'),
	adventures: zod.z.array(AdventureSchema).describe("Ordered list of adventures in this journey. Adventures run sequentially; each may bounce.")
});
|
|
748
|
+
/**
 * Validates that a bounce chance value is between 0 and 1 (inclusive).
 * Undefined values are permitted and skipped.
 *
 * @param value - The bounce chance value to validate
 * @param path - Hierarchical path for error reporting
 * @throws SynodeError with INVALID_BOUNCE_CHANCE code if out of range
 */
function validateBounceChance(value, path$1) {
	// Negated original condition keeps NaN handling identical (NaN passes).
	if (value === void 0 || !(value < 0 || value > 1)) return;
	throw new require_engine.SynodeError({
		code: "INVALID_BOUNCE_CHANCE",
		message: `Bounce chance must be between 0 and 1, got ${String(value)}`,
		path: path$1,
		expected: "0 <= bounceChance <= 1",
		received: String(value)
	});
}
|
|
765
|
+
/**
 * Validates that a time span has min <= max.
 * Undefined spans are permitted and skipped.
 *
 * @param timeSpan - The time span to validate
 * @param path - Hierarchical path for error reporting
 * @throws SynodeError with INVALID_TIME_SPAN code if min > max
 */
function validateTimeSpan(timeSpan, path$1) {
	if (timeSpan === void 0) return;
	const { min, max } = timeSpan;
	// Negated comparison so NaN bounds pass exactly as in the original.
	if (!(min > max)) return;
	throw new require_engine.SynodeError({
		code: "INVALID_TIME_SPAN",
		message: `TimeSpan min (${String(min)}) must not exceed max (${String(max)})`,
		path: path$1,
		expected: "min <= max",
		received: `min=${String(min)}, max=${String(max)}`
	});
}
|
|
782
|
+
/**
 * Validates that a suppression period has min <= max.
 * Undefined periods are permitted and skipped.
 *
 * @param period - The suppression period to validate
 * @param path - Hierarchical path for error reporting
 * @throws SynodeError with INVALID_SUPPRESSION_PERIOD code if min > max
 */
function validateSuppressionPeriod(period, path$1) {
	if (period === void 0) return;
	const { min, max } = period;
	// Negated comparison so NaN bounds pass exactly as in the original.
	if (!(min > max)) return;
	throw new require_engine.SynodeError({
		code: "INVALID_SUPPRESSION_PERIOD",
		message: `Suppression period min (${String(min)}) must not exceed max (${String(max)})`,
		path: path$1,
		expected: "min <= max",
		received: `min=${String(min)}, max=${String(max)}`
	});
}
|
|
799
|
+
/**
 * Detects circular dependencies starting from a journey using depth-first search.
 *
 * @param journeyId - The journey ID to start the search from
 * @param allJourneys - All journeys to check against
 * @throws SynodeError with CIRCULAR_DEPENDENCY code if a cycle is detected
 */
function detectCircularDeps(journeyId, allJourneys) {
	const journeyMap = new Map(allJourneys.map((j) => [j.id, j]));
	const visiting = new Set(); // nodes on the current DFS stack
	const visited = new Set(); // nodes fully explored (no cycle through them)
	const dfs = (currentId, chain) => {
		if (visiting.has(currentId)) throw new require_engine.SynodeError({
			code: "CIRCULAR_DEPENDENCY",
			message: `Circular dependency detected: ${[...chain, currentId].join(" -> ")}`,
			path: [journeyId]
		});
		if (visited.has(currentId)) return;
		visiting.add(currentId);
		// Unknown journey IDs simply have no requirements to follow.
		for (const reqId of journeyMap.get(currentId)?.requires ?? []) dfs(reqId, [...chain, currentId]);
		visiting.delete(currentId);
		visited.add(currentId);
	};
	dfs(journeyId, []);
}
|
|
826
|
+
/**
 * Validates the journey configuration including structural checks for bounce chances,
 * time spans, suppression periods, duplicate IDs, unknown references, and circular dependencies.
 *
 * @param config - The journey configuration to validate
 * @param allJourneys - Optional list of all journeys for cross-journey validation
 * @throws ZodError if basic schema validation fails
 * @throws SynodeError for structural validation failures
 */
function validateConfig(config, allJourneys) {
	JourneySchema.parse(config);
	const journeyPath = [config.id];
	validateBounceChance(config.bounceChance, journeyPath);
	validateSuppressionPeriod(config.suppressionPeriod, journeyPath);
	const seenAdventureIds = new Set();
	for (const adventure of config.adventures) {
		if (seenAdventureIds.has(adventure.id)) throw new require_engine.SynodeError({
			code: "DUPLICATE_ID",
			message: `Duplicate Adventure ID found: ${adventure.id}`,
			path: [...journeyPath, adventure.id]
		});
		seenAdventureIds.add(adventure.id);
		const adventurePath = [...journeyPath, adventure.id];
		validateBounceChance(adventure.bounceChance, adventurePath);
		validateTimeSpan(adventure.timeSpan, adventurePath);
		const seenActionIds = new Set();
		for (const action of adventure.actions) {
			if (seenActionIds.has(action.id)) throw new require_engine.SynodeError({
				code: "DUPLICATE_ID",
				message: `Duplicate Action ID found in adventure '${adventure.id}': ${action.id}`,
				path: [...adventurePath, action.id]
			});
			seenActionIds.add(action.id);
			const actionPath = [...adventurePath, action.id];
			validateBounceChance(action.bounceChance, actionPath);
			validateTimeSpan(action.timeSpan, actionPath);
		}
	}
	// Cross-journey checks only run when the full journey list is available.
	if (allJourneys && config.requires) {
		const availableIds = allJourneys.map((j) => j.id);
		for (const reqId of config.requires) {
			if (availableIds.includes(reqId)) continue;
			throw new require_engine.SynodeError({
				code: "UNKNOWN_JOURNEY_REF",
				message: `Unknown journey reference '${reqId}'`,
				path: journeyPath,
				suggestion: require_engine.buildNotFoundSuggestion("journey", reqId, availableIds)
			});
		}
		detectCircularDeps(config.id, allJourneys);
	}
}
|
|
878
|
+
/**
 * Performs a dry run of the journey for a specified number of users.
 * Validates the journey first, then collects every generated event in memory.
 *
 * @param journey - The journey configuration to execute
 * @param userCount - Number of simulated users (defaults to 1)
 * @returns All generated events across all users
 */
async function dryRun(journey, userCount = 1) {
	validateConfig(journey);
	const collected = [];
	for (let user = 0; user < userCount; user++) {
		// A fresh engine per user keeps runs independent.
		const engine = new require_engine.Engine(journey);
		for await (const event of engine.run()) collected.push(event);
	}
	return collected;
}
|
|
891
|
+
|
|
892
|
+
//#endregion
|
|
893
|
+
//#region src/adapters/memory.ts
|
|
894
|
+
/**
 * Adapter that stores events in memory.
 * Useful for testing and dry runs.
 *
 * @example
 * ```ts
 * const adapter = new InMemoryAdapter();
 * await generate(journey, { users: 10, adapter });
 * console.log(adapter.events.length);
 * ```
 */
var InMemoryAdapter = class {
	constructor() {
		// All events written so far, in write order.
		this.events = [];
	}
	/** @inheritdoc */
	write(event) {
		this.events.push(event);
	}
	/**
	 * Clears all stored events. The array identity is preserved so
	 * existing references to `events` stay valid.
	 */
	clear() {
		this.events.length = 0;
	}
};
|
|
918
|
+
|
|
919
|
+
//#endregion
|
|
920
|
+
//#region src/adapters/callback.ts
|
|
921
|
+
/**
 * Adapter that forwards events to a user-supplied callback function.
 * Supports both synchronous and asynchronous callbacks.
 *
 * @example
 * ```ts
 * const events: Event[] = [];
 * const adapter = new CallbackAdapter((event) => events.push(event));
 * await generate(journey, { users: 10, adapter });
 * ```
 */
var CallbackAdapter = class {
	/** @param handler - invoked (and awaited) once per written event */
	constructor(handler) {
		this.callback = handler;
	}
	/** @inheritdoc */
	async write(event) {
		// Awaiting covers both sync and async callbacks uniformly.
		await this.callback(event);
	}
};
|
|
941
|
+
|
|
942
|
+
//#endregion
|
|
943
|
+
//#region src/dataset-io.ts
|
|
944
|
+
/**
 * Exports a dataset to a string in the specified format.
 * Returns undefined for an unrecognized format (matching the
 * exhaustive-switch behavior of the original).
 */
function exportDatasetToString(dataset, format) {
	if (format === "csv") return generateCSV(dataset);
	if (format === "json") return JSON.stringify(dataset.rows, null, 2);
	if (format === "jsonl") return generateJSONL(dataset);
}
|
|
954
|
+
/**
 * Imports a dataset from a string in the specified format.
 * Returns undefined for an unrecognized format.
 */
function importDatasetFromString(content, format, id = "imported", name = "Imported Dataset") {
	if (format === "csv") return parseCSVContent(id, name, content);
	if (format === "json") return parseJSONContent(id, name, content);
	if (format === "jsonl") return parseJSONLContent(id, name, content);
}
|
|
964
|
+
/**
 * Validates that a file path does not escape the given base directory.
 *
 * @param filePath - Path to check (absolute or relative to basePath)
 * @param basePath - Directory the path must stay within (defaults to cwd)
 * @returns The fully resolved absolute path
 * @throws Error if path traversal is detected
 */
function validateFilePath(filePath, basePath = process.cwd()) {
	const baseResolved = node_path.default.resolve(basePath);
	const resolved = node_path.default.resolve(basePath, filePath);
	// Inside the base means: exactly the base, or a descendant (separator
	// appended so "/base-evil" does not pass as a prefix of "/base").
	const insideBase = resolved === baseResolved || resolved.startsWith(baseResolved + node_path.default.sep);
	if (!insideBase) throw new Error(`Path traversal detected: '${filePath}' escapes base directory`);
	return resolved;
}
|
|
974
|
+
/**
 * Exports a dataset to a file in the specified format.
 * The path is validated against traversal before any serialization I/O.
 */
async function exportDataset(dataset, filePath, format) {
	const safePath = validateFilePath(filePath);
	await node_fs_promises.default.writeFile(safePath, exportDatasetToString(dataset, format), "utf-8");
}
|
|
982
|
+
/**
 * Imports a dataset from a file in the specified format.
 * Returns undefined for an unrecognized format.
 */
async function importDataset(id, name, filePath, format) {
	if (format === "csv") return importFromCSV(id, name, filePath);
	if (format === "json") return importFromJSON(id, name, filePath);
	if (format === "jsonl") return importFromJSONL(id, name, filePath);
}
|
|
992
|
+
/**
 * Serializes a dataset to CSV text. Column headers come from the first
 * row's keys and every row is emitted in that column order. Object values
 * are JSON-encoded; null/undefined become empty fields. Returns "" for an
 * empty dataset.
 */
function generateCSV(dataset) {
	if (dataset.rows.length === 0) return "";
	const headers = Object.keys(dataset.rows[0]);
	// Render one cell: null/undefined -> "", objects -> JSON,
	// string/number/boolean -> String, anything else -> JSON.
	const renderCell = (value) => {
		if (value === null || value === void 0) return "";
		if (typeof value === "object") return JSON.stringify(value);
		if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") return String(value);
		return JSON.stringify(value);
	};
	const csvLines = [headers.map((h) => escapeCSVValue(h)).join(",")];
	for (const row of dataset.rows) {
		csvLines.push(headers.map((header) => escapeCSVValue(renderCell(row[header]))).join(","));
	}
	return csvLines.join("\n");
}
|
|
1008
|
+
/** Serializes a dataset to JSON Lines: one JSON object per row, LF-joined. */
function generateJSONL(dataset) {
	const lines = [];
	for (const row of dataset.rows) lines.push(JSON.stringify(row));
	return lines.join("\n");
}
|
|
1011
|
+
/**
 * Parses a JSON string into a dataset. The content must be a JSON array
 * of row objects.
 *
 * @throws Error if the content is not valid JSON or not an array
 */
function parseJSONContent(id, name, content) {
	let parsed;
	try {
		parsed = JSON.parse(content);
	} catch {
		throw new Error(`Failed to parse JSON dataset '${id}': invalid JSON`);
	}
	if (!Array.isArray(parsed)) throw new Error(`Failed to parse JSON dataset '${id}': expected an array of rows`);
	return {
		id,
		name,
		rows: parsed
	};
}
|
|
1025
|
+
/**
 * Parses JSON Lines content into a dataset. Blank lines are skipped;
 * the reported line number refers to the position among non-blank lines
 * (matching the original implementation).
 *
 * @throws Error if any non-blank line is not valid JSON
 */
function parseJSONLContent(id, name, content) {
	const nonEmptyLines = content.split("\n").filter((line) => line.trim());
	const rows = nonEmptyLines.map((line, index) => {
		try {
			return JSON.parse(line);
		} catch {
			throw new Error(`Failed to parse JSONL dataset '${id}' at line ${String(index + 1)}: invalid JSON`);
		}
	});
	return {
		id,
		name,
		rows
	};
}
|
|
1038
|
+
/**
 * Parses CSV content into a dataset. The first line supplies the headers;
 * missing trailing fields default to "".
 */
function parseCSVContent(id, name, content) {
	const lines = splitCSVLines(content);
	if (lines.length === 0) return {
		id,
		name,
		rows: []
	};
	const headers = parseCSVLine(lines[0]);
	const rows = lines.slice(1).map((line) => {
		const values = parseCSVLine(line);
		const row = {};
		headers.forEach((header, column) => {
			row[header] = values[column] ?? "";
		});
		return row;
	});
	return {
		id,
		name,
		rows
	};
}
|
|
1059
|
+
/**
 * Split CSV content into lines, respecting quoted values that contain
 * newlines.
 *
 * Handles both LF and CRLF line endings: an unquoted "\r\n" pair is
 * consumed as a single terminator, so fields parsed from Windows-produced
 * files no longer carry a trailing "\r". Carriage returns inside quoted
 * values are preserved verbatim. Blank (whitespace-only) lines are dropped.
 *
 * @param content - Raw CSV text
 * @returns Array of logical CSV lines (quotes left intact)
 */
function splitCSVLines(content) {
	const lines = [];
	let currentLine = "";
	let inQuotes = false;
	for (let i = 0; i < content.length; i++) {
		const char = content[i];
		const nextChar = content[i + 1];
		if (char === "\"") {
			currentLine += char;
			if (inQuotes && nextChar === "\"") {
				// Escaped quote ("") inside a quoted value: keep both, stay quoted.
				currentLine += nextChar;
				i++;
			} else inQuotes = !inQuotes;
		} else if (char === "\r" && nextChar === "\n" && !inQuotes) {
			// CRLF terminator outside quotes: end the line and skip the "\n".
			if (currentLine.trim()) lines.push(currentLine);
			currentLine = "";
			i++;
		} else if (char === "\n" && !inQuotes) {
			if (currentLine.trim()) lines.push(currentLine);
			currentLine = "";
		} else currentLine += char;
	}
	if (currentLine.trim()) lines.push(currentLine);
	return lines;
}
|
|
1083
|
+
/** Reads and parses a CSV dataset file (path is traversal-checked first). */
async function importFromCSV(id, name, filePath) {
	const safePath = validateFilePath(filePath);
	const content = await node_fs_promises.default.readFile(safePath, "utf-8");
	return parseCSVContent(id, name, content);
}
|
|
1087
|
+
/** Reads and parses a JSON dataset file (path is traversal-checked first). */
async function importFromJSON(id, name, filePath) {
	const safePath = validateFilePath(filePath);
	const content = await node_fs_promises.default.readFile(safePath, "utf-8");
	return parseJSONContent(id, name, content);
}
|
|
1091
|
+
/** Reads and parses a JSON Lines dataset file (path is traversal-checked first). */
async function importFromJSONL(id, name, filePath) {
	const safePath = validateFilePath(filePath);
	const content = await node_fs_promises.default.readFile(safePath, "utf-8");
	return parseJSONLContent(id, name, content);
}
|
|
1095
|
+
/**
 * Escapes a single CSV field for output.
 * Values starting with a formula/control character (=, +, -, @, tab, CR)
 * get a leading apostrophe to neutralize spreadsheet formula injection.
 * Values containing commas, quotes, or newlines are wrapped in double
 * quotes with embedded quotes doubled.
 */
function escapeCSVValue(value) {
	let safe = value;
	if (/^[=+\-@\t\r]/.test(safe)) safe = `'${safe}`;
	const needsQuoting = safe.includes(",") || safe.includes("\"") || safe.includes("\n");
	return needsQuoting ? `"${safe.replace(/"/g, "\"\"")}"` : safe;
}
|
|
1100
|
+
/**
 * Parses one logical CSV line into its fields. Handles quoted fields,
 * escaped quotes ("" -> "), and unquoted commas as separators.
 */
function parseCSVLine(line) {
	const fields = [];
	let current = "";
	let inQuotes = false;
	let i = 0;
	while (i < line.length) {
		const char = line[i];
		if (char === "\"") {
			if (inQuotes && line[i + 1] === "\"") {
				// Escaped quote inside a quoted field: emit one quote, skip both.
				current += "\"";
				i += 2;
				continue;
			}
			inQuotes = !inQuotes;
		} else if (char === "," && !inQuotes) {
			fields.push(current);
			current = "";
		} else {
			current += char;
		}
		i++;
	}
	fields.push(current);
	return fields;
}
|
|
1119
|
+
|
|
1120
|
+
//#endregion
|
|
1121
|
+
// Public CommonJS API surface. Entries marked "engine chunk" are
// forwarded unchanged from the shared engine bundle.
// -- Schemas
exports.ActionSchema = ActionSchema;
exports.AdventureSchema = AdventureSchema;
exports.JourneySchema = JourneySchema;
// -- Adapters
exports.CallbackAdapter = CallbackAdapter;
exports.ConsoleAdapter = ConsoleAdapter;
exports.InMemoryAdapter = InMemoryAdapter;
// -- Engine, context, and errors (engine chunk)
exports.Engine = require_engine.Engine;
exports.SynodeContext = require_engine.SynodeContext;
exports.SynodeError = require_engine.SynodeError;
exports.SynodeValidationError = SynodeValidationError;
// -- Telemetry and validation
exports.TelemetryCollector = TelemetryCollector;
exports.createValidationSummary = createValidationSummary;
exports.validateBounceChance = validateBounceChance;
exports.validateConfig = validateConfig;
exports.validateEvent = validateEvent;
exports.validateFilePath = validateFilePath;
exports.validateTimeSpan = validateTimeSpan;
// -- Definition helpers
exports.defineAction = defineAction;
exports.defineAdventure = defineAdventure;
exports.defineDataset = require_engine.defineDataset;
exports.defineEventSchema = defineEventSchema;
exports.defineJourney = defineJourney;
exports.definePersona = require_engine.definePersona;
// -- Generation entry points
exports.dryRun = dryRun;
exports.generate = generate;
exports.generateDataset = require_engine.generateDataset;
exports.generateDelay = require_engine.generateDelay;
exports.generatePersona = require_engine.generatePersona;
// -- Dataset import/export
exports.exportDataset = exportDataset;
exports.exportDatasetToString = exportDatasetToString;
exports.importDataset = importDataset;
exports.importDatasetFromString = importDatasetFromString;
// -- Randomness and fakery helpers
exports.chance = chance;
exports.fake = fake;
exports.oneOf = oneOf;
exports.shouldBounce = require_engine.shouldBounce;
exports.weighted = weighted;
// -- Error-message utilities (engine chunk)
exports.buildNotFoundSuggestion = require_engine.buildNotFoundSuggestion;
exports.formatErrorMessage = require_engine.formatErrorMessage;
exports.levenshtein = require_engine.levenshtein;
exports.suggestClosest = require_engine.suggestClosest;
// -- Bundler interop helper
exports.__toESM = __toESM;
|
|
1163
|
+
//# sourceMappingURL=index.cjs.map
|