@synode/core 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,1093 @@
1
+ import { a as generateDataset, c as SynodeContext, d as formatErrorMessage, f as levenshtein, i as defineDataset, l as SynodeError, n as generateDelay, o as definePersona, p as suggestClosest, r as shouldBounce, s as generatePersona, t as Engine, u as buildNotFoundSuggestion } from "./engine-SRByMZvP.mjs";
2
+ import { cpus } from "node:os";
3
+ import { Worker } from "node:worker_threads";
4
+ import path, { extname, join } from "node:path";
5
+ import fs, { access, writeFile } from "node:fs/promises";
6
+ import { accessSync } from "node:fs";
7
+ import { fileURLToPath } from "node:url";
8
+ import { z } from "zod";
9
+
10
+ //#region src/generators/builder.ts
11
/**
 * Defines a new journey.
 * Pass-through helper: the config object is returned unchanged and *is* the journey.
 * @param config The journey configuration.
 * @returns The configured journey object (same reference).
 * @see {@link Journey}
 */
function defineJourney(config) {
  return config;
}
20
/**
 * Defines a new adventure.
 * Pass-through helper: the config object is returned unchanged and *is* the adventure.
 * @param config The adventure configuration.
 * @returns The configured adventure object (same reference).
 * @see {@link Adventure}
 */
function defineAdventure(config) {
  return config;
}
29
/**
 * Defines a new action.
 *
 * When a `handler` is supplied it is used verbatim. Otherwise a handler is
 * synthesized from the declarative `fields` map: static values are copied
 * into the payload, and function values are awaited with the context and the
 * payload built so far, in field declaration order.
 *
 * @param config The action configuration.
 * @returns The configured action object.
 * @see {@link Action}
 * @see {@link ActionDefinition}
 */
function defineAction(config) {
  const { id, name, timeSpan, bounceChance } = config;
  if (config.handler) {
    return { id, name, handler: config.handler, timeSpan, bounceChance };
  }
  // Synthesize a handler producing a single event from the declared fields.
  const handler = async (context) => {
    const payload = {};
    if (config.fields) {
      for (const [key, generator] of Object.entries(config.fields)) {
        payload[key] = typeof generator === "function" ? await generator(context, payload) : generator;
      }
    }
    return [{
      id: context.generateId("event"),
      userId: context.userId,
      sessionId: context.sessionId,
      name,
      timestamp: context.now(),
      payload
    }];
  };
  return { id, name, timeSpan, bounceChance, handler };
}
64
+
65
+ //#endregion
66
+ //#region src/generators/fields.ts
67
/**
 * Returns a field generator that produces a value via the context's Faker
 * instance (which carries the context's locale).
 * @param generator Function that takes a Faker instance and returns a value.
 * @returns A field generator taking the context.
 */
function fake(generator) {
  return (context) => generator(context.faker);
}
76
/**
 * Returns a field generator that picks one of the provided options
 * uniformly at random.
 * @param options Candidate values to choose from.
 */
function oneOf(options) {
  return () => {
    const index = Math.floor(Math.random() * options.length);
    return options[index];
  };
}
84
/**
 * Returns a field generator yielding `true` with the given probability (0-1).
 * @param probability Chance of `true`, in [0, 1].
 */
function chance(probability) {
  return () => Math.random() < probability;
}
92
/**
 * Returns a field generator choosing a key by weighted probability.
 * Weights need not sum to 1; they are normalized against their total.
 * @param options Map of value to weight.
 */
function weighted(options) {
  return () => {
    const entries = Object.entries(options);
    const total = entries.reduce((acc, [, weight]) => acc + weight, 0);
    let remaining = Math.random() * total;
    for (const [value, weight] of entries) {
      remaining -= weight;
      if (remaining <= 0) return value;
    }
    // Floating-point drift can leave a tiny positive remainder; fall back to the last option.
    return entries[entries.length - 1][0];
  };
}
108
+
109
+ //#endregion
110
+ //#region src/execution/pool.ts
111
+ const __dirname = fileURLToPath(new URL(".", import.meta.url));
112
/**
 * Resolves the worker script path. Prefers the compiled `.mjs` next to this
 * module, then the equivalent location under `dist/` (for runs started from
 * `src/` during dev/test), finally falling back to the TypeScript source
 * (for development with tsx).
 *
 * @returns Absolute path to the worker script
 */
function resolveWorkerScript() {
  const candidates = [
    join(__dirname, "worker.mjs"),
    __dirname.replace(/src[\\/]/, "dist/") + "worker.mjs"
  ];
  for (const candidate of candidates) {
    try {
      accessSync(candidate);
      return candidate;
    } catch {
      // Not present here; try the next candidate.
    }
  }
  return join(__dirname, "worker.ts");
}
135
/**
 * Serializes a Map of datasets into plain objects for structured clone transfer.
 * Only `id`, `name`, and `rows` are carried over.
 *
 * @param datasets - Map of dataset ID to Dataset (may be undefined)
 * @returns Array of serialized datasets
 */
function serializeDatasets(datasets) {
  if (!datasets || datasets.size === 0) return [];
  return [...datasets.values()].map(({ id, name, rows }) => ({ id, name, rows }));
}
151
/**
 * Rehydrates a Date value that may have been stringified during structured
 * clone transfer. Structured clone preserves Dates natively; this is a
 * safety net for timestamps that arrive as ISO strings.
 *
 * @param value - A Date object or ISO string timestamp
 * @returns A proper Date instance
 */
function rehydrateDate(value) {
  return value instanceof Date ? value : new Date(value);
}
163
/**
 * Manages a pool of worker threads for parallel user generation.
 * Distributes users evenly across workers, collects events via message passing,
 * and writes them through the configured output adapter.
 */
var WorkerPool = class {
	// Pool configuration: workerModule, userCount, workerCount, adapter,
	// telemetry, startDate/endDate (ISO strings), preGeneratedDatasets.
	options;
	/**
	 * @param options - Pool configuration including module path, user count, and adapter
	 */
	constructor(options) {
		this.options = options;
	}
	/**
	 * Validates the worker module path exists on disk.
	 *
	 * @throws Error if the module path does not exist
	 */
	async validateModule() {
		try {
			await access(this.options.workerModule);
		} catch {
			// Re-throw with a clearer message; the original fs error is discarded.
			throw new Error(`Worker module not found: ${this.options.workerModule}`);
		}
	}
	/**
	 * Spawns all worker threads, distributes user ranges, collects events,
	 * and waits for completion.
	 *
	 * Users are split into contiguous ranges of ceil(userCount / workerCount);
	 * ranges that start past userCount produce no worker (resolved promise).
	 *
	 * @throws Error if any worker fails with an unrecoverable error
	 */
	async run() {
		await this.validateModule();
		const { workerModule, userCount, workerCount, adapter, telemetry, startDate, endDate } = this.options;
		// Datasets are flattened to plain objects so they survive structured clone.
		const serializedDatasets = serializeDatasets(this.options.preGeneratedDatasets);
		const usersPerWorker = Math.ceil(userCount / workerCount);
		const workerScript = resolveWorkerScript();
		const isTypeScript = extname(workerScript) === ".ts";
		// Shared, mutable error sink; workers append and run() reports at the end.
		const errors = [];
		const workers = [];
		const workerPromises = Array.from({ length: workerCount }, (_, i) => {
			const userStart = i * usersPerWorker;
			const userEnd = Math.min(userStart + usersPerWorker, userCount);
			// Extra workers beyond the user count are skipped entirely.
			if (userStart >= userCount) return Promise.resolve();
			const worker = new Worker(workerScript, {
				workerData: {
					workerModule,
					userStart,
					userEnd,
					startDate,
					endDate,
					serializedDatasets
				},
				// tsx loader when the pool script itself is TS; otherwise strip types
				// only if the user's worker module is TS.
				execArgv: isTypeScript ? ["--import", "tsx"] : extname(workerModule) === ".ts" ? ["--experimental-strip-types"] : []
			});
			workers.push(worker);
			return this.listenToWorker(worker, adapter, telemetry ?? null, errors);
		});
		// allSettled: let every worker finish (or fail) before reporting.
		const results = await Promise.allSettled(workerPromises);
		for (const result of results) if (result.status === "rejected") {
			const reason = result.reason;
			errors.push(reason instanceof Error ? reason : new Error(String(reason)));
		}
		if (errors.length > 0) {
			const combined = errors.map((e) => e.message).join("\n");
			throw new Error(`Worker pool failed with ${String(errors.length)} error(s):\n${combined}`);
		}
	}
	/**
	 * Listens to a single worker's messages and routes them to the adapter/telemetry.
	 *
	 * @param worker - The Worker thread to listen to
	 * @param adapter - Output adapter for writing events
	 * @param telemetry - Optional telemetry collector
	 * @param errors - Mutable array to collect errors from failed workers
	 * @returns Promise that resolves when the worker sends 'done' or rejects on error
	 */
	listenToWorker(worker, adapter, telemetry, errors) {
		return new Promise((resolve, reject) => {
			worker.on("message", (msg) => {
				switch (msg.type) {
					case "events":
						for (const event of msg.events) {
							event.timestamp = rehydrateDate(event.timestamp);
							// NOTE(review): writes are fire-and-forget — a failed write is
							// recorded in `errors`, but 'done' may resolve before all
							// pending writes have settled. Confirm whether in-flight
							// writes should be awaited before resolution.
							Promise.resolve(adapter.write(event)).catch((err) => {
								const writeError = /* @__PURE__ */ new Error(`Adapter write failed: ${err instanceof Error ? err.message : String(err)}`);
								errors.push(writeError);
							});
						}
						if (telemetry) {
							let remaining = msg.events.length;
							while (remaining-- > 0) telemetry.recordEvent();
						}
						break;
					case "user-started":
						if (telemetry) telemetry.recordUserStarted();
						break;
					case "user-completed":
						if (telemetry) telemetry.recordUserCompleted();
						break;
					case "error": {
						const workerError = new Error(msg.message);
						// Preserve the worker-side stack when provided.
						if (msg.stack) workerError.stack = msg.stack;
						errors.push(workerError);
						// NOTE(review): terminate() returns a promise that is not awaited.
						worker.terminate();
						reject(workerError);
						break;
					}
					case "done":
						resolve();
						break;
				}
			});
			worker.on("error", (err) => {
				errors.push(err);
				reject(err);
			});
			worker.on("exit", (code) => {
				// Non-zero exit without a prior 'error'/'done' message is a failure.
				if (code !== 0) {
					const exitError = /* @__PURE__ */ new Error(`Worker exited with code ${String(code)}`);
					errors.push(exitError);
					reject(exitError);
				}
			});
		});
	}
};
290
+
291
+ //#endregion
292
+ //#region src/adapters/console.ts
293
/**
 * Adapter that writes events to the console as pretty-printed JSON
 * (two-space indentation).
 *
 * @example
 * ```ts
 * await generate(journey, { users: 10, adapter: new ConsoleAdapter() });
 * ```
 */
var ConsoleAdapter = class {
  /** @inheritdoc */
  write(event) {
    const formatted = JSON.stringify(event, null, 2);
    console.log(formatted);
  }
};
307
+
308
+ //#endregion
309
+ //#region src/monitoring/telemetry.ts
310
/**
 * Collects telemetry data during generation runs.
 * Counters are mutated by record* callbacks; a per-second interval captures
 * snapshots for the final report.
 */
var TelemetryCollector = class {
	// Wall-clock time the collector was constructed (run start).
	startTime;
	// Per-second snapshots captured while the interval is running.
	snapshots = [];
	// Handle of the 1s interval; null when not collecting.
	intervalHandle = null;
	// Events counted since the last snapshot (reset on each snapshot).
	currentSecondEvents = 0;
	// Total events recorded over the whole run.
	totalEvents = 0;
	// Users currently being processed (started minus completed).
	activeUsers = 0;
	// Users fully processed.
	completedUsers = 0;
	// Number of lanes the run was configured with (echoed into the report).
	lanes;
	// Validation counters merged in via recordValidationSummary().
	eventsValidated = 0;
	eventsValid = 0;
	eventsInvalid = 0;
	// Stored validation errors, capped at 50 entries.
	validationErrors = [];
	constructor(lanes) {
		this.startTime = /* @__PURE__ */ new Date();
		this.lanes = lanes;
	}
	/**
	 * Start collecting telemetry data every second.
	 */
	start() {
		this.intervalHandle = setInterval(() => {
			this.captureSnapshot();
		}, 1e3);
	}
	/**
	 * Stop collecting telemetry data.
	 * Always captures one final snapshot so the tail of the run is recorded.
	 */
	stop() {
		if (this.intervalHandle) {
			clearInterval(this.intervalHandle);
			this.intervalHandle = null;
		}
		this.captureSnapshot();
	}
	/**
	 * Record that an event was generated.
	 */
	recordEvent() {
		this.currentSecondEvents++;
		this.totalEvents++;
	}
	/**
	 * Record that a user started processing.
	 */
	recordUserStarted() {
		this.activeUsers++;
	}
	/**
	 * Record that a user completed processing.
	 */
	recordUserCompleted() {
		this.activeUsers--;
		this.completedUsers++;
	}
	/**
	 * Merge a validation summary from a lane or journey into the collector totals.
	 *
	 * @param summary - Aggregated validation counts and errors to record.
	 */
	recordValidationSummary(summary) {
		this.eventsValidated += summary.eventsValidated;
		this.eventsValid += summary.eventsValid;
		this.eventsInvalid += summary.eventsInvalid;
		// Cap of 50 presumably mirrors MAX_STORED_ERRORS in event-validation — keep in sync.
		for (const err of summary.validationErrors) if (this.validationErrors.length < 50) this.validationErrors.push(err);
	}
	/**
	 * Generate the final telemetry report.
	 * @returns Plain object with ISO timestamps, totals, rates, and snapshots.
	 */
	getReport() {
		const endTime = /* @__PURE__ */ new Date();
		const durationMs = endTime.getTime() - this.startTime.getTime();
		// Guard against division by zero on instantaneous runs.
		const averageEventsPerSecond = durationMs > 0 ? this.totalEvents / durationMs * 1e3 : this.totalEvents;
		return {
			startTime: this.startTime.toISOString(),
			endTime: endTime.toISOString(),
			durationMs,
			totalUsers: this.completedUsers,
			totalEvents: this.totalEvents,
			lanes: this.lanes,
			averageEventsPerSecond: Number(averageEventsPerSecond.toFixed(2)),
			snapshots: this.snapshots,
			activeUsers: this.activeUsers,
			completedUsers: this.completedUsers,
			eventsValidated: this.eventsValidated,
			eventsValid: this.eventsValid,
			eventsInvalid: this.eventsInvalid,
			validationErrors: this.validationErrors
		};
	}
	/**
	 * Save the telemetry report to a JSON file.
	 * @param filePath - Destination path for the pretty-printed JSON report.
	 */
	async saveReport(filePath) {
		const report = this.getReport();
		await writeFile(filePath, JSON.stringify(report, null, 2), "utf-8");
	}
	// Appends a snapshot of the current counters and resets the per-second count.
	captureSnapshot() {
		const now = /* @__PURE__ */ new Date();
		const elapsedMs = now.getTime() - this.startTime.getTime();
		this.snapshots.push({
			timestamp: now.toISOString(),
			elapsedMs,
			eventsPerSecond: this.currentSecondEvents,
			totalEvents: this.totalEvents,
			activeUsers: this.activeUsers,
			completedUsers: this.completedUsers,
			lanes: this.lanes
		});
		this.currentSecondEvents = 0;
	}
};
425
+
426
+ //#endregion
427
+ //#region src/monitoring/event-validation.ts
428
+ const MAX_STORED_ERRORS = 50;
429
/**
 * Error thrown when an event fails schema validation in strict mode.
 */
var SynodeValidationError = class extends Error {
  /** The event that failed validation. */
  event;
  /** The validation issues reported for the event. */
  issues;
  /**
   * @param options - Object with `message`, the offending `event`, and `issues`.
   */
  constructor({ message, event, issues }) {
    super(message);
    this.name = "SynodeValidationError";
    this.event = event;
    this.issues = issues;
  }
};
442
/**
 * Wraps `z.object()` for defining event payload schemas.
 *
 * @param shape - Zod raw shape describing the expected payload fields
 * @returns A ZodObject schema
 *
 * @example
 * ```typescript
 * const pageViewSchema = defineEventSchema({
 *   url: z.string().url(),
 *   referrer: z.string().optional(),
 * });
 * ```
 */
function defineEventSchema(shape) {
  const payloadSchema = z.object(shape);
  return payloadSchema;
}
459
/**
 * Creates a fresh zeroed validation summary.
 *
 * @returns An empty ValidationSummary ready for accumulation
 */
function createValidationSummary() {
  const summary = {
    eventsValidated: 0,
    eventsValid: 0,
    eventsInvalid: 0,
    validationErrors: []
  };
  return summary;
}
472
/**
 * Converts Zod v4 issues to ValidationIssue instances.
 * Symbol path segments are stringified; string/number segments pass through.
 */
function toValidationIssues(zodIssues) {
  const issues = [];
  for (const issue of zodIssues) {
    issues.push({
      path: issue.path.map((segment) => (typeof segment === "symbol" ? String(segment) : segment)),
      message: issue.message,
      code: issue.code
    });
  }
  return issues;
}
482
/**
 * Resolves the correct schema for an event based on the config.
 * A single ZodType applies to every event; otherwise the config is a
 * per-event-name map and may yield undefined (no schema for this event).
 */
function resolveSchema(event, config) {
  const { schema } = config;
  return schema instanceof z.ZodType ? schema : schema[event.name];
}
490
/**
 * Records a validation failure in the summary, respecting the
 * MAX_STORED_ERRORS cap on stored error details.
 */
function recordFailure(summary, event, issues) {
  summary.eventsInvalid++;
  for (const issue of issues) {
    if (summary.validationErrors.length >= MAX_STORED_ERRORS) break;
    summary.validationErrors.push({
      eventName: event.name,
      path: issue.path.join("."),
      message: issue.message
    });
  }
}
501
/**
 * Validates an event against its configured schema.
 *
 * Behavior depends on the configured mode:
 * - `strict` (default): throws {@link SynodeValidationError} on first failure
 * - `warn`: returns the event but records failure in the summary
 * - `skip`: returns `undefined` (event is dropped) and records failure in the summary
 *
 * Events with no matching schema in a per-name map are passed through without validation.
 *
 * @param event - The event to validate
 * @param config - Schema and mode configuration
 * @param summary - Mutable summary accumulating validation statistics
 * @returns The event if it passes or is kept (warn mode), or undefined if skipped
 * @throws SynodeValidationError in strict mode when validation fails
 */
function validateEvent(event, config, summary) {
  const schema = resolveSchema(event, config);
  if (!schema) return event;
  summary.eventsValidated++;
  const parsed = schema.safeParse(event.payload);
  if (parsed.success) {
    summary.eventsValid++;
    return event;
  }
  const issues = toValidationIssues(parsed.error.issues);
  recordFailure(summary, event, issues);
  switch (config.mode ?? "strict") {
    case "strict":
      throw new SynodeValidationError({
        message: `Event '${event.name}' failed schema validation: ${issues.map((i) => i.message).join("; ")}`,
        event,
        issues
      });
    case "warn":
      // Keep the event; the failure is already recorded in the summary.
      return event;
    default:
      // skip (or any other mode): drop the event.
      return void 0;
  }
}
542
+
543
+ //#endregion
544
+ //#region src/execution/runner.ts
545
/**
 * Returns a random Date between start (inclusive) and end (inclusive).
 * @param start - Earliest possible date
 * @param end - Latest possible date
 */
function randomDateInRange(start, end) {
  const spanMs = end.getTime() - start.getTime();
  return new Date(start.getTime() + Math.random() * spanMs);
}
553
/**
 * Pre-generates datasets from definitions and merges with preloaded datasets.
 * Preloaded datasets overwrite generated ones on ID collision.
 *
 * @param datasets - Optional dataset definitions to generate
 * @param preloaded - Optional pre-populated datasets to include
 * @returns Map of dataset ID to generated/preloaded Dataset
 */
async function prepareDatasets(datasets, preloaded) {
  const byId = new Map();
  if (datasets?.length) {
    // A throwaway context is sufficient for dataset generation.
    const tempContext = new SynodeContext();
    for (const definition of datasets) {
      const generated = await generateDataset(definition, tempContext);
      byId.set(generated.id, generated);
    }
  }
  if (preloaded) {
    for (const dataset of preloaded) byId.set(dataset.id, dataset);
  }
  return byId;
}
572
/**
 * Creates a SynodeContext for a single user, hydrating persona attributes
 * and registering pre-generated datasets.
 *
 * @param persona - Optional persona definition for generating user attributes
 * @param preGeneratedDatasets - Pre-generated datasets to register with the context
 * @param startDate - Optional start of date range for random start time
 * @param endDate - Optional end of date range for random start time
 * @returns A fully initialized SynodeContext
 */
async function createUserContext(persona, preGeneratedDatasets, startDate, endDate) {
  // Pick a random start time within the date window when one is configured.
  const userStartTime = startDate && endDate ? randomDateInRange(startDate, endDate) : /* @__PURE__ */ new Date();
  let context;
  if (!persona) {
    context = new SynodeContext(userStartTime);
  } else {
    const personaData = await generatePersona(persona, new SynodeContext());
    // Use the persona's locale when present; default to English.
    const locale = typeof personaData.attributes.locale === "string" ? personaData.attributes.locale : "en";
    context = new SynodeContext(userStartTime, void 0, locale);
    for (const [key, value] of Object.entries(personaData.attributes)) {
      context.set(key, value);
    }
  }
  for (const dataset of preGeneratedDatasets.values()) {
    context.registerDataset(dataset);
  }
  return context;
}
593
/**
 * Runs all journeys on a context, writing events to the adapter.
 * Wraps adapter.write() failures in SynodeError with code ADAPTER_WRITE_ERROR.
 *
 * @param journeys - Journeys to execute
 * @param context - The user's execution context
 * @param adapter - Output adapter to write events to
 * @param telemetry - Optional telemetry collector for recording events
 * @param eventSchema - Optional event schema validation configuration
 * @param summary - Optional mutable validation summary for accumulating results
 */
async function processUser(journeys, context, adapter, telemetry, eventSchema, summary) {
  for (const journey of journeys) {
    const engine = new Engine(journey);
    for await (const event of engine.run(context)) {
      // Drop events rejected by skip-mode validation.
      if (eventSchema && summary && !validateEvent(event, eventSchema, summary)) continue;
      try {
        await adapter.write(event);
      } catch (error) {
        throw new SynodeError({
          code: "ADAPTER_WRITE_ERROR",
          message: `Adapter write failed for event '${event.name}': ${error instanceof Error ? error.message : String(error)}`,
          path: [journey.id],
          suggestion: "Check the output adapter for write errors or capacity issues",
          cause: error
        });
      }
      telemetry?.recordEvent();
    }
  }
}
626
/**
 * Generates synthetic data based on the provided journey configuration.
 *
 * Validates option combinations up front, then dispatches to one of three
 * execution strategies: a worker-thread pool (when `workerModule` is set),
 * in-process concurrent lanes (`lanes > 1`), or a sequential loop.
 *
 * @param journey - A journey or array of journeys to execute
 * @param options - Generation options (users, lanes, workers, adapter, dates, schema, ...)
 * @throws Error when option validation fails or execution reports errors
 */
async function generate(journey, options) {
	const journeys = Array.isArray(journey) ? journey : [journey];
	// --- option validation ---
	if (options.startDate && !options.endDate) throw new Error("startDate requires endDate to be provided");
	if (options.endDate && !options.startDate) throw new Error("endDate requires startDate to be provided");
	if (options.startDate && options.endDate && options.startDate.getTime() >= options.endDate.getTime()) throw new Error("startDate must be before endDate");
	if (!Number.isFinite(options.users) || options.users < 0 || options.users > 1e7) throw new Error("users must be a finite number between 0 and 10,000,000");
	if (options.lanes !== void 0 && (!Number.isFinite(options.lanes) || options.lanes < 1 || options.lanes > 1e3)) throw new Error("lanes must be a finite number between 1 and 1,000");
	if (options.workers !== void 0 && !options.workerModule) throw new Error("workers option requires workerModule to be set");
	if (options.workers !== void 0 && (!Number.isFinite(options.workers) || options.workers < 1 || options.workers > 1024)) throw new Error("workers must be a finite number between 1 and 1,024");
	// --- defaults ---
	const adapter = options.adapter ?? new ConsoleAdapter();
	const userCount = options.users;
	const lanes = options.lanes ?? 1;
	const debug = options.debug ?? false;
	const telemetryPath = options.telemetryPath ?? "./telemetry-report.json";
	// Telemetry is only collected in debug mode.
	const telemetry = debug ? new TelemetryCollector(lanes) : null;
	if (telemetry) telemetry.start();
	const preGeneratedDatasets = await prepareDatasets(options.datasets, options.preloadedDatasets);
	const summary = options.eventSchema ? createValidationSummary() : null;
	// --- execution ---
	// NOTE(review): the worker-pool path does not receive eventSchema/summary,
	// so schema validation looks skipped when workerModule is set — confirm
	// whether workers validate internally.
	if (options.workerModule) await new WorkerPool({
		workerModule: options.workerModule,
		userCount,
		workerCount: options.workers ?? cpus().length,
		adapter,
		telemetry,
		startDate: options.startDate?.toISOString(),
		endDate: options.endDate?.toISOString(),
		preGeneratedDatasets
	}).run();
	else if (lanes > 1) await runParallel(journeys, userCount, lanes, options.persona, preGeneratedDatasets, adapter, telemetry, options.startDate, options.endDate, options.eventSchema, summary);
	else await runSequential(journeys, userCount, options.persona, preGeneratedDatasets, adapter, telemetry, options.startDate, options.endDate, options.eventSchema, summary);
	// Surface warn-mode failures on stderr once the run is complete.
	if (summary && options.eventSchema?.mode === "warn" && summary.eventsInvalid > 0) console.error(`[synode] Validation: ${String(summary.eventsValid)} passed, ${String(summary.eventsInvalid)} failed out of ${String(summary.eventsValidated)} checked`);
	if (telemetry && summary) telemetry.recordValidationSummary(summary);
	// Let the adapter flush/close if it supports it.
	if (adapter.close) await adapter.close();
	if (telemetry) {
		telemetry.stop();
		await telemetry.saveReport(telemetryPath);
	}
}
667
/**
 * Run generation sequentially in the main thread: one user at a time,
 * each with a freshly created context.
 */
async function runSequential(journeys, userCount, persona, preGeneratedDatasets, adapter, telemetry, startDate, endDate, eventSchema, summary) {
  for (let userIndex = 0; userIndex < userCount; userIndex++) {
    telemetry?.recordUserStarted();
    const context = await createUserContext(persona, preGeneratedDatasets, startDate, endDate);
    await processUser(journeys, context, adapter, telemetry, eventSchema, summary ?? void 0);
    telemetry?.recordUserCompleted();
  }
}
677
/**
 * Run generation in parallel using concurrent async execution.
 * Note: This uses Promise.all for concurrent execution in the main thread.
 * For true multi-core parallelism, worker threads would require serializable
 * journey definitions (e.g., loaded from file paths rather than in-memory objects).
 */
async function runParallel(journeys, userCount, lanes, persona, preGeneratedDatasets, adapter, telemetry, startDate, endDate, eventSchema, summary) {
  const usersPerLane = Math.ceil(userCount / lanes);
  const lanePromises = [];
  for (let laneIndex = 0; laneIndex < lanes; laneIndex++) {
    const firstUser = laneIndex * usersPerLane;
    // Lanes beyond the user count have nothing to do.
    if (firstUser >= userCount) break;
    const lastUser = Math.min(firstUser + usersPerLane, userCount);
    lanePromises.push((async () => {
      for (let i = firstUser; i < lastUser; i++) {
        telemetry?.recordUserStarted();
        const context = await createUserContext(persona, preGeneratedDatasets, startDate, endDate);
        await processUser(journeys, context, adapter, telemetry, eventSchema, summary ?? void 0);
        telemetry?.recordUserCompleted();
      }
    })());
  }
  await Promise.all(lanePromises);
}
701
+
702
+ //#endregion
703
+ //#region src/monitoring/validation.ts
704
// Zod schemas for the basic structural validation performed by validateConfig().
// Only required identity fields are checked here; deeper structural rules
// (bounce chances, time spans, cross-journey references) are enforced separately.
const ActionSchema = z.object({
	id: z.string().min(1).describe("Unique identifier for this action"),
	name: z.string().min(1).describe("Event name this action generates (e.g., \"product_viewed\", \"checkout_completed\")"),
	handler: z.function().describe("Function returning Event[] or Promise<Event[]>. Receives Context as argument.")
});
const AdventureSchema = z.object({
	id: z.string().min(1).describe("Unique identifier for this adventure within its journey"),
	name: z.string().min(1).describe("Human-readable adventure name (e.g., \"Browsing Session\")"),
	actions: z.array(ActionSchema).describe("Ordered list of actions to execute in this adventure. Actions run sequentially.")
});
const JourneySchema = z.object({
	id: z.string().min(1).describe("Unique identifier for this journey. Referenced by other journeys via \"requires\"."),
	name: z.string().min(1).describe("Human-readable journey name (e.g., \"First Purchase Flow\")"),
	adventures: z.array(AdventureSchema).describe("Ordered list of adventures in this journey. Adventures run sequentially; each may bounce.")
});
719
/**
 * Validates that a bounce chance value is between 0 and 1 (inclusive).
 * Undefined values are accepted (the field is optional).
 *
 * @param value - The bounce chance value to validate
 * @param path$1 - Hierarchical path for error reporting
 * @throws SynodeError with INVALID_BOUNCE_CHANCE code if out of range
 */
function validateBounceChance(value, path$1) {
  if (value === void 0) return;
  if (value < 0 || value > 1) {
    throw new SynodeError({
      code: "INVALID_BOUNCE_CHANCE",
      message: `Bounce chance must be between 0 and 1, got ${String(value)}`,
      path: path$1,
      expected: "0 <= bounceChance <= 1",
      received: String(value)
    });
  }
}
736
/**
 * Validates that a time span has min <= max.
 * Undefined values are accepted (the field is optional).
 *
 * @param timeSpan - The time span to validate
 * @param path$1 - Hierarchical path for error reporting
 * @throws SynodeError with INVALID_TIME_SPAN code if min > max
 */
function validateTimeSpan(timeSpan, path$1) {
  if (timeSpan === void 0) return;
  const { min, max } = timeSpan;
  if (min > max) {
    throw new SynodeError({
      code: "INVALID_TIME_SPAN",
      message: `TimeSpan min (${String(min)}) must not exceed max (${String(max)})`,
      path: path$1,
      expected: "min <= max",
      received: `min=${String(min)}, max=${String(max)}`
    });
  }
}
753
/**
 * Validates that a suppression period has min <= max.
 * Undefined values are accepted (the field is optional).
 *
 * @param period - The suppression period to validate
 * @param path$1 - Hierarchical path for error reporting
 * @throws SynodeError with INVALID_SUPPRESSION_PERIOD code if min > max
 */
function validateSuppressionPeriod(period, path$1) {
  if (period === void 0) return;
  const { min, max } = period;
  if (min > max) {
    throw new SynodeError({
      code: "INVALID_SUPPRESSION_PERIOD",
      message: `Suppression period min (${String(min)}) must not exceed max (${String(max)})`,
      path: path$1,
      expected: "min <= max",
      received: `min=${String(min)}, max=${String(max)}`
    });
  }
}
770
/**
 * Detects circular dependencies starting from a journey using depth-first search.
 *
 * @param journeyId - The journey ID to start the search from
 * @param allJourneys - All journeys to check against
 * @throws SynodeError with CIRCULAR_DEPENDENCY code if a cycle is detected
 */
function detectCircularDeps(journeyId, allJourneys) {
  const journeyMap = new Map(allJourneys.map((j) => [j.id, j]));
  const visiting = new Set(); // nodes on the current DFS path
  const visited = new Set(); // fully explored nodes
  const walk = (currentId, chain) => {
    if (visiting.has(currentId)) {
      throw new SynodeError({
        code: "CIRCULAR_DEPENDENCY",
        message: `Circular dependency detected: ${[...chain, currentId].join(" -> ")}`,
        path: [journeyId]
      });
    }
    if (visited.has(currentId)) return;
    visiting.add(currentId);
    const requires = journeyMap.get(currentId)?.requires;
    if (requires) {
      for (const reqId of requires) walk(reqId, [...chain, currentId]);
    }
    visiting.delete(currentId);
    visited.add(currentId);
  };
  walk(journeyId, []);
}
797
/**
 * Validates the journey configuration including structural checks for bounce chances,
 * time spans, suppression periods, duplicate IDs, unknown references, and circular dependencies.
 *
 * @param config - The journey configuration to validate
 * @param allJourneys - Optional list of all journeys for cross-journey validation
 * @throws ZodError if basic schema validation fails
 * @throws SynodeError for structural validation failures
 */
function validateConfig(config, allJourneys) {
  // Basic shape first; structural checks assume the shape is valid.
  JourneySchema.parse(config);
  const journeyPath = [config.id];
  validateBounceChance(config.bounceChance, journeyPath);
  validateSuppressionPeriod(config.suppressionPeriod, journeyPath);
  const seenAdventures = new Set();
  for (const adventure of config.adventures) {
    const adventurePath = [...journeyPath, adventure.id];
    if (seenAdventures.has(adventure.id)) {
      throw new SynodeError({
        code: "DUPLICATE_ID",
        message: `Duplicate Adventure ID found: ${adventure.id}`,
        path: adventurePath
      });
    }
    seenAdventures.add(adventure.id);
    validateBounceChance(adventure.bounceChance, adventurePath);
    validateTimeSpan(adventure.timeSpan, adventurePath);
    const seenActions = new Set();
    for (const action of adventure.actions) {
      const actionPath = [...adventurePath, action.id];
      if (seenActions.has(action.id)) {
        throw new SynodeError({
          code: "DUPLICATE_ID",
          message: `Duplicate Action ID found in adventure '${adventure.id}': ${action.id}`,
          path: actionPath
        });
      }
      seenActions.add(action.id);
      validateBounceChance(action.bounceChance, actionPath);
      validateTimeSpan(action.timeSpan, actionPath);
    }
  }
  // Cross-journey checks only run when the full journey list is supplied.
  if (allJourneys && config.requires) {
    const availableIds = allJourneys.map((j) => j.id);
    for (const reqId of config.requires) {
      if (availableIds.includes(reqId)) continue;
      const suggestion = buildNotFoundSuggestion("journey", reqId, availableIds);
      throw new SynodeError({
        code: "UNKNOWN_JOURNEY_REF",
        message: `Unknown journey reference '${reqId}'`,
        path: journeyPath,
        suggestion
      });
    }
    detectCircularDeps(config.id, allJourneys);
  }
}
849
/**
 * Performs a dry run of the journey for a specified number of users and
 * collects every generated event in memory.
 *
 * @param journey - The journey to simulate (validated before running)
 * @param userCount - How many independent users to simulate (default 1)
 * @returns All events produced across every simulated user, in order
 */
async function dryRun(journey, userCount = 1) {
  validateConfig(journey);
  const collected = [];
  for (let user = 0; user < userCount; user++) {
    // A fresh Engine per user keeps simulations independent.
    const events = new Engine(journey).run();
    for await (const event of events) collected.push(event);
  }
  return collected;
}
862
+
863
+ //#endregion
864
+ //#region src/adapters/memory.ts
865
/**
 * Adapter that buffers every generated event in process memory.
 * Useful for tests and dry runs where events are inspected afterwards.
 *
 * @example
 * ```ts
 * const adapter = new InMemoryAdapter();
 * await generate(journey, { users: 10, adapter });
 * console.log(adapter.events.length);
 * ```
 */
var InMemoryAdapter = class {
  /** All events written so far, in arrival order. */
  events = [];
  /** @inheritdoc */
  write(event) {
    this.events.push(event);
  }
  /**
   * Discards every stored event (the array instance itself is reused).
   */
  clear() {
    this.events.splice(0, this.events.length);
  }
};
889
+
890
+ //#endregion
891
+ //#region src/adapters/callback.ts
892
/**
 * Adapter that hands each event to a caller-supplied function.
 * The callback may be synchronous or return a promise; `write` awaits it
 * either way.
 *
 * @example
 * ```ts
 * const events: Event[] = [];
 * const adapter = new CallbackAdapter((event) => events.push(event));
 * await generate(journey, { users: 10, adapter });
 * ```
 */
var CallbackAdapter = class {
  /**
   * @param callback - Invoked once per event; may return a promise.
   */
  constructor(callback) {
    this.callback = callback;
  }
  /** @inheritdoc */
  async write(event) {
    const outcome = this.callback(event);
    await outcome;
  }
};
912
+
913
+ //#endregion
914
+ //#region src/dataset-io.ts
915
/**
 * Serializes a dataset's rows to a string in the requested format.
 * Returns undefined for an unrecognized format (mirrors the switch
 * falling through).
 *
 * @param dataset - Dataset whose `rows` are serialized
 * @param format - One of "csv" | "json" | "jsonl"
 */
function exportDatasetToString(dataset, format) {
  if (format === "csv") return generateCSV(dataset);
  if (format === "json") return JSON.stringify(dataset.rows, null, 2);
  if (format === "jsonl") return generateJSONL(dataset);
}
925
/**
 * Parses a dataset from a string in the requested format.
 * Returns undefined for an unrecognized format.
 *
 * @param content - Raw serialized dataset
 * @param format - One of "csv" | "json" | "jsonl"
 * @param id - ID assigned to the imported dataset (default "imported")
 * @param name - Display name for the dataset (default "Imported Dataset")
 */
function importDatasetFromString(content, format, id = "imported", name = "Imported Dataset") {
  if (format === "csv") return parseCSVContent(id, name, content);
  if (format === "json") return parseJSONContent(id, name, content);
  if (format === "jsonl") return parseJSONLContent(id, name, content);
}
935
/**
 * Resolves a file path against a base directory and rejects paths that
 * escape it (e.g. via `..` segments).
 *
 * @param filePath - The path to validate (absolute or relative)
 * @param basePath - Base directory the path must stay inside (default: cwd)
 * @returns The fully resolved, validated absolute path
 * @throws Error when the resolved path lies outside the base directory
 */
function validateFilePath(filePath, basePath = process.cwd()) {
  const baseResolved = path.resolve(basePath);
  const resolved = path.resolve(basePath, filePath);
  // The base itself counts as inside; otherwise require a separator after
  // the base prefix so "/base-evil" cannot pass for base "/base".
  const insideBase = resolved === baseResolved || resolved.startsWith(baseResolved + path.sep);
  if (!insideBase) throw new Error(`Path traversal detected: '${filePath}' escapes base directory`);
  return resolved;
}
945
/**
 * Serializes a dataset and writes it to disk in the given format.
 * The target path is validated against the current working directory
 * before writing.
 *
 * @param dataset - Dataset to export
 * @param filePath - Destination file path (relative to cwd)
 * @param format - One of "csv" | "json" | "jsonl"
 * @throws Error when the path escapes the working directory
 */
async function exportDataset(dataset, filePath, format) {
  const destination = validateFilePath(filePath);
  const serialized = exportDatasetToString(dataset, format);
  await fs.writeFile(destination, serialized, "utf-8");
}
953
/**
 * Reads and parses a dataset file in the requested format.
 * Resolves to undefined for an unrecognized format.
 *
 * @param id - ID assigned to the imported dataset
 * @param name - Display name for the dataset
 * @param filePath - Source file path (validated against cwd by the helpers)
 * @param format - One of "csv" | "json" | "jsonl"
 */
async function importDataset(id, name, filePath, format) {
  if (format === "csv") return importFromCSV(id, name, filePath);
  if (format === "json") return importFromJSON(id, name, filePath);
  if (format === "jsonl") return importFromJSONL(id, name, filePath);
}
963
// Serializes a dataset to CSV. Column order comes from the first row's
// keys; null/undefined become empty cells, objects are JSON-encoded, and
// every cell goes through escapeCSVValue. Empty datasets yield "".
function generateCSV(dataset) {
  const { rows } = dataset;
  if (rows.length === 0) return "";
  const headers = Object.keys(rows[0]);
  const headerLine = headers.map((h) => escapeCSVValue(h)).join(",");
  const dataLines = rows.map((row) => headers.map((header) => {
    const cell = row[header];
    if (cell === null || cell === void 0) return escapeCSVValue("");
    if (typeof cell === "object") return escapeCSVValue(JSON.stringify(cell));
    const isPrimitive = typeof cell === "string" || typeof cell === "number" || typeof cell === "boolean";
    return escapeCSVValue(isPrimitive ? String(cell) : JSON.stringify(cell));
  }).join(","));
  return [headerLine, ...dataLines].join("\n");
}
979
// Serializes a dataset as JSON Lines: one JSON document per row,
// newline-separated, with no trailing newline.
function generateJSONL(dataset) {
  const lines = [];
  for (const row of dataset.rows) lines.push(JSON.stringify(row));
  return lines.join("\n");
}
982
// Parses a JSON dataset body. The content must be a JSON array of row
// objects; anything else (invalid JSON or a non-array value) raises an
// Error that names the dataset id.
function parseJSONContent(id, name, content) {
  let parsed;
  try {
    parsed = JSON.parse(content);
  } catch {
    throw new Error(`Failed to parse JSON dataset '${id}': invalid JSON`);
  }
  if (!Array.isArray(parsed)) throw new Error(`Failed to parse JSON dataset '${id}': expected an array of rows`);
  return { id, name, rows: parsed };
}
996
/**
 * Parses JSON Lines content: one JSON document per line, blank lines
 * skipped.
 *
 * Fix: error messages now report the 1-based line number within the
 * original content. The previous implementation filtered out blank lines
 * before numbering, so any blank line above a bad record shifted the
 * reported position.
 *
 * @param id - Dataset ID (used in error messages)
 * @param name - Dataset display name
 * @param content - Raw JSONL text
 * @throws Error when a non-blank line is not valid JSON
 */
function parseJSONLContent(id, name, content) {
  const rows = [];
  const lines = content.split("\n");
  for (let lineNo = 0; lineNo < lines.length; lineNo++) {
    const line = lines[lineNo];
    if (!line.trim()) continue;
    try {
      rows.push(JSON.parse(line));
    } catch {
      throw new Error(`Failed to parse JSONL dataset '${id}' at line ${String(lineNo + 1)}: invalid JSON`);
    }
  }
  return { id, name, rows };
}
1009
// Parses CSV text into a dataset. The first record supplies the header
// names; each later record becomes one row object. Missing trailing
// fields default to "".
function parseCSVContent(id, name, content) {
  const records = splitCSVLines(content);
  if (records.length === 0) return { id, name, rows: [] };
  const headers = parseCSVLine(records[0]);
  const rows = records.slice(1).map((record) => {
    const values = parseCSVLine(record);
    const row = {};
    headers.forEach((header, column) => {
      row[header] = values[column] ?? "";
    });
    return row;
  });
  return { id, name, rows };
}
1030
/**
 * Splits CSV content into record strings, respecting quoted values that
 * contain newlines. Records that are blank after trimming are dropped.
 *
 * Fix: CRLF ("\r\n") line endings outside quotes are now recognized as
 * record terminators. Previously the "\r" was kept, polluting the last
 * field of every record in Windows-produced files. A CR inside a quoted
 * field is still preserved verbatim.
 *
 * @param content - Raw CSV text
 * @returns One string per non-blank CSV record
 */
function splitCSVLines(content) {
  const lines = [];
  let currentLine = "";
  let inQuotes = false;
  const flush = () => {
    if (currentLine.trim()) lines.push(currentLine);
    currentLine = "";
  };
  for (let i = 0; i < content.length; i++) {
    const char = content[i];
    const nextChar = content[i + 1];
    if (char === "\"") {
      currentLine += char;
      if (inQuotes && nextChar === "\"") {
        // Escaped quote ("" inside a quoted field): consume both characters.
        currentLine += nextChar;
        i++;
      } else inQuotes = !inQuotes;
    } else if (char === "\r" && nextChar === "\n" && !inQuotes) {
      // CRLF record terminator: end the record without keeping the CR.
      flush();
      i++;
    } else if (char === "\n" && !inQuotes) {
      flush();
    } else currentLine += char;
  }
  flush();
  return lines;
}
1054
// Reads a CSV file (path validated against cwd) and parses it into a dataset.
async function importFromCSV(id, name, filePath) {
  const content = await fs.readFile(validateFilePath(filePath), "utf-8");
  return parseCSVContent(id, name, content);
}
1058
// Reads a JSON file (path validated against cwd) and parses it into a dataset.
async function importFromJSON(id, name, filePath) {
  const content = await fs.readFile(validateFilePath(filePath), "utf-8");
  return parseJSONContent(id, name, content);
}
1062
// Reads a JSONL file (path validated against cwd) and parses it into a dataset.
async function importFromJSONL(id, name, filePath) {
  const content = await fs.readFile(validateFilePath(filePath), "utf-8");
  return parseJSONLContent(id, name, content);
}
1066
// Escapes one CSV cell. Values starting with a formula/control character
// are prefixed with "'" to defuse spreadsheet formula injection; values
// containing commas, quotes, or newlines are wrapped in double quotes
// with embedded quotes doubled.
function escapeCSVValue(value) {
  const guarded = /^[=+\-@\t\r]/.test(value) ? `'${value}` : value;
  const needsQuoting = guarded.includes(",") || guarded.includes("\"") || guarded.includes("\n");
  return needsQuoting ? `"${guarded.replace(/"/g, "\"\"")}"` : guarded;
}
1071
// Splits a single CSV record into field strings. Handles quoted fields
// (commas inside quotes are literal) and doubled quotes as an escaped
// quote character. Always yields at least one field.
function parseCSVLine(line) {
  const fields = [];
  let field = "";
  let quoted = false;
  for (let i = 0; i < line.length; i++) {
    const char = line[i];
    if (char === "\"") {
      if (quoted && line[i + 1] === "\"") {
        // "" inside a quoted field is a literal quote; skip the second one.
        field += "\"";
        i++;
      } else {
        quoted = !quoted;
      }
    } else if (char === "," && !quoted) {
      fields.push(field);
      field = "";
    } else {
      field += char;
    }
  }
  fields.push(field);
  return fields;
}
1090
+
1091
+ //#endregion
1092
+ export { ActionSchema, AdventureSchema, CallbackAdapter, ConsoleAdapter, Engine, InMemoryAdapter, JourneySchema, SynodeContext, SynodeError, SynodeValidationError, TelemetryCollector, buildNotFoundSuggestion, chance, createValidationSummary, defineAction, defineAdventure, defineDataset, defineEventSchema, defineJourney, definePersona, dryRun, exportDataset, exportDatasetToString, fake, formatErrorMessage, generate, generateDataset, generateDelay, generatePersona, importDataset, importDatasetFromString, levenshtein, oneOf, shouldBounce, suggestClosest, validateBounceChance, validateConfig, validateEvent, validateFilePath, validateTimeSpan, weighted };
1093
+ //# sourceMappingURL=index.mjs.map