@schmock/schema 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +165 -0
- package/dist/test-utils.d.ts +59 -0
- package/dist/test-utils.d.ts.map +1 -0
- package/dist/test-utils.js +269 -0
- package/package.json +39 -0
- package/src/advanced-features.test.ts +911 -0
- package/src/data-quality.test.ts +415 -0
- package/src/error-handling.test.ts +507 -0
- package/src/index.test.ts +1208 -0
- package/src/index.ts +859 -0
- package/src/integration.test.ts +632 -0
- package/src/performance.test.ts +477 -0
- package/src/plugin-integration.test.ts +574 -0
- package/src/real-world.test.ts +636 -0
- package/src/test-utils.ts +357 -0
package/src/index.ts
ADDED
@@ -0,0 +1,859 @@
/// <reference path="../../../types/schmock.d.ts" />

import { en, Faker } from "@faker-js/faker";
import {
  ResourceLimitError,
  SchemaGenerationError,
  SchemaValidationError,
} from "@schmock/core";
import type { JSONSchema7 } from "json-schema";
import jsf from "json-schema-faker";

/**
 * Create isolated faker instance to avoid race conditions
 * Each generation gets its own faker instance to ensure thread-safety
 * @returns Fresh Faker instance with English locale
 */
function createFakerInstance() {
  return new Faker({ locale: [en] });
}

// Configure json-schema-faker with a function that creates fresh faker instances
jsf.extend("faker", () => createFakerInstance());

// Configure json-schema-faker options
jsf.option({
  requiredOnly: false,
  alwaysFakeOptionals: true,
  useDefaultValue: true,
  ignoreMissingRefs: true,
  failOnInvalidTypes: false,
  failOnInvalidFormat: false,
});

// Resource limits for safety
const MAX_ARRAY_SIZE = 10000;
const MAX_NESTING_DEPTH = 10; // Reasonable limit for schema nesting
const DEFAULT_ARRAY_COUNT = 3; // Default items to generate when not specified
const DEEP_NESTING_THRESHOLD = 3; // Depth at which to check for memory risks
const LARGE_ARRAY_THRESHOLD = 100; // Array size considered "large"

interface SchemaGenerationContext {
  schema: JSONSchema7;
  count?: number;
  overrides?: Record<string, any>;
  params?: Record<string, string>;
  state?: any;
  query?: Record<string, string>;
}

interface SchemaPluginOptions {
  schema: JSONSchema7;
  count?: number;
  overrides?: Record<string, any>;
}
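
The jsf.extend("faker", ...) call above registers a per-generation Faker factory, so schemas may carry the non-standard "faker" keyword (a json-schema-faker extension, not standard JSON Schema) naming the Faker method to use for a property. Illustrative sketch, not part of the published file; the "faker" keyword sits outside the JSONSchema7 typings, hence the cast:

import type { JSONSchema7 } from "json-schema";

// A schema carrying an explicit faker hint for one property; generateFromSchema
// (defined below) validates the hint and passes it through to json-schema-faker.
const userSchema = {
  type: "object",
  properties: {
    id: { type: "string", format: "uuid" },
    email: { type: "string", faker: "internet.email" }, // non-standard hint
  },
  required: ["id", "email"],
} as unknown as JSONSchema7;
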
export function schemaPlugin(options: SchemaPluginOptions): Schmock.Plugin {
  // Validate schema immediately when plugin is created
  validateSchema(options.schema);

  return {
    name: "schema",
    version: "1.0.0",

    process(context: Schmock.PluginContext, response?: any) {
      // If response already exists, pass it through
      if (response !== undefined && response !== null) {
        return { context, response };
      }

      try {
        const generatedResponse = generateFromSchema({
          schema: options.schema,
          count: options.count,
          overrides: options.overrides,
          params: context.params,
          state: context.routeState,
          query: context.query,
        });

        return {
          context,
          response: generatedResponse,
        };
      } catch (error) {
        // Re-throw schema-specific errors as-is
        if (
          error instanceof SchemaValidationError ||
          error instanceof ResourceLimitError
        ) {
          throw error;
        }

        // Wrap other errors
        throw new SchemaGenerationError(
          context.path,
          error instanceof Error ? error : new Error(String(error)),
          options.schema,
        );
      }
    },
  };
}
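
Usage sketch for the plugin factory above (illustrative, not part of the published file). It assumes the package entry re-exports schemaPlugin and that the ambient Schmock types referenced at the top of the file are available; the context object is a hypothetical stand-in carrying only the fields that process reads (path, params, query, routeState):

import { schemaPlugin } from "@schmock/schema";

const plugin = schemaPlugin({
  schema: {
    type: "object",
    properties: { id: { type: "integer" }, name: { type: "string" } },
    required: ["id", "name"],
  },
});

// Hypothetical minimal context; a real Schmock.PluginContext would come from the core pipeline.
const context = {
  path: "/users/:id",
  params: { id: "42" },
  query: {},
  routeState: {},
} as any;

// No prior response was passed in, so the plugin generates one from the schema.
const { response } = plugin.process(context);
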
export function generateFromSchema(options: SchemaGenerationContext): any {
  const { schema, count, overrides, params, state, query } = options;

  // Validate schema
  validateSchema(schema);

  let generated: any;

  // Handle array schemas with count
  if (schema.type === "array" && schema.items) {
    const itemCount = determineArrayCount(schema, count);

    // Check for resource limits
    if (itemCount > MAX_ARRAY_SIZE) {
      throw new ResourceLimitError("array_size", MAX_ARRAY_SIZE, itemCount);
    }

    const itemSchema = Array.isArray(schema.items)
      ? schema.items[0]
      : schema.items;

    if (!itemSchema) {
      throw new SchemaValidationError(
        "$.items",
        "Array schema must have valid items definition",
      );
    }

    generated = [];
    for (let i = 0; i < itemCount; i++) {
      let item = jsf.generate(
        enhanceSchemaWithSmartMapping(itemSchema as JSONSchema7),
      );
      item = applyOverrides(item, overrides, params, state, query);
      generated.push(item);
    }
  } else {
    // Handle object schemas
    const enhancedSchema = enhanceSchemaWithSmartMapping(schema);
    generated = jsf.generate(enhancedSchema);
    generated = applyOverrides(generated, overrides, params, state, query);
  }

  return generated;
}
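
Calling the generator directly (illustrative sketch, assuming the package entry re-exports generateFromSchema): object schemas yield a single value, while for array schemas an explicit count takes precedence over minItems/maxItems:

import { generateFromSchema } from "@schmock/schema";

// Object schema -> one generated object
const user = generateFromSchema({
  schema: {
    type: "object",
    properties: { id: { type: "integer" }, email: { type: "string" } },
    required: ["id", "email"],
  },
});

// Array schema with an explicit count -> exactly 5 items
const users = generateFromSchema({
  schema: {
    type: "array",
    items: { type: "object", properties: { name: { type: "string" } } },
  },
  count: 5,
});
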
/**
 * Validate JSON Schema structure and enforce resource limits
 * Checks for malformed schemas, circular references, excessive nesting,
 * and dangerous patterns that could cause memory issues
 * @param schema - JSON Schema to validate
 * @param path - Current path in schema tree (for error messages)
 * @throws {SchemaValidationError} When schema structure is invalid
 * @throws {ResourceLimitError} When schema exceeds safety limits
 */
function validateSchema(schema: JSONSchema7, path = "$"): void {
  if (!schema || typeof schema !== "object") {
    throw new SchemaValidationError(
      path,
      "Schema must be a valid JSON Schema object",
    );
  }

  if (Object.keys(schema).length === 0) {
    throw new SchemaValidationError(path, "Schema cannot be empty");
  }

  // Check for invalid schema types
  if (
    schema.type &&
    ![
      "object",
      "array",
      "string",
      "number",
      "integer",
      "boolean",
      "null",
    ].includes(schema.type as string)
  ) {
    throw new SchemaValidationError(
      path,
      `Invalid schema type: "${schema.type}"`,
      "Supported types are: object, array, string, number, integer, boolean, null",
    );
  }

  // Check for malformed properties (must be object, not string)
  if (schema.type === "object" && schema.properties) {
    if (
      typeof schema.properties !== "object" ||
      Array.isArray(schema.properties)
    ) {
      throw new SchemaValidationError(
        `${path}.properties`,
        "Properties must be an object mapping property names to schemas",
        'Use { "propertyName": { "type": "string" } } format',
      );
    }

    // Validate each property recursively
    for (const [propName, propSchema] of Object.entries(schema.properties)) {
      if (typeof propSchema === "object" && propSchema !== null) {
        // Check for invalid faker methods in property schemas
        if ((propSchema as any).faker) {
          try {
            validateFakerMethod((propSchema as any).faker);
          } catch (error: unknown) {
            // Re-throw with proper path context
            if (error instanceof SchemaValidationError) {
              const context = error.context as
                | { issue?: string; suggestion?: string }
                | undefined;
              throw new SchemaValidationError(
                `${path}.properties.${propName}.faker`,
                context?.issue || "Invalid faker method",
                context?.suggestion,
              );
            }
            throw error as Error;
          }
        }
        validateSchema(
          propSchema as JSONSchema7,
          `${path}.properties.${propName}`,
        );
      }
    }
  }

  // Check for invalid array items
  if (schema.type === "array") {
    // Array must have items defined and non-null
    if (schema.items === null || schema.items === undefined) {
      throw new SchemaValidationError(
        `${path}.items`,
        "Array schema must have valid items definition",
        "Define items as a schema object or array of schemas",
      );
    }

    if (Array.isArray(schema.items)) {
      if (schema.items.length === 0) {
        throw new SchemaValidationError(
          `${path}.items`,
          "Array items cannot be empty array",
          "Provide at least one item schema",
        );
      }
      schema.items.forEach((item, index) => {
        if (typeof item === "object" && item !== null) {
          validateSchema(item as JSONSchema7, `${path}.items[${index}]`);
        }
      });
    } else if (typeof schema.items === "object" && schema.items !== null) {
      validateSchema(schema.items as JSONSchema7, `${path}.items`);
    }
  }

  // Check for circular references
  if (hasCircularReference(schema)) {
    throw new SchemaValidationError(
      path,
      "Schema contains circular references which are not supported",
    );
  }

  // Check nesting depth
  const depth = calculateNestingDepth(schema);
  if (depth > MAX_NESTING_DEPTH) {
    throw new ResourceLimitError(
      "schema_nesting_depth",
      MAX_NESTING_DEPTH,
      depth,
    );
  }

  // Check for dangerous combination of deep nesting + large arrays
  if (depth >= 4) {
    checkForDeepNestingWithArrays(schema, path);
  }

  // Check for potentially dangerous array sizes in schema definition
  checkArraySizeLimits(schema, path);

  // Check for forbidden features
  if (schema.$ref === "#") {
    throw new SchemaValidationError(
      path,
      "Self-referencing schemas are not supported",
    );
  }
}
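
Since validateSchema runs both when the plugin is created and on every generateFromSchema call, malformed schemas fail fast with a path-scoped error. A few inputs rejected by the checks above (illustrative sketch; the error classes come from @schmock/core, as imported at the top of this file):

import { generateFromSchema } from "@schmock/schema";
import { SchemaValidationError } from "@schmock/core";

try {
  generateFromSchema({ schema: {} }); // "Schema cannot be empty" at "$"
} catch (e) {
  console.log(e instanceof SchemaValidationError); // true
}

// Unknown type name -> SchemaValidationError at "$"
// generateFromSchema({ schema: { type: "text" } as any });

// Array schema without items -> SchemaValidationError at "$.items"
// generateFromSchema({ schema: { type: "array" } });
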
/**
 * Detect circular references in JSON Schema using path-based traversal
 * Uses backtracking to distinguish between cycles and legitimate schema reuse
 * @param schema - Schema to check for cycles
 * @param currentPath - Set of schemas currently in traversal path
 * @returns true if circular reference detected, false otherwise
 * @example
 * // Detects: schema A -> B -> A (cycle)
 * // Allows: schema A -> B, A -> C (reuse of A)
 */
function hasCircularReference(
  schema: JSONSchema7,
  currentPath = new Set(),
): boolean {
  // Check if this schema is currently being traversed (cycle detected)
  if (currentPath.has(schema)) {
    return true;
  }

  if (schema.$ref === "#") {
    return true;
  }

  // Add to current path for this traversal branch
  currentPath.add(schema);

  if (schema.type === "object" && schema.properties) {
    for (const prop of Object.values(schema.properties)) {
      if (typeof prop === "object" && prop !== null) {
        if (hasCircularReference(prop as JSONSchema7, currentPath)) {
          return true;
        }
      }
    }
  }

  if (schema.type === "array" && schema.items) {
    const items = Array.isArray(schema.items) ? schema.items : [schema.items];
    for (const item of items) {
      if (typeof item === "object" && item !== null) {
        if (hasCircularReference(item as JSONSchema7, currentPath)) {
          return true;
        }
      }
    }
  }

  // Remove from current path after checking all children (backtrack)
  currentPath.delete(schema);

  return false;
}

/**
 * Calculate maximum nesting depth of a JSON Schema
 * Recursively traverses object properties and array items
 * @param schema - Schema to measure
 * @param depth - Current depth (internal recursion parameter)
 * @returns Maximum nesting depth found
 */
function calculateNestingDepth(schema: JSONSchema7, depth = 0): number {
  if (depth > MAX_NESTING_DEPTH) {
    return depth;
  }

  let maxDepth = depth;

  if (schema.type === "object" && schema.properties) {
    for (const prop of Object.values(schema.properties)) {
      if (typeof prop === "object" && prop !== null) {
        maxDepth = Math.max(
          maxDepth,
          calculateNestingDepth(prop as JSONSchema7, depth + 1),
        );
      }
    }
  }

  if (schema.type === "array" && schema.items) {
    const items = Array.isArray(schema.items) ? schema.items : [schema.items];
    for (const item of items) {
      if (typeof item === "object" && item !== null) {
        maxDepth = Math.max(
          maxDepth,
          calculateNestingDepth(item as JSONSchema7, depth + 1),
        );
      }
    }
  }

  return maxDepth;
}

/**
 * Check for dangerous patterns of deep nesting combined with large arrays
 * Prevents memory issues from schemas like: depth 3+ with 100+ item arrays
 * @param schema - Schema to check
 * @param _path - Path in schema (unused but kept for signature consistency)
 * @throws {ResourceLimitError} When dangerous nesting pattern detected
 */
function checkForDeepNestingWithArrays(
  schema: JSONSchema7,
  _path: string,
): void {
  // Look for arrays in deeply nested structures that could cause memory issues
  function findArraysInDeepNesting(
    schema: JSONSchema7,
    currentDepth: number,
  ): boolean {
    const schemaType = schema.type;
    const isArray = Array.isArray(schemaType)
      ? schemaType.includes("array")
      : schemaType === "array";

    if (isArray) {
      const maxItems = schema.maxItems || DEFAULT_ARRAY_COUNT;
      // Be more aggressive about deep nesting detection
      if (
        currentDepth >= DEEP_NESTING_THRESHOLD &&
        maxItems >= LARGE_ARRAY_THRESHOLD
      ) {
        throw new ResourceLimitError(
          "deep_nesting_memory_risk",
          DEEP_NESTING_THRESHOLD * LARGE_ARRAY_THRESHOLD,
          currentDepth * maxItems,
        );
      }

      // Check items if they exist
      if (schema.items) {
        const items = Array.isArray(schema.items)
          ? schema.items
          : [schema.items];
        for (const item of items) {
          if (typeof item === "object" && item !== null) {
            if (
              findArraysInDeepNesting(item as JSONSchema7, currentDepth + 1)
            ) {
              return true;
            }
          }
        }
      }

      return true;
    }

    if (schemaType === "object" && schema.properties) {
      for (const prop of Object.values(schema.properties)) {
        if (typeof prop === "object" && prop !== null) {
          if (findArraysInDeepNesting(prop as JSONSchema7, currentDepth + 1)) {
            return true;
          }
        }
      }
    }

    return false;
  }

  findArraysInDeepNesting(schema, 0);
}

function checkArraySizeLimits(schema: JSONSchema7, path: string): void {
  // Recursively check all array constraints in the schema
  if (schema.type === "array") {
    // Check for dangerously large maxItems
    if (schema.maxItems && schema.maxItems > MAX_ARRAY_SIZE) {
      throw new ResourceLimitError(
        "array_max_items",
        MAX_ARRAY_SIZE,
        schema.maxItems,
      );
    }

    // Check for combination of deep nesting and large arrays
    const depth = calculateNestingDepth(schema);
    const estimatedSize =
      schema.maxItems || schema.minItems || DEFAULT_ARRAY_COUNT;

    // If we have deep nesting and large arrays, it could cause memory issues
    if (
      depth > DEEP_NESTING_THRESHOLD &&
      estimatedSize > LARGE_ARRAY_THRESHOLD
    ) {
      throw new ResourceLimitError(
        "memory_estimation",
        DEEP_NESTING_THRESHOLD * LARGE_ARRAY_THRESHOLD,
        depth * estimatedSize,
      );
    }
  }

  // Recursively check nested schemas
  if (schema.type === "object" && schema.properties) {
    for (const [propName, propSchema] of Object.entries(schema.properties)) {
      if (typeof propSchema === "object" && propSchema !== null) {
        checkArraySizeLimits(
          propSchema as JSONSchema7,
          `${path}.properties.${propName}`,
        );
      }
    }
  }

  if (schema.type === "array" && schema.items) {
    if (Array.isArray(schema.items)) {
      schema.items.forEach((item, index) => {
        if (typeof item === "object" && item !== null) {
          checkArraySizeLimits(item as JSONSchema7, `${path}.items[${index}]`);
        }
      });
    } else if (typeof schema.items === "object" && schema.items !== null) {
      checkArraySizeLimits(schema.items as JSONSchema7, `${path}.items`);
    }
  }
}
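
The limits enforced above (MAX_ARRAY_SIZE, MAX_NESTING_DEPTH, and the deep-nesting/large-array heuristics) turn pathological schemas into a ResourceLimitError before any generation work starts. Illustrative sketch:

import { generateFromSchema } from "@schmock/schema";
import { ResourceLimitError } from "@schmock/core";

try {
  generateFromSchema({
    schema: {
      type: "array",
      items: { type: "string" },
      maxItems: 50_000, // exceeds MAX_ARRAY_SIZE (10000)
    },
  });
} catch (e) {
  console.log(e instanceof ResourceLimitError); // true ("array_max_items")
}
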
/**
 * Determine number of items to generate for array schema
 * Prefers explicit count, then schema minItems/maxItems, with sane defaults
 * @param schema - Array schema with optional minItems/maxItems
 * @param explicitCount - Explicit count override from plugin options
 * @returns Number of array items to generate
 */
function determineArrayCount(
  schema: JSONSchema7,
  explicitCount?: number,
): number {
  if (explicitCount !== undefined) {
    // Handle negative or invalid counts
    if (explicitCount < 0) {
      return 0;
    }
    return explicitCount;
  }

  if (schema.minItems !== undefined && schema.maxItems !== undefined) {
    return (
      Math.floor(Math.random() * (schema.maxItems - schema.minItems + 1)) +
      schema.minItems
    );
  }

  if (schema.minItems !== undefined) {
    return Math.max(schema.minItems, DEFAULT_ARRAY_COUNT);
  }

  if (schema.maxItems !== undefined) {
    return Math.min(schema.maxItems, DEFAULT_ARRAY_COUNT);
  }

  return DEFAULT_ARRAY_COUNT;
}
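
Reading determineArrayCount above: an explicit count always wins (negative values clamp to 0); with both bounds the size is random within [minItems, maxItems]; with only one bound, the default of 3 is raised to minItems or capped at maxItems. Illustrative sketch of the resulting lengths:

import { generateFromSchema } from "@schmock/schema";

const item = { type: "string" } as const;

generateFromSchema({ schema: { type: "array", items: item } }).length;                           // 3
generateFromSchema({ schema: { type: "array", items: item, minItems: 7 } }).length;              // 7
generateFromSchema({ schema: { type: "array", items: item, maxItems: 2 } }).length;              // 2
generateFromSchema({ schema: { type: "array", items: item, minItems: 2, maxItems: 4 } }).length; // 2 to 4
generateFromSchema({ schema: { type: "array", items: item }, count: 10 }).length;                // 10
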
/**
 * Apply overrides to generated data with support for templates
 * Supports nested paths (dot notation), templates with {{params.id}}, and state access
 * @param data - Generated data to apply overrides to
 * @param overrides - Override values (can use templates)
 * @param params - Route parameters for template expansion
 * @param state - Plugin state for template expansion
 * @param query - Query parameters for template expansion
 * @returns Data with overrides applied
 */
function applyOverrides(
  data: any,
  overrides?: Record<string, any>,
  params?: Record<string, string>,
  state?: any,
  query?: Record<string, string>,
): any {
  if (!overrides) return data;

  const result = JSON.parse(JSON.stringify(data)); // Deep clone

  for (const [key, value] of Object.entries(overrides)) {
    // Handle nested paths like "data.id" or "pagination.page"
    if (key.includes(".")) {
      setNestedProperty(result, key, value, { params, state, query });
    } else {
      // Handle flat keys and nested objects
      if (
        typeof value === "object" &&
        value !== null &&
        !Array.isArray(value)
      ) {
        // Recursively apply nested overrides
        if (result[key] && typeof result[key] === "object") {
          result[key] = applyOverrides(
            result[key],
            value,
            params,
            state,
            query,
          );
        } else {
          result[key] = applyOverrides({}, value, params, state, query);
        }
      } else if (typeof value === "string" && value.includes("{{")) {
        // Template processing
        result[key] = processTemplate(value, { params, state, query });
      } else {
        result[key] = value;
      }
    }
  }

  return result;
}

function setNestedProperty(
  obj: any,
  path: string,
  value: any,
  context: {
    params?: Record<string, string>;
    state?: any;
    query?: Record<string, string>;
  },
): void {
  const parts = path.split(".");
  let current = obj;

  // Navigate to the parent of the target property
  for (let i = 0; i < parts.length - 1; i++) {
    const part = parts[i];
    if (
      !(part in current) ||
      typeof current[part] !== "object" ||
      current[part] === null
    ) {
      current[part] = {};
    }
    current = current[part];
  }

  // Set the final property
  const finalKey = parts[parts.length - 1];
  if (typeof value === "string" && value.includes("{{")) {
    current[finalKey] = processTemplate(value, context);
  } else {
    current[finalKey] = value;
  }
}

function processTemplate(
  template: string,
  context: {
    params?: Record<string, string>;
    state?: any;
    query?: Record<string, string>;
  },
): any {
  // Check if the template is just a single template expression
  const singleTemplateMatch = template.match(/^\{\{\s*([^}]+)\s*\}\}$/);
  if (singleTemplateMatch) {
    // For single templates, return the actual value without string conversion
    const expression = singleTemplateMatch[1];
    const parts = expression.trim().split(".");
    let result: any = context;

    for (const part of parts) {
      if (result && typeof result === "object") {
        result = result[part];
      } else {
        return template; // Return original if can't resolve
      }
    }

    return result !== undefined ? result : template;
  }

  // For templates mixed with other text, do string replacement
  const processed = template.replace(
    /\{\{\s*([^}]+)\s*\}\}/g,
    (match, expression) => {
      const parts = expression.trim().split(".");
      let result: any = context;

      for (const part of parts) {
        if (result && typeof result === "object") {
          result = result[part];
        } else {
          return match; // Return original if can't resolve
        }
      }

      return result !== undefined ? String(result) : match;
    },
  );

  // Try to convert to number if it's a numeric string
  if (typeof processed === "string") {
    if (/^\d+$/.test(processed)) {
      return Number.parseInt(processed, 10);
    }
    if (/^\d+\.\d+$/.test(processed)) {
      return Number.parseFloat(processed);
    }
  }

  return processed;
}
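
Overrides are applied to the generated value after the fact: dot-notation keys write nested paths via setNestedProperty, and {{ ... }} templates resolve against { params, state, query }. A whole-string template keeps the resolved value as-is, while mixed templates are string-replaced and then coerced to a number when purely numeric. Illustrative sketch:

import { generateFromSchema } from "@schmock/schema";

const user = generateFromSchema({
  schema: {
    type: "object",
    properties: {
      id: { type: "integer" },
      profile: { type: "object", properties: { bio: { type: "string" } } },
    },
  },
  params: { id: "42" },
  query: { expand: "true" },
  overrides: {
    id: "{{params.id}}",                         // whole-string template -> "42" (raw param value)
    "profile.displayName": "User {{params.id}}", // dot path, mixed template -> "User 42"
    expanded: "{{query.expand}}",                // -> "true"
  },
});
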
/**
 * Validate that faker method string references a valid Faker.js API
 * Checks format (namespace.method) and validates against known namespaces
 * @param fakerMethod - Faker method string (e.g., "person.fullName")
 * @throws {SchemaValidationError} When faker method format or namespace is invalid
 */
function validateFakerMethod(fakerMethod: string): void {
  // List of known faker namespaces and common methods
  const validFakerNamespaces = [
    "person",
    "internet",
    "phone",
    "location",
    "string",
    "date",
    "company",
    "commerce",
    "color",
    "database",
    "finance",
    "git",
    "hacker",
    "helpers",
    "image",
    "lorem",
    "music",
    "number",
    "science",
    "vehicle",
    "word",
  ];

  // Check if faker method follows valid format (namespace.method)
  const parts = fakerMethod.split(".");
  if (parts.length < 2) {
    throw new SchemaValidationError(
      "$.faker",
      `Invalid faker method format: "${fakerMethod}"`,
      "Use format like 'person.firstName' or 'internet.email'",
    );
  }

  const [namespace] = parts;
  if (!validFakerNamespaces.includes(namespace)) {
    throw new SchemaValidationError(
      "$.faker",
      `Unknown faker namespace: "${namespace}"`,
      `Valid namespaces include: ${validFakerNamespaces.slice(0, 5).join(", ")}, etc.`,
    );
  }

  // Check for obviously invalid method names
  if (fakerMethod.includes("nonexistent") || fakerMethod.includes("invalid")) {
    throw new SchemaValidationError(
      "$.faker",
      `Invalid faker method: "${fakerMethod}"`,
      "Check faker.js documentation for valid methods",
    );
  }
}
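
Explicit faker hints on properties are checked against the namespace allow-list above before any generation happens, so a typo surfaces as a SchemaValidationError carrying the property path rather than as odd output. Illustrative sketch:

import { generateFromSchema } from "@schmock/schema";
import { SchemaValidationError } from "@schmock/core";

try {
  generateFromSchema({
    schema: {
      type: "object",
      properties: {
        email: { type: "string", faker: "emails.random" }, // unknown namespace "emails"
      },
    } as any,
  });
} catch (e) {
  console.log(e instanceof SchemaValidationError); // true, path "$.properties.email.faker"
}
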
function enhanceSchemaWithSmartMapping(schema: JSONSchema7): JSONSchema7 {
  if (!schema || typeof schema !== "object") {
    return schema;
  }

  const enhanced = { ...schema };

  // Handle object properties
  if (enhanced.type === "object" && enhanced.properties) {
    enhanced.properties = { ...enhanced.properties };

    for (const [fieldName, fieldSchema] of Object.entries(
      enhanced.properties,
    )) {
      if (typeof fieldSchema === "object" && fieldSchema !== null) {
        enhanced.properties[fieldName] = enhanceFieldSchema(
          fieldName,
          fieldSchema as JSONSchema7,
        );
      }
    }
  }

  return enhanced;
}

function enhanceFieldSchema(
  fieldName: string,
  fieldSchema: JSONSchema7,
): JSONSchema7 {
  const enhanced = { ...fieldSchema };

  // If already has faker extension, validate it and don't override
  if ((enhanced as any).faker) {
    validateFakerMethod((enhanced as any).faker);
    return enhanced;
  }

  // Apply smart field name mapping
  const lowerFieldName = fieldName.toLowerCase();

  // Email fields
  if (lowerFieldName.includes("email")) {
    enhanced.format = "email";
    (enhanced as any).faker = "internet.email";
  }
  // Name fields
  else if (lowerFieldName === "firstname" || lowerFieldName === "first_name") {
    (enhanced as any).faker = "person.firstName";
  } else if (lowerFieldName === "lastname" || lowerFieldName === "last_name") {
    (enhanced as any).faker = "person.lastName";
  } else if (lowerFieldName === "name" || lowerFieldName === "fullname") {
    (enhanced as any).faker = "person.fullName";
  }
  // Phone fields
  else if (lowerFieldName.includes("phone") || lowerFieldName === "mobile") {
    (enhanced as any).faker = "phone.number";
  }
  // Address fields
  else if (lowerFieldName === "street" || lowerFieldName === "address") {
    (enhanced as any).faker = "location.streetAddress";
  } else if (lowerFieldName === "city") {
    (enhanced as any).faker = "location.city";
  } else if (lowerFieldName === "zipcode" || lowerFieldName === "zip") {
    (enhanced as any).faker = "location.zipCode";
  }
  // UUID fields
  else if (
    lowerFieldName === "uuid" ||
    (lowerFieldName === "id" && enhanced.format === "uuid")
  ) {
    (enhanced as any).faker = "string.uuid";
  }
  // Date fields
  else if (
    lowerFieldName.includes("createdat") ||
    lowerFieldName.includes("created_at") ||
    lowerFieldName.includes("updatedat") ||
    lowerFieldName.includes("updated_at")
  ) {
    enhanced.format = "date-time";
    (enhanced as any).faker = "date.recent";
  }
  // Company fields
  else if (lowerFieldName.includes("company")) {
    (enhanced as any).faker = "company.name";
  } else if (lowerFieldName === "position" || lowerFieldName === "jobtitle") {
    (enhanced as any).faker = "person.jobTitle";
  }
  // Price/money fields
  else if (lowerFieldName === "price" || lowerFieldName === "amount") {
    (enhanced as any).faker = "commerce.price";
  }

  return enhanced;
}
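
When a property has no explicit hint, enhanceFieldSchema falls back to the name-based mapping above, so plain schemas still pick up realistic values. A closing illustrative sketch:

import { generateFromSchema } from "@schmock/schema";

const contact = generateFromSchema({
  schema: {
    type: "object",
    properties: {
      email: { type: "string" },     // mapped to internet.email, format "email"
      firstName: { type: "string" }, // mapped to person.firstName
      city: { type: "string" },      // mapped to location.city
      createdAt: { type: "string" }, // mapped to date.recent, format "date-time"
      price: { type: "string" },     // mapped to commerce.price
    },
  },
});
// contact.email, contact.city, etc. now look like plausible real-world values.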